2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 vlib_buffer_main_t *bm = vm->buffer_main;
166 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
167 vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
168 b->buffer_pool_index);
170 return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
/* NOTE(review): buffer_main is accessed as a pointer elsewhere in this
 * file (bm = vm->buffer_main), so no address-of here — confirm against
 * the vlib_main_t declaration. */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = _vmain->buffer_main;             \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
219 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
221 vlib_buffer_known_state_t
224 always_inline vlib_buffer_known_state_t
225 vlib_buffer_is_known (u32 buffer_index)
227 vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
229 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
230 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
231 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
232 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
236 vlib_buffer_set_known_state (u32 buffer_index,
237 vlib_buffer_known_state_t state)
239 vlib_buffer_main_t *bm = vlib_global_main.buffer_main;
241 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
242 hash_set (bm->buffer_known_hash, buffer_index, state);
243 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
246 /* Validates sanity of a single buffer.
247 Returns format'ed vector with error message if any. */
248 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
252 vlib_buffer_round_size (u32 size)
254 return round_pow2 (size, sizeof (vlib_buffer_t));
257 always_inline vlib_buffer_free_list_index_t
258 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
260 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
261 return b->free_list_index;
267 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
268 vlib_buffer_free_list_index_t index)
270 if (PREDICT_FALSE (index))
272 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
273 b->free_list_index = index;
276 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
279 /** \brief Allocate buffers from specific freelist into supplied array
281 @param vm - (vlib_main_t *) vlib main data structure pointer
282 @param buffers - (u32 * ) buffer index array
283 @param n_buffers - (u32) number of buffers requested
284 @return - (u32) number of buffers actually allocated, may be
285 less than the number requested or zero
288 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
291 vlib_buffer_free_list_index_t index)
293 vlib_buffer_main_t *bm = vm->buffer_main;
294 vlib_buffer_free_list_t *fl;
298 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
300 fl = pool_elt_at_index (bm->buffer_free_list_pool, index);
302 len = vec_len (fl->buffers);
304 if (PREDICT_FALSE (len < n_buffers))
306 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
307 len = vec_len (fl->buffers);
309 /* even if fill free list didn't manage to refill free list
310 we should give what we have */
311 n_buffers = clib_min (len, n_buffers);
313 /* following code is intentionaly duplicated to allow compiler
314 to optimize fast path when n_buffers is constant value */
315 src = fl->buffers + len - n_buffers;
316 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
317 _vec_len (fl->buffers) -= n_buffers;
319 /* Verify that buffers are known free. */
320 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
321 VLIB_BUFFER_KNOWN_FREE);
326 src = fl->buffers + len - n_buffers;
327 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
328 _vec_len (fl->buffers) -= n_buffers;
330 /* Verify that buffers are known free. */
331 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
332 VLIB_BUFFER_KNOWN_FREE);
337 /** \brief Allocate buffers into supplied array
339 @param vm - (vlib_main_t *) vlib main data structure pointer
340 @param buffers - (u32 * ) buffer index array
341 @param n_buffers - (u32) number of buffers requested
342 @return - (u32) number of buffers actually allocated, may be
343 less than the number requested or zero
346 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
348 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
349 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
352 /** \brief Allocate buffers into ring
354 @param vm - (vlib_main_t *) vlib main data structure pointer
355 @param buffers - (u32 * ) buffer index ring
356 @param start - (u32) first slot in the ring
357 @param ring_size - (u32) ring size
358 @param n_buffers - (u32) number of buffers requested
359 @return - (u32) number of buffers actually allocated, may be
360 less than the number requested or zero
363 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
364 u32 ring_size, u32 n_buffers)
368 ASSERT (n_buffers <= ring_size);
370 if (PREDICT_TRUE (start + n_buffers <= ring_size))
371 return vlib_buffer_alloc (vm, ring + start, n_buffers);
373 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
375 if (PREDICT_TRUE (n_alloc == ring_size - start))
376 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
381 /** \brief Free buffers
382 Frees the entire buffer chain for each buffer
384 @param vm - (vlib_main_t *) vlib main data structure pointer
385 @param buffers - (u32 * ) buffer index array
386 @param n_buffers - (u32) number of buffers to free
390 vlib_buffer_free (vlib_main_t * vm,
391 /* pointer to first buffer */
393 /* number of buffers to free */
396 vlib_buffer_main_t *bm = vm->buffer_main;
398 ASSERT (bm->cb.vlib_buffer_free_cb);
400 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
403 /** \brief Free buffers, does not free the buffer chain for each buffer
405 @param vm - (vlib_main_t *) vlib main data structure pointer
406 @param buffers - (u32 * ) buffer index array
407 @param n_buffers - (u32) number of buffers to free
411 vlib_buffer_free_no_next (vlib_main_t * vm,
412 /* pointer to first buffer */
414 /* number of buffers to free */
417 vlib_buffer_main_t *bm = vm->buffer_main;
419 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
421 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
424 /** \brief Free one buffer
425 Shorthand to free a single buffer chain.
427 @param vm - (vlib_main_t *) vlib main data structure pointer
428 @param buffer_index - (u32) buffer index to free
431 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
433 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
436 /* Add/delete buffer free lists. */
437 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
441 vlib_buffer_delete_free_list (vlib_main_t * vm,
442 vlib_buffer_free_list_index_t free_list_index)
444 vlib_buffer_main_t *bm = vm->buffer_main;
446 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
448 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
451 /* Find already existing public free list with given size or create one. */
452 vlib_buffer_free_list_index_t vlib_buffer_get_or_create_free_list (vlib_main_t
459 /* Merge two free lists */
460 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
461 vlib_buffer_free_list_t * src);
463 /* Make sure we have at least given number of unaligned buffers. */
464 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
465 vlib_buffer_free_list_t *
467 uword n_unaligned_buffers);
469 always_inline vlib_buffer_free_list_index_t
470 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
472 vlib_buffer_main_t *bm = vm->buffer_main;
474 size = vlib_buffer_round_size (size);
475 uword *p = hash_get (bm->free_list_by_size, size);
476 return p ? p[0] : ~0;
479 always_inline vlib_buffer_free_list_t *
480 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
481 vlib_buffer_free_list_index_t * index)
483 vlib_buffer_main_t *bm = vm->buffer_main;
484 vlib_buffer_free_list_index_t i;
486 *index = i = vlib_buffer_get_free_list_index (b);
487 return pool_elt_at_index (bm->buffer_free_list_pool, i);
490 always_inline vlib_buffer_free_list_t *
491 vlib_buffer_get_free_list (vlib_main_t * vm,
492 vlib_buffer_free_list_index_t free_list_index)
494 vlib_buffer_main_t *bm = vm->buffer_main;
495 vlib_buffer_free_list_t *f;
497 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
499 /* Sanity: indices must match. */
500 ASSERT (f->index == free_list_index);
506 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
507 vlib_buffer_free_list_index_t index)
509 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
510 return f->n_data_bytes;
513 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
515 /* Reasonably fast buffer copy routine. */
517 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
538 /* Append given data to end of buffer, possibly allocating new buffers. */
539 u32 vlib_buffer_add_data (vlib_main_t * vm,
540 vlib_buffer_free_list_index_t free_list_index,
541 u32 buffer_index, void *data, u32 n_data_bytes);
543 /* duplicate all buffers in chain */
544 always_inline vlib_buffer_t *
545 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
547 vlib_buffer_t *s, *d, *fd;
548 uword n_alloc, n_buffers = 1;
549 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
553 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
556 s = vlib_get_buffer (vm, s->next_buffer);
558 u32 new_buffers[n_buffers];
560 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
562 /* No guarantee that we'll get all the buffers we asked for */
563 if (PREDICT_FALSE (n_alloc < n_buffers))
566 vlib_buffer_free (vm, new_buffers, n_alloc);
572 fd = d = vlib_get_buffer (vm, new_buffers[0]);
573 d->current_data = s->current_data;
574 d->current_length = s->current_length;
575 d->flags = s->flags & flag_mask;
576 d->total_length_not_including_first_buffer =
577 s->total_length_not_including_first_buffer;
578 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
579 clib_memcpy (vlib_buffer_get_current (d),
580 vlib_buffer_get_current (s), s->current_length);
583 for (i = 1; i < n_buffers; i++)
586 d->next_buffer = new_buffers[i];
588 s = vlib_get_buffer (vm, s->next_buffer);
589 d = vlib_get_buffer (vm, new_buffers[i]);
590 d->current_data = s->current_data;
591 d->current_length = s->current_length;
592 clib_memcpy (vlib_buffer_get_current (d),
593 vlib_buffer_get_current (s), s->current_length);
594 d->flags = s->flags & flag_mask;
600 /** \brief Create a maximum of 256 clones of buffer and store them
601 in the supplied array
603 @param vm - (vlib_main_t *) vlib main data structure pointer
604 @param src_buffer - (u32) source buffer index
605 @param buffers - (u32 * ) buffer index array
606 @param n_buffers - (u16) number of buffer clones requested (<=256)
607 @param head_end_offset - (u16) offset relative to current position
608 where packet head ends
609 @return - (u16) number of buffers actually cloned, may be
610 less than the number requested or zero
613 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
614 u16 n_buffers, u16 head_end_offset)
617 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
619 ASSERT (s->n_add_refs == 0);
621 ASSERT (n_buffers <= 256);
623 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
625 buffers[0] = src_buffer;
626 for (i = 1; i < n_buffers; i++)
629 d = vlib_buffer_copy (vm, s);
632 buffers[i] = vlib_get_buffer_index (vm, d);
638 if (PREDICT_FALSE (n_buffers == 1))
640 buffers[0] = src_buffer;
644 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
645 vlib_buffer_get_free_list_index
648 for (i = 0; i < n_buffers; i++)
650 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
651 d->current_data = s->current_data;
652 d->current_length = head_end_offset;
653 vlib_buffer_set_free_list_index (d,
654 vlib_buffer_get_free_list_index (s));
655 d->total_length_not_including_first_buffer =
656 s->total_length_not_including_first_buffer + s->current_length -
658 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
659 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
660 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
661 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
663 d->next_buffer = src_buffer;
665 vlib_buffer_advance (s, head_end_offset);
666 s->n_add_refs = n_buffers - 1;
667 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
669 s = vlib_get_buffer (vm, s->next_buffer);
670 s->n_add_refs = n_buffers - 1;
676 /** \brief Create multiple clones of buffer and store them
677 in the supplied array
679 @param vm - (vlib_main_t *) vlib main data structure pointer
680 @param src_buffer - (u32) source buffer index
681 @param buffers - (u32 * ) buffer index array
682 @param n_buffers - (u16) number of buffer clones requested (<=256)
683 @param head_end_offset - (u16) offset relative to current position
684 where packet head ends
685 @return - (u16) number of buffers actually cloned, may be
686 less than the number requested or zero
689 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
690 u16 n_buffers, u16 head_end_offset)
692 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
695 while (n_buffers > 256)
698 copy = vlib_buffer_copy (vm, s);
699 n_cloned += vlib_buffer_clone_256 (vm,
700 vlib_get_buffer_index (vm, copy),
701 (buffers + n_cloned),
702 256, head_end_offset);
705 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
707 n_buffers, head_end_offset);
712 /** \brief Attach cloned tail to the buffer
714 @param vm - (vlib_main_t *) vlib main data structure pointer
715 @param head - (vlib_buffer_t *) head buffer
716 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
720 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
721 vlib_buffer_t * tail)
723 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
724 ASSERT (vlib_buffer_get_free_list_index (head) ==
725 vlib_buffer_get_free_list_index (tail));
727 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
728 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
729 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
730 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
731 head->next_buffer = vlib_get_buffer_index (vm, tail);
732 head->total_length_not_including_first_buffer = tail->current_length +
733 tail->total_length_not_including_first_buffer;
736 __sync_add_and_fetch (&tail->n_add_refs, 1);
738 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
740 tail = vlib_get_buffer (vm, tail->next_buffer);
745 /* Initializes the buffer as an empty packet with no chained buffers. */
747 vlib_buffer_chain_init (vlib_buffer_t * first)
749 first->total_length_not_including_first_buffer = 0;
750 first->current_length = 0;
751 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
752 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
755 /* The provided next_bi buffer index is appended to the end of the packet. */
756 always_inline vlib_buffer_t *
757 vlib_buffer_chain_buffer (vlib_main_t * vm,
758 vlib_buffer_t * first,
759 vlib_buffer_t * last, u32 next_bi)
761 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
762 last->next_buffer = next_bi;
763 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
764 next_buffer->current_length = 0;
765 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
769 /* Increases or decreases the packet length.
770 * It does not allocate or deallocate new buffers.
771 * Therefore, the added length must be compatible
772 * with the last buffer. */
774 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
775 vlib_buffer_t * last, i32 len)
777 last->current_length += len;
779 first->total_length_not_including_first_buffer += len;
782 /* Copy data to the end of the packet and increases its length.
783 * It does not allocate new buffers.
784 * Returns the number of copied bytes. */
786 vlib_buffer_chain_append_data (vlib_main_t * vm,
787 vlib_buffer_free_list_index_t free_list_index,
788 vlib_buffer_t * first,
789 vlib_buffer_t * last, void *data, u16 data_len)
792 vlib_buffer_free_list_buffer_size (vm, free_list_index);
793 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
794 u16 len = clib_min (data_len,
795 n_buffer_bytes - last->current_length -
797 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
799 vlib_buffer_chain_increase_length (first, last, len);
803 /* Copy data to the end of the packet and increases its length.
804 * Allocates additional buffers from the free list if necessary.
805 * Returns the number of copied bytes.
806 * 'last' value is modified whenever new buffers are allocated and
807 * chained and points to the last buffer in the chain. */
809 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
810 vlib_buffer_free_list_index_t
812 vlib_buffer_t * first,
813 vlib_buffer_t ** last, void *data,
815 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
817 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
818 format_vlib_buffer_contents;
822 /* Vector of packet data. */
825 /* Number of buffers to allocate in each call to physmem
827 u32 min_n_buffers_each_physmem_alloc;
829 /* Buffer free list for this template. */
830 vlib_buffer_free_list_index_t free_list_index;
833 } vlib_packet_template_t;
835 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
836 vlib_packet_template_t * t);
838 void vlib_packet_template_init (vlib_main_t * vm,
839 vlib_packet_template_t * t,
841 uword n_packet_data_bytes,
842 uword min_n_buffers_each_physmem_alloc,
845 void *vlib_packet_template_get_packet (vlib_main_t * vm,
846 vlib_packet_template_t * t,
850 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
852 vec_free (t->packet_data);
856 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
858 serialize_stream_t *s = &m->stream;
859 vlib_serialize_buffer_main_t *sm
860 = uword_to_pointer (m->stream.data_function_opaque,
861 vlib_serialize_buffer_main_t *);
862 vlib_main_t *vm = sm->vlib_main;
865 n = s->n_buffer_bytes - s->current_buffer_index;
866 if (sm->last_buffer != ~0)
868 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
869 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
871 b = vlib_get_buffer (vm, b->next_buffer);
872 n += b->current_length;
877 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
878 n += vlib_buffer_index_length_in_chain (vm, f[0]);
885 /* Set a buffer quickly into "uninitialized" state. We want this to
886 be extremely cheap and arrange for all fields that need to be
887 initialized to be in the first 128 bits of the buffer. */
889 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
890 vlib_buffer_free_list_t * fl)
892 vlib_buffer_t *src = &fl->buffer_init_template;
894 /* Make sure vlib_buffer_t is cacheline aligned and sized */
895 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
896 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
897 CLIB_CACHE_LINE_BYTES);
898 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
899 CLIB_CACHE_LINE_BYTES * 2);
901 /* Make sure buffer template is sane. */
902 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
904 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
905 STRUCT_MARK_PTR (src, template_start),
906 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
907 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
909 /* Not in the first 16 octets. */
910 dst->n_add_refs = src->n_add_refs;
911 vlib_buffer_set_free_list_index (dst, fl->index);
913 /* Make sure it really worked. */
914 #define _(f) ASSERT (dst->f == src->f);
919 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
920 /* total_length_not_including_first_buffer is not in the template anymore
921 * so it may actually not zeroed for some buffers. One option is to
922 * uncomment the line lower (comes at a cost), the other, is to just not
924 /* dst->total_length_not_including_first_buffer = 0; */
925 ASSERT (dst->n_add_refs == 0);
929 vlib_buffer_add_to_free_list (vlib_main_t * vm,
930 vlib_buffer_free_list_t * f,
931 u32 buffer_index, u8 do_init)
934 b = vlib_get_buffer (vm, buffer_index);
935 if (PREDICT_TRUE (do_init))
936 vlib_buffer_init_for_free_list (b, f);
937 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
939 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
941 vlib_buffer_free_list_t *mf;
942 mf = vlib_buffer_get_free_list (vlib_mains[0], f->index);
943 clib_spinlock_lock (&mf->global_buffers_lock);
944 /* keep last stored buffers, as they are more likely hot in the cache */
945 vec_add_aligned (mf->global_buffers, f->buffers, VLIB_FRAME_SIZE,
946 CLIB_CACHE_LINE_BYTES);
947 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
948 f->n_alloc -= VLIB_FRAME_SIZE;
949 clib_spinlock_unlock (&mf->global_buffers_lock);
954 extern u32 *vlib_buffer_state_validation_lock;
955 extern uword *vlib_buffer_state_validation_hash;
956 extern void *vlib_buffer_state_heap;
960 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
966 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
968 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
971 p = hash_get (vlib_buffer_state_validation_hash, b);
973 /* If we don't know about b, declare it to be in the expected state */
976 hash_set (vlib_buffer_state_validation_hash, b, expected);
980 if (p[0] != expected)
984 vlib_main_t *vm = &vlib_global_main;
988 bi = vlib_get_buffer_index (vm, b);
990 clib_mem_set_heap (oldheap);
991 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
992 vlib_time_now (vm), bi,
993 p[0] ? "busy" : "free", expected ? "busy" : "free");
997 CLIB_MEMORY_BARRIER ();
998 *vlib_buffer_state_validation_lock = 0;
999 clib_mem_set_heap (oldheap);
1004 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1009 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1011 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1014 hash_set (vlib_buffer_state_validation_hash, b, expected);
1016 CLIB_MEMORY_BARRIER ();
1017 *vlib_buffer_state_validation_lock = 0;
1018 clib_mem_set_heap (oldheap);
1022 /** minimum data size of first buffer in a buffer chain */
1023 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1026 * @brief compress buffer chain in a way where the first buffer is at least
1027 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1029 * @param[in] vm - vlib_main
1030 * @param[in,out] first - first buffer in chain
1031 * @param[in,out] discard_vector - vector of buffer indexes which were removed
1035 vlib_buffer_chain_compress (vlib_main_t * vm,
1036 vlib_buffer_t * first, u32 ** discard_vector)
1038 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1039 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1041 /* this is already big enough or not a chain */
1044 /* probe free list to find allocated buffer size to avoid overfill */
1045 vlib_buffer_free_list_index_t index;
1046 vlib_buffer_free_list_t *free_list =
1047 vlib_buffer_get_buffer_free_list (vm, first, &index);
1049 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1050 free_list->n_data_bytes -
1051 first->current_data);
1054 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1055 u32 need = want_first_size - first->current_length;
1056 u32 amount_to_copy = clib_min (need, second->current_length);
1057 clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
1058 first->current_length,
1059 vlib_buffer_get_current (second), amount_to_copy);
1060 first->current_length += amount_to_copy;
1061 vlib_buffer_advance (second, amount_to_copy);
1062 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1064 first->total_length_not_including_first_buffer -= amount_to_copy;
1066 if (!second->current_length)
1068 vec_add1 (*discard_vector, first->next_buffer);
1069 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1071 first->next_buffer = second->next_buffer;
1075 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1077 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1080 while ((first->current_length < want_first_size) &&
1081 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1084 #endif /* included_vlib_buffer_funcs_h */
1087 * fd.io coding-style-patch-verification: ON
1090 * eval: (c-set-style "gnu")