2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = &buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = &buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 vlib_buffer_main_t *bm = &buffer_main;
166 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
167 vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
168 b->buffer_pool_index);
170 return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
173 /** \brief Prefetch buffer metadata by buffer index
174 The first 64 bytes of buffer contains most header information
176 @param vm - (vlib_main_t *) vlib main data structure pointer
177 @param bi - (u32) buffer index
178 @param type - LOAD, STORE. In most cases, STORE is the right answer
180 /* Prefetch buffer header given index. */
181 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
183 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
184 vlib_prefetch_buffer_header (_b, type); \
188 /* Iterate over known allocated vlib bufs. You probably do not want
190 @param vm the vlib_main_t
191 @param bi found allocated buffer index
192 @param body operation to perform on buffer index
193 function executes body for each allocated buffer index
/* Iterate over every buffer index recorded as VLIB_BUFFER_KNOWN_ALLOCATED in
   the debug known-state hash, binding (bi) and running 'body' for each.
   NOTE(review): the closing continuation lines of this macro (body
   invocation and brace/paren closers) are not visible in this chunk --
   confirm against the full header before modifying. */
#define vlib_buffer_foreach_allocated(vm,bi,body) \
vlib_main_t * _vmain = (vm); \
vlib_buffer_main_t * _bmain = &_vmain->buffer_main; \
hash_pair_t * _vbpair; \
hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({ \
if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) { \
(bi) = _vbpair->key; \
/* Debug bookkeeping: per-buffer-index lifecycle state used by the
 * alloc/free validation machinery. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
219 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
221 vlib_buffer_known_state_t
224 always_inline vlib_buffer_known_state_t
225 vlib_buffer_is_known (u32 buffer_index)
227 vlib_buffer_main_t *bm = &buffer_main;
229 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
230 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
231 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
232 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
236 vlib_buffer_set_known_state (u32 buffer_index,
237 vlib_buffer_known_state_t state)
239 vlib_buffer_main_t *bm = &buffer_main;
241 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
242 hash_set (bm->buffer_known_hash, buffer_index, state);
243 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
246 /* Validates sanity of a single buffer.
247 Returns format'ed vector with error message if any. */
248 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
252 vlib_buffer_round_size (u32 size)
254 return round_pow2 (size, sizeof (vlib_buffer_t));
257 always_inline vlib_buffer_free_list_index_t
258 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
260 if (PREDICT_FALSE (b->flags & VLIB_BUFFER_NON_DEFAULT_FREELIST))
261 return b->free_list_index;
267 vlib_buffer_set_free_list_index (vlib_buffer_t * b,
268 vlib_buffer_free_list_index_t index)
270 if (PREDICT_FALSE (index))
272 b->flags |= VLIB_BUFFER_NON_DEFAULT_FREELIST;
273 b->free_list_index = index;
276 b->flags &= ~VLIB_BUFFER_NON_DEFAULT_FREELIST;
279 /** \brief Allocate buffers from specific freelist into supplied array
281 @param vm - (vlib_main_t *) vlib main data structure pointer
282 @param buffers - (u32 * ) buffer index array
283 @param n_buffers - (u32) number of buffers requested
284 @return - (u32) number of buffers actually allocated, may be
285 less than the number requested or zero
/* Allocate n_buffers buffer indices from free list 'index' into 'buffers'.
   Takes from the tail of the free-list vector; refills via the registered
   fill callback when the list runs short. Returns the number actually
   allocated, which may be fewer than requested (best-effort).
   NOTE(review): declarations (src, len) and the return statements are not
   visible in this chunk -- confirm against the full header. */
288 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
291 vlib_buffer_free_list_index_t index)
293 vlib_buffer_main_t *bm = &buffer_main;
294 vlib_buffer_free_list_t *fl;
298 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
300 fl = pool_elt_at_index (vm->buffer_free_list_pool, index);
302 len = vec_len (fl->buffers);
/* Slow path: not enough cached buffers; ask the callback to refill. */
304 if (PREDICT_FALSE (len < n_buffers))
306 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
307 if (PREDICT_FALSE ((len = vec_len (fl->buffers)) == 0))
310 /* even if fill free list didn't manage to refill free list
311 we should give what we have */
312 n_buffers = clib_min (len, n_buffers);
314 /* following code is intentionally duplicated to allow compiler
315 to optimize fast path when n_buffers is constant value */
316 src = fl->buffers + len - n_buffers;
317 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
318 _vec_len (fl->buffers) -= n_buffers;
320 /* Verify that buffers are known free. */
321 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
322 VLIB_BUFFER_KNOWN_FREE);
/* Fast path: free list already held enough buffers. */
327 src = fl->buffers + len - n_buffers;
328 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
329 _vec_len (fl->buffers) -= n_buffers;
331 /* Verify that buffers are known free. */
332 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
333 VLIB_BUFFER_KNOWN_FREE);
338 /** \brief Allocate buffers into supplied array
340 @param vm - (vlib_main_t *) vlib main data structure pointer
341 @param buffers - (u32 * ) buffer index array
342 @param n_buffers - (u32) number of buffers requested
343 @return - (u32) number of buffers actually allocated, may be
344 less than the number requested or zero
347 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
349 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
350 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
353 /** \brief Allocate buffers into ring
355 @param vm - (vlib_main_t *) vlib main data structure pointer
356 @param buffers - (u32 * ) buffer index ring
357 @param start - (u32) first slot in the ring
358 @param ring_size - (u32) ring size
359 @param n_buffers - (u32) number of buffers requested
360 @return - (u32) number of buffers actually allocated, may be
361 less than the number requested or zero
364 vlib_buffer_alloc_to_ring (vlib_main_t * vm, u32 * ring, u32 start,
365 u32 ring_size, u32 n_buffers)
369 ASSERT (n_buffers <= ring_size);
371 if (PREDICT_TRUE (start + n_buffers <= ring_size))
372 return vlib_buffer_alloc (vm, ring + start, n_buffers);
374 n_alloc = vlib_buffer_alloc (vm, ring + start, ring_size - start);
376 if (PREDICT_TRUE (n_alloc == ring_size - start))
377 n_alloc += vlib_buffer_alloc (vm, ring, n_buffers - n_alloc);
382 /** \brief Free buffers
383 Frees the entire buffer chain for each buffer
385 @param vm - (vlib_main_t *) vlib main data structure pointer
386 @param buffers - (u32 * ) buffer index array
387 @param n_buffers - (u32) number of buffers to free
391 vlib_buffer_free (vlib_main_t * vm,
392 /* pointer to first buffer */
394 /* number of buffers to free */
397 vlib_buffer_main_t *bm = &buffer_main;
399 ASSERT (bm->cb.vlib_buffer_free_cb);
401 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
404 /** \brief Free buffers, does not free the buffer chain for each buffer
406 @param vm - (vlib_main_t *) vlib main data structure pointer
407 @param buffers - (u32 * ) buffer index array
408 @param n_buffers - (u32) number of buffers to free
412 vlib_buffer_free_no_next (vlib_main_t * vm,
413 /* pointer to first buffer */
415 /* number of buffers to free */
418 vlib_buffer_main_t *bm = &buffer_main;
420 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
422 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
425 /** \brief Free one buffer
426 Shorthand to free a single buffer chain.
428 @param vm - (vlib_main_t *) vlib main data structure pointer
429 @param buffer_index - (u32) buffer index to free
432 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
434 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
437 /** \brief Free buffers from ring
439 @param vm - (vlib_main_t *) vlib main data structure pointer
440 @param buffers - (u32 * ) buffer index ring
441 @param start - (u32) first slot in the ring
442 @param ring_size - (u32) ring size
443 @param n_buffers - (u32) number of buffers
446 vlib_buffer_free_from_ring (vlib_main_t * vm, u32 * ring, u32 start,
447 u32 ring_size, u32 n_buffers)
449 ASSERT (n_buffers <= ring_size);
451 if (PREDICT_TRUE (start + n_buffers <= ring_size))
453 vlib_buffer_free (vm, ring + start, n_buffers);
457 vlib_buffer_free (vm, ring + start, ring_size - start);
458 vlib_buffer_free (vm, ring, n_buffers - (ring_size - start));
462 /** \brief Free buffers from ring without freeing tail buffers
464 @param vm - (vlib_main_t *) vlib main data structure pointer
465 @param buffers - (u32 * ) buffer index ring
466 @param start - (u32) first slot in the ring
467 @param ring_size - (u32) ring size
468 @param n_buffers - (u32) number of buffers
471 vlib_buffer_free_from_ring_no_next (vlib_main_t * vm, u32 * ring, u32 start,
472 u32 ring_size, u32 n_buffers)
474 ASSERT (n_buffers <= ring_size);
476 if (PREDICT_TRUE (start + n_buffers <= ring_size))
478 vlib_buffer_free (vm, ring + start, n_buffers);
482 vlib_buffer_free_no_next (vm, ring + start, ring_size - start);
483 vlib_buffer_free_no_next (vm, ring, n_buffers - (ring_size - start));
487 /* Add/delete buffer free lists. */
488 vlib_buffer_free_list_index_t vlib_buffer_create_free_list (vlib_main_t * vm,
492 vlib_buffer_delete_free_list (vlib_main_t * vm,
493 vlib_buffer_free_list_index_t free_list_index)
495 vlib_buffer_main_t *bm = &buffer_main;
497 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
499 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
502 /* Make sure we have at least given number of unaligned buffers. */
503 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
504 vlib_buffer_free_list_t *
506 uword n_unaligned_buffers);
508 always_inline vlib_buffer_free_list_t *
509 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
510 vlib_buffer_free_list_index_t * index)
512 vlib_buffer_free_list_index_t i;
514 *index = i = vlib_buffer_get_free_list_index (b);
515 return pool_elt_at_index (vm->buffer_free_list_pool, i);
518 always_inline vlib_buffer_free_list_t *
519 vlib_buffer_get_free_list (vlib_main_t * vm,
520 vlib_buffer_free_list_index_t free_list_index)
522 vlib_buffer_free_list_t *f;
524 f = pool_elt_at_index (vm->buffer_free_list_pool, free_list_index);
526 /* Sanity: indices must match. */
527 ASSERT (f->index == free_list_index);
533 vlib_buffer_free_list_buffer_size (vlib_main_t * vm,
534 vlib_buffer_free_list_index_t index)
536 vlib_buffer_free_list_t *f = vlib_buffer_get_free_list (vm, index);
537 return f->n_data_bytes;
540 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
542 /* Reasonably fast buffer copy routine. */
544 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
565 /* Append given data to end of buffer, possibly allocating new buffers. */
566 u32 vlib_buffer_add_data (vlib_main_t * vm,
567 vlib_buffer_free_list_index_t free_list_index,
568 u32 buffer_index, void *data, u32 n_data_bytes);
570 /* duplicate all buffers in chain */
571 always_inline vlib_buffer_t *
572 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
574 vlib_buffer_t *s, *d, *fd;
575 uword n_alloc, n_buffers = 1;
576 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
580 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
583 s = vlib_get_buffer (vm, s->next_buffer);
585 u32 new_buffers[n_buffers];
587 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
589 /* No guarantee that we'll get all the buffers we asked for */
590 if (PREDICT_FALSE (n_alloc < n_buffers))
593 vlib_buffer_free (vm, new_buffers, n_alloc);
599 fd = d = vlib_get_buffer (vm, new_buffers[0]);
600 d->current_data = s->current_data;
601 d->current_length = s->current_length;
602 d->flags = s->flags & flag_mask;
603 d->total_length_not_including_first_buffer =
604 s->total_length_not_including_first_buffer;
605 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
606 clib_memcpy (vlib_buffer_get_current (d),
607 vlib_buffer_get_current (s), s->current_length);
610 for (i = 1; i < n_buffers; i++)
613 d->next_buffer = new_buffers[i];
615 s = vlib_get_buffer (vm, s->next_buffer);
616 d = vlib_get_buffer (vm, new_buffers[i]);
617 d->current_data = s->current_data;
618 d->current_length = s->current_length;
619 clib_memcpy (vlib_buffer_get_current (d),
620 vlib_buffer_get_current (s), s->current_length);
621 d->flags = s->flags & flag_mask;
627 /** \brief Create a maximum of 256 clones of buffer and store them
628 in the supplied array
630 @param vm - (vlib_main_t *) vlib main data structure pointer
631 @param src_buffer - (u32) source buffer index
632 @param buffers - (u32 * ) buffer index array
633 @param n_buffers - (u16) number of buffer clones requested (<=256)
634 @param head_end_offset - (u16) offset relative to current position
635 where packet head ends
636 @return - (u16) number of buffers actually cloned, may be
637 less than the number requested or zero
/* Create up to 256 clones of src_buffer in 'buffers'. Each clone gets a
   private copy of the packet head (head_end_offset bytes) and shares the
   source's tail segments via reference counting (n_add_refs).
   Returns the number of clones actually produced.
   NOTE(review): declarations, brace structure and return statements are not
   visible in this chunk -- confirm against the full header. */
640 vlib_buffer_clone_256 (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
641 u16 n_buffers, u16 head_end_offset)
644 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* Source must not already be shared. */
646 ASSERT (s->n_add_refs == 0);
648 ASSERT (n_buffers <= 256);
/* Small packets: cheaper to deep-copy than to share the tail. */
650 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
652 buffers[0] = src_buffer;
653 for (i = 1; i < n_buffers; i++)
656 d = vlib_buffer_copy (vm, s);
659 buffers[i] = vlib_get_buffer_index (vm, d);
/* Single clone requested: hand back the source itself. */
665 if (PREDICT_FALSE (n_buffers == 1))
667 buffers[0] = src_buffer;
/* Allocate head buffers from the same free list as the source. */
671 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
672 vlib_buffer_get_free_list_index
/* Build each clone: private head, shared tail chained to src_buffer. */
675 for (i = 0; i < n_buffers; i++)
677 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
678 d->current_data = s->current_data;
679 d->current_length = head_end_offset;
680 vlib_buffer_set_free_list_index (d,
681 vlib_buffer_get_free_list_index (s));
682 d->total_length_not_including_first_buffer =
683 s->total_length_not_including_first_buffer + s->current_length -
685 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
686 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
687 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
688 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
690 d->next_buffer = src_buffer;
/* Source becomes the shared tail: skip past the copied head and bump
   reference counts on every segment. */
692 vlib_buffer_advance (s, head_end_offset);
693 s->n_add_refs = n_buffers - 1;
694 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
696 s = vlib_get_buffer (vm, s->next_buffer);
697 s->n_add_refs = n_buffers - 1;
703 /** \brief Create multiple clones of buffer and store them
704 in the supplied array
706 @param vm - (vlib_main_t *) vlib main data structure pointer
707 @param src_buffer - (u32) source buffer index
708 @param buffers - (u32 * ) buffer index array
709 @param n_buffers - (u16) number of buffer clones requested (<=256)
710 @param head_end_offset - (u16) offset relative to current position
711 where packet head ends
712 @return - (u16) number of buffers actually cloned, may be
713 less than the number requested or zero
716 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
717 u16 n_buffers, u16 head_end_offset)
719 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
722 while (n_buffers > 256)
725 copy = vlib_buffer_copy (vm, s);
726 n_cloned += vlib_buffer_clone_256 (vm,
727 vlib_get_buffer_index (vm, copy),
728 (buffers + n_cloned),
729 256, head_end_offset);
732 n_cloned += vlib_buffer_clone_256 (vm, src_buffer,
734 n_buffers, head_end_offset);
739 /** \brief Attach cloned tail to the buffer
741 @param vm - (vlib_main_t *) vlib main data structure pointer
742 @param head - (vlib_buffer_t *) head buffer
743 * @param tail - (vlib_buffer_t *) tail buffer to clone and attach to head
747 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
748 vlib_buffer_t * tail)
750 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
751 ASSERT (vlib_buffer_get_free_list_index (head) ==
752 vlib_buffer_get_free_list_index (tail));
754 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
755 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
756 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
757 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
758 head->next_buffer = vlib_get_buffer_index (vm, tail);
759 head->total_length_not_including_first_buffer = tail->current_length +
760 tail->total_length_not_including_first_buffer;
763 __sync_add_and_fetch (&tail->n_add_refs, 1);
765 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
767 tail = vlib_get_buffer (vm, tail->next_buffer);
772 /* Initializes the buffer as an empty packet with no chained buffers. */
774 vlib_buffer_chain_init (vlib_buffer_t * first)
776 first->total_length_not_including_first_buffer = 0;
777 first->current_length = 0;
778 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
779 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
782 /* The provided next_bi buffer index is appended to the end of the packet. */
783 always_inline vlib_buffer_t *
784 vlib_buffer_chain_buffer (vlib_main_t * vm,
785 vlib_buffer_t * first,
786 vlib_buffer_t * last, u32 next_bi)
788 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
789 last->next_buffer = next_bi;
790 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
791 next_buffer->current_length = 0;
792 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
796 /* Increases or decreases the packet length.
797 * It does not allocate or deallocate new buffers.
798 * Therefore, the added length must be compatible
799 * with the last buffer. */
801 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
802 vlib_buffer_t * last, i32 len)
804 last->current_length += len;
806 first->total_length_not_including_first_buffer += len;
809 /* Copy data to the end of the packet and increases its length.
810 * It does not allocate new buffers.
811 * Returns the number of copied bytes. */
813 vlib_buffer_chain_append_data (vlib_main_t * vm,
814 vlib_buffer_free_list_index_t free_list_index,
815 vlib_buffer_t * first,
816 vlib_buffer_t * last, void *data, u16 data_len)
819 vlib_buffer_free_list_buffer_size (vm, free_list_index);
820 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
821 u16 len = clib_min (data_len,
822 n_buffer_bytes - last->current_length -
824 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
826 vlib_buffer_chain_increase_length (first, last, len);
830 /* Copy data to the end of the packet and increases its length.
831 * Allocates additional buffers from the free list if necessary.
832 * Returns the number of copied bytes.
833 * 'last' value is modified whenever new buffers are allocated and
834 * chained and points to the last buffer in the chain. */
836 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
837 vlib_buffer_free_list_index_t
839 vlib_buffer_t * first,
840 vlib_buffer_t ** last, void *data,
842 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
844 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
845 format_vlib_buffer_contents;
/* Template for stamping out pre-built packets.
   NOTE(review): the struct's opening line and at least one field are not
   visible in this chunk -- confirm the full declaration in the header. */
849 /* Vector of packet data. */
852 /* Number of buffers to allocate in each call to allocator. */
853 u32 min_n_buffers_each_alloc;
855 /* Buffer free list for this template. */
856 vlib_buffer_free_list_index_t free_list_index;
859 } vlib_packet_template_t;
861 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
862 vlib_packet_template_t * t);
864 void vlib_packet_template_init (vlib_main_t * vm,
865 vlib_packet_template_t * t,
867 uword n_packet_data_bytes,
868 uword min_n_buffers_each_alloc,
871 void *vlib_packet_template_get_packet (vlib_main_t * vm,
872 vlib_packet_template_t * t,
876 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
878 vec_free (t->packet_data);
/* Count bytes available for unserializing: the remainder of the current
   stream buffer, the rest of the last partially-consumed chain, and every
   chain still queued in the rx buffer fifo.
   NOTE(review): declarations of n and f and the return are not visible in
   this chunk -- confirm against the full header. */
882 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
884 serialize_stream_t *s = &m->stream;
885 vlib_serialize_buffer_main_t *sm
886 = uword_to_pointer (m->stream.data_function_opaque,
887 vlib_serialize_buffer_main_t *);
888 vlib_main_t *vm = sm->vlib_main;
/* Unread bytes in the current stream buffer. */
891 n = s->n_buffer_bytes - s->current_buffer_index;
/* Remaining segments of the last buffer being consumed. */
892 if (sm->last_buffer != ~0)
894 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
895 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
897 b = vlib_get_buffer (vm, b->next_buffer);
898 n += b->current_length;
/* Whole chains still queued for rx. */
903 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
904 n += vlib_buffer_index_length_in_chain (vm, f[0]);
911 /* Set a buffer quickly into "uninitialized" state. We want this to
912 be extremely cheap and arrange for all fields that need to be
913 initialized to be in the first 128 bits of the buffer. */
/* Set a buffer quickly into "uninitialized" state by stamping the free
   list's init template over the buffer's template region, then fixing up
   the fields that live outside that region.
   NOTE(review): the foreach_vlib_buffer_field-style expansion following the
   #define _(f) line is not visible in this chunk -- confirm against the
   full header. */
915 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
916 vlib_buffer_free_list_t * fl)
918 vlib_buffer_t *src = &fl->buffer_init_template;
920 /* Make sure vlib_buffer_t is cacheline aligned and sized */
921 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
922 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
923 CLIB_CACHE_LINE_BYTES);
924 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
925 CLIB_CACHE_LINE_BYTES * 2);
926 
927 /* Make sure buffer template is sane. */
928 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
/* Bulk-copy the [template_start, template_end) metadata region. */
930 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
931 STRUCT_MARK_PTR (src, template_start),
932 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
933 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
935 /* Not in the first 16 octets. */
936 dst->n_add_refs = src->n_add_refs;
937 vlib_buffer_set_free_list_index (dst, fl->index);
939 /* Make sure it really worked. */
940 #define _(f) ASSERT (dst->f == src->f);
945 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
946 /* total_length_not_including_first_buffer is not in the template anymore
947 * so it may actually not be zeroed for some buffers. One option is to
948 * uncomment the line lower (comes at a cost), the other, is to just not
950 /* dst->total_length_not_including_first_buffer = 0; */
951 ASSERT (dst->n_add_refs == 0);
955 vlib_buffer_add_to_free_list (vlib_main_t * vm,
956 vlib_buffer_free_list_t * f,
957 u32 buffer_index, u8 do_init)
959 vlib_buffer_pool_t *bp = vlib_buffer_pool_get (f->buffer_pool_index);
961 b = vlib_get_buffer (vm, buffer_index);
962 if (PREDICT_TRUE (do_init))
963 vlib_buffer_init_for_free_list (b, f);
964 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
966 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
968 clib_spinlock_lock (&bp->lock);
969 /* keep last stored buffers, as they are more likely hot in the cache */
970 vec_add_aligned (bp->buffers, f->buffers, VLIB_FRAME_SIZE,
971 CLIB_CACHE_LINE_BYTES);
972 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
973 f->n_alloc -= VLIB_FRAME_SIZE;
974 clib_spinlock_unlock (&bp->lock);
979 extern u32 *vlib_buffer_state_validation_lock;
980 extern uword *vlib_buffer_state_validation_hash;
981 extern void *vlib_buffer_state_heap;
/* Debug check: verify that buffer b is in the 'expected' busy/free state
   per the state-validation hash; warn (and record the expected state for
   unknown buffers) otherwise. Serialized by a test-and-set spinlock, with
   hash operations performed on the dedicated validation heap.
   NOTE(review): surrounding CLIB_DEBUG preprocessor guards, declarations
   and control-flow lines are not visible in this chunk -- confirm against
   the full header. */
985 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
/* Hash lives on its own heap; switch while touching it. */
991 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
993 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
996 p = hash_get (vlib_buffer_state_validation_hash, b);
998 /* If we don't know about b, declare it to be in the expected state */
1001 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* State mismatch: report it. */
1005 if (p[0] != expected)
1007 void cj_stop (void);
1009 vlib_main_t *vm = &vlib_global_main;
1013 bi = vlib_get_buffer_index (vm, b);
1015 clib_mem_set_heap (oldheap);
1016 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
1017 vlib_time_now (vm), bi,
1018 p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Release the lock and restore the caller's heap. */
1022 CLIB_MEMORY_BARRIER ();
1023 *vlib_buffer_state_validation_lock = 0;
1024 clib_mem_set_heap (oldheap);
/* Debug helper: unconditionally record buffer b's busy/free state in the
   state-validation hash, under the same spinlock/heap discipline as
   vlib_validate_buffer_in_use.
   NOTE(review): surrounding CLIB_DEBUG preprocessor guards and the
   declaration of oldheap are not visible in this chunk -- confirm against
   the full header. */
1029 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
1034 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
1036 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
1039 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Release the lock and restore the caller's heap. */
1041 CLIB_MEMORY_BARRIER ();
1042 *vlib_buffer_state_validation_lock = 0;
1043 clib_mem_set_heap (oldheap);
1047 /** minimum data size of first buffer in a buffer chain */
1048 #define VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE (256)
1051 * @brief compress buffer chain in a way where the first buffer is at least
1052 * VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE long
1054 * @param[in] vm - vlib_main
1055 * @param[in,out] first - first buffer in chain
1056 * @param[in,out] discard_vector - vector of buffer indexes which were removed
/* Pull data forward from the second segment into the first until the first
   segment holds at least VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE bytes (or as
   much as the buffer can hold). Fully-drained segments are unlinked and
   their indices appended to *discard_vector for the caller to free.
   NOTE(review): the do/while loop opening, else branches and early return
   are not visible in this chunk -- confirm against the full header. */
1060 vlib_buffer_chain_compress (vlib_main_t * vm,
1061 vlib_buffer_t * first, u32 ** discard_vector)
1063 if (first->current_length >= VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE ||
1064 !(first->flags & VLIB_BUFFER_NEXT_PRESENT))
1066 /* this is already big enough or not a chain */
1069 /* probe free list to find allocated buffer size to avoid overfill */
1070 vlib_buffer_free_list_index_t index;
1071 vlib_buffer_free_list_t *free_list =
1072 vlib_buffer_get_buffer_free_list (vm, first, &index);
/* Target size is capped by the space actually available in 'first'. */
1074 u32 want_first_size = clib_min (VLIB_BUFFER_CHAIN_MIN_FIRST_DATA_SIZE,
1075 free_list->n_data_bytes -
1076 first->current_data);
/* Move bytes from the second segment into the first. */
1079 vlib_buffer_t *second = vlib_get_buffer (vm, first->next_buffer);
1080 u32 need = want_first_size - first->current_length;
1081 u32 amount_to_copy = clib_min (need, second->current_length);
1082 clib_memcpy (((u8 *) vlib_buffer_get_current (first)) +
1083 first->current_length,
1084 vlib_buffer_get_current (second), amount_to_copy);
1085 first->current_length += amount_to_copy;
1086 vlib_buffer_advance (second, amount_to_copy);
/* Moved bytes now count as head bytes, not tail bytes. */
1087 if (first->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
1089 first->total_length_not_including_first_buffer -= amount_to_copy;
/* Second segment drained: unlink it and queue it for discard. */
1091 if (!second->current_length)
1093 vec_add1 (*discard_vector, first->next_buffer)_
1094 if (second->flags & VLIB_BUFFER_NEXT_PRESENT)
1096 first->next_buffer = second->next_buffer;
1100 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1102 second->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1105 while ((first->current_length < want_first_size) &&
1106 (first->flags & VLIB_BUFFER_NEXT_PRESENT));
1109 #endif /* included_vlib_buffer_funcs_h */
1112 * fd.io coding-style-patch-verification: ON
1115 * eval: (c-set-style "gnu")