2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer.c: allocate/free network buffers.
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
44 * Allocate/free network buffers.
48 #include <rte_config.h>
50 #include <rte_common.h>
52 #include <rte_memory.h>
53 #include <rte_memzone.h>
54 #include <rte_tailq.h>
56 #include <rte_per_lcore.h>
57 #include <rte_launch.h>
58 #include <rte_atomic.h>
59 #include <rte_cycles.h>
60 #include <rte_prefetch.h>
61 #include <rte_lcore.h>
62 #include <rte_per_lcore.h>
63 #include <rte_branch_prediction.h>
64 #include <rte_interrupts.h>
66 #include <rte_random.h>
67 #include <rte_debug.h>
68 #include <rte_ether.h>
69 #include <rte_ethdev.h>
71 #include <rte_mempool.h>
73 #include <rte_version.h>
76 #include <vlib/vlib.h>
/* DPDK symbols are declared weak so this file links even when DPDK is
   absent; callers test the symbol before use (see the
   "rte_socket_id ? rte_socket_id () : 0" guard and the
   "if (!rte_pktmbuf_pool_create)" check later in this file). */
79 #pragma weak rte_mem_virt2phy
80 #pragma weak rte_eal_has_hugepages
81 #pragma weak rte_socket_id
82 #pragma weak rte_pktmbuf_pool_create
86 vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
87 vlib_buffer_t * b_first)
89 vlib_buffer_t *b = b_first;
90 uword l_first = b_first->current_length;
92 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
94 b = vlib_get_buffer (vm, b->next_buffer);
95 l += b->current_length;
97 b_first->total_length_not_including_first_buffer = l;
98 b_first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* format callback: render a vlib_buffer_t's metadata (and, in the
   chain-walking variant, every segment) into the format vector 's'.
   NOTE(review): this fragment appears to contain TWO variants of the
   function body back to back — the second restarts at the duplicated
   "current data" format below; the preprocessor conditionals that
   originally selected between them look to have been lost in
   extraction. Confirm against upstream before editing. */
103 format_vlib_buffer (u8 * s, va_list * args)
105 vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);
107 uword indent = format_get_indent (s);
109 s = format (s, "current data %d, length %d, free-list %d",
110 b->current_data, b->current_length, b->free_list_index);
112 if (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID)
113 s = format (s, ", totlen-nifb %d",
114 b->total_length_not_including_first_buffer);
116 if (b->flags & VLIB_BUFFER_IS_TRACED)
117 s = format (s, ", trace 0x%x", b->trace_index);
/* Walk and print each subsequent segment of the chain. */
119 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
121 vlib_main_t *vm = vlib_get_main ();
122 u32 next_buffer = b->next_buffer;
123 b = vlib_get_buffer (vm, next_buffer);
125 s = format (s, "\n%Unext-buffer 0x%x, segment length %d",
126 format_white_space, indent, next_buffer, b->current_length);
/* NOTE(review): second (non-chain-walking) variant appears to begin
   here. */
131 s = format (s, "current data %d, length %d, free-list %d",
132 b->current_data, b->current_length, b->free_list_index);
134 if (b->flags & VLIB_BUFFER_IS_TRACED)
135 s = format (s, ", trace 0x%x", b->trace_index);
137 if (b->flags & VLIB_BUFFER_NEXT_PRESENT)
138 s = format (s, ", next-buffer 0x%x", b->next_buffer);
145 format_vlib_buffer_and_data (u8 * s, va_list * args)
147 vlib_buffer_t *b = va_arg (*args, vlib_buffer_t *);
149 s = format (s, "%U, %U",
150 format_vlib_buffer, b,
151 format_hex_bytes, vlib_buffer_get_current (b), 64);
158 format_vlib_buffer_known_state (u8 * s, va_list * args)
160 vlib_buffer_known_state_t state = va_arg (*args, vlib_buffer_known_state_t);
165 case VLIB_BUFFER_UNKNOWN:
169 case VLIB_BUFFER_KNOWN_ALLOCATED:
170 t = "known-allocated";
173 case VLIB_BUFFER_KNOWN_FREE:
182 return format (s, "%s", t);
187 format_vlib_buffer_contents (u8 * s, va_list * va)
189 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
190 vlib_buffer_t *b = va_arg (*va, vlib_buffer_t *);
194 vec_add (s, vlib_buffer_get_current (b), b->current_length);
195 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
197 b = vlib_get_buffer (vm, b->next_buffer);
/* Validate one buffer: free-list index, current_data/current_length
   bounds, and (when follow_buffer_next is set) the rest of the chain,
   optionally checking chain uniqueness via 'unique_hash'.
   Returns 0 on success, else a formatted error string.
   NOTE(review): several lines (braces, 'msg'/'result' declarations,
   the bi parameter line, the final return) are missing from this
   extraction — line-level structure is unreliable. */
205 vlib_validate_buffer_helper (vlib_main_t * vm,
207 uword follow_buffer_next, uword ** unique_hash)
209 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
210 vlib_buffer_main_t *bm = vm->buffer_main;
211 vlib_buffer_free_list_t *fl;
213 if (pool_is_free_index (bm->buffer_free_list_pool, b->free_list_index))
214 return format (0, "unknown free list 0x%x", b->free_list_index);
216 fl = pool_elt_at_index (bm->buffer_free_list_pool, b->free_list_index);
/* current_data may legitimately be negative, down to -PRE_DATA_SIZE. */
218 if ((signed) b->current_data < (signed) -VLIB_BUFFER_PRE_DATA_SIZE)
219 return format (0, "current data %d before pre-data", b->current_data);
221 if (b->current_data + b->current_length > fl->n_data_bytes)
222 return format (0, "%d-%d beyond end of buffer %d",
223 b->current_data, b->current_length, fl->n_data_bytes);
226 if (follow_buffer_next && (b->flags & VLIB_BUFFER_NEXT_PRESENT))
228 vlib_buffer_known_state_t k;
231 k = vlib_buffer_is_known (vm, b->next_buffer);
232 if (k != VLIB_BUFFER_KNOWN_ALLOCATED)
233 return format (0, "next 0x%x: %U",
234 b->next_buffer, format_vlib_buffer_known_state, k);
/* Duplicate detection across the chain, when a hash was supplied. */
238 if (hash_get (*unique_hash, b->next_buffer))
239 return format (0, "duplicate buffer 0x%x", b->next_buffer);
241 hash_set1 (*unique_hash, b->next_buffer);
/* Recurse down the chain; wrap any nested error with our context. */
244 msg = vlib_validate_buffer (vm, b->next_buffer, follow_buffer_next);
247 result = format (0, "next 0x%x: %v", b->next_buffer, msg);
257 vlib_validate_buffer (vlib_main_t * vm, u32 bi, uword follow_buffer_next)
259 return vlib_validate_buffer_helper (vm, bi, follow_buffer_next,
260 /* unique_hash */ 0);
/* Validate an array of buffer indices (with stride): uniqueness,
   expected known-state, then per-buffer checks via the helper.
   Returns 0 on success, else a formatted error string naming the
   offending buffer index.
   NOTE(review): loop braces, the 'buffers'/'n_buffers' parameter
   lines, 'hash'/'i' declarations and the cleanup/return path are
   missing from this extraction. */
264 vlib_validate_buffers (vlib_main_t * vm,
266 uword next_buffer_stride,
268 vlib_buffer_known_state_t known_state,
269 uword follow_buffer_next)
272 u32 bi, *b = buffers;
273 vlib_buffer_known_state_t k;
274 u8 *msg = 0, *result = 0;
276 hash = hash_create (0, 0);
277 for (i = 0; i < n_buffers; i++)
280 b += next_buffer_stride;
282 /* Buffer is not unique. */
283 if (hash_get (hash, bi))
285 msg = format (0, "not unique");
289 k = vlib_buffer_is_known (vm, bi);
290 if (k != known_state)
292 msg = format (0, "is %U; expected %U",
293 format_vlib_buffer_known_state, k,
294 format_vlib_buffer_known_state, known_state);
298 msg = vlib_validate_buffer_helper (vm, bi, follow_buffer_next, &hash);
302 hash_set1 (hash, bi);
308 result = format (0, "0x%x: %v", bi, msg);
/* Per-thread vlib_main_t pointers; index 0 is the main thread. */
316 vlib_main_t **vlib_mains;
319 /* When dubugging validate that given buffers are either known allocated
/* Debug aid: assert every buffer is in 'expected_state' and flip each
   to the opposite known-state (allocated <-> free).
   NOTE(review): braces, the buffers/n_buffers parameter lines and the
   error-reporting call around the "%s %U buffer 0x%x" format are
   missing from this extraction. */
322 vlib_buffer_validate_alloc_free (vlib_main_t * vm,
325 vlib_buffer_known_state_t expected_state)
328 uword i, bi, is_free;
/* Known-state bookkeeping is main-thread only. */
333 ASSERT (os_get_cpu_number () == 0);
335 /* smp disaster check */
337 ASSERT (vm == vlib_mains[0]);
/* Buffers currently allocated are about to be freed, and vice versa. */
339 is_free = expected_state == VLIB_BUFFER_KNOWN_ALLOCATED;
341 for (i = 0; i < n_buffers; i++)
343 vlib_buffer_known_state_t known;
347 known = vlib_buffer_is_known (vm, bi);
348 if (known != expected_state)
352 (vm, "%s %U buffer 0x%x",
353 is_free ? "freeing" : "allocating",
354 format_vlib_buffer_known_state, known, bi);
357 vlib_buffer_set_known_state
359 is_free ? VLIB_BUFFER_KNOWN_FREE : VLIB_BUFFER_KNOWN_ALLOCATED);
/* Number of u32 buffer indices moved per vlib_copy_unit_t copy. */
364 #define BUFFERS_PER_COPY (sizeof (vlib_copy_unit_t) / sizeof (u32))
366 /* Make sure we have at least given number of unaligned buffers. */
368 fill_unaligned (vlib_main_t * vm,
369 vlib_buffer_free_list_t * free_list,
370 uword n_unaligned_buffers)
372 word la = vec_len (free_list->aligned_buffers);
373 word lu = vec_len (free_list->unaligned_buffers);
375 /* Aligned come in aligned copy-sized chunks. */
376 ASSERT (la % BUFFERS_PER_COPY == 0);
378 ASSERT (la >= n_unaligned_buffers);
380 while (lu < n_unaligned_buffers)
382 /* Copy 4 buffers from end of aligned vector to unaligned vector. */
383 vec_add (free_list->unaligned_buffers,
384 free_list->aligned_buffers + la - BUFFERS_PER_COPY,
386 la -= BUFFERS_PER_COPY;
387 lu += BUFFERS_PER_COPY;
389 _vec_len (free_list->aligned_buffers) = la;
392 /* After free aligned buffers may not contain even sized chunks. */
394 trim_aligned (vlib_buffer_free_list_t * f)
398 /* Add unaligned to aligned before trim. */
399 l = vec_len (f->unaligned_buffers);
402 vec_add_aligned (f->aligned_buffers, f->unaligned_buffers, l,
403 /* align */ sizeof (vlib_copy_unit_t));
405 _vec_len (f->unaligned_buffers) = 0;
408 /* Remove unaligned buffers from end of aligned vector and save for next trim. */
409 l = vec_len (f->aligned_buffers);
410 n_trim = l % BUFFERS_PER_COPY;
413 /* Trim aligned -> unaligned. */
414 vec_add (f->unaligned_buffers, f->aligned_buffers + l - n_trim, n_trim);
416 /* Remove from aligned. */
417 _vec_len (f->aligned_buffers) = l - n_trim;
422 merge_free_lists (vlib_buffer_free_list_t * dst,
423 vlib_buffer_free_list_t * src)
431 l = vec_len (src->aligned_buffers);
434 vec_add2_aligned (dst->aligned_buffers, d, l,
435 /* align */ sizeof (vlib_copy_unit_t));
436 clib_memcpy (d, src->aligned_buffers, l * sizeof (d[0]));
437 vec_free (src->aligned_buffers);
440 l = vec_len (src->unaligned_buffers);
443 vec_add (dst->unaligned_buffers, src->unaligned_buffers, l);
444 vec_free (src->unaligned_buffers);
449 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
451 vlib_buffer_main_t *bm = vm->buffer_main;
453 size = vlib_buffer_round_size (size);
454 uword *p = hash_get (bm->free_list_by_size, size);
455 return p ? p[0] : ~0;
458 /* Add buffer free list. */
/* Create a buffer free list (optionally public and/or the default
   list), register it in the by-size hash, and mirror its pool slot
   into every worker thread's buffer main.
   NOTE(review): this fragment appears to contain TWO merged variants
   of the function body back to back — the body restarts at the second
   "if (!is_default ..." below, with differing
   min_n_buffers_each_physmem_alloc values (16 vs 256); the
   preprocessor conditionals that selected between them look to have
   been lost in extraction. Confirm against upstream before editing. */
460 vlib_buffer_create_free_list_helper (vlib_main_t * vm,
462 u32 is_public, u32 is_default, u8 * name)
464 vlib_buffer_main_t *bm = vm->buffer_main;
465 vlib_buffer_free_list_t *f;
469 ASSERT (os_get_cpu_number () == 0);
/* Bootstrap: the very first list created must be the default list so
   it lands at VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX. */
471 if (!is_default && pool_elts (bm->buffer_free_list_pool) == 0)
473 u32 default_free_free_list_index;
476 default_free_free_list_index =
477 vlib_buffer_create_free_list_helper
479 /* default buffer size */ VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES,
484 ASSERT (default_free_free_list_index ==
485 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
487 if (n_data_bytes == VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES && is_public)
488 return default_free_free_list_index;
491 pool_get_aligned (bm->buffer_free_list_pool, f, CLIB_CACHE_LINE_BYTES);
493 memset (f, 0, sizeof (f[0]));
494 f->index = f - bm->buffer_free_list_pool;
495 f->n_data_bytes = vlib_buffer_round_size (n_data_bytes);
496 f->min_n_buffers_each_physmem_alloc = 16;
/* Take ownership of 'name' only when it is already a heap vector. */
497 f->name = clib_mem_is_heap_object (name) ? name : format (0, "%s", name);
499 /* Setup free buffer template. */
500 f->buffer_init_template.free_list_index = f->index;
504 uword *p = hash_get (bm->free_list_by_size, f->n_data_bytes);
506 hash_set (bm->free_list_by_size, f->n_data_bytes, f->index);
/* Mirror the new list into each worker's pool at the same index. */
509 for (i = 1; i < vec_len (vlib_mains); i++)
511 vlib_buffer_main_t *wbm = vlib_mains[i]->buffer_main;
512 vlib_buffer_free_list_t *wf;
513 pool_get_aligned (wbm->buffer_free_list_pool,
514 wf, CLIB_CACHE_LINE_BYTES);
515 ASSERT (f - bm->buffer_free_list_pool ==
516 wf - wbm->buffer_free_list_pool);
518 wf->aligned_buffers = 0;
519 wf->unaligned_buffers = 0;
/* NOTE(review): second variant of the same function body appears to
   begin here. */
524 if (!is_default && pool_elts (bm->buffer_free_list_pool) == 0)
526 u32 default_free_free_list_index;
528 default_free_free_list_index = vlib_buffer_create_free_list_helper (vm,
529 /* default buffer size */
530 VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES,
538 ASSERT (default_free_free_list_index ==
539 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
541 if (n_data_bytes == VLIB_BUFFER_DEFAULT_FREE_LIST_BYTES && is_public)
542 return default_free_free_list_index;
545 pool_get_aligned (bm->buffer_free_list_pool, f, CLIB_CACHE_LINE_BYTES);
547 memset (f, 0, sizeof (f[0]));
548 f->index = f - bm->buffer_free_list_pool;
549 f->n_data_bytes = vlib_buffer_round_size (n_data_bytes);
550 f->min_n_buffers_each_physmem_alloc = 256;
551 f->name = clib_mem_is_heap_object (name) ? name : format (0, "%s", name);
553 /* Setup free buffer template. */
554 f->buffer_init_template.free_list_index = f->index;
558 uword *p = hash_get (bm->free_list_by_size, f->n_data_bytes);
560 hash_set (bm->free_list_by_size, f->n_data_bytes, f->index);
/* Public wrapper: build the printf-style name, then create a free
   list via the helper.
   NOTE(review): the va_start/va_end lines and the helper call's
   trailing arguments are missing from this extraction. */
568 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
575 name = va_format (0, fmt, &va);
578 return vlib_buffer_create_free_list_helper (vm, n_data_bytes,
/* Return an existing free list of matching (rounded) size or create a
   new one with the given printf-style name.
   NOTE(review): the branch structure, va_start/va_end lines and the
   return are missing from this extraction. */
585 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
588 u32 i = vlib_buffer_get_free_list_with_size (vm, n_data_bytes);
596 name = va_format (0, fmt, &va);
599 i = vlib_buffer_create_free_list_helper (vm, n_data_bytes,
/* Release every buffer held by free list 'f' back to its backing
   store and free the list's own vectors.
   NOTE(review): the first two loops return buffers to DPDK mbuf pools
   (rte_pktmbuf_free) while the os_physmem_free loop frees physmem
   chunks; these appear to be two build variants whose preprocessor
   conditionals were lost in extraction, along with the declarations
   of 'i', 'b', 'mb' and loop braces. */
609 del_free_list (vlib_main_t * vm, vlib_buffer_free_list_t * f)
616 for (i = 0; i < vec_len (f->unaligned_buffers); i++)
618 b = vlib_get_buffer (vm, f->unaligned_buffers[i]);
619 mb = rte_mbuf_from_vlib_buffer (b);
/* Refcount must be exactly 1: we hold the only reference. */
620 ASSERT (rte_mbuf_refcnt_read (mb) == 1);
621 rte_pktmbuf_free (mb);
623 for (i = 0; i < vec_len (f->aligned_buffers); i++)
625 b = vlib_get_buffer (vm, f->aligned_buffers[i]);
626 mb = rte_mbuf_from_vlib_buffer (b);
627 ASSERT (rte_mbuf_refcnt_read (mb) == 1);
628 rte_pktmbuf_free (mb);
/* Non-DPDK path: free the physmem chunks recorded at fill time. */
633 for (i = 0; i < vec_len (f->buffer_memory_allocated); i++)
634 vm->os_physmem_free (f->buffer_memory_allocated[i]);
636 vec_free (f->buffer_memory_allocated);
638 vec_free (f->unaligned_buffers);
639 vec_free (f->aligned_buffers);
642 /* Add buffer free list. */
/* Delete a free list: merge its buffers into another list of the same
   size when one exists, otherwise release them; poison and recycle
   the pool slot, including the mirrored per-worker slots.
   NOTE(review): two merged variants of this function appear back to
   back (the second restarts at the repeated vlib_buffer_get_free_list
   call below); preprocessor conditionals, 'i'/'merge_index'
   declarations and braces are missing from this extraction. Also note
   the stray double semicolon on the per-worker lookup line. */
644 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
646 vlib_buffer_main_t *bm = vm->buffer_main;
647 vlib_buffer_free_list_t *f;
652 ASSERT (os_get_cpu_number () == 0);
654 f = vlib_buffer_get_free_list (vm, free_list_index);
656 merge_index = vlib_buffer_get_free_list_with_size (vm, f->n_data_bytes);
657 if (merge_index != ~0 && merge_index != free_list_index)
659 merge_free_lists (pool_elt_at_index (bm->buffer_free_list_pool,
661 del_free_list (vm, f);
/* 0xab poison makes use-after-delete obvious in a debugger. */
666 memset (f, 0xab, sizeof (f[0]));
668 pool_put (bm->buffer_free_list_pool, f);
670 for (i = 1; i < vec_len (vlib_mains); i++)
672 bm = vlib_mains[i]->buffer_main;
673 f = vlib_buffer_get_free_list (vlib_mains[i], free_list_index);;
674 memset (f, 0xab, sizeof (f[0]));
675 pool_put (bm->buffer_free_list_pool, f);
/* NOTE(review): second variant appears to begin here. */
679 f = vlib_buffer_get_free_list (vm, free_list_index);
681 ASSERT (vec_len (f->unaligned_buffers) + vec_len (f->aligned_buffers) ==
683 merge_index = vlib_buffer_get_free_list_with_size (vm, f->n_data_bytes);
684 if (merge_index != ~0 && merge_index != free_list_index)
686 merge_free_lists (pool_elt_at_index (bm->buffer_free_list_pool,
690 del_free_list (vm, f);
693 memset (f, 0xab, sizeof (f[0]));
695 pool_put (bm->buffer_free_list_pool, f);
699 /* Make sure free list has at least given number of free buffers. */
/* Returns the number of free buffers now available (or the request
   when already satisfied). Two merged build variants appear below:
   the first bulk-allocates mbufs from the per-socket DPDK mempool
   (rte_mempool_get_bulk); the second — restarting at the repeated
   "Already have enough" check — allocates raw buffer memory via
   os_physmem_alloc_aligned in chunks. NOTE(review): the preprocessor
   conditionals separating them, plus many braces and declarations,
   are missing from this extraction. */
701 fill_free_list (vlib_main_t * vm,
702 vlib_buffer_free_list_t * fl, uword min_free_buffers)
708 u32 n_remaining = 0, n_alloc = 0;
/* rte_socket_id is weak; fall back to socket 0 when not linked. */
709 unsigned socket_id = rte_socket_id ? rte_socket_id () : 0;
710 struct rte_mempool *rmp = vm->buffer_main->pktmbuf_pools[socket_id];
714 if (PREDICT_FALSE (rmp == 0))
719 /* Already have enough free buffers on free list? */
720 n = min_free_buffers - vec_len (fl->aligned_buffers);
722 return min_free_buffers;
724 /* Always allocate round number of buffers. */
725 n = round_pow2 (n, BUFFERS_PER_COPY);
727 /* Always allocate new buffers in reasonably large sized chunks. */
728 n = clib_max (n, fl->min_n_buffers_each_physmem_alloc);
730 vec_validate (vm->mbuf_alloc_list, n - 1);
732 if (rte_mempool_get_bulk (rmp, vm->mbuf_alloc_list, n) < 0)
735 _vec_len (vm->mbuf_alloc_list) = n;
737 for (i = 0; i < n; i++)
739 mb = vm->mbuf_alloc_list[i];
/* Fresh from the pool: refcount 0; take our reference. */
741 ASSERT (rte_mbuf_refcnt_read (mb) == 0);
742 rte_mbuf_refcnt_set (mb, 1);
744 b = vlib_buffer_from_rte_mbuf (mb);
745 bi = vlib_get_buffer_index (vm, b);
747 vec_add1_aligned (fl->aligned_buffers, bi, sizeof (vlib_copy_unit_t));
751 vlib_buffer_init_for_free_list (b, fl);
753 if (fl->buffer_init_function)
754 fl->buffer_init_function (vm, fl, &bi, 1);
/* NOTE(review): non-DPDK variant appears to begin here. */
761 vlib_buffer_t *buffers, *b;
764 u32 n_remaining, n_alloc, n_this_chunk;
768 /* Already have enough free buffers on free list? */
769 n = min_free_buffers - vec_len (fl->aligned_buffers);
771 return min_free_buffers;
773 /* Always allocate round number of buffers. */
774 n = round_pow2 (n, BUFFERS_PER_COPY);
776 /* Always allocate new buffers in reasonably large sized chunks. */
777 n = clib_max (n, fl->min_n_buffers_each_physmem_alloc);
781 while (n_remaining > 0)
783 n_this_chunk = clib_min (n_remaining, 16);
785 n_bytes = n_this_chunk * (sizeof (b[0]) + fl->n_data_bytes);
787 /* drb: removed power-of-2 ASSERT */
788 buffers = vm->os_physmem_alloc_aligned (&vm->physmem_main,
790 sizeof (vlib_buffer_t));
794 /* Record chunk as being allocated so we can free it later. */
795 vec_add1 (fl->buffer_memory_allocated, buffers);
797 fl->n_alloc += n_this_chunk;
798 n_alloc += n_this_chunk;
799 n_remaining -= n_this_chunk;
802 vec_add2_aligned (fl->aligned_buffers, bi, n_this_chunk,
803 sizeof (vlib_copy_unit_t));
804 for (i = 0; i < n_this_chunk; i++)
806 bi[i] = vlib_get_buffer_index (vm, b);
809 vlib_buffer_set_known_state (vm, bi[i], VLIB_BUFFER_KNOWN_FREE);
810 b = vlib_buffer_next_contiguous (b, fl->n_data_bytes);
813 memset (buffers, 0, n_bytes);
815 /* Initialize all new buffers. */
817 for (i = 0; i < n_this_chunk; i++)
819 vlib_buffer_init_for_free_list (b, fl);
820 b = vlib_buffer_next_contiguous (b, fl->n_data_bytes);
823 if (fl->buffer_init_function)
824 fl->buffer_init_function (vm, fl, bi, n_this_chunk);
831 copy_alignment (u32 * x)
833 return (pointer_to_uword (x) / sizeof (x[0])) % BUFFERS_PER_COPY;
/* Allocate up to 'n_alloc_buffers' buffer indices from 'free_list'
   into 'alloc_buffers': refill the list, copy odd leading/trailing
   counts from the unaligned vector and full copy-unit chunks from the
   aligned vector. Returns the number actually allocated.
   NOTE(review): numerous lines (braces, 'dst'/'u_src'/'u_len'
   declarations, the unaligned and fast-path copy loop bodies) are
   missing from this extraction — structure is unreliable. */
837 alloc_from_free_list (vlib_main_t * vm,
838 vlib_buffer_free_list_t * free_list,
839 u32 * alloc_buffers, u32 n_alloc_buffers)
843 uword n_unaligned_start, n_unaligned_end, n_filled;
/* Allocation is main-thread only. */
846 ASSERT (os_get_cpu_number () == 0);
849 n_left = n_alloc_buffers;
/* How many leading copies are needed before dst becomes aligned. */
851 n_unaligned_start = ((BUFFERS_PER_COPY - copy_alignment (dst))
852 & (BUFFERS_PER_COPY - 1));
854 n_filled = fill_free_list (vm, free_list, n_alloc_buffers);
858 n_left = n_filled < n_left ? n_filled : n_left;
859 n_alloc_buffers = n_left;
861 if (n_unaligned_start >= n_left)
863 n_unaligned_start = n_left;
867 n_unaligned_end = copy_alignment (dst + n_alloc_buffers);
869 fill_unaligned (vm, free_list, n_unaligned_start + n_unaligned_end);
871 u_len = vec_len (free_list->unaligned_buffers);
872 u_src = free_list->unaligned_buffers + u_len - 1;
874 if (n_unaligned_start)
876 uword n_copy = n_unaligned_start;
888 /* Now dst should be aligned. */
890 ASSERT (pointer_to_uword (dst) % sizeof (vlib_copy_unit_t) == 0);
895 vlib_copy_unit_t *d, *s;
898 if (vec_len (free_list->aligned_buffers) <
899 ((n_left / BUFFERS_PER_COPY) * BUFFERS_PER_COPY))
902 n_copy = n_left / BUFFERS_PER_COPY;
903 n_left = n_left % BUFFERS_PER_COPY;
905 /* Remove buffers from aligned free list. */
906 _vec_len (free_list->aligned_buffers) -= n_copy * BUFFERS_PER_COPY;
908 s = (vlib_copy_unit_t *) vec_end (free_list->aligned_buffers);
909 d = (vlib_copy_unit_t *) dst;
911 /* Fast path loop. */
934 /* Unaligned copy. */
935 ASSERT (n_unaligned_end == n_left);
943 if (!free_list->unaligned_buffers)
946 _vec_len (free_list->unaligned_buffers) = u_len;
949 /* Verify that buffers are known free. */
950 vlib_buffer_validate_alloc_free (vm, alloc_buffers,
951 n_alloc_buffers, VLIB_BUFFER_KNOWN_FREE);
954 return n_alloc_buffers;
957 /* Allocate a given number of buffers into given array.
958 Returns number actually allocated which will be either zero or
961 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
963 vlib_buffer_main_t *bm = vm->buffer_main;
965 ASSERT (os_get_cpu_number () == 0);
968 return alloc_from_free_list
970 pool_elt_at_index (bm->buffer_free_list_pool,
971 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX),
976 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
978 u32 n_buffers, u32 free_list_index)
980 vlib_buffer_main_t *bm = vm->buffer_main;
981 vlib_buffer_free_list_t *f;
982 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
983 return alloc_from_free_list (vm, f, buffers, n_buffers);
987 add_buffer_to_free_list (vlib_main_t * vm,
988 vlib_buffer_free_list_t * f,
989 u32 buffer_index, u8 do_init)
992 b = vlib_get_buffer (vm, buffer_index);
993 if (PREDICT_TRUE (do_init))
994 vlib_buffer_init_for_free_list (b, f);
995 vec_add1_aligned (f->aligned_buffers, buffer_index,
996 sizeof (vlib_copy_unit_t));
999 always_inline vlib_buffer_free_list_t *
1000 buffer_get_free_list (vlib_main_t * vm, vlib_buffer_t * b, u32 * index)
1002 vlib_buffer_main_t *bm = vm->buffer_main;
1005 *index = i = b->free_list_index;
1006 return pool_elt_at_index (bm->buffer_free_list_pool, i);
1010 vlib_set_buffer_free_callback (vlib_main_t * vm, void *fp)
1012 vlib_buffer_main_t *bm = vm->buffer_main;
1013 void *rv = bm->buffer_free_callback;
1015 bm->buffer_free_callback = fp;
1020 void vnet_buffer_free_dpdk_mb (vlib_buffer_t * b) __attribute__ ((weak));
1022 vnet_buffer_free_dpdk_mb (vlib_buffer_t * b)
/* Free 'n_buffers' buffers, optionally following next_buffer chains.
   Buffers flagged VLIB_BUFFER_RECYCLE are returned to their free list
   without re-initialization; lists with a
   buffers_added_to_freelist_function are collected and announced once
   at the end. An installed buffer_free_callback may intercept and
   shrink the set first.
   NOTE(review): this span appears to contain TWO merged variants of
   the function — a DPDK one (per-buffer rte_pktmbuf_free) and a
   dual-ping-pong free-list one beginning at the second declaration
   block ("static u32 *next_to_free[2]") — whose preprocessor
   conditionals, many braces, labels and loop heads were lost in
   extraction. Treat line-level structure as unreliable; confirm
   against upstream before editing. */
1027 static_always_inline void
1028 vlib_buffer_free_inline (vlib_main_t * vm,
1029 u32 * buffers, u32 n_buffers, u32 follow_buffer_next)
1032 vlib_buffer_main_t *bm = vm->buffer_main;
1033 vlib_buffer_free_list_t *fl;
1036 u32 (*cb) (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
1037 u32 follow_buffer_next);
1039 cb = bm->buffer_free_callback;
/* Callback may consume some/all buffers; it returns the new count. */
1041 if (PREDICT_FALSE (cb != 0))
1042 n_buffers = (*cb) (vm, buffers, n_buffers, follow_buffer_next);
1047 for (i = 0; i < n_buffers; i++)
1050 struct rte_mbuf *mb;
1052 b = vlib_get_buffer (vm, buffers[i]);
1054 fl = buffer_get_free_list (vm, b, &fi);
1056 /* The only current use of this callback: multicast recycle */
1057 if (PREDICT_FALSE (fl->buffers_added_to_freelist_function != 0))
1061 add_buffer_to_free_list
1062 (vm, fl, buffers[i], (b->flags & VLIB_BUFFER_RECYCLE) == 0)\
;
1064 for (j = 0; j < vec_len (bm->announce_list); j++)
1066 if (fl == bm->announce_list[j])
1067 goto already_announced;
1069 vec_add1 (bm->announce_list, fl);
/* Non-recycled buffers go straight back to the mbuf pool. */
1075 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_RECYCLE) == 0))
1077 mb = rte_mbuf_from_vlib_buffer (b);
1078 ASSERT (rte_mbuf_refcnt_read (mb) == 1);
1079 rte_pktmbuf_free (mb);
/* Announce once per list after the main loop. */
1083 if (vec_len (bm->announce_list))
1085 vlib_buffer_free_list_t *fl;
1086 for (i = 0; i < vec_len (bm->announce_list); i++)
1088 fl = bm->announce_list[i];
1089 fl->buffers_added_to_freelist_function (vm, fl);
1091 _vec_len (bm->announce_list) = 0;
/* NOTE(review): second variant appears to begin here. */
1094 vlib_buffer_main_t *bm = vm->buffer_main;
1095 vlib_buffer_free_list_t *fl;
1096 static u32 *next_to_free[2]; /* smp bad */
1097 u32 i_next_to_free, *b, *n, *f, fi;
1100 static vlib_buffer_free_list_t **announce_list;
1101 vlib_buffer_free_list_t *fl0 = 0, *fl1 = 0;
1102 u32 bi0 = (u32) ~ 0, bi1 = (u32) ~ 0, fi0, fi1 = (u32) ~ 0;
1103 u8 free0, free1 = 0, free_next0, free_next1;
1104 u32 (*cb) (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
1105 u32 follow_buffer_next);
1107 ASSERT (os_get_cpu_number () == 0);
1109 cb = bm->buffer_free_callback;
1111 if (PREDICT_FALSE (cb != 0))
1112 n_buffers = (*cb) (vm, buffers, n_buffers, follow_buffer_next);
1117 /* Use first buffer to get default free list. */
1119 u32 bi0 = buffers[0];
1122 b0 = vlib_get_buffer (vm, bi0);
1123 fl = buffer_get_free_list (vm, b0, &fi);
1124 if (fl->buffers_added_to_freelist_function)
1125 vec_add1 (announce_list, fl);
1128 vec_validate (next_to_free[0], n_buffers - 1);
1129 vec_validate (next_to_free[1], n_buffers - 1);
1136 /* Verify that buffers are known allocated. */
1137 vlib_buffer_validate_alloc_free (vm, b,
1138 n_left, VLIB_BUFFER_KNOWN_ALLOCATED);
/* Speculatively reserve room on the current list for all buffers. */
1140 vec_add2_aligned (fl->aligned_buffers, f, n_left,
1141 /* align */ sizeof (vlib_copy_unit_t));
1143 n = next_to_free[i_next_to_free];
1146 vlib_buffer_t *b0, *b1, *binit0, *binit1, dummy_buffers[2];
1157 /* Prefetch buffers for next iteration. */
1158 vlib_prefetch_buffer_with_index (vm, b[0], WRITE);
1159 vlib_prefetch_buffer_with_index (vm, b[1], WRITE);
1161 b0 = vlib_get_buffer (vm, bi0);
1162 b1 = vlib_get_buffer (vm, bi1);
1164 free0 = (b0->flags & VLIB_BUFFER_RECYCLE) == 0;
1165 free1 = (b1->flags & VLIB_BUFFER_RECYCLE) == 0;
1167 /* Must be before init which will over-write buffer flags. */
1168 if (follow_buffer_next)
1170 n[0] = b0->next_buffer;
1171 free_next0 = free0 && (b0->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1174 n[0] = b1->next_buffer;
1175 free_next1 = free1 && (b1->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1179 free_next0 = free_next1 = 0;
1181 /* Must be before init which will over-write buffer free list. */
1182 fi0 = b0->free_list_index;
1183 fi1 = b1->free_list_index;
/* Slow path when either buffer belongs to a different free list. */
1185 if (PREDICT_FALSE (fi0 != fi || fi1 != fi))
1188 binit0 = free0 ? b0 : &dummy_buffers[0];
1189 binit1 = free1 ? b1 : &dummy_buffers[1];
1191 vlib_buffer_init_two_for_free_list (binit0, binit1, fl);
1195 /* Backup speculation. */
1197 n -= free_next0 + free_next1;
1199 _vec_len (fl->aligned_buffers) = f - fl->aligned_buffers;
1201 fl0 = pool_elt_at_index (bm->buffer_free_list_pool, fi0);
1202 fl1 = pool_elt_at_index (bm->buffer_free_list_pool, fi1);
1204 add_buffer_to_free_list (vm, fl0, bi0, free0);
1205 if (PREDICT_FALSE (fl0->buffers_added_to_freelist_function != 0))
1208 for (i = 0; i < vec_len (announce_list); i++)
1209 if (fl0 == announce_list[i])
1211 vec_add1 (announce_list, fl0);
1214 if (PREDICT_FALSE (fl1->buffers_added_to_freelist_function != 0))
1217 for (i = 0; i < vec_len (announce_list); i++)
1218 if (fl1 == announce_list[i])
1220 vec_add1 (announce_list, fl1);
1224 add_buffer_to_free_list (vm, fl1, bi1, free1);
1226 /* Possibly change current free list. */
1227 if (fi0 != fi && fi1 != fi)
1230 fl = pool_elt_at_index (bm->buffer_free_list_pool, fi);
1233 vec_add2_aligned (fl->aligned_buffers, f, n_left,
1234 /* align */ sizeof (vlib_copy_unit_t));
/* Single-buffer leftover loop. */
1239 vlib_buffer_t *b0, *binit0, dummy_buffers[1];
1247 b0 = vlib_get_buffer (vm, bi0);
1249 free0 = (b0->flags & VLIB_BUFFER_RECYCLE) == 0;
1251 /* Must be before init which will over-write buffer flags. */
1252 if (follow_buffer_next)
1254 n[0] = b0->next_buffer;
1255 free_next0 = free0 && (b0->flags & VLIB_BUFFER_NEXT_PRESENT) != 0;
1261 /* Must be before init which will over-write buffer free list. */
1262 fi0 = b0->free_list_index;
1264 if (PREDICT_FALSE (fi0 != fi))
1267 binit0 = free0 ? b0 : &dummy_buffers[0];
1269 vlib_buffer_init_for_free_list (binit0, fl);
1273 /* Backup speculation. */
1277 _vec_len (fl->aligned_buffers) = f - fl->aligned_buffers;
1279 fl0 = pool_elt_at_index (bm->buffer_free_list_pool, fi0);
1281 add_buffer_to_free_list (vm, fl0, bi0, free0);
1282 if (PREDICT_FALSE (fl0->buffers_added_to_freelist_function != 0))
1285 for (i = 0; i < vec_len (announce_list); i++)
1286 if (fl0 == announce_list[i])
1288 vec_add1 (announce_list, fl0);
1293 fl = pool_elt_at_index (bm->buffer_free_list_pool, fi);
1295 vec_add2_aligned (fl->aligned_buffers, f, n_left,
1296 /* align */ sizeof (vlib_copy_unit_t));
/* Swap ping-pong vectors and process chained next_buffers. */
1299 if (follow_buffer_next && ((n_left = n - next_to_free[i_next_to_free]) > 0))
1301 b = next_to_free[i_next_to_free];
1302 i_next_to_free ^= 1;
1306 _vec_len (fl->aligned_buffers) = f - fl->aligned_buffers;
1308 if (vec_len (announce_list))
1310 vlib_buffer_free_list_t *fl;
1311 for (i = 0; i < vec_len (announce_list); i++)
1313 fl = announce_list[i];
1314 fl->buffers_added_to_freelist_function (vm, fl);
1316 _vec_len (announce_list) = 0;
1322 vlib_buffer_free (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
1324 vlib_buffer_free_inline (vm, buffers, n_buffers, /* follow_buffer_next */
1329 vlib_buffer_free_no_next (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
1331 vlib_buffer_free_inline (vm, buffers, n_buffers, /* follow_buffer_next */
1336 /* Copy template packet data into buffers as they are allocated. */
1338 vlib_packet_template_buffer_init (vlib_main_t * vm,
1339 vlib_buffer_free_list_t * fl,
1340 u32 * buffers, u32 n_buffers)
1342 vlib_packet_template_t *t =
1343 uword_to_pointer (fl->buffer_init_function_opaque,
1344 vlib_packet_template_t *);
1347 for (i = 0; i < n_buffers; i++)
1349 vlib_buffer_t *b = vlib_get_buffer (vm, buffers[i]);
1350 ASSERT (b->current_length == vec_len (t->packet_data));
1351 clib_memcpy (vlib_buffer_get_current (b), t->packet_data,
/* Initialize a packet template: copy the template bytes and (second
   variant) create a dedicated free list whose buffers are pre-filled
   via vlib_packet_template_buffer_init.
   NOTE(review): two merged variants appear back to back — the first
   brackets the work in a worker-thread barrier; the preprocessor
   conditionals, the packet_data/fmt parameter lines, va_start/va_end
   and several braces are missing from this extraction. */
1358 vlib_packet_template_init (vlib_main_t * vm,
1359 vlib_packet_template_t * t,
1361 uword n_packet_data_bytes,
1362 uword min_n_buffers_each_physmem_alloc,
1367 __attribute__ ((unused)) u8 *name;
1370 name = va_format (0, fmt, &va);
/* Quiesce workers while the shared template is (re)written. */
1373 vlib_worker_thread_barrier_sync (vm);
1374 memset (t, 0, sizeof (t[0]));
1376 vec_add (t->packet_data, packet_data, n_packet_data_bytes);
1378 vlib_worker_thread_barrier_release (vm);
/* NOTE(review): second variant appears to begin here. */
1380 vlib_buffer_free_list_t *fl;
1385 name = va_format (0, fmt, &va);
1388 memset (t, 0, sizeof (t[0]));
1390 vec_add (t->packet_data, packet_data, n_packet_data_bytes);
1391 t->min_n_buffers_each_physmem_alloc = min_n_buffers_each_physmem_alloc;
1393 t->free_list_index = vlib_buffer_create_free_list_helper
1394 (vm, n_packet_data_bytes,
1399 ASSERT (t->free_list_index != 0);
1400 fl = vlib_buffer_get_free_list (vm, t->free_list_index);
1401 fl->min_n_buffers_each_physmem_alloc = t->min_n_buffers_each_physmem_alloc;
/* New buffers on this list are stamped with the template data. */
1403 fl->buffer_init_function = vlib_packet_template_buffer_init;
1404 fl->buffer_init_function_opaque = pointer_to_uword (t);
1406 fl->buffer_init_template.current_data = 0;
1407 fl->buffer_init_template.current_length = n_packet_data_bytes;
1408 fl->buffer_init_template.flags = 0;
1413 vlib_packet_template_get_packet (vlib_main_t * vm,
1414 vlib_packet_template_t * t, u32 * bi_result)
1419 if (vlib_buffer_alloc (vm, &bi, 1) != 1)
1424 b = vlib_get_buffer (vm, bi);
1425 clib_memcpy (vlib_buffer_get_current (b),
1426 t->packet_data, vec_len (t->packet_data));
1427 b->current_length = vec_len (t->packet_data);
/* Refill the template's cached free-buffer vector from its dedicated
   free list (min_n_buffers_each_physmem_alloc at a time).
   NOTE(review): the return type, 'n_alloc' declaration and braces are
   missing from this extraction; 'l' appears unused in the visible
   lines. */
1434 vlib_packet_template_get_packet_helper (vlib_main_t * vm,
1435 vlib_packet_template_t * t)
1437 word n = t->min_n_buffers_each_physmem_alloc;
1438 word l = vec_len (t->packet_data);
1442 ASSERT (vec_len (t->free_buffers) == 0);
1444 vec_validate (t->free_buffers, n - 1);
1445 n_alloc = vlib_buffer_alloc_from_free_list (vm, t->free_buffers,
1446 n, t->free_list_index);
/* Record how many were actually obtained. */
1447 _vec_len (t->free_buffers) = n_alloc;
1451 /* Append given data to end of buffer, possibly allocating new buffers. */
/* Walks to the end of the chain, fills the tail buffer, and allocates
   and links additional buffers from 'free_list_index' as needed.
   Returns the (possibly newly allocated) head buffer index — calls
   clib_error on allocation failure.
   NOTE(review): the return type, the initial-allocation condition,
   the outer copy loop head, 'd'/'n'/'b' declarations, braces and the
   final return are missing from this extraction. */
1453 vlib_buffer_add_data (vlib_main_t * vm,
1454 u32 free_list_index,
1455 u32 buffer_index, void *data, u32 n_data_bytes)
1457 u32 n_buffer_bytes, n_left, n_left_this_buffer, bi;
1463 && 1 != vlib_buffer_alloc_from_free_list (vm, &bi, 1, free_list_index))
1464 goto out_of_buffers;
1467 n_left = n_data_bytes;
1468 n_buffer_bytes = vlib_buffer_free_list_buffer_size (vm, free_list_index);
1470 b = vlib_get_buffer (vm, bi);
/* Total-length cache becomes stale once we append. */
1471 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
1473 /* Get to the end of the chain before we try to append data... */
1474 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
1475 b = vlib_get_buffer (vm, b->next_buffer);
1481 ASSERT (n_buffer_bytes >= b->current_length);
1482 n_left_this_buffer =
1483 n_buffer_bytes - (b->current_data + b->current_length);
1484 n = clib_min (n_left_this_buffer, n_left);
1485 clib_memcpy (vlib_buffer_get_current (b) + b->current_length, d, n);
1486 b->current_length += n;
/* Need another buffer: allocate and link it into the chain. */
1493 vlib_buffer_alloc_from_free_list (vm, &b->next_buffer, 1,
1495 goto out_of_buffers;
1497 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1499 b = vlib_get_buffer (vm, b->next_buffer);
1505 clib_error ("out of buffers");
/* Append data_len bytes to the chain headed by `first`, writing into
 * *last and allocating/linking new buffers whenever the current tail is
 * full. *last is updated to the new tail as the chain grows.
 * NOTE(review): interior lines are missing (return type, braces, the
 * `copied` accumulator declaration/update, loop structure, and the
 * alloc-failure handling); consult the full upstream file. */
1510 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
1511 u32 free_list_index,
1512 vlib_buffer_t * first,
1513 vlib_buffer_t * last,
1514 void *data, u16 data_len)
1516 vlib_buffer_t *l = *last;
1517 u32 n_buffer_bytes =
1518 vlib_buffer_free_list_buffer_size (vm, free_list_index);
/* Sanity: current contents must fit inside a single buffer. */
1520 ASSERT (n_buffer_bytes >= l->current_length + l->current_data);
/* Bytes of free space left in the current tail buffer. */
1523 u16 max = n_buffer_bytes - l->current_length - l->current_data;
/* Tail full: allocate a fresh buffer and link it onto the chain,
 * then recompute the available space in the new tail. */
1527 vlib_buffer_alloc_from_free_list (vm, &l->next_buffer, 1,
1530 *last = l = vlib_buffer_chain_buffer (vm, first, l, l->next_buffer);
1531 max = n_buffer_bytes - l->current_length - l->current_data;
/* Copy as much as fits in this buffer; `copied` tracks the offset
 * into `data` (its declaration is not visible in this chunk). */
1534 u16 len = (data_len > max) ? max : data_len;
1535 clib_memcpy (vlib_buffer_get_current (l) + l->current_length,
1536 data + copied, len);
/* Keep both the tail's current_length and first's total-length
 * bookkeeping in sync. */
1537 vlib_buffer_chain_increase_length (first, l, len);
/* Create the per-NUMA-socket DPDK mbuf pool used to back vlib buffers,
 * then fold the pool's memory range into the physmem "virtual" region so
 * buffer indices (offsets from virtual.start, cache-line scaled) can
 * address it. Falls back to another socket's pool when creation fails.
 * NOTE(review): interior lines are missing (return type, braces, the
 * rmp == NULL check, trace/return statements); consult the full file. */
1546 vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
1549 vlib_buffer_main_t *bm = vm->buffer_main;
1550 vlib_physmem_main_t *vpm = &vm->physmem_main;
1551 struct rte_mempool *rmp;
/* Weak-symbol check: if DPDK was not linked in, the function pointer
 * is NULL and we cannot create a pool at all. */
1554 if (!rte_pktmbuf_pool_create)
1555 return clib_error_return (0, "not linked with DPDK");
1557 vec_validate_aligned (bm->pktmbuf_pools, socket_id, CLIB_CACHE_LINE_BYTES);
1559 /* pool already exists, nothing to do */
1560 if (bm->pktmbuf_pools[socket_id])
/* Name must be unique per socket; trailing %c 0 NUL-terminates the
 * vec so DPDK can treat it as a C string. */
1563 u8 *pool_name = format (0, "mbuf_pool_socket%u%c", socket_id, 0);
1565 rmp = rte_pktmbuf_pool_create ((char *) pool_name, /* pool name */
1566 num_mbufs, /* number of mbufs */
1567 512, /* cache size */
1568 VLIB_BUFFER_HDR_SIZE, /* priv size */
1569 VLIB_BUFFER_PRE_DATA_SIZE + VLIB_BUFFER_DATA_SIZE, /* dataroom size */
1570 socket_id); /* cpu socket */
1575 uword this_pool_end;
1576 uword this_pool_start;
1577 uword this_pool_size;
/* Saved so the physmem range can be rolled back if the merged range
 * would overflow the 32-bit buffer-index space below. */
1578 uword save_vpm_start, save_vpm_end, save_vpm_size;
1579 struct rte_mempool_memhdr *memhdr;
1581 this_pool_start = ~0ULL;
1582 this_pool_end = 0LL;
/* Compute the min/max virtual addresses spanned by all of the
 * mempool's memory segments. */
1584 STAILQ_FOREACH (memhdr, &rmp->mem_list, next)
1586 if (((uword) (memhdr->addr + memhdr->len)) > this_pool_end)
1587 this_pool_end = (uword) (memhdr->addr + memhdr->len);
1588 if (((uword) memhdr->addr) < this_pool_start)
1589 this_pool_start = (uword) (memhdr->addr);
1591 ASSERT (this_pool_start < ~0ULL && this_pool_end > 0);
1592 this_pool_size = this_pool_end - this_pool_start;
1596 clib_warning ("%s: pool start %llx pool end %llx pool size %lld",
1597 pool_name, this_pool_start, this_pool_end,
1600 ("before: virtual.start %llx virtual.end %llx virtual.size %lld",
1601 vpm->virtual.start, vpm->virtual.end, vpm->virtual.size);
1604 save_vpm_start = vpm->virtual.start;
1605 save_vpm_end = vpm->virtual.end;
1606 save_vpm_size = vpm->virtual.size;
/* Grow the physmem virtual region to cover this pool's range. */
1608 if ((this_pool_start < vpm->virtual.start) || vpm->virtual.start == 0)
1609 vpm->virtual.start = this_pool_start;
1610 if (this_pool_end > vpm->virtual.end)
1611 vpm->virtual.end = this_pool_end;
1613 vpm->virtual.size = vpm->virtual.end - vpm->virtual.start;
1618 ("after: virtual.start %llx virtual.end %llx virtual.size %lld",
1619 vpm->virtual.start, vpm->virtual.end, vpm->virtual.size);
1622 /* check if fits into buffer index range */
/* Buffer indices are 32 bits of cache-line-sized units, so the whole
 * region must fit in 2^(32 + log2 cacheline) bytes. */
1623 if ((u64) vpm->virtual.size >
1624 ((u64) 1 << (32 + CLIB_LOG2_CACHE_LINE_BYTES)))
1626 clib_warning ("physmem: virtual size out of range!");
/* Roll back the physmem expansion; this pool cannot be used. */
1627 vpm->virtual.start = save_vpm_start;
1628 vpm->virtual.end = save_vpm_end;
1629 vpm->virtual.size = save_vpm_size;
1635 bm->pktmbuf_pools[socket_id] = rmp;
1636 vec_free (pool_name);
1641 vec_free (pool_name);
1643 /* no usable pool for this socket, try to use pool from another one */
1644 for (i = 0; i < vec_len (bm->pktmbuf_pools); i++)
1646 if (bm->pktmbuf_pools[i])
1649 ("WARNING: Failed to allocate mempool for CPU socket %u. "
1650 "Threads running on socket %u will use socket %u mempool.",
1651 socket_id, socket_id, i);
1652 bm->pktmbuf_pools[socket_id] = bm->pktmbuf_pools[i];
1657 return clib_error_return (0, "failed to allocate mempool on socket %u",
/* Serialize-stream data callback for the transmit direction.
 * Called by the serializer when its current buffer fills (or the stream
 * ends): flushes the completed chain to the graph node, allocates a new
 * vlib buffer for further output, and points the stream at it.
 * NOTE(review): interior lines are missing (return type, braces,
 * vlib_main_t *vm derivation, alloc return-value checks); consult the
 * full upstream file before modifying. */
1663 vlib_serialize_tx (serialize_main_header_t * m, serialize_stream_t * s)
1666 vlib_serialize_buffer_main_t *sm;
1667 uword n, n_bytes_to_write;
1668 vlib_buffer_t *last;
1670 n_bytes_to_write = s->current_buffer_index;
/* Recover the per-stream state stuffed into the opaque word by
 * serialize_open_vlib_helper. */
1672 uword_to_pointer (s->data_function_opaque,
1673 vlib_serialize_buffer_main_t *);
1676 ASSERT (sm->tx.max_n_data_bytes_per_chain > 0);
/* Flush: either the stream is done, or the chain has hit its
 * configured size cap. */
1677 if (serialize_stream_is_end_of_stream (s)
1678 || sm->tx.n_total_data_bytes + n_bytes_to_write >
1679 sm->tx.max_n_data_bytes_per_chain)
1681 vlib_process_t *p = vlib_get_current_process (vm);
1683 last = vlib_get_buffer (vm, sm->last_buffer);
1684 last->current_length = n_bytes_to_write;
/* Hand the finished chain to the configured next node. */
1686 vlib_set_next_frame_buffer (vm, &p->node_runtime, sm->tx.next_index,
/* Reset chain state for the next message. */
1689 sm->first_buffer = sm->last_buffer = ~0;
1690 sm->tx.n_total_data_bytes = 0;
/* First call on a fresh stream: allocate the initial buffer. */
1693 else if (n_bytes_to_write == 0 && s->n_buffer_bytes == 0)
1695 ASSERT (sm->first_buffer == ~0);
1696 ASSERT (sm->last_buffer == ~0);
1698 vlib_buffer_alloc_from_free_list (vm, &sm->first_buffer, 1,
1699 sm->tx.free_list_index);
1703 ("vlib_buffer_alloc_from_free_list fails"));
1704 sm->last_buffer = sm->first_buffer;
1706 vlib_buffer_free_list_buffer_size (vm, sm->tx.free_list_index);
/* Current buffer filled mid-stream: chain a new buffer after it. */
1709 if (n_bytes_to_write > 0)
1711 vlib_buffer_t *prev = vlib_get_buffer (vm, sm->last_buffer);
1713 vlib_buffer_alloc_from_free_list (vm, &sm->last_buffer, 1,
1714 sm->tx.free_list_index);
1718 ("vlib_buffer_alloc_from_free_list fails"));
1719 sm->tx.n_total_data_bytes += n_bytes_to_write;
1720 prev->current_length = n_bytes_to_write;
1721 prev->next_buffer = sm->last_buffer;
1722 prev->flags |= VLIB_BUFFER_NEXT_PRESENT;
/* Point the serializer at the (possibly new) current buffer. */
1725 if (sm->last_buffer != ~0)
1727 last = vlib_get_buffer (vm, sm->last_buffer);
1728 s->buffer = vlib_buffer_get_current (last);
1729 s->current_buffer_index = 0;
1730 ASSERT (last->current_data == s->current_buffer_index);
/* Serialize-stream data callback for the receive direction.
 * Advances to the next buffer in the current chain, freeing consumed
 * buffers, and when the chain is exhausted suspends the calling process
 * until a new buffer index arrives on the rx fifo.
 * NOTE(review): interior lines are missing (return type, braces,
 * vlib_main_t *vm derivation, early-return on end-of-stream); consult
 * the full upstream file before modifying. */
1735 vlib_serialize_rx (serialize_main_header_t * m, serialize_stream_t * s)
1738 vlib_serialize_buffer_main_t *sm;
1739 vlib_buffer_t *last;
/* Recover per-stream state from the opaque word. */
1742 uword_to_pointer (s->data_function_opaque,
1743 vlib_serialize_buffer_main_t *);
1746 if (serialize_stream_is_end_of_stream (s))
/* Finished consuming the current buffer: step to its successor (or
 * mark the chain exhausted) and free the whole consumed chain. */
1749 if (sm->last_buffer != ~0)
1751 last = vlib_get_buffer (vm, sm->last_buffer);
1753 if (last->flags & VLIB_BUFFER_NEXT_PRESENT)
1754 sm->last_buffer = last->next_buffer;
1757 vlib_buffer_free (vm, &sm->first_buffer, /* count */ 1);
1758 sm->first_buffer = sm->last_buffer = ~0;
/* Chain exhausted: block this process until the fifo has data. */
1762 if (sm->last_buffer == ~0)
1764 while (clib_fifo_elts (sm->rx.buffer_fifo) == 0)
1766 sm->rx.ready_one_time_event =
1767 vlib_process_create_one_time_event (vm, vlib_current_process (vm),
1769 vlib_process_wait_for_one_time_event (vm, /* no event data */ 0,
1770 sm->rx.ready_one_time_event);
/* Pop the next chain head off the fifo. */
1773 clib_fifo_sub1 (sm->rx.buffer_fifo, sm->first_buffer);
1774 sm->last_buffer = sm->first_buffer;
1777 ASSERT (sm->last_buffer != ~0);
/* Expose the new current buffer to the unserializer. */
1779 last = vlib_get_buffer (vm, sm->last_buffer);
1780 s->current_buffer_index = 0;
1781 s->buffer = vlib_buffer_get_current (last);
1782 s->n_buffer_bytes = last->current_length;
/* Common open path for both serialize (tx) and unserialize (rx) over
 * vlib buffers: resets the serialize main and the buffer-chain state,
 * then installs the direction-appropriate data callback.
 * NOTE(review): some lines are missing (return type, braces, the
 * sm->vlib_main assignment); consult the full upstream file. */
1786 serialize_open_vlib_helper (serialize_main_t * m,
1788 vlib_serialize_buffer_main_t * sm, uword is_read)
1790 /* Initialize serialize main but save overflow buffer for re-use between calls. */
1792 u8 *save = m->stream.overflow_buffer;
1793 memset (m, 0, sizeof (m[0]));
1794 m->stream.overflow_buffer = save;
/* Reuse the vec's allocation; just reset its logical length. */
1796 _vec_len (save) = 0;
1799 sm->first_buffer = sm->last_buffer = ~0;
1801 clib_fifo_reset (sm->rx.buffer_fifo);
1803 sm->tx.n_total_data_bytes = 0;
/* rx streams pull data via vlib_serialize_rx; tx streams push via
 * vlib_serialize_tx. The callback finds sm through the opaque word. */
1805 m->header.data_function = is_read ? vlib_serialize_rx : vlib_serialize_tx;
1806 m->stream.data_function_opaque = pointer_to_uword (sm);
/* Open a serialize (write/tx) stream backed by vlib buffers. */
1810 serialize_open_vlib_buffer (serialize_main_t * m, vlib_main_t * vm,
1811 vlib_serialize_buffer_main_t * sm)
1813 serialize_open_vlib_helper (m, vm, sm, /* is_read */ 0);
/* Open an unserialize (read/rx) stream backed by vlib buffers. */
1817 unserialize_open_vlib_buffer (serialize_main_t * m, vlib_main_t * vm,
1818 vlib_serialize_buffer_main_t * sm)
1820 serialize_open_vlib_helper (m, vm, sm, /* is_read */ 1);
/* Close a tx serialize stream: finalize the tail buffer's length, flush
 * any bytes still sitting in the overflow vec into the chain, and return
 * the head buffer index of the completed chain.
 * NOTE(review): interior lines are missing (return type, braces, the
 * guard for sm->last_buffer == ~0 before vlib_get_buffer, and the
 * assignment target of vlib_buffer_add_data); consult the full file. */
1824 serialize_close_vlib_buffer (serialize_main_t * m)
1826 vlib_serialize_buffer_main_t *sm
1827 = uword_to_pointer (m->stream.data_function_opaque,
1828 vlib_serialize_buffer_main_t *);
1829 vlib_buffer_t *last;
1830 serialize_stream_t *s = &m->stream;
1832 last = vlib_get_buffer (sm->vlib_main, sm->last_buffer);
1833 last->current_length = s->current_buffer_index;
/* Bytes that did not fit in the final buffer live in the overflow
 * vec; append them to the chain now. */
1835 if (vec_len (s->overflow_buffer) > 0)
1838 = vlib_buffer_add_data (sm->vlib_main, sm->tx.free_list_index,
1839 sm->last_buffer == ~0 ? 0 : sm->last_buffer,
1841 vec_len (s->overflow_buffer));
1842 _vec_len (s->overflow_buffer) = 0;
/* Caller takes ownership of the chain starting here. */
1845 return sm->first_buffer;
/* Close an rx unserialize stream: free any remaining buffer chain,
 * drain the rx fifo, and reset the overflow vec for reuse.
 * NOTE(review): lines are missing (return type, braces); consult the
 * full upstream file. */
1849 unserialize_close_vlib_buffer (serialize_main_t * m)
1851 vlib_serialize_buffer_main_t *sm
1852 = uword_to_pointer (m->stream.data_function_opaque,
1853 vlib_serialize_buffer_main_t *);
/* ~0 is the "no buffer" sentinel used throughout this file. */
1854 if (sm->first_buffer != ~0)
1855 vlib_buffer_free_one (sm->vlib_main, sm->first_buffer);
1856 clib_fifo_reset (sm->rx.buffer_fifo);
/* Keep the allocation, drop the contents. */
1857 if (m->stream.overflow_buffer)
1858 _vec_len (m->stream.overflow_buffer) = 0;
/* format() callback printing one free-list row (or, when called with a
 * NULL free list, the table header).
 * NOTE(review): two near-duplicate bodies are visible below -- one with
 * a thread column, one without -- almost certainly the two arms of a
 * preprocessor conditional whose #if/#else/#endif lines were dropped
 * from this chunk, along with the NULL-f header check. Consult the full
 * upstream file before modifying. */
1862 format_vlib_buffer_free_list (u8 * s, va_list * va)
1864 vlib_buffer_free_list_t *f = va_arg (*va, vlib_buffer_free_list_t *);
1866 u32 threadnum = va_arg (*va, u32);
1867 uword bytes_alloc, bytes_free, n_free, size;
/* Variant with per-thread column: header row... */
1870 return format (s, "%=7s%=30s%=12s%=12s%=12s%=12s%=12s%=12s",
1871 "Thread", "Name", "Index", "Size", "Alloc", "Free",
/* ...and data row; size counts metadata plus payload per buffer. */
1874 size = sizeof (vlib_buffer_t) + f->n_data_bytes;
1875 n_free = vec_len (f->aligned_buffers) + vec_len (f->unaligned_buffers);
1876 bytes_alloc = size * f->n_alloc;
1877 bytes_free = size * n_free;
1879 s = format (s, "%7d%30s%12d%12d%=12U%=12U%=12d%=12d", threadnum,
/* Variant without the thread column (other preprocessor arm). */
1881 uword bytes_alloc, bytes_free, n_free, size;
1884 return format (s, "%=30s%=12s%=12s%=12s%=12s%=12s%=12s",
1885 "Name", "Index", "Size", "Alloc", "Free", "#Alloc",
1888 size = sizeof (vlib_buffer_t) + f->n_data_bytes;
1889 n_free = vec_len (f->aligned_buffers) + vec_len (f->unaligned_buffers);
1890 bytes_alloc = size * f->n_alloc;
1891 bytes_free = size * n_free;
1893 s = format (s, "%30s%12d%12d%=12U%=12U%=12d%=12d",
1895 f->name, f->index, f->n_data_bytes,
1896 format_memory_size, bytes_alloc,
1897 format_memory_size, bytes_free, f->n_alloc, n_free);
/* CLI handler for "show buffers": prints every buffer free list.
 * NOTE(review): two variants are visible -- one iterating all worker
 * vlib_mains, one using only the calling vm -- presumably the two arms
 * of a preprocessor conditional whose directives were dropped from this
 * chunk, along with braces, vm_index init, and the return statement.
 * Consult the full upstream file before modifying. */
1902 static clib_error_t *
1903 show_buffers (vlib_main_t * vm,
1904 unformat_input_t * input, vlib_cli_command_t * cmd)
1907 vlib_buffer_main_t *bm;
1908 vlib_buffer_free_list_t *f;
1909 vlib_main_t *curr_vm;
/* NULL free list => print the header row only. */
1912 vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, 0, 0);
/* Multi-thread variant: walk each worker's buffer main. */
1916 curr_vm = vec_len (vlib_mains) ? vlib_mains[vm_index] : vm;
1917 bm = curr_vm->buffer_main;
1920 pool_foreach (f, bm->buffer_free_list_pool, ({
1921 vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, f, vm_index);
1927 while (vm_index < vec_len (vlib_mains));
/* Single-thread variant (other preprocessor arm). */
1930 vlib_buffer_main_t *bm = vm->buffer_main;
1931 vlib_buffer_free_list_t *f;
1933 vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, 0);
1935 pool_foreach (f, bm->buffer_free_list_pool, ({
1936 vlib_cli_output (vm, "%U", format_vlib_buffer_free_list, f);
/* Register the "show buffers" CLI command (closing brace/semicolon not
 * visible in this chunk). */
1945 VLIB_CLI_COMMAND (show_buffers_command, static) = {
1946 .path = "show buffers",
1947 .short_help = "Show packet buffer allocation",
1948 .function = show_buffers,
/* Globals for the buffer-state validation debug feature: a spinlock
 * word, a buffer-index -> state hash, and the private mheap both of
 * them live on (allocated in buffer_state_validation_init below). */
1955 u32 *vlib_buffer_state_validation_lock;
1956 uword *vlib_buffer_state_validation_hash;
1957 void *vlib_buffer_state_heap;
/* Init function for the buffer-state validation machinery: creates a
 * dedicated 10 MB mheap and allocates the validation hash and lock on
 * it, restoring the caller's heap afterwards.
 * NOTE(review): lines are missing from this chunk (braces, `oldheap`
 * declaration, `return 0`); consult the full upstream file. */
1959 static clib_error_t *
1960 buffer_state_validation_init (vlib_main_t * vm)
1964 vlib_buffer_state_heap = mheap_alloc (0, 10 << 20);
/* Temporarily switch to the private heap so the hash/lock allocations
 * land on it rather than the main heap. */
1966 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
1968 vlib_buffer_state_validation_hash = hash_create (0, sizeof (uword));
1969 vec_validate_aligned (vlib_buffer_state_validation_lock, 0,
1970 CLIB_CACHE_LINE_BYTES);
1971 clib_mem_set_heap (oldheap);
/* Run at vlib init time. */
1975 VLIB_INIT_FUNCTION (buffer_state_validation_init);
1982 * fd.io coding-style-patch-verification: ON
1985 * eval: (c-set-style "gnu")