2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
/* A buffer index is the buffer's physmem offset scaled down by the
   cache-line size, so translation is one shift plus the physmem base. */
59 return vlib_physmem_at_offset (&vm->physmem_main, ((uword) buffer_index)
60 << CLIB_LOG2_CACHE_LINE_BYTES);
63 /** \brief Translate buffer pointer into buffer index
65 @param vm - (vlib_main_t *) vlib main data structure pointer
66 @param p - (void *) buffer pointer
67 @return - (u32) buffer index
70 vlib_get_buffer_index (vlib_main_t * vm, void *p)
72 uword offset = vlib_physmem_offset_of (&vm->physmem_main, p);
/* Buffers live cache-line aligned in physmem; a misaligned pointer
   cannot be a valid buffer start. */
73 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
74 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
77 /** \brief Get next buffer in buffer linklist, or zero for end of list.
79 @param vm - (vlib_main_t *) vlib main data structure pointer
80 @param b - (void *) buffer pointer
81 @return - (vlib_buffer_t *) next buffer, or NULL
83 always_inline vlib_buffer_t *
84 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
/* b->next_buffer is only meaningful when VLIB_BUFFER_NEXT_PRESENT is
   set; otherwise report end-of-chain as 0 (NULL). */
86 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
87 ? vlib_get_buffer (vm, b->next_buffer) : 0);
90 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
91 vlib_buffer_t * b_first);
93 /** \brief Get length in bytes of the buffer chain
95 @param vm - (vlib_main_t *) vlib main data structure pointer
96 @param b - (void *) buffer pointer
97 @return - (uword) length of buffer chain
100 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
102 uword l = b->current_length + b->total_length_not_including_first_buffer;
/* Fast path: total_length_not_including_first_buffer can only be
   trusted when VLIB_BUFFER_TOTAL_LENGTH_VALID is set.  A chained
   buffer (NEXT_PRESENT) whose total is NOT valid must be measured by
   walking the chain in the slow path. */
103 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
104 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
105 == VLIB_BUFFER_NEXT_PRESENT))
106 return vlib_buffer_length_in_chain_slow_path (vm, b);
110 /** \brief Get length in bytes of the buffer index buffer chain
112 @param vm - (vlib_main_t *) vlib main data structure pointer
113 @param bi - (u32) buffer index
114 @return - (uword) length of buffer chain
117 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
/* Convenience wrapper: resolve the index, then measure the chain. */
119 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
120 return vlib_buffer_length_in_chain (vm, b);
123 /** \brief Copy buffer contents to memory
125 @param vm - (vlib_main_t *) vlib main data structure pointer
126 @param buffer_index - (u32) buffer index
127 @param contents - (u8 *) memory, <strong>must be large enough</strong>
128 @return - (uword) length of buffer chain
131 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
133 uword content_len = 0;
/* Walk the chain, appending each buffer's payload into 'contents'.
   The caller must supply memory large enough for the whole chain
   (see the doxygen note above). */
139 b = vlib_get_buffer (vm, buffer_index);
140 l = b->current_length;
/* Payload starts at data + current_data for this buffer. */
141 clib_memcpy (contents + content_len, b->data + b->current_data, l);
/* Last buffer in the chain: stop walking. */
143 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
145 buffer_index = b->next_buffer;
151 /* Return physical address of buffer->data start. */
153 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
/* Physical address of the data area: index scaled to a physmem
   offset, plus the offset of the data member inside vlib_buffer_t. */
155 return vlib_physmem_offset_to_physical (&vm->physmem_main,
156 (((uword) buffer_index) <<
157 CLIB_LOG2_CACHE_LINE_BYTES) +
158 STRUCT_OFFSET_OF (vlib_buffer_t,
162 /** \brief Prefetch buffer metadata by buffer index
163 The first 64 bytes of buffer contains most header information
165 @param vm - (vlib_main_t *) vlib main data structure pointer
166 @param bi - (u32) buffer index
167 @param type - LOAD, STORE. In most cases, STORE is the right answer
169 /* Prefetch buffer header given index. */
/* Resolve the index, then prefetch the buffer header cache line.
   'type' is LOAD or STORE as accepted by vlib_prefetch_buffer_header. */
170 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
172 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
173 vlib_prefetch_buffer_header (_b, type); \
177 /* Iterate over known allocated vlib bufs. You probably do not want
179 @param vm the vlib_main_t
180 @param bi found allocated buffer index
181 @param body operation to perform on buffer index
182 function executes body for each allocated buffer index
/* Iterate the buffer_known_hash and execute 'body' once per index
   currently marked VLIB_BUFFER_KNOWN_ALLOCATED.  Debug/diagnostic
   use only — see the warning in the comment above. */
184 #define vlib_buffer_foreach_allocated(vm,bi,body) \
186 vlib_main_t * _vmain = (vm); \
187 vlib_buffer_main_t * _bmain = &_vmain->buffer_main; \
188 hash_pair_t * _vbpair; \
189 hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({ \
190 if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) { \
191 (bi) = _vbpair->key; \
/* Three-state classification used by the buffer-state sanity hash. */
200 /* Index is unknown. */
203 /* Index is known and free/allocated. */
204 VLIB_BUFFER_KNOWN_FREE,
205 VLIB_BUFFER_KNOWN_ALLOCATED,
206 } vlib_buffer_known_state_t;
208 always_inline vlib_buffer_known_state_t
209 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
211 vlib_buffer_main_t *bm = vm->buffer_main;
/* The known-buffer hash is maintained on thread 0 only (hence the
   ASSERT); callers on workers must not query it. */
212 ASSERT (os_get_cpu_number () == 0);
214 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
/* Absent from the hash means we have never seen this index. */
215 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
219 vlib_buffer_set_known_state (vlib_main_t * vm,
221 vlib_buffer_known_state_t state)
223 vlib_buffer_main_t *bm = vm->buffer_main;
/* Writers, like readers, are restricted to thread 0. */
224 ASSERT (os_get_cpu_number () == 0);
225 hash_set (bm->buffer_known_hash, buffer_index, state);
228 /* Validates sanity of a single buffer.
229 Returns format'ed vector with error message if any. */
230 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
233 clib_error_t *vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
236 /** \brief Allocate buffers into supplied array
238 @param vm - (vlib_main_t *) vlib main data structure pointer
239 @param buffers - (u32 * ) buffer index array
240 @param n_buffers - (u32) number of buffers requested
241 @return - (u32) number of buffers actually allocated, may be
242 less than the number requested or zero
245 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
247 vlib_buffer_main_t *bm = vm->buffer_main;
/* Allocation is delegated to the registered buffer-manager callback;
   it must have been installed before the first allocation. */
249 ASSERT (bm->cb.vlib_buffer_alloc_cb);
251 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
255 vlib_buffer_round_size (u32 size)
/* Round a requested data size up to a multiple of the buffer header
   size (round_pow2 — sizeof (vlib_buffer_t) is assumed to be a power
   of two). */
257 return round_pow2 (size, sizeof (vlib_buffer_t));
260 /** \brief Allocate buffers from specific freelist into supplied array
262 @param vm - (vlib_main_t *) vlib main data structure pointer
263 @param buffers - (u32 * ) buffer index array
264 @param n_buffers - (u32) number of buffers requested
265 @return - (u32) number of buffers actually allocated, may be
266 less than the number requested or zero
269 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
271 u32 n_buffers, u32 free_list_index)
273 vlib_buffer_main_t *bm = vm->buffer_main;
/* Delegate to the registered per-free-list allocation callback. */
275 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
277 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
281 /** \brief Free buffers
282 Frees the entire buffer chain for each buffer
284 @param vm - (vlib_main_t *) vlib main data structure pointer
285 @param buffers - (u32 * ) buffer index array
286 @param n_buffers - (u32) number of buffers to free
290 vlib_buffer_free (vlib_main_t * vm,
291 /* pointer to first buffer */
293 /* number of buffers to free */
296 vlib_buffer_main_t *bm = vm->buffer_main;
/* Delegate to the registered free callback; per the doxygen note
   above, this frees the entire chain of each listed buffer. */
298 ASSERT (bm->cb.vlib_buffer_free_cb);
300 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
303 /** \brief Free buffers, does not free the buffer chain for each buffer
305 @param vm - (vlib_main_t *) vlib main data structure pointer
306 @param buffers - (u32 * ) buffer index array
307 @param n_buffers - (u32) number of buffers to free
311 vlib_buffer_free_no_next (vlib_main_t * vm,
312 /* pointer to first buffer */
314 /* number of buffers to free */
317 vlib_buffer_main_t *bm = vm->buffer_main;
/* Variant that frees only the listed buffers, not their chains. */
319 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
321 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
324 /** \brief Free one buffer
325 Shorthand to free a single buffer chain.
327 @param vm - (vlib_main_t *) vlib main data structure pointer
328 @param buffer_index - (u32) buffer index to free
331 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
/* Shorthand: free a single buffer (chain) via the array interface. */
333 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
336 /* Add/delete buffer free lists. */
337 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
340 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
342 vlib_buffer_main_t *bm = vm->buffer_main;
/* Delegate free-list teardown to the registered callback. */
344 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
346 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
349 /* Find already existing public free list with given size or create one. */
350 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
353 /* Merge two free lists */
354 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
355 vlib_buffer_free_list_t * src);
357 /* Make sure we have at least given number of unaligned buffers. */
358 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
359 vlib_buffer_free_list_t *
361 uword n_unaligned_buffers);
364 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
366 vlib_buffer_main_t *bm = vm->buffer_main;
/* Free lists are keyed by rounded size; returns ~0 when no free list
   of that size exists. */
368 size = vlib_buffer_round_size (size);
369 uword *p = hash_get (bm->free_list_by_size, size);
370 return p ? p[0] : ~0;
373 always_inline vlib_buffer_free_list_t *
374 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
377 vlib_buffer_main_t *bm = vm->buffer_main;
/* Report the buffer's own free-list index through 'index' and return
   the corresponding pool element. */
380 *index = i = b->free_list_index;
381 return pool_elt_at_index (bm->buffer_free_list_pool, i);
384 always_inline vlib_buffer_free_list_t *
385 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
387 vlib_buffer_main_t *bm = vm->buffer_main;
388 vlib_buffer_free_list_t *f;
390 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
392 /* Sanity: indices must match. */
393 ASSERT (f->index == free_list_index);
399 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
/* Data bytes per buffer for the given free list. */
401 vlib_buffer_free_list_t *f =
402 vlib_buffer_get_free_list (vm, free_list_index);
403 return f->n_data_bytes;
406 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
408 /* Reasonably fast buffer copy routine. */
410 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
432 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
433 uword n_bytes, uword alignment)
/* Delegate to the platform physmem allocator; on failure an error is
   reported through *error rather than by return value. */
436 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
439 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
446 /* By default allocate I/O memory with cache line alignment. */
448 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
/* Default I/O memory allocation: cache-line alignment. */
450 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
451 CLIB_CACHE_LINE_BYTES);
455 vlib_physmem_free (vlib_main_t * vm, void *mem)
/* Release physmem obtained via vlib_physmem_alloc*(). */
457 return vm->os_physmem_free (mem);
461 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
463 vlib_physmem_main_t *pm = &vm->physmem_main;
/* Offset within the physmem virtual region, then map to physical. */
464 uword o = pointer_to_uword (mem) - pm->virtual.start;
465 return vlib_physmem_offset_to_physical (pm, o);
468 /* Append given data to end of buffer, possibly allocating new buffers. */
469 u32 vlib_buffer_add_data (vlib_main_t * vm,
471 u32 buffer_index, void *data, u32 n_data_bytes);
473 /* duplicate all buffers in chain */
474 always_inline vlib_buffer_t *
475 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
477 vlib_buffer_t *s, *d, *fd;
478 uword n_alloc, n_buffers = 1;
479 u32 *new_buffers = 0;
/* Only chain-related flags are propagated to the copies. */
480 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
/* Pass 1: count the buffers in the source chain. */
484 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
487 s = vlib_get_buffer (vm, s->next_buffer);
490 vec_validate (new_buffers, n_buffers - 1);
491 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
493 /* No guarantee that we'll get all the buffers we asked for */
494 if (PREDICT_FALSE (n_alloc < n_buffers))
497 vlib_buffer_free (vm, new_buffers, n_alloc);
498 vec_free (new_buffers);
/* Pass 2: copy the head buffer, including metadata and opaque area. */
504 fd = d = vlib_get_buffer (vm, new_buffers[0]);
505 d->current_data = s->current_data;
506 d->current_length = s->current_length;
507 d->flags = s->flags & flag_mask;
508 d->total_length_not_including_first_buffer =
509 s->total_length_not_including_first_buffer;
510 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
511 clib_memcpy (vlib_buffer_get_current (d),
512 vlib_buffer_get_current (s), s->current_length);
/* Copy the remaining buffers, rebuilding the next_buffer links. */
515 for (i = 1; i < n_buffers; i++)
518 d->next_buffer = new_buffers[i];
520 s = vlib_get_buffer (vm, s->next_buffer);
521 d = vlib_get_buffer (vm, new_buffers[i]);
522 d->current_data = s->current_data;
523 d->current_length = s->current_length;
524 clib_memcpy (vlib_buffer_get_current (d),
525 vlib_buffer_get_current (s), s->current_length);
526 d->flags = s->flags & flag_mask;
/* Index vector is no longer needed; 'fd' is the head of the copy. */
529 vec_free (new_buffers);
533 /** \brief Create multiple clones of buffer and store them in the supplied array
535 @param vm - (vlib_main_t *) vlib main data structure pointer
536 @param src_buffer - (u32) source buffer index
537 @param buffers - (u32 * ) buffer index array
538 @param n_buffers - (u8) number of buffer clones requested
539 @param head_end_offset - (u16) offset relative to current position
540 where packet head ends
541 @return - (u8) number of buffers actually cloned, may be
542 less than the number requested or zero
546 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
547 u8 n_buffers, u16 head_end_offset)
550 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
/* Cloning an already-referenced buffer is not supported. */
552 ASSERT (s->n_add_refs == 0);
/* Tiny-head case: cheaper to deep-copy than to clone, so the caller
   keeps the source as buffers[0] and gets full copies for the rest. */
555 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
557 buffers[0] = src_buffer;
558 for (i = 1; i < n_buffers; i++)
561 d = vlib_buffer_copy (vm, s);
564 buffers[i] = vlib_get_buffer_index (vm, d);
/* Normal case: allocate one new head buffer per clone. */
570 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
/* Allocation failed entirely: hand back the source unchanged. */
572 if (PREDICT_FALSE (n_buffers == 0))
574 buffers[0] = src_buffer;
/* Each clone gets a private copy of the packet head and then chains
   to the shared source buffer for the rest of the payload. */
578 for (i = 0; i < n_buffers; i++)
580 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
581 d->current_data = s->current_data;
582 d->current_length = head_end_offset;
583 d->free_list_index = s->free_list_index;
584 d->total_length_not_including_first_buffer =
585 s->total_length_not_including_first_buffer + s->current_length -
587 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
588 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
589 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
590 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
592 d->next_buffer = src_buffer;
/* The source now begins after the head and carries the extra
   references — one per clone beyond the first — on every buffer in
   its chain. */
594 vlib_buffer_advance (s, head_end_offset);
595 s->n_add_refs = n_buffers - 1;
596 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
598 s = vlib_get_buffer (vm, s->next_buffer);
599 s->n_add_refs = n_buffers - 1;
605 /** \brief Attach cloned tail to the buffer
607 @param vm - (vlib_main_t *) vlib main data structure pointer
608 @param head - (vlib_buffer_t *) head buffer
609 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
613 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
614 vlib_buffer_t * tail)
/* Head must currently be chain-less, and both buffers must come from
   the same free list. */
616 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
617 ASSERT (head->free_list_index == tail->free_list_index);
619 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
/* Head's total-length validity is inherited from the tail. */
620 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
621 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
622 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
623 head->next_buffer = vlib_get_buffer_index (vm, tail);
624 head->total_length_not_including_first_buffer = tail->current_length +
625 tail->total_length_not_including_first_buffer;
/* Take one extra reference on every buffer of the tail chain;
   atomic because clones may be attached from multiple threads —
   TODO confirm against callers. */
628 __sync_add_and_fetch (&tail->n_add_refs, 1);
630 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
632 tail = vlib_get_buffer (vm, tail->next_buffer);
637 /* Initializes the buffer as an empty packet with no chained buffers. */
639 vlib_buffer_chain_init (vlib_buffer_t * first)
641 first->total_length_not_including_first_buffer = 0;
642 first->current_length = 0;
/* Zero-length, single-buffer packet: total length is trivially valid. */
643 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
644 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
647 /* The provided next_bi buffer index is appended to the end of the packet. */
648 always_inline vlib_buffer_t *
649 vlib_buffer_chain_buffer (vlib_main_t * vm,
650 vlib_buffer_t * first,
651 vlib_buffer_t * last, u32 next_bi)
653 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
/* Link the new buffer after 'last' and reset it to an empty tail. */
654 last->next_buffer = next_bi;
655 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
656 next_buffer->current_length = 0;
657 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
661 /* Increases or decreases the packet length.
662 * It does not allocate or deallocate new buffers.
663 * Therefore, the added length must be compatible
664 * with the last buffer. */
666 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
667 vlib_buffer_t * last, i32 len)
/* Adjust both the tail buffer's own length and the chain total kept
   on the head.  'len' may be negative (see comment above). */
669 last->current_length += len;
671 first->total_length_not_including_first_buffer += len;
674 /* Copy data to the end of the packet and increases its length.
675 * It does not allocate new buffers.
676 * Returns the number of copied bytes. */
678 vlib_buffer_chain_append_data (vlib_main_t * vm,
680 vlib_buffer_t * first,
681 vlib_buffer_t * last, void *data, u16 data_len)
684 vlib_buffer_free_list_buffer_size (vm, free_list_index);
/* The tail must not already overflow its buffer. */
685 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
/* Copy only as much as fits in the remaining space of 'last'; no new
   buffers are allocated here (see comment above). */
686 u16 len = clib_min (data_len,
687 n_buffer_bytes - last->current_length -
689 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
691 vlib_buffer_chain_increase_length (first, last, len);
695 /* Copy data to the end of the packet and increases its length.
696 * Allocates additional buffers from the free list if necessary.
697 * Returns the number of copied bytes.
698 * 'last' value is modified whenever new buffers are allocated and
699 * chained and points to the last buffer in the chain. */
701 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
703 vlib_buffer_t * first,
704 vlib_buffer_t ** last,
705 void *data, u16 data_len);
706 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
708 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
709 format_vlib_buffer_contents;
/* Template for stamping out pre-built packets (see the packet
   template functions declared below). */
713 /* Vector of packet data. */
716 /* Number of buffers to allocate in each call to physmem
718 u32 min_n_buffers_each_physmem_alloc;
720 /* Buffer free list for this template. */
724 } vlib_packet_template_t;
726 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
727 vlib_packet_template_t * t);
729 void vlib_packet_template_init (vlib_main_t * vm,
730 vlib_packet_template_t * t,
732 uword n_packet_data_bytes,
733 uword min_n_buffers_each_physmem_alloc,
736 void *vlib_packet_template_get_packet (vlib_main_t * vm,
737 vlib_packet_template_t * t,
741 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
/* Release the template's packet-data vector. */
743 vec_free (t->packet_data);
747 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
749 serialize_stream_t *s = &m->stream;
750 vlib_serialize_buffer_main_t *sm
751 = uword_to_pointer (m->stream.data_function_opaque,
752 vlib_serialize_buffer_main_t *);
753 vlib_main_t *vm = sm->vlib_main;
/* Bytes still unread in the current stream buffer... */
756 n = s->n_buffer_bytes - s->current_buffer_index;
/* ...plus the tail of the partially-consumed buffer chain, if any... */
757 if (sm->last_buffer != ~0)
759 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
760 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
762 b = vlib_get_buffer (vm, b->next_buffer);
763 n += b->current_length;
/* ...plus every chain still queued in the rx fifo. */
768 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
769 n += vlib_buffer_index_length_in_chain (vm, f[0]);
776 /* Set a buffer quickly into "uninitialized" state. We want this to
777 be extremely cheap and arrange for all fields that need to be
778 initialized to be in the first 128 bits of the buffer. */
780 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
781 vlib_buffer_free_list_t * fl)
783 vlib_buffer_t *src = &fl->buffer_init_template;
785 /* Make sure vlib_buffer_t is cacheline aligned and sized */
786 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
787 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
788 CLIB_CACHE_LINE_BYTES);
789 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
790 CLIB_CACHE_LINE_BYTES * 2);
792 /* Make sure buffer template is sane. */
793 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
/* Bulk-copy the template region marked [template_start, template_end)
   in vlib_buffer_t — cheapest way to reset all template fields. */
795 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
796 STRUCT_MARK_PTR (src, template_start),
797 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
798 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
800 /* Not in the first 16 octets. */
801 dst->n_add_refs = src->n_add_refs;
803 /* Make sure it really worked. */
804 #define _(f) ASSERT (dst->f == src->f);
/* A freshly initialized buffer must carry no length and no refs. */
810 ASSERT (dst->total_length_not_including_first_buffer == 0);
811 ASSERT (dst->n_add_refs == 0);
815 vlib_buffer_add_to_free_list (vlib_main_t * vm,
816 vlib_buffer_free_list_t * f,
817 u32 buffer_index, u8 do_init)
820 b = vlib_get_buffer (vm, buffer_index);
/* Optionally re-stamp the buffer from the free list's template
   before returning it to the list. */
821 if (PREDICT_TRUE (do_init))
822 vlib_buffer_init_for_free_list (b, f);
823 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
827 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
828 vlib_buffer_t * dst1,
829 vlib_buffer_free_list_t * fl)
/* Two-at-a-time variant of vlib_buffer_init_for_free_list. */
831 vlib_buffer_t *src = &fl->buffer_init_template;
833 /* Make sure buffer template is sane. */
834 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
836 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
837 STRUCT_MARK_PTR (src, template_start),
838 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
839 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
841 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
842 STRUCT_MARK_PTR (src, template_start),
843 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
844 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
846 /* Not in the first 16 octets. */
847 dst0->n_add_refs = src->n_add_refs;
848 dst1->n_add_refs = src->n_add_refs;
850 /* Make sure it really worked. */
851 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
/* Both buffers must leave here with no length and no extra refs. */
858 ASSERT (dst0->total_length_not_including_first_buffer == 0);
859 ASSERT (dst1->total_length_not_including_first_buffer == 0);
860 ASSERT (dst0->n_add_refs == 0);
861 ASSERT (dst1->n_add_refs == 0);
865 extern u32 *vlib_buffer_state_validation_lock;
866 extern uword *vlib_buffer_state_validation_hash;
867 extern void *vlib_buffer_state_heap;
871 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
/* Debug aid: cross-check a buffer's expected busy/free state against
   the shared validation hash, warning on mismatch.  The hash lives on
   its own heap and is protected by a test-and-set spin lock. */
877 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
879 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
882 p = hash_get (vlib_buffer_state_validation_hash, b);
884 /* If we don't know about b, declare it to be in the expected state */
887 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Known buffer in the wrong state: emit a warning with its index. */
891 if (p[0] != expected)
895 vlib_main_t *vm = &vlib_global_main;
899 bi = vlib_get_buffer_index (vm, b);
/* Restore the caller's heap before formatting the warning. */
901 clib_mem_set_heap (oldheap);
902 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
903 vlib_time_now (vm), bi,
904 p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Release the lock and restore the caller's heap. */
908 CLIB_MEMORY_BARRIER ();
909 *vlib_buffer_state_validation_lock = 0;
910 clib_mem_set_heap (oldheap);
915 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
/* Debug aid: unconditionally record the buffer's busy/free state in
   the validation hash, under the same spin lock as the check above. */
920 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
922 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
925 hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Release the lock and restore the caller's heap. */
927 CLIB_MEMORY_BARRIER ();
928 *vlib_buffer_state_validation_lock = 0;
929 clib_mem_set_heap (oldheap);
933 #endif /* included_vlib_buffer_funcs_h */
936 * fd.io coding-style-patch-verification: ON
939 * eval: (c-set-style "gnu")