/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
/* vlib buffer access methods. */
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 return vlib_physmem_at_offset (&vm->physmem_main, ((uword) buffer_index)
60 << CLIB_LOG2_CACHE_LINE_BYTES);
63 /** \brief Translate buffer pointer into buffer index
65 @param vm - (vlib_main_t *) vlib main data structure pointer
66 @param p - (void *) buffer pointer
67 @return - (u32) buffer index
70 vlib_get_buffer_index (vlib_main_t * vm, void *p)
72 uword offset = vlib_physmem_offset_of (&vm->physmem_main, p);
73 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
74 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
77 /** \brief Get next buffer in buffer linklist, or zero for end of list.
79 @param vm - (vlib_main_t *) vlib main data structure pointer
80 @param b - (void *) buffer pointer
81 @return - (vlib_buffer_t *) next buffer, or NULL
83 always_inline vlib_buffer_t *
84 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
86 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
87 ? vlib_get_buffer (vm, b->next_buffer) : 0);
90 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
91 vlib_buffer_t * b_first);
93 /** \brief Get length in bytes of the buffer chain
95 @param vm - (vlib_main_t *) vlib main data structure pointer
96 @param b - (void *) buffer pointer
97 @return - (uword) length of buffer chain
100 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
102 uword l = b->current_length + b->total_length_not_including_first_buffer;
103 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
104 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
105 == VLIB_BUFFER_NEXT_PRESENT))
106 return vlib_buffer_length_in_chain_slow_path (vm, b);
110 /** \brief Get length in bytes of the buffer index buffer chain
112 @param vm - (vlib_main_t *) vlib main data structure pointer
113 @param bi - (u32) buffer index
114 @return - (uword) length of buffer chain
117 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
119 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
120 return vlib_buffer_length_in_chain (vm, b);
123 /** \brief Copy buffer contents to memory
125 @param vm - (vlib_main_t *) vlib main data structure pointer
126 @param buffer_index - (u32) buffer index
127 @param contents - (u8 *) memory, <strong>must be large enough</strong>
128 @return - (uword) length of buffer chain
131 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
133 uword content_len = 0;
139 b = vlib_get_buffer (vm, buffer_index);
140 l = b->current_length;
141 clib_memcpy (contents + content_len, b->data + b->current_data, l);
143 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
145 buffer_index = b->next_buffer;
151 /* Return physical address of buffer->data start. */
153 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
155 return vlib_physmem_offset_to_physical (&vm->physmem_main,
156 (((uword) buffer_index) <<
157 CLIB_LOG2_CACHE_LINE_BYTES) +
158 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  /* buffer_main is a pointer everywhere else in this file;      \
     keep that convention here. */                               \
  vlib_buffer_main_t * _bmain = _vmain->buffer_main;             \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
208 always_inline vlib_buffer_known_state_t
209 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
211 vlib_buffer_main_t *bm = vm->buffer_main;
212 ASSERT (os_get_cpu_number () == 0);
214 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
215 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
219 vlib_buffer_set_known_state (vlib_main_t * vm,
221 vlib_buffer_known_state_t state)
223 vlib_buffer_main_t *bm = vm->buffer_main;
224 ASSERT (os_get_cpu_number () == 0);
225 hash_set (bm->buffer_known_hash, buffer_index, state);
228 /* Validates sanity of a single buffer.
229 Returns format'ed vector with error message if any. */
230 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
233 clib_error_t *vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
236 /** \brief Allocate buffers into supplied array
238 @param vm - (vlib_main_t *) vlib main data structure pointer
239 @param buffers - (u32 * ) buffer index array
240 @param n_buffers - (u32) number of buffers requested
241 @return - (u32) number of buffers actually allocated, may be
242 less than the number requested or zero
245 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
247 vlib_buffer_main_t *bm = vm->buffer_main;
249 ASSERT (bm->cb.vlib_buffer_alloc_cb);
251 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
255 vlib_buffer_round_size (u32 size)
257 return round_pow2 (size, sizeof (vlib_buffer_t));
260 /** \brief Allocate buffers from specific freelist into supplied array
262 @param vm - (vlib_main_t *) vlib main data structure pointer
263 @param buffers - (u32 * ) buffer index array
264 @param n_buffers - (u32) number of buffers requested
265 @return - (u32) number of buffers actually allocated, may be
266 less than the number requested or zero
269 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
271 u32 n_buffers, u32 free_list_index)
273 vlib_buffer_main_t *bm = vm->buffer_main;
275 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
277 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
281 /** \brief Free buffers
282 Frees the entire buffer chain for each buffer
284 @param vm - (vlib_main_t *) vlib main data structure pointer
285 @param buffers - (u32 * ) buffer index array
286 @param n_buffers - (u32) number of buffers to free
290 vlib_buffer_free (vlib_main_t * vm,
291 /* pointer to first buffer */
293 /* number of buffers to free */
296 vlib_buffer_main_t *bm = vm->buffer_main;
298 ASSERT (bm->cb.vlib_buffer_free_cb);
300 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
303 /** \brief Free buffers, does not free the buffer chain for each buffer
305 @param vm - (vlib_main_t *) vlib main data structure pointer
306 @param buffers - (u32 * ) buffer index array
307 @param n_buffers - (u32) number of buffers to free
311 vlib_buffer_free_no_next (vlib_main_t * vm,
312 /* pointer to first buffer */
314 /* number of buffers to free */
317 vlib_buffer_main_t *bm = vm->buffer_main;
319 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
321 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
324 /** \brief Free one buffer
325 Shorthand to free a single buffer chain.
327 @param vm - (vlib_main_t *) vlib main data structure pointer
328 @param buffer_index - (u32) buffer index to free
331 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
333 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
336 /* Add/delete buffer free lists. */
337 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
340 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
342 vlib_buffer_main_t *bm = vm->buffer_main;
344 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
346 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
349 /* Find already existing public free list with given size or create one. */
350 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
353 /* Merge two free lists */
354 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
355 vlib_buffer_free_list_t * src);
357 /* Make sure we have at least given number of unaligned buffers. */
358 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
359 vlib_buffer_free_list_t *
361 uword n_unaligned_buffers);
364 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
366 vlib_buffer_main_t *bm = vm->buffer_main;
368 size = vlib_buffer_round_size (size);
369 uword *p = hash_get (bm->free_list_by_size, size);
370 return p ? p[0] : ~0;
373 always_inline vlib_buffer_free_list_t *
374 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
377 vlib_buffer_main_t *bm = vm->buffer_main;
380 *index = i = b->free_list_index;
381 return pool_elt_at_index (bm->buffer_free_list_pool, i);
384 always_inline vlib_buffer_free_list_t *
385 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
387 vlib_buffer_main_t *bm = vm->buffer_main;
388 vlib_buffer_free_list_t *f;
390 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
392 /* Sanity: indices must match. */
393 ASSERT (f->index == free_list_index);
399 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
401 vlib_buffer_free_list_t *f =
402 vlib_buffer_get_free_list (vm, free_list_index);
403 return f->n_data_bytes;
406 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
408 /* Reasonably fast buffer copy routine. */
410 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
432 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
433 uword n_bytes, uword alignment)
436 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
439 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
446 /* By default allocate I/O memory with cache line alignment. */
448 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
450 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
451 CLIB_CACHE_LINE_BYTES);
455 vlib_physmem_free (vlib_main_t * vm, void *mem)
457 return vm->os_physmem_free (mem);
461 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
463 vlib_physmem_main_t *pm = &vm->physmem_main;
464 uword o = pointer_to_uword (mem) - pm->virtual.start;
465 return vlib_physmem_offset_to_physical (pm, o);
468 /* Append given data to end of buffer, possibly allocating new buffers. */
469 u32 vlib_buffer_add_data (vlib_main_t * vm,
471 u32 buffer_index, void *data, u32 n_data_bytes);
473 /* duplicate all buffers in chain */
474 always_inline vlib_buffer_t *
475 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
477 vlib_buffer_t *s, *d, *fd;
478 uword n_alloc, n_buffers = 1;
479 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
483 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
486 s = vlib_get_buffer (vm, s->next_buffer);
488 u32 new_buffers[n_buffers];
490 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
492 /* No guarantee that we'll get all the buffers we asked for */
493 if (PREDICT_FALSE (n_alloc < n_buffers))
496 vlib_buffer_free (vm, new_buffers, n_alloc);
502 fd = d = vlib_get_buffer (vm, new_buffers[0]);
503 d->current_data = s->current_data;
504 d->current_length = s->current_length;
505 d->flags = s->flags & flag_mask;
506 d->total_length_not_including_first_buffer =
507 s->total_length_not_including_first_buffer;
508 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
509 clib_memcpy (vlib_buffer_get_current (d),
510 vlib_buffer_get_current (s), s->current_length);
513 for (i = 1; i < n_buffers; i++)
516 d->next_buffer = new_buffers[i];
518 s = vlib_get_buffer (vm, s->next_buffer);
519 d = vlib_get_buffer (vm, new_buffers[i]);
520 d->current_data = s->current_data;
521 d->current_length = s->current_length;
522 clib_memcpy (vlib_buffer_get_current (d),
523 vlib_buffer_get_current (s), s->current_length);
524 d->flags = s->flags & flag_mask;
530 /** \brief Create multiple clones of buffer and store them in the supplied array
532 @param vm - (vlib_main_t *) vlib main data structure pointer
533 @param src_buffer - (u32) source buffer index
534 @param buffers - (u32 * ) buffer index array
535 @param n_buffers - (u8) number of buffer clones requested
536 @param head_end_offset - (u16) offset relative to current position
537 where packet head ends
538 @return - (u8) number of buffers actually cloned, may be
539 less than the number requested or zero
543 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
544 u8 n_buffers, u16 head_end_offset)
547 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
549 ASSERT (s->n_add_refs == 0);
552 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
554 buffers[0] = src_buffer;
555 for (i = 1; i < n_buffers; i++)
558 d = vlib_buffer_copy (vm, s);
561 buffers[i] = vlib_get_buffer_index (vm, d);
567 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
569 if (PREDICT_FALSE (n_buffers == 0))
571 buffers[0] = src_buffer;
575 for (i = 0; i < n_buffers; i++)
577 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
578 d->current_data = s->current_data;
579 d->current_length = head_end_offset;
580 d->free_list_index = s->free_list_index;
581 d->total_length_not_including_first_buffer =
582 s->total_length_not_including_first_buffer + s->current_length -
584 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
585 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
586 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
587 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
589 d->next_buffer = src_buffer;
591 vlib_buffer_advance (s, head_end_offset);
592 s->n_add_refs = n_buffers - 1;
593 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
595 s = vlib_get_buffer (vm, s->next_buffer);
596 s->n_add_refs = n_buffers - 1;
602 /** \brief Attach cloned tail to the buffer
604 @param vm - (vlib_main_t *) vlib main data structure pointer
605 @param head - (vlib_buffer_t *) head buffer
606 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
610 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
611 vlib_buffer_t * tail)
613 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
614 ASSERT (head->free_list_index == tail->free_list_index);
616 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
617 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
618 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
619 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
620 head->next_buffer = vlib_get_buffer_index (vm, tail);
621 head->total_length_not_including_first_buffer = tail->current_length +
622 tail->total_length_not_including_first_buffer;
625 __sync_add_and_fetch (&tail->n_add_refs, 1);
627 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
629 tail = vlib_get_buffer (vm, tail->next_buffer);
634 /* Initializes the buffer as an empty packet with no chained buffers. */
636 vlib_buffer_chain_init (vlib_buffer_t * first)
638 first->total_length_not_including_first_buffer = 0;
639 first->current_length = 0;
640 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
641 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
644 /* The provided next_bi buffer index is appended to the end of the packet. */
645 always_inline vlib_buffer_t *
646 vlib_buffer_chain_buffer (vlib_main_t * vm,
647 vlib_buffer_t * first,
648 vlib_buffer_t * last, u32 next_bi)
650 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
651 last->next_buffer = next_bi;
652 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
653 next_buffer->current_length = 0;
654 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
658 /* Increases or decreases the packet length.
659 * It does not allocate or deallocate new buffers.
660 * Therefore, the added length must be compatible
661 * with the last buffer. */
663 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
664 vlib_buffer_t * last, i32 len)
666 last->current_length += len;
668 first->total_length_not_including_first_buffer += len;
671 /* Copy data to the end of the packet and increases its length.
672 * It does not allocate new buffers.
673 * Returns the number of copied bytes. */
675 vlib_buffer_chain_append_data (vlib_main_t * vm,
677 vlib_buffer_t * first,
678 vlib_buffer_t * last, void *data, u16 data_len)
681 vlib_buffer_free_list_buffer_size (vm, free_list_index);
682 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
683 u16 len = clib_min (data_len,
684 n_buffer_bytes - last->current_length -
686 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
688 vlib_buffer_chain_increase_length (first, last, len);
692 /* Copy data to the end of the packet and increases its length.
693 * Allocates additional buffers from the free list if necessary.
694 * Returns the number of copied bytes.
695 * 'last' value is modified whenever new buffers are allocated and
696 * chained and points to the last buffer in the chain. */
698 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
700 vlib_buffer_t * first,
701 vlib_buffer_t ** last,
702 void *data, u16 data_len);
703 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
705 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
706 format_vlib_buffer_contents;
710 /* Vector of packet data. */
713 /* Number of buffers to allocate in each call to physmem
715 u32 min_n_buffers_each_physmem_alloc;
717 /* Buffer free list for this template. */
721 } vlib_packet_template_t;
723 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
724 vlib_packet_template_t * t);
726 void vlib_packet_template_init (vlib_main_t * vm,
727 vlib_packet_template_t * t,
729 uword n_packet_data_bytes,
730 uword min_n_buffers_each_physmem_alloc,
733 void *vlib_packet_template_get_packet (vlib_main_t * vm,
734 vlib_packet_template_t * t,
738 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
740 vec_free (t->packet_data);
744 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
746 serialize_stream_t *s = &m->stream;
747 vlib_serialize_buffer_main_t *sm
748 = uword_to_pointer (m->stream.data_function_opaque,
749 vlib_serialize_buffer_main_t *);
750 vlib_main_t *vm = sm->vlib_main;
753 n = s->n_buffer_bytes - s->current_buffer_index;
754 if (sm->last_buffer != ~0)
756 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
757 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
759 b = vlib_get_buffer (vm, b->next_buffer);
760 n += b->current_length;
765 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
766 n += vlib_buffer_index_length_in_chain (vm, f[0]);
773 /* Set a buffer quickly into "uninitialized" state. We want this to
774 be extremely cheap and arrange for all fields that need to be
775 initialized to be in the first 128 bits of the buffer. */
777 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
778 vlib_buffer_free_list_t * fl)
780 vlib_buffer_t *src = &fl->buffer_init_template;
782 /* Make sure vlib_buffer_t is cacheline aligned and sized */
783 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
784 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
785 CLIB_CACHE_LINE_BYTES);
786 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
787 CLIB_CACHE_LINE_BYTES * 2);
789 /* Make sure buffer template is sane. */
790 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
792 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
793 STRUCT_MARK_PTR (src, template_start),
794 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
795 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
797 /* Not in the first 16 octets. */
798 dst->n_add_refs = src->n_add_refs;
800 /* Make sure it really worked. */
801 #define _(f) ASSERT (dst->f == src->f);
807 ASSERT (dst->total_length_not_including_first_buffer == 0);
808 ASSERT (dst->n_add_refs == 0);
812 vlib_buffer_add_to_free_list (vlib_main_t * vm,
813 vlib_buffer_free_list_t * f,
814 u32 buffer_index, u8 do_init)
817 b = vlib_get_buffer (vm, buffer_index);
818 if (PREDICT_TRUE (do_init))
819 vlib_buffer_init_for_free_list (b, f);
820 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
824 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
825 vlib_buffer_t * dst1,
826 vlib_buffer_free_list_t * fl)
828 vlib_buffer_t *src = &fl->buffer_init_template;
830 /* Make sure buffer template is sane. */
831 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
833 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
834 STRUCT_MARK_PTR (src, template_start),
835 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
836 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
838 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
839 STRUCT_MARK_PTR (src, template_start),
840 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
841 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
843 /* Not in the first 16 octets. */
844 dst0->n_add_refs = src->n_add_refs;
845 dst1->n_add_refs = src->n_add_refs;
847 /* Make sure it really worked. */
848 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
855 ASSERT (dst0->total_length_not_including_first_buffer == 0);
856 ASSERT (dst1->total_length_not_including_first_buffer == 0);
857 ASSERT (dst0->n_add_refs == 0);
858 ASSERT (dst1->n_add_refs == 0);
862 extern u32 *vlib_buffer_state_validation_lock;
863 extern uword *vlib_buffer_state_validation_hash;
864 extern void *vlib_buffer_state_heap;
868 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
874 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
876 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
879 p = hash_get (vlib_buffer_state_validation_hash, b);
881 /* If we don't know about b, declare it to be in the expected state */
884 hash_set (vlib_buffer_state_validation_hash, b, expected);
888 if (p[0] != expected)
892 vlib_main_t *vm = &vlib_global_main;
896 bi = vlib_get_buffer_index (vm, b);
898 clib_mem_set_heap (oldheap);
899 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
900 vlib_time_now (vm), bi,
901 p[0] ? "busy" : "free", expected ? "busy" : "free");
905 CLIB_MEMORY_BARRIER ();
906 *vlib_buffer_state_validation_lock = 0;
907 clib_mem_set_heap (oldheap);
912 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
917 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
919 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
922 hash_set (vlib_buffer_state_validation_hash, b, expected);
924 CLIB_MEMORY_BARRIER ();
925 *vlib_buffer_state_validation_lock = 0;
926 clib_mem_set_heap (oldheap);
930 #endif /* included_vlib_buffer_funcs_h */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */