2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 return vlib_physmem_at_offset (&vm->physmem_main, ((uword) buffer_index)
60 << CLIB_LOG2_CACHE_LINE_BYTES);
63 /** \brief Translate buffer pointer into buffer index
65 @param vm - (vlib_main_t *) vlib main data structure pointer
66 @param p - (void *) buffer pointer
67 @return - (u32) buffer index
70 vlib_get_buffer_index (vlib_main_t * vm, void *p)
72 uword offset = vlib_physmem_offset_of (&vm->physmem_main, p);
73 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
74 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
77 /** \brief Get next buffer in buffer linklist, or zero for end of list.
79 @param vm - (vlib_main_t *) vlib main data structure pointer
80 @param b - (void *) buffer pointer
81 @return - (vlib_buffer_t *) next buffer, or NULL
83 always_inline vlib_buffer_t *
84 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
86 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
87 ? vlib_get_buffer (vm, b->next_buffer) : 0);
90 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
91 vlib_buffer_t * b_first);
93 /** \brief Get length in bytes of the buffer chain
95 @param vm - (vlib_main_t *) vlib main data structure pointer
96 @param b - (void *) buffer pointer
97 @return - (uword) length of buffer chain
100 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
102 uword l = b->current_length + b->total_length_not_including_first_buffer;
103 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
104 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
105 == VLIB_BUFFER_NEXT_PRESENT))
106 return vlib_buffer_length_in_chain_slow_path (vm, b);
110 /** \brief Get length in bytes of the buffer index buffer chain
112 @param vm - (vlib_main_t *) vlib main data structure pointer
113 @param bi - (u32) buffer index
114 @return - (uword) length of buffer chain
117 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
119 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
120 return vlib_buffer_length_in_chain (vm, b);
123 /** \brief Copy buffer contents to memory
125 @param vm - (vlib_main_t *) vlib main data structure pointer
126 @param buffer_index - (u32) buffer index
127 @param contents - (u8 *) memory, <strong>must be large enough</strong>
128 @return - (uword) length of buffer chain
131 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
133 uword content_len = 0;
139 b = vlib_get_buffer (vm, buffer_index);
140 l = b->current_length;
141 clib_memcpy (contents + content_len, b->data + b->current_data, l);
143 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
145 buffer_index = b->next_buffer;
151 /* Return physical address of buffer->data start. */
153 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
155 return vlib_physmem_offset_to_physical (&vm->physmem_main,
156 (((uword) buffer_index) <<
157 CLIB_LOG2_CACHE_LINE_BYTES) +
158 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
/* NOTE(review): macro tail reconstructed from chunked source — confirm
   against upstream vlib/buffer_funcs.h. */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
208 always_inline vlib_buffer_known_state_t
209 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
211 vlib_buffer_main_t *bm = vm->buffer_main;
212 ASSERT (os_get_cpu_number () == 0);
214 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
215 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
219 vlib_buffer_set_known_state (vlib_main_t * vm,
221 vlib_buffer_known_state_t state)
223 vlib_buffer_main_t *bm = vm->buffer_main;
224 ASSERT (os_get_cpu_number () == 0);
225 hash_set (bm->buffer_known_hash, buffer_index, state);
228 /* Validates sanity of a single buffer.
229 Returns format'ed vector with error message if any. */
230 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
233 clib_error_t *vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
236 /** \brief Allocate buffers into supplied array
238 @param vm - (vlib_main_t *) vlib main data structure pointer
239 @param buffers - (u32 * ) buffer index array
240 @param n_buffers - (u32) number of buffers requested
241 @return - (u32) number of buffers actually allocated, may be
242 less than the number requested or zero
245 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
247 vlib_buffer_main_t *bm = vm->buffer_main;
249 ASSERT (bm->cb.vlib_buffer_alloc_cb);
251 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
255 vlib_buffer_round_size (u32 size)
257 return round_pow2 (size, sizeof (vlib_buffer_t));
260 /** \brief Allocate buffers from specific freelist into supplied array
262 @param vm - (vlib_main_t *) vlib main data structure pointer
263 @param buffers - (u32 * ) buffer index array
264 @param n_buffers - (u32) number of buffers requested
265 @return - (u32) number of buffers actually allocated, may be
266 less than the number requested or zero
269 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
271 u32 n_buffers, u32 free_list_index)
273 vlib_buffer_main_t *bm = vm->buffer_main;
275 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
277 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
281 /** \brief Free buffers
282 Frees the entire buffer chain for each buffer
284 @param vm - (vlib_main_t *) vlib main data structure pointer
285 @param buffers - (u32 * ) buffer index array
286 @param n_buffers - (u32) number of buffers to free
290 vlib_buffer_free (vlib_main_t * vm,
291 /* pointer to first buffer */
293 /* number of buffers to free */
296 vlib_buffer_main_t *bm = vm->buffer_main;
298 ASSERT (bm->cb.vlib_buffer_free_cb);
300 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
303 /** \brief Free buffers, does not free the buffer chain for each buffer
305 @param vm - (vlib_main_t *) vlib main data structure pointer
306 @param buffers - (u32 * ) buffer index array
307 @param n_buffers - (u32) number of buffers to free
311 vlib_buffer_free_no_next (vlib_main_t * vm,
312 /* pointer to first buffer */
314 /* number of buffers to free */
317 vlib_buffer_main_t *bm = vm->buffer_main;
319 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
321 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
324 /** \brief Free one buffer
325 Shorthand to free a single buffer chain.
327 @param vm - (vlib_main_t *) vlib main data structure pointer
328 @param buffer_index - (u32) buffer index to free
331 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
333 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
336 /* Add/delete buffer free lists. */
337 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
340 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
342 vlib_buffer_main_t *bm = vm->buffer_main;
344 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
346 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
349 /* Find already existing public free list with given size or create one. */
350 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
354 /* After free aligned buffers may not contain even sized chunks. */
355 void vlib_buffer_free_list_trim_aligned (vlib_buffer_free_list_t * f);
357 /* Merge two free lists */
358 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
359 vlib_buffer_free_list_t * src);
361 /* Make sure we have at least given number of unaligned buffers. */
362 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
363 vlib_buffer_free_list_t *
365 uword n_unaligned_buffers);
368 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
370 vlib_buffer_main_t *bm = vm->buffer_main;
372 size = vlib_buffer_round_size (size);
373 uword *p = hash_get (bm->free_list_by_size, size);
374 return p ? p[0] : ~0;
377 always_inline vlib_buffer_free_list_t *
378 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
381 vlib_buffer_main_t *bm = vm->buffer_main;
384 *index = i = b->free_list_index;
385 return pool_elt_at_index (bm->buffer_free_list_pool, i);
388 always_inline vlib_buffer_free_list_t *
389 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
391 vlib_buffer_main_t *bm = vm->buffer_main;
392 vlib_buffer_free_list_t *f;
394 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
396 /* Sanity: indices must match. */
397 ASSERT (f->index == free_list_index);
403 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
405 vlib_buffer_free_list_t *f =
406 vlib_buffer_get_free_list (vm, free_list_index);
407 return f->n_data_bytes;
410 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
412 /* Reasonably fast buffer copy routine. */
414 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
436 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
437 uword n_bytes, uword alignment)
440 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
443 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
450 /* By default allocate I/O memory with cache line alignment. */
452 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
454 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
455 CLIB_CACHE_LINE_BYTES);
459 vlib_physmem_free (vlib_main_t * vm, void *mem)
461 return vm->os_physmem_free (mem);
465 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
467 vlib_physmem_main_t *pm = &vm->physmem_main;
468 uword o = pointer_to_uword (mem) - pm->virtual.start;
469 return vlib_physmem_offset_to_physical (pm, o);
472 /* Append given data to end of buffer, possibly allocating new buffers. */
473 u32 vlib_buffer_add_data (vlib_main_t * vm,
475 u32 buffer_index, void *data, u32 n_data_bytes);
477 /* duplicate all buffers in chain */
478 always_inline vlib_buffer_t *
479 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
481 vlib_buffer_t *s, *d, *fd;
482 uword n_alloc, n_buffers = 1;
483 u32 *new_buffers = 0;
484 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
488 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
491 s = vlib_get_buffer (vm, s->next_buffer);
494 vec_validate (new_buffers, n_buffers - 1);
495 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
496 ASSERT (n_alloc == n_buffers);
500 fd = d = vlib_get_buffer (vm, new_buffers[0]);
501 d->current_data = s->current_data;
502 d->current_length = s->current_length;
503 d->flags = s->flags & flag_mask;
504 d->total_length_not_including_first_buffer =
505 s->total_length_not_including_first_buffer;
506 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
507 clib_memcpy (vlib_buffer_get_current (d),
508 vlib_buffer_get_current (s), s->current_length);
511 for (i = 1; i < n_buffers; i++)
514 d->next_buffer = new_buffers[i];
516 s = vlib_get_buffer (vm, s->next_buffer);
517 d = vlib_get_buffer (vm, new_buffers[i]);
518 d->current_data = s->current_data;
519 d->current_length = s->current_length;
520 clib_memcpy (vlib_buffer_get_current (d),
521 vlib_buffer_get_current (s), s->current_length);
522 d->flags = s->flags & flag_mask;
528 /* Initializes the buffer as an empty packet with no chained buffers. */
530 vlib_buffer_chain_init (vlib_buffer_t * first)
532 first->total_length_not_including_first_buffer = 0;
533 first->current_length = 0;
534 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
535 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
538 /* The provided next_bi buffer index is appended to the end of the packet. */
539 always_inline vlib_buffer_t *
540 vlib_buffer_chain_buffer (vlib_main_t * vm,
541 vlib_buffer_t * first,
542 vlib_buffer_t * last, u32 next_bi)
544 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
545 last->next_buffer = next_bi;
546 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
547 next_buffer->current_length = 0;
548 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
552 /* Increases or decreases the packet length.
553 * It does not allocate or deallocate new buffers.
554 * Therefore, the added length must be compatible
555 * with the last buffer. */
557 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
558 vlib_buffer_t * last, i32 len)
560 last->current_length += len;
562 first->total_length_not_including_first_buffer += len;
565 /* Copy data to the end of the packet and increases its length.
566 * It does not allocate new buffers.
567 * Returns the number of copied bytes. */
569 vlib_buffer_chain_append_data (vlib_main_t * vm,
571 vlib_buffer_t * first,
572 vlib_buffer_t * last, void *data, u16 data_len)
575 vlib_buffer_free_list_buffer_size (vm, free_list_index);
576 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
577 u16 len = clib_min (data_len,
578 n_buffer_bytes - last->current_length -
580 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
582 vlib_buffer_chain_increase_length (first, last, len);
586 /* Copy data to the end of the packet and increases its length.
587 * Allocates additional buffers from the free list if necessary.
588 * Returns the number of copied bytes.
589 * 'last' value is modified whenever new buffers are allocated and
590 * chained and points to the last buffer in the chain. */
592 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
594 vlib_buffer_t * first,
595 vlib_buffer_t ** last,
596 void *data, u16 data_len);
597 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
599 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
600 format_vlib_buffer_contents;
604 /* Vector of packet data. */
607 /* Number of buffers to allocate in each call to physmem
609 u32 min_n_buffers_each_physmem_alloc;
611 /* Buffer free list for this template. */
615 } vlib_packet_template_t;
617 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
618 vlib_packet_template_t * t);
620 void vlib_packet_template_init (vlib_main_t * vm,
621 vlib_packet_template_t * t,
623 uword n_packet_data_bytes,
624 uword min_n_buffers_each_physmem_alloc,
627 void *vlib_packet_template_get_packet (vlib_main_t * vm,
628 vlib_packet_template_t * t,
632 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
634 vec_free (t->packet_data);
638 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
640 serialize_stream_t *s = &m->stream;
641 vlib_serialize_buffer_main_t *sm
642 = uword_to_pointer (m->stream.data_function_opaque,
643 vlib_serialize_buffer_main_t *);
644 vlib_main_t *vm = sm->vlib_main;
647 n = s->n_buffer_bytes - s->current_buffer_index;
648 if (sm->last_buffer != ~0)
650 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
651 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
653 b = vlib_get_buffer (vm, b->next_buffer);
654 n += b->current_length;
659 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
660 n += vlib_buffer_index_length_in_chain (vm, f[0]);
670 vlib_copy_unit_t i[sizeof (vlib_buffer_t) / sizeof (vlib_copy_unit_t)];
674 /* Set a buffer quickly into "uninitialized" state. We want this to
675 be extremely cheap and arrange for all fields that need to be
676 initialized to be in the first 128 bits of the buffer. */
678 vlib_buffer_init_for_free_list (vlib_buffer_t * _dst,
679 vlib_buffer_free_list_t * fl)
681 vlib_buffer_union_t *dst = (vlib_buffer_union_t *) _dst;
682 vlib_buffer_union_t *src =
683 (vlib_buffer_union_t *) & fl->buffer_init_template;
685 /* Make sure vlib_buffer_t is cacheline aligned and sized */
686 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
687 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
688 CLIB_CACHE_LINE_BYTES);
689 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
690 CLIB_CACHE_LINE_BYTES * 2);
692 /* Make sure buffer template is sane. */
693 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
695 /* Copy template from src->current_data thru src->free_list_index */
696 dst->i[0] = src->i[0];
697 if (1 * sizeof (dst->i[0]) < 16)
698 dst->i[1] = src->i[1];
699 if (2 * sizeof (dst->i[0]) < 16)
700 dst->i[2] = src->i[2];
702 /* Make sure it really worked. */
703 #define _(f) ASSERT (dst->b.f == src->b.f)
709 ASSERT (dst->b.total_length_not_including_first_buffer == 0);
713 vlib_buffer_add_to_free_list (vlib_main_t * vm,
714 vlib_buffer_free_list_t * f,
715 u32 buffer_index, u8 do_init)
718 b = vlib_get_buffer (vm, buffer_index);
719 if (PREDICT_TRUE (do_init))
720 vlib_buffer_init_for_free_list (b, f);
721 vec_add1_aligned (f->aligned_buffers, buffer_index,
722 sizeof (vlib_copy_unit_t));
726 vlib_buffer_init_two_for_free_list (vlib_buffer_t * _dst0,
727 vlib_buffer_t * _dst1,
728 vlib_buffer_free_list_t * fl)
730 vlib_buffer_union_t *dst0 = (vlib_buffer_union_t *) _dst0;
731 vlib_buffer_union_t *dst1 = (vlib_buffer_union_t *) _dst1;
732 vlib_buffer_union_t *src =
733 (vlib_buffer_union_t *) & fl->buffer_init_template;
735 /* Make sure buffer template is sane. */
736 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
738 /* Copy template from src->current_data thru src->free_list_index */
739 dst0->i[0] = dst1->i[0] = src->i[0];
740 if (1 * sizeof (dst0->i[0]) < 16)
741 dst0->i[1] = dst1->i[1] = src->i[1];
742 if (2 * sizeof (dst0->i[0]) < 16)
743 dst0->i[2] = dst1->i[2] = src->i[2];
745 /* Make sure it really worked. */
746 #define _(f) ASSERT (dst0->b.f == src->b.f && dst1->b.f == src->b.f)
752 ASSERT (dst0->b.total_length_not_including_first_buffer == 0);
753 ASSERT (dst1->b.total_length_not_including_first_buffer == 0);
757 extern u32 *vlib_buffer_state_validation_lock;
758 extern uword *vlib_buffer_state_validation_hash;
759 extern void *vlib_buffer_state_heap;
763 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
769 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
771 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
774 p = hash_get (vlib_buffer_state_validation_hash, b);
776 /* If we don't know about b, declare it to be in the expected state */
779 hash_set (vlib_buffer_state_validation_hash, b, expected);
783 if (p[0] != expected)
787 vlib_main_t *vm = &vlib_global_main;
791 bi = vlib_get_buffer_index (vm, b);
793 clib_mem_set_heap (oldheap);
794 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
795 vlib_time_now (vm), bi,
796 p[0] ? "busy" : "free", expected ? "busy" : "free");
800 CLIB_MEMORY_BARRIER ();
801 *vlib_buffer_state_validation_lock = 0;
802 clib_mem_set_heap (oldheap);
807 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
812 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
814 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
817 hash_set (vlib_buffer_state_validation_hash, b, expected);
819 CLIB_MEMORY_BARRIER ();
820 *vlib_buffer_state_validation_lock = 0;
821 clib_mem_set_heap (oldheap);
825 #endif /* included_vlib_buffer_funcs_h */
828 * fd.io coding-style-patch-verification: ON
831 * eval: (c-set-style "gnu")