2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 return vlib_physmem_at_offset (&vm->physmem_main, ((uword) buffer_index)
60 << CLIB_LOG2_CACHE_LINE_BYTES);
63 /** \brief Translate buffer pointer into buffer index
65 @param vm - (vlib_main_t *) vlib main data structure pointer
66 @param p - (void *) buffer pointer
67 @return - (u32) buffer index
70 vlib_get_buffer_index (vlib_main_t * vm, void *p)
72 uword offset = vlib_physmem_offset_of (&vm->physmem_main, p);
73 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
74 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
77 /** \brief Get next buffer in buffer linklist, or zero for end of list.
79 @param vm - (vlib_main_t *) vlib main data structure pointer
80 @param b - (void *) buffer pointer
81 @return - (vlib_buffer_t *) next buffer, or NULL
83 always_inline vlib_buffer_t *
84 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
86 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
87 ? vlib_get_buffer (vm, b->next_buffer) : 0);
90 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
91 vlib_buffer_t * b_first);
93 /** \brief Get length in bytes of the buffer chain
95 @param vm - (vlib_main_t *) vlib main data structure pointer
96 @param b - (void *) buffer pointer
97 @return - (uword) length of buffer chain
100 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
102 uword l = b->current_length + b->total_length_not_including_first_buffer;
103 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
104 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
105 == VLIB_BUFFER_NEXT_PRESENT))
106 return vlib_buffer_length_in_chain_slow_path (vm, b);
110 /** \brief Get length in bytes of the buffer index buffer chain
112 @param vm - (vlib_main_t *) vlib main data structure pointer
113 @param bi - (u32) buffer index
114 @return - (uword) length of buffer chain
117 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
119 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
120 return vlib_buffer_length_in_chain (vm, b);
123 /** \brief Copy buffer contents to memory
125 @param vm - (vlib_main_t *) vlib main data structure pointer
126 @param buffer_index - (u32) buffer index
127 @param contents - (u8 *) memory, <strong>must be large enough</strong>
128 @return - (uword) length of buffer chain
131 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
133 uword content_len = 0;
139 b = vlib_get_buffer (vm, buffer_index);
140 l = b->current_length;
141 clib_memcpy (contents + content_len, b->data + b->current_data, l);
143 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
145 buffer_index = b->next_buffer;
151 /* Return physical address of buffer->data start. */
153 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
155 return vlib_physmem_offset_to_physical (&vm->physmem_main,
156 (((uword) buffer_index) <<
157 CLIB_LOG2_CACHE_LINE_BYTES) +
158 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/** \brief Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
/* NOTE: buffer_main is a pointer member (every inline accessor in this
   file reads it as `bm = vm->buffer_main`), so the previous
   `&_vmain->buffer_main` produced a pointer-to-pointer; fixed to read
   the pointer directly for consistency with the rest of the file. */
#define vlib_buffer_foreach_allocated(vm,bi,body)		\
do {								\
  vlib_main_t * _vmain = (vm);					\
  vlib_buffer_main_t * _bmain = _vmain->buffer_main;		\
  hash_pair_t * _vbpair;					\
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({	\
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {	\
      (bi) = _vbpair->key;					\
      body;							\
    }								\
  }));								\
} while (0)
/* Tracked allocation state of a buffer index (see buffer_known_hash). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
208 always_inline vlib_buffer_known_state_t
209 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
211 vlib_buffer_main_t *bm = vm->buffer_main;
212 ASSERT (os_get_cpu_number () == 0);
214 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
215 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
219 vlib_buffer_set_known_state (vlib_main_t * vm,
221 vlib_buffer_known_state_t state)
223 vlib_buffer_main_t *bm = vm->buffer_main;
224 ASSERT (os_get_cpu_number () == 0);
225 hash_set (bm->buffer_known_hash, buffer_index, state);
228 /* Validates sanity of a single buffer.
229 Returns format'ed vector with error message if any. */
230 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
233 clib_error_t *vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
236 /** \brief Allocate buffers into supplied array
238 @param vm - (vlib_main_t *) vlib main data structure pointer
239 @param buffers - (u32 * ) buffer index array
240 @param n_buffers - (u32) number of buffers requested
241 @return - (u32) number of buffers actually allocated, may be
242 less than the number requested or zero
245 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
247 vlib_buffer_main_t *bm = vm->buffer_main;
249 ASSERT (bm->cb.vlib_buffer_alloc_cb);
251 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
255 vlib_buffer_round_size (u32 size)
257 return round_pow2 (size, sizeof (vlib_buffer_t));
260 /** \brief Allocate buffers from specific freelist into supplied array
262 @param vm - (vlib_main_t *) vlib main data structure pointer
263 @param buffers - (u32 * ) buffer index array
264 @param n_buffers - (u32) number of buffers requested
265 @return - (u32) number of buffers actually allocated, may be
266 less than the number requested or zero
269 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
271 u32 n_buffers, u32 free_list_index)
273 vlib_buffer_main_t *bm = vm->buffer_main;
275 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
277 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
281 /** \brief Free buffers
282 Frees the entire buffer chain for each buffer
284 @param vm - (vlib_main_t *) vlib main data structure pointer
285 @param buffers - (u32 * ) buffer index array
286 @param n_buffers - (u32) number of buffers to free
290 vlib_buffer_free (vlib_main_t * vm,
291 /* pointer to first buffer */
293 /* number of buffers to free */
296 vlib_buffer_main_t *bm = vm->buffer_main;
298 ASSERT (bm->cb.vlib_buffer_free_cb);
300 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
303 /** \brief Free buffers, does not free the buffer chain for each buffer
305 @param vm - (vlib_main_t *) vlib main data structure pointer
306 @param buffers - (u32 * ) buffer index array
307 @param n_buffers - (u32) number of buffers to free
311 vlib_buffer_free_no_next (vlib_main_t * vm,
312 /* pointer to first buffer */
314 /* number of buffers to free */
317 vlib_buffer_main_t *bm = vm->buffer_main;
319 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
321 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
324 /** \brief Free one buffer
325 Shorthand to free a single buffer chain.
327 @param vm - (vlib_main_t *) vlib main data structure pointer
328 @param buffer_index - (u32) buffer index to free
331 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
333 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
336 /* Add/delete buffer free lists. */
337 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
340 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
342 vlib_buffer_main_t *bm = vm->buffer_main;
344 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
346 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
349 /* Find already existing public free list with given size or create one. */
350 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
353 /* Merge two free lists */
354 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
355 vlib_buffer_free_list_t * src);
357 /* Make sure we have at least given number of unaligned buffers. */
358 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
359 vlib_buffer_free_list_t *
361 uword n_unaligned_buffers);
364 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
366 vlib_buffer_main_t *bm = vm->buffer_main;
368 size = vlib_buffer_round_size (size);
369 uword *p = hash_get (bm->free_list_by_size, size);
370 return p ? p[0] : ~0;
373 always_inline vlib_buffer_free_list_t *
374 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
377 vlib_buffer_main_t *bm = vm->buffer_main;
380 *index = i = b->free_list_index;
381 return pool_elt_at_index (bm->buffer_free_list_pool, i);
384 always_inline vlib_buffer_free_list_t *
385 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
387 vlib_buffer_main_t *bm = vm->buffer_main;
388 vlib_buffer_free_list_t *f;
390 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
392 /* Sanity: indices must match. */
393 ASSERT (f->index == free_list_index);
399 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
401 vlib_buffer_free_list_t *f =
402 vlib_buffer_get_free_list (vm, free_list_index);
403 return f->n_data_bytes;
406 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
408 /* Reasonably fast buffer copy routine. */
410 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
432 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
433 uword n_bytes, uword alignment)
436 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
439 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
446 /* By default allocate I/O memory with cache line alignment. */
448 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
450 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
451 CLIB_CACHE_LINE_BYTES);
455 vlib_physmem_free (vlib_main_t * vm, void *mem)
457 return vm->os_physmem_free (mem);
461 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
463 vlib_physmem_main_t *pm = &vm->physmem_main;
464 uword o = pointer_to_uword (mem) - pm->virtual.start;
465 return vlib_physmem_offset_to_physical (pm, o);
468 /* Append given data to end of buffer, possibly allocating new buffers. */
469 u32 vlib_buffer_add_data (vlib_main_t * vm,
471 u32 buffer_index, void *data, u32 n_data_bytes);
473 /* duplicate all buffers in chain */
474 always_inline vlib_buffer_t *
475 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
477 vlib_buffer_t *s, *d, *fd;
478 uword n_alloc, n_buffers = 1;
479 u32 *new_buffers = 0;
480 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
484 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
487 s = vlib_get_buffer (vm, s->next_buffer);
490 vec_validate (new_buffers, n_buffers - 1);
491 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
492 ASSERT (n_alloc == n_buffers);
496 fd = d = vlib_get_buffer (vm, new_buffers[0]);
497 d->current_data = s->current_data;
498 d->current_length = s->current_length;
499 d->flags = s->flags & flag_mask;
500 d->total_length_not_including_first_buffer =
501 s->total_length_not_including_first_buffer;
502 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
503 clib_memcpy (vlib_buffer_get_current (d),
504 vlib_buffer_get_current (s), s->current_length);
507 for (i = 1; i < n_buffers; i++)
510 d->next_buffer = new_buffers[i];
512 s = vlib_get_buffer (vm, s->next_buffer);
513 d = vlib_get_buffer (vm, new_buffers[i]);
514 d->current_data = s->current_data;
515 d->current_length = s->current_length;
516 clib_memcpy (vlib_buffer_get_current (d),
517 vlib_buffer_get_current (s), s->current_length);
518 d->flags = s->flags & flag_mask;
524 /* Initializes the buffer as an empty packet with no chained buffers. */
526 vlib_buffer_chain_init (vlib_buffer_t * first)
528 first->total_length_not_including_first_buffer = 0;
529 first->current_length = 0;
530 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
531 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
534 /* The provided next_bi buffer index is appended to the end of the packet. */
535 always_inline vlib_buffer_t *
536 vlib_buffer_chain_buffer (vlib_main_t * vm,
537 vlib_buffer_t * first,
538 vlib_buffer_t * last, u32 next_bi)
540 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
541 last->next_buffer = next_bi;
542 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
543 next_buffer->current_length = 0;
544 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
548 /* Increases or decreases the packet length.
549 * It does not allocate or deallocate new buffers.
550 * Therefore, the added length must be compatible
551 * with the last buffer. */
553 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
554 vlib_buffer_t * last, i32 len)
556 last->current_length += len;
558 first->total_length_not_including_first_buffer += len;
561 /* Copy data to the end of the packet and increases its length.
562 * It does not allocate new buffers.
563 * Returns the number of copied bytes. */
565 vlib_buffer_chain_append_data (vlib_main_t * vm,
567 vlib_buffer_t * first,
568 vlib_buffer_t * last, void *data, u16 data_len)
571 vlib_buffer_free_list_buffer_size (vm, free_list_index);
572 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
573 u16 len = clib_min (data_len,
574 n_buffer_bytes - last->current_length -
576 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
578 vlib_buffer_chain_increase_length (first, last, len);
582 /* Copy data to the end of the packet and increases its length.
583 * Allocates additional buffers from the free list if necessary.
584 * Returns the number of copied bytes.
585 * 'last' value is modified whenever new buffers are allocated and
586 * chained and points to the last buffer in the chain. */
588 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
590 vlib_buffer_t * first,
591 vlib_buffer_t ** last,
592 void *data, u16 data_len);
593 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
595 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
596 format_vlib_buffer_contents;
600 /* Vector of packet data. */
603 /* Number of buffers to allocate in each call to physmem
605 u32 min_n_buffers_each_physmem_alloc;
607 /* Buffer free list for this template. */
611 } vlib_packet_template_t;
613 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
614 vlib_packet_template_t * t);
616 void vlib_packet_template_init (vlib_main_t * vm,
617 vlib_packet_template_t * t,
619 uword n_packet_data_bytes,
620 uword min_n_buffers_each_physmem_alloc,
623 void *vlib_packet_template_get_packet (vlib_main_t * vm,
624 vlib_packet_template_t * t,
628 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
630 vec_free (t->packet_data);
634 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
636 serialize_stream_t *s = &m->stream;
637 vlib_serialize_buffer_main_t *sm
638 = uword_to_pointer (m->stream.data_function_opaque,
639 vlib_serialize_buffer_main_t *);
640 vlib_main_t *vm = sm->vlib_main;
643 n = s->n_buffer_bytes - s->current_buffer_index;
644 if (sm->last_buffer != ~0)
646 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
647 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
649 b = vlib_get_buffer (vm, b->next_buffer);
650 n += b->current_length;
655 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
656 n += vlib_buffer_index_length_in_chain (vm, f[0]);
/* Set a buffer quickly into "uninitialized" state. We want this to
be extremely cheap and arrange for all fields that need to be
initialized to be in the first 128 bits of the buffer. */
/* NOTE(review): the template field-copy statements that use the _()
   macro below are not visible in this extract — confirm the full body
   before relying on this text. */
vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
vlib_buffer_free_list_t * fl)
/* Copy source: the free list's buffer init template. */
vlib_buffer_t *src = &fl->buffer_init_template;
/* Make sure vlib_buffer_t is cacheline aligned and sized */
ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
CLIB_CACHE_LINE_BYTES);
ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
CLIB_CACHE_LINE_BYTES * 2);
/* Make sure buffer template is sane. */
ASSERT (fl->index == fl->buffer_init_template.free_list_index);
/* Make sure it really worked. */
/* Per-field copy from template into *dst. */
#define _(f) dst->f = src->f
ASSERT (dst->total_length_not_including_first_buffer == 0);
693 vlib_buffer_add_to_free_list (vlib_main_t * vm,
694 vlib_buffer_free_list_t * f,
695 u32 buffer_index, u8 do_init)
698 b = vlib_get_buffer (vm, buffer_index);
699 if (PREDICT_TRUE (do_init))
700 vlib_buffer_init_for_free_list (b, f);
701 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
/* Two-buffer variant of vlib_buffer_init_for_free_list: initialize a
   pair of buffers from the free list's template in one pass.
   NOTE(review): the _() field-copy statements are not visible in this
   extract — confirm the full body before relying on this text. */
vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
vlib_buffer_t * dst1,
vlib_buffer_free_list_t * fl)
/* Copy source: the free list's buffer init template. */
vlib_buffer_t *src = &fl->buffer_init_template;
/* Make sure buffer template is sane. */
ASSERT (fl->index == fl->buffer_init_template.free_list_index);
/* Make sure it really worked. */
/* Per-field copy from template into both destinations. */
#define _(f) dst0->f = src->f; dst1->f = src->f
ASSERT (dst0->total_length_not_including_first_buffer == 0);
ASSERT (dst1->total_length_not_including_first_buffer == 0);
726 extern u32 *vlib_buffer_state_validation_lock;
727 extern uword *vlib_buffer_state_validation_hash;
728 extern void *vlib_buffer_state_heap;
/* Debug aid: check that buffer b is in the 'expected' busy/free state
   recorded in vlib_buffer_state_validation_hash, warning if not.
   Serialized by a spin lock; runs on a private validation heap.
   NOTE(review): surrounding #if CLIB_DEBUG guards and early returns
   appear truncated in this extract — confirm against the full source. */
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
/* Switch to the validation heap so hash operations allocate there. */
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
p = hash_get (vlib_buffer_state_validation_hash, b);
/* If we don't know about b, declare it to be in the expected state */
hash_set (vlib_buffer_state_validation_hash, b, expected);
if (p[0] != expected)
vlib_main_t *vm = &vlib_global_main;
/* Recover the buffer index only for the warning message. */
bi = vlib_get_buffer_index (vm, b);
clib_mem_set_heap (oldheap);
clib_warning ("%.6f buffer %llx (%d): %s, not %s",
vlib_time_now (vm), bi,
p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Publish hash updates before releasing the lock. */
CLIB_MEMORY_BARRIER ();
*vlib_buffer_state_validation_lock = 0;
clib_mem_set_heap (oldheap);
/* Debug aid: unconditionally record buffer b's busy/free state in
   vlib_buffer_state_validation_hash (companion to
   vlib_validate_buffer_in_use). Serialized by the same spin lock. */
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
/* Switch to the validation heap so hash operations allocate there. */
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock. */
while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Publish the hash update before releasing the lock. */
CLIB_MEMORY_BARRIER ();
*vlib_buffer_state_validation_lock = 0;
clib_mem_set_heap (oldheap);
794 #endif /* included_vlib_buffer_funcs_h */
797 * fd.io coding-style-patch-verification: ON
800 * eval: (c-set-style "gnu")