/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef included_vlib_buffer_funcs_h
#define included_vlib_buffer_funcs_h

#include <vppinfra/hash.h>

/** \file
    vlib buffer access methods.
*/
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword l = b->current_length + b->total_length_not_including_first_buffer;
110 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
111 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
112 == VLIB_BUFFER_NEXT_PRESENT))
113 return vlib_buffer_length_in_chain_slow_path (vm, b);
117 /** \brief Get length in bytes of the buffer index buffer chain
119 @param vm - (vlib_main_t *) vlib main data structure pointer
120 @param bi - (u32) buffer index
121 @return - (uword) length of buffer chain
124 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
126 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
127 return vlib_buffer_length_in_chain (vm, b);
130 /** \brief Copy buffer contents to memory
132 @param vm - (vlib_main_t *) vlib main data structure pointer
133 @param buffer_index - (u32) buffer index
134 @param contents - (u8 *) memory, <strong>must be large enough</strong>
135 @return - (uword) length of buffer chain
138 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
140 uword content_len = 0;
146 b = vlib_get_buffer (vm, buffer_index);
147 l = b->current_length;
148 clib_memcpy (contents + content_len, b->data + b->current_data, l);
150 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
152 buffer_index = b->next_buffer;
158 /* Return physical address of buffer->data start. */
160 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
162 return vlib_physmem_offset_to_physical (&vm->physmem_main,
163 (((uword) buffer_index) <<
164 CLIB_LOG2_CACHE_LINE_BYTES) +
165 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
/* NOTE(review): this macro takes the address of vm->buffer_main, while the
   inline functions in this file treat vm->buffer_main as a pointer —
   confirm against the vlib_main_t definition before relying on it. */
#define vlib_buffer_foreach_allocated(vm,bi,body)		\
do {								\
  vlib_main_t * _vmain = (vm);					\
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;		\
  hash_pair_t * _vbpair;					\
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({	\
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {	\
      (bi) = _vbpair->key;					\
      body;							\
    }								\
  }));								\
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
215 always_inline vlib_buffer_known_state_t
216 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
218 vlib_buffer_main_t *bm = vm->buffer_main;
219 ASSERT (vlib_get_thread_index () == 0);
221 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
222 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
226 vlib_buffer_set_known_state (vlib_main_t * vm,
228 vlib_buffer_known_state_t state)
230 vlib_buffer_main_t *bm = vm->buffer_main;
231 ASSERT (vlib_get_thread_index () == 0);
232 hash_set (bm->buffer_known_hash, buffer_index, state);
235 /* Validates sanity of a single buffer.
236 Returns format'ed vector with error message if any. */
237 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
240 clib_error_t *vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
243 /** \brief Allocate buffers into supplied array
245 @param vm - (vlib_main_t *) vlib main data structure pointer
246 @param buffers - (u32 * ) buffer index array
247 @param n_buffers - (u32) number of buffers requested
248 @return - (u32) number of buffers actually allocated, may be
249 less than the number requested or zero
252 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
254 vlib_buffer_main_t *bm = vm->buffer_main;
256 ASSERT (bm->cb.vlib_buffer_alloc_cb);
258 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
262 vlib_buffer_round_size (u32 size)
264 return round_pow2 (size, sizeof (vlib_buffer_t));
267 /** \brief Allocate buffers from specific freelist into supplied array
269 @param vm - (vlib_main_t *) vlib main data structure pointer
270 @param buffers - (u32 * ) buffer index array
271 @param n_buffers - (u32) number of buffers requested
272 @return - (u32) number of buffers actually allocated, may be
273 less than the number requested or zero
276 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
278 u32 n_buffers, u32 free_list_index)
280 vlib_buffer_main_t *bm = vm->buffer_main;
282 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
284 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
288 /** \brief Free buffers
289 Frees the entire buffer chain for each buffer
291 @param vm - (vlib_main_t *) vlib main data structure pointer
292 @param buffers - (u32 * ) buffer index array
293 @param n_buffers - (u32) number of buffers to free
297 vlib_buffer_free (vlib_main_t * vm,
298 /* pointer to first buffer */
300 /* number of buffers to free */
303 vlib_buffer_main_t *bm = vm->buffer_main;
305 ASSERT (bm->cb.vlib_buffer_free_cb);
307 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
310 /** \brief Free buffers, does not free the buffer chain for each buffer
312 @param vm - (vlib_main_t *) vlib main data structure pointer
313 @param buffers - (u32 * ) buffer index array
314 @param n_buffers - (u32) number of buffers to free
318 vlib_buffer_free_no_next (vlib_main_t * vm,
319 /* pointer to first buffer */
321 /* number of buffers to free */
324 vlib_buffer_main_t *bm = vm->buffer_main;
326 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
328 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
331 /** \brief Free one buffer
332 Shorthand to free a single buffer chain.
334 @param vm - (vlib_main_t *) vlib main data structure pointer
335 @param buffer_index - (u32) buffer index to free
338 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
340 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
343 /* Add/delete buffer free lists. */
344 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
347 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
349 vlib_buffer_main_t *bm = vm->buffer_main;
351 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
353 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
356 /* Find already existing public free list with given size or create one. */
357 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
360 /* Merge two free lists */
361 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
362 vlib_buffer_free_list_t * src);
364 /* Make sure we have at least given number of unaligned buffers. */
365 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
366 vlib_buffer_free_list_t *
368 uword n_unaligned_buffers);
371 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
373 vlib_buffer_main_t *bm = vm->buffer_main;
375 size = vlib_buffer_round_size (size);
376 uword *p = hash_get (bm->free_list_by_size, size);
377 return p ? p[0] : ~0;
380 always_inline vlib_buffer_free_list_t *
381 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
384 vlib_buffer_main_t *bm = vm->buffer_main;
387 *index = i = b->free_list_index;
388 return pool_elt_at_index (bm->buffer_free_list_pool, i);
391 always_inline vlib_buffer_free_list_t *
392 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
394 vlib_buffer_main_t *bm = vm->buffer_main;
395 vlib_buffer_free_list_t *f;
397 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
399 /* Sanity: indices must match. */
400 ASSERT (f->index == free_list_index);
406 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
408 vlib_buffer_free_list_t *f =
409 vlib_buffer_get_free_list (vm, free_list_index);
410 return f->n_data_bytes;
413 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
415 /* Reasonably fast buffer copy routine. */
417 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
439 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
440 uword n_bytes, uword alignment)
443 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
446 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
453 /* By default allocate I/O memory with cache line alignment. */
455 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
457 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
458 CLIB_CACHE_LINE_BYTES);
462 vlib_physmem_free (vlib_main_t * vm, void *mem)
464 return vm->os_physmem_free (mem);
468 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
470 vlib_physmem_main_t *pm = &vm->physmem_main;
471 uword o = pointer_to_uword (mem) - pm->virtual.start;
472 return vlib_physmem_offset_to_physical (pm, o);
475 /* Append given data to end of buffer, possibly allocating new buffers. */
476 u32 vlib_buffer_add_data (vlib_main_t * vm,
478 u32 buffer_index, void *data, u32 n_data_bytes);
480 /* duplicate all buffers in chain */
481 always_inline vlib_buffer_t *
482 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
484 vlib_buffer_t *s, *d, *fd;
485 uword n_alloc, n_buffers = 1;
486 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
490 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
493 s = vlib_get_buffer (vm, s->next_buffer);
495 u32 new_buffers[n_buffers];
497 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
499 /* No guarantee that we'll get all the buffers we asked for */
500 if (PREDICT_FALSE (n_alloc < n_buffers))
503 vlib_buffer_free (vm, new_buffers, n_alloc);
509 fd = d = vlib_get_buffer (vm, new_buffers[0]);
510 d->current_data = s->current_data;
511 d->current_length = s->current_length;
512 d->flags = s->flags & flag_mask;
513 d->total_length_not_including_first_buffer =
514 s->total_length_not_including_first_buffer;
515 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
516 clib_memcpy (vlib_buffer_get_current (d),
517 vlib_buffer_get_current (s), s->current_length);
520 for (i = 1; i < n_buffers; i++)
523 d->next_buffer = new_buffers[i];
525 s = vlib_get_buffer (vm, s->next_buffer);
526 d = vlib_get_buffer (vm, new_buffers[i]);
527 d->current_data = s->current_data;
528 d->current_length = s->current_length;
529 clib_memcpy (vlib_buffer_get_current (d),
530 vlib_buffer_get_current (s), s->current_length);
531 d->flags = s->flags & flag_mask;
537 /** \brief Create multiple clones of buffer and store them in the supplied array
539 @param vm - (vlib_main_t *) vlib main data structure pointer
540 @param src_buffer - (u32) source buffer index
541 @param buffers - (u32 * ) buffer index array
542 @param n_buffers - (u8) number of buffer clones requested
543 @param head_end_offset - (u16) offset relative to current position
544 where packet head ends
545 @return - (u8) number of buffers actually cloned, may be
546 less than the number requested or zero
550 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
551 u8 n_buffers, u16 head_end_offset)
554 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
556 ASSERT (s->n_add_refs == 0);
559 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
561 buffers[0] = src_buffer;
562 for (i = 1; i < n_buffers; i++)
565 d = vlib_buffer_copy (vm, s);
568 buffers[i] = vlib_get_buffer_index (vm, d);
574 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
576 if (PREDICT_FALSE (n_buffers == 0))
578 buffers[0] = src_buffer;
582 for (i = 0; i < n_buffers; i++)
584 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
585 d->current_data = s->current_data;
586 d->current_length = head_end_offset;
587 d->free_list_index = s->free_list_index;
588 d->total_length_not_including_first_buffer =
589 s->total_length_not_including_first_buffer + s->current_length -
591 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
592 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
593 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
594 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
596 d->next_buffer = src_buffer;
598 vlib_buffer_advance (s, head_end_offset);
599 s->n_add_refs = n_buffers - 1;
600 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
602 s = vlib_get_buffer (vm, s->next_buffer);
603 s->n_add_refs = n_buffers - 1;
609 /** \brief Attach cloned tail to the buffer
611 @param vm - (vlib_main_t *) vlib main data structure pointer
612 @param head - (vlib_buffer_t *) head buffer
613 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
617 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
618 vlib_buffer_t * tail)
620 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
621 ASSERT (head->free_list_index == tail->free_list_index);
623 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
624 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
625 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
626 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
627 head->next_buffer = vlib_get_buffer_index (vm, tail);
628 head->total_length_not_including_first_buffer = tail->current_length +
629 tail->total_length_not_including_first_buffer;
632 __sync_add_and_fetch (&tail->n_add_refs, 1);
634 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
636 tail = vlib_get_buffer (vm, tail->next_buffer);
641 /* Initializes the buffer as an empty packet with no chained buffers. */
643 vlib_buffer_chain_init (vlib_buffer_t * first)
645 first->total_length_not_including_first_buffer = 0;
646 first->current_length = 0;
647 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
648 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
651 /* The provided next_bi buffer index is appended to the end of the packet. */
652 always_inline vlib_buffer_t *
653 vlib_buffer_chain_buffer (vlib_main_t * vm,
654 vlib_buffer_t * first,
655 vlib_buffer_t * last, u32 next_bi)
657 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
658 last->next_buffer = next_bi;
659 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
660 next_buffer->current_length = 0;
661 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
665 /* Increases or decreases the packet length.
666 * It does not allocate or deallocate new buffers.
667 * Therefore, the added length must be compatible
668 * with the last buffer. */
670 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
671 vlib_buffer_t * last, i32 len)
673 last->current_length += len;
675 first->total_length_not_including_first_buffer += len;
678 /* Copy data to the end of the packet and increases its length.
679 * It does not allocate new buffers.
680 * Returns the number of copied bytes. */
682 vlib_buffer_chain_append_data (vlib_main_t * vm,
684 vlib_buffer_t * first,
685 vlib_buffer_t * last, void *data, u16 data_len)
688 vlib_buffer_free_list_buffer_size (vm, free_list_index);
689 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
690 u16 len = clib_min (data_len,
691 n_buffer_bytes - last->current_length -
693 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
695 vlib_buffer_chain_increase_length (first, last, len);
699 /* Copy data to the end of the packet and increases its length.
700 * Allocates additional buffers from the free list if necessary.
701 * Returns the number of copied bytes.
702 * 'last' value is modified whenever new buffers are allocated and
703 * chained and points to the last buffer in the chain. */
705 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
707 vlib_buffer_t * first,
708 vlib_buffer_t ** last,
709 void *data, u16 data_len);
710 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
712 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
713 format_vlib_buffer_contents;
717 /* Vector of packet data. */
720 /* Number of buffers to allocate in each call to physmem
722 u32 min_n_buffers_each_physmem_alloc;
724 /* Buffer free list for this template. */
728 } vlib_packet_template_t;
730 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
731 vlib_packet_template_t * t);
733 void vlib_packet_template_init (vlib_main_t * vm,
734 vlib_packet_template_t * t,
736 uword n_packet_data_bytes,
737 uword min_n_buffers_each_physmem_alloc,
740 void *vlib_packet_template_get_packet (vlib_main_t * vm,
741 vlib_packet_template_t * t,
745 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
747 vec_free (t->packet_data);
751 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
753 serialize_stream_t *s = &m->stream;
754 vlib_serialize_buffer_main_t *sm
755 = uword_to_pointer (m->stream.data_function_opaque,
756 vlib_serialize_buffer_main_t *);
757 vlib_main_t *vm = sm->vlib_main;
760 n = s->n_buffer_bytes - s->current_buffer_index;
761 if (sm->last_buffer != ~0)
763 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
764 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
766 b = vlib_get_buffer (vm, b->next_buffer);
767 n += b->current_length;
772 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
773 n += vlib_buffer_index_length_in_chain (vm, f[0]);
780 /* Set a buffer quickly into "uninitialized" state. We want this to
781 be extremely cheap and arrange for all fields that need to be
782 initialized to be in the first 128 bits of the buffer. */
784 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
785 vlib_buffer_free_list_t * fl)
787 vlib_buffer_t *src = &fl->buffer_init_template;
789 /* Make sure vlib_buffer_t is cacheline aligned and sized */
790 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
791 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
792 CLIB_CACHE_LINE_BYTES);
793 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
794 CLIB_CACHE_LINE_BYTES * 2);
796 /* Make sure buffer template is sane. */
797 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
799 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
800 STRUCT_MARK_PTR (src, template_start),
801 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
802 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
804 /* Not in the first 16 octets. */
805 dst->n_add_refs = src->n_add_refs;
807 /* Make sure it really worked. */
808 #define _(f) ASSERT (dst->f == src->f);
814 ASSERT (dst->total_length_not_including_first_buffer == 0);
815 ASSERT (dst->n_add_refs == 0);
819 vlib_buffer_add_to_free_list (vlib_main_t * vm,
820 vlib_buffer_free_list_t * f,
821 u32 buffer_index, u8 do_init)
824 b = vlib_get_buffer (vm, buffer_index);
825 if (PREDICT_TRUE (do_init))
826 vlib_buffer_init_for_free_list (b, f);
827 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
831 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
832 vlib_buffer_t * dst1,
833 vlib_buffer_free_list_t * fl)
835 vlib_buffer_t *src = &fl->buffer_init_template;
837 /* Make sure buffer template is sane. */
838 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
840 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
841 STRUCT_MARK_PTR (src, template_start),
842 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
843 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
845 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
846 STRUCT_MARK_PTR (src, template_start),
847 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
848 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
850 /* Not in the first 16 octets. */
851 dst0->n_add_refs = src->n_add_refs;
852 dst1->n_add_refs = src->n_add_refs;
854 /* Make sure it really worked. */
855 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
862 ASSERT (dst0->total_length_not_including_first_buffer == 0);
863 ASSERT (dst1->total_length_not_including_first_buffer == 0);
864 ASSERT (dst0->n_add_refs == 0);
865 ASSERT (dst1->n_add_refs == 0);
/* Debug-only global state for buffer in-use/free validation. */
#if CLIB_DEBUG > 0
extern u32 *vlib_buffer_state_validation_lock;
extern uword *vlib_buffer_state_validation_hash;
extern void *vlib_buffer_state_heap;
#endif
875 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
881 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
883 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
886 p = hash_get (vlib_buffer_state_validation_hash, b);
888 /* If we don't know about b, declare it to be in the expected state */
891 hash_set (vlib_buffer_state_validation_hash, b, expected);
895 if (p[0] != expected)
899 vlib_main_t *vm = &vlib_global_main;
903 bi = vlib_get_buffer_index (vm, b);
905 clib_mem_set_heap (oldheap);
906 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
907 vlib_time_now (vm), bi,
908 p[0] ? "busy" : "free", expected ? "busy" : "free");
912 CLIB_MEMORY_BARRIER ();
913 *vlib_buffer_state_validation_lock = 0;
914 clib_mem_set_heap (oldheap);
919 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
924 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
926 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
929 hash_set (vlib_buffer_state_validation_hash, b, expected);
931 CLIB_MEMORY_BARRIER ();
932 *vlib_buffer_state_validation_lock = 0;
933 clib_mem_set_heap (oldheap);
#endif /* included_vlib_buffer_funcs_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */