2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 return vlib_physmem_offset_to_physical (&vm->physmem_main,
166 (((uword) buffer_index) <<
167 CLIB_LOG2_CACHE_LINE_BYTES) +
168 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked lifecycle state of a buffer index in the known-buffer hash. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
218 always_inline vlib_buffer_known_state_t
219 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
221 vlib_buffer_main_t *bm = vm->buffer_main;
222 ASSERT (vlib_get_thread_index () == 0);
224 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
225 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
229 vlib_buffer_set_known_state (vlib_main_t * vm,
231 vlib_buffer_known_state_t state)
233 vlib_buffer_main_t *bm = vm->buffer_main;
234 ASSERT (vlib_get_thread_index () == 0);
235 hash_set (bm->buffer_known_hash, buffer_index, state);
238 /* Validates sanity of a single buffer.
239 Returns format'ed vector with error message if any. */
240 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
243 /** \brief Allocate buffers into supplied array
245 @param vm - (vlib_main_t *) vlib main data structure pointer
246 @param buffers - (u32 * ) buffer index array
247 @param n_buffers - (u32) number of buffers requested
248 @return - (u32) number of buffers actually allocated, may be
249 less than the number requested or zero
252 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
254 vlib_buffer_main_t *bm = vm->buffer_main;
256 ASSERT (bm->cb.vlib_buffer_alloc_cb);
258 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
262 vlib_buffer_round_size (u32 size)
264 return round_pow2 (size, sizeof (vlib_buffer_t));
268 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
270 return b->flags & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
274 vlib_buffer_set_free_list_index (vlib_buffer_t * b, u32 index)
276 /* if there is an need for more free lists we should consider
277 storig data in the 2nd cacheline */
278 ASSERT (VLIB_BUFFER_FREE_LIST_INDEX_MASK & 1);
279 ASSERT (index <= VLIB_BUFFER_FREE_LIST_INDEX_MASK);
281 b->flags &= ~VLIB_BUFFER_FREE_LIST_INDEX_MASK;
282 b->flags |= index & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
285 /** \brief Allocate buffers from specific freelist into supplied array
287 @param vm - (vlib_main_t *) vlib main data structure pointer
288 @param buffers - (u32 * ) buffer index array
289 @param n_buffers - (u32) number of buffers requested
290 @return - (u32) number of buffers actually allocated, may be
291 less than the number requested or zero
294 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
296 u32 n_buffers, u32 free_list_index)
298 vlib_buffer_main_t *bm = vm->buffer_main;
300 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
302 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
306 /** \brief Free buffers
307 Frees the entire buffer chain for each buffer
309 @param vm - (vlib_main_t *) vlib main data structure pointer
310 @param buffers - (u32 * ) buffer index array
311 @param n_buffers - (u32) number of buffers to free
315 vlib_buffer_free (vlib_main_t * vm,
316 /* pointer to first buffer */
318 /* number of buffers to free */
321 vlib_buffer_main_t *bm = vm->buffer_main;
323 ASSERT (bm->cb.vlib_buffer_free_cb);
325 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
328 /** \brief Free buffers, does not free the buffer chain for each buffer
330 @param vm - (vlib_main_t *) vlib main data structure pointer
331 @param buffers - (u32 * ) buffer index array
332 @param n_buffers - (u32) number of buffers to free
336 vlib_buffer_free_no_next (vlib_main_t * vm,
337 /* pointer to first buffer */
339 /* number of buffers to free */
342 vlib_buffer_main_t *bm = vm->buffer_main;
344 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
346 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
349 /** \brief Free one buffer
350 Shorthand to free a single buffer chain.
352 @param vm - (vlib_main_t *) vlib main data structure pointer
353 @param buffer_index - (u32) buffer index to free
356 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
358 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
361 /* Add/delete buffer free lists. */
362 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
365 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
367 vlib_buffer_main_t *bm = vm->buffer_main;
369 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
371 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
374 /* Find already existing public free list with given size or create one. */
375 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
378 /* Merge two free lists */
379 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
380 vlib_buffer_free_list_t * src);
382 /* Make sure we have at least given number of unaligned buffers. */
383 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
384 vlib_buffer_free_list_t *
386 uword n_unaligned_buffers);
389 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
391 vlib_buffer_main_t *bm = vm->buffer_main;
393 size = vlib_buffer_round_size (size);
394 uword *p = hash_get (bm->free_list_by_size, size);
395 return p ? p[0] : ~0;
398 always_inline vlib_buffer_free_list_t *
399 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
402 vlib_buffer_main_t *bm = vm->buffer_main;
405 *index = i = vlib_buffer_get_free_list_index (b);
406 return pool_elt_at_index (bm->buffer_free_list_pool, i);
409 always_inline vlib_buffer_free_list_t *
410 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
412 vlib_buffer_main_t *bm = vm->buffer_main;
413 vlib_buffer_free_list_t *f;
415 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
417 /* Sanity: indices must match. */
418 ASSERT (f->index == free_list_index);
424 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
426 vlib_buffer_free_list_t *f =
427 vlib_buffer_get_free_list (vm, free_list_index);
428 return f->n_data_bytes;
431 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
433 /* Reasonably fast buffer copy routine. */
435 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
457 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
458 uword n_bytes, uword alignment)
461 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
464 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
471 /* By default allocate I/O memory with cache line alignment. */
473 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
475 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
476 CLIB_CACHE_LINE_BYTES);
480 vlib_physmem_free (vlib_main_t * vm, void *mem)
482 return vm->os_physmem_free (mem);
486 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
488 vlib_physmem_main_t *pm = &vm->physmem_main;
489 uword o = pointer_to_uword (mem) - pm->virtual.start;
490 return vlib_physmem_offset_to_physical (pm, o);
493 /* Append given data to end of buffer, possibly allocating new buffers. */
494 u32 vlib_buffer_add_data (vlib_main_t * vm,
496 u32 buffer_index, void *data, u32 n_data_bytes);
498 /* duplicate all buffers in chain */
499 always_inline vlib_buffer_t *
500 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
502 vlib_buffer_t *s, *d, *fd;
503 uword n_alloc, n_buffers = 1;
504 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
508 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
511 s = vlib_get_buffer (vm, s->next_buffer);
513 u32 new_buffers[n_buffers];
515 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
517 /* No guarantee that we'll get all the buffers we asked for */
518 if (PREDICT_FALSE (n_alloc < n_buffers))
521 vlib_buffer_free (vm, new_buffers, n_alloc);
527 fd = d = vlib_get_buffer (vm, new_buffers[0]);
528 d->current_data = s->current_data;
529 d->current_length = s->current_length;
530 d->flags = s->flags & flag_mask;
531 d->total_length_not_including_first_buffer =
532 s->total_length_not_including_first_buffer;
533 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
534 clib_memcpy (vlib_buffer_get_current (d),
535 vlib_buffer_get_current (s), s->current_length);
538 for (i = 1; i < n_buffers; i++)
541 d->next_buffer = new_buffers[i];
543 s = vlib_get_buffer (vm, s->next_buffer);
544 d = vlib_get_buffer (vm, new_buffers[i]);
545 d->current_data = s->current_data;
546 d->current_length = s->current_length;
547 clib_memcpy (vlib_buffer_get_current (d),
548 vlib_buffer_get_current (s), s->current_length);
549 d->flags = s->flags & flag_mask;
555 /** \brief Create multiple clones of buffer and store them in the supplied array
557 @param vm - (vlib_main_t *) vlib main data structure pointer
558 @param src_buffer - (u32) source buffer index
559 @param buffers - (u32 * ) buffer index array
560 @param n_buffers - (u8) number of buffer clones requested
561 @param head_end_offset - (u16) offset relative to current position
562 where packet head ends
563 @return - (u8) number of buffers actually cloned, may be
564 less than the number requested or zero
568 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
569 u8 n_buffers, u16 head_end_offset)
572 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
574 ASSERT (s->n_add_refs == 0);
577 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
579 buffers[0] = src_buffer;
580 for (i = 1; i < n_buffers; i++)
583 d = vlib_buffer_copy (vm, s);
586 buffers[i] = vlib_get_buffer_index (vm, d);
592 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
593 vlib_buffer_get_free_list_index
595 if (PREDICT_FALSE (n_buffers == 0))
597 buffers[0] = src_buffer;
601 for (i = 0; i < n_buffers; i++)
603 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
604 d->current_data = s->current_data;
605 d->current_length = head_end_offset;
606 vlib_buffer_set_free_list_index (d,
607 vlib_buffer_get_free_list_index (s));
608 d->total_length_not_including_first_buffer =
609 s->total_length_not_including_first_buffer + s->current_length -
611 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
612 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
613 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
614 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
616 d->next_buffer = src_buffer;
618 vlib_buffer_advance (s, head_end_offset);
619 s->n_add_refs = n_buffers - 1;
620 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
622 s = vlib_get_buffer (vm, s->next_buffer);
623 s->n_add_refs = n_buffers - 1;
629 /** \brief Attach cloned tail to the buffer
631 @param vm - (vlib_main_t *) vlib main data structure pointer
632 @param head - (vlib_buffer_t *) head buffer
633 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
637 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
638 vlib_buffer_t * tail)
640 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
641 ASSERT (vlib_buffer_get_free_list_index (head) ==
642 vlib_buffer_get_free_list_index (tail));
644 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
645 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
646 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
647 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
648 head->next_buffer = vlib_get_buffer_index (vm, tail);
649 head->total_length_not_including_first_buffer = tail->current_length +
650 tail->total_length_not_including_first_buffer;
653 __sync_add_and_fetch (&tail->n_add_refs, 1);
655 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
657 tail = vlib_get_buffer (vm, tail->next_buffer);
662 /* Initializes the buffer as an empty packet with no chained buffers. */
664 vlib_buffer_chain_init (vlib_buffer_t * first)
666 first->total_length_not_including_first_buffer = 0;
667 first->current_length = 0;
668 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
669 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
672 /* The provided next_bi buffer index is appended to the end of the packet. */
673 always_inline vlib_buffer_t *
674 vlib_buffer_chain_buffer (vlib_main_t * vm,
675 vlib_buffer_t * first,
676 vlib_buffer_t * last, u32 next_bi)
678 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
679 last->next_buffer = next_bi;
680 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
681 next_buffer->current_length = 0;
682 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
686 /* Increases or decreases the packet length.
687 * It does not allocate or deallocate new buffers.
688 * Therefore, the added length must be compatible
689 * with the last buffer. */
691 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
692 vlib_buffer_t * last, i32 len)
694 last->current_length += len;
696 first->total_length_not_including_first_buffer += len;
699 /* Copy data to the end of the packet and increases its length.
700 * It does not allocate new buffers.
701 * Returns the number of copied bytes. */
703 vlib_buffer_chain_append_data (vlib_main_t * vm,
705 vlib_buffer_t * first,
706 vlib_buffer_t * last, void *data, u16 data_len)
709 vlib_buffer_free_list_buffer_size (vm, free_list_index);
710 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
711 u16 len = clib_min (data_len,
712 n_buffer_bytes - last->current_length -
714 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
716 vlib_buffer_chain_increase_length (first, last, len);
720 /* Copy data to the end of the packet and increases its length.
721 * Allocates additional buffers from the free list if necessary.
722 * Returns the number of copied bytes.
723 * 'last' value is modified whenever new buffers are allocated and
724 * chained and points to the last buffer in the chain. */
726 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
728 vlib_buffer_t * first,
729 vlib_buffer_t ** last,
730 void *data, u16 data_len);
731 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
733 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
734 format_vlib_buffer_contents;
738 /* Vector of packet data. */
741 /* Number of buffers to allocate in each call to physmem
743 u32 min_n_buffers_each_physmem_alloc;
745 /* Buffer free list for this template. */
749 } vlib_packet_template_t;
751 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
752 vlib_packet_template_t * t);
754 void vlib_packet_template_init (vlib_main_t * vm,
755 vlib_packet_template_t * t,
757 uword n_packet_data_bytes,
758 uword min_n_buffers_each_physmem_alloc,
761 void *vlib_packet_template_get_packet (vlib_main_t * vm,
762 vlib_packet_template_t * t,
766 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
768 vec_free (t->packet_data);
772 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
774 serialize_stream_t *s = &m->stream;
775 vlib_serialize_buffer_main_t *sm
776 = uword_to_pointer (m->stream.data_function_opaque,
777 vlib_serialize_buffer_main_t *);
778 vlib_main_t *vm = sm->vlib_main;
781 n = s->n_buffer_bytes - s->current_buffer_index;
782 if (sm->last_buffer != ~0)
784 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
785 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
787 b = vlib_get_buffer (vm, b->next_buffer);
788 n += b->current_length;
793 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
794 n += vlib_buffer_index_length_in_chain (vm, f[0]);
801 /* Set a buffer quickly into "uninitialized" state. We want this to
802 be extremely cheap and arrange for all fields that need to be
803 initialized to be in the first 128 bits of the buffer. */
805 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
806 vlib_buffer_free_list_t * fl)
808 vlib_buffer_t *src = &fl->buffer_init_template;
810 /* Make sure vlib_buffer_t is cacheline aligned and sized */
811 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
812 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
813 CLIB_CACHE_LINE_BYTES);
814 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
815 CLIB_CACHE_LINE_BYTES * 2);
817 /* Make sure buffer template is sane. */
818 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
820 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
821 STRUCT_MARK_PTR (src, template_start),
822 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
823 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
825 /* Not in the first 16 octets. */
826 dst->n_add_refs = src->n_add_refs;
828 /* Make sure it really worked. */
829 #define _(f) ASSERT (dst->f == src->f);
834 ASSERT (dst->total_length_not_including_first_buffer == 0);
835 ASSERT (dst->n_add_refs == 0);
839 vlib_buffer_add_to_free_list (vlib_main_t * vm,
840 vlib_buffer_free_list_t * f,
841 u32 buffer_index, u8 do_init)
844 b = vlib_get_buffer (vm, buffer_index);
845 if (PREDICT_TRUE (do_init))
846 vlib_buffer_init_for_free_list (b, f);
847 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
851 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
852 vlib_buffer_t * dst1,
853 vlib_buffer_free_list_t * fl)
855 vlib_buffer_t *src = &fl->buffer_init_template;
857 /* Make sure buffer template is sane. */
858 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
860 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
861 STRUCT_MARK_PTR (src, template_start),
862 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
863 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
865 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
866 STRUCT_MARK_PTR (src, template_start),
867 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
868 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
870 /* Not in the first 16 octets. */
871 dst0->n_add_refs = src->n_add_refs;
872 dst1->n_add_refs = src->n_add_refs;
874 /* Make sure it really worked. */
875 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
881 ASSERT (dst0->total_length_not_including_first_buffer == 0);
882 ASSERT (dst1->total_length_not_including_first_buffer == 0);
883 ASSERT (dst0->n_add_refs == 0);
884 ASSERT (dst1->n_add_refs == 0);
888 extern u32 *vlib_buffer_state_validation_lock;
889 extern uword *vlib_buffer_state_validation_hash;
890 extern void *vlib_buffer_state_heap;
894 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
900 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
902 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
905 p = hash_get (vlib_buffer_state_validation_hash, b);
907 /* If we don't know about b, declare it to be in the expected state */
910 hash_set (vlib_buffer_state_validation_hash, b, expected);
914 if (p[0] != expected)
918 vlib_main_t *vm = &vlib_global_main;
922 bi = vlib_get_buffer_index (vm, b);
924 clib_mem_set_heap (oldheap);
925 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
926 vlib_time_now (vm), bi,
927 p[0] ? "busy" : "free", expected ? "busy" : "free");
931 CLIB_MEMORY_BARRIER ();
932 *vlib_buffer_state_validation_lock = 0;
933 clib_mem_set_heap (oldheap);
938 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
943 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
945 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
948 hash_set (vlib_buffer_state_validation_hash, b, expected);
950 CLIB_MEMORY_BARRIER ();
951 *vlib_buffer_state_validation_lock = 0;
952 clib_mem_set_heap (oldheap);
956 #endif /* included_vlib_buffer_funcs_h */
959 * fd.io coding-style-patch-verification: ON
962 * eval: (c-set-style "gnu")