2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 return vlib_physmem_offset_to_physical (&vm->physmem_main,
166 (((uword) buffer_index) <<
167 CLIB_LOG2_CACHE_LINE_BYTES) +
168 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
/* NOTE(review): buffer_main is a pointer member (see vlib_get_buffer),
   so no address-of operator is taken here. */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = _vmain->buffer_main;             \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Lifecycle state of a buffer index as tracked by the known-buffer hash. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
218 always_inline vlib_buffer_known_state_t
219 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
221 vlib_buffer_main_t *bm = vm->buffer_main;
223 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
224 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
225 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
226 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
230 vlib_buffer_set_known_state (vlib_main_t * vm,
232 vlib_buffer_known_state_t state)
234 vlib_buffer_main_t *bm = vm->buffer_main;
235 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
236 hash_set (bm->buffer_known_hash, buffer_index, state);
237 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
240 /* Validates sanity of a single buffer.
241 Returns format'ed vector with error message if any. */
242 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
245 /** \brief Allocate buffers into supplied array
247 @param vm - (vlib_main_t *) vlib main data structure pointer
248 @param buffers - (u32 * ) buffer index array
249 @param n_buffers - (u32) number of buffers requested
250 @return - (u32) number of buffers actually allocated, may be
251 less than the number requested or zero
254 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
256 vlib_buffer_main_t *bm = vm->buffer_main;
258 ASSERT (bm->cb.vlib_buffer_alloc_cb);
260 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
264 vlib_buffer_round_size (u32 size)
266 return round_pow2 (size, sizeof (vlib_buffer_t));
270 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
272 return b->flags & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
276 vlib_buffer_set_free_list_index (vlib_buffer_t * b, u32 index)
278 /* if there is an need for more free lists we should consider
279 storig data in the 2nd cacheline */
280 ASSERT (VLIB_BUFFER_FREE_LIST_INDEX_MASK & 1);
281 ASSERT (index <= VLIB_BUFFER_FREE_LIST_INDEX_MASK);
283 b->flags &= ~VLIB_BUFFER_FREE_LIST_INDEX_MASK;
284 b->flags |= index & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
287 /** \brief Allocate buffers from specific freelist into supplied array
289 @param vm - (vlib_main_t *) vlib main data structure pointer
290 @param buffers - (u32 * ) buffer index array
291 @param n_buffers - (u32) number of buffers requested
292 @return - (u32) number of buffers actually allocated, may be
293 less than the number requested or zero
296 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
298 u32 n_buffers, u32 free_list_index)
300 vlib_buffer_main_t *bm = vm->buffer_main;
302 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
304 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
308 /** \brief Free buffers
309 Frees the entire buffer chain for each buffer
311 @param vm - (vlib_main_t *) vlib main data structure pointer
312 @param buffers - (u32 * ) buffer index array
313 @param n_buffers - (u32) number of buffers to free
317 vlib_buffer_free (vlib_main_t * vm,
318 /* pointer to first buffer */
320 /* number of buffers to free */
323 vlib_buffer_main_t *bm = vm->buffer_main;
325 ASSERT (bm->cb.vlib_buffer_free_cb);
327 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
330 /** \brief Free buffers, does not free the buffer chain for each buffer
332 @param vm - (vlib_main_t *) vlib main data structure pointer
333 @param buffers - (u32 * ) buffer index array
334 @param n_buffers - (u32) number of buffers to free
338 vlib_buffer_free_no_next (vlib_main_t * vm,
339 /* pointer to first buffer */
341 /* number of buffers to free */
344 vlib_buffer_main_t *bm = vm->buffer_main;
346 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
348 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
351 /** \brief Free one buffer
352 Shorthand to free a single buffer chain.
354 @param vm - (vlib_main_t *) vlib main data structure pointer
355 @param buffer_index - (u32) buffer index to free
358 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
360 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
363 /* Add/delete buffer free lists. */
364 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
367 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
369 vlib_buffer_main_t *bm = vm->buffer_main;
371 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
373 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
376 /* Find already existing public free list with given size or create one. */
377 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
380 /* Merge two free lists */
381 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
382 vlib_buffer_free_list_t * src);
384 /* Make sure we have at least given number of unaligned buffers. */
385 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
386 vlib_buffer_free_list_t *
388 uword n_unaligned_buffers);
391 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
393 vlib_buffer_main_t *bm = vm->buffer_main;
395 size = vlib_buffer_round_size (size);
396 uword *p = hash_get (bm->free_list_by_size, size);
397 return p ? p[0] : ~0;
400 always_inline vlib_buffer_free_list_t *
401 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
404 vlib_buffer_main_t *bm = vm->buffer_main;
407 *index = i = vlib_buffer_get_free_list_index (b);
408 return pool_elt_at_index (bm->buffer_free_list_pool, i);
411 always_inline vlib_buffer_free_list_t *
412 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
414 vlib_buffer_main_t *bm = vm->buffer_main;
415 vlib_buffer_free_list_t *f;
417 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
419 /* Sanity: indices must match. */
420 ASSERT (f->index == free_list_index);
426 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
428 vlib_buffer_free_list_t *f =
429 vlib_buffer_get_free_list (vm, free_list_index);
430 return f->n_data_bytes;
433 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
435 /* Reasonably fast buffer copy routine. */
437 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
459 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
460 uword n_bytes, uword alignment)
463 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
466 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
473 /* By default allocate I/O memory with cache line alignment. */
475 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
477 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
478 CLIB_CACHE_LINE_BYTES);
482 vlib_physmem_free (vlib_main_t * vm, void *mem)
484 return vm->os_physmem_free (mem);
488 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
490 vlib_physmem_main_t *pm = &vm->physmem_main;
491 uword o = pointer_to_uword (mem) - pm->virtual.start;
492 return vlib_physmem_offset_to_physical (pm, o);
495 /* Append given data to end of buffer, possibly allocating new buffers. */
496 u32 vlib_buffer_add_data (vlib_main_t * vm,
498 u32 buffer_index, void *data, u32 n_data_bytes);
500 /* duplicate all buffers in chain */
501 always_inline vlib_buffer_t *
502 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
504 vlib_buffer_t *s, *d, *fd;
505 uword n_alloc, n_buffers = 1;
506 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
510 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
513 s = vlib_get_buffer (vm, s->next_buffer);
515 u32 new_buffers[n_buffers];
517 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
519 /* No guarantee that we'll get all the buffers we asked for */
520 if (PREDICT_FALSE (n_alloc < n_buffers))
523 vlib_buffer_free (vm, new_buffers, n_alloc);
529 fd = d = vlib_get_buffer (vm, new_buffers[0]);
530 d->current_data = s->current_data;
531 d->current_length = s->current_length;
532 d->flags = s->flags & flag_mask;
533 d->total_length_not_including_first_buffer =
534 s->total_length_not_including_first_buffer;
535 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
536 clib_memcpy (vlib_buffer_get_current (d),
537 vlib_buffer_get_current (s), s->current_length);
540 for (i = 1; i < n_buffers; i++)
543 d->next_buffer = new_buffers[i];
545 s = vlib_get_buffer (vm, s->next_buffer);
546 d = vlib_get_buffer (vm, new_buffers[i]);
547 d->current_data = s->current_data;
548 d->current_length = s->current_length;
549 clib_memcpy (vlib_buffer_get_current (d),
550 vlib_buffer_get_current (s), s->current_length);
551 d->flags = s->flags & flag_mask;
557 /** \brief Create multiple clones of buffer and store them in the supplied array
559 @param vm - (vlib_main_t *) vlib main data structure pointer
560 @param src_buffer - (u32) source buffer index
561 @param buffers - (u32 * ) buffer index array
562 @param n_buffers - (u8) number of buffer clones requested
563 @param head_end_offset - (u16) offset relative to current position
564 where packet head ends
565 @return - (u8) number of buffers actually cloned, may be
566 less than the number requested or zero
570 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
571 u8 n_buffers, u16 head_end_offset)
574 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
576 ASSERT (s->n_add_refs == 0);
579 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
581 buffers[0] = src_buffer;
582 for (i = 1; i < n_buffers; i++)
585 d = vlib_buffer_copy (vm, s);
588 buffers[i] = vlib_get_buffer_index (vm, d);
594 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
595 vlib_buffer_get_free_list_index
597 if (PREDICT_FALSE (n_buffers == 0))
599 buffers[0] = src_buffer;
603 for (i = 0; i < n_buffers; i++)
605 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
606 d->current_data = s->current_data;
607 d->current_length = head_end_offset;
608 vlib_buffer_set_free_list_index (d,
609 vlib_buffer_get_free_list_index (s));
610 d->total_length_not_including_first_buffer =
611 s->total_length_not_including_first_buffer + s->current_length -
613 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
614 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
615 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
616 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
618 d->next_buffer = src_buffer;
620 vlib_buffer_advance (s, head_end_offset);
621 s->n_add_refs = n_buffers - 1;
622 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
624 s = vlib_get_buffer (vm, s->next_buffer);
625 s->n_add_refs = n_buffers - 1;
631 /** \brief Attach cloned tail to the buffer
633 @param vm - (vlib_main_t *) vlib main data structure pointer
634 @param head - (vlib_buffer_t *) head buffer
635 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
639 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
640 vlib_buffer_t * tail)
642 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
643 ASSERT (vlib_buffer_get_free_list_index (head) ==
644 vlib_buffer_get_free_list_index (tail));
646 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
647 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
648 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
649 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
650 head->next_buffer = vlib_get_buffer_index (vm, tail);
651 head->total_length_not_including_first_buffer = tail->current_length +
652 tail->total_length_not_including_first_buffer;
655 __sync_add_and_fetch (&tail->n_add_refs, 1);
657 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
659 tail = vlib_get_buffer (vm, tail->next_buffer);
664 /* Initializes the buffer as an empty packet with no chained buffers. */
666 vlib_buffer_chain_init (vlib_buffer_t * first)
668 first->total_length_not_including_first_buffer = 0;
669 first->current_length = 0;
670 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
671 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
674 /* The provided next_bi buffer index is appended to the end of the packet. */
675 always_inline vlib_buffer_t *
676 vlib_buffer_chain_buffer (vlib_main_t * vm,
677 vlib_buffer_t * first,
678 vlib_buffer_t * last, u32 next_bi)
680 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
681 last->next_buffer = next_bi;
682 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
683 next_buffer->current_length = 0;
684 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
688 /* Increases or decreases the packet length.
689 * It does not allocate or deallocate new buffers.
690 * Therefore, the added length must be compatible
691 * with the last buffer. */
693 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
694 vlib_buffer_t * last, i32 len)
696 last->current_length += len;
698 first->total_length_not_including_first_buffer += len;
701 /* Copy data to the end of the packet and increases its length.
702 * It does not allocate new buffers.
703 * Returns the number of copied bytes. */
705 vlib_buffer_chain_append_data (vlib_main_t * vm,
707 vlib_buffer_t * first,
708 vlib_buffer_t * last, void *data, u16 data_len)
711 vlib_buffer_free_list_buffer_size (vm, free_list_index);
712 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
713 u16 len = clib_min (data_len,
714 n_buffer_bytes - last->current_length -
716 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
718 vlib_buffer_chain_increase_length (first, last, len);
722 /* Copy data to the end of the packet and increases its length.
723 * Allocates additional buffers from the free list if necessary.
724 * Returns the number of copied bytes.
725 * 'last' value is modified whenever new buffers are allocated and
726 * chained and points to the last buffer in the chain. */
728 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
730 vlib_buffer_t * first,
731 vlib_buffer_t ** last,
732 void *data, u16 data_len);
733 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
735 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
736 format_vlib_buffer_contents;
740 /* Vector of packet data. */
743 /* Number of buffers to allocate in each call to physmem
745 u32 min_n_buffers_each_physmem_alloc;
747 /* Buffer free list for this template. */
751 } vlib_packet_template_t;
753 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
754 vlib_packet_template_t * t);
756 void vlib_packet_template_init (vlib_main_t * vm,
757 vlib_packet_template_t * t,
759 uword n_packet_data_bytes,
760 uword min_n_buffers_each_physmem_alloc,
763 void *vlib_packet_template_get_packet (vlib_main_t * vm,
764 vlib_packet_template_t * t,
768 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
770 vec_free (t->packet_data);
774 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
776 serialize_stream_t *s = &m->stream;
777 vlib_serialize_buffer_main_t *sm
778 = uword_to_pointer (m->stream.data_function_opaque,
779 vlib_serialize_buffer_main_t *);
780 vlib_main_t *vm = sm->vlib_main;
783 n = s->n_buffer_bytes - s->current_buffer_index;
784 if (sm->last_buffer != ~0)
786 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
787 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
789 b = vlib_get_buffer (vm, b->next_buffer);
790 n += b->current_length;
795 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
796 n += vlib_buffer_index_length_in_chain (vm, f[0]);
803 /* Set a buffer quickly into "uninitialized" state. We want this to
804 be extremely cheap and arrange for all fields that need to be
805 initialized to be in the first 128 bits of the buffer. */
807 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
808 vlib_buffer_free_list_t * fl)
810 vlib_buffer_t *src = &fl->buffer_init_template;
812 /* Make sure vlib_buffer_t is cacheline aligned and sized */
813 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
814 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
815 CLIB_CACHE_LINE_BYTES);
816 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
817 CLIB_CACHE_LINE_BYTES * 2);
819 /* Make sure buffer template is sane. */
820 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
822 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
823 STRUCT_MARK_PTR (src, template_start),
824 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
825 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
827 /* Not in the first 16 octets. */
828 dst->n_add_refs = src->n_add_refs;
830 /* Make sure it really worked. */
831 #define _(f) ASSERT (dst->f == src->f);
836 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
837 /* total_length_not_including_first_buffer is not in the template anymore
838 * so it may actually not zeroed for some buffers. One option is to
839 * uncomment the line lower (comes at a cost), the other, is to just not
841 /* dst->total_length_not_including_first_buffer = 0; */
842 ASSERT (dst->n_add_refs == 0);
846 vlib_buffer_add_to_free_list (vlib_main_t * vm,
847 vlib_buffer_free_list_t * f,
848 u32 buffer_index, u8 do_init)
852 b = vlib_get_buffer (vm, buffer_index);
853 if (PREDICT_TRUE (do_init))
854 vlib_buffer_init_for_free_list (b, f);
855 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
857 if (vec_len (f->buffers) > 3 * VLIB_FRAME_SIZE)
859 /* keep last stored buffers, as they are more likely hot in the cache */
860 for (i = 0; i < VLIB_FRAME_SIZE; i++)
861 vm->os_physmem_free (vlib_get_buffer (vm, i));
862 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
867 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
868 vlib_buffer_t * dst1,
869 vlib_buffer_free_list_t * fl)
871 vlib_buffer_t *src = &fl->buffer_init_template;
873 /* Make sure buffer template is sane. */
874 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
876 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
877 STRUCT_MARK_PTR (src, template_start),
878 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
879 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
881 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
882 STRUCT_MARK_PTR (src, template_start),
883 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
884 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
886 /* Not in the first 16 octets. */
887 dst0->n_add_refs = src->n_add_refs;
888 dst1->n_add_refs = src->n_add_refs;
890 /* Make sure it really worked. */
891 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
897 ASSERT (dst0->total_length_not_including_first_buffer == 0);
898 ASSERT (dst1->total_length_not_including_first_buffer == 0);
899 ASSERT (dst0->n_add_refs == 0);
900 ASSERT (dst1->n_add_refs == 0);
/* Buffer state (free/busy) validation machinery; only active in debug
   images. */
#if CLIB_DEBUG > 0
extern u32 *vlib_buffer_state_validation_lock;
extern uword *vlib_buffer_state_validation_hash;
extern void *vlib_buffer_state_heap;
#endif
910 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
916 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
918 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
921 p = hash_get (vlib_buffer_state_validation_hash, b);
923 /* If we don't know about b, declare it to be in the expected state */
926 hash_set (vlib_buffer_state_validation_hash, b, expected);
930 if (p[0] != expected)
934 vlib_main_t *vm = &vlib_global_main;
938 bi = vlib_get_buffer_index (vm, b);
940 clib_mem_set_heap (oldheap);
941 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
942 vlib_time_now (vm), bi,
943 p[0] ? "busy" : "free", expected ? "busy" : "free");
947 CLIB_MEMORY_BARRIER ();
948 *vlib_buffer_state_validation_lock = 0;
949 clib_mem_set_heap (oldheap);
954 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
959 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
961 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
964 hash_set (vlib_buffer_state_validation_hash, b, expected);
966 CLIB_MEMORY_BARRIER ();
967 *vlib_buffer_state_validation_lock = 0;
968 clib_mem_set_heap (oldheap);
972 #endif /* included_vlib_buffer_funcs_h */
975 * fd.io coding-style-patch-verification: ON
978 * eval: (c-set-style "gnu")