2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 vlib_physmem_region_index_t pri;
166 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
167 pri = vm->buffer_main->buffer_pools[b->buffer_pool_index].physmem_region;
168 return vlib_physmem_offset_to_physical (vm, pri,
169 (((uword) buffer_index) <<
170 CLIB_LOG2_CACHE_LINE_BYTES) +
171 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index.  Wrapped in do/while(0) so the
   macro behaves like a single statement (restored — the wrapper was
   missing from the visible source). */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
/* NOTE(review): the do/while wrapper, body expansion and closing
   parentheses were missing from the visible source; restored below. */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping).
   The typedef line and the UNKNOWN member were missing from the
   visible source; restored. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
221 void vlib_buffer_validate_alloc_free (vlib_main_t * vm, u32 * buffers,
223 vlib_buffer_known_state_t
226 always_inline vlib_buffer_known_state_t
227 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
229 vlib_buffer_main_t *bm = vm->buffer_main;
231 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
232 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
233 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
234 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
238 vlib_buffer_set_known_state (vlib_main_t * vm,
240 vlib_buffer_known_state_t state)
242 vlib_buffer_main_t *bm = vm->buffer_main;
243 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
244 hash_set (bm->buffer_known_hash, buffer_index, state);
245 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
248 /* Validates sanity of a single buffer.
249 Returns format'ed vector with error message if any. */
250 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
254 vlib_buffer_round_size (u32 size)
256 return round_pow2 (size, sizeof (vlib_buffer_t));
260 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
262 return b->flags & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
266 vlib_buffer_set_free_list_index (vlib_buffer_t * b, u32 index)
268 /* if there is an need for more free lists we should consider
269 storig data in the 2nd cacheline */
270 ASSERT (VLIB_BUFFER_FREE_LIST_INDEX_MASK & 1);
271 ASSERT (index <= VLIB_BUFFER_FREE_LIST_INDEX_MASK);
273 b->flags &= ~VLIB_BUFFER_FREE_LIST_INDEX_MASK;
274 b->flags |= index & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
277 /** \brief Allocate buffers from specific freelist into supplied array
279 @param vm - (vlib_main_t *) vlib main data structure pointer
280 @param buffers - (u32 * ) buffer index array
281 @param n_buffers - (u32) number of buffers requested
282 @return - (u32) number of buffers actually allocated, may be
283 less than the number requested or zero
286 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
288 u32 n_buffers, u32 free_list_index)
290 vlib_buffer_main_t *bm = vm->buffer_main;
291 vlib_buffer_free_list_t *fl;
295 ASSERT (bm->cb.vlib_buffer_fill_free_list_cb);
297 fl = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
299 len = vec_len (fl->buffers);
301 if (PREDICT_FALSE (len < n_buffers))
303 bm->cb.vlib_buffer_fill_free_list_cb (vm, fl, n_buffers);
304 len = vec_len (fl->buffers);
306 /* even if fill free list didn't manage to refill free list
307 we should give what we have */
308 n_buffers = clib_min (len, n_buffers);
310 /* following code is intentionaly duplicated to allow compiler
311 to optimize fast path when n_buffers is constant value */
312 src = fl->buffers + len - n_buffers;
313 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
314 _vec_len (fl->buffers) -= n_buffers;
316 /* Verify that buffers are known free. */
317 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
318 VLIB_BUFFER_KNOWN_FREE);
323 src = fl->buffers + len - n_buffers;
324 clib_memcpy (buffers, src, n_buffers * sizeof (u32));
325 _vec_len (fl->buffers) -= n_buffers;
327 /* Verify that buffers are known free. */
328 vlib_buffer_validate_alloc_free (vm, buffers, n_buffers,
329 VLIB_BUFFER_KNOWN_FREE);
334 /** \brief Allocate buffers into supplied array
336 @param vm - (vlib_main_t *) vlib main data structure pointer
337 @param buffers - (u32 * ) buffer index array
338 @param n_buffers - (u32) number of buffers requested
339 @return - (u32) number of buffers actually allocated, may be
340 less than the number requested or zero
343 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
345 return vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
346 VLIB_BUFFER_DEFAULT_FREE_LIST_INDEX);
349 /** \brief Free buffers
350 Frees the entire buffer chain for each buffer
352 @param vm - (vlib_main_t *) vlib main data structure pointer
353 @param buffers - (u32 * ) buffer index array
354 @param n_buffers - (u32) number of buffers to free
358 vlib_buffer_free (vlib_main_t * vm,
359 /* pointer to first buffer */
361 /* number of buffers to free */
364 vlib_buffer_main_t *bm = vm->buffer_main;
366 ASSERT (bm->cb.vlib_buffer_free_cb);
368 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
371 /** \brief Free buffers, does not free the buffer chain for each buffer
373 @param vm - (vlib_main_t *) vlib main data structure pointer
374 @param buffers - (u32 * ) buffer index array
375 @param n_buffers - (u32) number of buffers to free
379 vlib_buffer_free_no_next (vlib_main_t * vm,
380 /* pointer to first buffer */
382 /* number of buffers to free */
385 vlib_buffer_main_t *bm = vm->buffer_main;
387 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
389 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
392 /** \brief Free one buffer
393 Shorthand to free a single buffer chain.
395 @param vm - (vlib_main_t *) vlib main data structure pointer
396 @param buffer_index - (u32) buffer index to free
399 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
401 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
404 /* Add/delete buffer free lists. */
405 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
408 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
410 vlib_buffer_main_t *bm = vm->buffer_main;
412 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
414 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
417 /* Find already existing public free list with given size or create one. */
418 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
421 /* Merge two free lists */
422 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
423 vlib_buffer_free_list_t * src);
425 /* Make sure we have at least given number of unaligned buffers. */
426 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
427 vlib_buffer_free_list_t *
429 uword n_unaligned_buffers);
432 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
434 vlib_buffer_main_t *bm = vm->buffer_main;
436 size = vlib_buffer_round_size (size);
437 uword *p = hash_get (bm->free_list_by_size, size);
438 return p ? p[0] : ~0;
441 always_inline vlib_buffer_free_list_t *
442 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
445 vlib_buffer_main_t *bm = vm->buffer_main;
448 *index = i = vlib_buffer_get_free_list_index (b);
449 return pool_elt_at_index (bm->buffer_free_list_pool, i);
452 always_inline vlib_buffer_free_list_t *
453 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
455 vlib_buffer_main_t *bm = vm->buffer_main;
456 vlib_buffer_free_list_t *f;
458 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
460 /* Sanity: indices must match. */
461 ASSERT (f->index == free_list_index);
467 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
469 vlib_buffer_free_list_t *f =
470 vlib_buffer_get_free_list (vm, free_list_index);
471 return f->n_data_bytes;
474 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
476 /* Reasonably fast buffer copy routine. */
478 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
499 /* Append given data to end of buffer, possibly allocating new buffers. */
500 u32 vlib_buffer_add_data (vlib_main_t * vm,
502 u32 buffer_index, void *data, u32 n_data_bytes);
504 /* duplicate all buffers in chain */
505 always_inline vlib_buffer_t *
506 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
508 vlib_buffer_t *s, *d, *fd;
509 uword n_alloc, n_buffers = 1;
510 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
514 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
517 s = vlib_get_buffer (vm, s->next_buffer);
519 u32 new_buffers[n_buffers];
521 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
523 /* No guarantee that we'll get all the buffers we asked for */
524 if (PREDICT_FALSE (n_alloc < n_buffers))
527 vlib_buffer_free (vm, new_buffers, n_alloc);
533 fd = d = vlib_get_buffer (vm, new_buffers[0]);
534 d->current_data = s->current_data;
535 d->current_length = s->current_length;
536 d->flags = s->flags & flag_mask;
537 d->total_length_not_including_first_buffer =
538 s->total_length_not_including_first_buffer;
539 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
540 clib_memcpy (vlib_buffer_get_current (d),
541 vlib_buffer_get_current (s), s->current_length);
544 for (i = 1; i < n_buffers; i++)
547 d->next_buffer = new_buffers[i];
549 s = vlib_get_buffer (vm, s->next_buffer);
550 d = vlib_get_buffer (vm, new_buffers[i]);
551 d->current_data = s->current_data;
552 d->current_length = s->current_length;
553 clib_memcpy (vlib_buffer_get_current (d),
554 vlib_buffer_get_current (s), s->current_length);
555 d->flags = s->flags & flag_mask;
561 /** \brief Create multiple clones of buffer and store them in the supplied array
563 @param vm - (vlib_main_t *) vlib main data structure pointer
564 @param src_buffer - (u32) source buffer index
565 @param buffers - (u32 * ) buffer index array
566 @param n_buffers - (u8) number of buffer clones requested
567 @param head_end_offset - (u16) offset relative to current position
568 where packet head ends
569 @return - (u8) number of buffers actually cloned, may be
570 less than the number requested or zero
574 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
575 u8 n_buffers, u16 head_end_offset)
578 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
580 ASSERT (s->n_add_refs == 0);
583 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
585 buffers[0] = src_buffer;
586 for (i = 1; i < n_buffers; i++)
589 d = vlib_buffer_copy (vm, s);
592 buffers[i] = vlib_get_buffer_index (vm, d);
598 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
599 vlib_buffer_get_free_list_index
601 if (PREDICT_FALSE (n_buffers == 0))
603 buffers[0] = src_buffer;
607 for (i = 0; i < n_buffers; i++)
609 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
610 d->current_data = s->current_data;
611 d->current_length = head_end_offset;
612 vlib_buffer_set_free_list_index (d,
613 vlib_buffer_get_free_list_index (s));
614 d->total_length_not_including_first_buffer =
615 s->total_length_not_including_first_buffer + s->current_length -
617 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
618 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
619 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
620 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
622 d->next_buffer = src_buffer;
624 vlib_buffer_advance (s, head_end_offset);
625 s->n_add_refs = n_buffers - 1;
626 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
628 s = vlib_get_buffer (vm, s->next_buffer);
629 s->n_add_refs = n_buffers - 1;
635 /** \brief Attach cloned tail to the buffer
637 @param vm - (vlib_main_t *) vlib main data structure pointer
638 @param head - (vlib_buffer_t *) head buffer
639 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
643 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
644 vlib_buffer_t * tail)
646 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
647 ASSERT (vlib_buffer_get_free_list_index (head) ==
648 vlib_buffer_get_free_list_index (tail));
650 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
651 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
652 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
653 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
654 head->next_buffer = vlib_get_buffer_index (vm, tail);
655 head->total_length_not_including_first_buffer = tail->current_length +
656 tail->total_length_not_including_first_buffer;
659 __sync_add_and_fetch (&tail->n_add_refs, 1);
661 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
663 tail = vlib_get_buffer (vm, tail->next_buffer);
668 /* Initializes the buffer as an empty packet with no chained buffers. */
670 vlib_buffer_chain_init (vlib_buffer_t * first)
672 first->total_length_not_including_first_buffer = 0;
673 first->current_length = 0;
674 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
675 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
678 /* The provided next_bi buffer index is appended to the end of the packet. */
679 always_inline vlib_buffer_t *
680 vlib_buffer_chain_buffer (vlib_main_t * vm,
681 vlib_buffer_t * first,
682 vlib_buffer_t * last, u32 next_bi)
684 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
685 last->next_buffer = next_bi;
686 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
687 next_buffer->current_length = 0;
688 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
692 /* Increases or decreases the packet length.
693 * It does not allocate or deallocate new buffers.
694 * Therefore, the added length must be compatible
695 * with the last buffer. */
697 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
698 vlib_buffer_t * last, i32 len)
700 last->current_length += len;
702 first->total_length_not_including_first_buffer += len;
705 /* Copy data to the end of the packet and increases its length.
706 * It does not allocate new buffers.
707 * Returns the number of copied bytes. */
709 vlib_buffer_chain_append_data (vlib_main_t * vm,
711 vlib_buffer_t * first,
712 vlib_buffer_t * last, void *data, u16 data_len)
715 vlib_buffer_free_list_buffer_size (vm, free_list_index);
716 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
717 u16 len = clib_min (data_len,
718 n_buffer_bytes - last->current_length -
720 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
722 vlib_buffer_chain_increase_length (first, last, len);
726 /* Copy data to the end of the packet and increases its length.
727 * Allocates additional buffers from the free list if necessary.
728 * Returns the number of copied bytes.
729 * 'last' value is modified whenever new buffers are allocated and
730 * chained and points to the last buffer in the chain. */
732 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
734 vlib_buffer_t * first,
735 vlib_buffer_t ** last,
736 void *data, u16 data_len);
737 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
739 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
740 format_vlib_buffer_contents;
744 /* Vector of packet data. */
747 /* Number of buffers to allocate in each call to physmem
749 u32 min_n_buffers_each_physmem_alloc;
751 /* Buffer free list for this template. */
755 } vlib_packet_template_t;
757 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
758 vlib_packet_template_t * t);
760 void vlib_packet_template_init (vlib_main_t * vm,
761 vlib_packet_template_t * t,
763 uword n_packet_data_bytes,
764 uword min_n_buffers_each_physmem_alloc,
767 void *vlib_packet_template_get_packet (vlib_main_t * vm,
768 vlib_packet_template_t * t,
772 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
774 vec_free (t->packet_data);
778 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
780 serialize_stream_t *s = &m->stream;
781 vlib_serialize_buffer_main_t *sm
782 = uword_to_pointer (m->stream.data_function_opaque,
783 vlib_serialize_buffer_main_t *);
784 vlib_main_t *vm = sm->vlib_main;
787 n = s->n_buffer_bytes - s->current_buffer_index;
788 if (sm->last_buffer != ~0)
790 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
791 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
793 b = vlib_get_buffer (vm, b->next_buffer);
794 n += b->current_length;
799 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
800 n += vlib_buffer_index_length_in_chain (vm, f[0]);
807 /* Set a buffer quickly into "uninitialized" state. We want this to
808 be extremely cheap and arrange for all fields that need to be
809 initialized to be in the first 128 bits of the buffer. */
811 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
812 vlib_buffer_free_list_t * fl)
814 vlib_buffer_t *src = &fl->buffer_init_template;
816 /* Make sure vlib_buffer_t is cacheline aligned and sized */
817 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
818 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
819 CLIB_CACHE_LINE_BYTES);
820 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
821 CLIB_CACHE_LINE_BYTES * 2);
823 /* Make sure buffer template is sane. */
824 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
826 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
827 STRUCT_MARK_PTR (src, template_start),
828 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
829 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
831 /* Not in the first 16 octets. */
832 dst->n_add_refs = src->n_add_refs;
834 /* Make sure it really worked. */
835 #define _(f) ASSERT (dst->f == src->f);
840 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
841 /* total_length_not_including_first_buffer is not in the template anymore
842 * so it may actually not zeroed for some buffers. One option is to
843 * uncomment the line lower (comes at a cost), the other, is to just not
845 /* dst->total_length_not_including_first_buffer = 0; */
846 ASSERT (dst->n_add_refs == 0);
850 vlib_buffer_add_to_free_list (vlib_main_t * vm,
851 vlib_buffer_free_list_t * f,
852 u32 buffer_index, u8 do_init)
855 b = vlib_get_buffer (vm, buffer_index);
856 if (PREDICT_TRUE (do_init))
857 vlib_buffer_init_for_free_list (b, f);
858 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
860 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
862 vlib_buffer_free_list_t *mf;
863 mf = vlib_buffer_get_free_list (vlib_mains[0], f->index);
864 clib_spinlock_lock (&mf->global_buffers_lock);
865 /* keep last stored buffers, as they are more likely hot in the cache */
866 vec_add_aligned (mf->global_buffers, f->buffers, VLIB_FRAME_SIZE,
867 CLIB_CACHE_LINE_BYTES);
868 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
869 f->n_alloc -= VLIB_FRAME_SIZE;
870 clib_spinlock_unlock (&mf->global_buffers_lock);
875 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
876 vlib_buffer_t * dst1,
877 vlib_buffer_free_list_t * fl)
879 vlib_buffer_t *src = &fl->buffer_init_template;
881 /* Make sure buffer template is sane. */
882 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
884 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
885 STRUCT_MARK_PTR (src, template_start),
886 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
887 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
889 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
890 STRUCT_MARK_PTR (src, template_start),
891 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
892 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
894 /* Not in the first 16 octets. */
895 dst0->n_add_refs = src->n_add_refs;
896 dst1->n_add_refs = src->n_add_refs;
898 /* Make sure it really worked. */
899 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
905 ASSERT (dst0->total_length_not_including_first_buffer == 0);
906 ASSERT (dst1->total_length_not_including_first_buffer == 0);
907 ASSERT (dst0->n_add_refs == 0);
908 ASSERT (dst1->n_add_refs == 0);
912 extern u32 *vlib_buffer_state_validation_lock;
913 extern uword *vlib_buffer_state_validation_hash;
914 extern void *vlib_buffer_state_heap;
/* Debug check: assert that buffer b is in the 'expected' busy/free
 * state according to the global validation hash.
 *
 * NOTE(review): this block is truncated in the visible source — the
 * function declaration line, local variable declarations, braces and
 * part of the mismatch/panic path are missing.  The comments below
 * annotate only what is visible; reconcile against the full file
 * before editing.
 */
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
/* Validation state lives on its own private heap; switch to it. */
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
/* Spin until we own the validation lock (test-and-set spinlock). */
while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
p = hash_get (vlib_buffer_state_validation_hash, b);
/* If we don't know about b, declare it to be in the expected state */
hash_set (vlib_buffer_state_validation_hash, b, expected);
/* Recorded state disagrees with the caller's expectation. */
if (p[0] != expected)
vlib_main_t *vm = &vlib_global_main;
bi = vlib_get_buffer_index (vm, b);
/* Restore the caller's heap before emitting the warning. */
clib_mem_set_heap (oldheap);
clib_warning ("%.6f buffer %llx (%d): %s, not %s",
vlib_time_now (vm), bi,
p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Release the spinlock: full barrier, then clear the lock word. */
CLIB_MEMORY_BARRIER ();
*vlib_buffer_state_validation_lock = 0;
clib_mem_set_heap (oldheap);
962 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
967 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
969 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
972 hash_set (vlib_buffer_state_validation_hash, b, expected);
974 CLIB_MEMORY_BARRIER ();
975 *vlib_buffer_state_validation_lock = 0;
976 clib_mem_set_heap (oldheap);
980 #endif /* included_vlib_buffer_funcs_h */
983 * fd.io coding-style-patch-verification: ON
986 * eval: (c-set-style "gnu")