/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
/** \file
    vlib buffer access methods.
*/
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 return vlib_physmem_offset_to_physical (vm, vm->buffer_main->physmem_region,
166 (((uword) buffer_index) <<
167 CLIB_LOG2_CACHE_LINE_BYTES) +
168 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      body;                                                      \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
218 always_inline vlib_buffer_known_state_t
219 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
221 vlib_buffer_main_t *bm = vm->buffer_main;
223 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
224 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
225 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
226 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
230 vlib_buffer_set_known_state (vlib_main_t * vm,
232 vlib_buffer_known_state_t state)
234 vlib_buffer_main_t *bm = vm->buffer_main;
235 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
236 hash_set (bm->buffer_known_hash, buffer_index, state);
237 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
240 /* Validates sanity of a single buffer.
241 Returns format'ed vector with error message if any. */
242 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
245 /** \brief Allocate buffers into supplied array
247 @param vm - (vlib_main_t *) vlib main data structure pointer
248 @param buffers - (u32 * ) buffer index array
249 @param n_buffers - (u32) number of buffers requested
250 @return - (u32) number of buffers actually allocated, may be
251 less than the number requested or zero
254 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
256 vlib_buffer_main_t *bm = vm->buffer_main;
258 ASSERT (bm->cb.vlib_buffer_alloc_cb);
260 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
264 vlib_buffer_round_size (u32 size)
266 return round_pow2 (size, sizeof (vlib_buffer_t));
270 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
272 return b->flags & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
276 vlib_buffer_set_free_list_index (vlib_buffer_t * b, u32 index)
278 /* if there is an need for more free lists we should consider
279 storig data in the 2nd cacheline */
280 ASSERT (VLIB_BUFFER_FREE_LIST_INDEX_MASK & 1);
281 ASSERT (index <= VLIB_BUFFER_FREE_LIST_INDEX_MASK);
283 b->flags &= ~VLIB_BUFFER_FREE_LIST_INDEX_MASK;
284 b->flags |= index & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
287 /** \brief Allocate buffers from specific freelist into supplied array
289 @param vm - (vlib_main_t *) vlib main data structure pointer
290 @param buffers - (u32 * ) buffer index array
291 @param n_buffers - (u32) number of buffers requested
292 @return - (u32) number of buffers actually allocated, may be
293 less than the number requested or zero
296 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
298 u32 n_buffers, u32 free_list_index)
300 vlib_buffer_main_t *bm = vm->buffer_main;
302 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
304 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
308 /** \brief Free buffers
309 Frees the entire buffer chain for each buffer
311 @param vm - (vlib_main_t *) vlib main data structure pointer
312 @param buffers - (u32 * ) buffer index array
313 @param n_buffers - (u32) number of buffers to free
317 vlib_buffer_free (vlib_main_t * vm,
318 /* pointer to first buffer */
320 /* number of buffers to free */
323 vlib_buffer_main_t *bm = vm->buffer_main;
325 ASSERT (bm->cb.vlib_buffer_free_cb);
327 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
330 /** \brief Free buffers, does not free the buffer chain for each buffer
332 @param vm - (vlib_main_t *) vlib main data structure pointer
333 @param buffers - (u32 * ) buffer index array
334 @param n_buffers - (u32) number of buffers to free
338 vlib_buffer_free_no_next (vlib_main_t * vm,
339 /* pointer to first buffer */
341 /* number of buffers to free */
344 vlib_buffer_main_t *bm = vm->buffer_main;
346 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
348 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
351 /** \brief Free one buffer
352 Shorthand to free a single buffer chain.
354 @param vm - (vlib_main_t *) vlib main data structure pointer
355 @param buffer_index - (u32) buffer index to free
358 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
360 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
363 /* Add/delete buffer free lists. */
364 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
367 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
369 vlib_buffer_main_t *bm = vm->buffer_main;
371 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
373 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
376 /* Find already existing public free list with given size or create one. */
377 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
380 /* Merge two free lists */
381 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
382 vlib_buffer_free_list_t * src);
384 /* Make sure we have at least given number of unaligned buffers. */
385 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
386 vlib_buffer_free_list_t *
388 uword n_unaligned_buffers);
391 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
393 vlib_buffer_main_t *bm = vm->buffer_main;
395 size = vlib_buffer_round_size (size);
396 uword *p = hash_get (bm->free_list_by_size, size);
397 return p ? p[0] : ~0;
400 always_inline vlib_buffer_free_list_t *
401 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
404 vlib_buffer_main_t *bm = vm->buffer_main;
407 *index = i = vlib_buffer_get_free_list_index (b);
408 return pool_elt_at_index (bm->buffer_free_list_pool, i);
411 always_inline vlib_buffer_free_list_t *
412 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
414 vlib_buffer_main_t *bm = vm->buffer_main;
415 vlib_buffer_free_list_t *f;
417 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
419 /* Sanity: indices must match. */
420 ASSERT (f->index == free_list_index);
426 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
428 vlib_buffer_free_list_t *f =
429 vlib_buffer_get_free_list (vm, free_list_index);
430 return f->n_data_bytes;
433 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
435 /* Reasonably fast buffer copy routine. */
437 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
458 /* Append given data to end of buffer, possibly allocating new buffers. */
459 u32 vlib_buffer_add_data (vlib_main_t * vm,
461 u32 buffer_index, void *data, u32 n_data_bytes);
463 /* duplicate all buffers in chain */
464 always_inline vlib_buffer_t *
465 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
467 vlib_buffer_t *s, *d, *fd;
468 uword n_alloc, n_buffers = 1;
469 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
473 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
476 s = vlib_get_buffer (vm, s->next_buffer);
478 u32 new_buffers[n_buffers];
480 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
482 /* No guarantee that we'll get all the buffers we asked for */
483 if (PREDICT_FALSE (n_alloc < n_buffers))
486 vlib_buffer_free (vm, new_buffers, n_alloc);
492 fd = d = vlib_get_buffer (vm, new_buffers[0]);
493 d->current_data = s->current_data;
494 d->current_length = s->current_length;
495 d->flags = s->flags & flag_mask;
496 d->total_length_not_including_first_buffer =
497 s->total_length_not_including_first_buffer;
498 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
499 clib_memcpy (vlib_buffer_get_current (d),
500 vlib_buffer_get_current (s), s->current_length);
503 for (i = 1; i < n_buffers; i++)
506 d->next_buffer = new_buffers[i];
508 s = vlib_get_buffer (vm, s->next_buffer);
509 d = vlib_get_buffer (vm, new_buffers[i]);
510 d->current_data = s->current_data;
511 d->current_length = s->current_length;
512 clib_memcpy (vlib_buffer_get_current (d),
513 vlib_buffer_get_current (s), s->current_length);
514 d->flags = s->flags & flag_mask;
520 /** \brief Create multiple clones of buffer and store them in the supplied array
522 @param vm - (vlib_main_t *) vlib main data structure pointer
523 @param src_buffer - (u32) source buffer index
524 @param buffers - (u32 * ) buffer index array
525 @param n_buffers - (u8) number of buffer clones requested
526 @param head_end_offset - (u16) offset relative to current position
527 where packet head ends
528 @return - (u8) number of buffers actually cloned, may be
529 less than the number requested or zero
533 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
534 u8 n_buffers, u16 head_end_offset)
537 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
539 ASSERT (s->n_add_refs == 0);
542 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
544 buffers[0] = src_buffer;
545 for (i = 1; i < n_buffers; i++)
548 d = vlib_buffer_copy (vm, s);
551 buffers[i] = vlib_get_buffer_index (vm, d);
557 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
558 vlib_buffer_get_free_list_index
560 if (PREDICT_FALSE (n_buffers == 0))
562 buffers[0] = src_buffer;
566 for (i = 0; i < n_buffers; i++)
568 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
569 d->current_data = s->current_data;
570 d->current_length = head_end_offset;
571 vlib_buffer_set_free_list_index (d,
572 vlib_buffer_get_free_list_index (s));
573 d->total_length_not_including_first_buffer =
574 s->total_length_not_including_first_buffer + s->current_length -
576 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
577 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
578 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
579 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
581 d->next_buffer = src_buffer;
583 vlib_buffer_advance (s, head_end_offset);
584 s->n_add_refs = n_buffers - 1;
585 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
587 s = vlib_get_buffer (vm, s->next_buffer);
588 s->n_add_refs = n_buffers - 1;
594 /** \brief Attach cloned tail to the buffer
596 @param vm - (vlib_main_t *) vlib main data structure pointer
597 @param head - (vlib_buffer_t *) head buffer
598 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
602 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
603 vlib_buffer_t * tail)
605 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
606 ASSERT (vlib_buffer_get_free_list_index (head) ==
607 vlib_buffer_get_free_list_index (tail));
609 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
610 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
611 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
612 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
613 head->next_buffer = vlib_get_buffer_index (vm, tail);
614 head->total_length_not_including_first_buffer = tail->current_length +
615 tail->total_length_not_including_first_buffer;
618 __sync_add_and_fetch (&tail->n_add_refs, 1);
620 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
622 tail = vlib_get_buffer (vm, tail->next_buffer);
627 /* Initializes the buffer as an empty packet with no chained buffers. */
629 vlib_buffer_chain_init (vlib_buffer_t * first)
631 first->total_length_not_including_first_buffer = 0;
632 first->current_length = 0;
633 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
634 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
637 /* The provided next_bi buffer index is appended to the end of the packet. */
638 always_inline vlib_buffer_t *
639 vlib_buffer_chain_buffer (vlib_main_t * vm,
640 vlib_buffer_t * first,
641 vlib_buffer_t * last, u32 next_bi)
643 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
644 last->next_buffer = next_bi;
645 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
646 next_buffer->current_length = 0;
647 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
651 /* Increases or decreases the packet length.
652 * It does not allocate or deallocate new buffers.
653 * Therefore, the added length must be compatible
654 * with the last buffer. */
656 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
657 vlib_buffer_t * last, i32 len)
659 last->current_length += len;
661 first->total_length_not_including_first_buffer += len;
664 /* Copy data to the end of the packet and increases its length.
665 * It does not allocate new buffers.
666 * Returns the number of copied bytes. */
668 vlib_buffer_chain_append_data (vlib_main_t * vm,
670 vlib_buffer_t * first,
671 vlib_buffer_t * last, void *data, u16 data_len)
674 vlib_buffer_free_list_buffer_size (vm, free_list_index);
675 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
676 u16 len = clib_min (data_len,
677 n_buffer_bytes - last->current_length -
679 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
681 vlib_buffer_chain_increase_length (first, last, len);
685 /* Copy data to the end of the packet and increases its length.
686 * Allocates additional buffers from the free list if necessary.
687 * Returns the number of copied bytes.
688 * 'last' value is modified whenever new buffers are allocated and
689 * chained and points to the last buffer in the chain. */
691 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
693 vlib_buffer_t * first,
694 vlib_buffer_t ** last,
695 void *data, u16 data_len);
696 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
698 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
699 format_vlib_buffer_contents;
703 /* Vector of packet data. */
706 /* Number of buffers to allocate in each call to physmem
708 u32 min_n_buffers_each_physmem_alloc;
710 /* Buffer free list for this template. */
714 } vlib_packet_template_t;
716 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
717 vlib_packet_template_t * t);
719 void vlib_packet_template_init (vlib_main_t * vm,
720 vlib_packet_template_t * t,
722 uword n_packet_data_bytes,
723 uword min_n_buffers_each_physmem_alloc,
726 void *vlib_packet_template_get_packet (vlib_main_t * vm,
727 vlib_packet_template_t * t,
731 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
733 vec_free (t->packet_data);
737 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
739 serialize_stream_t *s = &m->stream;
740 vlib_serialize_buffer_main_t *sm
741 = uword_to_pointer (m->stream.data_function_opaque,
742 vlib_serialize_buffer_main_t *);
743 vlib_main_t *vm = sm->vlib_main;
746 n = s->n_buffer_bytes - s->current_buffer_index;
747 if (sm->last_buffer != ~0)
749 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
750 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
752 b = vlib_get_buffer (vm, b->next_buffer);
753 n += b->current_length;
758 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
759 n += vlib_buffer_index_length_in_chain (vm, f[0]);
766 /* Set a buffer quickly into "uninitialized" state. We want this to
767 be extremely cheap and arrange for all fields that need to be
768 initialized to be in the first 128 bits of the buffer. */
770 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
771 vlib_buffer_free_list_t * fl)
773 vlib_buffer_t *src = &fl->buffer_init_template;
775 /* Make sure vlib_buffer_t is cacheline aligned and sized */
776 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
777 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
778 CLIB_CACHE_LINE_BYTES);
779 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
780 CLIB_CACHE_LINE_BYTES * 2);
782 /* Make sure buffer template is sane. */
783 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
785 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
786 STRUCT_MARK_PTR (src, template_start),
787 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
788 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
790 /* Not in the first 16 octets. */
791 dst->n_add_refs = src->n_add_refs;
793 /* Make sure it really worked. */
794 #define _(f) ASSERT (dst->f == src->f);
799 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
800 /* total_length_not_including_first_buffer is not in the template anymore
801 * so it may actually not zeroed for some buffers. One option is to
802 * uncomment the line lower (comes at a cost), the other, is to just not
804 /* dst->total_length_not_including_first_buffer = 0; */
805 ASSERT (dst->n_add_refs == 0);
809 vlib_buffer_add_to_free_list (vlib_main_t * vm,
810 vlib_buffer_free_list_t * f,
811 u32 buffer_index, u8 do_init)
814 b = vlib_get_buffer (vm, buffer_index);
815 if (PREDICT_TRUE (do_init))
816 vlib_buffer_init_for_free_list (b, f);
817 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
819 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
821 vlib_buffer_free_list_t *mf;
822 mf = vlib_buffer_get_free_list (vlib_mains[0], f->index);
823 clib_spinlock_lock (&mf->global_buffers_lock);
824 /* keep last stored buffers, as they are more likely hot in the cache */
825 vec_add_aligned (mf->global_buffers, f->buffers, VLIB_FRAME_SIZE,
826 CLIB_CACHE_LINE_BYTES);
827 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
828 clib_spinlock_unlock (&mf->global_buffers_lock);
833 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
834 vlib_buffer_t * dst1,
835 vlib_buffer_free_list_t * fl)
837 vlib_buffer_t *src = &fl->buffer_init_template;
839 /* Make sure buffer template is sane. */
840 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
842 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
843 STRUCT_MARK_PTR (src, template_start),
844 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
845 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
847 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
848 STRUCT_MARK_PTR (src, template_start),
849 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
850 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
852 /* Not in the first 16 octets. */
853 dst0->n_add_refs = src->n_add_refs;
854 dst1->n_add_refs = src->n_add_refs;
856 /* Make sure it really worked. */
857 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
863 ASSERT (dst0->total_length_not_including_first_buffer == 0);
864 ASSERT (dst1->total_length_not_including_first_buffer == 0);
865 ASSERT (dst0->n_add_refs == 0);
866 ASSERT (dst1->n_add_refs == 0);
870 extern u32 *vlib_buffer_state_validation_lock;
871 extern uword *vlib_buffer_state_validation_hash;
872 extern void *vlib_buffer_state_heap;
876 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
882 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
884 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
887 p = hash_get (vlib_buffer_state_validation_hash, b);
889 /* If we don't know about b, declare it to be in the expected state */
892 hash_set (vlib_buffer_state_validation_hash, b, expected);
896 if (p[0] != expected)
900 vlib_main_t *vm = &vlib_global_main;
904 bi = vlib_get_buffer_index (vm, b);
906 clib_mem_set_heap (oldheap);
907 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
908 vlib_time_now (vm), bi,
909 p[0] ? "busy" : "free", expected ? "busy" : "free");
913 CLIB_MEMORY_BARRIER ();
914 *vlib_buffer_state_validation_lock = 0;
915 clib_mem_set_heap (oldheap);
920 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
925 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
927 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
930 hash_set (vlib_buffer_state_validation_hash, b, expected);
932 CLIB_MEMORY_BARRIER ();
933 *vlib_buffer_state_validation_lock = 0;
934 clib_mem_set_heap (oldheap);
938 #endif /* included_vlib_buffer_funcs_h */
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */