2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 vlib buffer access methods.
50 /** \brief Translate buffer index into buffer pointer
52 @param vm - (vlib_main_t *) vlib main data structure pointer
53 @param buffer_index - (u32) buffer index
54 @return - (vlib_buffer_t *) buffer pointer
56 always_inline vlib_buffer_t *
57 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
59 vlib_buffer_main_t *bm = vm->buffer_main;
60 uword offset = ((uword) buffer_index) << CLIB_LOG2_CACHE_LINE_BYTES;
61 ASSERT (offset < bm->buffer_mem_size);
63 return uword_to_pointer (bm->buffer_mem_start + offset, void *);
66 /** \brief Translate buffer pointer into buffer index
68 @param vm - (vlib_main_t *) vlib main data structure pointer
69 @param p - (void *) buffer pointer
70 @return - (u32) buffer index
74 vlib_get_buffer_index (vlib_main_t * vm, void *p)
76 vlib_buffer_main_t *bm = vm->buffer_main;
77 uword offset = pointer_to_uword (p) - bm->buffer_mem_start;
78 ASSERT (pointer_to_uword (p) >= bm->buffer_mem_start);
79 ASSERT (offset < bm->buffer_mem_size);
80 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
81 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
84 /** \brief Get next buffer in buffer linklist, or zero for end of list.
86 @param vm - (vlib_main_t *) vlib main data structure pointer
87 @param b - (void *) buffer pointer
88 @return - (vlib_buffer_t *) next buffer, or NULL
90 always_inline vlib_buffer_t *
91 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
93 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
94 ? vlib_get_buffer (vm, b->next_buffer) : 0);
97 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
98 vlib_buffer_t * b_first);
100 /** \brief Get length in bytes of the buffer chain
102 @param vm - (vlib_main_t *) vlib main data structure pointer
103 @param b - (void *) buffer pointer
104 @return - (uword) length of buffer chain
107 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
109 uword len = b->current_length;
111 if (PREDICT_TRUE ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0))
114 if (PREDICT_TRUE (b->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID))
115 return len + b->total_length_not_including_first_buffer;
117 return vlib_buffer_length_in_chain_slow_path (vm, b);
120 /** \brief Get length in bytes of the buffer index buffer chain
122 @param vm - (vlib_main_t *) vlib main data structure pointer
123 @param bi - (u32) buffer index
124 @return - (uword) length of buffer chain
127 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
129 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
130 return vlib_buffer_length_in_chain (vm, b);
133 /** \brief Copy buffer contents to memory
135 @param vm - (vlib_main_t *) vlib main data structure pointer
136 @param buffer_index - (u32) buffer index
137 @param contents - (u8 *) memory, <strong>must be large enough</strong>
138 @return - (uword) length of buffer chain
141 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
143 uword content_len = 0;
149 b = vlib_get_buffer (vm, buffer_index);
150 l = b->current_length;
151 clib_memcpy (contents + content_len, b->data + b->current_data, l);
153 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
155 buffer_index = b->next_buffer;
161 /* Return physical address of buffer->data start. */
163 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
165 vlib_physmem_region_index_t pri;
166 vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
167 pri = vm->buffer_main->buffer_pools[b->buffer_pool_index].physmem_region;
168 return vlib_physmem_offset_to_physical (vm, pri,
169 (((uword) buffer_index) <<
170 CLIB_LOG2_CACHE_LINE_BYTES) +
171 STRUCT_OFFSET_OF (vlib_buffer_t,
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)	\
  do {							\
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);	\
    vlib_prefetch_buffer_header (_b, type);		\
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
 */
#define vlib_buffer_foreach_allocated(vm,bi,body)                \
do {                                                             \
  vlib_main_t * _vmain = (vm);                                   \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;            \
  hash_pair_t * _vbpair;                                         \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({       \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {      \
      (bi) = _vbpair->key;                                       \
      (body);                                                    \
    }                                                            \
  }));                                                           \
} while (0)
/* Tracked allocation state of a buffer index, used for debug validation. */
typedef enum
{
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
221 always_inline vlib_buffer_known_state_t
222 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
224 vlib_buffer_main_t *bm = vm->buffer_main;
226 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
227 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
228 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
229 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
233 vlib_buffer_set_known_state (vlib_main_t * vm,
235 vlib_buffer_known_state_t state)
237 vlib_buffer_main_t *bm = vm->buffer_main;
238 clib_spinlock_lock (&bm->buffer_known_hash_lockp);
239 hash_set (bm->buffer_known_hash, buffer_index, state);
240 clib_spinlock_unlock (&bm->buffer_known_hash_lockp);
243 /* Validates sanity of a single buffer.
244 Returns format'ed vector with error message if any. */
245 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
248 /** \brief Allocate buffers into supplied array
250 @param vm - (vlib_main_t *) vlib main data structure pointer
251 @param buffers - (u32 * ) buffer index array
252 @param n_buffers - (u32) number of buffers requested
253 @return - (u32) number of buffers actually allocated, may be
254 less than the number requested or zero
257 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers)
259 vlib_buffer_main_t *bm = vm->buffer_main;
261 ASSERT (bm->cb.vlib_buffer_alloc_cb);
263 return bm->cb.vlib_buffer_alloc_cb (vm, buffers, n_buffers);
267 vlib_buffer_round_size (u32 size)
269 return round_pow2 (size, sizeof (vlib_buffer_t));
273 vlib_buffer_get_free_list_index (vlib_buffer_t * b)
275 return b->flags & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
279 vlib_buffer_set_free_list_index (vlib_buffer_t * b, u32 index)
281 /* if there is an need for more free lists we should consider
282 storig data in the 2nd cacheline */
283 ASSERT (VLIB_BUFFER_FREE_LIST_INDEX_MASK & 1);
284 ASSERT (index <= VLIB_BUFFER_FREE_LIST_INDEX_MASK);
286 b->flags &= ~VLIB_BUFFER_FREE_LIST_INDEX_MASK;
287 b->flags |= index & VLIB_BUFFER_FREE_LIST_INDEX_MASK;
290 /** \brief Allocate buffers from specific freelist into supplied array
292 @param vm - (vlib_main_t *) vlib main data structure pointer
293 @param buffers - (u32 * ) buffer index array
294 @param n_buffers - (u32) number of buffers requested
295 @return - (u32) number of buffers actually allocated, may be
296 less than the number requested or zero
299 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
301 u32 n_buffers, u32 free_list_index)
303 vlib_buffer_main_t *bm = vm->buffer_main;
305 ASSERT (bm->cb.vlib_buffer_alloc_from_free_list_cb);
307 return bm->cb.vlib_buffer_alloc_from_free_list_cb (vm, buffers, n_buffers,
311 /** \brief Free buffers
312 Frees the entire buffer chain for each buffer
314 @param vm - (vlib_main_t *) vlib main data structure pointer
315 @param buffers - (u32 * ) buffer index array
316 @param n_buffers - (u32) number of buffers to free
320 vlib_buffer_free (vlib_main_t * vm,
321 /* pointer to first buffer */
323 /* number of buffers to free */
326 vlib_buffer_main_t *bm = vm->buffer_main;
328 ASSERT (bm->cb.vlib_buffer_free_cb);
330 return bm->cb.vlib_buffer_free_cb (vm, buffers, n_buffers);
333 /** \brief Free buffers, does not free the buffer chain for each buffer
335 @param vm - (vlib_main_t *) vlib main data structure pointer
336 @param buffers - (u32 * ) buffer index array
337 @param n_buffers - (u32) number of buffers to free
341 vlib_buffer_free_no_next (vlib_main_t * vm,
342 /* pointer to first buffer */
344 /* number of buffers to free */
347 vlib_buffer_main_t *bm = vm->buffer_main;
349 ASSERT (bm->cb.vlib_buffer_free_no_next_cb);
351 return bm->cb.vlib_buffer_free_no_next_cb (vm, buffers, n_buffers);
354 /** \brief Free one buffer
355 Shorthand to free a single buffer chain.
357 @param vm - (vlib_main_t *) vlib main data structure pointer
358 @param buffer_index - (u32) buffer index to free
361 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
363 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
366 /* Add/delete buffer free lists. */
367 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
370 vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index)
372 vlib_buffer_main_t *bm = vm->buffer_main;
374 ASSERT (bm->cb.vlib_buffer_delete_free_list_cb);
376 bm->cb.vlib_buffer_delete_free_list_cb (vm, free_list_index);
379 /* Find already existing public free list with given size or create one. */
380 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
383 /* Merge two free lists */
384 void vlib_buffer_merge_free_lists (vlib_buffer_free_list_t * dst,
385 vlib_buffer_free_list_t * src);
387 /* Make sure we have at least given number of unaligned buffers. */
388 void vlib_buffer_free_list_fill_unaligned (vlib_main_t * vm,
389 vlib_buffer_free_list_t *
391 uword n_unaligned_buffers);
394 vlib_buffer_get_free_list_with_size (vlib_main_t * vm, u32 size)
396 vlib_buffer_main_t *bm = vm->buffer_main;
398 size = vlib_buffer_round_size (size);
399 uword *p = hash_get (bm->free_list_by_size, size);
400 return p ? p[0] : ~0;
403 always_inline vlib_buffer_free_list_t *
404 vlib_buffer_get_buffer_free_list (vlib_main_t * vm, vlib_buffer_t * b,
407 vlib_buffer_main_t *bm = vm->buffer_main;
410 *index = i = vlib_buffer_get_free_list_index (b);
411 return pool_elt_at_index (bm->buffer_free_list_pool, i);
414 always_inline vlib_buffer_free_list_t *
415 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
417 vlib_buffer_main_t *bm = vm->buffer_main;
418 vlib_buffer_free_list_t *f;
420 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
422 /* Sanity: indices must match. */
423 ASSERT (f->index == free_list_index);
429 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
431 vlib_buffer_free_list_t *f =
432 vlib_buffer_get_free_list (vm, free_list_index);
433 return f->n_data_bytes;
436 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
438 /* Reasonably fast buffer copy routine. */
440 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
461 /* Append given data to end of buffer, possibly allocating new buffers. */
462 u32 vlib_buffer_add_data (vlib_main_t * vm,
464 u32 buffer_index, void *data, u32 n_data_bytes);
466 /* duplicate all buffers in chain */
467 always_inline vlib_buffer_t *
468 vlib_buffer_copy (vlib_main_t * vm, vlib_buffer_t * b)
470 vlib_buffer_t *s, *d, *fd;
471 uword n_alloc, n_buffers = 1;
472 u32 flag_mask = VLIB_BUFFER_NEXT_PRESENT | VLIB_BUFFER_TOTAL_LENGTH_VALID;
476 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
479 s = vlib_get_buffer (vm, s->next_buffer);
481 u32 new_buffers[n_buffers];
483 n_alloc = vlib_buffer_alloc (vm, new_buffers, n_buffers);
485 /* No guarantee that we'll get all the buffers we asked for */
486 if (PREDICT_FALSE (n_alloc < n_buffers))
489 vlib_buffer_free (vm, new_buffers, n_alloc);
495 fd = d = vlib_get_buffer (vm, new_buffers[0]);
496 d->current_data = s->current_data;
497 d->current_length = s->current_length;
498 d->flags = s->flags & flag_mask;
499 d->total_length_not_including_first_buffer =
500 s->total_length_not_including_first_buffer;
501 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
502 clib_memcpy (vlib_buffer_get_current (d),
503 vlib_buffer_get_current (s), s->current_length);
506 for (i = 1; i < n_buffers; i++)
509 d->next_buffer = new_buffers[i];
511 s = vlib_get_buffer (vm, s->next_buffer);
512 d = vlib_get_buffer (vm, new_buffers[i]);
513 d->current_data = s->current_data;
514 d->current_length = s->current_length;
515 clib_memcpy (vlib_buffer_get_current (d),
516 vlib_buffer_get_current (s), s->current_length);
517 d->flags = s->flags & flag_mask;
523 /** \brief Create multiple clones of buffer and store them in the supplied array
525 @param vm - (vlib_main_t *) vlib main data structure pointer
526 @param src_buffer - (u32) source buffer index
527 @param buffers - (u32 * ) buffer index array
528 @param n_buffers - (u8) number of buffer clones requested
529 @param head_end_offset - (u16) offset relative to current position
530 where packet head ends
531 @return - (u8) number of buffers actually cloned, may be
532 less than the number requested or zero
536 vlib_buffer_clone (vlib_main_t * vm, u32 src_buffer, u32 * buffers,
537 u8 n_buffers, u16 head_end_offset)
540 vlib_buffer_t *s = vlib_get_buffer (vm, src_buffer);
542 ASSERT (s->n_add_refs == 0);
545 if (s->current_length <= head_end_offset + CLIB_CACHE_LINE_BYTES * 2)
547 buffers[0] = src_buffer;
548 for (i = 1; i < n_buffers; i++)
551 d = vlib_buffer_copy (vm, s);
554 buffers[i] = vlib_get_buffer_index (vm, d);
560 n_buffers = vlib_buffer_alloc_from_free_list (vm, buffers, n_buffers,
561 vlib_buffer_get_free_list_index
563 if (PREDICT_FALSE (n_buffers == 0))
565 buffers[0] = src_buffer;
569 for (i = 0; i < n_buffers; i++)
571 vlib_buffer_t *d = vlib_get_buffer (vm, buffers[i]);
572 d->current_data = s->current_data;
573 d->current_length = head_end_offset;
574 vlib_buffer_set_free_list_index (d,
575 vlib_buffer_get_free_list_index (s));
576 d->total_length_not_including_first_buffer =
577 s->total_length_not_including_first_buffer + s->current_length -
579 d->flags = s->flags | VLIB_BUFFER_NEXT_PRESENT;
580 d->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
581 clib_memcpy (d->opaque, s->opaque, sizeof (s->opaque));
582 clib_memcpy (vlib_buffer_get_current (d), vlib_buffer_get_current (s),
584 d->next_buffer = src_buffer;
586 vlib_buffer_advance (s, head_end_offset);
587 s->n_add_refs = n_buffers - 1;
588 while (s->flags & VLIB_BUFFER_NEXT_PRESENT)
590 s = vlib_get_buffer (vm, s->next_buffer);
591 s->n_add_refs = n_buffers - 1;
597 /** \brief Attach cloned tail to the buffer
599 @param vm - (vlib_main_t *) vlib main data structure pointer
600 @param head - (vlib_buffer_t *) head buffer
601 @param tail - (Vlib buffer_t *) tail buffer to clone and attach to head
605 vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
606 vlib_buffer_t * tail)
608 ASSERT ((head->flags & VLIB_BUFFER_NEXT_PRESENT) == 0);
609 ASSERT (vlib_buffer_get_free_list_index (head) ==
610 vlib_buffer_get_free_list_index (tail));
612 head->flags |= VLIB_BUFFER_NEXT_PRESENT;
613 head->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
614 head->flags &= ~VLIB_BUFFER_EXT_HDR_VALID;
615 head->flags |= (tail->flags & VLIB_BUFFER_TOTAL_LENGTH_VALID);
616 head->next_buffer = vlib_get_buffer_index (vm, tail);
617 head->total_length_not_including_first_buffer = tail->current_length +
618 tail->total_length_not_including_first_buffer;
621 __sync_add_and_fetch (&tail->n_add_refs, 1);
623 if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
625 tail = vlib_get_buffer (vm, tail->next_buffer);
630 /* Initializes the buffer as an empty packet with no chained buffers. */
632 vlib_buffer_chain_init (vlib_buffer_t * first)
634 first->total_length_not_including_first_buffer = 0;
635 first->current_length = 0;
636 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
637 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
640 /* The provided next_bi buffer index is appended to the end of the packet. */
641 always_inline vlib_buffer_t *
642 vlib_buffer_chain_buffer (vlib_main_t * vm,
643 vlib_buffer_t * first,
644 vlib_buffer_t * last, u32 next_bi)
646 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
647 last->next_buffer = next_bi;
648 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
649 next_buffer->current_length = 0;
650 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
654 /* Increases or decreases the packet length.
655 * It does not allocate or deallocate new buffers.
656 * Therefore, the added length must be compatible
657 * with the last buffer. */
659 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
660 vlib_buffer_t * last, i32 len)
662 last->current_length += len;
664 first->total_length_not_including_first_buffer += len;
667 /* Copy data to the end of the packet and increases its length.
668 * It does not allocate new buffers.
669 * Returns the number of copied bytes. */
671 vlib_buffer_chain_append_data (vlib_main_t * vm,
673 vlib_buffer_t * first,
674 vlib_buffer_t * last, void *data, u16 data_len)
677 vlib_buffer_free_list_buffer_size (vm, free_list_index);
678 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
679 u16 len = clib_min (data_len,
680 n_buffer_bytes - last->current_length -
682 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
684 vlib_buffer_chain_increase_length (first, last, len);
688 /* Copy data to the end of the packet and increases its length.
689 * Allocates additional buffers from the free list if necessary.
690 * Returns the number of copied bytes.
691 * 'last' value is modified whenever new buffers are allocated and
692 * chained and points to the last buffer in the chain. */
694 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
696 vlib_buffer_t * first,
697 vlib_buffer_t ** last,
698 void *data, u16 data_len);
699 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
701 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
702 format_vlib_buffer_contents;
706 /* Vector of packet data. */
709 /* Number of buffers to allocate in each call to physmem
711 u32 min_n_buffers_each_physmem_alloc;
713 /* Buffer free list for this template. */
717 } vlib_packet_template_t;
719 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
720 vlib_packet_template_t * t);
722 void vlib_packet_template_init (vlib_main_t * vm,
723 vlib_packet_template_t * t,
725 uword n_packet_data_bytes,
726 uword min_n_buffers_each_physmem_alloc,
729 void *vlib_packet_template_get_packet (vlib_main_t * vm,
730 vlib_packet_template_t * t,
734 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
736 vec_free (t->packet_data);
740 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
742 serialize_stream_t *s = &m->stream;
743 vlib_serialize_buffer_main_t *sm
744 = uword_to_pointer (m->stream.data_function_opaque,
745 vlib_serialize_buffer_main_t *);
746 vlib_main_t *vm = sm->vlib_main;
749 n = s->n_buffer_bytes - s->current_buffer_index;
750 if (sm->last_buffer != ~0)
752 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
753 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
755 b = vlib_get_buffer (vm, b->next_buffer);
756 n += b->current_length;
761 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
762 n += vlib_buffer_index_length_in_chain (vm, f[0]);
769 /* Set a buffer quickly into "uninitialized" state. We want this to
770 be extremely cheap and arrange for all fields that need to be
771 initialized to be in the first 128 bits of the buffer. */
773 vlib_buffer_init_for_free_list (vlib_buffer_t * dst,
774 vlib_buffer_free_list_t * fl)
776 vlib_buffer_t *src = &fl->buffer_init_template;
778 /* Make sure vlib_buffer_t is cacheline aligned and sized */
779 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
780 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
781 CLIB_CACHE_LINE_BYTES);
782 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
783 CLIB_CACHE_LINE_BYTES * 2);
785 /* Make sure buffer template is sane. */
786 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
788 clib_memcpy (STRUCT_MARK_PTR (dst, template_start),
789 STRUCT_MARK_PTR (src, template_start),
790 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
791 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
793 /* Not in the first 16 octets. */
794 dst->n_add_refs = src->n_add_refs;
796 /* Make sure it really worked. */
797 #define _(f) ASSERT (dst->f == src->f);
802 /* ASSERT (dst->total_length_not_including_first_buffer == 0); */
803 /* total_length_not_including_first_buffer is not in the template anymore
804 * so it may actually not zeroed for some buffers. One option is to
805 * uncomment the line lower (comes at a cost), the other, is to just not
807 /* dst->total_length_not_including_first_buffer = 0; */
808 ASSERT (dst->n_add_refs == 0);
812 vlib_buffer_add_to_free_list (vlib_main_t * vm,
813 vlib_buffer_free_list_t * f,
814 u32 buffer_index, u8 do_init)
817 b = vlib_get_buffer (vm, buffer_index);
818 if (PREDICT_TRUE (do_init))
819 vlib_buffer_init_for_free_list (b, f);
820 vec_add1_aligned (f->buffers, buffer_index, CLIB_CACHE_LINE_BYTES);
822 if (vec_len (f->buffers) > 4 * VLIB_FRAME_SIZE)
824 vlib_buffer_free_list_t *mf;
825 mf = vlib_buffer_get_free_list (vlib_mains[0], f->index);
826 clib_spinlock_lock (&mf->global_buffers_lock);
827 /* keep last stored buffers, as they are more likely hot in the cache */
828 vec_add_aligned (mf->global_buffers, f->buffers, VLIB_FRAME_SIZE,
829 CLIB_CACHE_LINE_BYTES);
830 vec_delete (f->buffers, VLIB_FRAME_SIZE, 0);
831 f->n_alloc -= VLIB_FRAME_SIZE;
832 clib_spinlock_unlock (&mf->global_buffers_lock);
837 vlib_buffer_init_two_for_free_list (vlib_buffer_t * dst0,
838 vlib_buffer_t * dst1,
839 vlib_buffer_free_list_t * fl)
841 vlib_buffer_t *src = &fl->buffer_init_template;
843 /* Make sure buffer template is sane. */
844 ASSERT (fl->index == vlib_buffer_get_free_list_index (src));
846 clib_memcpy (STRUCT_MARK_PTR (dst0, template_start),
847 STRUCT_MARK_PTR (src, template_start),
848 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
849 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
851 clib_memcpy (STRUCT_MARK_PTR (dst1, template_start),
852 STRUCT_MARK_PTR (src, template_start),
853 STRUCT_OFFSET_OF (vlib_buffer_t, template_end) -
854 STRUCT_OFFSET_OF (vlib_buffer_t, template_start));
856 /* Not in the first 16 octets. */
857 dst0->n_add_refs = src->n_add_refs;
858 dst1->n_add_refs = src->n_add_refs;
860 /* Make sure it really worked. */
861 #define _(f) ASSERT (dst0->f == src->f); ASSERT( dst1->f == src->f)
867 ASSERT (dst0->total_length_not_including_first_buffer == 0);
868 ASSERT (dst1->total_length_not_including_first_buffer == 0);
869 ASSERT (dst0->n_add_refs == 0);
870 ASSERT (dst1->n_add_refs == 0);
874 extern u32 *vlib_buffer_state_validation_lock;
875 extern uword *vlib_buffer_state_validation_hash;
876 extern void *vlib_buffer_state_heap;
880 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
886 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
888 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
891 p = hash_get (vlib_buffer_state_validation_hash, b);
893 /* If we don't know about b, declare it to be in the expected state */
896 hash_set (vlib_buffer_state_validation_hash, b, expected);
900 if (p[0] != expected)
904 vlib_main_t *vm = &vlib_global_main;
908 bi = vlib_get_buffer_index (vm, b);
910 clib_mem_set_heap (oldheap);
911 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
912 vlib_time_now (vm), bi,
913 p[0] ? "busy" : "free", expected ? "busy" : "free");
917 CLIB_MEMORY_BARRIER ();
918 *vlib_buffer_state_validation_lock = 0;
919 clib_mem_set_heap (oldheap);
924 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
929 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
931 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
934 hash_set (vlib_buffer_state_validation_hash, b, expected);
936 CLIB_MEMORY_BARRIER ();
937 *vlib_buffer_state_validation_lock = 0;
938 clib_mem_set_heap (oldheap);
942 #endif /* included_vlib_buffer_funcs_h */
945 * fd.io coding-style-patch-verification: ON
948 * eval: (c-set-style "gnu")