2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * buffer_funcs.h: VLIB buffer related functions/inlines
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 #undef always_inline // dpdk and clib use conflicting always_inline macros.
47 #include <rte_config.h>
51 #define always_inline static inline
53 #define always_inline static inline __attribute__ ((__always_inline__))
58 vlib buffer access methods.
62 /** \brief Translate buffer index into buffer pointer
64 @param vm - (vlib_main_t *) vlib main data structure pointer
65 @param buffer_index - (u32) buffer index
66 @return - (vlib_buffer_t *) buffer pointer
/* Buffer indices are cache-line-granular offsets into the physmem region,
   so the pointer is recovered with one shift plus a base add. */
68 always_inline vlib_buffer_t *
69 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
71 return vlib_physmem_at_offset (&vm->physmem_main, ((uword) buffer_index)
72 << CLIB_LOG2_CACHE_LINE_BYTES);
75 /** \brief Translate buffer pointer into buffer index
77 @param vm - (vlib_main_t *) vlib main data structure pointer
78 @param b - (void *) buffer pointer
79 @return - (u32) buffer index
/* Inverse of vlib_get_buffer: take p's byte offset within physmem and
   scale it down by the cache-line size. The ASSERT enforces that p is
   cache-line aligned, i.e. actually the start of a buffer. */
82 vlib_get_buffer_index (vlib_main_t * vm, void *p)
84 uword offset = vlib_physmem_offset_of (&vm->physmem_main, p);
85 ASSERT ((offset % (1 << CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
86 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
89 /** \brief Get next buffer in buffer linklist, or zero for end of list.
91 @param vm - (vlib_main_t *) vlib main data structure pointer
92 @param b - (void *) buffer pointer
93 @return - (vlib_buffer_t *) next buffer, or NULL
/* Follow the chain link only when VLIB_BUFFER_NEXT_PRESENT says
   b->next_buffer is valid; otherwise return 0 to signal end of chain. */
95 always_inline vlib_buffer_t *
96 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
98 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
99 ? vlib_get_buffer (vm, b->next_buffer) : 0);
102 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm,
103 vlib_buffer_t * b_first);
105 /** \brief Get length in bytes of the buffer chain
107 @param vm - (vlib_main_t *) vlib main data structure pointer
108 @param b - (void *) buffer pointer
109 @return - (uword) length of buffer chain
/* Fast path: trust the cached total when TOTAL_LENGTH_VALID is set or
   there is no chain at all. Only a chained buffer whose cached total is
   stale (NEXT_PRESENT set, TOTAL_LENGTH_VALID clear) pays for the
   slow-path walk; PREDICT_FALSE marks that branch as unlikely. */
112 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
114 uword l = b->current_length + b->total_length_not_including_first_buffer;
115 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
116 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
117 == VLIB_BUFFER_NEXT_PRESENT))
118 return vlib_buffer_length_in_chain_slow_path (vm, b);
122 /** \brief Get length in bytes of the buffer index buffer chain
124 @param vm - (vlib_main_t *) vlib main data structure pointer
125 @param bi - (u32) buffer index
126 @return - (uword) length of buffer chain
/* Convenience wrapper: resolve the buffer index, then measure the chain. */
129 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
131 vlib_buffer_t *b = vlib_get_buffer (vm, bi);
132 return vlib_buffer_length_in_chain (vm, b);
135 /** \brief Copy buffer contents to memory
137 @param vm - (vlib_main_t *) vlib main data structure pointer
138 @param bi - (u32) buffer index
139 @param contents - (u8 *) memory, <strong>must be large enough</strong>
140 @return - (uword) length of buffer chain
/* Linearize a (possibly chained) buffer into caller-supplied memory.
   No bounds check is performed: 'contents' must be large enough for the
   whole chain (see the doc comment above).
   NOTE(review): the loop framing and accumulator-update lines are elided
   in this excerpt; the visible lines copy one segment and advance to
   b->next_buffer until NEXT_PRESENT is clear. */
143 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
145 uword content_len = 0;
151 b = vlib_get_buffer (vm, buffer_index);
152 l = b->current_length;
153 clib_memcpy (contents + content_len, b->data + b->current_data, l);
155 if (!(b->flags & VLIB_BUFFER_NEXT_PRESENT))
157 buffer_index = b->next_buffer;
163 /* Return physical address of buffer->data start. */
/* Physical address of buffer->data (for DMA): the buffer's cache-line
   offset plus the offset of the data field inside vlib_buffer_t, fed
   through the physmem offset-to-physical translation. */
165 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
167 return vlib_physmem_offset_to_physical (&vm->physmem_main,
168 (((uword) buffer_index) <<
169 CLIB_LOG2_CACHE_LINE_BYTES) +
170 STRUCT_OFFSET_OF (vlib_buffer_t,
174 /** \brief Prefetch buffer metadata by buffer index
175 The first 64 bytes of buffer contains most header information
177 @param vm - (vlib_main_t *) vlib main data structure pointer
178 @param bi - (u32) buffer index
179 @param type - LOAD, STORE. In most cases, STORE is the right answer
181 /* Prefetch buffer header given index. */
/* Resolve bi to a pointer, then prefetch the buffer's first (metadata)
   cache line; 'type' selects a LOAD vs STORE prefetch hint. */
182 #define vlib_prefetch_buffer_with_index(vm,bi,type) \
184 vlib_buffer_t * _b = vlib_get_buffer (vm, bi); \
185 vlib_prefetch_buffer_header (_b, type); \
189 /* Iterate over known allocated vlib bufs. You probably do not want
191 @param vm the vlib_main_t
192 @param bi found allocated buffer index
193 @param body operation to perform on buffer index
194 function executes body for each allocated buffer index
/* Walk buffer_known_hash and execute 'body' with (bi) bound to each
   index whose recorded state is VLIB_BUFFER_KNOWN_ALLOCATED. Locals are
   underscore-prefixed to avoid capturing names from the caller's scope. */
196 #define vlib_buffer_foreach_allocated(vm,bi,body) \
198 vlib_main_t * _vmain = (vm); \
199 vlib_buffer_main_t * _bmain = &_vmain->buffer_main; \
200 hash_pair_t * _vbpair; \
201 hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({ \
202 if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) { \
203 (bi) = _vbpair->key; \
/* Tracked allocation state of a buffer index, stored as the value in
   buffer_known_hash (see vlib_buffer_is_known / set_known_state). */
214 /* Index is unknown. */
217 /* Index is known and free/allocated. */
218 VLIB_BUFFER_KNOWN_FREE,
219 VLIB_BUFFER_KNOWN_ALLOCATED,
220 } vlib_buffer_known_state_t;
/* Look up buffer_index in the known-state hash; indices never recorded
   report VLIB_BUFFER_UNKNOWN. Main thread (cpu 0) only — the hash is
   not protected by any lock, hence the ASSERT. */
222 always_inline vlib_buffer_known_state_t
223 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
225 vlib_buffer_main_t *bm = vm->buffer_main;
226 ASSERT (os_get_cpu_number () == 0);
228 uword *p = hash_get (bm->buffer_known_hash, buffer_index);
229 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
/* Record (or overwrite) the known state of a buffer index. Main thread
   (cpu 0) only, for the same unlocked-hash reason as vlib_buffer_is_known. */
233 vlib_buffer_set_known_state (vlib_main_t * vm,
235 vlib_buffer_known_state_t state)
237 vlib_buffer_main_t *bm = vm->buffer_main;
238 ASSERT (os_get_cpu_number () == 0);
239 hash_set (bm->buffer_known_hash, buffer_index, state);
242 /* Validates sanity of a single buffer.
243 Returns format'ed vector with error message if any. */
244 u8 *vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index,
247 /* Validate an array of buffers. As above. */
248 u8 *vlib_validate_buffers (vlib_main_t * vm,
250 uword next_buffer_stride,
252 vlib_buffer_known_state_t known_state,
255 #endif /* DPDK == 0 */
257 clib_error_t *vlib_buffer_pool_create (vlib_main_t * vm, unsigned num_mbufs,
260 /** \brief Allocate buffers into supplied array
262 @param vm - (vlib_main_t *) vlib main data structure pointer
263 @param buffers - (u32 * ) buffer index array
264 @param n_buffers - (u32) number of buffers requested
265 @return - (u32) number of buffers actually allocated, may be
266 less than the number requested or zero
268 u32 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers);
/* Round a byte count up to a multiple of sizeof (vlib_buffer_t). */
271 vlib_buffer_round_size (u32 size)
273 return round_pow2 (size, sizeof (vlib_buffer_t));
276 /** \brief Allocate buffers from specific freelist into supplied array
278 @param vm - (vlib_main_t *) vlib main data structure pointer
279 @param buffers - (u32 * ) buffer index array
280 @param n_buffers - (u32) number of buffers requested
281 @return - (u32) number of buffers actually allocated, may be
282 less than the number requested or zero
284 u32 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
286 u32 n_buffers, u32 free_list_index);
288 /** \brief Free buffers
289 Frees the entire buffer chain for each buffer
291 @param vm - (vlib_main_t *) vlib main data structure pointer
292 @param buffers - (u32 * ) buffer index array
293 @param n_buffers - (u32) number of buffers to free
296 void vlib_buffer_free (vlib_main_t * vm,
297 /* pointer to first buffer */
299 /* number of buffers to free */
302 /** \brief Free buffers, does not free the buffer chain for each buffer
304 @param vm - (vlib_main_t *) vlib main data structure pointer
305 @param buffers - (u32 * ) buffer index array
306 @param n_buffers - (u32) number of buffers to free
309 void vlib_buffer_free_no_next (vlib_main_t * vm,
310 /* pointer to first buffer */
312 /* number of buffers to free */
315 /** \brief Free one buffer
316 Shorthand to free a single buffer chain.
318 @param vm - (vlib_main_t *) vlib main data structure pointer
319 @param buffer_index - (u32) buffer index to free
/* Free one buffer chain by wrapping the index in a one-element array. */
322 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
324 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
327 /* Add/delete buffer free lists. */
328 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
330 void vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index);
332 /* Find already existing public free list with given size or create one. */
333 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes,
/* Fetch a free list by index from the buffer-main pool; the ASSERT
   cross-checks the pool slot against the list's own index field. */
336 always_inline vlib_buffer_free_list_t *
337 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
339 vlib_buffer_main_t *bm = vm->buffer_main;
340 vlib_buffer_free_list_t *f;
342 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
344 /* Sanity: indices must match. */
345 ASSERT (f->index == free_list_index);
/* Data bytes per buffer for the given free list. */
351 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
353 vlib_buffer_free_list_t *f =
354 vlib_buffer_get_free_list (vm, free_list_index);
355 return f->n_data_bytes;
358 void vlib_aligned_memcpy (void *_dst, void *_src, int n_bytes);
360 /* Reasonably fast buffer copy routine. */
362 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
/* Allocate aligned I/O (physmem/DMA-able) memory through the
   platform-specific os_physmem_alloc_aligned hook; on failure *error
   receives a formatted clib error. NOTE(review): the local declaration
   and if/else framing lines are elided in this excerpt. */
384 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
385 uword n_bytes, uword alignment)
388 vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
391 clib_error_return (0, "failed to allocate %wd bytes of I/O memory",
398 /* By default allocate I/O memory with cache line alignment. */
/* Cache-line-aligned shorthand for vlib_physmem_alloc_aligned. */
400 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
402 return vlib_physmem_alloc_aligned (vm, error, n_bytes,
403 CLIB_CACHE_LINE_BYTES);
/* Release memory obtained from vlib_physmem_alloc* via the platform hook. */
407 vlib_physmem_free (vlib_main_t * vm, void *mem)
409 return vm->os_physmem_free (mem);
/* Virtual-to-physical translation: compute mem's offset within the
   mapped physmem window, then convert that offset to a physical address. */
413 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void *mem)
415 vlib_physmem_main_t *pm = &vm->physmem_main;
416 uword o = pointer_to_uword (mem) - pm->virtual.start;
417 return vlib_physmem_offset_to_physical (pm, o);
420 /* Append given data to end of buffer, possibly allocating new buffers. */
421 u32 vlib_buffer_add_data (vlib_main_t * vm,
423 u32 buffer_index, void *data, u32 n_data_bytes);
426 * vlib_buffer_chain_* functions provide a way to create long buffers.
427 * When DPDK is enabled, the 'hidden' DPDK header is taken care of transparently.
430 /* Initializes the buffer as an empty packet with no chained buffers. */
/* Reset 'first' to an empty, unchained packet: zero lengths, clear the
   NEXT_PRESENT flag and mark the (zero) cached total as valid. The
   rte_mbuf lines keep the shadow DPDK mbuf in sync (reset, and data_off
   aligned with current_data); they are compiled only when DPDK is on. */
432 vlib_buffer_chain_init (vlib_buffer_t * first)
434 first->total_length_not_including_first_buffer = 0;
435 first->current_length = 0;
436 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
437 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
439 struct rte_mbuf *mb = rte_mbuf_from_vlib_buffer (first);
440 rte_pktmbuf_reset (mb);
441 mb->data_off = VLIB_BUFFER_PRE_DATA_SIZE + first->current_data;
445 /* The provided next_bi buffer index is appended to the end of the packet. */
/* Append next_bi after 'last': set last's chain link and NEXT_PRESENT
   flag, and clear the new tail's length/chain state so it starts empty.
   The rte_mbuf section mirrors the chain in the shadow DPDK mbufs
   (head/tail bookkeeping, next pointer, data_off of the new segment). */
446 always_inline vlib_buffer_t *
447 vlib_buffer_chain_buffer (vlib_main_t * vm,
448 vlib_buffer_t * first,
449 vlib_buffer_t * last, u32 next_bi)
451 vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
452 last->next_buffer = next_bi;
453 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
454 next_buffer->current_length = 0;
455 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
458 mb = rte_mbuf_from_vlib_buffer (first);
461 mb = rte_mbuf_from_vlib_buffer (last);
462 mb->next = rte_mbuf_from_vlib_buffer (next_buffer);
464 mb = rte_mbuf_from_vlib_buffer (next_buffer);
466 mb->data_off = VLIB_BUFFER_PRE_DATA_SIZE + next_buffer->current_data;
472 /* Increases or decreases the packet length.
473 * It does not allocate or deallocate new buffers.
474 * Therefore, the added length must be compatible
475 * with the last buffer. */
/* Adjust length bookkeeping on the tail and the head of the chain;
   len is signed, so this also handles shrinking. With DPDK, the shadow
   mbufs' pkt_len (head) and data_len (tail) are kept in sync.
   NOTE(review): the condition guarding the head update is elided in
   this excerpt. */
477 vlib_buffer_chain_increase_length (vlib_buffer_t * first,
478 vlib_buffer_t * last, i32 len)
480 last->current_length += len;
482 first->total_length_not_including_first_buffer += len;
484 struct rte_mbuf *mb_first = rte_mbuf_from_vlib_buffer (first);
485 struct rte_mbuf *mb_last = rte_mbuf_from_vlib_buffer (last);
486 mb_first->pkt_len += len;
487 mb_last->data_len += len;
491 /* Copy data to the end of the packet and increases its length.
492 * It does not allocate new buffers.
493 * Returns the number of copied bytes. */
/* Copy up to data_len bytes into 'last', clamped to the space remaining
   in a free-list-sized buffer, then update the chain length accounting.
   Does not allocate; returns the number of bytes copied (len).
   NOTE(review): some lines of the clamp/memcpy expressions are elided
   in this excerpt. */
495 vlib_buffer_chain_append_data (vlib_main_t * vm,
497 vlib_buffer_t * first,
498 vlib_buffer_t * last, void *data, u16 data_len)
501 vlib_buffer_free_list_buffer_size (vm, free_list_index);
502 ASSERT (n_buffer_bytes >= last->current_length + last->current_data);
503 u16 len = clib_min (data_len,
504 n_buffer_bytes - last->current_length -
506 clib_memcpy (vlib_buffer_get_current (last) + last->current_length, data,
508 vlib_buffer_chain_increase_length (first, last, len);
512 /* Copy data to the end of the packet and increases its length.
513 * Allocates additional buffers from the free list if necessary.
514 * Returns the number of copied bytes.
515 * 'last' value is modified whenever new buffers are allocated and
516 * chained and points to the last buffer in the chain. */
518 vlib_buffer_chain_append_data_with_alloc (vlib_main_t * vm,
520 vlib_buffer_t * first,
521 vlib_buffer_t ** last,
522 void *data, u16 data_len);
523 void vlib_buffer_chain_validate (vlib_main_t * vm, vlib_buffer_t * first);
525 format_function_t format_vlib_buffer, format_vlib_buffer_and_data,
526 format_vlib_buffer_contents;
530 /* Vector of packet data. */
533 /* Note: the next three fields are unused if DPDK == 1 */
535 /* Number of buffers to allocate in each call to physmem
537 u32 min_n_buffers_each_physmem_alloc;
539 /* Buffer free list for this template. */
543 } vlib_packet_template_t;
545 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
546 vlib_packet_template_t * t);
548 void vlib_packet_template_init (vlib_main_t * vm,
549 vlib_packet_template_t * t,
551 uword n_packet_data_bytes,
552 uword min_n_buffers_each_physmem_alloc,
555 void *vlib_packet_template_get_packet (vlib_main_t * vm,
556 vlib_packet_template_t * t,
/* Release the template's packet data vector. */
560 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
562 vec_free (t->packet_data);
/* Total bytes still available to the unserializer: the unread remainder
   of the current stream buffer, plus every chained buffer hanging off
   last_buffer (if set, i.e. != ~0), plus each chain still queued in the
   rx buffer fifo. */
566 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
568 serialize_stream_t *s = &m->stream;
569 vlib_serialize_buffer_main_t *sm
570 = uword_to_pointer (m->stream.data_function_opaque,
571 vlib_serialize_buffer_main_t *);
572 vlib_main_t *vm = sm->vlib_main;
575 n = s->n_buffer_bytes - s->current_buffer_index;
576 if (sm->last_buffer != ~0)
578 vlib_buffer_t *b = vlib_get_buffer (vm, sm->last_buffer);
579 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
581 b = vlib_get_buffer (vm, b->next_buffer);
582 n += b->current_length;
587 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
588 n += vlib_buffer_index_length_in_chain (vm, f[0]);
598 vlib_copy_unit_t i[sizeof (vlib_buffer_t) / sizeof (vlib_copy_unit_t)];
602 /* Set a buffer quickly into "uninitialized" state. We want this to
603 be extremely cheap and arrange for all fields that need to be
604 initialized to be in the first 128 bits of the buffer. */
/* Cheaply stamp a buffer back to its free-list template: copy the first
   16 bytes (current_data through free_list_index) with at most three
   wide vlib_copy_unit_t stores. Compile-time-ish ASSERTs verify the
   cache-line layout of vlib_buffer_t, that the template belongs to this
   free list, and (via the _() field checks) that the copy round-tripped. */
606 vlib_buffer_init_for_free_list (vlib_buffer_t * _dst,
607 vlib_buffer_free_list_t * fl)
609 vlib_buffer_union_t *dst = (vlib_buffer_union_t *) _dst;
610 vlib_buffer_union_t *src =
611 (vlib_buffer_union_t *) & fl->buffer_init_template;
613 /* Make sure vlib_buffer_t is cacheline aligned and sized */
614 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline0) == 0);
615 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline1) ==
616 CLIB_CACHE_LINE_BYTES);
617 ASSERT (STRUCT_OFFSET_OF (vlib_buffer_t, cacheline2) ==
618 CLIB_CACHE_LINE_BYTES * 2);
620 /* Make sure buffer template is sane. */
621 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
623 /* Copy template from src->current_data thru src->free_list_index */
624 dst->i[0] = src->i[0];
625 if (1 * sizeof (dst->i[0]) < 16)
626 dst->i[1] = src->i[1];
627 if (2 * sizeof (dst->i[0]) < 16)
628 dst->i[2] = src->i[2];
630 /* Make sure it really worked. */
631 #define _(f) ASSERT (dst->b.f == src->b.f)
637 ASSERT (dst->b.total_length_not_including_first_buffer == 0);
/* Two-buffer variant of vlib_buffer_init_for_free_list: stamp the same
   free-list template into dst0 and dst1 in one pass so the template
   loads are shared between both stores. */
641 vlib_buffer_init_two_for_free_list (vlib_buffer_t * _dst0,
642 vlib_buffer_t * _dst1,
643 vlib_buffer_free_list_t * fl)
645 vlib_buffer_union_t *dst0 = (vlib_buffer_union_t *) _dst0;
646 vlib_buffer_union_t *dst1 = (vlib_buffer_union_t *) _dst1;
647 vlib_buffer_union_t *src =
648 (vlib_buffer_union_t *) & fl->buffer_init_template;
650 /* Make sure buffer template is sane. */
651 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
653 /* Copy template from src->current_data thru src->free_list_index */
654 dst0->i[0] = dst1->i[0] = src->i[0];
655 if (1 * sizeof (dst0->i[0]) < 16)
656 dst0->i[1] = dst1->i[1] = src->i[1];
657 if (2 * sizeof (dst0->i[0]) < 16)
658 dst0->i[2] = dst1->i[2] = src->i[2];
660 /* Make sure it really worked. */
661 #define _(f) ASSERT (dst0->b.f == src->b.f && dst1->b.f == src->b.f)
667 ASSERT (dst0->b.total_length_not_including_first_buffer == 0);
668 ASSERT (dst1->b.total_length_not_including_first_buffer == 0);
672 u32 *vlib_buffer_state_validation_lock;
673 uword *vlib_buffer_state_validation_hash;
674 void *vlib_buffer_state_heap;
/* Debug check that buffer b is in the 'expected' (busy/free) state.
   Serialized by a test-and-set spin lock; runs on the dedicated
   validation heap so hash updates never allocate from the heap whose
   buffers are under test. An unknown buffer is recorded as 'expected';
   a mismatch emits a clib_warning identifying the buffer and states. */
678 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
684 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
686 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
689 p = hash_get (vlib_buffer_state_validation_hash, b);
691 /* If we don't know about b, declare it to be in the expected state */
694 hash_set (vlib_buffer_state_validation_hash, b, expected);
698 if (p[0] != expected)
702 vlib_main_t *vm = &vlib_global_main;
706 bi = vlib_get_buffer_index (vm, b);
708 clib_mem_set_heap (oldheap);
709 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
710 vlib_time_now (vm), bi,
711 p[0] ? "busy" : "free", expected ? "busy" : "free");
/* Release the spin lock after a full barrier, then restore the heap. */
715 CLIB_MEMORY_BARRIER ();
716 *vlib_buffer_state_validation_lock = 0;
717 clib_mem_set_heap (oldheap);
/* Unconditionally record b's state as 'expected', using the same
   spin-lock / private-heap discipline as vlib_validate_buffer_in_use. */
722 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
727 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
729 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
732 hash_set (vlib_buffer_state_validation_hash, b, expected);
734 CLIB_MEMORY_BARRIER ();
735 *vlib_buffer_state_validation_lock = 0;
736 clib_mem_set_heap (oldheap);
740 #endif /* included_vlib_buffer_funcs_h */
743 * fd.io coding-style-patch-verification: ON
746 * eval: (c-set-style "gnu")