/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * buffer_funcs.h: VLIB buffer related functions/inlines
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
40 #ifndef included_vlib_buffer_funcs_h
41 #define included_vlib_buffer_funcs_h
43 #include <vppinfra/hash.h>
46 #undef always_inline // dpdk and clib use conflicting always_inline macros.
47 #include <rte_config.h>
51 #define always_inline static inline
53 #define always_inline static inline __attribute__ ((__always_inline__))
58 vlib buffer access methods.
62 /** \brief Translate buffer index into buffer pointer
64 @param vm - (vlib_main_t *) vlib main data structure pointer
65 @param buffer_index - (u32) buffer index
66 @return - (vlib_buffer_t *) buffer pointer
68 always_inline vlib_buffer_t *
69 vlib_get_buffer (vlib_main_t * vm, u32 buffer_index)
71 return vlib_physmem_at_offset (&vm->physmem_main, ((uword)buffer_index)
72 << CLIB_LOG2_CACHE_LINE_BYTES);
75 /** \brief Translate buffer pointer into buffer index
77 @param vm - (vlib_main_t *) vlib main data structure pointer
78 @param b - (void *) buffer pointer
79 @return - (u32) buffer index
82 vlib_get_buffer_index (vlib_main_t * vm, void * p)
84 uword offset = vlib_physmem_offset_of (&vm->physmem_main, p);
85 ASSERT((offset % (1<<CLIB_LOG2_CACHE_LINE_BYTES)) == 0);
86 return offset >> CLIB_LOG2_CACHE_LINE_BYTES;
89 /** \brief Get next buffer in buffer linklist, or zero for end of list.
91 @param vm - (vlib_main_t *) vlib main data structure pointer
92 @param b - (void *) buffer pointer
93 @return - (vlib_buffer_t *) next buffer, or NULL
95 always_inline vlib_buffer_t *
96 vlib_get_next_buffer (vlib_main_t * vm, vlib_buffer_t * b)
98 return (b->flags & VLIB_BUFFER_NEXT_PRESENT
99 ? vlib_get_buffer (vm, b->next_buffer)
103 uword vlib_buffer_length_in_chain_slow_path (vlib_main_t * vm, vlib_buffer_t * b_first);
105 /** \brief Get length in bytes of the buffer chain
107 @param vm - (vlib_main_t *) vlib main data structure pointer
108 @param b - (void *) buffer pointer
109 @return - (uword) length of buffer chain
112 vlib_buffer_length_in_chain (vlib_main_t * vm, vlib_buffer_t * b)
114 uword l = b->current_length + b->total_length_not_including_first_buffer;
115 if (PREDICT_FALSE ((b->flags & (VLIB_BUFFER_NEXT_PRESENT
116 | VLIB_BUFFER_TOTAL_LENGTH_VALID))
117 == VLIB_BUFFER_NEXT_PRESENT))
118 return vlib_buffer_length_in_chain_slow_path (vm, b);
122 /** \brief Get length in bytes of the buffer index buffer chain
124 @param vm - (vlib_main_t *) vlib main data structure pointer
125 @param bi - (u32) buffer index
126 @return - (uword) length of buffer chain
129 vlib_buffer_index_length_in_chain (vlib_main_t * vm, u32 bi)
131 vlib_buffer_t * b = vlib_get_buffer (vm, bi);
132 return vlib_buffer_length_in_chain (vm, b);
135 /** \brief Copy buffer contents to memory
137 @param vm - (vlib_main_t *) vlib main data structure pointer
138 @param bi - (u32) buffer index
139 @param contents - (u8 *) memory, <strong>must be large enough</strong>
140 @return - (uword) length of buffer chain
143 vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
145 uword content_len = 0;
151 b = vlib_get_buffer (vm, buffer_index);
152 l = b->current_length;
153 clib_memcpy (contents + content_len, b->data + b->current_data, l);
155 if (! (b->flags & VLIB_BUFFER_NEXT_PRESENT))
157 buffer_index = b->next_buffer;
163 /* Return physical address of buffer->data start. */
165 vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
167 return vlib_physmem_offset_to_physical (&vm->physmem_main,
168 (((uword)buffer_index) <<
169 CLIB_LOG2_CACHE_LINE_BYTES) +
170 STRUCT_OFFSET_OF (vlib_buffer_t, data));
/** \brief Prefetch buffer metadata by buffer index
    The first 64 bytes of buffer contains most header information

    @param vm - (vlib_main_t *) vlib main data structure pointer
    @param bi - (u32) buffer index
    @param type - LOAD, STORE. In most cases, STORE is the right answer
*/
/* Prefetch buffer header given index. */
#define vlib_prefetch_buffer_with_index(vm,bi,type)     \
  do {                                                  \
    vlib_buffer_t * _b = vlib_get_buffer (vm, bi);      \
    vlib_prefetch_buffer_header (_b, type);             \
  } while (0)
/* Iterate over known allocated vlib bufs. You probably do not want
 * to do this!
 @param vm the vlib_main_t
 @param bi found allocated buffer index
 @param body operation to perform on buffer index
 function executes body for each allocated buffer index
*/
#define vlib_buffer_foreach_allocated(vm,bi,body)               \
do {                                                            \
  vlib_main_t * _vmain = (vm);                                  \
  vlib_buffer_main_t * _bmain = &_vmain->buffer_main;           \
  hash_pair_t * _vbpair;                                        \
  hash_foreach_pair(_vbpair, _bmain->buffer_known_hash, ({      \
    if (VLIB_BUFFER_KNOWN_ALLOCATED == _vbpair->value[0]) {     \
      (bi) = _vbpair->key;                                      \
      (body);                                                   \
    }                                                           \
  }));                                                          \
} while (0)
/* Tracked allocation state of a buffer index (debug bookkeeping). */
typedef enum {
  /* Index is unknown. */
  VLIB_BUFFER_UNKNOWN,

  /* Index is known and free/allocated. */
  VLIB_BUFFER_KNOWN_FREE,
  VLIB_BUFFER_KNOWN_ALLOCATED,
} vlib_buffer_known_state_t;
220 always_inline vlib_buffer_known_state_t
221 vlib_buffer_is_known (vlib_main_t * vm, u32 buffer_index)
223 vlib_buffer_main_t * bm = vm->buffer_main;
224 ASSERT(os_get_cpu_number() == 0);
226 uword * p = hash_get (bm->buffer_known_hash, buffer_index);
227 return p ? p[0] : VLIB_BUFFER_UNKNOWN;
231 vlib_buffer_set_known_state (vlib_main_t * vm,
233 vlib_buffer_known_state_t state)
235 vlib_buffer_main_t * bm = vm->buffer_main;
236 ASSERT(os_get_cpu_number() == 0);
237 hash_set (bm->buffer_known_hash, buffer_index, state);
240 /* Validates sanity of a single buffer.
241 Returns format'ed vector with error message if any. */
242 u8 * vlib_validate_buffer (vlib_main_t * vm, u32 buffer_index, uword follow_chain);
244 /* Validate an array of buffers. As above. */
245 u8 * vlib_validate_buffers (vlib_main_t * vm,
247 uword next_buffer_stride,
249 vlib_buffer_known_state_t known_state,
252 #endif /* DPDK == 0 */
255 vlib_buffer_pool_create(vlib_main_t * vm, unsigned num_mbufs,
258 /** \brief Allocate buffers into supplied array
260 @param vm - (vlib_main_t *) vlib main data structure pointer
261 @param buffers - (u32 * ) buffer index array
262 @param n_buffers - (u32) number of buffers requested
263 @return - (u32) number of buffers actually allocated, may be
264 less than the number requested or zero
266 u32 vlib_buffer_alloc (vlib_main_t * vm, u32 * buffers, u32 n_buffers);
269 vlib_buffer_round_size (u32 size)
270 { return round_pow2 (size, sizeof (vlib_buffer_t)); }
272 /** \brief Allocate buffers from specific freelist into supplied array
274 @param vm - (vlib_main_t *) vlib main data structure pointer
275 @param buffers - (u32 * ) buffer index array
276 @param n_buffers - (u32) number of buffers requested
277 @return - (u32) number of buffers actually allocated, may be
278 less than the number requested or zero
280 u32 vlib_buffer_alloc_from_free_list (vlib_main_t * vm,
283 u32 free_list_index);
285 /** \brief Free buffers
286 Frees the entire buffer chain for each buffer
288 @param vm - (vlib_main_t *) vlib main data structure pointer
289 @param buffers - (u32 * ) buffer index array
290 @param n_buffers - (u32) number of buffers to free
293 void vlib_buffer_free (vlib_main_t * vm,
294 /* pointer to first buffer */
296 /* number of buffers to free */
299 /** \brief Free buffers, does not free the buffer chain for each buffer
301 @param vm - (vlib_main_t *) vlib main data structure pointer
302 @param buffers - (u32 * ) buffer index array
303 @param n_buffers - (u32) number of buffers to free
306 void vlib_buffer_free_no_next (vlib_main_t * vm,
307 /* pointer to first buffer */
309 /* number of buffers to free */
312 /** \brief Free one buffer
313 Shorthand to free a single buffer chain.
315 @param vm - (vlib_main_t *) vlib main data structure pointer
316 @param buffer_index - (u32) buffer index to free
319 vlib_buffer_free_one (vlib_main_t * vm, u32 buffer_index)
321 vlib_buffer_free (vm, &buffer_index, /* n_buffers */ 1);
324 /* Add/delete buffer free lists. */
325 u32 vlib_buffer_create_free_list (vlib_main_t * vm, u32 n_data_bytes, char * fmt, ...);
326 void vlib_buffer_delete_free_list (vlib_main_t * vm, u32 free_list_index);
328 /* Find already existing public free list with given size or create one. */
329 u32 vlib_buffer_get_or_create_free_list (vlib_main_t * vm, u32 n_data_bytes, char * fmt, ...);
331 always_inline vlib_buffer_free_list_t *
332 vlib_buffer_get_free_list (vlib_main_t * vm, u32 free_list_index)
334 vlib_buffer_main_t * bm = vm->buffer_main;
335 vlib_buffer_free_list_t * f;
337 f = pool_elt_at_index (bm->buffer_free_list_pool, free_list_index);
339 /* Sanity: indices must match. */
340 ASSERT (f->index == free_list_index);
346 vlib_buffer_free_list_buffer_size (vlib_main_t * vm, u32 free_list_index)
348 vlib_buffer_free_list_t * f = vlib_buffer_get_free_list (vm, free_list_index);
349 return f->n_data_bytes;
353 vlib_aligned_memcpy (void * _dst, void * _src, int n_bytes);
355 /* Reasonably fast buffer copy routine. */
357 vlib_copy_buffers (u32 * dst, u32 * src, u32 n)
379 vlib_physmem_alloc_aligned (vlib_main_t * vm, clib_error_t ** error,
380 uword n_bytes, uword alignment)
382 void * r = vm->os_physmem_alloc_aligned (&vm->physmem_main, n_bytes, alignment);
384 *error = clib_error_return (0, "failed to allocate %wd bytes of I/O memory", n_bytes);
390 /* By default allocate I/O memory with cache line alignment. */
392 vlib_physmem_alloc (vlib_main_t * vm, clib_error_t ** error, uword n_bytes)
393 { return vlib_physmem_alloc_aligned (vm, error, n_bytes, CLIB_CACHE_LINE_BYTES); }
396 vlib_physmem_free (vlib_main_t * vm, void * mem)
397 { return vm->os_physmem_free (mem); }
400 vlib_physmem_virtual_to_physical (vlib_main_t * vm, void * mem)
402 vlib_physmem_main_t * pm = &vm->physmem_main;
403 uword o = pointer_to_uword (mem) - pm->virtual.start;
404 return vlib_physmem_offset_to_physical (pm, o);
407 /* Append given data to end of buffer, possibly allocating new buffers. */
408 u32 vlib_buffer_add_data (vlib_main_t * vm,
411 void * data, u32 n_data_bytes);
414 * vlib_buffer_chain_* functions provide a way to create long buffers.
415 * When DPDK is enabled, the 'hidden' DPDK header is taken care of transparently.
418 /* Initializes the buffer as an empty packet with no chained buffers. */
420 vlib_buffer_chain_init(vlib_buffer_t *first)
422 first->total_length_not_including_first_buffer = 0;
423 first->current_length = 0;
424 first->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
425 first->flags |= VLIB_BUFFER_TOTAL_LENGTH_VALID;
427 struct rte_mbuf * mb = rte_mbuf_from_vlib_buffer(first);
428 rte_pktmbuf_reset(mb);
429 mb->data_off = VLIB_BUFFER_PRE_DATA_SIZE + first->current_data;
433 /* The provided next_bi buffer index is appended to the end of the packet. */
434 always_inline vlib_buffer_t *
435 vlib_buffer_chain_buffer(vlib_main_t *vm,
436 vlib_buffer_t *first,
440 vlib_buffer_t *next_buffer = vlib_get_buffer(vm, next_bi);
441 last->next_buffer = next_bi;
442 last->flags |= VLIB_BUFFER_NEXT_PRESENT;
443 next_buffer->current_length = 0;
444 next_buffer->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
446 struct rte_mbuf * mb;
447 mb = rte_mbuf_from_vlib_buffer(first);
450 mb = rte_mbuf_from_vlib_buffer(last);
451 mb->next = rte_mbuf_from_vlib_buffer(next_buffer);
453 mb = rte_mbuf_from_vlib_buffer(next_buffer);
455 mb->data_off = VLIB_BUFFER_PRE_DATA_SIZE + next_buffer->current_data;
461 /* Increases or decreases the packet length.
462 * It does not allocate or deallocate new buffers.
463 * Therefore, the added length must be compatible
464 * with the last buffer. */
466 vlib_buffer_chain_increase_length(vlib_buffer_t *first,
470 last->current_length += len;
472 first->total_length_not_including_first_buffer += len;
474 struct rte_mbuf * mb_first = rte_mbuf_from_vlib_buffer(first);
475 struct rte_mbuf * mb_last = rte_mbuf_from_vlib_buffer(last);
476 mb_first->pkt_len += len;
477 mb_last->data_len += len;
481 /* Copy data to the end of the packet and increases its length.
482 * It does not allocate new buffers.
483 * Returns the number of copied bytes. */
485 vlib_buffer_chain_append_data(vlib_main_t *vm,
487 vlib_buffer_t *first,
489 void *data, u16 data_len)
491 u32 n_buffer_bytes = vlib_buffer_free_list_buffer_size (vm, free_list_index);
492 ASSERT(n_buffer_bytes >= last->current_length + last->current_data);
493 u16 len = clib_min(data_len, n_buffer_bytes - last->current_length - last->current_data);
495 clib_memcpy(vlib_buffer_get_current (last) + last->current_length, data, len);
497 clib_memcpy(vlib_buffer_get_current (last) + last->current_length, data, len);
499 vlib_buffer_chain_increase_length(first, last, len);
503 /* Copy data to the end of the packet and increases its length.
504 * Allocates additional buffers from the free list if necessary.
505 * Returns the number of copied bytes.
506 * 'last' value is modified whenever new buffers are allocated and
507 * chained and points to the last buffer in the chain. */
509 vlib_buffer_chain_append_data_with_alloc(vlib_main_t *vm,
511 vlib_buffer_t *first,
512 vlib_buffer_t **last,
513 void * data, u16 data_len);
514 void vlib_buffer_chain_validate(vlib_main_t *vm, vlib_buffer_t *first);
516 format_function_t format_vlib_buffer, format_vlib_buffer_and_data, format_vlib_buffer_contents;
519 /* Vector of packet data. */
523 /* Number of buffers to allocate in each call to physmem
525 u32 min_n_buffers_each_physmem_alloc;
527 /* Buffer free list for this template. */
532 } vlib_packet_template_t;
534 void vlib_packet_template_get_packet_helper (vlib_main_t * vm,
535 vlib_packet_template_t * t);
537 void vlib_packet_template_init (vlib_main_t * vm,
538 vlib_packet_template_t * t,
540 uword n_packet_data_bytes,
541 uword min_n_buffers_each_physmem_alloc,
545 vlib_packet_template_get_packet (vlib_main_t * vm,
546 vlib_packet_template_t * t,
550 vlib_packet_template_free (vlib_main_t * vm, vlib_packet_template_t * t)
552 vec_free (t->packet_data);
556 unserialize_vlib_buffer_n_bytes (serialize_main_t * m)
558 serialize_stream_t * s = &m->stream;
559 vlib_serialize_buffer_main_t * sm
560 = uword_to_pointer (m->stream.data_function_opaque, vlib_serialize_buffer_main_t *);
561 vlib_main_t * vm = sm->vlib_main;
564 n = s->n_buffer_bytes - s->current_buffer_index;
565 if (sm->last_buffer != ~0)
567 vlib_buffer_t * b = vlib_get_buffer (vm, sm->last_buffer);
568 while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
570 b = vlib_get_buffer (vm, b->next_buffer);
571 n += b->current_length;
575 clib_fifo_foreach (f, sm->rx.buffer_fifo, ({
576 n += vlib_buffer_index_length_in_chain (vm, f[0]);
584 vlib_copy_unit_t i[sizeof (vlib_buffer_t) / sizeof (vlib_copy_unit_t)];
585 } vlib_buffer_union_t;
587 /* Set a buffer quickly into "uninitialized" state. We want this to
588 be extremely cheap and arrange for all fields that need to be
589 initialized to be in the first 128 bits of the buffer. */
591 vlib_buffer_init_for_free_list (vlib_buffer_t * _dst,
592 vlib_buffer_free_list_t * fl)
594 vlib_buffer_union_t * dst = (vlib_buffer_union_t *) _dst;
595 vlib_buffer_union_t * src = (vlib_buffer_union_t *) &fl->buffer_init_template;
597 /* Make sure vlib_buffer_t is cacheline aligned and sized */
598 ASSERT(STRUCT_OFFSET_OF(vlib_buffer_t, cacheline0) == 0);
599 ASSERT(STRUCT_OFFSET_OF(vlib_buffer_t, cacheline1) == CLIB_CACHE_LINE_BYTES);
600 ASSERT(STRUCT_OFFSET_OF(vlib_buffer_t, cacheline2) == CLIB_CACHE_LINE_BYTES * 2);
602 /* Make sure buffer template is sane. */
603 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
605 /* Copy template from src->current_data thru src->free_list_index */
606 dst->i[0] = src->i[0];
607 if (1 * sizeof (dst->i[0]) < 16)
608 dst->i[1] = src->i[1];
609 if (2 * sizeof (dst->i[0]) < 16)
610 dst->i[2] = src->i[2];
612 /* Make sure it really worked. */
613 #define _(f) ASSERT (dst->b.f == src->b.f)
619 ASSERT (dst->b.total_length_not_including_first_buffer == 0);
623 vlib_buffer_init_two_for_free_list (vlib_buffer_t * _dst0,
624 vlib_buffer_t * _dst1,
625 vlib_buffer_free_list_t * fl)
627 vlib_buffer_union_t * dst0 = (vlib_buffer_union_t *) _dst0;
628 vlib_buffer_union_t * dst1 = (vlib_buffer_union_t *) _dst1;
629 vlib_buffer_union_t * src = (vlib_buffer_union_t *) &fl->buffer_init_template;
631 /* Make sure buffer template is sane. */
632 ASSERT (fl->index == fl->buffer_init_template.free_list_index);
634 /* Copy template from src->current_data thru src->free_list_index */
635 dst0->i[0] = dst1->i[0] = src->i[0];
636 if (1 * sizeof (dst0->i[0]) < 16)
637 dst0->i[1] = dst1->i[1] = src->i[1];
638 if (2 * sizeof (dst0->i[0]) < 16)
639 dst0->i[2] = dst1->i[2] = src->i[2];
641 /* Make sure it really worked. */
642 #define _(f) ASSERT (dst0->b.f == src->b.f && dst1->b.f == src->b.f)
648 ASSERT (dst0->b.total_length_not_including_first_buffer == 0);
649 ASSERT (dst1->b.total_length_not_including_first_buffer == 0);
653 u32 * vlib_buffer_state_validation_lock;
654 uword * vlib_buffer_state_validation_hash;
655 void * vlib_buffer_state_heap;
659 vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
665 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
667 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
670 p = hash_get (vlib_buffer_state_validation_hash, b);
672 /* If we don't know about b, declare it to be in the expected state */
675 hash_set (vlib_buffer_state_validation_hash, b, expected);
679 if (p[0] != expected)
683 vlib_main_t * vm = &vlib_global_main;
687 bi = vlib_get_buffer_index (vm, b);
689 clib_mem_set_heap (oldheap);
690 clib_warning ("%.6f buffer %llx (%d): %s, not %s",
691 vlib_time_now(vm), bi,
692 p[0] ? "busy" : "free",
693 expected ? "busy" : "free");
697 CLIB_MEMORY_BARRIER();
698 *vlib_buffer_state_validation_lock = 0;
699 clib_mem_set_heap (oldheap);
704 vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
709 oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
711 while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
714 hash_set (vlib_buffer_state_validation_hash, b, expected);
716 CLIB_MEMORY_BARRIER();
717 *vlib_buffer_state_validation_lock = 0;
718 clib_mem_set_heap (oldheap);
722 #endif /* included_vlib_buffer_funcs_h */