Code Review
/
vpp.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
review
|
tree
raw
|
inline
| side by side
vlib: remove unused argument
[vpp.git]
/
src
/
vlib
/
buffer_funcs.h
diff --git
a/src/vlib/buffer_funcs.h
b/src/vlib/buffer_funcs.h
index
667063c
..
ce62c8b
100644
(file)
--- a/
src/vlib/buffer_funcs.h
+++ b/
src/vlib/buffer_funcs.h
@@
-41,6
+41,7
@@
#define included_vlib_buffer_funcs_h
#include <vppinfra/hash.h>
#define included_vlib_buffer_funcs_h
#include <vppinfra/hash.h>
+#include <vppinfra/fifo.h>
/** \file
vlib buffer access methods.
/** \file
vlib buffer access methods.
@@
-97,12
+98,16
@@
vlib_get_buffers_with_offset (vlib_main_t * vm, u32 * bi, void **b, int count,
u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
/* shift and add to get vlib_buffer_t pointer */
u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
u64x4 b0 = u32x4_extend_to_u64x4 (u32x4_load_unaligned (bi));
/* shift and add to get vlib_buffer_t pointer */
u64x4_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
-#elif defined (CLIB_HAVE_VEC128)
&& defined (__x86_64__)
+#elif defined (CLIB_HAVE_VEC128)
u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
u32x4 bi4 = u32x4_load_unaligned (bi);
u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
u64x2 off = u64x2_splat (buffer_main.buffer_mem_start + offset);
u32x4 bi4 = u32x4_load_unaligned (bi);
u64x2 b0 = u32x4_extend_to_u64x2 ((u32x4) bi4);
+#if defined (__aarch64__)
+ u64x2 b1 = u32x4_extend_to_u64x2_high ((u32x4) bi4);
+#else
bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
bi4 = u32x4_shuffle (bi4, 2, 3, 0, 1);
u64x2 b1 = u32x4_extend_to_u64x2 ((u32x4) bi4);
+#endif
u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
#else
u64x2_store_unaligned ((b0 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b);
u64x2_store_unaligned ((b1 << CLIB_LOG2_CACHE_LINE_BYTES) + off, b + 2);
#else
@@
-309,16
+314,16
@@
vlib_buffer_contents (vlib_main_t * vm, u32 buffer_index, u8 * contents)
return content_len;
}
return content_len;
}
-/* Return physical address of buffer->data start. */
-always_inline u64
-vlib_get_buffer_data_physical_address (vlib_main_t * vm, u32 buffer_index)
+always_inline uword
+vlib_buffer_get_pa (vlib_main_t * vm, vlib_buffer_t * b)
{
{
- vlib_buffer_main_t *bm = &buffer_main;
- vlib_buffer_t *b = vlib_get_buffer (vm, buffer_index);
- vlib_buffer_pool_t *pool = vec_elt_at_index (bm->buffer_pools,
- b->buffer_pool_index);
+ return vlib_physmem_get_pa (vm, b->data);
+}
- return vlib_physmem_virtual_to_physical (vm, pool->physmem_region, b->data);
+always_inline uword
+vlib_buffer_get_current_pa (vlib_main_t * vm, vlib_buffer_t * b)
+{
+ return vlib_buffer_get_pa (vm, b) + b->current_data;
}
/** \brief Prefetch buffer metadata by buffer index
}
/** \brief Prefetch buffer metadata by buffer index
@@
-918,7
+923,7
@@
vlib_buffer_attach_clone (vlib_main_t * vm, vlib_buffer_t * head,
tail->total_length_not_including_first_buffer;
next_segment:
tail->total_length_not_including_first_buffer;
next_segment:
-  __sync_add_and_fetch (&tail->n_add_refs, 1);
+  clib_atomic_add_fetch (&tail->n_add_refs, 1);
if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
{
if (tail->flags & VLIB_BUFFER_NEXT_PRESENT)
{
@@
-939,9
+944,7
@@
vlib_buffer_chain_init (vlib_buffer_t * first)
/* The provided next_bi buffer index is appended to the end of the packet. */
always_inline vlib_buffer_t *
/* The provided next_bi buffer index is appended to the end of the packet. */
always_inline vlib_buffer_t *
-vlib_buffer_chain_buffer (vlib_main_t * vm,
- vlib_buffer_t * first,
- vlib_buffer_t * last, u32 next_bi)
+vlib_buffer_chain_buffer (vlib_main_t * vm, vlib_buffer_t * last, u32 next_bi)
{
vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
last->next_buffer = next_bi;
{
vlib_buffer_t *next_buffer = vlib_get_buffer (vm, next_bi);
last->next_buffer = next_bi;
@@
-1148,7
+1151,7
@@
vlib_validate_buffer_in_use (vlib_buffer_t * b, u32 expected)
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
- while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
+ while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
;
p = hash_get (vlib_buffer_state_validation_hash, b);
;
p = hash_get (vlib_buffer_state_validation_hash, b);
@@
-1191,7
+1194,7
@@
vlib_validate_buffer_set_in_use (vlib_buffer_t * b, u32 expected)
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
oldheap = clib_mem_set_heap (vlib_buffer_state_heap);
- while (__sync_lock_test_and_set (vlib_buffer_state_validation_lock, 1))
+ while (clib_atomic_test_and_set (vlib_buffer_state_validation_lock))
;
hash_set (vlib_buffer_state_validation_hash, b, expected);
;
hash_set (vlib_buffer_state_validation_hash, b, expected);