2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * pg_input.c: buffer generator input
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #include <vlib/vlib.h>
41 #include <vnet/pg/pg.h>
42 #include <vnet/vnet.h>
43 #include <vnet/ethernet/ethernet.h>
44 #include <vnet/feature/feature.h>
45 #include <vnet/devices/devices.h>
/* Check that a buffer's bytes match the stream's fixed packet-data
   template wherever the fixed-data mask has bits set; on a mismatch,
   logs the buffer, the offending index, and hex dumps of actual,
   mask, and expected data.
   NOTE(review): this extract is missing interior lines (local
   declarations, braces, return statements) -- consult the complete
   upstream file before editing. */
48 validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
49 u32 data_offset, u32 n_bytes)
55 pd = s->fixed_packet_data + data_offset;
56 pm = s->fixed_packet_data_mask + data_offset;
/* Clamp n_bytes so the comparison never reads past the template. */
58 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
59 n_bytes = (pd < vec_end (s->fixed_packet_data)
60 ? vec_end (s->fixed_packet_data) - pd : 0);
/* Compare only masked bits of each byte against the template. */
62 for (i = 0; i < n_bytes; i++)
63 if ((bd[i] & pm[i]) != pd[i])
69 clib_warning ("buffer %U", format_vnet_buffer, b);
70 clib_warning ("differ at index %d", i);
71 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
72 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
73 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
/* Convenience wrapper: validate the entire buffer against the stream
   template starting at offset 0 for s->buffer_bytes bytes. */
78 validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
80 return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
/* set_1: store value v0 at unaligned address a0 with field width
   n_bits (8/16/32/64 bits), converting host -> network byte order
   when is_net_byte_order is set.  v_min/v_max are used only for the
   range ASSERT.
   NOTE(review): the function name/brace lines are absent from this
   extract; upstream this is static_always_inline set_1 (). */
85 u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
87 ASSERT (v0 >= v_min && v0 <= v_max);
88 if (n_bits == BITS (u8))
92 else if (n_bits == BITS (u16))
94 if (is_net_byte_order)
95 v0 = clib_host_to_net_u16 (v0);
96 clib_mem_unaligned (a0, u16) = v0;
98 else if (n_bits == BITS (u32))
100 if (is_net_byte_order)
101 v0 = clib_host_to_net_u32 (v0);
102 clib_mem_unaligned (a0, u32) = v0;
104 else if (n_bits == BITS (u64))
106 if (is_net_byte_order)
107 v0 = clib_host_to_net_u64 (v0);
108 clib_mem_unaligned (a0, u64) = v0;
/* set_2: dual-lane variant of set_1 -- stores v0 at a0 and v1 at a1
   in one call so the unrolled buffer loops can fill two buffers per
   iteration.  In increment mode v1 may legitimately overshoot v_max
   by one (the caller wraps it afterwards), hence the relaxed second
   ASSERT (v_max + is_increment). */
113 set_2 (void *a0, void *a1,
115 u64 v_min, u64 v_max,
116 u32 n_bits, u32 is_net_byte_order, u32 is_increment)
118 ASSERT (v0 >= v_min && v0 <= v_max);
119 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
120 if (n_bits == BITS (u8))
125 else if (n_bits == BITS (u16))
127 if (is_net_byte_order)
129 v0 = clib_host_to_net_u16 (v0);
130 v1 = clib_host_to_net_u16 (v1);
132 clib_mem_unaligned (a0, u16) = v0;
133 clib_mem_unaligned (a1, u16) = v1;
135 else if (n_bits == BITS (u32))
137 if (is_net_byte_order)
139 v0 = clib_host_to_net_u32 (v0);
140 v1 = clib_host_to_net_u32 (v1);
142 clib_mem_unaligned (a0, u32) = v0;
143 clib_mem_unaligned (a1, u32) = v1;
145 else if (n_bits == BITS (u64))
147 if (is_net_byte_order)
149 v0 = clib_host_to_net_u64 (v0);
150 v1 = clib_host_to_net_u64 (v1);
152 clib_mem_unaligned (a0, u64) = v0;
153 clib_mem_unaligned (a1, u64) = v1;
/* do_set_fixed: write the fixed value v_min into every buffer at
   byte_offset.  Main loop handles 2 buffers per iteration while
   prefetching the write targets of the next 2; a scalar tail loop
   finishes the remainder.  Debug builds re-validate each buffer. */
157 static_always_inline void
158 do_set_fixed (pg_main_t * pg,
163 u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
165 vlib_main_t *vm = vlib_get_main ();
167 while (n_buffers >= 4)
169 vlib_buffer_t *b0, *b1, *b2, *b3;
172 b0 = vlib_get_buffer (vm, buffers[0]);
173 b1 = vlib_get_buffer (vm, buffers[1]);
174 b2 = vlib_get_buffer (vm, buffers[2]);
175 b3 = vlib_get_buffer (vm, buffers[3]);
/* a0/a1 are written now; b2/b3 are prefetched for the next pass. */
179 a0 = (void *) b0 + byte_offset;
180 a1 = (void *) b1 + byte_offset;
181 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
182 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
184 set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
185 /* is_increment */ 0);
187 ASSERT (validate_buffer_data (b0, s));
188 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail: one buffer at a time. */
191 while (n_buffers > 0)
196 b0 = vlib_get_buffer (vm, buffers[0]);
200 a0 = (void *) b0 + byte_offset;
202 set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
204 ASSERT (validate_buffer_data (b0, s));
/* do_set_increment: write an incrementing value v into each buffer
   at byte_offset, wrapping back to v_min once it passes v_max.
   Returns the next value to use (u64).  When want_sum is set the
   running sum of stored values is accumulated for *sum_result
   (used by pg_generate_set_lengths for byte counters).
   NOTE(review): several interior lines (v_old setup, sum/n_buffers
   updates, final return) are missing from this extract. */
208 static_always_inline u64
209 do_set_increment (pg_main_t * pg,
215 u32 is_net_byte_order,
216 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
218 vlib_main_t *vm = vlib_get_main ();
221 ASSERT (v >= v_min && v <= v_max);
223 while (n_buffers >= 4)
225 vlib_buffer_t *b0, *b1, *b2, *b3;
229 b0 = vlib_get_buffer (vm, buffers[0]);
230 b1 = vlib_get_buffer (vm, buffers[1]);
231 b2 = vlib_get_buffer (vm, buffers[2]);
232 b3 = vlib_get_buffer (vm, buffers[3]);
236 a0 = (void *) b0 + byte_offset;
237 a1 = (void *) b1 + byte_offset;
238 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
239 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Fast path: both lanes in range; set_2 stores v_old and v_old+1. */
243 v = v > v_max ? v_min : v;
245 v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
246 /* is_increment */ 1);
249 sum += 2 * v_old + 1;
/* Slow path: the second lane wrapped past v_max; undo the pair sum
   and redo the two stores individually with wrap applied. */
251 if (PREDICT_FALSE (v_old + 1 > v_max))
254 sum -= 2 * v_old + 1;
257 set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
262 v = v > v_max ? v_min : v;
263 set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
269 ASSERT (validate_buffer_data (b0, s));
270 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail. */
273 while (n_buffers > 0)
279 b0 = vlib_get_buffer (vm, buffers[0]);
283 a0 = (void *) b0 + byte_offset;
289 v = v > v_max ? v_min : v;
291 ASSERT (v_old >= v_min && v_old <= v_max);
292 set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
294 ASSERT (validate_buffer_data (b0, s));
/* do_set_random: fill the field at byte_offset in each buffer with a
   pseudo-random value in [v_min, v_max].  Random bytes come from the
   vlib main random buffer; a power-of-2 mask (r_mask) over-covers the
   range, so up to two conditional subtractions of v_diff fold the
   draw back into range.  The _() macro fragments below consume raw
   random words typed to the field width.
   NOTE(review): the macro definitions and v0/v1 extraction lines are
   partially missing from this extract. */
303 static_always_inline void
304 do_set_random (pg_main_t * pg,
310 u32 is_net_byte_order,
311 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
313 vlib_main_t *vm = vlib_get_main ();
314 u64 v_diff = v_max - v_min + 1;
315 u64 r_mask = max_pow2 (v_diff) - 1;
/* Reserve enough random bytes for all buffers up front. */
320 random_data = clib_random_buffer_get_data
321 (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
325 while (n_buffers >= 4)
327 vlib_buffer_t *b0, *b1, *b2, *b3;
329 u64 r0 = 0, r1 = 0; /* warnings be gone */
331 b0 = vlib_get_buffer (vm, buffers[0]);
332 b1 = vlib_get_buffer (vm, buffers[1]);
333 b2 = vlib_get_buffer (vm, buffers[2]);
334 b3 = vlib_get_buffer (vm, buffers[3]);
338 a0 = (void *) b0 + byte_offset;
339 a1 = (void *) b1 + byte_offset;
340 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
341 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
348 u##n * r = random_data; \
351 random_data = r + 2; \
363 /* Add power of 2 sized random number which may be out of range. */
367 /* Twice should be enough to reduce to v_min .. v_max range. */
368 v0 = v0 > v_max ? v0 - v_diff : v0;
369 v1 = v1 > v_max ? v1 - v_diff : v1;
370 v0 = v0 > v_max ? v0 - v_diff : v0;
371 v1 = v1 > v_max ? v1 - v_diff : v1;
376 set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
377 /* is_increment */ 0);
379 ASSERT (validate_buffer_data (b0, s));
380 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail: one random draw per remaining buffer. */
383 while (n_buffers > 0)
387 u64 r0 = 0; /* warnings be gone */
389 b0 = vlib_get_buffer (vm, buffers[0]);
393 a0 = (void *) b0 + byte_offset;
400 u##n * r = random_data; \
402 random_data = r + 1; \
414 /* Add power of 2 sized random number which may be out of range. */
417 /* Twice should be enough to reduce to v_min .. v_max range. */
418 v0 = v0 > v_max ? v0 - v_diff : v0;
419 v0 = v0 > v_max ? v0 - v_diff : v0;
424 set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
426 ASSERT (validate_buffer_data (b0, s));
/* Macro fragment: read-modify-write of a network-order field --
   clears the masked bits, then ORs in the new (shifted) value. */
434 clib_mem_unaligned (a##i, t) = \
435 clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
/* setbits_1: like set_1 but for a bit-field narrower than its
   containing max_bits-wide word; preserves the unmasked bits.
   NOTE(review): name/brace lines missing from this extract. */
441 u64 v_min, u64 v_max,
442 u32 max_bits, u32 n_bits, u64 mask, u32 shift)
444 ASSERT (v0 >= v_min && v0 <= v_max);
445 if (max_bits == BITS (u8))
446 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
448 else if (max_bits == BITS (u16))
452 else if (max_bits == BITS (u32))
456 else if (max_bits == BITS (u64))
/* setbits_2: dual-lane setbits_1 for the unrolled loops; as with
   set_2, v1 may exceed v_max by one in increment mode. */
463 setbits_2 (void *a0, void *a1,
465 u64 v_min, u64 v_max,
466 u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
468 ASSERT (v0 >= v_min && v0 <= v_max);
469 ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
470 if (max_bits == BITS (u8))
472 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
473 ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
476 else if (max_bits == BITS (u16))
481 else if (max_bits == BITS (u32))
486 else if (max_bits == BITS (u64))
/* do_setbits_fixed: bit-field counterpart of do_set_fixed -- writes
   the fixed value v_min into a masked/shifted sub-field of the word
   at byte_offset in every buffer; same 2-wide unroll + prefetch +
   scalar-tail structure. */
495 static_always_inline void
496 do_setbits_fixed (pg_main_t * pg,
502 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
504 vlib_main_t *vm = vlib_get_main ();
506 while (n_buffers >= 4)
508 vlib_buffer_t *b0, *b1, *b2, *b3;
511 b0 = vlib_get_buffer (vm, buffers[0]);
512 b1 = vlib_get_buffer (vm, buffers[1]);
513 b2 = vlib_get_buffer (vm, buffers[2]);
514 b3 = vlib_get_buffer (vm, buffers[3]);
518 a0 = (void *) b0 + byte_offset;
519 a1 = (void *) b1 + byte_offset;
520 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
521 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
524 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
525 /* is_increment */ 0);
527 ASSERT (validate_buffer_data (b0, s));
528 ASSERT (validate_buffer_data (b1, s));
531 while (n_buffers > 0)
536 b0 = vlib_get_buffer (vm, buffers[0]);
540 a0 = (void *) b0 + byte_offset;
542 setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
543 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_increment: bit-field counterpart of do_set_increment --
   an incrementing value (wrapping at v_max back to v_min) written
   into the masked sub-field.  Returns the next value to use.
   Slow path redoes the pair one-at-a-time when the second lane
   would exceed v_max. */
547 static_always_inline u64
548 do_setbits_increment (pg_main_t * pg,
555 u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
557 vlib_main_t *vm = vlib_get_main ();
559 ASSERT (v >= v_min && v <= v_max);
561 while (n_buffers >= 4)
563 vlib_buffer_t *b0, *b1, *b2, *b3;
567 b0 = vlib_get_buffer (vm, buffers[0]);
568 b1 = vlib_get_buffer (vm, buffers[1]);
569 b2 = vlib_get_buffer (vm, buffers[2]);
570 b3 = vlib_get_buffer (vm, buffers[3]);
574 a0 = (void *) b0 + byte_offset;
575 a1 = (void *) b1 + byte_offset;
576 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
577 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
581 v = v > v_max ? v_min : v;
583 v_old + 0, v_old + 1,
584 v_min, v_max, max_bits, n_bits, mask, shift,
585 /* is_increment */ 1);
587 if (PREDICT_FALSE (v_old + 1 > v_max))
590 setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
593 v = v > v_max ? v_min : v;
594 setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
597 ASSERT (validate_buffer_data (b0, s));
598 ASSERT (validate_buffer_data (b1, s));
601 while (n_buffers > 0)
607 b0 = vlib_get_buffer (vm, buffers[0]);
611 a0 = (void *) b0 + byte_offset;
615 v = v > v_max ? v_min : v;
617 ASSERT (v_old >= v_min && v_old <= v_max);
618 setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
620 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_random: bit-field counterpart of do_set_random --
   pseudo-random values folded into [v_min, v_max] by up to two
   subtractions of v_diff, then merged into the masked sub-field.
   NOTE(review): the _() macro bodies and v0/v1 extraction lines are
   partially missing from this extract. */
626 static_always_inline void
627 do_setbits_random (pg_main_t * pg,
633 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
635 vlib_main_t *vm = vlib_get_main ();
636 u64 v_diff = v_max - v_min + 1;
637 u64 r_mask = max_pow2 (v_diff) - 1;
/* Reserve random bytes for all buffers; field width is max_bits. */
641 random_data = clib_random_buffer_get_data
642 (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
645 while (n_buffers >= 4)
647 vlib_buffer_t *b0, *b1, *b2, *b3;
649 u64 r0 = 0, r1 = 0; /* warnings be gone */
651 b0 = vlib_get_buffer (vm, buffers[0]);
652 b1 = vlib_get_buffer (vm, buffers[1]);
653 b2 = vlib_get_buffer (vm, buffers[2]);
654 b3 = vlib_get_buffer (vm, buffers[3]);
658 a0 = (void *) b0 + byte_offset;
659 a1 = (void *) b1 + byte_offset;
660 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
661 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
668 u##n * r = random_data; \
671 random_data = r + 2; \
683 /* Add power of 2 sized random number which may be out of range. */
687 /* Twice should be enough to reduce to v_min .. v_max range. */
688 v0 = v0 > v_max ? v0 - v_diff : v0;
689 v1 = v1 > v_max ? v1 - v_diff : v1;
690 v0 = v0 > v_max ? v0 - v_diff : v0;
691 v1 = v1 > v_max ? v1 - v_diff : v1;
693 setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
694 /* is_increment */ 0);
696 ASSERT (validate_buffer_data (b0, s));
697 ASSERT (validate_buffer_data (b1, s));
700 while (n_buffers > 0)
704 u64 r0 = 0; /* warnings be gone */
706 b0 = vlib_get_buffer (vm, buffers[0]);
710 a0 = (void *) b0 + byte_offset;
717 u##n * r = random_data; \
719 random_data = r + 1; \
731 /* Add power of 2 sized random number which may be out of range. */
734 /* Twice should be enough to reduce to v_min .. v_max range. */
735 v0 = v0 > v_max ? v0 - v_diff : v0;
736 v0 = v0 > v_max ? v0 - v_diff : v0;
738 setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
740 ASSERT (validate_buffer_data (b0, s));
/* do_it: top-level edit dispatcher.  Decomposes the edit's bit range
   [lo_bit, hi_bit) into a byte offset (l0) plus sub-byte positions
   (l1, h1).  If the edit is byte-aligned on both ends, the do_set_*
   family is used directly; otherwise a mask and shift are computed
   and the do_setbits_* family performs read-modify-write stores.
   Returns the updated increment value for PG_EDIT_INCREMENT edits.
   NOTE(review): the dispatch macro invocations and several closing
   lines are missing from this extract. */
745 do_it (pg_main_t * pg,
749 u32 lo_bit, u32 hi_bit,
750 u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
752 u32 max_bits, l0, l1, h1, start_bit;
/* Degenerate range collapses to a fixed edit. */
755 edit_type = PG_EDIT_FIXED;
757 l0 = lo_bit / BITS (u8);
758 l1 = lo_bit % BITS (u8);
759 h1 = hi_bit % BITS (u8);
761 start_bit = l0 * BITS (u8);
763 max_bits = hi_bit - start_bit;
764 ASSERT (max_bits <= 64);
768 if (edit_type == PG_EDIT_INCREMENT) \
769 v = do_set_increment (pg, s, buffers, n_buffers, \
772 /* is_net_byte_order */ 1, \
773 /* want sum */ 0, 0, \
776 else if (edit_type == PG_EDIT_RANDOM) \
777 do_set_random (pg, s, buffers, n_buffers, \
780 /* is_net_byte_order */ 1, \
781 /* want sum */ 0, 0, \
783 else /* edit_type == PG_EDIT_FIXED */ \
784 do_set_fixed (pg, s, buffers, n_buffers, \
787 /* is_net_byte_order */ 1, \
/* Byte-aligned edits use the plain stores... */
791 if (l1 == 0 && h1 == 0)
/* ...otherwise build mask/shift for a sub-word bit-field. */
807 u32 n_bits = max_bits;
809 max_bits = clib_max (max_pow2 (n_bits), 8);
811 mask = ((u64) 1 << (u64) n_bits) - 1;
812 mask &= ~(((u64) 1 << (u64) shift) - 1);
814 mask <<= max_bits - n_bits;
815 shift += max_bits - n_bits;
821 if (edit_type == PG_EDIT_INCREMENT) \
822 v = do_setbits_increment (pg, s, buffers, n_buffers, \
823 BITS (u##n), n_bits, \
824 l0, v_min, v_max, v, \
826 else if (edit_type == PG_EDIT_RANDOM) \
827 do_setbits_random (pg, s, buffers, n_buffers, \
828 BITS (u##n), n_bits, \
831 else /* edit_type == PG_EDIT_FIXED */ \
832 do_setbits_fixed (pg, s, buffers, n_buffers, \
833 BITS (u##n), n_bits, \
/* pg_generate_set_lengths: set vlib_buffer_t.current_length on every
   generated buffer according to the stream's packet-size edit type
   (increment / random / fixed), accumulating the total byte count in
   length_sum, then credit the RX interface combined counter with
   (n_buffers packets, length_sum bytes) on the current thread. */
852 pg_generate_set_lengths (pg_main_t * pg,
853 pg_stream_t * s, u32 * buffers, u32 n_buffers)
855 u64 v_min, v_max, length_sum;
856 pg_edit_type_t edit_type;
858 v_min = s->min_packet_bytes;
859 v_max = s->max_packet_bytes;
860 edit_type = s->packet_size_edit_type;
862 if (edit_type == PG_EDIT_INCREMENT)
863 s->last_increment_packet_size
864 = do_set_increment (pg, s, buffers, n_buffers,
865 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
866 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
867 /* is_net_byte_order */ 0,
868 /* want sum */ 1, &length_sum,
869 v_min, v_max, s->last_increment_packet_size);
871 else if (edit_type == PG_EDIT_RANDOM)
872 do_set_random (pg, s, buffers, n_buffers,
873 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
874 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
875 /* is_net_byte_order */ 0,
876 /* want sum */ 1, &length_sum,
879 else /* edit_type == PG_EDIT_FIXED */
881 do_set_fixed (pg, s, buffers, n_buffers,
882 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
883 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
884 /* is_net_byte_order */ 0,
/* Fixed size: every packet is v_min bytes. */
886 length_sum = v_min * n_buffers;
890 vnet_main_t *vnm = vnet_get_main ();
891 vnet_interface_main_t *im = &vnm->interface_main;
892 vnet_sw_interface_t *si =
893 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
895 vlib_increment_combined_counter (im->combined_sw_if_counters
896 + VNET_INTERFACE_COUNTER_RX,
897 vlib_get_thread_index (),
898 si->sw_if_index, n_buffers, length_sum);
/* pg_generate_fix_multi_buffer_lengths: for streams whose packets
   span multiple chained buffers, walk each chain, cap each buffer's
   current_length at s->buffer_bytes, set/clear NEXT_PRESENT flags,
   and free any trailing buffers the (possibly shorter) packet does
   not need. */
904 pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
906 u32 * buffers, u32 n_buffers)
908 vlib_main_t *vm = vlib_get_main ();
909 pg_buffer_index_t *pbi;
/* Function-static scratch vector, reused across calls.
   NOTE(review): presumably safe because pg generation runs on one
   thread per stream -- confirm against upstream threading model. */
911 static u32 *unused_buffers = 0;
913 while (n_buffers > 0)
919 b = vlib_get_buffer (vm, bi);
921 /* Current length here is length of whole packet. */
922 n_bytes_left = b->current_length;
924 pbi = s->buffer_indices;
/* Each buffer in the chain carries at most s->buffer_bytes. */
927 uword n = clib_min (n_bytes_left, s->buffer_bytes);
929 b->current_length = n;
931 if (n_bytes_left > 0)
932 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
934 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
936 /* Return unused buffers to fifos. */
938 vec_add1 (unused_buffers, bi);
941 if (pbi >= vec_end (s->buffer_indices))
945 b = vlib_get_buffer (vm, bi);
947 ASSERT (n_bytes_left == 0);
/* Free collected surplus buffers in one batch; reset vector length
   without freeing its storage so it can be reused. */
953 if (vec_len (unused_buffers) > 0)
955 vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
956 _vec_len (unused_buffers) = 0;
/* pg_generate_edit: apply every non-fixed edit (increment / random)
   of the stream to the freshly generated buffers via do_it, then run
   the per-group edit functions (e.g. to finish IP lengths and
   checksums) in reverse group order so inner headers are finalized
   before outer ones. */
961 pg_generate_edit (pg_main_t * pg,
962 pg_stream_t * s, u32 * buffers, u32 n_buffers)
966 vec_foreach (e, s->non_fixed_edits)
971 case PG_EDIT_INCREMENT:
976 v_min = pg_edit_get_value (e, PG_EDIT_LO);
977 v_max = pg_edit_get_value (e, PG_EDIT_HI);
/* Translate the edit's lsb offset into absolute bit positions
   within vlib_buffer_t.data. */
979 hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
980 + BITS (u8) + e->lsb_bit_offset);
981 lo_bit = hi_bit - e->n_bits;
983 e->last_increment_value
984 = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
985 e->last_increment_value, e->type);
989 case PG_EDIT_UNSPECIFIED:
993 /* Should not be any fixed edits left. */
999 /* Call any edit functions to e.g. completely IP lengths, checksums, ... */
1002 for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1004 pg_edit_group_t *g = s->edit_groups + i;
1005 if (g->edit_function)
1006 g->edit_function (pg, s, g, buffers, n_buffers);
/* pg_set_next_buffer_pointers: chain each buffer in `buffers` to the
   corresponding buffer in `next_buffers` by setting next_buffer and
   the NEXT_PRESENT flag.  2-wide unrolled loop with prefetch, then a
   scalar tail. */
1012 pg_set_next_buffer_pointers (pg_main_t * pg,
1014 u32 * buffers, u32 * next_buffers, u32 n_buffers)
1016 vlib_main_t *vm = vlib_get_main ();
1018 while (n_buffers >= 4)
1021 vlib_buffer_t *b0, *b1;
1023 b0 = vlib_get_buffer (vm, buffers[0]);
1024 b1 = vlib_get_buffer (vm, buffers[1]);
1025 ni0 = next_buffers[0];
1026 ni1 = next_buffers[1];
1028 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1029 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1031 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1032 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1033 b0->next_buffer = ni0;
1034 b1->next_buffer = ni1;
1041 while (n_buffers > 0)
1046 b0 = vlib_get_buffer (vm, buffers[0]);
1047 ni0 = next_buffers[0];
1052 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1053 b0->next_buffer = ni0;
/* init_replay_buffers_inline: initialize buffers from the stream's
   recorded replay packet templates, cycling through the templates
   (index i wraps at l).  Sets RX sw_if_index, clears TX sw_if_index,
   stamps the replay timestamp, and copies the template slice
   [data_offset, data_offset + n_data) clamped to the template
   length. */
1057 static_always_inline void
1058 init_replay_buffers_inline (vlib_main_t * vm,
1061 u32 n_buffers, u32 data_offset, u32 n_data)
1063 u32 n_left, *b, i, l;
1067 i = s->current_replay_packet_index;
1068 l = vec_len (s->replay_packet_templates);
1080 b0 = vlib_get_buffer (vm, bi0);
1082 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1083 /* was s->sw_if_index[VLIB_TX]; */
1084 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1086 d0 = vec_elt (s->replay_packet_templates, i);
1087 vnet_buffer2 (b0)->pg_replay_timestamp = s->replay_packet_timestamps[i];
/* Clamp the copy length to what remains of the template. */
1090 if (data_offset + n_data >= vec_len (d0))
1091 n0 = vec_len (d0) > data_offset ? vec_len (d0) - data_offset : 0;
1093 b0->current_length = n0;
1095 clib_memcpy_fast (b0->data, d0 + data_offset, n0);
/* Advance to the next template, wrapping around. */
1096 i = i + 1 == l ? 0 : i + 1;
/* init_buffers_inline: initialize buffers from the stream's fixed
   packet data (or delegate to the replay path when replay templates
   exist).  Sets RX/TX sw_if_index fields and copies the template
   slice into each buffer's data; 2-wide unrolled with prefetch, then
   a scalar tail.  Debug builds validate the written data. */
1100 static_always_inline void
1101 init_buffers_inline (vlib_main_t * vm,
1104 u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
/* Replay streams take a different initialization path entirely. */
1109 if (vec_len (s->replay_packet_templates) > 0)
1110 return init_replay_buffers_inline (vm, s, buffers, n_buffers, data_offset,
1113 data = s->fixed_packet_data + data_offset;
1114 mask = s->fixed_packet_data_mask + data_offset;
/* Clamp n_data so copies never run past the fixed template. */
1115 if (data + n_data >= vec_end (s->fixed_packet_data))
1116 n_data = (data < vec_end (s->fixed_packet_data)
1117 ? vec_end (s->fixed_packet_data) - data : 0);
1120 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1121 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1130 vlib_buffer_t *b0, *b1;
1132 /* Prefetch next iteration. */
1133 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1134 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1141 b0 = vlib_get_buffer (vm, bi0);
1142 b1 = vlib_get_buffer (vm, bi1);
1144 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1145 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1147 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1148 vnet_buffer (b1)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1152 clib_memcpy_fast (b0->data, data, n_data);
1153 clib_memcpy_fast (b1->data, data, n_data);
1157 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1158 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
/* Scalar tail. */
1171 b0 = vlib_get_buffer (vm, bi0);
1172 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1173 /* s->sw_if_index[VLIB_TX]; */
1174 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1177 clib_memcpy_fast (b0->data, data, n_data);
1179 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
/* pg_stream_fill_helper: allocate and initialize up to n_alloc
   buffers for one buffer-index slot of the stream.  Initializes the
   buffer contents at this slot's data offset, chains them to
   next_buffers when present, and -- only for the slot that starts the
   packet -- applies lengths/edits (generated streams) or advances the
   replay index and bumps RX counters (replay streams).  Returns the
   number of buffers actually produced. */
1184 pg_stream_fill_helper (pg_main_t * pg,
1186 pg_buffer_index_t * bi,
1187 u32 * buffers, u32 * next_buffers, u32 n_alloc)
1189 vlib_main_t *vm = vlib_get_main ();
1190 uword is_start_of_packet = bi == s->buffer_indices;
1193 n_allocated = vlib_buffer_alloc_from_free_list (vm,
1196 bi->free_list_index);
1197 if (n_allocated == 0)
1201 * We can't assume we got all the buffers we asked for...
1202 * This never worked until recently.
/* Proceed with however many buffers the allocator gave us. */
1204 n_alloc = n_allocated;
1206 /* Reinitialize buffers */
1210 n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1215 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1217 if (is_start_of_packet)
1219 if (vec_len (s->replay_packet_templates) > 0)
1221 vnet_main_t *vnm = vnet_get_main ();
1222 vnet_interface_main_t *im = &vnm->interface_main;
1223 vnet_sw_interface_t *si =
1224 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
/* Replay path: count actual chained lengths for the RX counter. */
1227 for (i = 0; i < n_alloc; i++)
1228 l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1229 vlib_increment_combined_counter (im->combined_sw_if_counters
1230 + VNET_INTERFACE_COUNTER_RX,
1231 vlib_get_thread_index (),
1232 si->sw_if_index, n_alloc, l);
1233 s->current_replay_packet_index += n_alloc;
1234 s->current_replay_packet_index %=
1235 vec_len (s->replay_packet_templates);
/* Generated path: set lengths, fix chains, apply edits. */
1239 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1240 if (vec_len (s->buffer_indices) > 1)
1241 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1243 pg_generate_edit (pg, s, buffers, n_alloc);
/* pg_stream_fill: ensure the stream's buffer fifos hold at least
   n_buffers ready packets.  Computes how many more to allocate
   (rounded up to a frame, capped by the stream's packet limit),
   resizes fifos as needed, and fills each buffer-index fifo from the
   last chain slot toward the first so next-buffer pointers can be
   linked as the chain is built.  Handles fifo wrap-around by filling
   in two segments.  Returns the number of packets now in the fifo. */
1251 pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1253 pg_buffer_index_t *bi;
1254 word i, n_in_fifo, n_alloc, n_free, n_added;
1255 u32 *tail, *start, *end, *last_tail, *last_start;
1257 bi = s->buffer_indices;
1259 n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
1260 if (n_in_fifo >= n_buffers)
1263 n_alloc = n_buffers - n_in_fifo;
1265 /* Round up, but never generate more than limit. */
1266 n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1268 if (s->n_packets_limit > 0
1269 && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1271 n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1276 /* All buffer fifos should have the same size. */
1280 vec_foreach (bi, s->buffer_indices)
1282 e = clib_fifo_elts (bi->buffer_fifo);
1283 if (bi == s->buffer_indices)
1289 last_tail = last_start = 0;
/* Fill from the last chain slot backwards so each slot can link its
   buffers to the slot filled just before it (last_tail/last_start). */
1292 for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1294 bi = vec_elt_at_index (s->buffer_indices, i);
1296 n_free = clib_fifo_free_elts (bi->buffer_fifo);
1297 if (n_free < n_alloc)
1298 clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1300 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1301 start = bi->buffer_fifo;
1302 end = clib_fifo_end (bi->buffer_fifo);
/* Contiguous case: one helper call covers the whole allocation. */
1304 if (tail + n_alloc <= end)
1307 pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
/* Wrapped case: fill to end of fifo storage, then from the start. */
1311 u32 n = clib_min (end - tail, n_alloc);
1312 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1314 if (n_added == n && n_alloc > n_added)
1316 n_added += pg_stream_fill_helper
1317 (pg, s, bi, start, last_start, n_alloc - n_added);
/* Allocation fell short: give back the unused tail slots. */
1321 if (PREDICT_FALSE (n_added < n_alloc))
1322 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1327 /* Verify that pkts in the fifo are properly allocated */
1330 return n_in_fifo + n_added;
/* Trace record fragment: embeds a copy of the vlib_buffer_t whose
   pre_data area is reused to hold the start of the packet payload
   for display by format_pg_input_trace. */
1340 /* Use pre data for packet data. */
1341 vlib_buffer_t buffer;
/* format_pg_input_trace: CLI/trace formatter for a pg_input_trace_t.
   Prints the stream name (or raw index if the stream no longer
   exists), packet length, sw_if_index, the captured buffer metadata,
   and the payload -- formatted by the stream's node-specific buffer
   formatter when available, otherwise as hex bytes. */
1345 format_pg_input_trace (u8 * s, va_list * va)
1347 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
1348 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1349 pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1350 pg_main_t *pg = &pg_main;
1351 pg_stream_t *stream;
1353 u32 indent = format_get_indent (s);
/* The stream may have been deleted since the trace was captured. */
1356 if (!pool_is_free_index (pg->streams, t->stream_index))
1357 stream = pool_elt_at_index (pg->streams, t->stream_index);
1360 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1362 s = format (s, "stream %d", t->stream_index);
1364 s = format (s, ", %d bytes", t->packet_length);
1365 s = format (s, ", %d sw_if_index", t->sw_if_index);
1367 s = format (s, "\n%U%U",
1368 format_white_space, indent, format_vnet_buffer, &t->buffer);
1370 s = format (s, "\n%U", format_white_space, indent);
1374 n = vlib_get_node (vm, stream->node_index);
1376 if (n && n->format_buffer)
1377 s = format (s, "%U", n->format_buffer,
1378 t->buffer.pre_data, sizeof (t->buffer.pre_data));
1380 s = format (s, "%U",
1381 format_hex_bytes, t->buffer.pre_data,
1382 ARRAY_LEN (t->buffer.pre_data));
/* pg_input_trace: record trace entries for n_buffers generated
   packets.  For each buffer: mark it traced (following the chain),
   then snapshot stream index, chained packet length, RX sw_if_index,
   the buffer metadata (excluding pre_data), and the first payload
   bytes into pre_data for later display.  2-wide unrolled loop plus
   scalar tail. */
1387 pg_input_trace (pg_main_t * pg,
1388 vlib_node_runtime_t * node,
1389 pg_stream_t * s, u32 * buffers, u32 n_buffers)
1391 vlib_main_t *vm = vlib_get_main ();
1392 u32 *b, n_left, stream_index, next_index;
1396 stream_index = s - pg->streams;
1397 next_index = s->next_index;
1402 vlib_buffer_t *b0, *b1;
1403 pg_input_trace_t *t0, *t1;
1410 b0 = vlib_get_buffer (vm, bi0);
1411 b1 = vlib_get_buffer (vm, bi1);
1413 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1414 vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1416 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1417 t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1419 t0->stream_index = stream_index;
1420 t1->stream_index = stream_index;
1422 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1423 t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1425 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1426 t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
/* Copy buffer metadata but not its pre_data area... */
1428 clib_memcpy_fast (&t0->buffer, b0,
1429 sizeof (b0[0]) - sizeof (b0->pre_data));
1430 clib_memcpy_fast (&t1->buffer, b1,
1431 sizeof (b1[0]) - sizeof (b1->pre_data));
/* ...then reuse pre_data to hold the start of the payload. */
1433 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1434 sizeof (t0->buffer.pre_data));
1435 clib_memcpy_fast (t1->buffer.pre_data, b1->data,
1436 sizeof (t1->buffer.pre_data));
/* Scalar tail: same capture, one buffer at a time. */
1443 pg_input_trace_t *t0;
1449 b0 = vlib_get_buffer (vm, bi0);
1451 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1452 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1454 t0->stream_index = stream_index;
1455 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1456 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1457 clib_memcpy_fast (&t0->buffer, b0,
1458 sizeof (b0[0]) - sizeof (b0->pre_data));
1459 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1460 sizeof (t0->buffer.pre_data));
/* pg_generate_packets: move up to n_packets_to_generate ready
   packets from the stream's head fifo into next-node frames.
   Ensures the fifo is stocked via pg_stream_fill, resolves the
   device-input feature arc configuration once per call, then loops
   building frames: the ethernet-input fast path gets a fresh frame
   stamped with SINGLE_SW_IF_IDX and the pg interface's sw/hw
   if-indices in its scalar args.  Handles fifo wrap-around when
   copying buffer indices, advances every chain fifo in lockstep,
   applies feature-arc config to each buffer when enabled, and traces
   up to the node's remaining trace quota.  Returns packets sent. */
1465 pg_generate_packets (vlib_node_runtime_t * node,
1467 pg_stream_t * s, uword n_packets_to_generate)
1469 vlib_main_t *vm = vlib_get_main ();
1470 u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1471 uword n_packets_generated;
1472 pg_buffer_index_t *bi, *bi0;
1473 u32 next_index = s->next_index;
1474 vnet_feature_main_t *fm = &feature_main;
1475 vnet_feature_config_main_t *cm;
1476 u8 feature_arc_index = fm->device_input_feature_arc_index;
1477 cm = &fm->feature_config_mains[feature_arc_index];
1478 u32 current_config_index = ~(u32) 0;
1481 bi0 = s->buffer_indices;
1483 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1484 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1485 n_packets_generated = 0;
/* Resolve device-input feature config once if the arc is enabled on
   this stream's RX interface. */
1488 (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX]))
1490 current_config_index =
1491 vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1492 vnet_get_config_data (&cm->config_main, &current_config_index,
1496 while (n_packets_to_generate > 0)
1498 u32 *head, *start, *end;
1500 if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1502 vlib_next_frame_t *nf;
1504 ethernet_input_frame_t *ef;
1506 vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1507 nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
1508 f = vlib_get_frame (vm, nf->frame_index);
1509 f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1511 ef = vlib_frame_scalar_args (f);
1512 pi = pool_elt_at_index (pg->interfaces, s->pg_if_index);
1513 ef->sw_if_index = pi->sw_if_index;
1514 ef->hw_if_index = pi->hw_if_index;
1517 vlib_get_next_frame (vm, node, next_index, to_next, n_left);
1519 n_this_frame = n_packets_to_generate;
1520 if (n_this_frame > n_left)
1521 n_this_frame = n_left;
1523 start = bi0->buffer_fifo;
1524 end = clib_fifo_end (bi0->buffer_fifo);
1525 head = clib_fifo_head (bi0->buffer_fifo);
/* Copy indices in one or two pieces depending on fifo wrap. */
1527 if (head + n_this_frame <= end)
1528 vlib_copy_buffers (to_next, head, n_this_frame);
1532 vlib_copy_buffers (to_next + 0, head, n);
1533 vlib_copy_buffers (to_next + n, start, n_this_frame - n);
/* Advance all chain fifos together so slots stay aligned. */
1536 vec_foreach (bi, s->buffer_indices)
1537 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1539 if (current_config_index != ~(u32) 0)
1540 for (i = 0; i < n_this_frame; i++)
1543 b = vlib_get_buffer (vm, to_next[i]);
1544 b->current_config_index = current_config_index;
1545 vnet_buffer (b)->feature_arc_index = feature_arc_index;
1548 n_trace = vlib_get_trace_count (vm, node);
1551 u32 n = clib_min (n_trace, n_this_frame);
1552 pg_input_trace (pg, node, s, to_next, n);
1553 vlib_set_trace_count (vm, node, n_trace - n);
1555 n_packets_to_generate -= n_this_frame;
1556 n_packets_generated += n_this_frame;
1557 n_left -= n_this_frame;
1558 vlib_put_next_frame (vm, node, next_index, n_left);
1561 return n_packets_generated;
/* pg_input_stream: per-stream dispatch from the pg input node.
   Disables the stream once its packet limit is reached, then applies
   the rate limit via a fractional packet accumulator (dt * pps),
   caps the batch at the remaining packet limit and at one frame,
   generates the packets, and returns how many were produced. */
1565 pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
1567 vlib_main_t *vm = vlib_get_main ();
1571 if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
1573 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1577 /* Apply rate limit. */
1578 time_now = vlib_time_now (vm);
1579 if (s->time_last_generate == 0)
1580 s->time_last_generate = time_now;
1582 dt = time_now - s->time_last_generate;
1583 s->time_last_generate = time_now;
1585 n_packets = VLIB_FRAME_SIZE;
1586 if (s->rate_packets_per_second > 0)
1588 s->packet_accumulator += dt * s->rate_packets_per_second;
1589 n_packets = s->packet_accumulator;
1591 /* Never allow accumulator to grow if we get behind. */
1592 s->packet_accumulator -= n_packets;
1595 /* Apply fixed limit. */
1596 if (s->n_packets_limit > 0
1597 && s->n_packets_generated + n_packets > s->n_packets_limit)
1598 n_packets = s->n_packets_limit - s->n_packets_generated;
1600 /* Generate up to one frame's worth of packets. */
1601 if (n_packets > VLIB_FRAME_SIZE)
1602 n_packets = VLIB_FRAME_SIZE;
1605 n_packets = pg_generate_packets (node, pg, s, n_packets);
1607 s->n_packets_generated += n_packets;
/* pg_input: input-node entry point.  Iterates the bitmap of streams
   enabled on this worker (workers get their own bitmap slot; thread 0
   uses index 0) and sums the packets generated per stream. */
1613 pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
1616 pg_main_t *pg = &pg_main;
1617 uword n_packets = 0;
1618 u32 worker_index = 0;
1620 if (vlib_num_workers ())
1621 worker_index = vlib_get_current_worker_index ();
1624 clib_bitmap_foreach (i, pg->enabled_streams[worker_index], ({
1625 pg_stream_t *s = vec_elt_at_index (pg->streams, i);
1626 n_packets += pg_input_stream (node, pg, s);
/* Node registration: pg-input runs as an INPUT node, sibling of
   device-input (shares its next-node arcs), starts DISABLED until a
   stream is enabled, and uses format_pg_input_trace for tracing. */
1634 VLIB_REGISTER_NODE (pg_input_node) = {
1635 .function = pg_input,
1637 .sibling_of = "device-input",
1638 .type = VLIB_NODE_TYPE_INPUT,
1640 .format_trace = format_pg_input_trace,
1642 /* Input node will be left disabled until a stream is active. */
1643 .state = VLIB_NODE_STATE_DISABLED,
1648 * fd.io coding-style-patch-verification: ON
1651 * eval: (c-set-style "gnu")