2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * pg_input.c: buffer generator input
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
41 * To be honest, the packet generator needs an extreme
42 * makeover. Two key assumptions which drove the current implementation
43 * are no longer true. First, buffer managers implement a
44 * post-TX recycle list. Second, that packet generator performance
45 * is first-order important.
48 #include <vlib/vlib.h>
49 #include <vnet/pg/pg.h>
50 #include <vnet/vnet.h>
51 #include <vnet/ethernet/ethernet.h>
52 #include <vnet/feature/feature.h>
53 #include <vnet/devices/devices.h>
/* validate_buffer_data2: debug helper that compares buffer bytes against the
   stream's fixed packet template under the per-byte mask, warning with a hex
   dump of actual/mask/expected bytes on any mismatch.
   NOTE(review): interior lines (declarations, braces, return) are elided in
   this view; comments only, code left byte-identical. */
56 validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
57 u32 data_offset, u32 n_bytes)
/* pd/pm point into the stream's fixed template and mask at data_offset. */
63 pd = s->fixed_packet_data + data_offset;
64 pm = s->fixed_packet_data_mask + data_offset;
/* Clamp n_bytes so the comparison never runs past the template vector. */
66 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
67 n_bytes = (pd < vec_end (s->fixed_packet_data)
68 ? vec_end (s->fixed_packet_data) - pd : 0);
/* Byte-wise check: masked buffer byte must equal the expected template byte. */
70 for (i = 0; i < n_bytes; i++)
71 if ((bd[i] & pm[i]) != pd[i])
77 clib_warning ("buffer %U", format_vnet_buffer, b);
78 clib_warning ("differ at index %d", i);
79 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
80 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
81 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
/* validate_buffer_data: validate a whole buffer against the stream template
   (offset 0, s->buffer_bytes long) via validate_buffer_data2. */
86 validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
88 return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
/* set_1: store a single value v0 at (unaligned) address a0 as an
   8/16/32/64-bit quantity, converting to network byte order when requested.
   NOTE(review): the function's name/opening line and the u8 store branch are
   elided in this view; comments only, code left byte-identical. */
93 u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
/* v0 must lie within the caller's declared [v_min, v_max] range. */
95 ASSERT (v0 >= v_min && v0 <= v_max);
96 if (n_bits == BITS (u8))
100 else if (n_bits == BITS (u16))
102 if (is_net_byte_order)
103 v0 = clib_host_to_net_u16 (v0);
104 clib_mem_unaligned (a0, u16) = v0;
106 else if (n_bits == BITS (u32))
108 if (is_net_byte_order)
109 v0 = clib_host_to_net_u32 (v0);
110 clib_mem_unaligned (a0, u32) = v0;
112 else if (n_bits == BITS (u64))
114 if (is_net_byte_order)
115 v0 = clib_host_to_net_u64 (v0);
116 clib_mem_unaligned (a0, u64) = v0;
/* set_2: dual-buffer variant of set_1 -- store v0 at a0 and v1 at a1 as
   8/16/32/64-bit quantities, optionally in network byte order.  When
   is_increment is set, v1 is allowed to be v_max + 1 (the overshoot is
   corrected by the caller), hence the relaxed upper bound on v1's assert.
   NOTE(review): interior lines (u8 branch, braces) are elided in this view;
   comments only, code left byte-identical. */
121 set_2 (void *a0, void *a1,
123 u64 v_min, u64 v_max,
124 u32 n_bits, u32 is_net_byte_order, u32 is_increment)
126 ASSERT (v0 >= v_min && v0 <= v_max);
127 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
128 if (n_bits == BITS (u8))
133 else if (n_bits == BITS (u16))
135 if (is_net_byte_order)
137 v0 = clib_host_to_net_u16 (v0);
138 v1 = clib_host_to_net_u16 (v1);
140 clib_mem_unaligned (a0, u16) = v0;
141 clib_mem_unaligned (a1, u16) = v1;
143 else if (n_bits == BITS (u32))
145 if (is_net_byte_order)
147 v0 = clib_host_to_net_u32 (v0);
148 v1 = clib_host_to_net_u32 (v1);
150 clib_mem_unaligned (a0, u32) = v0;
151 clib_mem_unaligned (a1, u32) = v1;
153 else if (n_bits == BITS (u64))
155 if (is_net_byte_order)
157 v0 = clib_host_to_net_u64 (v0);
158 v1 = clib_host_to_net_u64 (v1);
160 clib_mem_unaligned (a0, u64) = v0;
161 clib_mem_unaligned (a1, u64) = v1;
/* do_set_fixed: write the constant v_min into every buffer in 'buffers' at
   byte_offset.  Processes two buffers per iteration while prefetching the
   next two, then drains the remainder one at a time.
   NOTE(review): interior lines (declarations, braces, n_buffers/buffers
   advance) are elided in this view; comments only, code left byte-identical. */
165 static_always_inline void
166 do_set_fixed (pg_main_t * pg,
171 u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
173 vlib_main_t *vm = vlib_get_main ();
/* Unrolled x2 path with write prefetch of buffers 2 and 3. */
175 while (n_buffers >= 4)
177 vlib_buffer_t *b0, *b1, *b2, *b3;
180 b0 = vlib_get_buffer (vm, buffers[0]);
181 b1 = vlib_get_buffer (vm, buffers[1]);
182 b2 = vlib_get_buffer (vm, buffers[2]);
183 b3 = vlib_get_buffer (vm, buffers[3]);
187 a0 = (void *) b0 + byte_offset;
188 a1 = (void *) b1 + byte_offset;
189 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
190 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
192 set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
193 /* is_increment */ 0);
195 ASSERT (validate_buffer_data (b0, s));
196 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail loop for the last 0..3 buffers. */
199 while (n_buffers > 0)
204 b0 = vlib_get_buffer (vm, buffers[0]);
208 a0 = (void *) b0 + byte_offset;
210 set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
212 ASSERT (validate_buffer_data (b0, s));
/* do_set_increment: write an incrementing value (wrapping from v_max back
   to v_min) into each buffer at byte_offset; returns the next value to use.
   When want_sum is set the running sum of written values is accumulated
   (used by the caller to update byte counters).
   NOTE(review): interior lines (declarations, v advance, sum store, return)
   are elided in this view; comments only, code left byte-identical. */
216 static_always_inline u64
217 do_set_increment (pg_main_t * pg,
223 u32 is_net_byte_order,
224 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
226 vlib_main_t *vm = vlib_get_main ();
229 ASSERT (v >= v_min && v <= v_max);
/* Unrolled x2 path writing v and v+1 per pair, prefetching ahead. */
231 while (n_buffers >= 4)
233 vlib_buffer_t *b0, *b1, *b2, *b3;
237 b0 = vlib_get_buffer (vm, buffers[0]);
238 b1 = vlib_get_buffer (vm, buffers[1]);
239 b2 = vlib_get_buffer (vm, buffers[2]);
240 b3 = vlib_get_buffer (vm, buffers[3]);
244 a0 = (void *) b0 + byte_offset;
245 a1 = (void *) b1 + byte_offset;
246 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
247 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Wrap v back to v_min once it passes v_max. */
251 v = v > v_max ? v_min : v;
253 v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
254 /* is_increment */ 1);
257 sum += 2 * v_old + 1;
/* If v_old+1 overshot v_max, undo the optimistic write/sum and redo
   the pair with the wrapped value. */
259 if (PREDICT_FALSE (v_old + 1 > v_max))
262 sum -= 2 * v_old + 1;
265 set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
270 v = v > v_max ? v_min : v;
271 set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
277 ASSERT (validate_buffer_data (b0, s));
278 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail loop. */
281 while (n_buffers > 0)
287 b0 = vlib_get_buffer (vm, buffers[0]);
291 a0 = (void *) b0 + byte_offset;
297 v = v > v_max ? v_min : v;
299 ASSERT (v_old >= v_min && v_old <= v_max);
300 set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
302 ASSERT (validate_buffer_data (b0, s));
/* do_set_random: write uniformly distributed random values in
   [v_min, v_max] into each buffer at byte_offset.  Draws raw bytes from
   vm->random_buffer, masks them to the next power of two covering the
   range, then subtracts v_diff up to twice to fold out-of-range values
   back into [v_min, v_max].
   NOTE(review): interior lines (declarations, macro headers, sum handling)
   are elided in this view; comments only, code left byte-identical. */
311 static_always_inline void
312 do_set_random (pg_main_t * pg,
318 u32 is_net_byte_order,
319 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
321 vlib_main_t *vm = vlib_get_main ();
322 u64 v_diff = v_max - v_min + 1;
/* r_mask covers the smallest power of two >= range size. */
323 u64 r_mask = max_pow2 (v_diff) - 1;
/* Pre-draw enough random bytes for all n_buffers values at once. */
328 random_data = clib_random_buffer_get_data
329 (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
/* Unrolled x2 path with write prefetch. */
333 while (n_buffers >= 4)
335 vlib_buffer_t *b0, *b1, *b2, *b3;
337 u64 r0 = 0, r1 = 0; /* warnings be gone */
339 b0 = vlib_get_buffer (vm, buffers[0]);
340 b1 = vlib_get_buffer (vm, buffers[1]);
341 b2 = vlib_get_buffer (vm, buffers[2]);
342 b3 = vlib_get_buffer (vm, buffers[3]);
346 a0 = (void *) b0 + byte_offset;
347 a1 = (void *) b1 + byte_offset;
348 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
349 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Macro fragment: consume two n-bit random values from random_data. */
356 u##n * r = random_data; \
359 random_data = r + 2; \
371 /* Add power of 2 sized random number which may be out of range. */
375 /* Twice should be enough to reduce to v_min .. v_max range. */
376 v0 = v0 > v_max ? v0 - v_diff : v0;
377 v1 = v1 > v_max ? v1 - v_diff : v1;
378 v0 = v0 > v_max ? v0 - v_diff : v0;
379 v1 = v1 > v_max ? v1 - v_diff : v1;
384 set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
385 /* is_increment */ 0);
387 ASSERT (validate_buffer_data (b0, s));
388 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail loop: one random value per remaining buffer. */
391 while (n_buffers > 0)
395 u64 r0 = 0; /* warnings be gone */
397 b0 = vlib_get_buffer (vm, buffers[0]);
401 a0 = (void *) b0 + byte_offset;
408 u##n * r = random_data; \
410 random_data = r + 1; \
422 /* Add power of 2 sized random number which may be out of range. */
425 /* Twice should be enough to reduce to v_min .. v_max range. */
426 v0 = v0 > v_max ? v0 - v_diff : v0;
427 v0 = v0 > v_max ? v0 - v_diff : v0;
432 set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
434 ASSERT (validate_buffer_data (b0, s));
/* Macro fragment + setbits_1: read-modify-write an n_bits-wide bit-field
   inside a max_bits-wide container at a0 -- clear the bits under 'mask',
   then OR in the shifted value, preserving network byte order.
   NOTE(review): the macro's #define line, setbits_1's opening line, and the
   u16/u32/u64 branch bodies are elided in this view; comments only, code
   left byte-identical. */
442 clib_mem_unaligned (a##i, t) = \
443 clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
449 u64 v_min, u64 v_max,
450 u32 max_bits, u32 n_bits, u64 mask, u32 shift)
452 ASSERT (v0 >= v_min && v0 <= v_max);
453 if (max_bits == BITS (u8))
454 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
456 else if (max_bits == BITS (u16))
460 else if (max_bits == BITS (u32))
464 else if (max_bits == BITS (u64))
/* setbits_2: dual-buffer variant of setbits_1 -- write bit-fields v0/v1
   into a0/a1.  As with set_2, is_increment relaxes the upper bound on v1
   by one so the caller can fix up wrap-around afterwards.
   NOTE(review): u16/u32/u64 branch bodies are elided in this view; comments
   only, code left byte-identical. */
471 setbits_2 (void *a0, void *a1,
473 u64 v_min, u64 v_max,
474 u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
476 ASSERT (v0 >= v_min && v0 <= v_max);
477 ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
478 if (max_bits == BITS (u8))
480 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
481 ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
484 else if (max_bits == BITS (u16))
489 else if (max_bits == BITS (u32))
494 else if (max_bits == BITS (u64))
/* do_setbits_fixed: bit-field analogue of do_set_fixed -- write the
   constant v_min into the masked/shifted field of every buffer at
   byte_offset, two buffers per iteration with prefetch, then a scalar tail.
   NOTE(review): interior lines (declarations, braces, pointer advance) are
   elided in this view; comments only, code left byte-identical. */
503 static_always_inline void
504 do_setbits_fixed (pg_main_t * pg,
510 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
512 vlib_main_t *vm = vlib_get_main ();
514 while (n_buffers >= 4)
516 vlib_buffer_t *b0, *b1, *b2, *b3;
519 b0 = vlib_get_buffer (vm, buffers[0]);
520 b1 = vlib_get_buffer (vm, buffers[1]);
521 b2 = vlib_get_buffer (vm, buffers[2]);
522 b3 = vlib_get_buffer (vm, buffers[3]);
526 a0 = (void *) b0 + byte_offset;
527 a1 = (void *) b1 + byte_offset;
528 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
529 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
532 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
533 /* is_increment */ 0);
535 ASSERT (validate_buffer_data (b0, s));
536 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail loop. */
539 while (n_buffers > 0)
544 b0 = vlib_get_buffer (vm, buffers[0]);
548 a0 = (void *) b0 + byte_offset;
550 setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
551 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_increment: bit-field analogue of do_set_increment -- write an
   incrementing value (wrapping from v_max to v_min) into the masked field
   of each buffer; returns the next value to use.
   NOTE(review): interior lines (declarations, v advance, return) are elided
   in this view; comments only, code left byte-identical. */
555 static_always_inline u64
556 do_setbits_increment (pg_main_t * pg,
563 u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
565 vlib_main_t *vm = vlib_get_main ();
567 ASSERT (v >= v_min && v <= v_max);
/* Unrolled x2 path: optimistically write v and v+1, then redo the pair
   if v+1 overshot v_max. */
569 while (n_buffers >= 4)
571 vlib_buffer_t *b0, *b1, *b2, *b3;
575 b0 = vlib_get_buffer (vm, buffers[0]);
576 b1 = vlib_get_buffer (vm, buffers[1]);
577 b2 = vlib_get_buffer (vm, buffers[2]);
578 b3 = vlib_get_buffer (vm, buffers[3]);
582 a0 = (void *) b0 + byte_offset;
583 a1 = (void *) b1 + byte_offset;
584 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
585 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
589 v = v > v_max ? v_min : v;
591 v_old + 0, v_old + 1,
592 v_min, v_max, max_bits, n_bits, mask, shift,
593 /* is_increment */ 1);
595 if (PREDICT_FALSE (v_old + 1 > v_max))
598 setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
601 v = v > v_max ? v_min : v;
602 setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
605 ASSERT (validate_buffer_data (b0, s));
606 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail loop. */
609 while (n_buffers > 0)
615 b0 = vlib_get_buffer (vm, buffers[0]);
619 a0 = (void *) b0 + byte_offset;
623 v = v > v_max ? v_min : v;
625 ASSERT (v_old >= v_min && v_old <= v_max);
626 setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
628 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_random: bit-field analogue of do_set_random -- write uniform
   random values in [v_min, v_max] into the masked field of each buffer.
   Random bytes are pre-drawn from vm->random_buffer, masked to a power of
   two, and folded into range by up to two v_diff subtractions.
   NOTE(review): interior lines (declarations, macro headers) are elided in
   this view; comments only, code left byte-identical. */
634 static_always_inline void
635 do_setbits_random (pg_main_t * pg,
641 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
643 vlib_main_t *vm = vlib_get_main ();
644 u64 v_diff = v_max - v_min + 1;
645 u64 r_mask = max_pow2 (v_diff) - 1;
/* Draw all needed random bytes up front (max_bits per buffer). */
649 random_data = clib_random_buffer_get_data
650 (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
653 while (n_buffers >= 4)
655 vlib_buffer_t *b0, *b1, *b2, *b3;
657 u64 r0 = 0, r1 = 0; /* warnings be gone */
659 b0 = vlib_get_buffer (vm, buffers[0]);
660 b1 = vlib_get_buffer (vm, buffers[1]);
661 b2 = vlib_get_buffer (vm, buffers[2]);
662 b3 = vlib_get_buffer (vm, buffers[3]);
666 a0 = (void *) b0 + byte_offset;
667 a1 = (void *) b1 + byte_offset;
668 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
669 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Macro fragment: consume two random values from random_data. */
676 u##n * r = random_data; \
679 random_data = r + 2; \
691 /* Add power of 2 sized random number which may be out of range. */
695 /* Twice should be enough to reduce to v_min .. v_max range. */
696 v0 = v0 > v_max ? v0 - v_diff : v0;
697 v1 = v1 > v_max ? v1 - v_diff : v1;
698 v0 = v0 > v_max ? v0 - v_diff : v0;
699 v1 = v1 > v_max ? v1 - v_diff : v1;
701 setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
702 /* is_increment */ 0);
704 ASSERT (validate_buffer_data (b0, s));
705 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail loop. */
708 while (n_buffers > 0)
712 u64 r0 = 0; /* warnings be gone */
714 b0 = vlib_get_buffer (vm, buffers[0]);
718 a0 = (void *) b0 + byte_offset;
725 u##n * r = random_data; \
727 random_data = r + 1; \
739 /* Add power of 2 sized random number which may be out of range. */
742 /* Twice should be enough to reduce to v_min .. v_max range. */
743 v0 = v0 > v_max ? v0 - v_diff : v0;
744 v0 = v0 > v_max ? v0 - v_diff : v0;
746 setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
748 ASSERT (validate_buffer_data (b0, s));
/* do_it: dispatcher for applying one edit (fixed / increment / random) to a
   bit range [lo_bit, hi_bit) of every buffer.  Chooses the byte-aligned
   fast path (set_* family) when the range starts and ends on byte
   boundaries, otherwise computes a container width, mask and shift and
   uses the setbits_* family.
   NOTE(review): the return-type line, the degenerate-range handling, the
   dispatch macro #define lines and their invocations are elided in this
   view; comments only, code left byte-identical. */
753 do_it (pg_main_t * pg,
757 u32 lo_bit, u32 hi_bit,
758 u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
760 u32 max_bits, l0, l1, h1, start_bit;
/* Degenerate range (v_min == v_max, presumably) collapses to FIXED --
   TODO confirm against the elided condition on line 762. */
763 edit_type = PG_EDIT_FIXED;
/* Split the bit range into byte index (l0) and intra-byte offsets. */
765 l0 = lo_bit / BITS (u8);
766 l1 = lo_bit % BITS (u8);
767 h1 = hi_bit % BITS (u8);
769 start_bit = l0 * BITS (u8);
771 max_bits = hi_bit - start_bit;
772 ASSERT (max_bits <= 64);
/* Macro fragment: byte-aligned dispatch to do_set_{increment,random,fixed}. */
776 if (edit_type == PG_EDIT_INCREMENT) \
777 v = do_set_increment (pg, s, buffers, n_buffers, \
780 /* is_net_byte_order */ 1, \
781 /* want sum */ 0, 0, \
784 else if (edit_type == PG_EDIT_RANDOM) \
785 do_set_random (pg, s, buffers, n_buffers, \
788 /* is_net_byte_order */ 1, \
789 /* want sum */ 0, 0, \
791 else /* edit_type == PG_EDIT_FIXED */ \
792 do_set_fixed (pg, s, buffers, n_buffers, \
795 /* is_net_byte_order */ 1, \
/* Byte-aligned field: use the fast set_* path. */
799 if (l1 == 0 && h1 == 0)
815 u32 n_bits = max_bits;
/* Round the container up to a power-of-two width (at least 8 bits)
   and build the mask/shift for the read-modify-write path. */
817 max_bits = clib_max (max_pow2 (n_bits), 8);
819 mask = ((u64) 1 << (u64) n_bits) - 1;
820 mask &= ~(((u64) 1 << (u64) shift) - 1);
822 mask <<= max_bits - n_bits;
823 shift += max_bits - n_bits;
/* Macro fragment: bit-field dispatch to do_setbits_{increment,random,fixed}. */
829 if (edit_type == PG_EDIT_INCREMENT) \
830 v = do_setbits_increment (pg, s, buffers, n_buffers, \
831 BITS (u##n), n_bits, \
832 l0, v_min, v_max, v, \
834 else if (edit_type == PG_EDIT_RANDOM) \
835 do_setbits_random (pg, s, buffers, n_buffers, \
836 BITS (u##n), n_bits, \
839 else /* edit_type == PG_EDIT_FIXED */ \
840 do_setbits_fixed (pg, s, buffers, n_buffers, \
841 BITS (u##n), n_bits, \
/* pg_generate_set_lengths: set vlib_buffer_t.current_length on every
   generated buffer per the stream's packet-size edit type (increment /
   random / fixed), accumulating the total byte count, then credit the
   stream's RX interface combined counter with (n_buffers, length_sum).
   NOTE(review): interior lines (braces, fixed-path sum line, counter-guard
   condition) are elided in this view; comments only, code left
   byte-identical. */
860 pg_generate_set_lengths (pg_main_t * pg,
861 pg_stream_t * s, u32 * buffers, u32 n_buffers)
863 u64 v_min, v_max, length_sum;
864 pg_edit_type_t edit_type;
866 v_min = s->min_packet_bytes;
867 v_max = s->max_packet_bytes;
868 edit_type = s->packet_size_edit_type;
/* current_length is edited in host byte order directly inside the
   vlib_buffer_t metadata (note byte_offset = offsetof current_length). */
870 if (edit_type == PG_EDIT_INCREMENT)
871 s->last_increment_packet_size
872 = do_set_increment (pg, s, buffers, n_buffers,
873 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
874 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
875 /* is_net_byte_order */ 0,
876 /* want sum */ 1, &length_sum,
877 v_min, v_max, s->last_increment_packet_size);
879 else if (edit_type == PG_EDIT_RANDOM)
880 do_set_random (pg, s, buffers, n_buffers,
881 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
882 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
883 /* is_net_byte_order */ 0,
884 /* want sum */ 1, &length_sum,
887 else /* edit_type == PG_EDIT_FIXED */
889 do_set_fixed (pg, s, buffers, n_buffers,
890 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
891 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
892 /* is_net_byte_order */ 0,
/* Fixed size: every packet is v_min bytes long. */
894 length_sum = v_min * n_buffers;
/* Update RX packet/byte counters on the stream's receive interface. */
898 vnet_main_t *vnm = vnet_get_main ();
899 vnet_interface_main_t *im = &vnm->interface_main;
900 vnet_sw_interface_t *si =
901 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
903 vlib_increment_combined_counter (im->combined_sw_if_counters
904 + VNET_INTERFACE_COUNTER_RX,
905 vlib_get_thread_index (),
906 si->sw_if_index, n_buffers, length_sum);
/* pg_generate_fix_multi_buffer_lengths: for streams whose packets span
   multiple chained buffers, walk each chain distributing the whole-packet
   length (stored in the head buffer's current_length) across segments of
   at most s->buffer_bytes, setting/clearing VLIB_BUFFER_NEXT_PRESENT, and
   collecting buffers beyond the packet's end for freeing.
   NOTE(review): interior lines (declarations, chain-walk advance, braces)
   are elided in this view; comments only, code left byte-identical.
   NOTE(review): 'unused_buffers' is function-static -- presumably safe
   because pg streams run per-worker; confirm before touching. */
912 pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
914 u32 * buffers, u32 n_buffers)
916 vlib_main_t *vm = vlib_get_main ();
917 pg_buffer_index_t *pbi;
919 static u32 *unused_buffers = 0;
921 while (n_buffers > 0)
927 b = vlib_get_buffer (vm, bi);
929 /* Current length here is length of whole packet. */
930 n_bytes_left = b->current_length;
932 pbi = s->buffer_indices;
/* Each segment takes at most buffer_bytes of the remaining packet. */
935 uword n = clib_min (n_bytes_left, s->buffer_bytes);
937 b->current_length = n;
939 if (n_bytes_left > 0)
940 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
942 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
944 /* Return unused buffers to fifos. */
946 vec_add1 (unused_buffers, bi);
949 if (pbi >= vec_end (s->buffer_indices))
953 b = vlib_get_buffer (vm, bi);
955 ASSERT (n_bytes_left == 0);
/* Free all trailing buffers collected above and reset the scratch vector. */
961 if (vec_len (unused_buffers) > 0)
963 vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
964 _vec_len (unused_buffers) = 0;
/* pg_generate_edit: apply every non-fixed edit of the stream (increment /
   random) to the freshly generated buffers, then invoke each edit group's
   edit_function (innermost group first) to finalize computed fields such
   as lengths and checksums.
   NOTE(review): interior lines (switch header, RANDOM case, default/ASSERT,
   braces) are elided in this view; comments only, code left byte-identical. */
969 pg_generate_edit (pg_main_t * pg,
970 pg_stream_t * s, u32 * buffers, u32 n_buffers)
974 vec_foreach (e, s->non_fixed_edits)
979 case PG_EDIT_INCREMENT:
984 v_min = pg_edit_get_value (e, PG_EDIT_LO);
985 v_max = pg_edit_get_value (e, PG_EDIT_HI);
/* Translate the edit's lsb bit offset into an absolute bit position
   within vlib_buffer_t (relative to the data member). */
987 hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
988 + BITS (u8) + e->lsb_bit_offset);
989 lo_bit = hi_bit - e->n_bits;
991 e->last_increment_value
992 = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
993 e->last_increment_value, e->type);
997 case PG_EDIT_UNSPECIFIED:
1001 /* Should not be any fixed edits left. */
1007 /* Call any edit functions to e.g. completely IP lengths, checksums, ... */
/* Iterate groups from last (innermost header) to first. */
1010 for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1012 pg_edit_group_t *g = s->edit_groups + i;
1013 if (g->edit_function)
1014 g->edit_function (pg, s, g, buffers, n_buffers);
/* pg_set_next_buffer_pointers: chain each buffer in 'buffers' to the
   corresponding entry of 'next_buffers' by setting next_buffer and the
   NEXT_PRESENT flag; processed two at a time with prefetch, then a tail.
   NOTE(review): interior lines (pointer advances, braces) are elided in
   this view; comments only, code left byte-identical. */
1020 pg_set_next_buffer_pointers (pg_main_t * pg,
1022 u32 * buffers, u32 * next_buffers, u32 n_buffers)
1024 vlib_main_t *vm = vlib_get_main ();
1026 while (n_buffers >= 4)
1029 vlib_buffer_t *b0, *b1;
1031 b0 = vlib_get_buffer (vm, buffers[0]);
1032 b1 = vlib_get_buffer (vm, buffers[1]);
1033 ni0 = next_buffers[0];
1034 ni1 = next_buffers[1];
1036 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1037 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1039 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1040 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1041 b0->next_buffer = ni0;
1042 b1->next_buffer = ni1;
/* Scalar tail loop. */
1049 while (n_buffers > 0)
1054 b0 = vlib_get_buffer (vm, buffers[0]);
1055 ni0 = next_buffers[0];
1060 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1061 b0->next_buffer = ni0;
/* init_buffers_inline: initialize freshly allocated buffers for a
   (non-replay) stream: set RX sw_if_index, clear TX sw_if_index, and
   optionally copy n_data template bytes (clamped to the template length)
   into each buffer's data area.  Unrolled two at a time with prefetch.
   NOTE(review): interior lines (declarations, loop headers, set_data
   guards) are elided in this view; comments only, code left byte-identical. */
1065 static_always_inline void
1066 init_buffers_inline (vlib_main_t * vm,
1069 u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
/* Replay streams are filled by pg_stream_fill_replay, never here. */
1074 ASSERT (s->replay_packet_templates == 0);
1076 data = s->fixed_packet_data + data_offset;
1077 mask = s->fixed_packet_data_mask + data_offset;
/* Clamp n_data so template copies never read past the vectors. */
1078 if (data + n_data >= vec_end (s->fixed_packet_data))
1079 n_data = (data < vec_end (s->fixed_packet_data)
1080 ? vec_end (s->fixed_packet_data) - data : 0);
1083 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1084 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1093 vlib_buffer_t *b0, *b1;
1095 /* Prefetch next iteration. */
1096 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1097 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1104 b0 = vlib_get_buffer (vm, bi0);
1105 b1 = vlib_get_buffer (vm, bi1);
1107 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1108 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1110 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1111 vnet_buffer (b1)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1115 clib_memcpy_fast (b0->data, data, n_data);
1116 clib_memcpy_fast (b1->data, data, n_data);
1120 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1121 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
/* Scalar tail: same initialization for remaining buffers. */
1134 b0 = vlib_get_buffer (vm, bi0);
1135 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1136 /* s->sw_if_index[VLIB_TX]; */
1137 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1140 clib_memcpy_fast (b0->data, data, n_data);
1142 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
/* pg_stream_fill_helper: allocate up to n_alloc buffers for one
   pg_buffer_index (one segment position of the stream's packet chain),
   initialize them from the fixed template at the segment's data offset,
   chain them to next_buffers, and -- for the head segment only -- run
   length generation, multi-buffer length fix-up, and per-packet edits.
   Returns (per the elided tail) the number of buffers actually produced.
   NOTE(review): interior lines (declarations, early-exit path, the
   init_buffers_inline call header, return) are elided in this view;
   comments only, code left byte-identical. */
1147 pg_stream_fill_helper (pg_main_t * pg,
1149 pg_buffer_index_t * bi,
1150 u32 * buffers, u32 * next_buffers, u32 n_alloc)
1152 vlib_main_t *vm = vlib_get_main ();
/* Only the first segment (bi == buffer_indices) is a packet start. */
1153 uword is_start_of_packet = bi == s->buffer_indices;
1156 ASSERT (vec_len (s->replay_packet_templates) == 0);
1158 n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
1159 if (n_allocated == 0)
1163 * We can't assume we got all the buffers we asked for...
1164 * This never worked until recently.
1166 n_alloc = n_allocated;
1168 /* Reinitialize buffers */
1172 n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1177 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1179 if (is_start_of_packet)
1181 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1182 if (vec_len (s->buffer_indices) > 1)
1183 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1185 pg_generate_edit (pg, s, buffers, n_alloc);
/* pg_stream_fill_replay: fill the stream's head FIFO with up to n_alloc
   packets taken from pcap replay templates.  Two passes over the circular
   template list: first size the buffer allocation (templates may span
   multiple buffers of buf_sz bytes), then copy template bytes into chained
   buffers, add each packet head to the FIFO, update RX interface counters,
   and advance current_replay_packet_index.
   NOTE(review): interior lines (declarations, loop headers, early-return on
   alloc failure, final return) are elided in this view; comments only, code
   left byte-identical. */
1192 pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
1194 pg_buffer_index_t *bi;
1196 u32 buffer_alloc_request = 0;
1197 u32 buffer_alloc_result;
1198 u32 current_buffer_index;
1200 vlib_main_t *vm = vlib_get_main ();
1201 vnet_main_t *vnm = vnet_get_main ();
1202 u32 buf_sz = vlib_buffer_get_default_data_size (vm);
1203 vnet_interface_main_t *im = &vnm->interface_main;
1204 vnet_sw_interface_t *si;
/* Reuse this thread's scratch buffer-index vector. */
1206 buffers = pg->replay_buffers_by_thread[vm->thread_index];
1207 vec_reset_length (buffers);
1208 bi = s->buffer_indices;
1211 i = s->current_replay_packet_index;
1212 l = vec_len (s->replay_packet_templates);
1214 /* Figure out how many buffers we need */
1219 d0 = vec_elt (s->replay_packet_templates, i);
/* Round each template's length up to whole buf_sz buffers. */
1220 buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;
/* Wrap around the circular template list. */
1222 i = ((i + 1) == l) ? 0 : i + 1;
1226 ASSERT (buffer_alloc_request > 0);
1227 vec_validate (buffers, buffer_alloc_request - 1);
1229 /* Allocate that many buffers */
1230 buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
/* Partial allocation: warn, free what we got, and bail out. */
1231 if (buffer_alloc_result < buffer_alloc_request)
1233 clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
1234 buffer_alloc_request);
1235 vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
1236 pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1240 /* Now go generate the buffers, and add them to the FIFO */
1243 current_buffer_index = 0;
1244 i = s->current_replay_packet_index;
1245 l = vec_len (s->replay_packet_templates);
1251 u32 bytes_to_copy, bytes_this_chunk;
1254 d0 = vec_elt (s->replay_packet_templates, i);
1256 bytes_to_copy = vec_len (d0);
1258 /* Add head chunk to pg fifo */
1259 clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);
/* Copy the template into a chain of buf_sz-sized buffers. */
1262 while (bytes_to_copy)
1264 bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
1265 ASSERT (current_buffer_index < vec_len (buffers));
1266 b = vlib_get_buffer (vm, buffers[current_buffer_index]);
1267 clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
1268 vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1269 vnet_buffer (b)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1272 b->current_data = 0;
1273 b->current_length = bytes_this_chunk;
/* Chain to the next buffer unless this chunk finishes the packet. */
1275 not_last = bytes_this_chunk < bytes_to_copy;
1278 ASSERT (current_buffer_index < (vec_len (buffers) - 1));
1279 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1280 b->next_buffer = buffers[current_buffer_index + 1];
1282 bytes_to_copy -= bytes_this_chunk;
1283 data_offset += bytes_this_chunk;
1284 current_buffer_index++;
1287 i = ((i + 1) == l) ? 0 : i + 1;
1291 /* Update the interface counters */
1292 si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1294 for (i = 0; i < n_alloc; i++)
1295 l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1296 vlib_increment_combined_counter (im->combined_sw_if_counters
1297 + VNET_INTERFACE_COUNTER_RX,
1298 vlib_get_thread_index (),
1299 si->sw_if_index, n_alloc, l);
/* Advance the replay cursor, wrapping over the template list. */
1301 s->current_replay_packet_index += n_alloc;
1302 s->current_replay_packet_index %= vec_len (s->replay_packet_templates);
1304 pg->replay_buffers_by_thread[vm->thread_index] = buffers;
/* pg_stream_fill: top up the stream's buffer FIFOs so at least n_buffers
   packets are available.  Rounds the allocation up to a frame's worth,
   honors n_packets_limit, delegates pcap replay streams to
   pg_stream_fill_replay, then fills each segment FIFO back-to-front
   (so next_buffers of segment i come from segment i+1), handling FIFO
   wrap-around in pg_stream_fill_helper calls.  Returns the number of
   packets now available.
   NOTE(review): interior lines (early returns, last_tail/last_start
   bookkeeping, FIFO-size assertions, validation block) are elided in this
   view; comments only, code left byte-identical. */
1310 pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1312 pg_buffer_index_t *bi;
1313 word i, n_in_fifo, n_alloc, n_free, n_added;
1314 u32 *tail, *start, *end, *last_tail, *last_start;
1316 bi = s->buffer_indices;
1318 n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
/* Already have enough queued packets: nothing to do. */
1319 if (n_in_fifo >= n_buffers)
1322 n_alloc = n_buffers - n_in_fifo;
1324 /* Round up, but never generate more than limit. */
1325 n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1327 if (s->n_packets_limit > 0
1328 && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1330 n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1336 * Handle pcap replay directly
1338 if (s->replay_packet_templates)
1339 return pg_stream_fill_replay (pg, s, n_alloc);
1341 /* All buffer fifos should have the same size. */
1345 vec_foreach (bi, s->buffer_indices)
1347 e = clib_fifo_elts (bi->buffer_fifo);
1348 if (bi == s->buffer_indices)
1354 last_tail = last_start = 0;
/* Fill segment FIFOs from the last segment towards the head so each
   segment can chain into the one filled on the previous iteration. */
1357 for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1359 bi = vec_elt_at_index (s->buffer_indices, i);
1361 n_free = clib_fifo_free_elts (bi->buffer_fifo);
1362 if (n_free < n_alloc)
1363 clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1365 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1366 start = bi->buffer_fifo;
1367 end = clib_fifo_end (bi->buffer_fifo);
/* Contiguous tail region: one fill call suffices. */
1369 if (tail + n_alloc <= end)
1372 pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
/* Wrapped tail region: fill tail..end, then start..remainder. */
1376 u32 n = clib_min (end - tail, n_alloc);
1377 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1379 if (n_added == n && n_alloc > n_added)
1381 n_added += pg_stream_fill_helper
1382 (pg, s, bi, start, last_start, n_alloc - n_added);
/* Under-allocation: shrink the FIFO tail back to what we produced. */
1386 if (PREDICT_FALSE (n_added < n_alloc))
1387 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1392 /* Verify that pkts in the fifo are properly allocated */
1395 return n_in_fifo + n_added;
/* pg_input_trace_t fragment: the trace record embeds a vlib_buffer_t copy
   whose pre_data area is reused to hold the traced packet's first bytes.
   NOTE(review): the struct header and remaining members are elided in this
   view; comments only, code left byte-identical. */
1405 /* Use pre data for packet data. */
1406 vlib_buffer_t buffer;
/* format_pg_input_trace: packet-trace formatter for the pg input node.
   Prints the stream name (or bare index if the stream is gone), packet
   length, RX sw_if_index, the embedded buffer metadata, and the captured
   packet bytes -- via the target node's format_buffer when available,
   otherwise as a hex dump.
   NOTE(review): interior lines (declarations, braces, return) are elided
   in this view; comments only, code left byte-identical. */
1410 format_pg_input_trace (u8 * s, va_list * va)
1412 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
1413 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1414 pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1415 pg_main_t *pg = &pg_main;
1416 pg_stream_t *stream;
1418 u32 indent = format_get_indent (s);
/* Stream may have been deleted since the trace was captured. */
1421 if (!pool_is_free_index (pg->streams, t->stream_index))
1422 stream = pool_elt_at_index (pg->streams, t->stream_index);
1425 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1427 s = format (s, "stream %d", t->stream_index);
1429 s = format (s, ", %d bytes", t->packet_length);
1430 s = format (s, ", %d sw_if_index", t->sw_if_index);
1432 s = format (s, "\n%U%U",
1433 format_white_space, indent, format_vnet_buffer, &t->buffer);
1435 s = format (s, "\n%U", format_white_space, indent);
1439 n = vlib_get_node (vm, stream->node_index);
/* Prefer the downstream node's own buffer formatter when it has one. */
1441 if (n && n->format_buffer)
1442 s = format (s, "%U", n->format_buffer,
1443 t->buffer.pre_data, sizeof (t->buffer.pre_data));
1445 s = format (s, "%U",
1446 format_hex_bytes, t->buffer.pre_data,
1447 ARRAY_LEN (t->buffer.pre_data));
/* pg_input_trace: add trace records for n_buffers generated packets --
   stream index, chain length, RX sw_if_index, a copy of the buffer
   metadata, and the first packet bytes stashed into the trace's pre_data.
   Unrolled two at a time, then a scalar tail.
   NOTE(review): interior lines (loop headers, buffer-index loads, pointer
   advances) are elided in this view; comments only, code left
   byte-identical. */
1452 pg_input_trace (pg_main_t * pg,
1453 vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
1454 u32 * buffers, u32 n_buffers)
1456 vlib_main_t *vm = vlib_get_main ();
1465 vlib_buffer_t *b0, *b1;
1466 pg_input_trace_t *t0, *t1;
1473 b0 = vlib_get_buffer (vm, bi0);
1474 b1 = vlib_get_buffer (vm, bi1);
1476 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1477 vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1479 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1480 t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1482 t0->stream_index = stream_index;
1483 t1->stream_index = stream_index;
1485 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1486 t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1488 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1489 t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
/* Copy buffer metadata (everything up to pre_data) into the trace. */
1491 clib_memcpy_fast (&t0->buffer, b0,
1492 sizeof (b0[0]) - sizeof (b0->pre_data));
1493 clib_memcpy_fast (&t1->buffer, b1,
1494 sizeof (b1[0]) - sizeof (b1->pre_data));
/* Stash leading packet bytes into the trace's pre_data for display. */
1496 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1497 sizeof (t0->buffer.pre_data));
1498 clib_memcpy_fast (t1->buffer.pre_data, b1->data,
1499 sizeof (t1->buffer.pre_data));
/* Scalar tail: same capture for remaining buffers. */
1506 pg_input_trace_t *t0;
1512 b0 = vlib_get_buffer (vm, bi0);
1514 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1515 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1517 t0->stream_index = stream_index;
1518 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1519 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1520 clib_memcpy_fast (&t0->buffer, b0,
1521 sizeof (b0[0]) - sizeof (b0->pre_data));
1522 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1523 sizeof (t0->buffer.pre_data));
/* pg_generate_packets: hand up to n_packets_to_generate queued packets
   from the stream's head FIFO to the next node.  Refills FIFOs via
   pg_stream_fill, resolves the device-input feature arc configuration
   once, then per frame: acquires a next frame (the ethernet-input fast
   path marks it single-sw_if_index / no-append), copies buffer indices
   out of the circular FIFO (handling wrap), advances FIFO heads, stamps
   feature-arc config on each buffer, and traces as requested.  Returns
   the number of packets actually generated.
   NOTE(review): interior lines (declarations, feature-arc guard header,
   wrapped-copy branch header, debug-validation block, return) are elided
   in this view; comments only, code left byte-identical. */
1528 pg_generate_packets (vlib_node_runtime_t * node,
1530 pg_stream_t * s, uword n_packets_to_generate)
1532 vlib_main_t *vm = vlib_get_main ();
1533 u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1534 uword n_packets_generated;
1535 pg_buffer_index_t *bi, *bi0;
1536 u32 next_index = s->next_index;
1537 vnet_feature_main_t *fm = &feature_main;
1538 vnet_feature_config_main_t *cm;
1539 u8 feature_arc_index = fm->device_input_feature_arc_index;
1540 cm = &fm->feature_config_mains[feature_arc_index];
1541 u32 current_config_index = ~(u32) 0;
1544 bi0 = s->buffer_indices;
1546 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1547 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1548 n_packets_generated = 0;
/* Resolve device-input feature configuration once per call. */
1551 (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
1553 current_config_index =
1554 vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1555 vnet_get_config_data (&cm->config_main, &current_config_index,
1559 while (n_packets_to_generate > 0)
1561 u32 *head, *start, *end;
/* Fast path into ethernet-input: mark the frame as coming from a
   single interface so ethernet-input can skip per-packet checks. */
1563 if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1565 vlib_next_frame_t *nf;
1567 ethernet_input_frame_t *ef;
1569 vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1570 nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
1571 f = vlib_get_frame (vm, nf->frame_index);
1572 f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1574 ef = vlib_frame_scalar_args (f);
1575 pi = pool_elt_at_index (pg->interfaces, s->pg_if_index);
1576 ef->sw_if_index = pi->sw_if_index;
1577 ef->hw_if_index = pi->hw_if_index;
1578 vlib_frame_no_append (f);
1581 vlib_get_next_frame (vm, node, next_index, to_next, n_left);
1583 n_this_frame = n_packets_to_generate;
1584 if (n_this_frame > n_left)
1585 n_this_frame = n_left;
1587 start = bi0->buffer_fifo;
1588 end = clib_fifo_end (bi0->buffer_fifo);
1589 head = clib_fifo_head (bi0->buffer_fifo);
/* Copy indices out of the circular FIFO, splitting on wrap. */
1591 if (head + n_this_frame <= end)
1592 vlib_buffer_copy_indices (to_next, head, n_this_frame);
1596 vlib_buffer_copy_indices (to_next + 0, head, n);
1597 vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
/* Generated streams advance every segment FIFO; replay streams keep
   only the head FIFO. */
1600 if (s->replay_packet_templates == 0)
1602 vec_foreach (bi, s->buffer_indices)
1603 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1607 clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
/* Stamp feature-arc state onto each buffer when features are enabled. */
1610 if (current_config_index != ~(u32) 0)
1611 for (i = 0; i < n_this_frame; i++)
1614 b = vlib_get_buffer (vm, to_next[i]);
1615 b->current_config_index = current_config_index;
1616 vnet_buffer (b)->feature_arc_index = feature_arc_index;
1619 n_trace = vlib_get_trace_count (vm, node);
1622 u32 n = clib_min (n_trace, n_this_frame);
1623 pg_input_trace (pg, node, s - pg->streams, next_index, to_next, n);
1624 vlib_set_trace_count (vm, node, n_trace - n);
1626 n_packets_to_generate -= n_this_frame;
1627 n_packets_generated += n_this_frame;
1628 n_left -= n_this_frame;
/* Debug check: chained buffers must satisfy the minimum segment size. */
1634 for (i = 0; i < n_this_frame; i++)
1636 b = vlib_get_buffer (vm, to_next[i]);
1637 ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
1638 b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
1641 vlib_put_next_frame (vm, node, next_index, n_left);
1644 return n_packets_generated;
/* pg_input_stream: generate one round of packets for a single stream --
   disable the stream once its packet limit is reached, apply the
   packets-per-second rate limit via a fractional accumulator, clamp to
   the remaining limit and to one frame, then generate.  Returns (per the
   elided tail) the number of packets produced this round.
   NOTE(review): interior lines (declarations, early return, final return)
   are elided in this view; comments only, code left byte-identical. */
1648 pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
1650 vlib_main_t *vm = vlib_get_main ();
/* Limit reached: turn the stream off. */
1654 if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
1656 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1660 /* Apply rate limit. */
1661 time_now = vlib_time_now (vm);
1662 if (s->time_last_generate == 0)
1663 s->time_last_generate = time_now;
1665 dt = time_now - s->time_last_generate;
1666 s->time_last_generate = time_now;
1668 n_packets = VLIB_FRAME_SIZE;
1669 if (s->rate_packets_per_second > 0)
/* Accumulate fractional packets; emit the integer part this round. */
1671 s->packet_accumulator += dt * s->rate_packets_per_second;
1672 n_packets = s->packet_accumulator;
1674 /* Never allow accumulator to grow if we get behind. */
1675 s->packet_accumulator -= n_packets;
1678 /* Apply fixed limit. */
1679 if (s->n_packets_limit > 0
1680 && s->n_packets_generated + n_packets > s->n_packets_limit)
1681 n_packets = s->n_packets_limit - s->n_packets_generated;
1683 /* Generate up to one frame's worth of packets. */
1684 if (n_packets > VLIB_FRAME_SIZE)
1685 n_packets = VLIB_FRAME_SIZE;
1688 n_packets = pg_generate_packets (node, pg, s, n_packets);
1690 s->n_packets_generated += n_packets;
/* pg_input: input-node dispatch function -- iterate this worker's enabled
   streams and generate packets for each; returns (per the elided tail)
   the total packet count produced this dispatch.
   NOTE(review): interior lines (declarations, bitmap-foreach close, return)
   are elided in this view; comments only, code left byte-identical. */
1696 pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
1699 pg_main_t *pg = &pg_main;
1700 uword n_packets = 0;
1701 u32 worker_index = 0;
/* Streams are partitioned per worker thread. */
1703 if (vlib_num_workers ())
1704 worker_index = vlib_get_current_worker_index ();
1707 clib_bitmap_foreach (i, pg->enabled_streams[worker_index], ({
1708 pg_stream_t *s = vec_elt_at_index (pg->streams, i);
1709 n_packets += pg_input_stream (node, pg, s);
/* Node registration: pg-input runs as an input node, sibling of
   device-input so it shares that node's next-node arcs; it stays disabled
   until a stream is enabled.
   NOTE(review): some initializer lines (.name, closing brace) are elided
   in this view; comments only, code left byte-identical. */
1717 VLIB_REGISTER_NODE (pg_input_node) = {
1718 .function = pg_input,
1720 .sibling_of = "device-input",
1721 .type = VLIB_NODE_TYPE_INPUT,
1723 .format_trace = format_pg_input_trace,
1725 /* Input node will be left disabled until a stream is active. */
1726 .state = VLIB_NODE_STATE_DISABLED,
1731 * fd.io coding-style-patch-verification: ON
1734 * eval: (c-set-style "gnu")