2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * pg_input.c: buffer generator input
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
41 * To be honest, the packet generator needs an extreme
42 * makeover. Two key assumptions which drove the current implementation
43 * are no longer true. First, buffer managers implement a
44 * post-TX recycle list. Second, that packet generator performance
45 * is first-order important.
48 #include <vlib/vlib.h>
49 #include <vnet/pg/pg.h>
50 #include <vnet/vnet.h>
51 #include <vnet/ethernet/ethernet.h>
52 #include <vnet/feature/feature.h>
53 #include <vnet/ip/ip4_packet.h>
54 #include <vnet/ip/ip6_packet.h>
55 #include <vnet/udp/udp_packet.h>
56 #include <vnet/devices/devices.h>
/* Debug validation: compare a buffer's bytes (bd) against the stream's
   fixed packet data wherever the corresponding mask byte is set; on a
   mismatch, dump the buffer, the differing index, and the actual /
   mask / expected byte runs.
   NOTE(review): this file is an elided excerpt — declarations, braces
   and some statements between the numbered lines are missing. */
59 validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
60 u32 data_offset, u32 n_bytes)
/* Expected data and mask for the requested slice. */
66 pd = s->fixed_packet_data + data_offset;
67 pm = s->fixed_packet_data_mask + data_offset;
/* Clamp n_bytes so the compare never reads past the data vector. */
69 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
70 n_bytes = (pd < vec_end (s->fixed_packet_data)
71 ? vec_end (s->fixed_packet_data) - pd : 0);
/* Byte-wise masked comparison; warn on first mismatch. */
73 for (i = 0; i < n_bytes; i++)
74 if ((bd[i] & pm[i]) != pd[i])
80 clib_warning ("buffer %U", format_vnet_buffer, b);
81 clib_warning ("differ at index %d", i);
82 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
83 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
84 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
/* Convenience wrapper: validate the whole fixed-data region (offset 0). */
89 validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
91 return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
/* set_1 (signature line elided above): store one n_bits-wide value v0
   through the unaligned pointer a0, converting to network byte order
   first when is_net_byte_order is set. v0 must lie in [v_min, v_max]. */
96 u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
98 ASSERT (v0 >= v_min && v0 <= v_max);
99 if (n_bits == BITS (u8))
103 else if (n_bits == BITS (u16))
105 if (is_net_byte_order)
106 v0 = clib_host_to_net_u16 (v0);
107 clib_mem_unaligned (a0, u16) = v0;
109 else if (n_bits == BITS (u32))
111 if (is_net_byte_order)
112 v0 = clib_host_to_net_u32 (v0);
113 clib_mem_unaligned (a0, u32) = v0;
115 else if (n_bits == BITS (u64))
117 if (is_net_byte_order)
118 v0 = clib_host_to_net_u64 (v0);
119 clib_mem_unaligned (a0, u64) = v0;
/* set_2: paired variant of set_1 — stores v0 at a0 and v1 at a1 in one
   call. When the caller is generating an incrementing sequence, v1 may
   legitimately be v_max + 1 (see the second ASSERT); the caller fixes
   up the wrap afterwards. */
124 set_2 (void *a0, void *a1,
126 u64 v_min, u64 v_max,
127 u32 n_bits, u32 is_net_byte_order, u32 is_increment)
129 ASSERT (v0 >= v_min && v0 <= v_max);
130 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
131 if (n_bits == BITS (u8))
136 else if (n_bits == BITS (u16))
138 if (is_net_byte_order)
140 v0 = clib_host_to_net_u16 (v0);
141 v1 = clib_host_to_net_u16 (v1);
143 clib_mem_unaligned (a0, u16) = v0;
144 clib_mem_unaligned (a1, u16) = v1;
146 else if (n_bits == BITS (u32))
148 if (is_net_byte_order)
150 v0 = clib_host_to_net_u32 (v0);
151 v1 = clib_host_to_net_u32 (v1);
153 clib_mem_unaligned (a0, u32) = v0;
154 clib_mem_unaligned (a1, u32) = v1;
156 else if (n_bits == BITS (u64))
158 if (is_net_byte_order)
160 v0 = clib_host_to_net_u64 (v0);
161 v1 = clib_host_to_net_u64 (v1);
163 clib_mem_unaligned (a0, u64) = v0;
164 clib_mem_unaligned (a1, u64) = v1;
/* do_set_fixed: write the constant v_min at byte_offset into every
   buffer in the list. Main loop handles buffers two at a time while
   prefetching the write targets of the following pair; a scalar loop
   drains the remainder. Debug builds re-validate each touched buffer. */
168 static_always_inline void
169 do_set_fixed (pg_main_t * pg,
174 u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
176 vlib_main_t *vm = vlib_get_main ();
178 while (n_buffers >= 4)
180 vlib_buffer_t *b0, *b1, *b2, *b3;
183 b0 = vlib_get_buffer (vm, buffers[0]);
184 b1 = vlib_get_buffer (vm, buffers[1]);
185 b2 = vlib_get_buffer (vm, buffers[2]);
186 b3 = vlib_get_buffer (vm, buffers[3]);
190 a0 = (void *) b0 + byte_offset;
191 a1 = (void *) b1 + byte_offset;
192 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
193 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
195 set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
196 /* is_increment */ 0);
198 ASSERT (validate_buffer_data (b0, s));
199 ASSERT (validate_buffer_data (b1, s));
/* Scalar cleanup for the remaining buffers. */
202 while (n_buffers > 0)
207 b0 = vlib_get_buffer (vm, buffers[0]);
211 a0 = (void *) b0 + byte_offset;
213 set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
215 ASSERT (validate_buffer_data (b0, s));
/* do_set_increment: write an incrementing sequence (starting at v,
   wrapping back to v_min once past v_max) into each buffer's field at
   byte_offset. When want_sum is set, accumulates the sum of the values
   written into *sum_result. Returns the next value to continue from. */
219 static_always_inline u64
220 do_set_increment (pg_main_t * pg,
226 u32 is_net_byte_order,
227 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
229 vlib_main_t *vm = vlib_get_main ();
232 ASSERT (v >= v_min && v <= v_max);
234 while (n_buffers >= 4)
236 vlib_buffer_t *b0, *b1, *b2, *b3;
240 b0 = vlib_get_buffer (vm, buffers[0]);
241 b1 = vlib_get_buffer (vm, buffers[1]);
242 b2 = vlib_get_buffer (vm, buffers[2]);
243 b3 = vlib_get_buffer (vm, buffers[3]);
247 a0 = (void *) b0 + byte_offset;
248 a1 = (void *) b1 + byte_offset;
249 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
250 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Wrap the counter back to v_min once it passes v_max. */
254 v = v > v_max ? v_min : v;
256 v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
257 /* is_increment */ 1);
260 sum += 2 * v_old + 1;
/* The paired store may have written v_old + 1 == v_max + 1; redo
   both stores with wrapped values and back out the optimistic sum. */
262 if (PREDICT_FALSE (v_old + 1 > v_max))
265 sum -= 2 * v_old + 1;
268 set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
273 v = v > v_max ? v_min : v;
274 set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
280 ASSERT (validate_buffer_data (b0, s));
281 ASSERT (validate_buffer_data (b1, s));
/* Scalar cleanup loop. */
284 while (n_buffers > 0)
290 b0 = vlib_get_buffer (vm, buffers[0]);
294 a0 = (void *) b0 + byte_offset;
300 v = v > v_max ? v_min : v;
302 ASSERT (v_old >= v_min && v_old <= v_max);
303 set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
305 ASSERT (validate_buffer_data (b0, s));
/* do_set_random: fill the field with pseudo-random values in
   [v_min, v_max]. Draws raw bytes from vm->random_buffer, masks them to
   the next power of two covering the range (r_mask), and folds values
   above v_max back into range by subtracting v_diff (twice suffices
   since r_mask < 2 * v_diff). Unrolled 2-wide with prefetch. */
314 static_always_inline void
315 do_set_random (pg_main_t * pg,
321 u32 is_net_byte_order,
322 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
324 vlib_main_t *vm = vlib_get_main ();
325 u64 v_diff = v_max - v_min + 1;
326 u64 r_mask = max_pow2 (v_diff) - 1;
/* Reserve enough random bytes for every buffer up front. */
331 random_data = clib_random_buffer_get_data
332 (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
336 while (n_buffers >= 4)
338 vlib_buffer_t *b0, *b1, *b2, *b3;
340 u64 r0 = 0, r1 = 0; /* warnings be gone */
342 b0 = vlib_get_buffer (vm, buffers[0]);
343 b1 = vlib_get_buffer (vm, buffers[1]);
344 b2 = vlib_get_buffer (vm, buffers[2]);
345 b3 = vlib_get_buffer (vm, buffers[3]);
349 a0 = (void *) b0 + byte_offset;
350 a1 = (void *) b1 + byte_offset;
351 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
352 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
359 u##n * r = random_data; \
362 random_data = r + 2; \
374 /* Add power of 2 sized random number which may be out of range. */
378 /* Twice should be enough to reduce to v_min .. v_max range. */
379 v0 = v0 > v_max ? v0 - v_diff : v0;
380 v1 = v1 > v_max ? v1 - v_diff : v1;
381 v0 = v0 > v_max ? v0 - v_diff : v0;
382 v1 = v1 > v_max ? v1 - v_diff : v1;
387 set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
388 /* is_increment */ 0);
390 ASSERT (validate_buffer_data (b0, s));
391 ASSERT (validate_buffer_data (b1, s));
/* Scalar cleanup loop: one random value per remaining buffer. */
394 while (n_buffers > 0)
398 u64 r0 = 0; /* warnings be gone */
400 b0 = vlib_get_buffer (vm, buffers[0]);
404 a0 = (void *) b0 + byte_offset;
411 u##n * r = random_data; \
413 random_data = r + 1; \
425 /* Add power of 2 sized random number which may be out of range. */
428 /* Twice should be enough to reduce to v_min .. v_max range. */
429 v0 = v0 > v_max ? v0 - v_diff : v0;
430 v0 = v0 > v_max ? v0 - v_diff : v0;
435 set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
437 ASSERT (validate_buffer_data (b0, s));
/* Masked read-modify-write helper macro fragment (the #define head is
   elided) followed by setbits_1: clear the bits selected by `mask` in
   the max_bits-wide word at a0 and OR in v0 << shift, preserving the
   surrounding bits. Used for edits that are not byte-aligned. */
445 clib_mem_unaligned (a##i, t) = \
446 clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
452 u64 v_min, u64 v_max,
453 u32 max_bits, u32 n_bits, u64 mask, u32 shift)
455 ASSERT (v0 >= v_min && v0 <= v_max);
456 if (max_bits == BITS (u8))
457 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
459 else if (max_bits == BITS (u16))
463 else if (max_bits == BITS (u32))
467 else if (max_bits == BITS (u64))
/* setbits_2: two-address variant of setbits_1 (v0 -> a0, v1 -> a1).
   As with set_2, v1 may be v_max + 1 when incrementing. */
474 setbits_2 (void *a0, void *a1,
476 u64 v_min, u64 v_max,
477 u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
479 ASSERT (v0 >= v_min && v0 <= v_max);
480 ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
481 if (max_bits == BITS (u8))
483 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
484 ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
487 else if (max_bits == BITS (u16))
492 else if (max_bits == BITS (u32))
497 else if (max_bits == BITS (u64))
/* do_setbits_fixed: like do_set_fixed, but for fields that are not
   byte-aligned — stamps the constant v_min into each buffer using the
   masked setbits_1/setbits_2 read-modify-write helpers. 2-wide unroll
   with prefetch, then a scalar cleanup loop. */
506 static_always_inline void
507 do_setbits_fixed (pg_main_t * pg,
513 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
515 vlib_main_t *vm = vlib_get_main ();
517 while (n_buffers >= 4)
519 vlib_buffer_t *b0, *b1, *b2, *b3;
522 b0 = vlib_get_buffer (vm, buffers[0]);
523 b1 = vlib_get_buffer (vm, buffers[1]);
524 b2 = vlib_get_buffer (vm, buffers[2]);
525 b3 = vlib_get_buffer (vm, buffers[3]);
529 a0 = (void *) b0 + byte_offset;
530 a1 = (void *) b1 + byte_offset;
531 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
532 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
535 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
536 /* is_increment */ 0);
538 ASSERT (validate_buffer_data (b0, s));
539 ASSERT (validate_buffer_data (b1, s));
/* Scalar cleanup loop. */
542 while (n_buffers > 0)
547 b0 = vlib_get_buffer (vm, buffers[0]);
551 a0 = (void *) b0 + byte_offset;
553 setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
554 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_increment: incrementing-sequence variant for non-byte-
   aligned fields; mirrors do_set_increment but uses the masked
   setbits_1/setbits_2 helpers. Returns the next counter value. */
558 static_always_inline u64
559 do_setbits_increment (pg_main_t * pg,
566 u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
568 vlib_main_t *vm = vlib_get_main ();
570 ASSERT (v >= v_min && v <= v_max);
572 while (n_buffers >= 4)
574 vlib_buffer_t *b0, *b1, *b2, *b3;
578 b0 = vlib_get_buffer (vm, buffers[0]);
579 b1 = vlib_get_buffer (vm, buffers[1]);
580 b2 = vlib_get_buffer (vm, buffers[2]);
581 b3 = vlib_get_buffer (vm, buffers[3]);
585 a0 = (void *) b0 + byte_offset;
586 a1 = (void *) b1 + byte_offset;
587 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
588 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Wrap to v_min once past v_max. */
592 v = v > v_max ? v_min : v;
594 v_old + 0, v_old + 1,
595 v_min, v_max, max_bits, n_bits, mask, shift,
596 /* is_increment */ 1);
/* If the second value overflowed the range, redo both stores with
   the wrapped values. */
598 if (PREDICT_FALSE (v_old + 1 > v_max))
601 setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
604 v = v > v_max ? v_min : v;
605 setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
608 ASSERT (validate_buffer_data (b0, s));
609 ASSERT (validate_buffer_data (b1, s));
/* Scalar cleanup loop. */
612 while (n_buffers > 0)
618 b0 = vlib_get_buffer (vm, buffers[0]);
622 a0 = (void *) b0 + byte_offset;
626 v = v > v_max ? v_min : v;
628 ASSERT (v_old >= v_min && v_old <= v_max);
629 setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
631 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_random: random-value variant for non-byte-aligned fields;
   same rejection-free range reduction as do_set_random (mask to power
   of two, subtract v_diff up to twice), storing via setbits_1/2. */
637 static_always_inline void
638 do_setbits_random (pg_main_t * pg,
644 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
646 vlib_main_t *vm = vlib_get_main ();
647 u64 v_diff = v_max - v_min + 1;
648 u64 r_mask = max_pow2 (v_diff) - 1;
/* Reserve random bytes for all buffers (max_bits per value here). */
652 random_data = clib_random_buffer_get_data
653 (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
656 while (n_buffers >= 4)
658 vlib_buffer_t *b0, *b1, *b2, *b3;
660 u64 r0 = 0, r1 = 0; /* warnings be gone */
662 b0 = vlib_get_buffer (vm, buffers[0]);
663 b1 = vlib_get_buffer (vm, buffers[1]);
664 b2 = vlib_get_buffer (vm, buffers[2]);
665 b3 = vlib_get_buffer (vm, buffers[3]);
669 a0 = (void *) b0 + byte_offset;
670 a1 = (void *) b1 + byte_offset;
671 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
672 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
679 u##n * r = random_data; \
682 random_data = r + 2; \
694 /* Add power of 2 sized random number which may be out of range. */
698 /* Twice should be enough to reduce to v_min .. v_max range. */
699 v0 = v0 > v_max ? v0 - v_diff : v0;
700 v1 = v1 > v_max ? v1 - v_diff : v1;
701 v0 = v0 > v_max ? v0 - v_diff : v0;
702 v1 = v1 > v_max ? v1 - v_diff : v1;
704 setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
705 /* is_increment */ 0);
707 ASSERT (validate_buffer_data (b0, s));
708 ASSERT (validate_buffer_data (b1, s));
/* Scalar cleanup loop. */
711 while (n_buffers > 0)
715 u64 r0 = 0; /* warnings be gone */
717 b0 = vlib_get_buffer (vm, buffers[0]);
721 a0 = (void *) b0 + byte_offset;
728 u##n * r = random_data; \
730 random_data = r + 1; \
742 /* Add power of 2 sized random number which may be out of range. */
745 /* Twice should be enough to reduce to v_min .. v_max range. */
746 v0 = v0 > v_max ? v0 - v_diff : v0;
747 v0 = v0 > v_max ? v0 - v_diff : v0;
749 setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
751 ASSERT (validate_buffer_data (b0, s));
/* do_it: top-level dispatcher for one packet-field edit. Converts the
   field's [lo_bit, hi_bit) span into a byte-aligned start plus residual
   bit offsets; when the field is byte aligned (l1 == 0 && h1 == 0) it
   uses the fast do_set_* paths, otherwise it derives mask/shift and
   uses the masked do_setbits_* paths. Returns the updated counter for
   PG_EDIT_INCREMENT edits. */
756 do_it (pg_main_t * pg,
760 u32 lo_bit, u32 hi_bit,
761 u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
763 u32 max_bits, l0, l1, h1, start_bit;
/* Degenerate edit collapses to a fixed write — NOTE(review): the guard
   condition on the preceding (elided) line looks like v_min == v_max;
   confirm against the full source. */
766 edit_type = PG_EDIT_FIXED;
768 l0 = lo_bit / BITS (u8);
769 l1 = lo_bit % BITS (u8);
770 h1 = hi_bit % BITS (u8);
772 start_bit = l0 * BITS (u8);
/* Total width of the byte-aligned window covering the field. */
774 max_bits = hi_bit - start_bit;
775 ASSERT (max_bits <= 64);
779 if (edit_type == PG_EDIT_INCREMENT) \
780 v = do_set_increment (pg, s, buffers, n_buffers, \
783 /* is_net_byte_order */ 1, \
784 /* want sum */ 0, 0, \
787 else if (edit_type == PG_EDIT_RANDOM) \
788 do_set_random (pg, s, buffers, n_buffers, \
791 /* is_net_byte_order */ 1, \
792 /* want sum */ 0, 0, \
794 else /* edit_type == PG_EDIT_FIXED */ \
795 do_set_fixed (pg, s, buffers, n_buffers, \
798 /* is_net_byte_order */ 1, \
/* Byte-aligned fast path. */
802 if (l1 == 0 && h1 == 0)
818 u32 n_bits = max_bits;
/* Round the working width up to a power-of-two machine word (>= 8). */
820 max_bits = clib_max (max_pow2 (n_bits), 8);
822 mask = ((u64) 1 << (u64) n_bits) - 1;
823 mask &= ~(((u64) 1 << (u64) shift) - 1);
825 mask <<= max_bits - n_bits;
826 shift += max_bits - n_bits;
832 if (edit_type == PG_EDIT_INCREMENT) \
833 v = do_setbits_increment (pg, s, buffers, n_buffers, \
834 BITS (u##n), n_bits, \
835 l0, v_min, v_max, v, \
837 else if (edit_type == PG_EDIT_RANDOM) \
838 do_setbits_random (pg, s, buffers, n_buffers, \
839 BITS (u##n), n_bits, \
842 else /* edit_type == PG_EDIT_FIXED */ \
843 do_setbits_fixed (pg, s, buffers, n_buffers, \
844 BITS (u##n), n_bits, \
/* pg_generate_set_lengths: set vlib_buffer_t.current_length on every
   generated buffer according to the stream's packet-size edit type
   (increment / random / fixed), collecting the total byte count in
   length_sum, then credit packets/bytes to the pg interface's RX
   combined counter for this thread. */
863 pg_generate_set_lengths (pg_main_t * pg,
864 pg_stream_t * s, u32 * buffers, u32 n_buffers)
866 u64 v_min, v_max, length_sum;
867 pg_edit_type_t edit_type;
869 v_min = s->min_packet_bytes;
870 v_max = s->max_packet_bytes;
871 edit_type = s->packet_size_edit_type;
/* The length field is edited in place inside vlib_buffer_t, hence the
   STRUCT_SIZE_OF/STRUCT_OFFSET_OF addressing and host byte order. */
873 if (edit_type == PG_EDIT_INCREMENT)
874 s->last_increment_packet_size
875 = do_set_increment (pg, s, buffers, n_buffers,
876 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
877 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
878 /* is_net_byte_order */ 0,
879 /* want sum */ 1, &length_sum,
880 v_min, v_max, s->last_increment_packet_size);
882 else if (edit_type == PG_EDIT_RANDOM)
883 do_set_random (pg, s, buffers, n_buffers,
884 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
885 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
886 /* is_net_byte_order */ 0,
887 /* want sum */ 1, &length_sum,
890 else /* edit_type == PG_EDIT_FIXED */
892 do_set_fixed (pg, s, buffers, n_buffers,
893 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
894 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
895 /* is_net_byte_order */ 0,
/* Fixed size: total is simply v_min bytes per packet. */
897 length_sum = v_min * n_buffers;
901 vnet_main_t *vnm = vnet_get_main ();
902 vnet_interface_main_t *im = &vnm->interface_main;
903 vnet_sw_interface_t *si =
904 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
/* Generated traffic is accounted as RX on the pg interface. */
906 vlib_increment_combined_counter (im->combined_sw_if_counters
907 + VNET_INTERFACE_COUNTER_RX,
908 vlib_get_thread_index (),
909 si->sw_if_index, n_buffers, length_sum);
/* pg_generate_fix_multi_buffer_lengths: for chained (multi-buffer)
   packets, redistribute the whole-packet length stored in the head
   buffer's current_length across the chain in chunks of at most
   s->buffer_bytes, setting/clearing VLIB_BUFFER_NEXT_PRESENT as
   appropriate, and free any chain buffers past the packet's end. */
915 pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
917 u32 * buffers, u32 n_buffers)
919 vlib_main_t *vm = vlib_get_main ();
920 pg_buffer_index_t *pbi;
/* Scratch vector reused across calls to batch the frees.
   NOTE(review): function-local static — confirm single-threaded use. */
922 static u32 *unused_buffers = 0;
924 while (n_buffers > 0)
930 b = vlib_get_buffer (vm, bi);
932 /* Current length here is length of whole packet. */
933 n_bytes_left = b->current_length;
935 pbi = s->buffer_indices;
938 uword n = clib_min (n_bytes_left, s->buffer_bytes);
940 b->current_length = n;
942 if (n_bytes_left > 0)
943 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
945 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
947 /* Return unused buffers to fifos. */
949 vec_add1 (unused_buffers, bi);
952 if (pbi >= vec_end (s->buffer_indices))
956 b = vlib_get_buffer (vm, bi);
958 ASSERT (n_bytes_left == 0);
/* Batch-free everything the shortened packets did not use. */
964 if (vec_len (unused_buffers) > 0)
966 vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
967 _vec_len (unused_buffers) = 0;
/* pg_generate_edit: apply every non-fixed edit (increment / random) of
   the stream to the new buffers via do_it, then invoke each edit
   group's edit_function — iterated from the last group backwards — to
   finalize derived fields (IP lengths, checksums, ...). */
972 pg_generate_edit (pg_main_t * pg,
973 pg_stream_t * s, u32 * buffers, u32 n_buffers)
977 vec_foreach (e, s->non_fixed_edits)
982 case PG_EDIT_INCREMENT:
987 v_min = pg_edit_get_value (e, PG_EDIT_LO);
988 v_max = pg_edit_get_value (e, PG_EDIT_HI);
/* Translate the edit's lsb offset into absolute bit positions
   relative to the start of vlib_buffer_t (data follows the header). */
990 hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
991 + BITS (u8) + e->lsb_bit_offset);
992 lo_bit = hi_bit - e->n_bits;
994 e->last_increment_value
995 = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
996 e->last_increment_value, e->type);
1000 case PG_EDIT_UNSPECIFIED:
1004 /* Should not be any fixed edits left. */
1010 /* Call any edit functions to e.g. completely IP lengths, checksums, ... */
1013 for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1015 pg_edit_group_t *g = s->edit_groups + i;
1016 if (g->edit_function)
1017 g->edit_function (pg, s, g, buffers, n_buffers);
/* pg_set_next_buffer_pointers: chain each buffer in `buffers` to its
   counterpart in `next_buffers` (set VLIB_BUFFER_NEXT_PRESENT and
   next_buffer). Two at a time with prefetch of the next pair, then a
   scalar loop for the remainder. */
1023 pg_set_next_buffer_pointers (pg_main_t * pg,
1025 u32 * buffers, u32 * next_buffers, u32 n_buffers)
1027 vlib_main_t *vm = vlib_get_main ();
1029 while (n_buffers >= 4)
1032 vlib_buffer_t *b0, *b1;
1034 b0 = vlib_get_buffer (vm, buffers[0]);
1035 b1 = vlib_get_buffer (vm, buffers[1]);
1036 ni0 = next_buffers[0];
1037 ni1 = next_buffers[1];
1039 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1040 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1042 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1043 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1044 b0->next_buffer = ni0;
1045 b1->next_buffer = ni1;
/* Scalar cleanup loop. */
1052 while (n_buffers > 0)
1057 b0 = vlib_get_buffer (vm, buffers[0]);
1058 ni0 = next_buffers[0];
1063 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1064 b0->next_buffer = ni0;
/* init_buffers_inline: stamp freshly allocated buffers with the
   stream's buffer flags and RX/TX sw_if_index, and (when set_data is
   nonzero) copy the fixed packet data slice
   [data_offset, data_offset + n_data) into each buffer's data area.
   Not valid for pcap-replay streams (see ASSERT). */
1068 static_always_inline void
1069 init_buffers_inline (vlib_main_t * vm,
1072 u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
1077 ASSERT (s->replay_packet_templates == 0);
1079 data = s->fixed_packet_data + data_offset;
1080 mask = s->fixed_packet_data_mask + data_offset;
/* Clamp n_data to the end of the fixed-data vector. */
1081 if (data + n_data >= vec_end (s->fixed_packet_data))
1082 n_data = (data < vec_end (s->fixed_packet_data)
1083 ? vec_end (s->fixed_packet_data) - data : 0);
1086 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1087 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1096 vlib_buffer_t *b0, *b1;
1098 /* Prefetch next iteration. */
1099 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1100 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1107 b0 = vlib_get_buffer (vm, bi0);
1108 b1 = vlib_get_buffer (vm, bi1);
1109 b0->flags |= s->buffer_flags;
1110 b1->flags |= s->buffer_flags;
1112 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1113 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1115 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1116 vnet_buffer (b1)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
1120 clib_memcpy_fast (b0->data, data, n_data);
1121 clib_memcpy_fast (b1->data, data, n_data);
1125 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1126 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
/* Scalar cleanup: one buffer at a time. */
1139 b0 = vlib_get_buffer (vm, bi0);
1140 b0->flags |= s->buffer_flags;
1141 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1142 vnet_buffer (b0)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
1145 clib_memcpy_fast (b0->data, data, n_data);
1147 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
/* pg_stream_fill_helper: allocate and initialize up to n_alloc buffers
   for one position (bi) within the stream's buffer chain. Links to
   next_buffers when this is not the last chain position, and — only
   for the head position — runs length generation, multi-buffer length
   fixup, and the non-fixed edits. Returns the count actually produced
   (vlib_buffer_alloc may deliver fewer than requested). */
1152 pg_stream_fill_helper (pg_main_t * pg,
1154 pg_buffer_index_t * bi,
1155 u32 * buffers, u32 * next_buffers, u32 n_alloc)
1157 vlib_main_t *vm = vlib_get_main ();
1158 uword is_start_of_packet = bi == s->buffer_indices;
1161 ASSERT (vec_len (s->replay_packet_templates) == 0);
1163 n_allocated = vlib_buffer_alloc (vm, buffers, n_alloc);
1164 if (n_allocated == 0)
1168 * We can't assume we got all the buffers we asked for...
1169 * This never worked until recently.
1171 n_alloc = n_allocated;
1173 /* Reinitialize buffers */
/* Data offset is the chain position times per-buffer payload size. */
1177 n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1182 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1184 if (is_start_of_packet)
1186 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1187 if (vec_len (s->buffer_indices) > 1)
1188 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1190 pg_generate_edit (pg, s, buffers, n_alloc);
/* pg_stream_fill_replay: produce n_alloc packets by replaying captured
   templates (s->replay_packet_templates). Sizes and allocates the exact
   number of buffers needed, copies each template into a chain of
   default-data-size buffers, pushes chain heads onto the pg fifo,
   updates RX interface counters, and advances/wraps the stream's
   replay cursor. Uses a per-thread scratch buffer vector. */
1197 pg_stream_fill_replay (pg_main_t * pg, pg_stream_t * s, u32 n_alloc)
1199 pg_buffer_index_t *bi;
1201 u32 buffer_alloc_request = 0;
1202 u32 buffer_alloc_result;
1203 u32 current_buffer_index;
1205 vlib_main_t *vm = vlib_get_main ();
1206 vnet_main_t *vnm = vnet_get_main ();
1207 u32 buf_sz = vlib_buffer_get_default_data_size (vm);
1208 vnet_interface_main_t *im = &vnm->interface_main;
1209 vnet_sw_interface_t *si;
1211 buffers = pg->replay_buffers_by_thread[vm->thread_index];
1212 vec_reset_length (buffers);
1213 bi = s->buffer_indices;
1216 i = s->current_replay_packet_index;
1217 l = vec_len (s->replay_packet_templates);
1219 /* Figure out how many buffers we need */
/* Each template needs ceil(len / buf_sz) buffers. */
1224 d0 = vec_elt (s->replay_packet_templates, i);
1225 buffer_alloc_request += (vec_len (d0) + (buf_sz - 1)) / buf_sz;
/* Wrap the template cursor around the template vector. */
1227 i = ((i + 1) == l) ? 0 : i + 1;
1231 ASSERT (buffer_alloc_request > 0);
1232 vec_validate (buffers, buffer_alloc_request - 1);
1234 /* Allocate that many buffers */
1235 buffer_alloc_result = vlib_buffer_alloc (vm, buffers, buffer_alloc_request);
/* Partial allocation: warn, free what we got, and bail out. */
1236 if (buffer_alloc_result < buffer_alloc_request)
1238 clib_warning ("alloc failure, got %d not %d", buffer_alloc_result,
1239 buffer_alloc_request);
1240 vlib_buffer_free_no_next (vm, buffers, buffer_alloc_result);
1241 pg->replay_buffers_by_thread[vm->thread_index] = buffers;
1245 /* Now go generate the buffers, and add them to the FIFO */
1248 current_buffer_index = 0;
1249 i = s->current_replay_packet_index;
1250 l = vec_len (s->replay_packet_templates);
1256 u32 bytes_to_copy, bytes_this_chunk;
1259 d0 = vec_elt (s->replay_packet_templates, i);
1261 bytes_to_copy = vec_len (d0);
1263 /* Add head chunk to pg fifo */
1264 clib_fifo_add1 (bi->buffer_fifo, buffers[current_buffer_index]);
/* Copy the template in buf_sz-sized chunks, chaining as needed. */
1267 while (bytes_to_copy)
1269 bytes_this_chunk = clib_min (bytes_to_copy, buf_sz);
1270 ASSERT (current_buffer_index < vec_len (buffers));
1271 b = vlib_get_buffer (vm, buffers[current_buffer_index]);
1272 clib_memcpy_fast (b->data, d0 + data_offset, bytes_this_chunk);
1273 vnet_buffer (b)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1274 vnet_buffer (b)->sw_if_index[VLIB_TX] = s->sw_if_index[VLIB_TX];
1275 b->flags = s->buffer_flags;
1277 b->current_data = 0;
1278 b->current_length = bytes_this_chunk;
1280 not_last = bytes_this_chunk < bytes_to_copy;
/* Not the last chunk: link to the next allocated buffer. */
1283 ASSERT (current_buffer_index < (vec_len (buffers) - 1));
1284 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1285 b->next_buffer = buffers[current_buffer_index + 1];
1287 bytes_to_copy -= bytes_this_chunk;
1288 data_offset += bytes_this_chunk;
1289 current_buffer_index++;
1292 i = ((i + 1) == l) ? 0 : i + 1;
1296 /* Update the interface counters */
1297 si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1299 for (i = 0; i < n_alloc; i++)
1300 l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1301 vlib_increment_combined_counter (im->combined_sw_if_counters
1302 + VNET_INTERFACE_COUNTER_RX,
1303 vlib_get_thread_index (),
1304 si->sw_if_index, n_alloc, l);
/* Advance and wrap the replay cursor for the next fill. */
1306 s->current_replay_packet_index += n_alloc;
1307 s->current_replay_packet_index %= vec_len (s->replay_packet_templates);
1309 pg->replay_buffers_by_thread[vm->thread_index] = buffers;
/* pg_stream_fill: top up the stream's buffer fifos so at least
   n_buffers packets are ready. Computes how many more are needed
   (rounded up to VLIB_FRAME_SIZE, capped by n_packets_limit), hands
   replay streams to pg_stream_fill_replay, and otherwise fills each
   chain position's fifo back-to-front so next-buffer pointers exist
   before their predecessors are generated. Returns packets available. */
1315 pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1317 pg_buffer_index_t *bi;
1318 word i, n_in_fifo, n_alloc, n_free, n_added;
1319 u32 *tail, *start, *end, *last_tail, *last_start;
1321 bi = s->buffer_indices;
1323 n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
/* Already have enough queued — nothing to do. */
1324 if (n_in_fifo >= n_buffers)
1327 n_alloc = n_buffers - n_in_fifo;
1329 /* Round up, but never generate more than limit. */
1330 n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1332 if (s->n_packets_limit > 0
1333 && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1335 n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1341 * Handle pcap replay directly
1343 if (s->replay_packet_templates)
1344 return pg_stream_fill_replay (pg, s, n_alloc);
1346 /* All buffer fifos should have the same size. */
1350 vec_foreach (bi, s->buffer_indices)
1352 e = clib_fifo_elts (bi->buffer_fifo);
1353 if (bi == s->buffer_indices)
1359 last_tail = last_start = 0;
/* Fill chain positions from last to first so each position can link
   to the buffers generated for the position after it. */
1362 for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1364 bi = vec_elt_at_index (s->buffer_indices, i);
1366 n_free = clib_fifo_free_elts (bi->buffer_fifo);
1367 if (n_free < n_alloc)
1368 clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1370 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1371 start = bi->buffer_fifo;
1372 end = clib_fifo_end (bi->buffer_fifo);
/* Contiguous fifo region: one fill; otherwise fill the wrap in two. */
1374 if (tail + n_alloc <= end)
1377 pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
1381 u32 n = clib_min (end - tail, n_alloc);
1382 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1384 if (n_added == n && n_alloc > n_added)
1386 n_added += pg_stream_fill_helper
1387 (pg, s, bi, start, last_start, n_alloc - n_added);
/* Allocation fell short: give back the unfilled fifo slots. */
1391 if (PREDICT_FALSE (n_added < n_alloc))
1392 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1397 /* Verify that pkts in the fifo are properly allocated */
1400 return n_in_fifo + n_added;
1410 /* Use pre data for packet data. */
1411 vlib_buffer_t buffer;
/* format_pg_input_trace: vlib trace formatter for pg input. Prints the
   stream (by name when the stream still exists, else by index), packet
   length, RX sw_if_index, the saved buffer metadata, and the packet's
   leading bytes — via the stream node's format_buffer when available,
   else as hex. */
1415 format_pg_input_trace (u8 * s, va_list * va)
1417 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
1418 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1419 pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1420 pg_main_t *pg = &pg_main;
1421 pg_stream_t *stream;
1423 u32 indent = format_get_indent (s);
/* The stream may have been deleted since the trace was captured. */
1426 if (!pool_is_free_index (pg->streams, t->stream_index))
1427 stream = pool_elt_at_index (pg->streams, t->stream_index)
1430 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1432 s = format (s, "stream %d", t->stream_index);
1434 s = format (s, ", %d bytes", t->packet_length);
1435 s = format (s, ", sw_if_index %d", t->sw_if_index);
1437 s = format (s, "\n%U%U",
1438 format_white_space, indent, format_vnet_buffer, &t->buffer);
1440 s = format (s, "\n%U", format_white_space, indent);
1444 n = vlib_get_node (vm, stream->node_index);
/* Prefer the node's own packet formatter when it provides one. */
1446 if (n && n->format_buffer)
1447 s = format (s, "%U", n->format_buffer,
1448 t->buffer.pre_data, sizeof (t->buffer.pre_data));
1450 s = format (s, "%U",
1451 format_hex_bytes, t->buffer.pre_data,
1452 ARRAY_LEN (t->buffer.pre_data));
/* pg_input_trace: record trace entries for generated packets. For each
   traced buffer, stores the stream index, chain length, RX sw_if_index,
   the buffer header (minus pre_data), and the first bytes of packet
   data into pre_data for later formatting. Two at a time, then one. */
1457 pg_input_trace (pg_main_t * pg,
1458 vlib_node_runtime_t * node, u32 stream_index, u32 next_index,
1459 u32 * buffers, u32 n_buffers)
1461 vlib_main_t *vm = vlib_get_main ();
1470 vlib_buffer_t *b0, *b1;
1471 pg_input_trace_t *t0, *t1;
1478 b0 = vlib_get_buffer (vm, bi0);
1479 b1 = vlib_get_buffer (vm, bi1);
1481 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1482 vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1484 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1485 t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1487 t0->stream_index = stream_index;
1488 t1->stream_index = stream_index;
1490 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1491 t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1493 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1494 t1->sw_if_index = vnet_buffer (b1)->sw_if_index[VLIB_RX];
/* Snapshot the buffer header; pre_data is filled separately below. */
1496 clib_memcpy_fast (&t0->buffer, b0,
1497 sizeof (b0[0]) - sizeof (b0->pre_data));
1498 clib_memcpy_fast (&t1->buffer, b1,
1499 sizeof (b1[0]) - sizeof (b1->pre_data));
/* Save the leading packet bytes for the trace formatter. */
1501 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1502 sizeof (t0->buffer.pre_data));
1503 clib_memcpy_fast (t1->buffer.pre_data, b1->data,
1504 sizeof (t1->buffer.pre_data));
/* Scalar cleanup: trace remaining buffers one at a time. */
1511 pg_input_trace_t *t0;
1517 b0 = vlib_get_buffer (vm, bi0);
1519 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1520 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1522 t0->stream_index = stream_index;
1523 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1524 t0->sw_if_index = vnet_buffer (b0)->sw_if_index[VLIB_RX];
1525 clib_memcpy_fast (&t0->buffer, b0,
1526 sizeof (b0[0]) - sizeof (b0->pre_data));
1527 clib_memcpy_fast (t0->buffer.pre_data, b0->data,
1528 sizeof (t0->buffer.pre_data));
/* fill_gso_buffer_flags: parse each generated packet's Ethernet / VLAN
   (up to two tags) / IP4-IP6 headers, record l2/l3/l4 header offsets in
   buffer metadata, and set checksum-offload and GSO flags: TCP gets
   TCP_CKSUM + GSO with gso_size = packet_data_size, UDP gets UDP_CKSUM.
   NOTE(review): assumes packets start with an Ethernet header. */
1532 static_always_inline void
1533 fill_gso_buffer_flags (vlib_main_t * vm, u32 * buffers, u32 n_buffers,
1534 u32 packet_data_size)
1537 for (int i = 0; i < n_buffers; i++)
1539 vlib_buffer_t *b0 = vlib_get_buffer (vm, buffers[i]);
1543 ethernet_header_t *eh =
1544 (ethernet_header_t *) vlib_buffer_get_current (b0);
1545 u16 ethertype = clib_net_to_host_u16 (eh->type);
1546 u16 l2hdr_sz = sizeof (ethernet_header_t);
/* Skip one VLAN tag, or two when QinQ. */
1548 if (ethernet_frame_is_tagged (ethertype))
1550 ethernet_vlan_header_t *vlan = (ethernet_vlan_header_t *) (eh + 1);
1552 ethertype = clib_net_to_host_u16 (vlan->type);
1553 l2hdr_sz += sizeof (*vlan);
1554 if (ethertype == ETHERNET_TYPE_VLAN)
1557 ethertype = clib_net_to_host_u16 (vlan->type);
1558 l2hdr_sz += sizeof (*vlan);
1562 vnet_buffer (b0)->l2_hdr_offset = 0;
1563 vnet_buffer (b0)->l3_hdr_offset = l2hdr_sz;
1565 if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP4))
1568 (ip4_header_t *) (vlib_buffer_get_current (b0) + l2hdr_sz);
1569 vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + ip4_header_bytes (ip4);
1570 l4_proto = ip4->protocol;
1572 (VNET_BUFFER_F_IS_IP4 | VNET_BUFFER_F_OFFLOAD_IP_CKSUM);
1573 b0->flags |= (VNET_BUFFER_F_L2_HDR_OFFSET_VALID
1574 | VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
1575 VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
1577 else if (PREDICT_TRUE (ethertype == ETHERNET_TYPE_IP6))
1580 (ip6_header_t *) (vlib_buffer_get_current (b0) + l2hdr_sz);
1581 vnet_buffer (b0)->l4_hdr_offset = l2hdr_sz + sizeof (ip6_header_t);
1582 /* FIXME IPv6 EH traversal */
1583 l4_proto = ip6->protocol;
1585 (VNET_BUFFER_F_IS_IP6 | VNET_BUFFER_F_L2_HDR_OFFSET_VALID |
1586 VNET_BUFFER_F_L3_HDR_OFFSET_VALID |
1587 VNET_BUFFER_F_L4_HDR_OFFSET_VALID);
1589 if (l4_proto == IP_PROTOCOL_TCP)
1591 b0->flags |= (VNET_BUFFER_F_OFFLOAD_TCP_CKSUM | VNET_BUFFER_F_GSO);
1592 tcp_header_t *tcp = (tcp_header_t *) (vlib_buffer_get_current (b0) +
1594 (b0)->l4_hdr_offset);
1595 l4_hdr_sz = tcp_header_bytes (tcp);
1597 vnet_buffer2 (b0)->gso_l4_hdr_sz = l4_hdr_sz;
1598 vnet_buffer2 (b0)->gso_size = packet_data_size;
1600 else if (l4_proto == IP_PROTOCOL_UDP)
1602 b0->flags |= VNET_BUFFER_F_OFFLOAD_UDP_CKSUM;
1603 udp_header_t *udp = (udp_header_t *) (vlib_buffer_get_current (b0) +
1605 (b0)->l4_hdr_offset);
1606 vnet_buffer2 (b0)->gso_l4_hdr_sz = sizeof (*udp);
/*
 * pg_generate_packets: pull up to n_packets_to_generate pre-built buffer
 * indices for stream s out of the stream's buffer FIFO and enqueue them,
 * one next-frame at a time, to the node's next_index (fast-pathed for
 * ethernet-input with a single-sw_if_index frame hint). Applies the
 * device-input feature arc config, optional GSO metadata fill, and packet
 * tracing. Returns the number of packets actually generated.
 *
 * FIX(review): line 1641 contained mojibake "¤t_config_index" — an
 * HTML-entity corruption of "&current_config_index" ("&curren" rendered as
 * "¤"). Restored the address-of argument expected by vnet_get_config_data.
 *
 * NOTE(review): this view has extraction gaps (missing brace lines and the
 * declarations of i, b, f, and some if-conditions); only the visible lines
 * are documented, and only the mojibake token was changed.
 */
1613 pg_generate_packets (vlib_node_runtime_t * node,
1615 pg_stream_t * s, uword n_packets_to_generate)
1617 vlib_main_t *vm = vlib_get_main ();
1618 u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1619 uword n_packets_generated;
1620 pg_buffer_index_t *bi, *bi0;
1621 u32 next_index = s->next_index;
1622 vnet_feature_main_t *fm = &feature_main;
1623 vnet_feature_config_main_t *cm;
1624 u8 feature_arc_index = fm->device_input_feature_arc_index;
1625 cm = &fm->feature_config_mains[feature_arc_index];
/* ~0 sentinel: no feature arc configured on this interface. */
1626 u32 current_config_index = ~(u32) 0;
1627 pg_interface_t *pi = pool_elt_at_index (pg->interfaces, s->pg_if_index);
1630 bi0 = s->buffer_indices;
/* Top up the FIFO, then never generate more than it actually holds. */
1632 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1633 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1634 n_packets_generated = 0;
/* If the device-input feature arc is armed on this interface, resolve the
   first feature's config index and next node. */
1637 (vnet_have_features (feature_arc_index, s->sw_if_index[VLIB_RX])))
1639 current_config_index =
1640 vec_elt (cm->config_index_by_sw_if_index, s->sw_if_index[VLIB_RX]);
1641 vnet_get_config_data (&cm->config_main, &current_config_index,
1645 while (n_packets_to_generate > 0)
1647 u32 *head, *start, *end;
/* Fast path: hand ethernet-input a frame marked as coming from a single
   sw_if_index so it can skip per-packet interface lookups. */
1649 if (PREDICT_TRUE (next_index == VNET_DEVICE_INPUT_NEXT_ETHERNET_INPUT))
1651 vlib_next_frame_t *nf;
1653 ethernet_input_frame_t *ef;
1654 vlib_get_new_next_frame (vm, node, next_index, to_next, n_left);
1655 nf = vlib_node_runtime_get_next_frame (vm, node, next_index);
1656 f = vlib_get_frame (vm, nf->frame);
1657 f->flags = ETH_INPUT_FRAME_F_SINGLE_SW_IF_IDX;
1659 ef = vlib_frame_scalar_args (f);
1660 ef->sw_if_index = pi->sw_if_index;
1661 ef->hw_if_index = pi->hw_if_index;
/* Frame carries scalar args; don't let later code append to it. */
1662 vlib_frame_no_append (f);
1665 vlib_get_next_frame (vm, node, next_index, to_next, n_left);
1667 n_this_frame = n_packets_to_generate;
1668 if (n_this_frame > n_left)
1669 n_this_frame = n_left;
/* Copy buffer indices out of the circular FIFO, handling wrap-around. */
1671 start = bi0->buffer_fifo;
1672 end = clib_fifo_end (bi0->buffer_fifo);
1673 head = clib_fifo_head (bi0->buffer_fifo);
1675 if (head + n_this_frame <= end)
1676 vlib_buffer_copy_indices (to_next, head, n_this_frame);
1680 vlib_buffer_copy_indices (to_next + 0, head, n);
1681 vlib_buffer_copy_indices (to_next + n, start, n_this_frame - n);
/* Non-replay streams advance every per-size FIFO in lock-step; replay
   streams only consume from the primary FIFO. */
1684 if (s->replay_packet_templates == 0)
1686 vec_foreach (bi, s->buffer_indices)
1687 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1691 clib_fifo_advance_head (bi0->buffer_fifo, n_this_frame);
/* Stamp feature-arc routing info on each buffer when the arc is armed. */
1694 if (current_config_index != ~(u32) 0)
1695 for (i = 0; i < n_this_frame; i++)
1698 b = vlib_get_buffer (vm, to_next[i]);
1699 b->current_config_index = current_config_index;
1700 vnet_buffer (b)->feature_arc_index = feature_arc_index;
1703 if (pi->gso_enabled)
1704 fill_gso_buffer_flags (vm, to_next, n_this_frame, pi->gso_size);
/* Trace at most the remaining trace budget worth of packets. */
1706 n_trace = vlib_get_trace_count (vm, node);
1709 u32 n = clib_min (n_trace, n_this_frame);
1710 pg_input_trace (pg, node, s - pg->streams, next_index, to_next, n);
1711 vlib_set_trace_count (vm, node, n_trace - n);
1713 n_packets_to_generate -= n_this_frame;
1714 n_packets_generated += n_this_frame;
1715 n_left -= n_this_frame;
/* Debug-build sanity check: chained buffers must meet the minimum
   per-segment length. */
1721 for (i = 0; i < n_this_frame; i++)
1723 b = vlib_get_buffer (vm, to_next[i]);
1724 ASSERT ((b->flags & VLIB_BUFFER_NEXT_PRESENT) == 0 ||
1725 b->current_length >= VLIB_BUFFER_MIN_CHAIN_SEG_SIZE);
1728 vlib_put_next_frame (vm, node, next_index, n_left);
1731 return n_packets_generated;
/*
 * pg_input_stream: per-stream dispatch wrapper. Disables the stream when
 * its packet limit has been reached, applies the configured rate limit via
 * a fractional packet accumulator, clamps to the fixed packet limit and to
 * one frame's worth, then calls pg_generate_packets and updates the
 * stream's generated-packet count.
 *
 * NOTE(review): extraction gaps — the return type line, the declarations
 * of time_now / dt / n_packets, and several brace/return lines are not
 * visible in this view.
 */
1735 pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
1737 vlib_main_t *vm = vlib_get_main ();
/* Fixed limit reached: turn the stream off. */
1741 if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
1743 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1747 /* Apply rate limit. */
1748 time_now = vlib_time_now (vm);
/* First call for this stream: start the clock, yielding dt == 0. */
1749 if (s->time_last_generate == 0)
1750 s->time_last_generate = time_now;
1752 dt = time_now - s->time_last_generate;
1753 s->time_last_generate = time_now;
1755 n_packets = VLIB_FRAME_SIZE;
1756 if (s->rate_packets_per_second > 0)
/* Accumulate fractional packets; emit the integer part this round. */
1758 s->packet_accumulator += dt * s->rate_packets_per_second;
1759 n_packets = s->packet_accumulator;
1761 /* Never allow accumulator to grow if we get behind. */
1762 s->packet_accumulator -= n_packets;
1765 /* Apply fixed limit. */
1766 if (s->n_packets_limit > 0
1767 && s->n_packets_generated + n_packets > s->n_packets_limit)
1768 n_packets = s->n_packets_limit - s->n_packets_generated;
1770 /* Generate up to one frame's worth of packets. */
1771 if (n_packets > s->n_max_frame)
1772 n_packets = s->n_max_frame;
1775 n_packets = pg_generate_packets (node, pg, s, n_packets);
1777 s->n_packets_generated += n_packets;
/*
 * pg_input: input-node function. Iterates the bitmap of streams enabled on
 * the calling worker thread and accumulates the per-stream packet counts
 * from pg_input_stream.
 *
 * NOTE(review): extraction gaps — the return type line, the loop-variable
 * declaration, the bitmap-foreach closing tokens and the final return are
 * not visible in this view.
 */
1783 pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
1786 pg_main_t *pg = &pg_main;
1787 uword n_packets = 0;
/* Main thread uses worker index 0; workers use their own slot. */
1788 u32 worker_index = 0;
1790 if (vlib_num_workers ())
1791 worker_index = vlib_get_current_worker_index ();
/* Service every stream enabled on this thread. */
1794 clib_bitmap_foreach (i, pg->enabled_streams[worker_index], ({
1795 pg_stream_t *s = vec_elt_at_index (pg->streams, i);
1796 n_packets += pg_input_stream (node, pg, s);
/*
 * Node registration for the packet-generator input node. Registered as a
 * sibling of "device-input" so it shares that node's next-node indices,
 * and created disabled — pg_stream_enable_disable flips its state when a
 * stream becomes active.
 *
 * NOTE(review): extraction gaps — some initializers (e.g. the .name field)
 * are not visible in this view.
 */
1804 VLIB_REGISTER_NODE (pg_input_node) = {
1805 .function = pg_input,
1806 .flags = VLIB_NODE_FLAG_TRACE_SUPPORTED,
1808 .sibling_of = "device-input",
1809 .type = VLIB_NODE_TYPE_INPUT,
1811 .format_trace = format_pg_input_trace,
1813 /* Input node will be left disabled until a stream is active. */
1814 .state = VLIB_NODE_STATE_DISABLED,
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */