2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
16 * pg_input.c: buffer generator input
18 * Copyright (c) 2008 Eliot Dresselhaus
20 * Permission is hereby granted, free of charge, to any person obtaining
21 * a copy of this software and associated documentation files (the
22 * "Software"), to deal in the Software without restriction, including
23 * without limitation the rights to use, copy, modify, merge, publish,
24 * distribute, sublicense, and/or sell copies of the Software, and to
25 * permit persons to whom the Software is furnished to do so, subject to
26 * the following conditions:
28 * The above copyright notice and this permission notice shall be
29 * included in all copies or substantial portions of the Software.
31 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
32 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
33 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
34 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
35 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
36 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
37 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
40 #include <vlib/vlib.h>
41 #include <vnet/pg/pg.h>
42 #include <vnet/vnet.h>
45 #include <vnet/devices/dpdk/dpdk.h>
/* pg_set_mbuf_metadata: after the generator rewrites buffer lengths,
 * re-synchronize each buffer's underlying DPDK rte_mbuf metadata
 * (pkt_len / data_len / data_off) with the vlib_buffer chain state.
 * NOTE(review): this extract is missing interior lines (locals, braces);
 * comments describe only what is visible here. */
49 pg_set_mbuf_metadata (pg_main_t * pg, u32 * buffers, u32 n_alloc)
52 vlib_main_t * vm = vlib_get_main();
61 for (i = 0; i < n_alloc; i++)
63 b = vlib_get_buffer (vm, buffers[i]);
/* rte_mbuf header sits immediately before the vlib_buffer in memory. */
64 mb = ((struct rte_mbuf *)b) - 1;
/* delta = how much the vlib chain length differs from what the mbuf says. */
66 delta = vlib_buffer_length_in_chain (vm, b) - (i16) mb->pkt_len;
67 new_data_len = (u16)((i16) mb->data_len + delta);
68 new_pkt_len = (u16)((i16) mb->pkt_len + delta);
70 mb->data_len = new_data_len;
71 mb->pkt_len = new_pkt_len;
/* data_off tracks the vlib current_data offset past the mbuf headroom. */
72 mb->data_off = (u16)((RTE_PKTMBUF_HEADROOM) + b->current_data);
/* validate_buffer_data2: debug check that a buffer's payload matches the
 * stream's fixed packet data under the per-byte mask, starting at
 * data_offset for n_bytes.  On mismatch, logs the buffer, the differing
 * index, and hex dumps of actual/mask/expected bytes.
 * NOTE(review): extract is incomplete (locals bd/i and return paths are
 * on missing lines). */
78 validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
79 u32 data_offset, u32 n_bytes)
81 pd = s->fixed_packet_data + data_offset;
82 pm = s->fixed_packet_data_mask + data_offset;
/* Clamp n_bytes so the compare never runs past the template vector. */
88 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
89 n_bytes = (pd < vec_end (s->fixed_packet_data)
90 ? vec_end (s->fixed_packet_data) - pd
/* Byte-wise masked comparison of actual data (bd) vs template (pd). */
93 for (i = 0; i < n_bytes; i++)
94 if ((bd[i] & pm[i]) != pd[i])
100 clib_warning ("buffer %U", format_vlib_buffer, b);
101 clib_warning ("differ at index %d", i);
102 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
103 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
104 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
/* validate_buffer_data: convenience wrapper — validate the whole buffer
 * (offset 0, s->buffer_bytes) against the stream template. */
109 validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
110 { return validate_buffer_data2 (b, s, 0, s->buffer_bytes); }
/* set_1 (signature head on missing lines): store one value v0 of width
 * n_bits (8/16/32/64) at unaligned address a0, byte-swapping to network
 * order when is_net_byte_order is set.  v0 must lie in [v_min, v_max]. */
115 u64 v_min, u64 v_max,
117 u32 is_net_byte_order)
119 ASSERT (v0 >= v_min && v0 <= v_max);
/* u8 case: single byte, byte order irrelevant (store on missing line). */
120 if (n_bits == BITS (u8))
124 else if (n_bits == BITS (u16))
126 if (is_net_byte_order)
127 v0 = clib_host_to_net_u16 (v0);
128 clib_mem_unaligned (a0, u16) = v0;
130 else if (n_bits == BITS (u32))
132 if (is_net_byte_order)
133 v0 = clib_host_to_net_u32 (v0);
134 clib_mem_unaligned (a0, u32) = v0;
136 else if (n_bits == BITS (u64))
138 if (is_net_byte_order)
139 v0 = clib_host_to_net_u64 (v0);
140 clib_mem_unaligned (a0, u64) = v0;
/* set_2: dual-lane variant of set_1 — store v0 at a0 and v1 at a1 with
 * the same width/byte-order handling.  When is_increment is set, v1 is
 * allowed to exceed v_max by one (it is the post-increment value). */
145 set_2 (void * a0, void * a1,
147 u64 v_min, u64 v_max,
149 u32 is_net_byte_order,
152 ASSERT (v0 >= v_min && v0 <= v_max);
153 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
154 if (n_bits == BITS (u8))
159 else if (n_bits == BITS (u16))
161 if (is_net_byte_order)
163 v0 = clib_host_to_net_u16 (v0);
164 v1 = clib_host_to_net_u16 (v1);
166 clib_mem_unaligned (a0, u16) = v0;
167 clib_mem_unaligned (a1, u16) = v1;
169 else if (n_bits == BITS (u32))
171 if (is_net_byte_order)
173 v0 = clib_host_to_net_u32 (v0);
174 v1 = clib_host_to_net_u32 (v1);
176 clib_mem_unaligned (a0, u32) = v0;
177 clib_mem_unaligned (a1, u32) = v1;
179 else if (n_bits == BITS (u64))
181 if (is_net_byte_order)
183 v0 = clib_host_to_net_u64 (v0);
184 v1 = clib_host_to_net_u64 (v1);
186 clib_mem_unaligned (a0, u64) = v0;
187 clib_mem_unaligned (a1, u64) = v1;
/* do_set_fixed: write the fixed (PG_EDIT_FIXED) value v_min into the
 * edit field (at byte_offset within each vlib_buffer) for every buffer.
 * Processes 4 buffers per iteration with prefetch of buffers 2/3, then a
 * scalar tail loop.  NOTE(review): extract incomplete — buffer-pointer
 * advance and loop decrements are on missing lines. */
191 static_always_inline void
192 do_set_fixed (pg_main_t * pg,
198 u32 is_net_byte_order,
199 u64 v_min, u64 v_max)
202 vlib_main_t * vm = pg->vlib_main;
204 while (n_buffers >= 4)
206 vlib_buffer_t * b0, * b1, * b2, * b3;
209 b0 = vlib_get_buffer (vm, buffers[0]);
210 b1 = vlib_get_buffer (vm, buffers[1]);
211 b2 = vlib_get_buffer (vm, buffers[2]);
212 b3 = vlib_get_buffer (vm, buffers[3]);
216 a0 = (void *) b0 + byte_offset;
217 a1 = (void *) b1 + byte_offset;
/* Prefetch the write targets for the next pair. */
218 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
219 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
221 set_2 (a0, a1, v_min, v_min,
223 n_bits, is_net_byte_order,
224 /* is_increment */ 0);
226 ASSERT (validate_buffer_data (b0, s));
227 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail: one buffer at a time. */
230 while (n_buffers > 0)
235 b0 = vlib_get_buffer (vm, buffers[0]);
239 a0 = (void *) b0 + byte_offset;
243 n_bits, is_net_byte_order);
245 ASSERT (validate_buffer_data (b0, s));
/* do_set_increment: write an incrementing value sequence (PG_EDIT_INCREMENT)
 * into the edit field of each buffer, wrapping from v_max back to v_min.
 * Returns the next value to use (u64) so the stream can resume later.
 * 4-wide main loop with prefetch, scalar tail.
 * NOTE(review): extract incomplete — v_old bookkeeping and set_2 call head
 * are partially on missing lines. */
249 static_always_inline u64
250 do_set_increment (pg_main_t * pg,
256 u32 is_net_byte_order,
259 u64 v_min, u64 v_max,
262 vlib_main_t * vm = pg->vlib_main;
265 ASSERT (v >= v_min && v <= v_max);
267 while (n_buffers >= 4)
269 vlib_buffer_t * b0, * b1, * b2, * b3;
273 b0 = vlib_get_buffer (vm, buffers[0]);
274 b1 = vlib_get_buffer (vm, buffers[1]);
275 b2 = vlib_get_buffer (vm, buffers[2]);
276 b3 = vlib_get_buffer (vm, buffers[3]);
280 a0 = (void *) b0 + byte_offset;
281 a1 = (void *) b1 + byte_offset;
282 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
283 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Wrap to v_min once the counter passes v_max. */
287 v = v > v_max ? v_min : v;
289 v_old + 0, v_old + 1,
291 n_bits, is_net_byte_order,
292 /* is_increment */ 1);
/* If v_old+1 overflowed past v_max, redo both stores with wrapped values. */
297 if (PREDICT_FALSE (v_old + 1 > v_max))
303 set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
308 v = v > v_max ? v_min : v;
309 set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
315 ASSERT (validate_buffer_data (b0, s));
316 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail. */
319 while (n_buffers > 0)
325 b0 = vlib_get_buffer (vm, buffers[0]);
329 a0 = (void *) b0 + byte_offset;
335 v = v > v_max ? v_min : v;
337 ASSERT (v_old >= v_min && v_old <= v_max);
338 set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
340 ASSERT (validate_buffer_data (b0, s));
/* do_set_random: write uniformly-distributed random values in
 * [v_min, v_max] into the edit field of each buffer (PG_EDIT_RANDOM).
 * Draws raw bits from the vlib random buffer, masks to the next power of
 * two covering the range, then reduces out-of-range values by subtracting
 * the range size (twice suffices since mask < 2*range).
 * NOTE(review): extract incomplete — the per-width macro dispatch bodies
 * are on missing lines. */
349 static_always_inline void
350 do_set_random (pg_main_t * pg,
356 u32 is_net_byte_order,
359 u64 v_min, u64 v_max)
362 vlib_main_t * vm = pg->vlib_main;
363 u64 v_diff = v_max - v_min + 1;
/* r_mask: smallest (pow2-1) mask covering the value range. */
364 u64 r_mask = max_pow2 (v_diff) - 1;
369 random_data = clib_random_buffer_get_data
370 (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
374 while (n_buffers >= 4)
376 vlib_buffer_t * b0, * b1, * b2, * b3;
378 u64 r0=0, r1=0; /* warnings be gone */
380 b0 = vlib_get_buffer (vm, buffers[0]);
381 b1 = vlib_get_buffer (vm, buffers[1]);
382 b2 = vlib_get_buffer (vm, buffers[2]);
383 b3 = vlib_get_buffer (vm, buffers[3]);
387 a0 = (void *) b0 + byte_offset;
388 a1 = (void *) b1 + byte_offset;
389 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
390 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Macro fragment: consume two n-bit random words per iteration. */
397 u##n * r = random_data; \
400 random_data = r + 2; \
412 /* Add power of 2 sized random number which may be out of range. */
416 /* Twice should be enough to reduce to v_min .. v_max range. */
417 v0 = v0 > v_max ? v0 - v_diff : v0;
418 v1 = v1 > v_max ? v1 - v_diff : v1;
419 v0 = v0 > v_max ? v0 - v_diff : v0;
420 v1 = v1 > v_max ? v1 - v_diff : v1;
428 n_bits, is_net_byte_order,
429 /* is_increment */ 0);
431 ASSERT (validate_buffer_data (b0, s));
432 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail: one random word per remaining buffer. */
435 while (n_buffers > 0)
439 u64 r0 = 0; /* warnings be gone */
441 b0 = vlib_get_buffer (vm, buffers[0]);
445 a0 = (void *) b0 + byte_offset;
452 u##n * r = random_data; \
454 random_data = r + 1; \
466 /* Add power of 2 sized random number which may be out of range. */
469 /* Twice should be enough to reduce to v_min .. v_max range. */
470 v0 = v0 > v_max ? v0 - v_diff : v0;
471 v0 = v0 > v_max ? v0 - v_diff : v0;
476 set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
478 ASSERT (validate_buffer_data (b0, s));
/* Macro fragment (head on missing lines): read-modify-write a masked bit
 * field of type t at a##i in network byte order. */
486 clib_mem_unaligned (a##i, t) = \
487 clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
/* setbits_1: set a sub-byte/sub-word bit field (given mask and shift)
 * within a max_bits-wide container at a0 to value v0.
 * NOTE(review): u16/u32/u64 branches use the macro above; those expansion
 * lines are missing from this extract. */
491 setbits_1 (void * a0,
493 u64 v_min, u64 v_max,
499 ASSERT (v0 >= v_min && v0 <= v_max);
500 if (max_bits == BITS (u8))
501 ((u8 *) a0)[0] = (((u8 *) a0)[0] &~ mask) | (v0 << shift);
503 else if (max_bits == BITS (u16))
507 else if (max_bits == BITS (u32))
511 else if (max_bits == BITS (u64))
/* setbits_2: dual-lane variant of setbits_1 — set the same bit field in
 * two containers (a0/a1) to v0/v1.  With is_increment set, v1 may exceed
 * v_max by one (post-increment value). */
518 setbits_2 (void * a0, void * a1,
520 u64 v_min, u64 v_max,
527 ASSERT (v0 >= v_min && v0 <= v_max);
528 ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
529 if (max_bits == BITS (u8))
531 ((u8 *) a0)[0] = (((u8 *) a0)[0] &~ mask) | (v0 << shift);
532 ((u8 *) a1)[0] = (((u8 *) a1)[0] &~ mask) | (v1 << shift);
/* Wider widths use the network-byte-order RMW macro (lines missing here). */
535 else if (max_bits == BITS (u16))
540 else if (max_bits == BITS (u32))
545 else if (max_bits == BITS (u64))
/* do_setbits_fixed: bit-field analogue of do_set_fixed — write the fixed
 * value v_min into a masked bit field of every buffer.  4-wide main loop
 * with prefetch, scalar tail.  NOTE(review): extract incomplete. */
554 static_always_inline void
555 do_setbits_fixed (pg_main_t * pg,
562 u64 v_min, u64 v_max,
567 vlib_main_t * vm = pg->vlib_main;
569 while (n_buffers >= 4)
571 vlib_buffer_t * b0, * b1, * b2, * b3;
574 b0 = vlib_get_buffer (vm, buffers[0]);
575 b1 = vlib_get_buffer (vm, buffers[1]);
576 b2 = vlib_get_buffer (vm, buffers[2]);
577 b3 = vlib_get_buffer (vm, buffers[3]);
581 a0 = (void *) b0 + byte_offset;
582 a1 = (void *) b1 + byte_offset;
583 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
584 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
589 max_bits, n_bits, mask, shift,
590 /* is_increment */ 0);
592 ASSERT (validate_buffer_data (b0, s));
593 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail. */
596 while (n_buffers > 0)
601 b0 = vlib_get_buffer (vm, buffers[0]);
605 a0 = (void *) b0 + byte_offset;
607 setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
608 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_increment: bit-field analogue of do_set_increment — write an
 * incrementing sequence into a masked bit field, wrapping at v_max.
 * Returns the next value so the stream can resume.
 * NOTE(review): extract incomplete — v_old setup lines are missing. */
612 static_always_inline u64
613 do_setbits_increment (pg_main_t * pg,
620 u64 v_min, u64 v_max,
625 vlib_main_t * vm = pg->vlib_main;
627 ASSERT (v >= v_min && v <= v_max);
629 while (n_buffers >= 4)
631 vlib_buffer_t * b0, * b1, * b2, * b3;
635 b0 = vlib_get_buffer (vm, buffers[0]);
636 b1 = vlib_get_buffer (vm, buffers[1]);
637 b2 = vlib_get_buffer (vm, buffers[2]);
638 b3 = vlib_get_buffer (vm, buffers[3]);
642 a0 = (void *) b0 + byte_offset;
643 a1 = (void *) b1 + byte_offset;
644 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
645 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Wrap the counter before writing the pair. */
649 v = v > v_max ? v_min : v;
651 v_old + 0, v_old + 1,
653 max_bits, n_bits, mask, shift,
654 /* is_increment */ 1);
/* Redo both stores when v_old+1 wrapped past v_max. */
656 if (PREDICT_FALSE (v_old + 1 > v_max))
659 setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
662 v = v > v_max ? v_min : v;
663 setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
666 ASSERT (validate_buffer_data (b0, s));
667 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail. */
670 while (n_buffers > 0)
676 b0 = vlib_get_buffer (vm, buffers[0]);
680 a0 = (void *) b0 + byte_offset;
684 v = v > v_max ? v_min : v;
686 ASSERT (v_old >= v_min && v_old <= v_max);
687 setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
689 ASSERT (validate_buffer_data (b0, s));
/* do_setbits_random: bit-field analogue of do_set_random — write random
 * values in [v_min, v_max] into a masked bit field.  Same pow2-mask plus
 * double range-reduction scheme as do_set_random.
 * NOTE(review): extract incomplete — the per-width macro bodies are on
 * missing lines. */
695 static_always_inline void
696 do_setbits_random (pg_main_t * pg,
703 u64 v_min, u64 v_max,
707 vlib_main_t * vm = pg->vlib_main;
708 u64 v_diff = v_max - v_min + 1;
709 u64 r_mask = max_pow2 (v_diff) - 1;
713 random_data = clib_random_buffer_get_data
714 (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
717 while (n_buffers >= 4)
719 vlib_buffer_t * b0, * b1, * b2, * b3;
721 u64 r0=0, r1=0; /* warnings be gone */
723 b0 = vlib_get_buffer (vm, buffers[0]);
724 b1 = vlib_get_buffer (vm, buffers[1]);
725 b2 = vlib_get_buffer (vm, buffers[2]);
726 b3 = vlib_get_buffer (vm, buffers[3]);
730 a0 = (void *) b0 + byte_offset;
731 a1 = (void *) b1 + byte_offset;
732 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
733 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
/* Macro fragment: consume two random words per iteration. */
740 u##n * r = random_data; \
743 random_data = r + 2; \
755 /* Add power of 2 sized random number which may be out of range. */
759 /* Twice should be enough to reduce to v_min .. v_max range. */
760 v0 = v0 > v_max ? v0 - v_diff : v0;
761 v1 = v1 > v_max ? v1 - v_diff : v1;
762 v0 = v0 > v_max ? v0 - v_diff : v0;
763 v1 = v1 > v_max ? v1 - v_diff : v1;
768 max_bits, n_bits, mask, shift,
769 /* is_increment */ 0);
771 ASSERT (validate_buffer_data (b0, s));
772 ASSERT (validate_buffer_data (b1, s));
/* Scalar tail. */
775 while (n_buffers > 0)
779 u64 r0 = 0; /* warnings be gone */
781 b0 = vlib_get_buffer (vm, buffers[0]);
785 a0 = (void *) b0 + byte_offset;
792 u##n * r = random_data; \
794 random_data = r + 1; \
806 /* Add power of 2 sized random number which may be out of range. */
809 /* Twice should be enough to reduce to v_min .. v_max range. */
810 v0 = v0 > v_max ? v0 - v_diff : v0;
811 v0 = v0 > v_max ? v0 - v_diff : v0;
813 setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
815 ASSERT (validate_buffer_data (b0, s));
/* do_it: dispatch a single stream edit over [lo_bit, hi_bit) of the
 * buffer template.  Byte-aligned edits dispatch to the do_set_* family;
 * non-byte-aligned edits compute a mask/shift and dispatch to the
 * do_setbits_* family.  Returns the updated increment value.
 * NOTE(review): extract incomplete — the width-dispatch switch and macro
 * heads are on missing lines. */
819 static u64 do_it (pg_main_t * pg,
823 u32 lo_bit, u32 hi_bit,
824 u64 v_min, u64 v_max,
826 pg_edit_type_t edit_type)
828 u32 max_bits, l0, l1, h1, start_bit;
/* Degenerate range collapses to a fixed edit (condition line missing). */
831 edit_type = PG_EDIT_FIXED;
833 l0 = lo_bit / BITS (u8);
834 l1 = lo_bit % BITS (u8);
835 h1 = hi_bit % BITS (u8);
837 start_bit = l0 * BITS (u8);
839 max_bits = hi_bit - start_bit;
840 ASSERT (max_bits <= 64);
/* Byte-aligned dispatch macro: pick increment/random/fixed setter. */
844 if (edit_type == PG_EDIT_INCREMENT) \
845 v = do_set_increment (pg, s, buffers, n_buffers, \
848 /* is_net_byte_order */ 1, \
849 /* want sum */ 0, 0, \
852 else if (edit_type == PG_EDIT_RANDOM) \
853 do_set_random (pg, s, buffers, n_buffers, \
856 /* is_net_byte_order */ 1, \
857 /* want sum */ 0, 0, \
859 else /* edit_type == PG_EDIT_FIXED */ \
860 do_set_fixed (pg, s, buffers, n_buffers, \
863 /* is_net_byte_order */ 1, \
/* Fast path: edit starts and ends on byte boundaries. */
867 if (l1 == 0 && h1 == 0)
883 u32 n_bits = max_bits;
/* Container width: smallest power of two >= n_bits, at least 8 bits. */
885 max_bits = clib_max (max_pow2 (n_bits), 8);
887 mask = ((u64) 1 << (u64) n_bits) - 1;
888 mask &= ~(((u64) 1 << (u64) shift) - 1);
890 mask <<= max_bits - n_bits;
891 shift += max_bits - n_bits;
/* Bit-field dispatch macro: pick increment/random/fixed setbits variant. */
897 if (edit_type == PG_EDIT_INCREMENT) \
898 v = do_setbits_increment (pg, s, buffers, n_buffers, \
899 BITS (u##n), n_bits, \
900 l0, v_min, v_max, v, \
902 else if (edit_type == PG_EDIT_RANDOM) \
903 do_setbits_random (pg, s, buffers, n_buffers, \
904 BITS (u##n), n_bits, \
907 else /* edit_type == PG_EDIT_FIXED */ \
908 do_setbits_fixed (pg, s, buffers, n_buffers, \
909 BITS (u##n), n_bits, \
/* pg_generate_set_lengths: set each generated buffer's current_length
 * according to the stream's packet-size edit type (increment / random /
 * fixed), accumulate the total byte count, bump the RX interface
 * combined counter, and finally refresh DPDK mbuf metadata.
 * NOTE(review): extract incomplete — counter-increment arguments and
 * several closing lines are missing. */
928 pg_generate_set_lengths (pg_main_t * pg,
933 u64 v_min, v_max, length_sum;
934 pg_edit_type_t edit_type;
936 v_min = s->min_packet_bytes;
937 v_max = s->max_packet_bytes;
938 edit_type = s->packet_size_edit_type;
940 if (edit_type == PG_EDIT_INCREMENT)
941 s->last_increment_packet_size
942 = do_set_increment (pg, s, buffers, n_buffers,
943 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
944 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
945 /* is_net_byte_order */ 0,
946 /* want sum */ 1, &length_sum,
948 s->last_increment_packet_size);
950 else if (edit_type == PG_EDIT_RANDOM)
951 do_set_random (pg, s, buffers, n_buffers,
952 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
953 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
954 /* is_net_byte_order */ 0,
955 /* want sum */ 1, &length_sum,
958 else /* edit_type == PG_EDIT_FIXED */
960 do_set_fixed (pg, s, buffers, n_buffers,
961 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
962 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
963 /* is_net_byte_order */ 0,
/* Fixed size: total bytes is simply size * count. */
965 length_sum = v_min * n_buffers;
969 vnet_main_t * vnm = vnet_get_main();
970 vnet_interface_main_t * im = &vnm->interface_main;
971 vnet_sw_interface_t * si = vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
973 vlib_increment_combined_counter (im->combined_sw_if_counters
974 + VNET_INTERFACE_COUNTER_RX,
/* Keep DPDK mbuf metadata consistent with the new lengths. */
981 pg_set_mbuf_metadata (pg, buffers, n_buffers);
/* pg_generate_fix_multi_buffer_lengths: for streams whose packets span
 * multiple chained buffers, walk each chain distributing the whole-packet
 * length across the per-buffer current_length fields, set/clear
 * NEXT_PRESENT flags, and collect buffers past the end of the packet for
 * freeing.  NOTE(review): extract incomplete — chain-walk loop heads and
 * pbi advance are on missing lines. */
985 pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
990 vlib_main_t * vm = pg->vlib_main;
991 pg_buffer_index_t * pbi;
/* Function-static scratch vector, reused across calls. */
993 static u32 * unused_buffers = 0;
995 while (n_buffers > 0)
1001 b = vlib_get_buffer (vm, bi);
1003 /* Current length here is length of whole packet. */
1004 n_bytes_left = b->current_length;
1006 pbi = s->buffer_indices;
1009 uword n = clib_min (n_bytes_left, s->buffer_bytes);
1011 b->current_length = n;
1013 if (n_bytes_left > 0)
1014 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
1016 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
1018 /* Return unused buffers to fifos. */
1020 vec_add1 (unused_buffers, bi);
1023 if (pbi >= vec_end (s->buffer_indices))
1026 bi = b->next_buffer;
1027 b = vlib_get_buffer (vm, bi);
1029 ASSERT (n_bytes_left == 0);
/* Free any buffers the packet did not need. */
1035 if (vec_len (unused_buffers) > 0)
1037 vlib_buffer_free_no_next (vm, unused_buffers,
1038 vec_len (unused_buffers));
1039 _vec_len (unused_buffers) = 0;
/* pg_generate_edit: apply every non-fixed edit (random / increment) of
 * the stream to the freshly filled buffers via do_it, then run each edit
 * group's edit_function (e.g. to fix up IP lengths/checksums) from the
 * innermost group outward.  NOTE(review): extract incomplete — switch
 * head and several locals are missing. */
1044 pg_generate_edit (pg_main_t * pg,
1051 vec_foreach (e, s->non_fixed_edits)
1055 case PG_EDIT_RANDOM:
1056 case PG_EDIT_INCREMENT:
1061 v_min = pg_edit_get_value (e, PG_EDIT_LO);
1062 v_max = pg_edit_get_value (e, PG_EDIT_HI);
/* Bit offsets are relative to the start of vlib_buffer_t so do_it can
 * address the packet data in place. */
1064 hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
1066 + e->lsb_bit_offset);
1067 lo_bit = hi_bit - e->n_bits;
1069 e->last_increment_value
1070 = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
1071 e->last_increment_value,
1076 case PG_EDIT_UNSPECIFIED:
1080 /* Should not be any fixed edits left. */
1086 /* Call any edit functions to e.g. completely IP lengths, checksums, ... */
/* Iterate groups innermost-first so outer (e.g. IP over payload)
 * fixups see finalized inner contents. */
1089 for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1091 pg_edit_group_t * g = s->edit_groups + i;
1092 if (g->edit_function)
1093 g->edit_function (pg, s, g, buffers, n_buffers);
/* pg_set_next_buffer_pointers: chain each buffer in `buffers` to the
 * corresponding buffer in `next_buffers` by setting next_buffer and the
 * NEXT_PRESENT flag.  2-wide main loop with prefetch, scalar tail.
 * NOTE(review): extract incomplete — pointer advances/decrements are on
 * missing lines. */
1099 pg_set_next_buffer_pointers (pg_main_t * pg,
1105 vlib_main_t * vm = pg->vlib_main;
1107 while (n_buffers >= 4)
1110 vlib_buffer_t * b0, * b1;
1112 b0 = vlib_get_buffer (vm, buffers[0]);
1113 b1 = vlib_get_buffer (vm, buffers[1]);
1114 ni0 = next_buffers[0];
1115 ni1 = next_buffers[1];
1117 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1118 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1120 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1121 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1122 b0->next_buffer = ni0;
1123 b1->next_buffer = ni1;
/* Scalar tail. */
1130 while (n_buffers > 0)
1135 b0 = vlib_get_buffer (vm, buffers[0]);
1136 ni0 = next_buffers[0];
1141 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1142 b0->next_buffer = ni0;
/* init_replay_buffers_inline: initialize buffers from the stream's
 * replay packet templates (captured packets), cycling through templates
 * round-robin starting at current_replay_packet_index.  Sets RX sw_if_index
 * from the stream and TX to ~0.  NOTE(review): extract incomplete — the
 * outer per-buffer loop head is missing. */
1146 static_always_inline void
1147 init_replay_buffers_inline (vlib_main_t * vm,
1154 u32 n_left, * b, i, l;
1158 i = s->current_replay_packet_index;
1159 l = vec_len (s->replay_packet_templates);
1171 b0 = vlib_get_buffer (vm, bi0);
1173 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1174 /* was s->sw_if_index[VLIB_TX]; */
1175 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32)~0;
1177 d0 = vec_elt (s->replay_packet_templates, i);
/* Clamp copy length to what remains of this template. */
1180 if (data_offset + n_data >= vec_len (d0))
1181 n0 = vec_len (d0) > data_offset ? vec_len (d0) - data_offset : 0;
1183 b0->current_length = n0;
1185 memcpy (b0->data, d0 + data_offset, n0);
/* Advance to the next template, wrapping at the end. */
1186 i = i + 1 == l ? 0 : i + 1;
/* init_buffers_inline: initialize freshly allocated buffers with the
 * stream's fixed packet template data (or delegate to the replay path
 * when replay templates exist).  Copies [data_offset, data_offset+n_data)
 * of the template, sets RX sw_if_index and clears TX.  2-wide main loop
 * with prefetch, scalar tail.  NOTE(review): extract incomplete. */
1190 static_always_inline void
1191 init_buffers_inline (vlib_main_t * vm,
1202 if (vec_len (s->replay_packet_templates) > 0)
1203 return init_replay_buffers_inline (vm, s, buffers, n_buffers, data_offset, n_data);
1205 data = s->fixed_packet_data + data_offset;
1206 mask = s->fixed_packet_data_mask + data_offset;
/* Clamp n_data so copies never run past the template vector. */
1207 if (data + n_data >= vec_end (s->fixed_packet_data))
1208 n_data = (data < vec_end (s->fixed_packet_data)
1209 ? vec_end (s->fixed_packet_data) - data
1213 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1214 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1223 vlib_buffer_t * b0, * b1;
1225 /* Prefetch next iteration. */
1226 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1227 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1234 b0 = vlib_get_buffer (vm, bi0);
1235 b1 = vlib_get_buffer (vm, bi1);
1237 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1238 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1240 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1241 vnet_buffer (b1)->sw_if_index[VLIB_TX] = (u32)~0;
1245 memcpy (b0->data, data, n_data);
1246 memcpy (b1->data, data, n_data);
1250 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1251 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
/* Scalar tail. */
1264 b0 = vlib_get_buffer (vm, bi0);
1265 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1266 /* s->sw_if_index[VLIB_TX]; */
1267 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32)~0;
1270 memcpy (b0->data, data, n_data);
1272 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
/* pg_buffer_init: free-list buffer init callback.  Decodes the stream
 * index (low 24 bits) and buffer index within the stream (high bits)
 * from the free list's opaque, then initializes the buffers from the
 * stream template.  Inverse of the opaque encoding in
 * pg_stream_fill_helper. */
1276 static void pg_buffer_init (vlib_main_t * vm,
1277 vlib_buffer_free_list_t * fl,
1281 pg_main_t * pg = &pg_main;
1285 si = fl->buffer_init_function_opaque & pow2_mask (24);
1286 bi = fl->buffer_init_function_opaque >> 24;
1288 s = pool_elt_at_index (pg->streams, si);
1290 init_buffers_inline (vm, s, buffers, n_buffers,
1291 /* data_offset */ bi * s->buffer_bytes,
1292 /* n_data */ s->buffer_bytes,
/* pg_stream_fill_helper: allocate n buffers from the per-stream free
 * list, (re)initialize them with template data, chain them to
 * next_buffers when present, and — for the first buffer of each packet —
 * apply lengths, multi-buffer fixups, edits, and replay RX counters.
 * Returns the number of buffers actually produced.
 * NOTE(review): extract incomplete — several argument lists and closing
 * braces are on missing lines. */
1297 pg_stream_fill_helper (pg_main_t * pg,
1299 pg_buffer_index_t * bi,
1304 vlib_main_t * vm = pg->vlib_main;
1305 vlib_buffer_free_list_t * f;
/* First pg_buffer_index_t marks the head buffer of each packet. */
1306 uword is_start_of_packet = bi == s->buffer_indices;
1309 f = vlib_buffer_get_free_list (vm, bi->free_list_index);
1312 * Historically, the pg maintained its own free lists and
1313 * device drivers tx paths would return pkts. With the DPDK,
1314 * that doesn't happen.
/* Install the re-init callback only when buffer recycling is in play. */
1316 if (DPDK == 0 && ! (s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE))
1317 f->buffer_init_function = pg_buffer_init;
/* Opaque encodes stream index (low 24 bits) + buffer index (high bits);
 * decoded in pg_buffer_init. */
1318 f->buffer_init_function_opaque =
1319 (s - pg->streams) | ((bi - s->buffer_indices) << 24);
1321 if (is_start_of_packet)
1322 vnet_buffer (&f->buffer_init_template)->sw_if_index[VLIB_RX]
1323 = vnet_main.local_interface_sw_if_index;
1325 n_allocated = vlib_buffer_alloc_from_free_list (vm,
1328 bi->free_list_index);
1329 if (n_allocated == 0)
1333 * We can't assume we got all the buffers we asked for...
1334 * This never worked until recently.
1336 n_alloc = n_allocated;
1338 /* Reinitialize buffers */
1339 if (DPDK == 0 || CLIB_DEBUG > 0
1340 || (s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE))
1345 (bi - s->buffer_indices) * s->buffer_bytes /* data offset */,
1348 DPDK == 1 || (s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE) != 0);
1350 /* $$$ this doesn't work at the moment */
1351 ASSERT(next_buffers == 0);
1353 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1355 if (is_start_of_packet)
1357 if (vec_len (s->replay_packet_templates) > 0)
/* Replay path: count replayed bytes/packets into the RX counters and
 * advance the replay index by the number of packets produced. */
1359 vnet_main_t * vnm = vnet_get_main();
1360 vnet_interface_main_t * im = &vnm->interface_main;
1361 vnet_sw_interface_t * si =
1362 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1365 for (i = 0; i < n_alloc; i++)
1366 l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1367 vlib_increment_combined_counter (im->combined_sw_if_counters
1368 + VNET_INTERFACE_COUNTER_RX,
1369 os_get_cpu_number(),
1373 s->current_replay_packet_index += n_alloc;
1374 s->current_replay_packet_index %=
1375 vec_len (s->replay_packet_templates);
/* Generated (non-replay) path: set sizes, fix chains, apply edits. */
1379 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1380 if (vec_len (s->buffer_indices) > 1)
1381 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1383 pg_generate_edit (pg, s, buffers, n_alloc);
/* pg_stream_fill: top off the stream's buffer fifos so at least
 * n_buffers packets are ready, respecting the stream's packet limit.
 * Fills each fifo (deepest chained buffer first so next-pointers exist
 * before heads are built), handling fifo wrap-around with a two-part
 * fill.  Returns the number of packets now available.
 * NOTE(review): extract incomplete — several early-return and
 * fifo-consistency lines are missing. */
1391 pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1393 pg_buffer_index_t * bi;
1394 word i, n_in_fifo, n_alloc, n_free, n_added;
1395 u32 * tail, * start, * end, * last_tail, * last_start;
1397 bi = s->buffer_indices;
1399 n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
1400 if (n_in_fifo >= n_buffers)
1403 n_alloc = n_buffers - n_in_fifo;
1405 /* Round up, but never generate more than limit. */
1406 n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1408 if (s->n_packets_limit > 0
1409 && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1411 n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1416 /* All buffer fifos should have the same size. */
1420 vec_foreach (bi, s->buffer_indices)
1422 e = clib_fifo_elts (bi->buffer_fifo);
1423 if (bi == s->buffer_indices)
1429 last_tail = last_start = 0;
/* Fill deepest-chained fifo first so each level can link to the next. */
1432 for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1434 bi = vec_elt_at_index (s->buffer_indices, i);
1436 n_free = clib_fifo_free_elts (bi->buffer_fifo);
1437 if (n_free < n_alloc)
1438 clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1440 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1441 start = bi->buffer_fifo;
1442 end = clib_fifo_end (bi->buffer_fifo);
/* Contiguous case: one helper call covers the whole allocation. */
1444 if (tail + n_alloc <= end)
1446 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
/* Wrapped case: fill to end of fifo storage, then from the start. */
1450 u32 n = clib_min (end - tail, n_alloc);
1451 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1453 if (n_added == n && n_alloc > n_added)
1455 n_added += pg_stream_fill_helper
1456 (pg, s, bi, start, last_start, n_alloc - n_added);
/* Give back tail slots we advanced but could not fill. */
1460 if (PREDICT_FALSE (n_added < n_alloc))
1461 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1466 /* Verify that pkts in the fifo are properly allocated */
1471 vlib_main_t * vm = vlib_get_main();
1472 clib_fifo_foreach (bi0, bi->buffer_fifo,
1475 struct rte_mbuf *mb;
1477 b = vlib_get_buffer(vm, bi0[0]);
1478 mb = (struct rte_mbuf *)b - 1;
1479 ASSERT(rte_mbuf_refcnt_read(mb) == 1);
1485 return n_in_fifo + n_added;
/* NOTE(review): fragment of the pg_input_trace_t struct — the typedef
 * head and other members are on missing lines.  The embedded buffer copy
 * stores packet bytes in its pre_data area (see pg_input_trace below). */
1493 /* Use pre data for packet data. */
1494 vlib_buffer_t buffer;
/* format_pg_input_trace: vlib trace formatter for pg input packets —
 * prints the stream name (or index if the stream was deleted), packet
 * length, the saved buffer header, and the packet bytes either via the
 * stream node's format_buffer or as raw hex.
 * NOTE(review): extract incomplete — some branch/brace lines missing. */
1497 static u8 * format_pg_input_trace (u8 * s, va_list * va)
1499 vlib_main_t * vm = va_arg (*va, vlib_main_t *);
1500 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1501 pg_input_trace_t * t = va_arg (*va, pg_input_trace_t *);
1502 pg_main_t * pg = &pg_main;
1503 pg_stream_t * stream;
1505 uword indent = format_get_indent (s);
/* Stream may have been deleted since the trace was captured. */
1508 if (! pool_is_free_index (pg->streams, t->stream_index))
1509 stream = pool_elt_at_index (pg->streams, t->stream_index);
1512 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1514 s = format (s, "stream %d", t->stream_index);
1516 s = format (s, ", %d bytes", t->packet_length);
1518 s = format (s, "\n%U%U",
1519 format_white_space, indent,
1520 format_vlib_buffer, &t->buffer);
1522 s = format (s, "\n%U",
1523 format_white_space, indent);
1527 n = vlib_get_node (vm, stream->node_index);
/* Prefer the stream node's own buffer formatter when available. */
1529 if (n && n->format_buffer)
1530 s = format (s, "%U", n->format_buffer,
1532 sizeof (t->buffer.pre_data));
1534 s = format (s, "%U",
1535 format_hex_bytes, t->buffer.pre_data,
1536 ARRAY_LEN (t->buffer.pre_data));
/* pg_input_trace: record vlib traces for up to n generated buffers —
 * stream index, chain length, a copy of the buffer header (minus
 * pre_data), and the first pre_data-sized bytes of packet data.
 * 2-wide main loop, scalar tail.  NOTE(review): extract incomplete —
 * loop heads / pointer advances are on missing lines. */
1541 pg_input_trace (pg_main_t * pg,
1542 vlib_node_runtime_t * node,
1547 vlib_main_t * vm = pg->vlib_main;
1548 u32 * b, n_left, stream_index, next_index;
1552 stream_index = s - pg->streams;
1553 next_index = s->next_index;
1558 vlib_buffer_t * b0, * b1;
1559 pg_input_trace_t * t0, * t1;
1566 b0 = vlib_get_buffer (vm, bi0);
1567 b1 = vlib_get_buffer (vm, bi1);
1569 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1570 vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1572 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1573 t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1575 t0->stream_index = stream_index;
1576 t1->stream_index = stream_index;
1578 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1579 t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
/* Save header (excluding pre_data) and leading packet bytes. */
1581 memcpy (&t0->buffer, b0, sizeof (b0[0]) - sizeof (b0->pre_data));
1582 memcpy (&t1->buffer, b1, sizeof (b1[0]) - sizeof (b1->pre_data));
1584 memcpy (t0->buffer.pre_data, b0->data, sizeof (t0->buffer.pre_data));
1585 memcpy (t1->buffer.pre_data, b1->data, sizeof (t1->buffer.pre_data));
/* Scalar tail. */
1592 pg_input_trace_t * t0;
1598 b0 = vlib_get_buffer (vm, bi0);
1600 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1601 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1603 t0->stream_index = stream_index;
1604 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1605 memcpy (&t0->buffer, b0, sizeof (b0[0]) - sizeof (b0->pre_data));
1606 memcpy (t0->buffer.pre_data, b0->data, sizeof (t0->buffer.pre_data));
/* pg_generate_packets: drain up to n_packets_to_generate packets from
 * the stream's head-buffer fifo into next frames for s->next_index,
 * handling fifo wrap-around, advancing every per-chain fifo in step, and
 * emitting traces when tracing is armed.  Returns packets generated.
 * NOTE(review): extract incomplete — a few brace/assignment lines are
 * missing. */
1611 pg_generate_packets (vlib_node_runtime_t * node,
1614 uword n_packets_to_generate)
1616 vlib_main_t * vm = pg->vlib_main;
1617 u32 * to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1618 uword n_packets_generated;
1619 pg_buffer_index_t * bi, * bi0;
1621 bi0 = s->buffer_indices;
/* Make sure the fifos hold enough packets before copying any out. */
1623 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1624 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1625 n_packets_generated = 0;
1627 while (n_packets_to_generate > 0)
1629 u32 * head, * start, * end;
1631 vlib_get_next_frame (vm, node, s->next_index, to_next, n_left);
1633 n_this_frame = n_packets_to_generate;
1634 if (n_this_frame > n_left)
1635 n_this_frame = n_left;
1637 start = bi0->buffer_fifo;
1638 end = clib_fifo_end (bi0->buffer_fifo);
1639 head = clib_fifo_head (bi0->buffer_fifo);
/* Copy buffer indices out of the fifo; split the copy on wrap. */
1641 if (head + n_this_frame <= end)
1642 vlib_copy_buffers (to_next, head, n_this_frame);
1646 vlib_copy_buffers (to_next + 0, head, n);
1647 vlib_copy_buffers (to_next + n, start, n_this_frame - n);
/* Advance every chained-buffer fifo in lockstep with the head fifo. */
1650 vec_foreach (bi, s->buffer_indices)
1651 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1653 n_trace = vlib_get_trace_count (vm, node);
1656 u32 n = clib_min (n_trace, n_this_frame);
1657 pg_input_trace (pg, node, s, to_next, n);
1658 vlib_set_trace_count (vm, node, n_trace - n);
1660 n_packets_to_generate -= n_this_frame;
1661 n_packets_generated += n_this_frame;
1662 n_left -= n_this_frame;
1663 vlib_put_next_frame (vm, node, s->next_index, n_left);
1666 return n_packets_generated;
/* pg_input_stream: per-stream dispatch for one input-node invocation.
 * Disables the stream once its packet limit is reached, applies the
 * configured packets-per-second rate via a time-delta accumulator, caps
 * output at one frame and at the remaining packet budget, then generates
 * the packets.  NOTE(review): extract incomplete — locals and final
 * return are on missing lines. */
1670 pg_input_stream (vlib_node_runtime_t * node,
1674 vlib_main_t * vm = pg->vlib_main;
1678 if (s->n_packets_limit > 0
1679 && s->n_packets_generated >= s->n_packets_limit)
1681 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1685 /* Apply rate limit. */
1686 time_now = vlib_time_now (vm);
1687 if (s->time_last_generate == 0)
1688 s->time_last_generate = time_now;
1690 dt = time_now - s->time_last_generate;
1691 s->time_last_generate = time_now;
1693 n_packets = VLIB_FRAME_SIZE;
1694 if (s->rate_packets_per_second > 0)
1696 s->packet_accumulator += dt * s->rate_packets_per_second;
1697 n_packets = s->packet_accumulator;
1699 /* Never allow accumulator to grow if we get behind. */
1700 s->packet_accumulator -= n_packets;
1703 /* Apply fixed limit. */
1704 if (s->n_packets_limit > 0
1705 && s->n_packets_generated + n_packets > s->n_packets_limit)
1706 n_packets = s->n_packets_limit - s->n_packets_generated;
1708 /* Generate up to one frame's worth of packets. */
1709 if (n_packets > VLIB_FRAME_SIZE)
1710 n_packets = VLIB_FRAME_SIZE;
1713 n_packets = pg_generate_packets (node, pg, s, n_packets);
1715 s->n_packets_generated += n_packets;
/* pg_input: input-node function — iterate all enabled streams and sum
 * the packets each generated this invocation.
 * NOTE(review): extract incomplete — return statement is on a missing
 * line. */
1721 pg_input (vlib_main_t * vm,
1722 vlib_node_runtime_t * node,
1723 vlib_frame_t * frame)
1726 pg_main_t * pg = &pg_main;
1727 uword n_packets = 0;
1729 clib_bitmap_foreach (i, pg->enabled_streams, ({
1730 n_packets += pg_input_stream (node, pg, vec_elt_at_index (pg->streams, i));
/* Node registration: pg input runs as a polled input node, starting
 * disabled until a stream is enabled (see pg_stream_enable_disable). */
1736 VLIB_REGISTER_NODE (pg_input_node) = {
1737 .function = pg_input,
1739 .type = VLIB_NODE_TYPE_INPUT,
1741 .format_trace = format_pg_input_trace,
1743 /* Input node will be left disabled until a stream is active. */
1744 .state = VLIB_NODE_STATE_DISABLED,