/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * pg_input.c: buffer generator input
 *
 * Copyright (c) 2008 Eliot Dresselhaus
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
40 #include <vlib/vlib.h>
41 #include <vnet/pg/pg.h>
42 #include <vnet/vnet.h>
45 #include <vnet/devices/dpdk/dpdk.h>
49 pg_set_mbuf_metadata (pg_main_t * pg, u32 * buffers, u32 n_alloc)
52 vlib_main_t *vm = vlib_get_main ();
61 for (i = 0; i < n_alloc; i++)
63 b = vlib_get_buffer (vm, buffers[i]);
64 mb = rte_mbuf_from_vlib_buffer (b);
66 delta = vlib_buffer_length_in_chain (vm, b) - (i16) mb->pkt_len;
67 new_data_len = (u16) ((i16) mb->data_len + delta);
68 new_pkt_len = (u16) ((i16) mb->pkt_len + delta);
70 mb->data_len = new_data_len;
71 mb->pkt_len = new_pkt_len;
72 mb->data_off = (u16) ((RTE_PKTMBUF_HEADROOM) + b->current_data);
78 validate_buffer_data2 (vlib_buffer_t * b, pg_stream_t * s,
79 u32 data_offset, u32 n_bytes)
85 pd = s->fixed_packet_data + data_offset;
86 pm = s->fixed_packet_data_mask + data_offset;
88 if (pd + n_bytes >= vec_end (s->fixed_packet_data))
89 n_bytes = (pd < vec_end (s->fixed_packet_data)
90 ? vec_end (s->fixed_packet_data) - pd : 0);
92 for (i = 0; i < n_bytes; i++)
93 if ((bd[i] & pm[i]) != pd[i])
99 clib_warning ("buffer %U", format_vlib_buffer, b);
100 clib_warning ("differ at index %d", i);
101 clib_warning ("is %U", format_hex_bytes, bd, n_bytes);
102 clib_warning ("mask %U", format_hex_bytes, pm, n_bytes);
103 clib_warning ("expect %U", format_hex_bytes, pd, n_bytes);
108 validate_buffer_data (vlib_buffer_t * b, pg_stream_t * s)
110 return validate_buffer_data2 (b, s, 0, s->buffer_bytes);
115 u64 v0, u64 v_min, u64 v_max, u32 n_bits, u32 is_net_byte_order)
117 ASSERT (v0 >= v_min && v0 <= v_max);
118 if (n_bits == BITS (u8))
122 else if (n_bits == BITS (u16))
124 if (is_net_byte_order)
125 v0 = clib_host_to_net_u16 (v0);
126 clib_mem_unaligned (a0, u16) = v0;
128 else if (n_bits == BITS (u32))
130 if (is_net_byte_order)
131 v0 = clib_host_to_net_u32 (v0);
132 clib_mem_unaligned (a0, u32) = v0;
134 else if (n_bits == BITS (u64))
136 if (is_net_byte_order)
137 v0 = clib_host_to_net_u64 (v0);
138 clib_mem_unaligned (a0, u64) = v0;
143 set_2 (void *a0, void *a1,
145 u64 v_min, u64 v_max,
146 u32 n_bits, u32 is_net_byte_order, u32 is_increment)
148 ASSERT (v0 >= v_min && v0 <= v_max);
149 ASSERT (v1 >= v_min && v1 <= (v_max + is_increment));
150 if (n_bits == BITS (u8))
155 else if (n_bits == BITS (u16))
157 if (is_net_byte_order)
159 v0 = clib_host_to_net_u16 (v0);
160 v1 = clib_host_to_net_u16 (v1);
162 clib_mem_unaligned (a0, u16) = v0;
163 clib_mem_unaligned (a1, u16) = v1;
165 else if (n_bits == BITS (u32))
167 if (is_net_byte_order)
169 v0 = clib_host_to_net_u32 (v0);
170 v1 = clib_host_to_net_u32 (v1);
172 clib_mem_unaligned (a0, u32) = v0;
173 clib_mem_unaligned (a1, u32) = v1;
175 else if (n_bits == BITS (u64))
177 if (is_net_byte_order)
179 v0 = clib_host_to_net_u64 (v0);
180 v1 = clib_host_to_net_u64 (v1);
182 clib_mem_unaligned (a0, u64) = v0;
183 clib_mem_unaligned (a1, u64) = v1;
187 static_always_inline void
188 do_set_fixed (pg_main_t * pg,
193 u32 byte_offset, u32 is_net_byte_order, u64 v_min, u64 v_max)
195 vlib_main_t *vm = pg->vlib_main;
197 while (n_buffers >= 4)
199 vlib_buffer_t *b0, *b1, *b2, *b3;
202 b0 = vlib_get_buffer (vm, buffers[0]);
203 b1 = vlib_get_buffer (vm, buffers[1]);
204 b2 = vlib_get_buffer (vm, buffers[2]);
205 b3 = vlib_get_buffer (vm, buffers[3]);
209 a0 = (void *) b0 + byte_offset;
210 a1 = (void *) b1 + byte_offset;
211 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
212 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
214 set_2 (a0, a1, v_min, v_min, v_min, v_max, n_bits, is_net_byte_order,
215 /* is_increment */ 0);
217 ASSERT (validate_buffer_data (b0, s));
218 ASSERT (validate_buffer_data (b1, s));
221 while (n_buffers > 0)
226 b0 = vlib_get_buffer (vm, buffers[0]);
230 a0 = (void *) b0 + byte_offset;
232 set_1 (a0, v_min, v_min, v_max, n_bits, is_net_byte_order);
234 ASSERT (validate_buffer_data (b0, s));
238 static_always_inline u64
239 do_set_increment (pg_main_t * pg,
245 u32 is_net_byte_order,
246 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max, u64 v)
248 vlib_main_t *vm = pg->vlib_main;
251 ASSERT (v >= v_min && v <= v_max);
253 while (n_buffers >= 4)
255 vlib_buffer_t *b0, *b1, *b2, *b3;
259 b0 = vlib_get_buffer (vm, buffers[0]);
260 b1 = vlib_get_buffer (vm, buffers[1]);
261 b2 = vlib_get_buffer (vm, buffers[2]);
262 b3 = vlib_get_buffer (vm, buffers[3]);
266 a0 = (void *) b0 + byte_offset;
267 a1 = (void *) b1 + byte_offset;
268 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
269 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
273 v = v > v_max ? v_min : v;
275 v_old + 0, v_old + 1, v_min, v_max, n_bits, is_net_byte_order,
276 /* is_increment */ 1);
279 sum += 2 * v_old + 1;
281 if (PREDICT_FALSE (v_old + 1 > v_max))
284 sum -= 2 * v_old + 1;
287 set_1 (a0, v + 0, v_min, v_max, n_bits, is_net_byte_order);
292 v = v > v_max ? v_min : v;
293 set_1 (a1, v + 0, v_min, v_max, n_bits, is_net_byte_order);
299 ASSERT (validate_buffer_data (b0, s));
300 ASSERT (validate_buffer_data (b1, s));
303 while (n_buffers > 0)
309 b0 = vlib_get_buffer (vm, buffers[0]);
313 a0 = (void *) b0 + byte_offset;
319 v = v > v_max ? v_min : v;
321 ASSERT (v_old >= v_min && v_old <= v_max);
322 set_1 (a0, v_old, v_min, v_max, n_bits, is_net_byte_order);
324 ASSERT (validate_buffer_data (b0, s));
333 static_always_inline void
334 do_set_random (pg_main_t * pg,
340 u32 is_net_byte_order,
341 u32 want_sum, u64 * sum_result, u64 v_min, u64 v_max)
343 vlib_main_t *vm = pg->vlib_main;
344 u64 v_diff = v_max - v_min + 1;
345 u64 r_mask = max_pow2 (v_diff) - 1;
350 random_data = clib_random_buffer_get_data
351 (&vm->random_buffer, n_buffers * n_bits / BITS (u8));
355 while (n_buffers >= 4)
357 vlib_buffer_t *b0, *b1, *b2, *b3;
359 u64 r0 = 0, r1 = 0; /* warnings be gone */
361 b0 = vlib_get_buffer (vm, buffers[0]);
362 b1 = vlib_get_buffer (vm, buffers[1]);
363 b2 = vlib_get_buffer (vm, buffers[2]);
364 b3 = vlib_get_buffer (vm, buffers[3]);
368 a0 = (void *) b0 + byte_offset;
369 a1 = (void *) b1 + byte_offset;
370 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
371 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
378 u##n * r = random_data; \
381 random_data = r + 2; \
393 /* Add power of 2 sized random number which may be out of range. */
397 /* Twice should be enough to reduce to v_min .. v_max range. */
398 v0 = v0 > v_max ? v0 - v_diff : v0;
399 v1 = v1 > v_max ? v1 - v_diff : v1;
400 v0 = v0 > v_max ? v0 - v_diff : v0;
401 v1 = v1 > v_max ? v1 - v_diff : v1;
406 set_2 (a0, a1, v0, v1, v_min, v_max, n_bits, is_net_byte_order,
407 /* is_increment */ 0);
409 ASSERT (validate_buffer_data (b0, s));
410 ASSERT (validate_buffer_data (b1, s));
413 while (n_buffers > 0)
417 u64 r0 = 0; /* warnings be gone */
419 b0 = vlib_get_buffer (vm, buffers[0]);
423 a0 = (void *) b0 + byte_offset;
430 u##n * r = random_data; \
432 random_data = r + 1; \
444 /* Add power of 2 sized random number which may be out of range. */
447 /* Twice should be enough to reduce to v_min .. v_max range. */
448 v0 = v0 > v_max ? v0 - v_diff : v0;
449 v0 = v0 > v_max ? v0 - v_diff : v0;
454 set_1 (a0, v0, v_min, v_max, n_bits, is_net_byte_order);
456 ASSERT (validate_buffer_data (b0, s));
464 clib_mem_unaligned (a##i, t) = \
465 clib_host_to_net_##t ((clib_net_to_host_mem_##t (a##i) &~ mask) \
471 u64 v_min, u64 v_max,
472 u32 max_bits, u32 n_bits, u64 mask, u32 shift)
474 ASSERT (v0 >= v_min && v0 <= v_max);
475 if (max_bits == BITS (u8))
476 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
478 else if (max_bits == BITS (u16))
482 else if (max_bits == BITS (u32))
486 else if (max_bits == BITS (u64))
493 setbits_2 (void *a0, void *a1,
495 u64 v_min, u64 v_max,
496 u32 max_bits, u32 n_bits, u64 mask, u32 shift, u32 is_increment)
498 ASSERT (v0 >= v_min && v0 <= v_max);
499 ASSERT (v1 >= v_min && v1 <= v_max + is_increment);
500 if (max_bits == BITS (u8))
502 ((u8 *) a0)[0] = (((u8 *) a0)[0] & ~mask) | (v0 << shift);
503 ((u8 *) a1)[0] = (((u8 *) a1)[0] & ~mask) | (v1 << shift);
506 else if (max_bits == BITS (u16))
511 else if (max_bits == BITS (u32))
516 else if (max_bits == BITS (u64))
525 static_always_inline void
526 do_setbits_fixed (pg_main_t * pg,
532 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
534 vlib_main_t *vm = pg->vlib_main;
536 while (n_buffers >= 4)
538 vlib_buffer_t *b0, *b1, *b2, *b3;
541 b0 = vlib_get_buffer (vm, buffers[0]);
542 b1 = vlib_get_buffer (vm, buffers[1]);
543 b2 = vlib_get_buffer (vm, buffers[2]);
544 b3 = vlib_get_buffer (vm, buffers[3]);
548 a0 = (void *) b0 + byte_offset;
549 a1 = (void *) b1 + byte_offset;
550 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
551 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
554 v_min, v_min, v_min, v_max, max_bits, n_bits, mask, shift,
555 /* is_increment */ 0);
557 ASSERT (validate_buffer_data (b0, s));
558 ASSERT (validate_buffer_data (b1, s));
561 while (n_buffers > 0)
566 b0 = vlib_get_buffer (vm, buffers[0]);
570 a0 = (void *) b0 + byte_offset;
572 setbits_1 (a0, v_min, v_min, v_max, max_bits, n_bits, mask, shift);
573 ASSERT (validate_buffer_data (b0, s));
577 static_always_inline u64
578 do_setbits_increment (pg_main_t * pg,
585 u64 v_min, u64 v_max, u64 v, u64 mask, u32 shift)
587 vlib_main_t *vm = pg->vlib_main;
589 ASSERT (v >= v_min && v <= v_max);
591 while (n_buffers >= 4)
593 vlib_buffer_t *b0, *b1, *b2, *b3;
597 b0 = vlib_get_buffer (vm, buffers[0]);
598 b1 = vlib_get_buffer (vm, buffers[1]);
599 b2 = vlib_get_buffer (vm, buffers[2]);
600 b3 = vlib_get_buffer (vm, buffers[3]);
604 a0 = (void *) b0 + byte_offset;
605 a1 = (void *) b1 + byte_offset;
606 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
607 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
611 v = v > v_max ? v_min : v;
613 v_old + 0, v_old + 1,
614 v_min, v_max, max_bits, n_bits, mask, shift,
615 /* is_increment */ 1);
617 if (PREDICT_FALSE (v_old + 1 > v_max))
620 setbits_1 (a0, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
623 v = v > v_max ? v_min : v;
624 setbits_1 (a1, v + 0, v_min, v_max, max_bits, n_bits, mask, shift);
627 ASSERT (validate_buffer_data (b0, s));
628 ASSERT (validate_buffer_data (b1, s));
631 while (n_buffers > 0)
637 b0 = vlib_get_buffer (vm, buffers[0]);
641 a0 = (void *) b0 + byte_offset;
645 v = v > v_max ? v_min : v;
647 ASSERT (v_old >= v_min && v_old <= v_max);
648 setbits_1 (a0, v_old, v_min, v_max, max_bits, n_bits, mask, shift);
650 ASSERT (validate_buffer_data (b0, s));
656 static_always_inline void
657 do_setbits_random (pg_main_t * pg,
663 u32 byte_offset, u64 v_min, u64 v_max, u64 mask, u32 shift)
665 vlib_main_t *vm = pg->vlib_main;
666 u64 v_diff = v_max - v_min + 1;
667 u64 r_mask = max_pow2 (v_diff) - 1;
671 random_data = clib_random_buffer_get_data
672 (&vm->random_buffer, n_buffers * max_bits / BITS (u8));
675 while (n_buffers >= 4)
677 vlib_buffer_t *b0, *b1, *b2, *b3;
679 u64 r0 = 0, r1 = 0; /* warnings be gone */
681 b0 = vlib_get_buffer (vm, buffers[0]);
682 b1 = vlib_get_buffer (vm, buffers[1]);
683 b2 = vlib_get_buffer (vm, buffers[2]);
684 b3 = vlib_get_buffer (vm, buffers[3]);
688 a0 = (void *) b0 + byte_offset;
689 a1 = (void *) b1 + byte_offset;
690 CLIB_PREFETCH ((void *) b2 + byte_offset, sizeof (v_min), WRITE);
691 CLIB_PREFETCH ((void *) b3 + byte_offset, sizeof (v_min), WRITE);
698 u##n * r = random_data; \
701 random_data = r + 2; \
713 /* Add power of 2 sized random number which may be out of range. */
717 /* Twice should be enough to reduce to v_min .. v_max range. */
718 v0 = v0 > v_max ? v0 - v_diff : v0;
719 v1 = v1 > v_max ? v1 - v_diff : v1;
720 v0 = v0 > v_max ? v0 - v_diff : v0;
721 v1 = v1 > v_max ? v1 - v_diff : v1;
723 setbits_2 (a0, a1, v0, v1, v_min, v_max, max_bits, n_bits, mask, shift,
724 /* is_increment */ 0);
726 ASSERT (validate_buffer_data (b0, s));
727 ASSERT (validate_buffer_data (b1, s));
730 while (n_buffers > 0)
734 u64 r0 = 0; /* warnings be gone */
736 b0 = vlib_get_buffer (vm, buffers[0]);
740 a0 = (void *) b0 + byte_offset;
747 u##n * r = random_data; \
749 random_data = r + 1; \
761 /* Add power of 2 sized random number which may be out of range. */
764 /* Twice should be enough to reduce to v_min .. v_max range. */
765 v0 = v0 > v_max ? v0 - v_diff : v0;
766 v0 = v0 > v_max ? v0 - v_diff : v0;
768 setbits_1 (a0, v0, v_min, v_max, max_bits, n_bits, mask, shift);
770 ASSERT (validate_buffer_data (b0, s));
775 do_it (pg_main_t * pg,
779 u32 lo_bit, u32 hi_bit,
780 u64 v_min, u64 v_max, u64 v, pg_edit_type_t edit_type)
782 u32 max_bits, l0, l1, h1, start_bit;
785 edit_type = PG_EDIT_FIXED;
787 l0 = lo_bit / BITS (u8);
788 l1 = lo_bit % BITS (u8);
789 h1 = hi_bit % BITS (u8);
791 start_bit = l0 * BITS (u8);
793 max_bits = hi_bit - start_bit;
794 ASSERT (max_bits <= 64);
798 if (edit_type == PG_EDIT_INCREMENT) \
799 v = do_set_increment (pg, s, buffers, n_buffers, \
802 /* is_net_byte_order */ 1, \
803 /* want sum */ 0, 0, \
806 else if (edit_type == PG_EDIT_RANDOM) \
807 do_set_random (pg, s, buffers, n_buffers, \
810 /* is_net_byte_order */ 1, \
811 /* want sum */ 0, 0, \
813 else /* edit_type == PG_EDIT_FIXED */ \
814 do_set_fixed (pg, s, buffers, n_buffers, \
817 /* is_net_byte_order */ 1, \
821 if (l1 == 0 && h1 == 0)
837 u32 n_bits = max_bits;
839 max_bits = clib_max (max_pow2 (n_bits), 8);
841 mask = ((u64) 1 << (u64) n_bits) - 1;
842 mask &= ~(((u64) 1 << (u64) shift) - 1);
844 mask <<= max_bits - n_bits;
845 shift += max_bits - n_bits;
851 if (edit_type == PG_EDIT_INCREMENT) \
852 v = do_setbits_increment (pg, s, buffers, n_buffers, \
853 BITS (u##n), n_bits, \
854 l0, v_min, v_max, v, \
856 else if (edit_type == PG_EDIT_RANDOM) \
857 do_setbits_random (pg, s, buffers, n_buffers, \
858 BITS (u##n), n_bits, \
861 else /* edit_type == PG_EDIT_FIXED */ \
862 do_setbits_fixed (pg, s, buffers, n_buffers, \
863 BITS (u##n), n_bits, \
882 pg_generate_set_lengths (pg_main_t * pg,
883 pg_stream_t * s, u32 * buffers, u32 n_buffers)
885 u64 v_min, v_max, length_sum;
886 pg_edit_type_t edit_type;
888 v_min = s->min_packet_bytes;
889 v_max = s->max_packet_bytes;
890 edit_type = s->packet_size_edit_type;
892 if (edit_type == PG_EDIT_INCREMENT)
893 s->last_increment_packet_size
894 = do_set_increment (pg, s, buffers, n_buffers,
895 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
896 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
897 /* is_net_byte_order */ 0,
898 /* want sum */ 1, &length_sum,
899 v_min, v_max, s->last_increment_packet_size);
901 else if (edit_type == PG_EDIT_RANDOM)
902 do_set_random (pg, s, buffers, n_buffers,
903 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
904 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
905 /* is_net_byte_order */ 0,
906 /* want sum */ 1, &length_sum,
909 else /* edit_type == PG_EDIT_FIXED */
911 do_set_fixed (pg, s, buffers, n_buffers,
912 8 * STRUCT_SIZE_OF (vlib_buffer_t, current_length),
913 STRUCT_OFFSET_OF (vlib_buffer_t, current_length),
914 /* is_net_byte_order */ 0,
916 length_sum = v_min * n_buffers;
920 vnet_main_t *vnm = vnet_get_main ();
921 vnet_interface_main_t *im = &vnm->interface_main;
922 vnet_sw_interface_t *si =
923 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
925 vlib_increment_combined_counter (im->combined_sw_if_counters
926 + VNET_INTERFACE_COUNTER_RX,
927 os_get_cpu_number (),
928 si->sw_if_index, n_buffers, length_sum);
931 pg_set_mbuf_metadata (pg, buffers, n_buffers);
935 pg_generate_fix_multi_buffer_lengths (pg_main_t * pg,
937 u32 * buffers, u32 n_buffers)
939 vlib_main_t *vm = pg->vlib_main;
940 pg_buffer_index_t *pbi;
942 static u32 *unused_buffers = 0;
944 while (n_buffers > 0)
950 b = vlib_get_buffer (vm, bi);
952 /* Current length here is length of whole packet. */
953 n_bytes_left = b->current_length;
955 pbi = s->buffer_indices;
958 uword n = clib_min (n_bytes_left, s->buffer_bytes);
960 b->current_length = n;
962 if (n_bytes_left > 0)
963 b->flags |= VLIB_BUFFER_NEXT_PRESENT;
965 b->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
967 /* Return unused buffers to fifos. */
969 vec_add1 (unused_buffers, bi);
972 if (pbi >= vec_end (s->buffer_indices))
976 b = vlib_get_buffer (vm, bi);
978 ASSERT (n_bytes_left == 0);
984 if (vec_len (unused_buffers) > 0)
986 vlib_buffer_free_no_next (vm, unused_buffers, vec_len (unused_buffers));
987 _vec_len (unused_buffers) = 0;
992 pg_generate_edit (pg_main_t * pg,
993 pg_stream_t * s, u32 * buffers, u32 n_buffers)
997 vec_foreach (e, s->non_fixed_edits)
1001 case PG_EDIT_RANDOM:
1002 case PG_EDIT_INCREMENT:
1007 v_min = pg_edit_get_value (e, PG_EDIT_LO);
1008 v_max = pg_edit_get_value (e, PG_EDIT_HI);
1010 hi_bit = (BITS (u8) * STRUCT_OFFSET_OF (vlib_buffer_t, data)
1011 + BITS (u8) + e->lsb_bit_offset);
1012 lo_bit = hi_bit - e->n_bits;
1014 e->last_increment_value
1015 = do_it (pg, s, buffers, n_buffers, lo_bit, hi_bit, v_min, v_max,
1016 e->last_increment_value, e->type);
1020 case PG_EDIT_UNSPECIFIED:
1024 /* Should not be any fixed edits left. */
1030 /* Call any edit functions to e.g. completely IP lengths, checksums, ... */
1033 for (i = vec_len (s->edit_groups) - 1; i >= 0; i--)
1035 pg_edit_group_t *g = s->edit_groups + i;
1036 if (g->edit_function)
1037 g->edit_function (pg, s, g, buffers, n_buffers);
1043 pg_set_next_buffer_pointers (pg_main_t * pg,
1045 u32 * buffers, u32 * next_buffers, u32 n_buffers)
1047 vlib_main_t *vm = pg->vlib_main;
1049 while (n_buffers >= 4)
1052 vlib_buffer_t *b0, *b1;
1054 b0 = vlib_get_buffer (vm, buffers[0]);
1055 b1 = vlib_get_buffer (vm, buffers[1]);
1056 ni0 = next_buffers[0];
1057 ni1 = next_buffers[1];
1059 vlib_prefetch_buffer_with_index (vm, buffers[2], WRITE);
1060 vlib_prefetch_buffer_with_index (vm, buffers[3], WRITE);
1062 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1063 b1->flags |= VLIB_BUFFER_NEXT_PRESENT;
1064 b0->next_buffer = ni0;
1065 b1->next_buffer = ni1;
1072 while (n_buffers > 0)
1077 b0 = vlib_get_buffer (vm, buffers[0]);
1078 ni0 = next_buffers[0];
1083 b0->flags |= VLIB_BUFFER_NEXT_PRESENT;
1084 b0->next_buffer = ni0;
1088 static_always_inline void
1089 init_replay_buffers_inline (vlib_main_t * vm,
1092 u32 n_buffers, u32 data_offset, u32 n_data)
1094 u32 n_left, *b, i, l;
1098 i = s->current_replay_packet_index;
1099 l = vec_len (s->replay_packet_templates);
1111 b0 = vlib_get_buffer (vm, bi0);
1113 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1114 /* was s->sw_if_index[VLIB_TX]; */
1115 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1117 d0 = vec_elt (s->replay_packet_templates, i);
1120 if (data_offset + n_data >= vec_len (d0))
1121 n0 = vec_len (d0) > data_offset ? vec_len (d0) - data_offset : 0;
1123 b0->current_length = n0;
1125 clib_memcpy (b0->data, d0 + data_offset, n0);
1126 i = i + 1 == l ? 0 : i + 1;
1130 static_always_inline void
1131 init_buffers_inline (vlib_main_t * vm,
1134 u32 n_buffers, u32 data_offset, u32 n_data, u32 set_data)
1139 if (vec_len (s->replay_packet_templates) > 0)
1140 return init_replay_buffers_inline (vm, s, buffers, n_buffers, data_offset,
1143 data = s->fixed_packet_data + data_offset;
1144 mask = s->fixed_packet_data_mask + data_offset;
1145 if (data + n_data >= vec_end (s->fixed_packet_data))
1146 n_data = (data < vec_end (s->fixed_packet_data)
1147 ? vec_end (s->fixed_packet_data) - data : 0);
1150 ASSERT (data + n_data <= vec_end (s->fixed_packet_data));
1151 ASSERT (mask + n_data <= vec_end (s->fixed_packet_data_mask));
1160 vlib_buffer_t *b0, *b1;
1162 /* Prefetch next iteration. */
1163 vlib_prefetch_buffer_with_index (vm, b[2], STORE);
1164 vlib_prefetch_buffer_with_index (vm, b[3], STORE);
1171 b0 = vlib_get_buffer (vm, bi0);
1172 b1 = vlib_get_buffer (vm, bi1);
1174 vnet_buffer (b0)->sw_if_index[VLIB_RX] =
1175 vnet_buffer (b1)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1177 vnet_buffer (b0)->sw_if_index[VLIB_TX] =
1178 vnet_buffer (b1)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1182 clib_memcpy (b0->data, data, n_data);
1183 clib_memcpy (b1->data, data, n_data);
1187 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1188 ASSERT (validate_buffer_data2 (b1, s, data_offset, n_data));
1201 b0 = vlib_get_buffer (vm, bi0);
1202 vnet_buffer (b0)->sw_if_index[VLIB_RX] = s->sw_if_index[VLIB_RX];
1203 /* s->sw_if_index[VLIB_TX]; */
1204 vnet_buffer (b0)->sw_if_index[VLIB_TX] = (u32) ~ 0;
1207 clib_memcpy (b0->data, data, n_data);
1209 ASSERT (validate_buffer_data2 (b0, s, data_offset, n_data));
1214 pg_buffer_init (vlib_main_t * vm,
1215 vlib_buffer_free_list_t * fl, u32 * buffers, u32 n_buffers)
1217 pg_main_t *pg = &pg_main;
1221 si = fl->buffer_init_function_opaque & pow2_mask (24);
1222 bi = fl->buffer_init_function_opaque >> 24;
1224 s = pool_elt_at_index (pg->streams, si);
1226 init_buffers_inline (vm, s, buffers, n_buffers,
1227 /* data_offset */ bi * s->buffer_bytes,
1228 /* n_data */ s->buffer_bytes,
1233 pg_stream_fill_helper (pg_main_t * pg,
1235 pg_buffer_index_t * bi,
1236 u32 * buffers, u32 * next_buffers, u32 n_alloc)
1238 vlib_main_t *vm = pg->vlib_main;
1239 vlib_buffer_free_list_t *f;
1240 uword is_start_of_packet = bi == s->buffer_indices;
1243 f = vlib_buffer_get_free_list (vm, bi->free_list_index);
1246 * Historically, the pg maintained its own free lists and
1247 * device drivers tx paths would return pkts. With the DPDK,
1248 * that doesn't happen.
1250 if (DPDK == 0 && !(s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE))
1251 f->buffer_init_function = pg_buffer_init;
1252 f->buffer_init_function_opaque =
1253 (s - pg->streams) | ((bi - s->buffer_indices) << 24);
1255 if (is_start_of_packet)
1256 vnet_buffer (&f->buffer_init_template)->sw_if_index[VLIB_RX]
1257 = vnet_main.local_interface_sw_if_index;
1259 n_allocated = vlib_buffer_alloc_from_free_list (vm,
1262 bi->free_list_index);
1263 if (n_allocated == 0)
1267 * We can't assume we got all the buffers we asked for...
1268 * This never worked until recently.
1270 n_alloc = n_allocated;
1272 /* Reinitialize buffers */
1273 if (DPDK == 0 || CLIB_DEBUG > 0
1274 || (s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE))
1278 n_alloc, (bi - s->buffer_indices) * s->buffer_bytes /* data offset */ ,
1281 DPDK == 1 || (s->flags & PG_STREAM_FLAGS_DISABLE_BUFFER_RECYCLE) != 0);
1283 /* $$$ this doesn't work at the moment */
1284 ASSERT (next_buffers == 0);
1286 pg_set_next_buffer_pointers (pg, s, buffers, next_buffers, n_alloc);
1288 if (is_start_of_packet)
1290 if (vec_len (s->replay_packet_templates) > 0)
1292 vnet_main_t *vnm = vnet_get_main ();
1293 vnet_interface_main_t *im = &vnm->interface_main;
1294 vnet_sw_interface_t *si =
1295 vnet_get_sw_interface (vnm, s->sw_if_index[VLIB_RX]);
1298 for (i = 0; i < n_alloc; i++)
1299 l += vlib_buffer_index_length_in_chain (vm, buffers[i]);
1300 vlib_increment_combined_counter (im->combined_sw_if_counters
1301 + VNET_INTERFACE_COUNTER_RX,
1302 os_get_cpu_number (),
1303 si->sw_if_index, n_alloc, l);
1304 s->current_replay_packet_index += n_alloc;
1305 s->current_replay_packet_index %=
1306 vec_len (s->replay_packet_templates);
1310 pg_generate_set_lengths (pg, s, buffers, n_alloc);
1311 if (vec_len (s->buffer_indices) > 1)
1312 pg_generate_fix_multi_buffer_lengths (pg, s, buffers, n_alloc);
1314 pg_generate_edit (pg, s, buffers, n_alloc);
1322 pg_stream_fill (pg_main_t * pg, pg_stream_t * s, u32 n_buffers)
1324 pg_buffer_index_t *bi;
1325 word i, n_in_fifo, n_alloc, n_free, n_added;
1326 u32 *tail, *start, *end, *last_tail, *last_start;
1328 bi = s->buffer_indices;
1330 n_in_fifo = clib_fifo_elts (bi->buffer_fifo);
1331 if (n_in_fifo >= n_buffers)
1334 n_alloc = n_buffers - n_in_fifo;
1336 /* Round up, but never generate more than limit. */
1337 n_alloc = clib_max (VLIB_FRAME_SIZE, n_alloc);
1339 if (s->n_packets_limit > 0
1340 && s->n_packets_generated + n_in_fifo + n_alloc >= s->n_packets_limit)
1342 n_alloc = s->n_packets_limit - s->n_packets_generated - n_in_fifo;
1347 /* All buffer fifos should have the same size. */
1351 vec_foreach (bi, s->buffer_indices)
1353 e = clib_fifo_elts (bi->buffer_fifo);
1354 if (bi == s->buffer_indices)
1360 last_tail = last_start = 0;
1363 for (i = vec_len (s->buffer_indices) - 1; i >= 0; i--)
1365 bi = vec_elt_at_index (s->buffer_indices, i);
1367 n_free = clib_fifo_free_elts (bi->buffer_fifo);
1368 if (n_free < n_alloc)
1369 clib_fifo_resize (bi->buffer_fifo, n_alloc - n_free);
1371 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_alloc);
1372 start = bi->buffer_fifo;
1373 end = clib_fifo_end (bi->buffer_fifo);
1375 if (tail + n_alloc <= end)
1378 pg_stream_fill_helper (pg, s, bi, tail, last_tail, n_alloc);
1382 u32 n = clib_min (end - tail, n_alloc);
1383 n_added = pg_stream_fill_helper (pg, s, bi, tail, last_tail, n);
1385 if (n_added == n && n_alloc > n_added)
1387 n_added += pg_stream_fill_helper
1388 (pg, s, bi, start, last_start, n_alloc - n_added);
1392 if (PREDICT_FALSE (n_added < n_alloc))
1393 tail = clib_fifo_advance_tail (bi->buffer_fifo, n_added - n_alloc);
1398 /* Verify that pkts in the fifo are properly allocated */
1403 vlib_main_t *vm = vlib_get_main ();
1405 clib_fifo_foreach (bi0, bi->buffer_fifo,
1408 struct rte_mbuf *mb;
1410 b = vlib_get_buffer(vm, bi0[0]);
1411 mb = rte_mbuf_from_vlib_buffer(b);
1412 ASSERT(rte_mbuf_refcnt_read(mb) == 1);
1419 return n_in_fifo + n_added;
1428 /* Use pre data for packet data. */
1429 vlib_buffer_t buffer;
1433 format_pg_input_trace (u8 * s, va_list * va)
1435 vlib_main_t *vm = va_arg (*va, vlib_main_t *);
1436 CLIB_UNUSED (vlib_node_t * node) = va_arg (*va, vlib_node_t *);
1437 pg_input_trace_t *t = va_arg (*va, pg_input_trace_t *);
1438 pg_main_t *pg = &pg_main;
1439 pg_stream_t *stream;
1441 uword indent = format_get_indent (s);
1444 if (!pool_is_free_index (pg->streams, t->stream_index))
1445 stream = pool_elt_at_index (pg->streams, t->stream_index);
1448 s = format (s, "stream %v", pg->streams[t->stream_index].name);
1450 s = format (s, "stream %d", t->stream_index);
1452 s = format (s, ", %d bytes", t->packet_length);
1454 s = format (s, "\n%U%U",
1455 format_white_space, indent, format_vlib_buffer, &t->buffer);
1457 s = format (s, "\n%U", format_white_space, indent);
1461 n = vlib_get_node (vm, stream->node_index);
1463 if (n && n->format_buffer)
1464 s = format (s, "%U", n->format_buffer,
1465 t->buffer.pre_data, sizeof (t->buffer.pre_data));
1467 s = format (s, "%U",
1468 format_hex_bytes, t->buffer.pre_data,
1469 ARRAY_LEN (t->buffer.pre_data));
1474 pg_input_trace (pg_main_t * pg,
1475 vlib_node_runtime_t * node,
1476 pg_stream_t * s, u32 * buffers, u32 n_buffers)
1478 vlib_main_t *vm = pg->vlib_main;
1479 u32 *b, n_left, stream_index, next_index;
1483 stream_index = s - pg->streams;
1484 next_index = s->next_index;
1489 vlib_buffer_t *b0, *b1;
1490 pg_input_trace_t *t0, *t1;
1497 b0 = vlib_get_buffer (vm, bi0);
1498 b1 = vlib_get_buffer (vm, bi1);
1500 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1501 vlib_trace_buffer (vm, node, next_index, b1, /* follow_chain */ 1);
1503 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1504 t1 = vlib_add_trace (vm, node, b1, sizeof (t1[0]));
1506 t0->stream_index = stream_index;
1507 t1->stream_index = stream_index;
1509 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1510 t1->packet_length = vlib_buffer_length_in_chain (vm, b1);
1512 clib_memcpy (&t0->buffer, b0, sizeof (b0[0]) - sizeof (b0->pre_data));
1513 clib_memcpy (&t1->buffer, b1, sizeof (b1[0]) - sizeof (b1->pre_data));
1515 clib_memcpy (t0->buffer.pre_data, b0->data,
1516 sizeof (t0->buffer.pre_data));
1517 clib_memcpy (t1->buffer.pre_data, b1->data,
1518 sizeof (t1->buffer.pre_data));
1525 pg_input_trace_t *t0;
1531 b0 = vlib_get_buffer (vm, bi0);
1533 vlib_trace_buffer (vm, node, next_index, b0, /* follow_chain */ 1);
1534 t0 = vlib_add_trace (vm, node, b0, sizeof (t0[0]));
1536 t0->stream_index = stream_index;
1537 t0->packet_length = vlib_buffer_length_in_chain (vm, b0);
1538 clib_memcpy (&t0->buffer, b0, sizeof (b0[0]) - sizeof (b0->pre_data));
1539 clib_memcpy (t0->buffer.pre_data, b0->data,
1540 sizeof (t0->buffer.pre_data));
1545 pg_generate_packets (vlib_node_runtime_t * node,
1547 pg_stream_t * s, uword n_packets_to_generate)
1549 vlib_main_t *vm = pg->vlib_main;
1550 u32 *to_next, n_this_frame, n_left, n_trace, n_packets_in_fifo;
1551 uword n_packets_generated;
1552 pg_buffer_index_t *bi, *bi0;
1554 bi0 = s->buffer_indices;
1556 n_packets_in_fifo = pg_stream_fill (pg, s, n_packets_to_generate);
1557 n_packets_to_generate = clib_min (n_packets_in_fifo, n_packets_to_generate);
1558 n_packets_generated = 0;
1560 while (n_packets_to_generate > 0)
1562 u32 *head, *start, *end;
1564 vlib_get_next_frame (vm, node, s->next_index, to_next, n_left);
1566 n_this_frame = n_packets_to_generate;
1567 if (n_this_frame > n_left)
1568 n_this_frame = n_left;
1570 start = bi0->buffer_fifo;
1571 end = clib_fifo_end (bi0->buffer_fifo);
1572 head = clib_fifo_head (bi0->buffer_fifo);
1574 if (head + n_this_frame <= end)
1575 vlib_copy_buffers (to_next, head, n_this_frame);
1579 vlib_copy_buffers (to_next + 0, head, n);
1580 vlib_copy_buffers (to_next + n, start, n_this_frame - n);
1583 vec_foreach (bi, s->buffer_indices)
1584 clib_fifo_advance_head (bi->buffer_fifo, n_this_frame);
1586 n_trace = vlib_get_trace_count (vm, node);
1589 u32 n = clib_min (n_trace, n_this_frame);
1590 pg_input_trace (pg, node, s, to_next, n);
1591 vlib_set_trace_count (vm, node, n_trace - n);
1593 n_packets_to_generate -= n_this_frame;
1594 n_packets_generated += n_this_frame;
1595 n_left -= n_this_frame;
1596 vlib_put_next_frame (vm, node, s->next_index, n_left);
1599 return n_packets_generated;
1603 pg_input_stream (vlib_node_runtime_t * node, pg_main_t * pg, pg_stream_t * s)
1605 vlib_main_t *vm = pg->vlib_main;
1609 if (s->n_packets_limit > 0 && s->n_packets_generated >= s->n_packets_limit)
1611 pg_stream_enable_disable (pg, s, /* want_enabled */ 0);
1615 /* Apply rate limit. */
1616 time_now = vlib_time_now (vm);
1617 if (s->time_last_generate == 0)
1618 s->time_last_generate = time_now;
1620 dt = time_now - s->time_last_generate;
1621 s->time_last_generate = time_now;
1623 n_packets = VLIB_FRAME_SIZE;
1624 if (s->rate_packets_per_second > 0)
1626 s->packet_accumulator += dt * s->rate_packets_per_second;
1627 n_packets = s->packet_accumulator;
1629 /* Never allow accumulator to grow if we get behind. */
1630 s->packet_accumulator -= n_packets;
1633 /* Apply fixed limit. */
1634 if (s->n_packets_limit > 0
1635 && s->n_packets_generated + n_packets > s->n_packets_limit)
1636 n_packets = s->n_packets_limit - s->n_packets_generated;
1638 /* Generate up to one frame's worth of packets. */
1639 if (n_packets > VLIB_FRAME_SIZE)
1640 n_packets = VLIB_FRAME_SIZE;
1643 n_packets = pg_generate_packets (node, pg, s, n_packets);
1645 s->n_packets_generated += n_packets;
1651 pg_input (vlib_main_t * vm, vlib_node_runtime_t * node, vlib_frame_t * frame)
1654 pg_main_t *pg = &pg_main;
1655 uword n_packets = 0;
1658 clib_bitmap_foreach (i, pg->enabled_streams, ({
1659 n_packets += pg_input_stream (node, pg, vec_elt_at_index (pg->streams, i));
1667 VLIB_REGISTER_NODE (pg_input_node) = {
1668 .function = pg_input,
1670 .type = VLIB_NODE_TYPE_INPUT,
1672 .format_trace = format_pg_input_trace,
1674 /* Input node will be left disabled until a stream is active. */
1675 .state = VLIB_NODE_STATE_DISABLED,
/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */