1 # Copyright (c) 2024 Cisco and/or its affiliates.
2 # Copyright (c) 2024 PANTHEON.tech and/or its affiliates.
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at:
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
17 # This library defines functions used by multiple entry scripts.
18 # Keep functions ordered alphabetically, please.
20 # TODO: Add a link to bash style guide.
21 # TODO: Consider putting every die into a {} block,
22 # the code might become more readable (but longer).
25 function activate_docker_topology () {
27 # Create virtual vpp-device topology. Output of the function is topology
28 # file describing created environment saved to a file.
# NOTE(review): this excerpt elides several closing "}" and ";;" lines
# (embedded numbering jumps); the code below is not the complete function.
31 # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
32 # - TOPOLOGIES - Available topologies.
33 # - NODENESS - Node multiplicity of desired testbed.
34 # - FLAVOR - Node flavor string, usually describing the processor.
35 # - IMAGE_VER_FILE - Name of file that contains the image version.
36 # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
38 # - WORKING_TOPOLOGY - Path to topology file.
42 source "${BASH_FUNCTION_DIR}/device.sh" || {
# Read the image tag to run from the version file in the CSIT root.
45 device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
46 case_text="${NODENESS}_${FLAVOR}"
47 case "${case_text}" in
48 "1n_skx" | "1n_alt" | "1n_spr")
49 # We execute reservation over csit-shim-dcr (ssh) which runs sourced
50 # script's functions. Env variables are read from ssh output
51 # back to localhost for further processing.
52 # Shim and Jenkins executor are in the same network on the same host
53 # Connect to docker's default gateway IP and shim's exposed port
54 ssh="ssh root@172.17.0.1 -p 6022"
55 run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
56 # The "declare -f" output is long and boring.
58 # backtics to avoid https://midnight-commander.org/ticket/2142
59 env_vars=`${ssh} "$(declare -f); ${run}"` || {
60 die "Topology reservation via shim-dcr failed!"
# Source the returned environment, dropping docker binary noise lines.
64 source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
70 # We execute reservation on localhost. Sourced script automatically
71 # sets environment variables for further processing.
72 activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
75 die "Unknown specification: ${case_text}!"
# Ensure containers are removed even on abnormal exit.
78 trap 'deactivate_docker_topology' EXIT || {
79 die "Trap attempt failed, please cleanup manually. Aborting!"
82 parse_env_variables || die "Parse of environment variables failed!"
84 # Replace all variables in template with those in environment.
85 source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
86 die "Topology file create failed!"
89 WORKING_TOPOLOGY="${CSIT_DIR}/topologies/available/vpp_device.yaml"
90 mv topo.yml "${WORKING_TOPOLOGY}" || {
91 die "Topology move failed!"
# Echo the topology (minus passwords) into the log for debugging.
93 cat ${WORKING_TOPOLOGY} | grep -v password || {
94 die "Topology read failed!"
97 # Subfunctions to update data that may depend on topology reserved.
98 set_environment_variables || die
100 compose_robot_arguments || die
105 function activate_virtualenv () {
107 # Update virtualenv pip package, delete and create virtualenv directory,
108 # activate the virtualenv, install requirements, set PYTHONPATH.
111 # - ${1} - Path to existing directory for creating virtualenv in.
112 # If missing or empty, ${CSIT_DIR} is used.
113 # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
115 # - CSIT_DIR - Path to existing root of local CSIT git repository.
116 # Variables exported:
117 # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
119 # - die - Print to stderr and exit.
# NOTE(review): "${1-...}" substitutes the default only when ${1} is UNSET;
# an explicitly empty first argument yields an empty root_path, contrary to
# the "missing or empty" wording above — confirm whether ":-" was intended.
123 root_path="${1-$CSIT_DIR}"
124 env_dir="${root_path}/env"
125 req_path=${2-$CSIT_DIR/requirements.txt}
# Wipe any previous env so the install below starts from a clean state.
126 rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
# Pinned version keeps runs reproducible across executors.
127 pip3 install virtualenv==20.15.1 || {
128 die "Virtualenv package install failed."
130 virtualenv --no-download --python=$(which python3) "${env_dir}" || {
131 die "Virtualenv creation for $(which python3) failed."
134 source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
136 pip3 install -r "${req_path}" || {
137 die "Requirements installation failed."
139 # Most CSIT Python scripts assume PYTHONPATH is set and exported.
140 export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
144 function archive_tests () {
146 # Create .tar.gz of generated/tests for archiving.
147 # To be run after generate_tests, kept separate to offer more flexibility.
150 # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
152 # - ${ARCHIVE_DIR}/generated_tests.tar.gz - Archive of generated tests.
156 pushd "${ARCHIVE_DIR}" || die
# "|| true" makes the archive best-effort on purpose: a failed tar
# must not abort the job, as the archive is diagnostic-only.
157 tar czf "generated_tests.tar.gz" "${GENERATED_DIR}/tests" || true
162 function check_download_dir () {
164 # Fail if there are no files visible in ${DOWNLOAD_DIR}.
167 # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
169 # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
171 # - die - Print to stderr and exit.
# "ls -A" lists hidden entries too, so a dir with only dotfiles passes.
175 if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
176 die "No artifacts downloaded!"
181 function check_prerequisites () {
183 # Fail if prerequisites are not met.
186 # - installed - Check if application is installed/present in system.
187 # - die - Print to stderr and exit.
# sshpass is needed for password-based ssh to testbed nodes.
191 if ! installed sshpass; then
192 die "Please install sshpass before continue!"
197 function common_dirs () {
199 # Set global variables, create some directories (without touching content).
202 # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
203 # - CSIT_DIR - Path to existing root of local CSIT git repository.
204 # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
205 # - JOB_SPECS_DIR - Path to existing directory with job test specifications.
206 # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
207 # - TOOLS_DIR - Path to existing resources subdirectory "tools".
208 # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
209 # - ARCHIVE_DIR - Path to created CSIT subdirectory "archives".
210 # The name is chosen to match what ci-management expects.
211 # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
212 # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
213 # Directories created if not present:
214 # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
216 # - die - Print to stderr and exit.
# Locate this very file; "readlink -e" fails unless the path exists.
220 this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
221 die "Some error during locating of this source file."
223 BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
224 die "Some error during dirname call."
226 # Current working directory could be in a different repo, e.g. VPP.
227 pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed"
228 relative_csit_dir=$(git rev-parse --show-toplevel) || {
229 die "Git rev-parse failed."
231 CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
232 popd || die "Popd failed."
233 TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
234 die "Readlink failed."
236 JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
237 die "Readlink failed."
239 RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
240 die "Readlink failed."
242 TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
243 die "Readlink failed."
245 PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
246 die "Readlink failed."
# Note the switch from "readlink -e" to "-f" below: these directories
# may not exist yet, so the non-failing variant is used before mkdir -p.
249 ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archives") || {
250 die "Readlink failed."
252 mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
253 DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
254 die "Readlink failed."
256 mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
257 GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
258 die "Readlink failed."
260 mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
264 function compose_robot_arguments () {
266 # This function is called by run_tests function.
267 # The reason is that some jobs (bisect) perform reservation multiple times,
268 # so WORKING_TOPOLOGY can be different each time.
271 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
272 # - DUT - CSIT test/ subdirectory, set while processing tags.
273 # - TAGS - Array variable holding selected tag boolean expressions.
274 # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
275 # - TEST_CODE - The test selection string from environment or argument.
276 # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
278 # - ROBOT_ARGS - String holding part of all arguments for robot.
279 # - EXPANDED_TAGS - Array of strings robot arguments compiled from tags.
283 # No explicit check needed with "set -u".
284 ROBOT_ARGS=("--loglevel" "TRACE")
285 ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
287 # TODO: The rest does not need to be recomputed on each reservation.
288 # Refactor TEST_CODE so this part can be called only once.
289 case "${TEST_CODE}" in
291 ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
293 *"perf"* | *"bisect"*)
294 ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
297 die "Unknown specification: ${TEST_CODE}"
# Translate each tag expression into robot --exclude/--test/--include.
301 for tag in "${TAGS[@]}"; do
302 if [[ ${tag} == "!"* ]]; then
# Leading "!" marks an exclusion; strip it for robot's --exclude.
303 EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
305 if [[ ${SELECTION_MODE} == "--test" ]]; then
306 EXPANDED_TAGS+=("--test" "${tag}")
308 EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
# In --test mode the topology filter is added once, as a single include.
313 if [[ ${SELECTION_MODE} == "--test" ]]; then
314 EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}")
319 function deactivate_docker_topology () {
321 # Deactivate virtual vpp-device topology by removing containers.
324 # - NODENESS - Node multiplicity of desired testbed.
325 # - FLAVOR - Node flavor string, usually describing the processor.
329 case_text="${NODENESS}_${FLAVOR}"
330 case "${case_text}" in
331 "1n_skx" | "1n_alt" | "1n_spr")
# Cleanup runs over the same shim-dcr ssh endpoint used for activation.
332 ssh="ssh root@172.17.0.1 -p 6022"
# Forward only CSIT_* variables so the remote wrapper sees our context.
333 env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
334 # The "declare -f" output is long and boring.
336 ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
337 die "Topology cleanup via shim-dcr failed!"
343 clean_environment || {
344 die "Topology cleanup locally failed!"
349 die "Unknown specification: ${case_text}!"
# NOTE(review): the "function die () {" header line is elided from this
# excerpt (embedded numbering jumps); the lines below are its interior.
356 # Print the message to standard error and exit with error code specified
357 # by the second argument.
360 # - The default error message.
362 # - ${1} - The whole error message, be sure to quote. Optional
363 # - ${2} - the code to exit with, default: 1.
367 warn "${1:-Unspecified run-time error occurred!}"
372 function die_on_robot_error () {
374 # Source this fragment if you want to abort on any failed test case.
377 # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
379 # - die - Print to stderr and exit.
# Propagate robot's own exit status as the process exit code.
383 if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
384 die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
389 function generate_tests () {
391 # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
392 # Any previously existing content of ${GENERATED_DIR}/tests is wiped before.
393 # The generation is done by executing any *.py executable
394 # within any subdirectory after copying.
396 # This is a separate function, because this code is called
397 # both by autogen checker and entries calling run_robot.
400 # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
401 # Directories replaced:
402 # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.
406 rm -rf "${GENERATED_DIR}/tests" || die
407 cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
408 cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
409 cmd_line+=("-executable" "-name" "*.py")
410 # We sort the directories, so log output can be compared between runs.
411 file_list=$("${cmd_line[@]}" | sort) || die
413 for gen in ${file_list}; do
414 directory="$(dirname "${gen}")" || die
415 filename="$(basename "${gen}")" || die
416 pushd "${directory}" || die
# FIXME(review): "$(unknown)" looks garbled — based on the ${filename}
# assignment above, this line presumably executes ./"${filename}"; confirm
# against the upstream source before relying on this listing.
417 ./"$(unknown)" || die
423 function get_test_code () {
426 # - ${1} - Optional, argument of entry script (or empty as unset).
427 # Test code value to override job name from environment.
429 # - JOB_NAME - String affecting test selection, default if not argument.
431 # - TEST_CODE - The test selection string from environment or argument.
432 # - NODENESS - Node multiplicity of desired testbed.
433 # - FLAVOR - Node flavor string, usually describing the processor.
# Argument takes precedence; fall back to Jenkins JOB_NAME when empty.
437 TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
438 if [[ -z "${TEST_CODE}" ]]; then
439 TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."
# NOTE(review): the case arms between lines 442 and 541 are elided from
# this excerpt; only the FLAVOR suffix-stripping lines remain visible.
442 case "${TEST_CODE}" in
# Strip everything up to and including "2n-"/"3n-" to get the flavor.
541 FLAVOR="${TEST_CODE#*2n-}"
545 FLAVOR="${TEST_CODE#*3n-}"
551 function get_test_tag_string () {
554 # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
555 # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
556 # - TEST_CODE - The test selection string from environment or argument.
558 # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
559 # May be empty, or even not set on event types not adding comment.
560 # - GIT_BISECT_FROM - If bisecttest, the commit hash to bisect from.
562 # Variables exported optionally:
563 # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger.
565 # TODO: ci-management scripts no longer need to perform this.
569 if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
570 case "${TEST_CODE}" in
571 # Order matters, bisect job contains "perf" in its name.
582 die "Unknown specification: ${TEST_CODE}"
584 # Ignore lines not containing the trigger word.
585 comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
586 # The vpp-csit triggers trail stuff we are not interested in.
587 # Removing them and trigger word: https://unix.stackexchange.com/a/13472
588 # (except relying on \s whitespace, \S non-whitespace and . both).
589 # The last string is concatenated, only the middle part is expanded.
590 cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
591 # On parsing error, TEST_TAG_STRING probably stays empty.
592 TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
593 if [[ -z "${TEST_TAG_STRING-}" ]]; then
594 # Probably we got a base64 encoded comment.
595 comment="${GERRIT_EVENT_COMMENT_TEXT}"
596 comment=$(base64 --decode <<< "${comment}" || true)
597 comment=$(fgrep "${trigger}" <<< "${comment}" || true)
598 TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
600 if [[ "${trigger}" == "bisecttest" ]]; then
601 # Intentionally without quotes, so spaces delimit elements.
602 test_tag_array=(${TEST_TAG_STRING}) || die "How could this fail?"
603 # First "argument" of bisecttest is a commit hash.
604 GIT_BISECT_FROM="${test_tag_array[0]}" || {
605 die "Bisect job requires commit hash."
607 # Update the tag string (tag expressions only, no commit hash).
608 TEST_TAG_STRING="${test_tag_array[@]:1}" || {
609 die "Bisect job needs a single test, no default."
# A leading "icl"/"skx" token selects the graph node variant and is
# consumed (removed) from the tag string.
612 if [[ -n "${TEST_TAG_STRING-}" ]]; then
613 test_tag_array=(${TEST_TAG_STRING})
614 if [[ "${test_tag_array[0]}" == "icl" ]]; then
615 export GRAPH_NODE_VARIANT="icl"
616 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
617 elif [[ "${test_tag_array[0]}" == "skx" ]]; then
618 export GRAPH_NODE_VARIANT="skx"
619 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
626 function installed () {
628 # Check if the given utility is installed. Fail if not installed.
630 # Duplicate of common.sh function, as this file is also used standalone.
633 # - ${1} - Utility to check.
635 # - 0 - If command is installed.
636 # - 1 - If command is not installed.
# NOTE(review): the function body is elided from this excerpt
# (presumably a "command -v"-style lookup) — consult the full source.
644 function move_archives () {
646 # Move archive directory to top of workspace, if not already there.
648 # ARCHIVE_DIR is positioned relative to CSIT_DIR,
649 # but in some jobs CSIT_DIR is not same as WORKSPACE
650 # (e.g. under VPP_DIR). To simplify ci-management settings,
651 # we want to move the data to the top. We do not want simple copy,
652 # as ci-management is eager with recursive search.
654 # As some scripts may call this function multiple times,
655 # the actual implementation use copying and deletion,
656 # so the workspace gets "union" of contents (except overwrites on conflict).
657 # The consequence is empty ARCHIVE_DIR remaining after this call.
659 # As the source directory is emptied,
660 # the check for dirs being different is essential.
663 # - WORKSPACE - Jenkins workspace, move only if the value is not empty.
664 # Can be unset, then it speeds up manual testing.
665 # - ARCHIVE_DIR - Path to directory with content to be moved.
666 # Directories updated:
667 # - ${WORKSPACE}/archives/ - Created if does not exist.
668 # Content of ${ARCHIVE_DIR}/ is moved.
670 # - die - Print to stderr and exit.
674 if [[ -n "${WORKSPACE-}" ]]; then
# NOTE(review): this readlink is unchecked, unlike similar calls in
# common_dirs — confirm a failure here is acceptable (target stays empty).
675 target=$(readlink -f "${WORKSPACE}/archives")
676 if [[ "${target}" != "${ARCHIVE_DIR}" ]]; then
677 mkdir -p "${target}" || die "Archives dir create failed."
678 cp -rf "${ARCHIVE_DIR}"/* "${target}" || die "Copy failed."
679 rm -rf "${ARCHIVE_DIR}"/* || die "Delete failed."
685 function prepare_topology () {
687 # Prepare virtual testbed topology if needed based on flavor.
690 # - TEST_CODE - String affecting test selection, usually jenkins job name.
691 # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
692 # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
694 # - TERRAFORM_MODULE_DIR - Terraform module directory.
696 # - die - Print to stderr and exit.
697 # - terraform_init - Terraform init topology.
698 # - terraform_apply - Terraform apply topology.
# Each cloud flavor follows the same init -> trap destroy -> apply flow;
# only the terraform module name differs.
702 case_text="${NODENESS}_${FLAVOR}"
703 case "${case_text}" in
704 "1n_aws" | "2n_aws" | "3n_aws")
705 export TF_VAR_testbed_name="${TEST_CODE}"
706 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
707 terraform_init || die "Failed to call terraform init."
# Destroy cloud resources on any error or exit, to avoid leaks.
708 trap "terraform_destroy" ERR EXIT || {
709 die "Trap attempt failed, please cleanup manually. Aborting!"
711 terraform_apply || die "Failed to call terraform apply."
713 "2n_c7gn" | "3n_c7gn")
714 export TF_VAR_testbed_name="${TEST_CODE}"
715 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn"
716 terraform_init || die "Failed to call terraform init."
717 trap "terraform_destroy" ERR EXIT || {
718 die "Trap attempt failed, please cleanup manually. Aborting!"
720 terraform_apply || die "Failed to call terraform apply."
722 "1n_c6in" | "2n_c6in" | "3n_c6in")
723 export TF_VAR_testbed_name="${TEST_CODE}"
724 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
725 terraform_init || die "Failed to call terraform init."
726 trap "terraform_destroy" ERR EXIT || {
727 die "Trap attempt failed, please cleanup manually. Aborting!"
729 terraform_apply || die "Failed to call terraform apply."
735 function reserve_and_cleanup_testbed () {
737 # Reserve physical testbed, perform cleanup, register trap to unreserve.
738 # When cleanup fails, remove from topologies and keep retrying
739 # until all topologies are removed.
741 # Multiple other functions are called from here,
742 # as they set variables that depend on reserved topology data.
745 # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
746 # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
747 # - BUILD_TAG - Any string suitable as filename, identifying
748 # test run executing this function. May be unset.
750 # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
751 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
753 # - die - Print to stderr and exit.
754 # - ansible_playbook - Perform an action using ansible, see ansible.sh
756 # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.
# Try each candidate topology in turn until one is reserved and cleaned.
761 for topo in "${TOPOLOGIES[@]}"; do
763 scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
764 opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
# Exit status of this call is inspected below (as "result"), not here.
765 python3 "${scrpt}" "${opts[@]}"
768 if [[ "${result}" == "0" ]]; then
769 # Trap unreservation before cleanup check,
770 # so multiple jobs showing failed cleanup improve chances
771 # of humans to notice and fix.
772 WORKING_TOPOLOGY="${topo}"
773 echo "Reserved: ${WORKING_TOPOLOGY}"
774 trap "untrap_and_unreserve_testbed" EXIT || {
775 message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
776 untrap_and_unreserve_testbed "${message}" || {
777 die "Teardown should have died, not failed."
779 die "Trap attempt failed, unreserve succeeded. Aborting."
781 # Cleanup + calibration checks
783 ansible_playbook "cleanup, calibration"
786 if [[ "${result}" == "0" ]]; then
789 warn "Testbed cleanup failed: ${topo}"
790 untrap_and_unreserve_testbed "Fail of unreserve after cleanup."
792 # Else testbed is accessible but currently reserved, moving on.
795 if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
796 # Exit the infinite while loop if we made a reservation.
797 warn "Reservation and cleanup successful."
801 if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
802 die "Run out of operational testbeds!"
805 # Wait ~3minutes before next try.
# Random jitter (0-19s) on top of 180s reduces thundering-herd retries.
806 sleep_time="$[ ( ${RANDOM} % 20 ) + 180 ]s" || {
807 die "Sleep time calculation failed."
809 echo "Sleeping ${sleep_time}"
810 sleep "${sleep_time}" || die "Sleep failed."
813 # Subfunctions to update data that may depend on topology reserved.
814 set_environment_variables || die
816 compose_robot_arguments || die
820 function run_robot () {
822 # Run robot with options based on input variables.
824 # Testbed has to be reserved already,
825 # as some data may have changed between reservations,
826 # for example excluded NICs.
829 # - CSIT_DIR - Path to existing root of local CSIT git repository.
830 # - ARCHIVE_DIR - Path to store robot result files in.
831 # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.sh
832 # - GENERATED_DIR - Tests are assumed to be generated under there.
833 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
834 # - DUT - CSIT test/ subdirectory, set while processing tags.
835 # - TAGS - Array variable holding selected tag boolean expressions.
836 # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
837 # - TEST_CODE - The test selection string from environment or argument.
839 # - ROBOT_ARGS - String holding part of all arguments for robot.
840 # - EXPANDED_TAGS - Array of string robot arguments compiled from tags.
841 # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
843 # - die - Print to stderr and exit.
847 all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
848 all_options+=("${EXPANDED_TAGS[@]}")
850 pushd "${CSIT_DIR}" || die "Change directory operation failed."
# A nonzero robot status is captured, not fatal here; aborting on
# failures is deferred to die_on_robot_error.
852 robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
853 ROBOT_EXIT_STATUS="$?"
856 popd || die "Change directory operation failed."
860 function select_arch_os () {
862 # Set variables affected by local CPU architecture and operating system.
865 # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
866 # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
867 # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb."
# /etc/os-release provides ID/VERSION used by the (partially elided)
# case statement below.
871 source /etc/os-release || die "Get OS release failed."
876 *"LTS (Jammy Jellyfish)"*)
877 IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
878 VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
882 die "Unsupported Ubuntu version!"
887 die "Unsupported distro or OS!"
891 arch=$(uname -m) || {
892 die "Get CPU architecture failed."
# On aarch64 the device image name gets an _ARM suffix.
897 IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
905 function select_tags () {
907 # Only to be called from the reservation function,
908 # as resulting tags may change based on topology data.
911 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
912 # - TEST_CODE - String affecting test selection, usually jenkins job name.
913 # - DUT - CSIT test/ subdirectory, set while processing tags.
914 # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
916 # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
917 # - BASH_FUNCTION_DIR - Directory with input files to process.
919 # - TAGS - Array of processed tag boolean expressions.
920 # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
# Patterns delimit the node sections to strip from topology yaml
# before harvesting NIC model names.
925 case "${TEST_CODE}" in
926 *"1n-aws"* | *"1n-c6in"*)
927 start_pattern='^ SUT:'
930 start_pattern='^ TG:'
933 end_pattern='^ \? \?[A-Za-z0-9]\+:'
934 # Remove the sections from topology file
935 sed_command="/${start_pattern}/,/${end_pattern}/d"
936 # All topologies NICs
937 available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
938 | grep -hoP "model: \K.*" | sort -u)
939 # Selected topology NICs
940 reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
941 | grep -hoP "model: \K.*" | sort -u)
942 # All topologies NICs - Selected topology NICs
943 exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
944 die "Computation of excluded NICs failed."
947 # Select default NIC tag.
948 case "${TEST_CODE}" in
950 default_nic="nic_intel-e822cq"
953 default_nic="nic_intel-e823c"
956 default_nic="nic_intel-x520-da2"
958 *"3n-icx" | *"2n-icx")
959 default_nic="nic_intel-e810cq"
962 default_nic="nic_mellanox-cx7veat"
965 default_nic="nic_intel-e810cq"
968 default_nic="nic_intel-e810cq"
970 *"2n-clx" | *"2n-zn2")
971 default_nic="nic_intel-xxv710"
973 *"2n-tx2" | *"3n-alt")
974 default_nic="nic_intel-xl710"
976 *"1n-aws" | *"2n-aws" | *"3n-aws")
977 default_nic="nic_amazon-nitro-50g"
979 *"2n-c7gn" | *"3n-c7gn")
980 default_nic="nic_amazon-nitro-100g"
982 *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
983 default_nic="nic_amazon-nitro-200g"
985 *"2n-x-"* | *"3n-x-"*)
986 default_nic="nic_intel-e810cq"
989 default_nic="nic_intel-x710"
993 sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
# awk program (built as a string) that maps short NIC names in job-spec
# files to full test-name fragments and derives driver/core prefixes.
995 awk_nics_sub_cmd+='gsub("xxv710","25ge2p1xxv710");'
996 awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
997 awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
998 awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
999 awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
1000 awk_nics_sub_cmd+='gsub("2p1cx7veat","200ge2p1cx7veat");'
1001 awk_nics_sub_cmd+='gsub("6p3cx7veat","200ge6p3cx7veat");'
1002 awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
1003 awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
1004 awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");'
1005 awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");'
1006 awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
1007 awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
1008 awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
1009 awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
1010 awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
1011 awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");'
1012 awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
1013 awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
1014 awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
1015 awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
1016 awk_nics_sub_cmd+='else drv="";'
1017 awk_nics_sub_cmd+='if ($1 =="-") cores="";'
1018 awk_nics_sub_cmd+='else cores=$1;'
1019 awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-" cores "-" drv $11"-"$5'
1021 # Tag file directory shorthand.
1022 tfd="${JOB_SPECS_DIR}"
1023 case "${TEST_CODE}" in
1024 # Select specific performance tests based on jenkins job type variable.
1026 readarray -t test_tag_array <<< $(grep -v "#" \
1027 ${tfd}/vpp_device/${DUT}-${NODENESS}-${FLAVOR}.md |
1028 awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
1029 SELECTION_MODE="--test"
1031 *"hoststack-daily"* )
1032 readarray -t test_tag_array <<< $(grep -v "#" \
1033 ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
1034 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1035 SELECTION_MODE="--test"
1038 readarray -t test_tag_array <<< $(grep -v "#" \
1039 ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
1040 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1041 SELECTION_MODE="--test"
1044 readarray -t test_tag_array <<< $(grep -v "#" \
1045 ${tfd}/mrr_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
1046 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1047 SELECTION_MODE="--test"
1050 readarray -t test_tag_array <<< $(grep -v "#" \
1051 ${tfd}/mrr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
1052 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1053 SELECTION_MODE="--test"
1055 *"report-iterative"* )
1056 test_sets=(${TEST_TAG_STRING//:/ })
1057 # Run only one test set per run
1058 report_file=${test_sets[0]}.md
1059 readarray -t test_tag_array <<< $(grep -v "#" \
1060 ${tfd}/report_iterative/${NODENESS}-${FLAVOR}/${report_file} |
1061 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1062 SELECTION_MODE="--test"
1064 *"report-coverage"* )
1065 test_sets=(${TEST_TAG_STRING//:/ })
1066 # Run only one test set per run
1067 report_file=${test_sets[0]}.md
1068 readarray -t test_tag_array <<< $(grep -v "#" \
1069 ${tfd}/report_coverage/${NODENESS}-${FLAVOR}/${report_file} |
1070 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1071 SELECTION_MODE="--test"
1074 if [[ -z "${TEST_TAG_STRING-}" ]]; then
1075 # If nothing is specified, we will run pre-selected tests by
1077 test_tag_array=("mrrAND${default_nic}AND1cAND64bANDethip4-ip4base"
1078 "mrrAND${default_nic}AND1cAND78bANDethip6-ip6base"
1079 "mrrAND${default_nic}AND1cAND64bANDeth-l2bdbasemaclrn"
1080 "mrrAND${default_nic}AND1cAND64bANDeth-l2xcbase"
1081 "!drv_af_xdp" "!drv_avf")
1083 # If trigger contains tags, split them into array.
1084 test_tag_array=(${TEST_TAG_STRING//:/ })
1086 SELECTION_MODE="--include"
1090 # Blacklisting certain tags per topology.
1092 # Reasons for blacklisting:
1093 # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
1094 case "${TEST_CODE}" in
1096 test_tag_array+=("!avf")
1097 test_tag_array+=("!vhost")
1098 test_tag_array+=("!flow")
1101 test_tag_array+=("!flow")
1104 test_tag_array+=("!ipsechw")
1107 test_tag_array+=("!ipsechw")
1112 test_tag_array+=("!ipsechw")
1115 test_tag_array+=("!ipsechw")
1118 test_tag_array+=("!ipsechw")
1121 test_tag_array+=("!ipsechw")
1122 test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
1133 test_tag_array+=("!drv_avf")
1134 test_tag_array+=("!ipsechw")
1136 *"1n-aws" | *"2n-aws" | *"3n-aws")
1137 test_tag_array+=("!ipsechw")
1139 *"2n-c7gn" | *"3n-c7gn")
1140 test_tag_array+=("!ipsechw")
1142 *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
1143 test_tag_array+=("!ipsechw")
1145 *"2n-x-"* | *"3n-x-"*)
1149 # We will add excluded NICs.
1150 test_tag_array+=("${exclude_nics[@]/#/!NIC_}")
# Prefix non-exclude tags with the default NIC tag for VPP perf jobs
# that did not pick a NIC explicitly.
1154 if [[ "${TEST_CODE}" != *"daily"* ]]; then
1155 if [[ "${TEST_CODE}" == "vpp-"* ]]; then
1156 if [[ "${TEST_CODE}" != *"device"* ]]; then
1157 # Automatic prefixing for VPP perf jobs to limit the NIC used.
1158 if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then
1159 prefix="${default_nic}AND"
1165 for tag in "${test_tag_array[@]}"; do
1166 if [[ "${tag}" == "!"* ]]; then
1167 # Exclude tags are not prefixed.
1169 elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
1170 # Badly formed tag expressions can trigger way too much tests.
1172 warn "The following tag expression hints at bad trigger: ${tag}"
1173 warn "Possible cause: Multiple triggers in a single comment."
1174 die "Aborting to avoid triggering too many tests."
1175 elif [[ "${tag}" == *"OR"* ]]; then
1176 # If OR had higher precedence than AND, it would be useful here.
1177 # Some people think it does, thus triggering way too much tests.
1179 warn "The following tag expression hints at bad trigger: ${tag}"
1180 warn "Operator OR has lower precedence than AND. Use space instead."
1181 die "Aborting to avoid triggering too many tests."
1182 elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
1183 # Empty and comment lines are skipped.
1184 # Other lines are normal tags, they are to be prefixed.
1185 TAGS+=("${prefix}${tag}")
1192 function select_topology () {
# Map the NODENESS_FLAVOR pair onto the set of topology yaml files
# (glob under TOPOLOGIES_DIR) and the matching topology tag expression.
# NOTE(review): several case patterns and ";;" terminators are elided in
# this listing; each TOPOLOGIES assignment below belongs to one case arm.
1195 # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
1196 # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
1197 # - CSIT_DIR - Path to existing root of local CSIT git repository.
1198 # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
1200 # - TOPOLOGIES - Array of paths to suitable topology yaml files.
1201 # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
1203 # - die - Print to stderr and exit.
# Compose the case selector from the two testbed-describing variables.
1207 case_text="${NODENESS}_${FLAVOR}"
1208 case "${case_text}" in
# Cloud (AWS) single-node testbed.
1210 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
1211 TOPOLOGIES_TAGS="1_node_single_link_topo"
1214 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
1215 TOPOLOGIES_TAGS="1_node_single_link_topo"
1217 "1n_alt" | "1n_spr")
# vpp_device topologies are templates, not final yaml files,
# and use a 2-node tag despite the 1n nodeness.
1218 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
1219 TOPOLOGIES_TAGS="2_node_single_link_topo"
1222 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
1223 TOPOLOGIES_TAGS="2_node_single_link_topo"
1226 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
1227 TOPOLOGIES_TAGS="2_node_single_link_topo"
1230 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml )
1231 TOPOLOGIES_TAGS="2_node_single_link_topo"
1234 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
1235 TOPOLOGIES_TAGS="2_node_single_link_topo"
# Physical 2-node testbeds; "*_link_topo" keeps both single and
# double link variants eligible.
1238 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml )
1239 TOPOLOGIES_TAGS="2_node_*_link_topo"
1242 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml )
1243 TOPOLOGIES_TAGS="2_node_*_link_topo"
1246 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml )
1247 TOPOLOGIES_TAGS="2_node_*_link_topo"
1250 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml )
1251 TOPOLOGIES_TAGS="2_node_single_link_topo"
1254 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml )
1255 TOPOLOGIES_TAGS="2_node_*_link_topo"
1258 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml )
1259 TOPOLOGIES_TAGS="3_node_single_link_topo"
1262 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
1263 TOPOLOGIES_TAGS="3_node_single_link_topo"
1266 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml )
1267 TOPOLOGIES_TAGS="3_node_single_link_topo"
1270 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
1271 TOPOLOGIES_TAGS="3_node_single_link_topo"
1274 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml )
1275 # Trailing underscore is needed to distinguish from 3n_icxd.
1276 TOPOLOGIES_TAGS="3_node_*_link_topo"
1279 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml )
1280 TOPOLOGIES_TAGS="3_node_single_link_topo"
1283 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml )
1284 TOPOLOGIES_TAGS="3_node_single_link_topo"
1287 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml )
1288 TOPOLOGIES_TAGS="3_node_single_link_topo"
# 3na/3nb are distinct SPR wiring variants kept as separate arms.
1291 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml )
1292 TOPOLOGIES_TAGS="3_node_*_link_topo"
1295 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml )
1296 TOPOLOGIES_TAGS="3_node_*_link_topo"
# Generic fallbacks: interpolate FLAVOR into the glob for testbeds
# that do not need a dedicated arm above.
1299 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_"${FLAVOR}"*.yaml )
1300 TOPOLOGIES_TAGS="2_node_single_link_topo"
1303 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_"${FLAVOR}"*.yaml )
1304 TOPOLOGIES_TAGS="3_node_single_link_topo"
1307 # No falling back to default, that should have been done
1308 # by the function which has set NODENESS and FLAVOR.
1309 die "Unknown specification: ${case_text}"
# Empty (unset) TOPOLOGIES means no glob matched; fail loudly rather
# than let a later reservation loop spin on nothing.
1312 if [[ -z "${TOPOLOGIES-}" ]]; then
1313 die "No applicable topology found!"
1318 function set_environment_variables () {
# Export per-testbed tuning variables (mostly TRex traffic-generator
# settings) selected by substring match on TEST_CODE.
# NOTE(review): some case patterns and ";;" terminators are elided in
# this listing; each export group belongs to one case arm.
1320 # Depending on testbed topology, overwrite defaults set in the
1321 # resources/libraries/python/Constants.py file
1323 # Only to be called from the reservation function,
1324 # as resulting values may change based on topology data.
1327 # - TEST_CODE - String affecting test selection, usually jenkins job name.
1329 # See specific cases
# Variables exported (per matching arm): TREX_RX_DESCRIPTORS_COUNT,
# TREX_EXTRA_CMDLINE, TREX_CORE_COUNT, PERF_TRIAL_STL_DELAY.
1333 case "${TEST_CODE}" in
1334 *"1n-aws" | *"2n-aws" | *"3n-aws")
# AWS instances: larger RX ring and mbuf factor for TRex.
1335 export TREX_RX_DESCRIPTORS_COUNT=1024
1336 export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1337 export TREX_CORE_COUNT=6
1338 # Settings to prevent duration stretching.
1339 export PERF_TRIAL_STL_DELAY=0.1
1341 *"2n-c7gn" | *"3n-c7gn")
# Graviton c7gn instances: same TRex tuning as the aws arm above.
1342 export TREX_RX_DESCRIPTORS_COUNT=1024
1343 export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1344 export TREX_CORE_COUNT=6
1345 # Settings to prevent duration stretching.
1346 export PERF_TRIAL_STL_DELAY=0.1
1348 *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
# c6in instances: same TRex tuning as the aws arm above.
1349 export TREX_RX_DESCRIPTORS_COUNT=1024
1350 export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1351 export TREX_CORE_COUNT=6
1352 # Settings to prevent duration stretching.
1353 export PERF_TRIAL_STL_DELAY=0.1
1356 # Maciek's workaround for Zen2 with lower amount of cores.
1357 export TREX_CORE_COUNT=14
1359 *"2n-x-"* | *"3n-x-"* )
# External (-x-) testbeds: minimal TRex core count.
1360 export TREX_CORE_COUNT=2
1366 function untrap_and_unreserve_testbed () {
# Remove the EXIT trap, run best-effort cleanup, then release the
# testbed reservation; for cloud testbeds also destroy the
# terraform-provisioned resources.
# NOTE(review): some "fi"/";;"/"esac" lines are elided in this listing.
1368 # Use this as a trap function to ensure testbed does not remain reserved.
1369 # Perhaps call directly before script exit, to free testbed for other jobs.
1370 # This function is smart enough to avoid multiple unreservations (so safe).
1371 # Topo cleanup is executed (call it best practice), ignoring failures.
1374 # - default message to die with if testbed might remain reserved.
1376 # - ${1} - Message to die with if unreservation fails. Default hardcoded.
1377 # Variables read (by inner function):
1378 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
1379 # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
1381 # - TERRAFORM_MODULE_DIR - Terraform module directory.
1382 # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
1383 # Trap unregistered:
1384 # - EXIT - Failure to untrap is reported, but ignored otherwise.
1386 # - die - Print to stderr and exit.
1387 # - ansible_playbook - Perform an action using ansible, see ansible.sh
1390 set +eu # We do not want to exit early in a "teardown" function.
# Deactivate the EXIT trap first so a failure below cannot re-enter us.
1391 trap - EXIT || echo "Trap deactivation failed, continuing anyway."
1392 wt="${WORKING_TOPOLOGY}" # Just to avoid too long lines.
# Empty WORKING_TOPOLOGY marks a previous successful unreservation;
# warn (do not die) so repeated invocation stays safe.
1393 if [[ -z "${wt-}" ]]; then
1395 warn "Testbed looks unreserved already. Trap removal failed before?"
# Best-effort topology cleanup; failures are deliberately ignored.
1397 ansible_playbook "cleanup" || true
# "-c" cancels the reservation recorded for topology file ${wt}.
1398 python3 "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
1399 die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
# Cloud testbeds: pick the terraform module matching the job name
# and tear down the provisioned instances.
1401 case "${TEST_CODE}" in
1402 *"1n-aws" | *"2n-aws" | *"3n-aws")
# NOTE(review): aws module name carries a "-c5n" suffix, unlike the
# c7gn/c6in arms below — confirm this asymmetry is intentional.
1403 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
1404 terraform_destroy || die "Failed to call terraform destroy."
1406 *"2n-c7gn" | *"3n-c7gn")
1407 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1408 terraform_destroy || die "Failed to call terraform destroy."
1410 *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
1411 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1412 terraform_destroy || die "Failed to call terraform destroy."
1425 # Print the message to standard error.
1428 # - ${@} - The text of the message.