1 # Copyright (c) 2022 Cisco and/or its affiliates.
2 # Copyright (c) 2022 PANTHEON.tech and/or its affiliates.
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at:
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
17 # This library defines functions used by multiple entry scripts.
18 # Keep functions ordered alphabetically, please.
20 # TODO: Add a link to bash style guide.
21 # TODO: Consider putting every die into a {} block,
22 # the code might become more readable (but longer).
function activate_docker_topology () {

    # Create virtual vpp-device topology. Output of the function is topology
    # file describing created environment saved to a file.
    #
    # NOTE(review): this excerpt appears truncated — case patterns, "esac",
    # and the closing "}" of several "|| {" error groups are not visible here.
    # Code below is kept exactly as found; confirm against the full file.
    #
    # Variables read:
    # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
    # - TOPOLOGIES - Available topologies.
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
    # - IMAGE_VER_FILE - Name of file that contains the image version.
    # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
    # Variables set:
    # - WORKING_TOPOLOGY - Path to topology file.

    source "${BASH_FUNCTION_DIR}/device.sh" || {
    # Read the device image version from the per-repo version file.
    device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
            # We execute reservation over csit-shim-dcr (ssh) which runs sourced
            # script's functions. Env variables are read from ssh output
            # back to localhost for further processing.
            # Shim and Jenkins executor are in the same network on the same host
            # Connect to docker's default gateway IP and shim's exposed port
            ssh="ssh root@172.17.0.1 -p 6022"
            run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
            # The "declare -f" output is long and boring.
            # backtics to avoid https://midnight-commander.org/ticket/2142
            env_vars=`${ssh} "$(declare -f); ${run}"` || {
                die "Topology reservation via shim-dcr failed!"
            # Import the environment reported by the remote side, but do not
            # re-run any docker invocations it echoed.
            source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
            # We execute reservation on localhost. Sourced script automatically
            # sets environment variables for further processing.
            activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
            die "Unknown specification: ${case_text}!"
    # Ensure containers are cleaned up even if later steps die.
    trap 'deactivate_docker_topology' EXIT || {
        die "Trap attempt failed, please cleanup manually. Aborting!"
    parse_env_variables || die "Parse of environment variables failed!"
    # Replace all variables in template with those in environment.
    source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
        die "Topology file create failed!"
    WORKING_TOPOLOGY="${CSIT_DIR}/topologies/available/vpp_device.yaml"
    mv topo.yml "${WORKING_TOPOLOGY}" || {
        die "Topology move failed!"
    # Print the topology for the log, hiding any password lines.
    cat ${WORKING_TOPOLOGY} | grep -v password || {
        die "Topology read failed!"
function activate_virtualenv () {

    # Update virtualenv pip package, delete and create virtualenv directory,
    # activate the virtualenv, install requirements, set PYTHONPATH.
    #
    # NOTE(review): closing "}" of the "|| {" error groups is not visible in
    # this excerpt; code kept as-is.
    #
    # Arguments:
    # - ${1} - Path to existing directory for creating virtualenv in.
    #   If missing or empty, ${CSIT_DIR} is used.
    # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
    # Variables read:
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # Variables exported:
    # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
    # Functions called:
    # - die - Print to stderr and exit.

    root_path="${1-$CSIT_DIR}"
    env_dir="${root_path}/env"
    req_path=${2-$CSIT_DIR/requirements.txt}
    # Wipe any previous virtualenv so the install below starts clean.
    rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
    pip3 install virtualenv==20.0.20 || {
        die "Virtualenv package install failed."
    virtualenv --no-download --python=$(which python3) "${env_dir}" || {
        die "Virtualenv creation for $(which python3) failed."
    source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
    pip3 install -r "${req_path}" || {
        die "Requirements installation failed."
    # Most CSIT Python scripts assume PYTHONPATH is set and exported.
    export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
function archive_tests () {

    # Create .tar.gz of generated/tests for archiving.
    # To be run after generate_tests, kept separate to offer more flexibility.
    #
    # Directories read:
    # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
    # Files updated:
    # - ${ARCHIVE_DIR}/generated_tests.tar.gz - Archive of generated tests.
    #
    # NOTE(review): the matching popd and closing "}" are not visible in this
    # excerpt.

    pushd "${ARCHIVE_DIR}" || die
    # Archiving is best-effort; "|| true" keeps a tar failure non-fatal.
    tar czf "generated_tests.tar.gz" "${GENERATED_DIR}/tests" || true
function check_download_dir () {

    # Fail if there are no files visible in ${DOWNLOAD_DIR}.
    #
    # Variables read:
    # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
    # Directories read:
    # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
    # Functions called:
    # - die - Print to stderr and exit.

    # "ls -A" is empty only for a truly empty directory (dotfiles included).
    if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
        die "No artifacts downloaded!"
function check_prerequisites () {

    # Fail if prerequisites are not met.
    #
    # Functions called:
    # - installed - Check if application is installed/present in system.
    # - die - Print to stderr and exit.

    # sshpass is needed by testbed reservation/cleanup helpers.
    if ! installed sshpass; then
        die "Please install sshpass before continue!"
function common_dirs () {

    # Set global variables, create some directories (without touching content).
    #
    # Variables set:
    # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # - JOB_SPECS_DIR - Path to existing directory with job test specifications.
    # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
    # - TOOLS_DIR - Path to existing resources subdirectory "tools".
    # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
    # - ARCHIVE_DIR - Path to created CSIT subdirectory "archives".
    #   The name is chosen to match what ci-management expects.
    # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
    # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
    # Directories created if not present:
    # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
    # Functions called:
    # - die - Print to stderr and exit.
    #
    # NOTE(review): closing "}" of the "|| {" groups is not visible in this
    # excerpt; code kept as-is.

    # "readlink -e" requires the path to exist; used for must-exist dirs.
    this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
        die "Some error during locating of this source file."
    BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
        die "Some error during dirname call."
    # Current working directory could be in a different repo, e.g. VPP.
    pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed"
    relative_csit_dir=$(git rev-parse --show-toplevel) || {
        die "Git rev-parse failed."
    CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
    popd || die "Popd failed."
    TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
        die "Readlink failed."
    JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/docs/job_specs") || {
        die "Readlink failed."
    RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
        die "Readlink failed."
    TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
        die "Readlink failed."
    DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
        die "Readlink failed."
    PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
        die "Readlink failed."
    # "readlink -f" tolerates non-existent leaf; these dirs are mkdir-ed below.
    ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archives") || {
        die "Readlink failed."
    mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
    DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
        die "Readlink failed."
    mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
    GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
        die "Readlink failed."
    mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
function compose_pybot_arguments () {

    # Variables read:
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - DUT - CSIT test/ subdirectory, set while processing tags.
    # - TAGS - Array variable holding selected tag boolean expressions.
    # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
    # - TEST_CODE - The test selection string from environment or argument.
    # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
    # Variables set:
    # - PYBOT_ARGS - String holding part of all arguments for pybot.
    # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.
    #
    # NOTE(review): case patterns, "esac", and some fi/done lines are not
    # visible in this excerpt; code kept as-is.

    # No explicit check needed with "set -u".
    PYBOT_ARGS=("--loglevel" "TRACE")
    PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
    case "${TEST_CODE}" in
            PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
            PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
            die "Unknown specification: ${TEST_CODE}"
    for tag in "${TAGS[@]}"; do
        if [[ ${tag} == "!"* ]]; then
            # Leading "!" marks an exclude expression; strip it for --exclude.
            EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
            if [[ ${SELECTION_MODE} == "--test" ]]; then
                EXPANDED_TAGS+=("--test" "${tag}")
                # Include expressions are ANDed with the topology filter.
                EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
    if [[ ${SELECTION_MODE} == "--test" ]]; then
        EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}")
function deactivate_docker_topology () {

    # Deactivate virtual vpp-device topology by removing containers.
    #
    # Variables read:
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
    #
    # NOTE(review): case patterns, "esac", and closing braces are not visible
    # in this excerpt; code kept as-is.

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
            # Same shim-dcr endpoint as used during activation.
            ssh="ssh root@172.17.0.1 -p 6022"
            # Forward only CSIT_* variables to the remote cleanup wrapper.
            env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
            # The "declare -f" output is long and boring.
            ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
                die "Topology cleanup via shim-dcr failed!"
            clean_environment || {
                die "Topology cleanup locally failed!"
            die "Unknown specification: ${case_text}!"
# Print the message to standard error and exit with error code specified
348 # by the second argument.
351 # - The default error message.
353 # - ${1} - The whole error message, be sure to quote. Optional
354 # - ${2} - the code to exit with, default: 1.
358 warn "${1:-Unspecified run-time error occurred!}"
function die_on_pybot_error () {

    # Source this fragment if you want to abort on any failed test case.
    #
    # Variables read:
    # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
    # Functions called:
    # - die - Print to stderr and exit.

    # Propagate pybot's own exit status as this script's exit code.
    if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
        die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
function generate_tests () {

    # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
    # Any previously existing content of ${GENERATED_DIR}/tests is wiped before.
    # The generation is done by executing any *.py executable
    # within any subdirectory after copying.
    #
    # This is a separate function, because this code is called
    # both by autogen checker and entries calling run_pybot.
    #
    # Directories read:
    # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
    # Directories replaced:
    # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.
    # Functions called:
    # - die - Print to stderr and exit.

    rm -rf "${GENERATED_DIR}/tests" || die
    cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
    cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
    cmd_line+=("-executable" "-name" "*.py")
    # We sort the directories, so log output can be compared between runs.
    file_list=$("${cmd_line[@]}" | sort) || die
    # Word-splitting of ${file_list} is intentional: one path per word,
    # find output here contains no whitespace inside paths.
    for gen in ${file_list}; do
        directory="$(dirname "${gen}")" || die
        filename="$(basename "${gen}")" || die
        pushd "${directory}" || die
        # Bug fix: run the discovered generator script itself.
        # Previously this line was ./"$(unknown)", which attempted command
        # substitution of a nonexistent "unknown" command instead of
        # executing the generator.
        ./"${filename}" || die
        popd || die
    done
}
function get_test_code () {

    # Arguments:
    # - ${1} - Optional, argument of entry script (or empty as unset).
    #   Test code value to override job name from environment.
    # Variables read:
    # - JOB_NAME - String affecting test selection, default if not argument.
    # Variables set:
    # - TEST_CODE - The test selection string from environment or argument.
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
    #
    # NOTE(review): the case body parsing TEST_CODE into NODENESS/FLAVOR is
    # not visible in this excerpt.

    # Explicit argument wins over JOB_NAME from environment.
    TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
    if [[ -z "${TEST_CODE}" ]]; then
        TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."
    case "${TEST_CODE}" in
function get_test_tag_string () {

    # Variables read:
    # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
    # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
    # - TEST_CODE - The test selection string from environment or argument.
    # Variables set:
    # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
    #   May be empty, or even not set on event types not adding comment.
    #
    # TODO: ci-management scripts no longer need to perform this.
    #
    # NOTE(review): case patterns selecting ${trigger} and several closing
    # lines are not visible in this excerpt; code kept as-is.

    if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
        case "${TEST_CODE}" in
                die "Unknown specification: ${TEST_CODE}"
        # Ignore lines not containing the trigger word.
        comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
        # The vpp-csit triggers trail stuff we are not interested in.
        # Removing them and trigger word: https://unix.stackexchange.com/a/13472
        # (except relying on \s whitespace, \S non-whitespace and . both).
        # The last string is concatenated, only the middle part is expanded.
        cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
        # On parsing error, TEST_TAG_STRING probably stays empty.
        TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
        if [[ -z "${TEST_TAG_STRING-}" ]]; then
            # Probably we got a base64 encoded comment.
            comment="${GERRIT_EVENT_COMMENT_TEXT}"
            comment=$(base64 --decode <<< "${comment}" || true)
            comment=$(fgrep "${trigger}" <<< "${comment}" || true)
            TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
        # Word-splitting of TEST_TAG_STRING into the array is intentional.
        if [[ -n "${TEST_TAG_STRING-}" ]]; then
            test_tag_array=(${TEST_TAG_STRING})
            if [[ "${test_tag_array[0]}" == "icl" ]]; then
                # First word may be a graph node variant, not a tag.
                export GRAPH_NODE_VARIANT="icl"
                TEST_TAG_STRING="${test_tag_array[@]:1}" || true
            elif [[ "${test_tag_array[0]}" == "skx" ]]; then
                export GRAPH_NODE_VARIANT="skx"
                TEST_TAG_STRING="${test_tag_array[@]:1}" || true
function installed () {

    # Check if the given utility is installed. Fail if not installed.
    #
    # Duplicate of common.sh function, as this file is also used standalone.
    #
    # Arguments:
    # - ${1} - Utility to check.
    # Returns:
    # - 0 - If command is installed.
    # - 1 - If command is not installed.
    #
    # NOTE(review): the function body (presumably a "command -v" check) is not
    # visible in this excerpt — confirm against the full file.
function move_archives () {

    # Move archive directory to top of workspace, if not already there.
    #
    # ARCHIVE_DIR is positioned relative to CSIT_DIR,
    # but in some jobs CSIT_DIR is not same as WORKSPACE
    # (e.g. under VPP_DIR). To simplify ci-management settings,
    # we want to move the data to the top. We do not want simple copy,
    # as ci-management is eager with recursive search.
    #
    # As some scripts may call this function multiple times,
    # the actual implementation use copying and deletion,
    # so the workspace gets "union" of contents (except overwrites on conflict).
    # The consequence is empty ARCHIVE_DIR remaining after this call.
    #
    # As the source directory is emptied,
    # the check for dirs being different is essential.
    #
    # Variables read:
    # - WORKSPACE - Jenkins workspace, move only if the value is not empty.
    #   Can be unset, then it speeds up manual testing.
    # - ARCHIVE_DIR - Path to directory with content to be moved.
    # Directories updated:
    # - ${WORKSPACE}/archives/ - Created if does not exist.
    #   Content of ${ARCHIVE_DIR}/ is moved.
    # Functions called:
    # - die - Print to stderr and exit.

    if [[ -n "${WORKSPACE-}" ]]; then
        # Canonicalize so the equality check below compares real paths.
        target=$(readlink -f "${WORKSPACE}/archives")
        if [[ "${target}" != "${ARCHIVE_DIR}" ]]; then
            mkdir -p "${target}" || die "Archives dir create failed."
            cp -rf "${ARCHIVE_DIR}"/* "${target}" || die "Copy failed."
            rm -rf "${ARCHIVE_DIR}"/* || die "Delete failed."
function post_process_robot_outputs () {

    # Generate INFO level output_info.xml by rebot.
    # Archive UTI raw json outputs.
    #
    # Variables read:
    # - ARCHIVE_DIR - Path to post-processed files.
    #
    # NOTE(review): the matching fi/popd/closing "}" are not visible in this
    # excerpt.

    # Compress raw json outputs, as they will never be post-processed.
    pushd "${ARCHIVE_DIR}" || die
    if [ -d "tests" ]; then
        # Use deterministic order.
        options+=("--sort=name")
        # We are keeping info outputs where they are.
        # Assuming we want to move anything but info files (and dirs).
        options+=("--exclude=*.info.json")
        tar czf "generated_output_raw.tar.gz" "${options[@]}" "tests" || true
        # Tar can remove when archiving, but chokes (not deterministically)
        # on attempting to remove dirs (not empty as info files are there).
        # So we need to delete the raw files manually.
        find "tests" -type f -name "*.raw.json" -delete || true
    # Generate INFO level output_info.xml for post-processing.
    all_options=("--loglevel" "INFO")
    all_options+=("--log" "none")
    all_options+=("--report" "none")
    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
    all_options+=("${ARCHIVE_DIR}/output.xml")
    # rebot returns non-zero when tests failed; that is not an error here.
    rebot "${all_options[@]}" || true
function prepare_topology () {

    # Prepare virtual testbed topology if needed based on flavor.
    #
    # Variables read:
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
    # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
    # Functions called:
    # - die - Print to stderr and exit.
    # - terraform_init - Terraform init topology.
    # - terraform_apply - Terraform apply topology.
    #
    # NOTE(review): ";;", "esac" and closing "}" are not visible in this
    # excerpt.

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
        "1n_aws" | "2n_aws" | "3n_aws")
            # Unique-per-job testbed name for terraform state.
            export TF_VAR_testbed_name="${TEST_CODE}"
            terraform_init || die "Failed to call terraform init."
            terraform_apply || die "Failed to call terraform apply."
function reserve_and_cleanup_testbed () {

    # Reserve physical testbed, perform cleanup, register trap to unreserve.
    # When cleanup fails, remove from topologies and keep retrying
    # until all topologies are removed.
    #
    # Variables read:
    # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
    # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
    # - BUILD_TAG - Any string suitable as filename, identifying
    #   test run executing this function. May be unset.
    # Variables set:
    # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # Functions called:
    # - die - Print to stderr and exit.
    # - ansible_playbook - Perform an action using ansible, see ansible.sh
    # Traps registered:
    # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.
    #
    # NOTE(review): the enclosing retry loop, result captures and several
    # closing lines are not visible in this excerpt; code kept as-is.

    for topo in "${TOPOLOGIES[@]}"; do
        scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
        opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
        python3 "${scrpt}" "${opts[@]}"
        if [[ "${result}" == "0" ]]; then
            # Trap unreservation before cleanup check,
            # so multiple jobs showing failed cleanup improve chances
            # of humans to notice and fix.
            WORKING_TOPOLOGY="${topo}"
            echo "Reserved: ${WORKING_TOPOLOGY}"
            trap "untrap_and_unreserve_testbed" EXIT || {
                message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
                untrap_and_unreserve_testbed "${message}" || {
                    die "Teardown should have died, not failed."
                die "Trap attempt failed, unreserve succeeded. Aborting."
            # Cleanup + calibration checks
            ansible_playbook "cleanup, calibration"
            if [[ "${result}" == "0" ]]; then
                warn "Testbed cleanup failed: ${topo}"
                untrap_and_unreserve_testbed "Fail of unreserve after cleanup."
        # Else testbed is accessible but currently reserved, moving on.
    if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
        # Exit the infinite while loop if we made a reservation.
        warn "Reservation and cleanup successful."
    if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
        die "Run out of operational testbeds!"
    # Wait ~3minutes before next try.
    sleep_time="$[ ( ${RANDOM} % 20 ) + 180 ]s" || {
        die "Sleep time calculation failed."
    echo "Sleeping ${sleep_time}"
    sleep "${sleep_time}" || die "Sleep failed."
function run_pybot () {

    # Run pybot with options based on input variables.
    # Generate INFO level output_info.xml by rebot.
    # Archive UTI raw json outputs.
    #
    # Variables read:
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - ARCHIVE_DIR - Path to store robot result files in.
    # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
    # - GENERATED_DIR - Tests are assumed to be generated under there.
    # Variables set:
    # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
    # Functions called:
    # - die - Print to stderr and exit.

    all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
    all_options+=("--noncritical" "EXPECTED_FAILING")
    all_options+=("${EXPANDED_TAGS[@]}")
    pushd "${CSIT_DIR}" || die "Change directory operation failed."
    # Exit status is captured instead of dying, so outputs are
    # post-processed even when tests failed.
    robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
    PYBOT_EXIT_STATUS="$?"
    post_process_robot_outputs || die
    popd || die "Change directory operation failed."
function select_arch_os () {

    # Set variables affected by local CPU architecture and operating system.
    #
    # Variables set:
    # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
    # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
    # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb."
    #
    # NOTE(review): the outer case on OS name and several ";;"/"esac" lines
    # are not visible in this excerpt; code kept as-is.

    # /etc/os-release provides NAME/VERSION variables used by the case below.
    source /etc/os-release || die "Get OS release failed."
        *"LTS (Focal Fossa)"*)
            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
            VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_FOCAL"
            die "Unsupported Ubuntu version!"
        die "Unsupported distro or OS!"
    arch=$(uname -m) || {
        die "Get CPU architecture failed."
            # ARM builds use a separate device image.
            IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
function select_tags () {

    # Variables read:
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # - DUT - CSIT test/ subdirectory, set while processing tags.
    # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # - BASH_FUNCTION_DIR - Directory with input files to process.
    # Variables set:
    # - TAGS - Array of processed tag boolean expressions.
    # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
    #
    # NOTE(review): many case patterns, ";;", "esac", fi/done and closing
    # braces are not visible in this excerpt; code kept as-is.

    case "${TEST_CODE}" in
            start_pattern='^ SUT:'
            start_pattern='^ TG:'
    end_pattern='^ \? \?[A-Za-z0-9]\+:'
    # Remove the sections from topology file
    sed_command="/${start_pattern}/,/${end_pattern}/d"
    # All topologies NICs
    available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
                | grep -hoP "model: \K.*" | sort -u)
    # Selected topology NICs
    reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
               | grep -hoP "model: \K.*" | sort -u)
    # All topologies NICs - Selected topology NICs
    exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
        die "Computation of excluded NICs failed."

    # Select default NIC tag.
    case "${TEST_CODE}" in
        *"3n-dnv"* | *"2n-dnv"*)
            default_nic="nic_intel-x553"
            default_nic="nic_intel-x520-da2"
        *"3n-icx"* | *"2n-icx"*)
            default_nic="nic_intel-xxv710"
        *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
            default_nic="nic_intel-xxv710"
        *"2n-tx2"* | *"3n-alt"* | *"mrr-daily-master")
            default_nic="nic_intel-xl710"
        *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
            default_nic="nic_amazon-nitro-50g"
            default_nic="nic_intel-x710"

    # sed command rewriting the ${default_nic} placeholder in job specs.
    sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
    # awk program translating short NIC names in job spec lines into
    # full test-name components (speed + port count + model).
    awk_nics_sub_cmd+='gsub("xxv710","25ge2p1xxv710");'
    awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
    awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
    awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
    awk_nics_sub_cmd+='gsub("x553","10ge2p1x553");'
    awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
    awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
    awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
    awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
    awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
    awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
    awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
    awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
    awk_nics_sub_cmd+='else drv="";'
    awk_nics_sub_cmd+='if ($1 =="-") cores="";'
    awk_nics_sub_cmd+='else cores=$1;'
    awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-" cores "-" drv $11"-"$5'

    # Tag file directory shorthand.
    tfd="${JOB_SPECS_DIR}"
    case "${TEST_CODE}" in
        # Select specific performance tests based on jenkins job type variable.
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/vpp_device/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
            SELECTION_MODE="--test"
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/mlr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/mrr_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/mrr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
        *"report-iterative"* )
            test_sets=(${TEST_TAG_STRING//:/ })
            # Run only one test set per run
            report_file=${test_sets[0]}.md
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/report_iterative/${NODENESS}-${FLAVOR}/${report_file} |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
        *"report-coverage"* )
            test_sets=(${TEST_TAG_STRING//:/ })
            # Run only one test set per run
            report_file=${test_sets[0]}.md
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/report_coverage/${NODENESS}-${FLAVOR}/${report_file} |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
            if [[ -z "${TEST_TAG_STRING-}" ]]; then
                # If nothing is specified, we will run pre-selected tests by
                test_tag_array=("mrrAND${default_nic}AND1cAND64bANDethip4-ip4base"
                                "mrrAND${default_nic}AND1cAND78bANDethip6-ip6base"
                                "mrrAND${default_nic}AND1cAND64bANDeth-l2bdbasemaclrn"
                                "mrrAND${default_nic}AND1cAND64bANDeth-l2xcbase"
                                "!drv_af_xdp" "!drv_avf")
                # If trigger contains tags, split them into array.
                test_tag_array=(${TEST_TAG_STRING//:/ })
            SELECTION_MODE="--include"

    # Blacklisting certain tags per topology.
    #
    # Reasons for blacklisting:
    # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
    case "${TEST_CODE}" in
            test_tag_array+=("!avf")
            test_tag_array+=("!vhost")
            test_tag_array+=("!flow")
            test_tag_array+=("!flow")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!ipsechw")
            # Not enough nic_intel-xxv710 to support double link tests.
            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!ipsechw")
            # Not enough nic_intel-xxv710 to support double link tests.
            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!memif")
            test_tag_array+=("!srv6_proxy")
            test_tag_array+=("!vhost")
            test_tag_array+=("!vts")
            test_tag_array+=("!drv_avf")
        *"2n-tx2"* | *"3n-alt"*)
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!memif")
            test_tag_array+=("!srv6_proxy")
            test_tag_array+=("!vhost")
            test_tag_array+=("!vts")
            test_tag_array+=("!drv_avf")
            # 3n-tsh only has x520 NICs which don't work with AVF
            test_tag_array+=("!drv_avf")
            test_tag_array+=("!ipsechw")
        *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
            test_tag_array+=("!ipsechw")

    # We will add excluded NICs.
    test_tag_array+=("${exclude_nics[@]/#/!NIC_}")

    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
        if [[ "${TEST_CODE}" != *"device"* ]]; then
            # Automatic prefixing for VPP perf jobs to limit the NIC used and
            # traffic evaluation to MRR.
            if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
                prefix="${prefix}mrrAND"
                prefix="${prefix}mrrAND${default_nic}AND"
    for tag in "${test_tag_array[@]}"; do
        if [[ "${tag}" == "!"* ]]; then
            # Exclude tags are not prefixed.
        elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
            # Badly formed tag expressions can trigger way too much tests.
            warn "The following tag expression hints at bad trigger: ${tag}"
            warn "Possible cause: Multiple triggers in a single comment."
            die "Aborting to avoid triggering too many tests."
        elif [[ "${tag}" == *"OR"* ]]; then
            # If OR had higher precedence than AND, it would be useful here.
            # Some people think it does, thus triggering way too much tests.
            warn "The following tag expression hints at bad trigger: ${tag}"
            warn "Operator OR has lower precedence than AND. Use space instead."
            die "Aborting to avoid triggering too many tests."
        elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
            # Empty and comment lines are skipped.
            # Other lines are normal tags, they are to be prefixed.
            TAGS+=("${prefix}${tag}")
function select_topology () {

    # Variables read:
    # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
    # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # Variables set:
    # - TOPOLOGIES - Array of paths to suitable topology yaml files.
    # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
    # Functions called:
    # - die - Print to stderr and exit.
    #
    # NOTE(review): several case pattern lines and ";;" separators are not
    # visible in this excerpt; code kept as-is.

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
        "1n_skx" | "1n_tx2")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
            TOPOLOGIES_TAGS="3_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml )
            TOPOLOGIES_TAGS="3_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
            TOPOLOGIES_TAGS="1_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            # No falling back to default, that should have been done
            # by the function which has set NODENESS and FLAVOR.
            die "Unknown specification: ${case_text}"
    # Empty TOPOLOGIES means the glob above matched nothing.
    if [[ -z "${TOPOLOGIES-}" ]]; then
        die "No applicable topology found!"
function set_environment_variables () {

    # Depending on testbed topology, overwrite defaults set in the
    # resources/libraries/python/Constants.py file
    #
    # Variables read:
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # Variables set:
    # See specific cases
    #
    # NOTE(review): some case patterns and ";;"/"esac" are not visible in this
    # excerpt.

    case "${TEST_CODE}" in
        *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
            # T-Rex 2.88+ workaround for ENA NICs.
            export TREX_RX_DESCRIPTORS_COUNT=1024
            export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
            export TREX_CORE_COUNT=6
            # Settings to prevent duration stretching.
            export PERF_TRIAL_STL_DELAY=0.1
            # Maciek's workaround for Zen2 with lower amount of cores.
            export TREX_CORE_COUNT=14
function untrap_and_unreserve_testbed () {

    # Use this as a trap function to ensure testbed does not remain reserved.
    # Perhaps call directly before script exit, to free testbed for other jobs.
    # This function is smart enough to avoid multiple unreservations (so safe).
    # Topo cleanup is executed (call it best practice), ignoring failures.
    #
    # Arguments:
    # - ${1} - Message to die with if unreservation fails. Default hardcoded.
    # Variables read (by inner function):
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
    # Variables written:
    # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
    # Trap unregistered:
    # - EXIT - Failure to untrap is reported, but ignored otherwise.
    # Functions called:
    # - die - Print to stderr and exit.
    # - ansible_playbook - Perform an action using ansible, see ansible.sh
    #
    # NOTE(review): the else branch structure and closing lines are not
    # visible in this excerpt; code kept as-is.

    set +eu # We do not want to exit early in a "teardown" function.
    trap - EXIT || echo "Trap deactivation failed, continuing anyway."
    wt="${WORKING_TOPOLOGY}" # Just to avoid too long lines.
    if [[ -z "${wt-}" ]]; then
        warn "Testbed looks unreserved already. Trap removal failed before?"
        # Best-effort cleanup; unreservation below is the critical step.
        ansible_playbook "cleanup" || true
        python3 "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
            die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
        case "${TEST_CODE}" in
            *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
                terraform_destroy || die "Failed to call terraform destroy."
1262 # Print the message to standard error.
1265 # - ${@} - The text of the message.