1 # Copyright (c) 2023 Cisco and/or its affiliates.
2 # Copyright (c) 2023 PANTHEON.tech and/or its affiliates.
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at:
6 #
7 #     http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 set -exuo pipefail
16
17 # This library defines functions used by multiple entry scripts.
18 # Keep functions ordered alphabetically, please.
19
20 # TODO: Add a link to bash style guide.
21 # TODO: Consider putting every die into a {} block,
22 #   the code might become more readable (but longer).
23
24
25 function activate_docker_topology () {
26
27     # Create virtual vpp-device topology. The output of the function is
28     # a topology file describing the created environment.
29     #
30     # Variables read:
31     # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
32     # - TOPOLOGIES - Available topologies.
33     # - NODENESS - Node multiplicity of desired testbed.
34     # - FLAVOR - Node flavor string, usually describing the processor.
35     # - IMAGE_VER_FILE - Name of file that contains the image version.
36     # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
37     # Variables set:
38     # - WORKING_TOPOLOGY - Path to topology file.
39
40     set -exuo pipefail
41
42     source "${BASH_FUNCTION_DIR}/device.sh" || {
43         die "Source failed!"
44     }
45     device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
46     case_text="${NODENESS}_${FLAVOR}"
47     case "${case_text}" in
48         "1n_skx" | "1n_tx2")
49             # We execute reservation over csit-shim-dcr (ssh) which runs sourced
50             # script's functions. Env variables are read from ssh output
51             # back to localhost for further processing.
52             # Shim and Jenkins executor are in the same network on the same host.
53             # Connect to docker's default gateway IP and shim's exposed port.
54             ssh="ssh root@172.17.0.1 -p 6022"
55             run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
56             # The "declare -f" output is long and boring.
57             set +x
58             # backticks to avoid https://midnight-commander.org/ticket/2142
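            # Note: "$(declare -f)" ships the local function definitions to the
            # shim, so the remote shell can define them and then run
            # activate_wrapper; its stdout (the exported variables) is captured
            # below and sourced locally.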
59             env_vars=`${ssh} "$(declare -f); ${run}"` || {
60                 die "Topology reservation via shim-dcr failed!"
61             }
62             set -x
63             set -a
64             source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
65                 die "Source failed!"
66             }
67             set +a
68             ;;
69         "1n_vbox")
70             # We execute reservation on localhost. Sourced script automatically
71             # sets environment variables for further processing.
72             activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
73             ;;
74         *)
75             die "Unknown specification: ${case_text}!"
76     esac
77
78     trap 'deactivate_docker_topology' EXIT || {
79          die "Trap attempt failed, please cleanup manually. Aborting!"
80     }
81
82     parse_env_variables || die "Parse of environment variables failed!"
83
84     # Replace all variables in template with those in environment.
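    # (The template is wrapped in a generated here-document, so the current
    # shell expands its ${...} placeholders while writing topo.yml.)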
85     source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
86         die "Topology file create failed!"
87     }
88
89     WORKING_TOPOLOGY="${CSIT_DIR}/topologies/available/vpp_device.yaml"
90     mv topo.yml "${WORKING_TOPOLOGY}" || {
91         die "Topology move failed!"
92     }
93     grep -v password "${WORKING_TOPOLOGY}" || {
94         die "Topology read failed!"
95     }
96 }
97
98
99 function activate_virtualenv () {
100
101     # Update virtualenv pip package, delete and create virtualenv directory,
102     # activate the virtualenv, install requirements, set PYTHONPATH.
103
104     # Arguments:
105     # - ${1} - Path to existing directory for creating virtualenv in.
106     #          If missing or empty, ${CSIT_DIR} is used.
107     # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
108     # Variables read:
109     # - CSIT_DIR - Path to existing root of local CSIT git repository.
110     # Variables exported:
111     # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
112     # Functions called:
113     # - die - Print to stderr and exit.
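    # Example (illustrative): activate_virtualenv "${CSIT_DIR}" creates
    # ${CSIT_DIR}/env and installs ${CSIT_DIR}/requirements.txt into it.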
114
115     set -exuo pipefail
116
117     root_path="${1-$CSIT_DIR}"
118     env_dir="${root_path}/env"
119     req_path=${2-$CSIT_DIR/requirements.txt}
120     rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
121     pip3 install virtualenv==20.15.1 || {
122         die "Virtualenv package install failed."
123     }
124     virtualenv --no-download --python=$(which python3) "${env_dir}" || {
125         die "Virtualenv creation for $(which python3) failed."
126     }
127     set +u
128     source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
129     set -u
130     pip3 install -r "${req_path}" || {
131         die "Requirements installation failed."
132     }
133     # Most CSIT Python scripts assume PYTHONPATH is set and exported.
134     export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
135 }
136
137
138 function archive_tests () {
139
140     # Create .tar.gz of generated/tests for archiving.
141     # To be run after generate_tests, kept separate to offer more flexibility.
142
143     # Directory read:
144     # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
145     # File rewritten:
146     # - ${ARCHIVE_DIR}/generated_tests.tar.gz - Archive of generated tests.
147
148     set -exuo pipefail
149
150     pushd "${ARCHIVE_DIR}" || die
151     tar czf "generated_tests.tar.gz" "${GENERATED_DIR}/tests" || true
152     popd || die
153 }
154
155
156 function check_download_dir () {
157
158     # Fail if there are no files visible in ${DOWNLOAD_DIR}.
159     #
160     # Variables read:
161     # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
162     # Directories read:
163     # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
164     # Functions called:
165     # - die - Print to stderr and exit.
166
167     set -exuo pipefail
168
169     if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
170         die "No artifacts downloaded!"
171     fi
172 }
173
174
175 function check_prerequisites () {
176
177     # Fail if prerequisites are not met.
178     #
179     # Functions called:
180     # - installed - Check if application is installed/present in system.
181     # - die - Print to stderr and exit.
182
183     set -exuo pipefail
184
185     if ! installed sshpass; then
186         die "Please install sshpass before continuing!"
187     fi
188 }
189
190
191 function common_dirs () {
192
193     # Set global variables, create some directories (without touching content).
194
195     # Variables set:
196     # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
197     # - CSIT_DIR - Path to existing root of local CSIT git repository.
198     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
199     # - JOB_SPECS_DIR - Path to existing directory with job test specifications.
200     # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
201     # - TOOLS_DIR - Path to existing resources subdirectory "tools".
202     # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
203     # - ARCHIVE_DIR - Path to created CSIT subdirectory "archives".
204     #   The name is chosen to match what ci-management expects.
205     # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
206     # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
207     # Directories created if not present:
208     # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
209     # Functions called:
210     # - die - Print to stderr and exit.
211
212     set -exuo pipefail
213
214     this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
215         die "Failed to locate this source file."
216     }
217     BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
218         die "Dirname call failed."
219     }
220     # Current working directory could be in a different repo, e.g. VPP.
221     pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed"
222     relative_csit_dir=$(git rev-parse --show-toplevel) || {
223         die "Git rev-parse failed."
224     }
225     CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
226     popd || die "Popd failed."
227     TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
228         die "Readlink failed."
229     }
230     JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
231         die "Readlink failed."
232     }
233     RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
234         die "Readlink failed."
235     }
236     TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
237         die "Readlink failed."
238     }
239     PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
240         die "Readlink failed."
241     }
242
243     ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archives") || {
244         die "Readlink failed."
245     }
246     mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
247     DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
248         die "Readlink failed."
249     }
250     mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
251     GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
252         die "Readlink failed."
253     }
254     mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
255 }
256
257
258 function compose_robot_arguments () {
259
260     # Variables read:
261     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
262     # - DUT - CSIT test/ subdirectory, set while processing tags.
263     # - TAGS - Array variable holding selected tag boolean expressions.
264     # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
265     # - TEST_CODE - The test selection string from environment or argument.
266     # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
267     # Variables set:
268     # - ROBOT_ARGS - String holding part of all arguments for robot.
269     # - EXPANDED_TAGS - Array of strings robot arguments compiled from tags.
270
271     set -exuo pipefail
272
273     # No explicit check needed with "set -u".
274     ROBOT_ARGS=("--loglevel" "TRACE")
275     ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
276
277     case "${TEST_CODE}" in
278         *"device"*)
279             ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
280             ;;
281         *"perf"*)
282             ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
283             ;;
284         *)
285             die "Unknown specification: ${TEST_CODE}"
286     esac
287
288     EXPANDED_TAGS=()
289     for tag in "${TAGS[@]}"; do
290         if [[ ${tag} == "!"* ]]; then
291             EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
292         else
293             if [[ ${SELECTION_MODE} == "--test" ]]; then
294                 EXPANDED_TAGS+=("--test" "${tag}")
295             else
296                 EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
297             fi
298         fi
299     done
300
301     if [[ ${SELECTION_MODE} == "--test" ]]; then
302         EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}")
303     fi
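    # Example (illustrative): TAGS=("mrrANDnic_intel-x710AND64b" "!ipsechw")
    # with SELECTION_MODE="--include" expands to:
    #   --include ${TOPOLOGIES_TAGS}ANDmrrANDnic_intel-x710AND64b --exclude ipsechw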
304 }
305
306
307 function deactivate_docker_topology () {
308
309     # Deactivate virtual vpp-device topology by removing containers.
310     #
311     # Variables read:
312     # - NODENESS - Node multiplicity of desired testbed.
313     # - FLAVOR - Node flavor string, usually describing the processor.
314
315     set -exuo pipefail
316
317     case_text="${NODENESS}_${FLAVOR}"
318     case "${case_text}" in
319         "1n_skx" | "1n_tx2")
320             ssh="ssh root@172.17.0.1 -p 6022"
321             env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
322             # The "declare -f" output is long and boring.
323             set +x
324             ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
325                 die "Topology cleanup via shim-dcr failed!"
326             }
327             set -x
328             ;;
329         "1n_vbox")
330             enter_mutex || die
331             clean_environment || {
332                 die "Topology cleanup locally failed!"
333             }
334             exit_mutex || die
335             ;;
336         *)
337             die "Unknown specification: ${case_text}!"
338     esac
339 }
340
341
342 function die () {
343
344     # Print the message to standard error and exit with the error code specified
345     # by the second argument.
346     #
347     # Hardcoded values:
348     # - The default error message.
349     # Arguments:
350     # - ${1} - The whole error message, be sure to quote. Optional.
351     # - ${2} - The code to exit with, default: 1.
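    # Example (illustrative): die "Artifact download failed." 2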
352
353     set -x
354     set +eu
355     warn "${1:-Unspecified run-time error occurred!}"
356     exit "${2:-1}"
357 }
358
359
360 function die_on_robot_error () {
361
362     # Source this fragment if you want to abort on any failed test case.
363     #
364     # Variables read:
365     # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
366     # Functions called:
367     # - die - Print to stderr and exit.
368
369     set -exuo pipefail
370
371     if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
372         die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
373     fi
374 }
375
376
377 function generate_tests () {
378
379     # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
380     # Any previously existing content of ${GENERATED_DIR}/tests is wiped beforehand.
381     # The generation is done by executing any *.py executable
382     # within any subdirectory after copying.
383
384     # This is a separate function, because this code is called
385     # both by the autogen checker and by entry scripts calling run_robot.
386
387     # Directories read:
388     # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
389     # Directories replaced:
390     # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.
391
392     set -exuo pipefail
393
394     rm -rf "${GENERATED_DIR}/tests" || die
395     cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
396     cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
397     cmd_line+=("-executable" "-name" "*.py")
398     # We sort the files, so log output can be compared between runs.
399     file_list=$("${cmd_line[@]}" | sort) || die
400
401     for gen in ${file_list}; do
402         directory="$(dirname "${gen}")" || die
403         filename="$(basename "${gen}")" || die
404         pushd "${directory}" || die
405         ./"${filename}" || die
406         popd || die
407     done
408 }
409
410
411 function get_test_code () {
412
413     # Arguments:
414     # - ${1} - Optional argument of the entry script (empty if unset).
415     #   Test code value to override job name from environment.
416     # Variables read:
417     # - JOB_NAME - String affecting test selection, used when no argument is given.
418     # Variables set:
419     # - TEST_CODE - The test selection string from environment or argument.
420     # - NODENESS - Node multiplicity of desired testbed.
421     # - FLAVOR - Node flavor string, usually describing the processor.
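    # Example (illustrative): a TEST_CODE containing "2n-aws" results in
    # NODENESS="2n" and FLAVOR="aws".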
422
423     set -exuo pipefail
424
425     TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
426     if [[ -z "${TEST_CODE}" ]]; then
427         TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."
428     fi
429
430     case "${TEST_CODE}" in
431         *"1n-vbox"*)
432             NODENESS="1n"
433             FLAVOR="vbox"
434             ;;
435         *"1n-skx"*)
436             NODENESS="1n"
437             FLAVOR="skx"
438             ;;
439         *"1n-tx2"*)
440             NODENESS="1n"
441             FLAVOR="tx2"
442             ;;
443         *"1n-aws"*)
444             NODENESS="1n"
445             FLAVOR="aws"
446             ;;
447         *"2n-aws"*)
448             NODENESS="2n"
449             FLAVOR="aws"
450             ;;
451         *"3n-aws"*)
452             NODENESS="3n"
453             FLAVOR="aws"
454             ;;
455         *"1n-c6gn"*)
456             NODENESS="1n"
457             FLAVOR="c6gn"
458             ;;
459         *"2n-c6gn"*)
460             NODENESS="2n"
461             FLAVOR="c6gn"
462             ;;
463         *"3n-c6gn"*)
464             NODENESS="3n"
465             FLAVOR="c6gn"
466             ;;
467         *"1n-c6in"*)
468             NODENESS="1n"
469             FLAVOR="c6in"
470             ;;
471         *"2n-c6in"*)
472             NODENESS="2n"
473             FLAVOR="c6in"
474             ;;
475         *"3n-c6in"*)
476             NODENESS="3n"
477             FLAVOR="c6in"
478             ;;
479         *"2n-zn2"*)
480             NODENESS="2n"
481             FLAVOR="zn2"
482             ;;
483         *"2n-clx"*)
484             NODENESS="2n"
485             FLAVOR="clx"
486             ;;
487         *"2n-icx"*)
488             NODENESS="2n"
489             FLAVOR="icx"
490             ;;
491         *"2n-spr"*)
492             NODENESS="2n"
493             FLAVOR="spr"
494             ;;
495         *"3n-icx"*)
496             NODENESS="3n"
497             FLAVOR="icx"
498             ;;
499         *"3na-spr"*)
500             NODENESS="3na"
501             FLAVOR="spr"
502             ;;
503         *"3nb-spr"*)
504             NODENESS="3nb"
505             FLAVOR="spr"
506             ;;
507         *"3n-snr"*)
508             NODENESS="3n"
509             FLAVOR="snr"
510             ;;
511         *"2n-tx2"*)
512             NODENESS="2n"
513             FLAVOR="tx2"
514             ;;
515         *"3n-tsh"*)
516             NODENESS="3n"
517             FLAVOR="tsh"
518             ;;
519         *"3n-alt"*)
520             NODENESS="3n"
521             FLAVOR="alt"
522             ;;
523     esac
524 }
525
526
527 function get_test_tag_string () {
528
529     # Variables read:
530     # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
531     # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
532     # - TEST_CODE - The test selection string from environment or argument.
533     # Variables set:
534     # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
535     #   May be empty, or even not set on event types not adding comment.
536
537     # TODO: ci-management scripts no longer need to perform this.
538
539     set -exuo pipefail
540
541     if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
542         case "${TEST_CODE}" in
543             *"device"*)
544                 trigger="devicetest"
545                 ;;
546             *"perf"*)
547                 trigger="perftest"
548                 ;;
549             *)
550                 die "Unknown specification: ${TEST_CODE}"
551         esac
552         # Ignore lines not containing the trigger word.
553         comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
554         # The vpp-csit triggers are followed by text we are not interested in.
555         # Removing that and the trigger word: https://unix.stackexchange.com/a/13472
556         # (except relying on \s whitespace, \S non-whitespace and . both).
557         # The last string is concatenated, only the middle part is expanded.
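        # Example (illustrative): a comment line "perftest mrrANDnic_intel-x710AND64b"
        # results in TEST_TAG_STRING="mrrANDnic_intel-x710AND64b".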
558         cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
559         # On parsing error, TEST_TAG_STRING probably stays empty.
560         TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
561         if [[ -z "${TEST_TAG_STRING-}" ]]; then
562             # Probably we got a base64 encoded comment.
563             comment="${GERRIT_EVENT_COMMENT_TEXT}"
564             comment=$(base64 --decode <<< "${comment}" || true)
565             comment=$(fgrep "${trigger}" <<< "${comment}" || true)
566             TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
567         fi
568         if [[ -n "${TEST_TAG_STRING-}" ]]; then
569             test_tag_array=(${TEST_TAG_STRING})
570             if [[ "${test_tag_array[0]}" == "icl" ]]; then
571                 export GRAPH_NODE_VARIANT="icl"
572                 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
573             elif [[ "${test_tag_array[0]}" == "skx" ]]; then
574                 export GRAPH_NODE_VARIANT="skx"
575                 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
576             fi
577         fi
578     fi
579 }
580
581
582 function installed () {
583
584     # Check if the given utility is installed. Fail if not installed.
585     #
586     # Duplicate of common.sh function, as this file is also used standalone.
587     #
588     # Arguments:
589     # - ${1} - Utility to check.
590     # Returns:
591     # - 0 - If command is installed.
592     # - 1 - If command is not installed.
593
594     set -exuo pipefail
595
596     command -v "${1}"
597 }
598
599
600 function move_archives () {
601
602     # Move archive directory to top of workspace, if not already there.
603     #
604     # ARCHIVE_DIR is positioned relative to CSIT_DIR,
605     # but in some jobs CSIT_DIR is not the same as WORKSPACE
606     # (e.g. under VPP_DIR). To simplify ci-management settings,
607     # we want to move the data to the top. We do not want a simple copy,
608     # as ci-management is eager with recursive search.
609     #
610     # As some scripts may call this function multiple times,
611     # the actual implementation uses copying and deletion,
612     # so the workspace gets a "union" of contents (except overwrites on conflict).
613     # The consequence is an empty ARCHIVE_DIR remaining after this call.
614     #
615     # As the source directory is emptied,
616     # the check for dirs being different is essential.
617     #
618     # Variables read:
619     # - WORKSPACE - Jenkins workspace, move only if the value is not empty.
620     #   Can be unset, then it speeds up manual testing.
621     # - ARCHIVE_DIR - Path to directory with content to be moved.
622     # Directories updated:
623     # - ${WORKSPACE}/archives/ - Created if it does not exist.
624     #   Content of ${ARCHIVE_DIR}/ is moved.
625     # Functions called:
626     # - die - Print to stderr and exit.
627
628     set -exuo pipefail
629
630     if [[ -n "${WORKSPACE-}" ]]; then
631         target=$(readlink -f "${WORKSPACE}/archives")
632         if [[ "${target}" != "${ARCHIVE_DIR}" ]]; then
633             mkdir -p "${target}" || die "Archives dir create failed."
634             cp -rf "${ARCHIVE_DIR}"/* "${target}" || die "Copy failed."
635             rm -rf "${ARCHIVE_DIR}"/* || die "Delete failed."
636         fi
637     fi
638 }
639
640
641 function prepare_topology () {
642
643     # Prepare virtual testbed topology if needed based on flavor.
644
645     # Variables read:
646     # - TEST_CODE - String affecting test selection, usually jenkins job name.
647     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
648     # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
649     # Variables set:
650     # - TERRAFORM_MODULE_DIR - Terraform module directory.
651     # Functions called:
652     # - die - Print to stderr and exit.
653     # - terraform_init - Terraform init topology.
654     # - terraform_apply - Terraform apply topology.
655
656     set -exuo pipefail
657
658     case_text="${NODENESS}_${FLAVOR}"
659     case "${case_text}" in
660         "1n_aws" | "2n_aws" | "3n_aws")
661             export TF_VAR_testbed_name="${TEST_CODE}"
662             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
663             terraform_init || die "Failed to call terraform init."
664             trap "terraform_destroy" ERR EXIT || {
665                 die "Trap attempt failed, please cleanup manually. Aborting!"
666             }
667             terraform_apply || die "Failed to call terraform apply."
668             ;;
669         "1n_c6gn" | "2n_c6gn" | "3n_c6gn")
670             export TF_VAR_testbed_name="${TEST_CODE}"
671             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6gn"
672             terraform_init || die "Failed to call terraform init."
673             trap "terraform_destroy" ERR EXIT || {
674                 die "Trap attempt failed, please cleanup manually. Aborting!"
675             }
676             terraform_apply || die "Failed to call terraform apply."
677             ;;
678         "1n_c6in" | "2n_c6in" | "3n_c6in")
679             export TF_VAR_testbed_name="${TEST_CODE}"
680             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
681             terraform_init || die "Failed to call terraform init."
682             trap "terraform_destroy" ERR EXIT || {
683                 die "Trap attempt failed, please cleanup manually. Aborting!"
684             }
685             terraform_apply || die "Failed to call terraform apply."
686             ;;
687     esac
688 }
689
690
691 function reserve_and_cleanup_testbed () {
692
693     # Reserve physical testbed, perform cleanup, register trap to unreserve.
694     # When cleanup fails, remove from topologies and keep retrying
695     # until all topologies are removed.
696     #
697     # Variables read:
698     # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
699     # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
700     # - BUILD_TAG - Any string suitable as filename, identifying
701     #   test run executing this function. May be unset.
702     # Variables set:
703     # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
704     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
705     # Functions called:
706     # - die - Print to stderr and exit.
707     # - ansible_playbook - Perform an action using ansible, see ansible.sh
708     # Traps registered:
709     # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.
710
711     set -exuo pipefail
712
713     while true; do
714         for topo in "${TOPOLOGIES[@]}"; do
715             set +e
716             scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
717             opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
718             python3 "${scrpt}" "${opts[@]}"
719             result="$?"
720             set -e
721             if [[ "${result}" == "0" ]]; then
722                 # Trap unreservation before cleanup check,
723                 # so multiple jobs showing failed cleanup improve the chances
724                 # of humans noticing and fixing it.
725                 WORKING_TOPOLOGY="${topo}"
726                 echo "Reserved: ${WORKING_TOPOLOGY}"
727                 trap "untrap_and_unreserve_testbed" EXIT || {
728                     message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
729                     untrap_and_unreserve_testbed "${message}" || {
730                         die "Teardown should have died, not failed."
731                     }
732                     die "Trap attempt failed, unreserve succeeded. Aborting."
733                 }
734                 # Cleanup + calibration checks
735                 set +e
736                 ansible_playbook "cleanup, calibration"
737                 result="$?"
738                 set -e
739                 if [[ "${result}" == "0" ]]; then
740                     break
741                 fi
742                 warn "Testbed cleanup failed: ${topo}"
743                 untrap_and_unreserve_testbed "Unreserve failed after cleanup failure."
744             fi
745             # Else testbed is accessible but currently reserved, moving on.
746         done
747
748         if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
749             # Exit the infinite while loop if we made a reservation.
750             warn "Reservation and cleanup successful."
751             break
752         fi
753
754         if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
755             die "Ran out of operational testbeds!"
756         fi
757
758         # Wait ~3 minutes before the next try.
759         sleep_time="$(( ( ${RANDOM} % 20 ) + 180 ))s" || {
760             die "Sleep time calculation failed."
761         }
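        # (The random component presumably spreads out retries of competing jobs.)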
762         echo "Sleeping ${sleep_time}"
763         sleep "${sleep_time}" || die "Sleep failed."
764     done
765 }
766
767
768 function run_robot () {
769
770     # Run robot with options based on input variables.
771     #
772     # Variables read:
773     # - CSIT_DIR - Path to existing root of local CSIT git repository.
774     # - ARCHIVE_DIR - Path to store robot result files in.
775     # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.sh
776     # - GENERATED_DIR - Tests are assumed to be generated under there.
777     # Variables set:
778     # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
779     # Functions called:
780     # - die - Print to stderr and exit.
781
782     set -exuo pipefail
783
784     all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
785     all_options+=("${EXPANDED_TAGS[@]}")
786
787     pushd "${CSIT_DIR}" || die "Change directory operation failed."
788     set +e
789     robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
790     ROBOT_EXIT_STATUS="$?"
791     set -e
792
793     popd || die "Change directory operation failed."
794 }
795
796
797 function select_arch_os () {
798
799     # Set variables affected by local CPU architecture and operating system.
800     #
801     # Variables set:
802     # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
803     # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
804     # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb".
805
806     set -exuo pipefail
807
808     source /etc/os-release || die "Get OS release failed."
809
810     case "${ID}" in
811         "ubuntu"*)
812             case "${VERSION}" in
813                 *"LTS (Jammy Jellyfish)"*)
814                     IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
815                     VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
816                     PKG_SUFFIX="deb"
817                     ;;
818                 *)
819                     die "Unsupported Ubuntu version!"
820                     ;;
821             esac
822             ;;
823         *)
824             die "Unsupported distro or OS!"
825             ;;
826     esac
827
828     arch=$(uname -m) || {
829         die "Get CPU architecture failed."
830     }
831
832     case "${arch}" in
833         "aarch64")
834             IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
835             ;;
836         *)
837             ;;
838     esac
839 }
840
841
842 function select_tags () {
843
844     # Variables read:
845     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
846     # - TEST_CODE - String affecting test selection, usually jenkins job name.
847     # - DUT - CSIT test/ subdirectory, set while processing tags.
848     # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
849     #   Can be unset.
850     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
851     # - BASH_FUNCTION_DIR - Directory with input files to process.
852     # Variables set:
853     # - TAGS - Array of processed tag boolean expressions.
854     # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
855
856     set -exuo pipefail
857
858     # NIC SELECTION
859     case "${TEST_CODE}" in
860         *"1n-aws"* | *"1n-c6gn"* | *"1n-c6in"*)
861             start_pattern='^  SUT:'
862             ;;
863         *)
864             start_pattern='^  TG:'
865             ;;
866     esac
867     end_pattern='^ \? \?[A-Za-z0-9]\+:'
868     # Remove those sections from the topology file.
869     sed_command="/${start_pattern}/,/${end_pattern}/d"
870     # All topologies NICs
871     available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
872                 | grep -hoP "model: \K.*" | sort -u)
873     # Selected topology NICs
874     reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
875                | grep -hoP "model: \K.*" | sort -u)
876     # All topologies NICs - Selected topology NICs
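    # ("comm -13" keeps lines unique to the second input, i.e. NIC models
    # available in some topology but not present in the reserved one.)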
877     exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
878         die "Computation of excluded NICs failed."
879     }
880
881     # Select default NIC tag.
882     case "${TEST_CODE}" in
883         *"3n-snr"*)
884             default_nic="nic_intel-e822cq"
885             ;;
886         *"3n-tsh"*)
887             default_nic="nic_intel-x520-da2"
888             ;;
889         *"3n-icx"* | *"2n-icx"*)
890             default_nic="nic_intel-e810cq"
891             ;;
892         *"3na-spr"*)
893             default_nic="nic_mellanox-cx7veat"
894             ;;
895         *"3nb-spr"*)
896             default_nic="nic_intel-e810cq"
897             ;;
898         *"2n-spr"*)
899             default_nic="nic_intel-e810cq"
900             ;;
901         *"2n-clx"* | *"2n-zn2"*)
902             default_nic="nic_intel-xxv710"
903             ;;
904         *"2n-tx2"* | *"3n-alt"*)
905             default_nic="nic_intel-xl710"
906             ;;
907         *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
908             default_nic="nic_amazon-nitro-50g"
909             ;;
910         *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*)
911             default_nic="nic_amazon-nitro-100g"
912             ;;
913         *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*)
914             default_nic="nic_amazon-nitro-200g"
915             ;;
916         *)
917             default_nic="nic_intel-x710"
918             ;;
919     esac
920
921     sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
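    # Best-effort description of the awk program below: it expands NIC model
    # shorthands from the job spec lines into full interface names, derives a
    # driver prefix from the drv_* column, and prints a glob pattern matching
    # the corresponding generated test names.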
922     awk_nics_sub_cmd=""
923     awk_nics_sub_cmd+='gsub("xxv710","25ge2p1xxv710");'
924     awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
925     awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
926     awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
927     awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
928     awk_nics_sub_cmd+='gsub("cx7veat","200ge2p1cx7veat");'
929     awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
930     awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
931     awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
932     awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
933     awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
934     awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
935     awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
936     awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
937     awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
938     awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
939     awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
940     awk_nics_sub_cmd+='else drv="";'
941     awk_nics_sub_cmd+='if ($1 =="-") cores="";'
942     awk_nics_sub_cmd+='else cores=$1;'
943     awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-" cores "-" drv $11"-"$5'
944
945     # Tag file directory shorthand.
946     tfd="${JOB_SPECS_DIR}"
947     case "${TEST_CODE}" in
948         # Select specific performance tests based on jenkins job type variable.
949         *"device"* )
950             readarray -t test_tag_array <<< $(grep -v "#" \
951                 ${tfd}/vpp_device/${DUT}-${NODENESS}-${FLAVOR}.md |
952                 awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
953             SELECTION_MODE="--test"
954             ;;
955         *"hoststack-daily"* )
956             readarray -t test_tag_array <<< $(grep -v "#" \
957                 ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
958                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
959             SELECTION_MODE="--test"
960             ;;
961         *"ndrpdr-weekly"* )
962             readarray -t test_tag_array <<< $(grep -v "#" \
963                 ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
964                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
965             SELECTION_MODE="--test"
966             ;;
967         *"mrr-daily"* )
968             readarray -t test_tag_array <<< $(grep -v "#" \
969                 ${tfd}/mrr_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
970                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
971             SELECTION_MODE="--test"
972             ;;
973         *"mrr-weekly"* )
974             readarray -t test_tag_array <<< $(grep -v "#" \
975                 ${tfd}/mrr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
976                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
977             SELECTION_MODE="--test"
978             ;;
979         *"report-iterative"* )
980             test_sets=(${TEST_TAG_STRING//:/ })
981             # Run only one test set per run
982             report_file=${test_sets[0]}.md
983             readarray -t test_tag_array <<< $(grep -v "#" \
984                 ${tfd}/report_iterative/${NODENESS}-${FLAVOR}/${report_file} |
985                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
986             SELECTION_MODE="--test"
987             ;;
988         *"report-coverage"* )
989             test_sets=(${TEST_TAG_STRING//:/ })
990             # Run only one test set per run
991             report_file=${test_sets[0]}.md
992             readarray -t test_tag_array <<< $(grep -v "#" \
993                 ${tfd}/report_coverage/${NODENESS}-${FLAVOR}/${report_file} |
994                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
995             SELECTION_MODE="--test"
996             ;;
997         * )
998             if [[ -z "${TEST_TAG_STRING-}" ]]; then
999                 # If nothing is specified, we will run pre-selected tests
1000                # matching the following tags.
1001                 test_tag_array=("mrrAND${default_nic}AND1cAND64bANDethip4-ip4base"
1002                                 "mrrAND${default_nic}AND1cAND78bANDethip6-ip6base"
1003                                 "mrrAND${default_nic}AND1cAND64bANDeth-l2bdbasemaclrn"
1004                                 "mrrAND${default_nic}AND1cAND64bANDeth-l2xcbase"
1005                                 "!drv_af_xdp" "!drv_avf")
1006             else
1007                 # If trigger contains tags, split them into array.
1008                 test_tag_array=(${TEST_TAG_STRING//:/ })
1009             fi
1010             SELECTION_MODE="--include"
1011             ;;
1012     esac
1013
1014     # Blacklisting certain tags per topology.
1015     #
1016     # Reasons for blacklisting:
1017     # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
1018     case "${TEST_CODE}" in
1019         *"1n-vbox"*)
1020             test_tag_array+=("!avf")
1021             test_tag_array+=("!vhost")
1022             test_tag_array+=("!flow")
1023             ;;
1024         *"1n-tx2"*)
1025             test_tag_array+=("!flow")
1026             ;;
1027         *"2n-clx"*)
1028             test_tag_array+=("!ipsechw")
1029             ;;
1030         *"2n-icx"*)
1031             test_tag_array+=("!ipsechw")
1032             ;;
1033         *"2n-spr"*)
1034             test_tag_array+=("!ipsechw")
1035             ;;
1036         *"2n-tx2"*)
1037             test_tag_array+=("!ipsechw")
1038             ;;
1039         *"2n-zn2"*)
1040             test_tag_array+=("!ipsechw")
1041             ;;
1042         *"3n-alt"*)
1043             test_tag_array+=("!ipsechw")
1044             ;;
1045         *"3n-icx"*)
1046             test_tag_array+=("!ipsechw")
1047             test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
1048             ;;
1049         *"3n-snr"*)
1050             ;;
1051         *"3na-spr"*)
1052             test_tag_array+=("!ipsechw")
1053             ;;
1054         *"3nb-spr"*)
1055             test_tag_array+=("!ipsechw")
1056             ;;
1057         *"3n-tsh"*)
1058             test_tag_array+=("!drv_avf")
1059             test_tag_array+=("!ipsechw")
1060             ;;
1061         *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
1062             test_tag_array+=("!ipsechw")
1063             ;;
1064         *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*)
1065             test_tag_array+=("!ipsechw")
1066             ;;
1067         *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*)
1068             test_tag_array+=("!ipsechw")
1069             ;;
1070     esac
1071
1072     # We will add excluded NICs.
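    # ("${exclude_nics[@]/#/!NIC_}" prefixes every excluded model with "!NIC_",
    # turning each one into an exclude tag expression.)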
1073     test_tag_array+=("${exclude_nics[@]/#/!NIC_}")
1074
1075     TAGS=()
1076     prefix=""
1077
1078     set +x
1079     if [[ "${TEST_CODE}" == "vpp-"* ]]; then
1080         if [[ "${TEST_CODE}" != *"device"* ]]; then
1081             # Automatic prefixing for VPP perf jobs to limit the NIC used
1082             # and to limit traffic evaluation to MRR.
1083             if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
1084                 prefix="${prefix}mrrAND"
1085             else
1086                 prefix="${prefix}mrrAND${default_nic}AND"
1087             fi
1088         fi
1089     fi
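    # Example (illustrative): with prefix "mrrANDnic_intel-x710AND", a trigger
    # tag "1cAND64bANDip4base" becomes "mrrANDnic_intel-x710AND1cAND64bANDip4base".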
1090     for tag in "${test_tag_array[@]}"; do
1091         if [[ "${tag}" == "!"* ]]; then
1092             # Exclude tags are not prefixed.
1093             TAGS+=("${tag}")
1094         elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
1095             # Badly formed tag expressions can trigger way too many tests.
1096             set -x
1097             warn "The following tag expression hints at bad trigger: ${tag}"
1098             warn "Possible cause: Multiple triggers in a single comment."
1099             die "Aborting to avoid triggering too many tests."
1100         elif [[ "${tag}" == *"OR"* ]]; then
1101             # If OR had higher precedence than AND, it would be useful here.
1102             # Some people think it does, thus triggering way too many tests.
1103             set -x
1104             warn "The following tag expression hints at bad trigger: ${tag}"
1105             warn "Operator OR has lower precedence than AND. Use space instead."
1106             die "Aborting to avoid triggering too many tests."
1107         elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
1108             # Empty and comment lines are skipped.
1109             # Other lines are normal tags, they are to be prefixed.
1110             TAGS+=("${prefix}${tag}")
1111         fi
1112     done
1113     set -x
1114 }
1115
1116
1117 function select_topology () {
1118
1119     # Variables read:
1120     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
1121     # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
1122     # - CSIT_DIR - Path to existing root of local CSIT git repository.
1123     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
1124     # Variables set:
1125     # - TOPOLOGIES - Array of paths to suitable topology yaml files.
1126     # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
1127     # Functions called:
1128     # - die - Print to stderr and exit.
1129
1130     set -exuo pipefail
1131
1132     case_text="${NODENESS}_${FLAVOR}"
1133     case "${case_text}" in
1134         "1n_vbox")
1135             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
1136             TOPOLOGIES_TAGS="2_node_single_link_topo"
1137             ;;
1138         "1n_skx" | "1n_tx2")
1139             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
1140             TOPOLOGIES_TAGS="2_node_single_link_topo"
1141             ;;
1142         "2n_skx")
1143             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
1144             TOPOLOGIES_TAGS="2_node_*_link_topo"
1145             ;;
1146         "2n_zn2")
1147             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
1148             TOPOLOGIES_TAGS="2_node_*_link_topo"
1149             ;;
1150         "3n_skx")
1151             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
1152             TOPOLOGIES_TAGS="3_node_*_link_topo"
1153             ;;
1154         "3n_icx")
1155             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml )
1156             TOPOLOGIES_TAGS="3_node_*_link_topo"
1157             ;;
1158         "3na_spr")
1159             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr*.yaml )
1160             TOPOLOGIES_TAGS="3_node_*_link_topo"
1161             ;;
1162         "3nb_spr")
1163             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr*.yaml )
1164             TOPOLOGIES_TAGS="3_node_*_link_topo"
1165             ;;
1166         "2n_clx")
1167             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
1168             TOPOLOGIES_TAGS="2_node_*_link_topo"
1169             ;;
1170         "2n_icx")
1171             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml )
1172             TOPOLOGIES_TAGS="2_node_*_link_topo"
1173             ;;
1174         "2n_spr")
1175             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr*.yaml )
1176             TOPOLOGIES_TAGS="2_node_*_link_topo"
1177             ;;
1178         "3n_snr")
1179             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr*.yaml )
1180             TOPOLOGIES_TAGS="3_node_single_link_topo"
1181             ;;
1182         "3n_tsh")
1183             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
1184             TOPOLOGIES_TAGS="3_node_single_link_topo"
1185             ;;
1186         "2n_tx2")
1187             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
1188             TOPOLOGIES_TAGS="2_node_single_link_topo"
1189             ;;
1190         "3n_alt")
1191             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt*.yaml )
1192             TOPOLOGIES_TAGS="3_node_single_link_topo"
1193             ;;
1194         "1n_aws")
1195             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
1196             TOPOLOGIES_TAGS="1_node_single_link_topo"
1197             ;;
1198         "2n_aws")
1199             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
1200             TOPOLOGIES_TAGS="2_node_single_link_topo"
1201             ;;
1202         "3n_aws")
1203             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
1204             TOPOLOGIES_TAGS="3_node_single_link_topo"
1205             ;;
1206         "1n_c6gn")
1207             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6gn*.yaml )
1208             TOPOLOGIES_TAGS="1_node_single_link_topo"
1209             ;;
1210         "2n_c6gn")
1211             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6gn*.yaml )
1212             TOPOLOGIES_TAGS="2_node_single_link_topo"
1213             ;;
1214         "3n_c6gn")
1215             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6gn*.yaml )
1216             TOPOLOGIES_TAGS="3_node_single_link_topo"
1217             ;;
1218         "1n_c6in")
1219             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
1220             TOPOLOGIES_TAGS="1_node_single_link_topo"
1221             ;;
1222         "2n_c6in")
1223             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
1224             TOPOLOGIES_TAGS="2_node_single_link_topo"
1225             ;;
1226         "3n_c6in")
1227             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
1228             TOPOLOGIES_TAGS="3_node_single_link_topo"
1229             ;;
1230         *)
1231             # No falling back to a default; that should have been done
1232             # by the function which set NODENESS and FLAVOR.
1233             die "Unknown specification: ${case_text}"
1234     esac
1235
1236     if [[ -z "${TOPOLOGIES-}" ]]; then
1237         die "No applicable topology found!"
1238     fi
1239 }
1240
1241
1242 function set_environment_variables () {
1243
1244     # Depending on testbed topology, overwrite defaults set in the
1245     # resources/libraries/python/Constants.py file
1246     #
1247     # Variables read:
1248     # - TEST_CODE - String affecting test selection, usually jenkins job name.
1249     # Variables set:
1250     # See specific cases
1251
1252     set -exuo pipefail
1253
1254     case "${TEST_CODE}" in
1255         *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
1256             export TREX_RX_DESCRIPTORS_COUNT=1024
1257             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1258             export TREX_CORE_COUNT=6
1259             # Settings to prevent duration stretching.
1260             export PERF_TRIAL_STL_DELAY=0.1
1261             ;;
1262         *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*)
1263             export TREX_RX_DESCRIPTORS_COUNT=1024
1264             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1265             export TREX_CORE_COUNT=6
1266             # Settings to prevent duration stretching.
1267             export PERF_TRIAL_STL_DELAY=0.1
1268             ;;
1269         *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*)
1270             export TREX_RX_DESCRIPTORS_COUNT=1024
1271             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1272             export TREX_CORE_COUNT=6
1273             # Settings to prevent duration stretching.
1274             export PERF_TRIAL_STL_DELAY=0.1
1275             ;;
1276         *"2n-zn2"*)
1277             # Maciek's workaround for Zen2 with a lower number of cores.
1278             export TREX_CORE_COUNT=14
1279     esac
1280 }
1281
1282
1283 function untrap_and_unreserve_testbed () {
1284
1285     # Use this as a trap function to ensure testbed does not remain reserved.
1286     # Perhaps call directly before script exit, to free testbed for other jobs.
1287     # This function is smart enough to avoid multiple unreservations (so safe).
1288     # Topo cleanup is executed (call it best practice), ignoring failures.
1289     #
1290     # Hardcoded values:
1291     # - default message to die with if testbed might remain reserved.
1292     # Arguments:
1293     # - ${1} - Message to die with if unreservation fails. Default hardcoded.
1294     # Variables read (by inner function):
1295     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
1296     # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
1297     # Variables set:
1298     # - TERRAFORM_MODULE_DIR - Terraform module directory.
1299     # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
1300     # Trap unregistered:
1301     # - EXIT - Failure to untrap is reported, but ignored otherwise.
1302     # Functions called:
1303     # - die - Print to stderr and exit.
1304     # - ansible_playbook - Perform an action using ansible, see ansible.sh
1305
1306     set -xo pipefail
1307     set +eu  # We do not want to exit early in a "teardown" function.
1308     trap - EXIT || echo "Trap deactivation failed, continuing anyway."
1309     wt="${WORKING_TOPOLOGY}"  # Just to avoid too long lines.
1310     if [[ -z "${wt-}" ]]; then
1311         set -eu
1312         warn "Testbed looks unreserved already. Trap removal failed before?"
1313     else
1314         ansible_playbook "cleanup" || true
1315         python3 "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
1316             die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
1317         }
1318         case "${TEST_CODE}" in
1319             *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
1320                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
1321                 terraform_destroy || die "Failed to call terraform destroy."
1322                 ;;
1323             *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*)
1324                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1325                 terraform_destroy || die "Failed to call terraform destroy."
1326                 ;;
1327             *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*)
1328                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1329                 terraform_destroy || die "Failed to call terraform destroy."
1330                 ;;
1331             *)
1332                 ;;
1333         esac
1334         WORKING_TOPOLOGY=""
1335         set -eu
1336     fi
1337 }
1338
1339
1340 function warn () {
1341
1342     # Print the message to standard error.
1343     #
1344     # Arguments:
1345     # - ${@} - The text of the message.
1346
1347     set -exuo pipefail
1348
1349     echo "$@" >&2
1350 }