fix(core): common.sh typos
[csit.git] resources/libraries/bash/function/common.sh
1 # Copyright (c) 2023 Cisco and/or its affiliates.
2 # Copyright (c) 2023 PANTHEON.tech and/or its affiliates.
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at:
6 #
7 #     http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 set -exuo pipefail
16
17 # This library defines functions used by multiple entry scripts.
18 # Keep functions ordered alphabetically, please.
19
20 # TODO: Add a link to bash style guide.
21 # TODO: Consider putting every die into a {} block,
22 #   the code might become more readable (but longer).
23
24
25 function activate_docker_topology () {
26
27     # Create virtual vpp-device topology. The output of the function is
28     # a topology file describing the created environment.
29     #
30     # Variables read:
31     # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
32     # - TOPOLOGIES - Available topologies.
33     # - NODENESS - Node multiplicity of desired testbed.
34     # - FLAVOR - Node flavor string, usually describing the processor.
35     # - IMAGE_VER_FILE - Name of file that contains the image version.
36     # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
37     # Variables set:
38     # - WORKING_TOPOLOGY - Path to topology file.
39
40     set -exuo pipefail
41
42     source "${BASH_FUNCTION_DIR}/device.sh" || {
43         die "Source failed!"
44     }
45     device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
46     case_text="${NODENESS}_${FLAVOR}"
47     case "${case_text}" in
48         "1n_skx" | "1n_tx2" | "1n_spr")
49             # We execute reservation over csit-shim-dcr (ssh), which runs the
50             # sourced script's functions. Env variables are read from the ssh
51             # output back to localhost for further processing.
52             # Shim and Jenkins executor are in the same network on the same host.
53             # Connect to docker's default gateway IP and shim's exposed port.
54             ssh="ssh root@172.17.0.1 -p 6022"
55             run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
56             # The "declare -f" output is long and boring.
57             set +x
58             # Backticks to avoid https://midnight-commander.org/ticket/2142
59             env_vars=`${ssh} "$(declare -f); ${run}"` || {
60                 die "Topology reservation via shim-dcr failed!"
61             }
62             set -x
63             set -a
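            # Drop lines mentioning /usr/bin/docker (presumably stray docker
            # output) so that only the exported variables get sourced.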
64             source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
65                 die "Source failed!"
66             }
67             set +a
68             ;;
69         "1n_vbox")
70             # We execute reservation on localhost. The sourced script
71             # automatically sets environment variables for further processing.
72             activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
73             ;;
74         *)
75             die "Unknown specification: ${case_text}!"
76     esac
77
78     trap 'deactivate_docker_topology' EXIT || {
79          die "Trap attempt failed, please cleanup manually. Aborting!"
80     }
81
82     parse_env_variables || die "Parse of environment variables failed!"
83
84     # Replace all variables in template with those in environment.
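    # (The template is emitted as a here-document, so the shell expands any
    # ${...} placeholders from the current environment before writing topo.yml.)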
85     source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
86         die "Topology file create failed!"
87     }
88
89     WORKING_TOPOLOGY="${CSIT_DIR}/topologies/available/vpp_device.yaml"
90     mv topo.yml "${WORKING_TOPOLOGY}" || {
91         die "Topology move failed!"
92     }
93     cat ${WORKING_TOPOLOGY} | grep -v password || {
94         die "Topology read failed!"
95     }
96 }
97
98
99 function activate_virtualenv () {
100
101     # Update virtualenv pip package, delete and create virtualenv directory,
102     # activate the virtualenv, install requirements, set PYTHONPATH.
103
104     # Arguments:
105     # - ${1} - Path to existing directory for creating virtualenv in.
106     #          If missing or empty, ${CSIT_DIR} is used.
107     # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
108     # Variables read:
109     # - CSIT_DIR - Path to existing root of local CSIT git repository.
110     # Variables exported:
111     # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
112     # Functions called:
113     # - die - Print to stderr and exit.
114
115     set -exuo pipefail
116
117     root_path="${1-$CSIT_DIR}"
118     env_dir="${root_path}/env"
119     req_path=${2-$CSIT_DIR/requirements.txt}
120     rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
121     pip3 install virtualenv==20.15.1 || {
122         die "Virtualenv package install failed."
123     }
124     virtualenv --no-download --python=$(which python3) "${env_dir}" || {
125         die "Virtualenv creation for $(which python3) failed."
126     }
127     set +u
128     source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
129     set -u
130     pip3 install -r "${req_path}" || {
131         die "Requirements installation failed."
132     }
133     # Most CSIT Python scripts assume PYTHONPATH is set and exported.
134     export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
135 }
136
137
138 function archive_tests () {
139
140     # Create .tar.gz of generated/tests for archiving.
141     # To be run after generate_tests, kept separate to offer more flexibility.
142
143     # Directory read:
144     # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
145     # File rewritten:
146     # - ${ARCHIVE_DIR}/generated_tests.tar.gz - Archive of generated tests.
147
148     set -exuo pipefail
149
150     pushd "${ARCHIVE_DIR}" || die
151     tar czf "generated_tests.tar.gz" "${GENERATED_DIR}/tests" || true
152     popd || die
153 }
154
155
156 function check_download_dir () {
157
158     # Fail if there are no files visible in ${DOWNLOAD_DIR}.
159     #
160     # Variables read:
161     # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
162     # Directories read:
163     # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
164     # Functions called:
165     # - die - Print to stderr and exit.
166
167     set -exuo pipefail
168
169     if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
170         die "No artifacts downloaded!"
171     fi
172 }
173
174
175 function check_prerequisites () {
176
177     # Fail if prerequisites are not met.
178     #
179     # Functions called:
180     # - installed - Check if application is installed/present in system.
181     # - die - Print to stderr and exit.
182
183     set -exuo pipefail
184
185     if ! installed sshpass; then
186         die "Please install sshpass before continuing!"
187     fi
188 }
189
190
191 function common_dirs () {
192
193     # Set global variables, create some directories (without touching content).
194
195     # Variables set:
196     # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
197     # - CSIT_DIR - Path to existing root of local CSIT git repository.
198     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
199     # - JOB_SPECS_DIR - Path to existing directory with job test specifications.
200     # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
201     # - TOOLS_DIR - Path to existing resources subdirectory "tools".
202     # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
203     # - ARCHIVE_DIR - Path to created CSIT subdirectory "archives".
204     #   The name is chosen to match what ci-management expects.
205     # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
206     # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
207     # Directories created if not present:
208     # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
209     # Functions called:
210     # - die - Print to stderr and exit.
211
212     set -exuo pipefail
213
214     this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
215         die "Some error while locating this source file."
216     }
217     BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
218         die "Some error during dirname call."
219     }
220     # Current working directory could be in a different repo, e.g. VPP.
221     pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed."
222     relative_csit_dir=$(git rev-parse --show-toplevel) || {
223         die "Git rev-parse failed."
224     }
225     CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
226     popd || die "Popd failed."
227     TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
228         die "Readlink failed."
229     }
230     JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
231         die "Readlink failed."
232     }
233     RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
234         die "Readlink failed."
235     }
236     TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
237         die "Readlink failed."
238     }
239     PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
240         die "Readlink failed."
241     }
242
243     ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archives") || {
244         die "Readlink failed."
245     }
246     mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
247     DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
248         die "Readlink failed."
249     }
250     mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
251     GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
252         die "Readlink failed."
253     }
254     mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
255 }
256
257
258 function compose_robot_arguments () {
259
260     # Variables read:
261     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
262     # - DUT - CSIT test/ subdirectory, set while processing tags.
263     # - TAGS - Array variable holding selected tag boolean expressions.
264     # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
265     # - TEST_CODE - The test selection string from environment or argument.
266     # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
267     # Variables set:
268     # - ROBOT_ARGS - String holding part of all arguments for robot.
269     # - EXPANDED_TAGS - Array of strings robot arguments compiled from tags.
270
271     set -exuo pipefail
272
273     # No explicit check needed with "set -u".
274     ROBOT_ARGS=("--loglevel" "TRACE")
275     ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
276
277     case "${TEST_CODE}" in
278         *"device"*)
279             ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
280             ;;
281         *"perf"*)
282             ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
283             ;;
284         *)
285             die "Unknown specification: ${TEST_CODE}"
286     esac
287
288     EXPANDED_TAGS=()
289     for tag in "${TAGS[@]}"; do
290         if [[ ${tag} == "!"* ]]; then
291             EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
292         else
293             if [[ ${SELECTION_MODE} == "--test" ]]; then
294                 EXPANDED_TAGS+=("--test" "${tag}")
295             else
296                 EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
297             fi
298         fi
299     done
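    # Illustrative examples: a tag "mrrAND1c" becomes
    # "--include ${TOPOLOGIES_TAGS}ANDmrrAND1c" (or "--test mrrAND1c" in
    # --test mode), while "!drv_avf" becomes "--exclude drv_avf".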
300
301     if [[ ${SELECTION_MODE} == "--test" ]]; then
302         EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}")
303     fi
304 }
305
306
307 function deactivate_docker_topology () {
308
309     # Deactivate virtual vpp-device topology by removing containers.
310     #
311     # Variables read:
312     # - NODENESS - Node multiplicity of desired testbed.
313     # - FLAVOR - Node flavor string, usually describing the processor.
314
315     set -exuo pipefail
316
317     case_text="${NODENESS}_${FLAVOR}"
318     case "${case_text}" in
319         "1n_skx" | "1n_tx2" | "1n_spr")
320             ssh="ssh root@172.17.0.1 -p 6022"
321             env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
322             # The "declare -f" output is long and boring.
323             set +x
324             ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
325                 die "Topology cleanup via shim-dcr failed!"
326             }
327             set -x
328             ;;
329         "1n_vbox")
330             enter_mutex || die
331             clean_environment || {
332                 die "Topology cleanup locally failed!"
333             }
334             exit_mutex || die
335             ;;
336         *)
337             die "Unknown specification: ${case_text}!"
338     esac
339 }
340
341
342 function die () {
343
344     # Print the message to standard error and exit with the error code
345     # specified by the second argument.
346     #
347     # Hardcoded values:
348     # - The default error message.
349     # Arguments:
350     # - ${1} - The whole error message, be sure to quote. Optional.
351     # - ${2} - The code to exit with, default: 1.
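    #
    # Example usage (illustrative): die "Download failed!" 2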
352
353     set -x
354     set +eu
355     warn "${1:-Unspecified run-time error occurred!}"
356     exit "${2:-1}"
357 }
358
359
360 function die_on_robot_error () {
361
362     # Source this fragment if you want to abort on any failed test case.
363     #
364     # Variables read:
365     # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
366     # Functions called:
367     # - die - Print to stderr and exit.
368
369     set -exuo pipefail
370
371     if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
372         die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
373     fi
374 }
375
376
377 function generate_tests () {
378
379     # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
380     # Any previously existing content of ${GENERATED_DIR}/tests is wiped first.
381     # The generation is done by executing any *.py executable
382     # within any subdirectory after copying.
383
384     # This is a separate function, because this code is called
385     # both by the autogen checker and by entry scripts calling run_robot.
386
387     # Directories read:
388     # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
389     # Directories replaced:
390     # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.
391
392     set -exuo pipefail
393
394     rm -rf "${GENERATED_DIR}/tests" || die
395     cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
396     cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
397     cmd_line+=("-executable" "-name" "*.py")
398     # We sort the file list, so log output can be compared between runs.
399     file_list=$("${cmd_line[@]}" | sort) || die
400
401     for gen in ${file_list}; do
402         directory="$(dirname "${gen}")" || die
403         filename="$(basename "${gen}")" || die
404         pushd "${directory}" || die
405         ./"${filename}" || die
406         popd || die
407     done
408 }
409
410
411 function get_test_code () {
412
413     # Arguments:
414     # - ${1} - Optional argument of the entry script (empty counts as unset).
415     #   Test code value to override job name from environment.
416     # Variables read:
417     # - JOB_NAME - String affecting test selection, used if no argument is given.
418     # Variables set:
419     # - TEST_CODE - The test selection string from environment or argument.
420     # - NODENESS - Node multiplicity of desired testbed.
421     # - FLAVOR - Node flavor string, usually describing the processor.
422
423     set -exuo pipefail
424
425     TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
426     if [[ -z "${TEST_CODE}" ]]; then
427         TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."
428     fi
429
430     case "${TEST_CODE}" in
431         *"1n-vbox")
432             NODENESS="1n"
433             FLAVOR="vbox"
434             ;;
435         *"1n-skx")
436             NODENESS="1n"
437             FLAVOR="skx"
438             ;;
439         *"1n-spr")
440             NODENESS="1n"
441             FLAVOR="spr"
442             ;;
443         *"1n-tx2")
444             NODENESS="1n"
445             FLAVOR="tx2"
446             ;;
447         *"1n-aws")
448             NODENESS="1n"
449             FLAVOR="aws"
450             ;;
451         *"2n-aws")
452             NODENESS="2n"
453             FLAVOR="aws"
454             ;;
455         *"3n-aws")
456             NODENESS="3n"
457             FLAVOR="aws"
458             ;;
459         *"2n-c6gn")
460             NODENESS="2n"
461             FLAVOR="c6gn"
462             ;;
463         *"3n-c6gn")
464             NODENESS="3n"
465             FLAVOR="c6gn"
466             ;;
467         *"2n-c7gn")
468             NODENESS="2n"
469             FLAVOR="c7gn"
470             ;;
471         *"3n-c7gn")
472             NODENESS="3n"
473             FLAVOR="c7gn"
474             ;;
475         *"1n-c6in")
476             NODENESS="1n"
477             FLAVOR="c6in"
478             ;;
479         *"2n-c6in")
480             NODENESS="2n"
481             FLAVOR="c6in"
482             ;;
483         *"3n-c6in")
484             NODENESS="3n"
485             FLAVOR="c6in"
486             ;;
487         *"2n-zn2")
488             NODENESS="2n"
489             FLAVOR="zn2"
490             ;;
491         *"2n-clx")
492             NODENESS="2n"
493             FLAVOR="clx"
494             ;;
495         *"2n-icx")
496             NODENESS="2n"
497             FLAVOR="icx"
498             ;;
499         *"2n-spr")
500             NODENESS="2n"
501             FLAVOR="spr"
502             ;;
503         *"3n-icx")
504             NODENESS="3n"
505             FLAVOR="icx"
506             ;;
507         *"3na-spr")
508             NODENESS="3na"
509             FLAVOR="spr"
510             ;;
511         *"3nb-spr")
512             NODENESS="3nb"
513             FLAVOR="spr"
514             ;;
515         *"3n-snr")
516             NODENESS="3n"
517             FLAVOR="snr"
518             ;;
519         *"3n-icxd")
520             NODENESS="3n"
521             FLAVOR="icxd"
522             ;;
523         *"2n-tx2")
524             NODENESS="2n"
525             FLAVOR="tx2"
526             ;;
527         *"3n-tsh")
528             NODENESS="3n"
529             FLAVOR="tsh"
530             ;;
531         *"3n-alt")
532             NODENESS="3n"
533             FLAVOR="alt"
534             ;;
535     esac
536 }
537
538
539 function get_test_tag_string () {
540
541     # Variables read:
542     # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
543     # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
544     # - TEST_CODE - The test selection string from environment or argument.
545     # Variables set:
546     # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
547     #   May be empty, or even not set on event types not adding a comment.
548     # Variables exported optionally:
549     # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger.
550
551     # TODO: ci-management scripts no longer need to perform this.
552
553     set -exuo pipefail
554
555     if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
556         case "${TEST_CODE}" in
557             *"device"*)
558                 trigger="devicetest"
559                 ;;
560             *"perf"*)
561                 trigger="perftest"
562                 ;;
563             *)
564                 die "Unknown specification: ${TEST_CODE}"
565         esac
566         # Ignore lines not containing the trigger word.
567         comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
568         # The vpp-csit triggers carry trailing content we are not interested in.
569         # Remove it and the trigger word: https://unix.stackexchange.com/a/13472
570         # (except relying on \s whitespace, \S non-whitespace and . for both).
571         # The last string is concatenated, only the middle part is expanded.
572         cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
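        # Example (hypothetical comment text): "perftest mrrAND1c ipsecAND1c"
        # would yield TEST_TAG_STRING="mrrAND1c ipsecAND1c".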
573         # On parsing error, TEST_TAG_STRING probably stays empty.
574         TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
575         if [[ -z "${TEST_TAG_STRING-}" ]]; then
576             # Probably we got a base64 encoded comment.
577             comment="${GERRIT_EVENT_COMMENT_TEXT}"
578             comment=$(base64 --decode <<< "${comment}" || true)
579             comment=$(fgrep "${trigger}" <<< "${comment}" || true)
580             TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
581         fi
582         if [[ -n "${TEST_TAG_STRING-}" ]]; then
583             test_tag_array=(${TEST_TAG_STRING})
584             if [[ "${test_tag_array[0]}" == "icl" ]]; then
585                 export GRAPH_NODE_VARIANT="icl"
586                 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
587             elif [[ "${test_tag_array[0]}" == "skx" ]]; then
588                 export GRAPH_NODE_VARIANT="skx"
589                 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
590             fi
591         fi
592     fi
593 }
594
595
596 function installed () {
597
598     # Check if the given utility is installed. Fail if not installed.
599     #
600     # Duplicate of common.sh function, as this file is also used standalone.
601     #
602     # Arguments:
603     # - ${1} - Utility to check.
604     # Returns:
605     # - 0 - If command is installed.
606     # - 1 - If command is not installed.
607
608     set -exuo pipefail
609
610     command -v "${1}"
611 }
612
613
614 function move_archives () {
615
616     # Move archive directory to top of workspace, if not already there.
617     #
618     # ARCHIVE_DIR is positioned relative to CSIT_DIR,
619     # but in some jobs CSIT_DIR is not the same as WORKSPACE
620     # (e.g. under VPP_DIR). To simplify ci-management settings,
621     # we want to move the data to the top. We do not want simple copy,
622     # as ci-management is eager with recursive search.
623     #
624     # As some scripts may call this function multiple times,
625     # the actual implementation uses copying and deletion,
626     # so the workspace gets a "union" of contents (with overwrites on conflict).
627     # The consequence is an empty ARCHIVE_DIR remaining after this call.
628     #
629     # As the source directory is emptied,
630     # the check for dirs being different is essential.
631     #
632     # Variables read:
633     # - WORKSPACE - Jenkins workspace, move only if the value is not empty.
634     #   Can be unset, then it speeds up manual testing.
635     # - ARCHIVE_DIR - Path to directory with content to be moved.
636     # Directories updated:
637     # - ${WORKSPACE}/archives/ - Created if it does not exist.
638     #   Content of ${ARCHIVE_DIR}/ is moved.
639     # Functions called:
640     # - die - Print to stderr and exit.
641
642     set -exuo pipefail
643
644     if [[ -n "${WORKSPACE-}" ]]; then
645         target=$(readlink -f "${WORKSPACE}/archives")
646         if [[ "${target}" != "${ARCHIVE_DIR}" ]]; then
647             mkdir -p "${target}" || die "Archives dir create failed."
648             cp -rf "${ARCHIVE_DIR}"/* "${target}" || die "Copy failed."
649             rm -rf "${ARCHIVE_DIR}"/* || die "Delete failed."
650         fi
651     fi
652 }
653
654
655 function prepare_topology () {
656
657     # Prepare virtual testbed topology if needed based on flavor.
658
659     # Variables read:
660     # - TEST_CODE - String affecting test selection, usually jenkins job name.
661     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
662     # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
663     # Variables set:
664     # - TERRAFORM_MODULE_DIR - Terraform module directory.
665     # Functions called:
666     # - die - Print to stderr and exit.
667     # - terraform_init - Terraform init topology.
668     # - terraform_apply - Terraform apply topology.
669
670     set -exuo pipefail
671
672     case_text="${NODENESS}_${FLAVOR}"
673     case "${case_text}" in
674         "1n_aws" | "2n_aws" | "3n_aws")
675             export TF_VAR_testbed_name="${TEST_CODE}"
676             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
677             terraform_init || die "Failed to call terraform init."
678             trap "terraform_destroy" ERR EXIT || {
679                 die "Trap attempt failed, please cleanup manually. Aborting!"
680             }
681             terraform_apply || die "Failed to call terraform apply."
682             ;;
683         "2n_c6gn" | "3n_c6gn")
684             export TF_VAR_testbed_name="${TEST_CODE}"
685             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6gn"
686             terraform_init || die "Failed to call terraform init."
687             trap "terraform_destroy" ERR EXIT || {
688                 die "Trap attempt failed, please cleanup manually. Aborting!"
689             }
690             terraform_apply || die "Failed to call terraform apply."
691             ;;
692         "2n_c7gn" | "3n_c7gn")
693             export TF_VAR_testbed_name="${TEST_CODE}"
694             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn"
695             terraform_init || die "Failed to call terraform init."
696             trap "terraform_destroy" ERR EXIT || {
697                 die "Trap attempt failed, please cleanup manually. Aborting!"
698             }
699             terraform_apply || die "Failed to call terraform apply."
700             ;;
701         "1n_c6in" | "2n_c6in" | "3n_c6in")
702             export TF_VAR_testbed_name="${TEST_CODE}"
703             TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
704             terraform_init || die "Failed to call terraform init."
705             trap "terraform_destroy" ERR EXIT || {
706                 die "Trap attempt failed, please cleanup manually. Aborting!"
707             }
708             terraform_apply || die "Failed to call terraform apply."
709             ;;
710     esac
711 }
712
713
714 function reserve_and_cleanup_testbed () {
715
716     # Reserve physical testbed, perform cleanup, register trap to unreserve.
717     # When cleanup fails, remove the testbed from the list and keep retrying
718     # until all topologies are removed.
719     #
720     # Variables read:
721     # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
722     # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
723     # - BUILD_TAG - Any string suitable as filename, identifying
724     #   test run executing this function. May be unset.
725     # Variables set:
726     # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
727     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
728     # Functions called:
729     # - die - Print to stderr and exit.
730     # - ansible_playbook - Perform an action using ansible, see ansible.sh
731     # Traps registered:
732     # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.
733
734     set -exuo pipefail
735
736     while true; do
737         for topo in "${TOPOLOGIES[@]}"; do
738             set +e
739             scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
740             opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
741             python3 "${scrpt}" "${opts[@]}"
742             result="$?"
743             set -e
744             if [[ "${result}" == "0" ]]; then
745                 # Trap unreservation before the cleanup check,
746                 # so multiple jobs showing failed cleanup improve the chances
747                 # of a human noticing and fixing it.
748                 WORKING_TOPOLOGY="${topo}"
749                 echo "Reserved: ${WORKING_TOPOLOGY}"
750                 trap "untrap_and_unreserve_testbed" EXIT || {
751                     message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
752                     untrap_and_unreserve_testbed "${message}" || {
753                         die "Teardown should have died, not failed."
754                     }
755                     die "Trap attempt failed, unreserve succeeded. Aborting."
756                 }
757                 # Cleanup + calibration checks
758                 set +e
759                 ansible_playbook "cleanup, calibration"
760                 result="$?"
761                 set -e
762                 if [[ "${result}" == "0" ]]; then
763                     break
764                 fi
765                 warn "Testbed cleanup failed: ${topo}"
766                 untrap_and_unreserve_testbed "Fail of unreserve after cleanup."
767             fi
768             # Else testbed is accessible but currently reserved, moving on.
769         done
770
771         if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
772             # Exit the infinite while loop if we made a reservation.
773             warn "Reservation and cleanup successful."
774             break
775         fi
776
777         if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
778             die "Ran out of operational testbeds!"
779         fi
780
781         # Wait ~3 minutes before the next try.
782         sleep_time="$(( ( ${RANDOM} % 20 ) + 180 ))s" || {
783             die "Sleep time calculation failed."
784         }
785         echo "Sleeping ${sleep_time}"
786         sleep "${sleep_time}" || die "Sleep failed."
787     done
788 }
789
790
791 function run_robot () {
792
793     # Run robot with options based on input variables.
794     #
795     # Variables read:
796     # - CSIT_DIR - Path to existing root of local CSIT git repository.
797     # - ARCHIVE_DIR - Path to store robot result files in.
798     # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.
799     # - GENERATED_DIR - Tests are assumed to be generated under there.
800     # Variables set:
801     # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
802     # Functions called:
803     # - die - Print to stderr and exit.
804
805     set -exuo pipefail
806
807     all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
808     all_options+=("${EXPANDED_TAGS[@]}")
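    # Illustrative resulting invocation (actual values come from the functions
    # above): robot --outputdir <ARCHIVE_DIR> --loglevel TRACE
    #   --variable TOPOLOGY_PATH:<topology.yaml> --suite tests.<dut>.perf
    #   --include <tag expression> <GENERATED_DIR>/tests/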
809
810     pushd "${CSIT_DIR}" || die "Change directory operation failed."
811     set +e
812     robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
813     ROBOT_EXIT_STATUS="$?"
814     set -e
815
816     popd || die "Change directory operation failed."
817 }
818
819
820 function select_arch_os () {
821
822     # Set variables affected by local CPU architecture and operating system.
823     #
824     # Variables set:
825     # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
826     # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
827     # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb".
828
829     set -exuo pipefail
830
831     source /etc/os-release || die "Get OS release failed."
832
833     case "${ID}" in
834         "ubuntu"*)
835             case "${VERSION}" in
836                 *"LTS (Jammy Jellyfish)"*)
837                     IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
838                     VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
839                     PKG_SUFFIX="deb"
840                     ;;
841                 *)
842                     die "Unsupported Ubuntu version!"
843                     ;;
844             esac
845             ;;
846         *)
847             die "Unsupported distro or OS!"
848             ;;
849     esac
850
851     arch=$(uname -m) || {
852         die "Get CPU architecture failed."
853     }
854
855     case "${arch}" in
856         "aarch64")
857             IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
858             ;;
859         *)
860             ;;
861     esac
862 }
863
864
865 function select_tags () {
866
867     # Variables read:
868     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
869     # - TEST_CODE - String affecting test selection, usually jenkins job name.
870     # - DUT - CSIT test/ subdirectory, set while processing tags.
871     # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
872     #   Can be unset.
873     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
874     # - BASH_FUNCTION_DIR - Directory with input files to process.
875     # Variables set:
876     # - TAGS - Array of processed tag boolean expressions.
877     # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
878
879     set -exuo pipefail
880
881     # NIC SELECTION
882     case "${TEST_CODE}" in
883         *"1n-aws"* | *"1n-c6in"*)
884             start_pattern='^  SUT:'
885             ;;
886         *)
887             start_pattern='^  TG:'
888             ;;
889     esac
890     end_pattern='^ \? \?[A-Za-z0-9]\+:'
891     # Remove the sections from the topology file.
892     sed_command="/${start_pattern}/,/${end_pattern}/d"
893     # All topologies NICs
894     available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
895                 | grep -hoP "model: \K.*" | sort -u)
896     # Selected topology NICs
897     reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
898                | grep -hoP "model: \K.*" | sort -u)
899     # All topologies NICs - Selected topology NICs
900     exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
901         die "Computation of excluded NICs failed."
902     }
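    # Illustrative example: if the union of all topologies lists NIC models
    # "A" and "B" but the reserved topology has only "A", exclude_nics=("B").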
903
904     # Select default NIC tag.
905     case "${TEST_CODE}" in
906         *"3n-snr")
907             default_nic="nic_intel-e822cq"
908             ;;
909         *"3n-icxd")
910             default_nic="nic_intel-e823c"
911             ;;
912         *"3n-tsh")
913             default_nic="nic_intel-x520-da2"
914             ;;
915         *"3n-icx" | *"2n-icx")
916             default_nic="nic_intel-e810cq"
917             ;;
918         *"3na-spr")
919             default_nic="nic_mellanox-cx7veat"
920             ;;
921         *"3nb-spr")
922             default_nic="nic_intel-e810cq"
923             ;;
924         *"2n-spr")
925             default_nic="nic_intel-e810cq"
926             ;;
927         *"2n-clx" | *"2n-zn2")
928             default_nic="nic_intel-xxv710"
929             ;;
930         *"2n-tx2" | *"3n-alt")
931             default_nic="nic_intel-xl710"
932             ;;
933         *"1n-aws" | *"2n-aws" | *"3n-aws")
934             default_nic="nic_amazon-nitro-50g"
935             ;;
936         *"2n-c6gn" | *"3n-c6gn")
937             default_nic="nic_amazon-nitro-100g"
938             ;;
939         *"2n-c7gn" | *"3n-c7gn")
940             default_nic="nic_amazon-nitro-100g"
941             ;;
942         *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
943             default_nic="nic_amazon-nitro-200g"
944             ;;
945         *)
946             default_nic="nic_intel-x710"
947             ;;
948     esac
949
950     sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
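    # (Replaces the literal "${default_nic}" placeholder found in job spec
    # files with the default NIC tag selected above.)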
951     awk_nics_sub_cmd=""
952     awk_nics_sub_cmd+='gsub("xxv710","25ge2p1xxv710");'
953     awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
954     awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
955     awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
956     awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
957     awk_nics_sub_cmd+='gsub("cx7veat","200ge2p1cx7veat");'
958     awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
959     awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
960     awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");'
961     awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");'
962     awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
963     awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
964     awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
965     awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
966     awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
967     awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");'
968     awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
969     awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
970     awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
971     awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
972     awk_nics_sub_cmd+='else drv="";'
973     awk_nics_sub_cmd+='if ($1 =="-") cores="";'
974     awk_nics_sub_cmd+='else cores=$1;'
975     awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-" cores "-" drv $11"-"$5'
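    # (The awk program above expands NIC shorthands to full interface names
    # and composes a test name glob from job spec columns; the exact column
    # meanings depend on the job spec .md format.)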
976
977     # Tag file directory shorthand.
978     tfd="${JOB_SPECS_DIR}"
979     case "${TEST_CODE}" in
980         # Select specific performance tests based on jenkins job type variable.
981         *"device"* )
982             readarray -t test_tag_array <<< $(grep -v "#" \
983                 ${tfd}/vpp_device/${DUT}-${NODENESS}-${FLAVOR}.md |
984                 awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
985             SELECTION_MODE="--test"
986             ;;
987         *"hoststack-daily"* )
988             readarray -t test_tag_array <<< $(grep -v "#" \
989                 ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
990                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
991             SELECTION_MODE="--test"
992             ;;
993         *"ndrpdr-weekly"* )
994             readarray -t test_tag_array <<< $(grep -v "#" \
995                 ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
996                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
997             SELECTION_MODE="--test"
998             ;;
999         *"mrr-daily"* )
1000             readarray -t test_tag_array <<< $(grep -v "#" \
1001                 ${tfd}/mrr_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
1002                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1003             SELECTION_MODE="--test"
1004             ;;
1005         *"mrr-weekly"* )
1006             readarray -t test_tag_array <<< $(grep -v "#" \
1007                 ${tfd}/mrr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
1008                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1009             SELECTION_MODE="--test"
1010             ;;
1011         *"report-iterative"* )
1012             test_sets=(${TEST_TAG_STRING//:/ })
1013             # Run only one test set per run
1014             report_file=${test_sets[0]}.md
1015             readarray -t test_tag_array <<< $(grep -v "#" \
1016                 ${tfd}/report_iterative/${NODENESS}-${FLAVOR}/${report_file} |
1017                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1018             SELECTION_MODE="--test"
1019             ;;
1020         *"report-coverage"* )
1021             test_sets=(${TEST_TAG_STRING//:/ })
1022             # Run only one test set per run
1023             report_file=${test_sets[0]}.md
1024             readarray -t test_tag_array <<< $(grep -v "#" \
1025                 ${tfd}/report_coverage/${NODENESS}-${FLAVOR}/${report_file} |
1026                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
1027             SELECTION_MODE="--test"
1028             ;;
1029         * )
1030             if [[ -z "${TEST_TAG_STRING-}" ]]; then
1031                 # If nothing is specified, we will run tests pre-selected by
1032                 # the following tags.
1033                 test_tag_array=("mrrAND${default_nic}AND1cAND64bANDethip4-ip4base"
1034                                 "mrrAND${default_nic}AND1cAND78bANDethip6-ip6base"
1035                                 "mrrAND${default_nic}AND1cAND64bANDeth-l2bdbasemaclrn"
1036                                 "mrrAND${default_nic}AND1cAND64bANDeth-l2xcbase"
1037                                 "!drv_af_xdp" "!drv_avf")
1038             else
1039                 # If the trigger contains tags, split them into an array.
1040                 test_tag_array=(${TEST_TAG_STRING//:/ })
1041             fi
1042             SELECTION_MODE="--include"
1043             ;;
1044     esac
1045
1046     # Blacklisting certain tags per topology.
1047     #
1048     # Reasons for blacklisting:
1049     # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
1050     case "${TEST_CODE}" in
1051         *"1n-vbox")
1052             test_tag_array+=("!avf")
1053             test_tag_array+=("!vhost")
1054             test_tag_array+=("!flow")
1055             ;;
1056         *"1n-tx2")
1057             test_tag_array+=("!flow")
1058             ;;
1059         *"2n-clx")
1060             test_tag_array+=("!ipsechw")
1061             ;;
1062         *"2n-icx")
1063             test_tag_array+=("!ipsechw")
1064             ;;
1065         *"2n-spr")
1066             ;;
1067         *"2n-tx2")
1068             test_tag_array+=("!ipsechw")
1069             ;;
1070         *"2n-zn2")
1071             test_tag_array+=("!ipsechw")
1072             ;;
1073         *"3n-alt")
1074             test_tag_array+=("!ipsechw")
1075             ;;
1076         *"3n-icx")
1077             test_tag_array+=("!ipsechw")
1078             test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
1079             ;;
1080         *"3n-snr")
1081             ;;
1082         *"3n-icxd")
1083             ;;
1084         *"3na-spr")
1085             ;;
1086         *"3nb-spr")
1087             ;;
1088         *"3n-tsh")
1089             test_tag_array+=("!drv_avf")
1090             test_tag_array+=("!ipsechw")
1091             ;;
1092         *"1n-aws" | *"2n-aws" | *"3n-aws")
1093             test_tag_array+=("!ipsechw")
1094             ;;
1095         *"2n-c6gn" | *"3n-c6gn")
1096             test_tag_array+=("!ipsechw")
1097             ;;
1098         *"2n-c7gn" | *"3n-c7gn")
1099             test_tag_array+=("!ipsechw")
1100             ;;
1101         *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
1102             test_tag_array+=("!ipsechw")
1103             ;;
1104     esac
1105
1106     # We will add excluded NICs.
1107     test_tag_array+=("${exclude_nics[@]/#/!NIC_}")
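    # (The expansion prefixes each excluded NIC model with "!NIC_",
    # e.g. a hypothetical model "M" becomes the exclude tag "!NIC_M".)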
1108
1109     TAGS=()
1110     prefix=""
1111     if [[ "${TEST_CODE}" == "vpp-"* ]]; then
1112         if [[ "${TEST_CODE}" != *"device"* ]]; then
1113             # Automatic prefixing for VPP perf jobs to limit the NIC used.
1114             if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then
1115                 prefix="${default_nic}AND"
1116             fi
1117         fi
1118     fi
1119     set +x
1120     for tag in "${test_tag_array[@]}"; do
1121         if [[ "${tag}" == "!"* ]]; then
1122             # Exclude tags are not prefixed.
1123             TAGS+=("${tag}")
1124         elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
1125             # Badly formed tag expressions can trigger way too many tests.
1126             set -x
1127             warn "The following tag expression hints at bad trigger: ${tag}"
1128             warn "Possible cause: Multiple triggers in a single comment."
1129             die "Aborting to avoid triggering too many tests."
1130         elif [[ "${tag}" == *"OR"* ]]; then
1131             # If OR had higher precedence than AND, it would be useful here.
1132             # Some people think it does, thus triggering way too many tests.
1133             set -x
1134             warn "The following tag expression hints at bad trigger: ${tag}"
1135             warn "Operator OR has lower precedence than AND. Use space instead."
1136             die "Aborting to avoid triggering too many tests."
1137         elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
1138             # Empty and comment lines are skipped.
1139             # Other lines are normal tags, they are to be prefixed.
1140             TAGS+=("${prefix}${tag}")
1141         fi
1142     done
1143     set -x
1144 }
1145
1146
1147 function select_topology () {
1148
1149     # Variables read:
1150     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
1151     # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
1152     # - CSIT_DIR - Path to existing root of local CSIT git repository.
1153     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
1154     # Variables set:
1155     # - TOPOLOGIES - Array of paths to suitable topology yaml files.
1156     # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
1157     # Functions called:
1158     # - die - Print to stderr and exit.
1159
1160     set -exuo pipefail
1161
1162     case_text="${NODENESS}_${FLAVOR}"
1163     case "${case_text}" in
1164         "1n_aws")
1165             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
1166             TOPOLOGIES_TAGS="1_node_single_link_topo"
1167             ;;
1168         "1n_c6in")
1169             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
1170             TOPOLOGIES_TAGS="1_node_single_link_topo"
1171             ;;
1172         "1n_tx2" | "1n_spr")
1173             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
1174             TOPOLOGIES_TAGS="2_node_single_link_topo"
1175             ;;
1176         "1n_vbox")
1177             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
1178             TOPOLOGIES_TAGS="2_node_single_link_topo"
1179             ;;
1180         "2n_aws")
1181             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
1182             TOPOLOGIES_TAGS="2_node_single_link_topo"
1183             ;;
1184         "2n_c6gn")
1185             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6gn*.yaml )
1186             TOPOLOGIES_TAGS="2_node_single_link_topo"
1187             ;;
1188         "2n_c7gn")
1189             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml )
1190             TOPOLOGIES_TAGS="2_node_single_link_topo"
1191             ;;
1192         "2n_c6in")
1193             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
1194             TOPOLOGIES_TAGS="2_node_single_link_topo"
1195             ;;
1196         "2n_clx")
1197             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml )
1198             TOPOLOGIES_TAGS="2_node_*_link_topo"
1199             ;;
1200         "2n_icx")
1201             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml )
1202             TOPOLOGIES_TAGS="2_node_*_link_topo"
1203             ;;
1204         "2n_spr")
1205             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml )
1206             TOPOLOGIES_TAGS="2_node_*_link_topo"
1207             ;;
1208         "2n_tx2")
1209             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml )
1210             TOPOLOGIES_TAGS="2_node_single_link_topo"
1211             ;;
1212         "2n_zn2")
1213             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml )
1214             TOPOLOGIES_TAGS="2_node_*_link_topo"
1215             ;;
1216         "3n_alt")
1217             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml )
1218             TOPOLOGIES_TAGS="3_node_single_link_topo"
1219             ;;
1220         "3n_aws")
1221             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
1222             TOPOLOGIES_TAGS="3_node_single_link_topo"
1223             ;;
1224         "3n_c6gn")
1225             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6gn*.yaml )
1226             TOPOLOGIES_TAGS="3_node_single_link_topo"
1227             ;;
1228         "3n_c7gn")
1229             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml )
1230             TOPOLOGIES_TAGS="3_node_single_link_topo"
1231             ;;
1232         "3n_c6in")
1233             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
1234             TOPOLOGIES_TAGS="3_node_single_link_topo"
1235             ;;
1236         "3n_icx")
1237             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml )
1238             # Trailing underscore is needed to distinguish from 3n_icxd.
1239             TOPOLOGIES_TAGS="3_node_*_link_topo"
1240             ;;
1241         "3n_icxd")
1242             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml )
1243             TOPOLOGIES_TAGS="3_node_single_link_topo"
1244             ;;
1245         "3n_snr")
1246             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml )
1247             TOPOLOGIES_TAGS="3_node_single_link_topo"
1248             ;;
1249         "3n_tsh")
1250             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml )
1251             TOPOLOGIES_TAGS="3_node_single_link_topo"
1252             ;;
1253         "3na_spr")
1254             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml )
1255             TOPOLOGIES_TAGS="3_node_*_link_topo"
1256             ;;
1257         "3nb_spr")
1258             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml )
1259             TOPOLOGIES_TAGS="3_node_*_link_topo"
1260             ;;
1261         *)
1262             # No falling back to a default; that should have been done
1263             # by the function which set NODENESS and FLAVOR.
1264             die "Unknown specification: ${case_text}"
1265     esac
1266
1267     if [[ -z "${TOPOLOGIES-}" ]]; then
1268         die "No applicable topology found!"
1269     fi
1270 }
1271
1272
1273 function set_environment_variables () {
1274
1275     # Depending on testbed topology, overwrite defaults set in the
1276     # resources/libraries/python/Constants.py file
1277     #
1278     # Variables read:
1279     # - TEST_CODE - String affecting test selection, usually jenkins job name.
1280     # Variables set:
1281     # See specific cases below.
1282
1283     set -exuo pipefail
1284
1285     case "${TEST_CODE}" in
1286         *"1n-aws" | *"2n-aws" | *"3n-aws")
1287             export TREX_RX_DESCRIPTORS_COUNT=1024
1288             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1289             export TREX_CORE_COUNT=6
1290             # Settings to prevent duration stretching.
1291             export PERF_TRIAL_STL_DELAY=0.1
1292             ;;
1293         *"2n-c6gn" | *"3n-c6gn")
1294             export TREX_RX_DESCRIPTORS_COUNT=1024
1295             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1296             export TREX_CORE_COUNT=6
1297             # Settings to prevent duration stretching.
1298             export PERF_TRIAL_STL_DELAY=0.1
1299             ;;
1300         *"2n-c7gn" | *"3n-c7gn")
1301             export TREX_RX_DESCRIPTORS_COUNT=1024
1302             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1303             export TREX_CORE_COUNT=6
1304             # Settings to prevent duration stretching.
1305             export PERF_TRIAL_STL_DELAY=0.1
1306             ;;
1307         *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
1308             export TREX_RX_DESCRIPTORS_COUNT=1024
1309             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
1310             export TREX_CORE_COUNT=6
1311             # Settings to prevent duration stretching.
1312             export PERF_TRIAL_STL_DELAY=0.1
1313             ;;
1314         *"2n-zn2")
1315             # Maciek's workaround for Zen2 with a lower number of cores.
1316             export TREX_CORE_COUNT=14
1317     esac
1318 }
1319
1320
1321 function untrap_and_unreserve_testbed () {
1322
1323     # Use this as a trap function to ensure testbed does not remain reserved.
1324     # Perhaps call directly before script exit, to free the testbed for other jobs.
1325     # This function is smart enough to avoid multiple unreservations, so it is safe.
1326     # Topology cleanup is executed (as a best practice), ignoring failures.
1327     #
1328     # Hardcoded values:
1329     # - default message to die with if testbed might remain reserved.
1330     # Arguments:
1331     # - ${1} - Message to die with if unreservation fails. Default hardcoded.
1332     # Variables read (by inner function):
1333     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
1334     # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
1335     # Variables set:
1336     # - TERRAFORM_MODULE_DIR - Terraform module directory.
1337     # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
1338     # Trap unregistered:
1339     # - EXIT - Failure to untrap is reported, but ignored otherwise.
1340     # Functions called:
1341     # - die - Print to stderr and exit.
1342     # - ansible_playbook - Perform an action using ansible, see ansible.sh
1343
1344     set -xo pipefail
1345     set +eu  # We do not want to exit early in a "teardown" function.
1346     trap - EXIT || echo "Trap deactivation failed, continuing anyway."
1347     wt="${WORKING_TOPOLOGY}"  # Just to avoid too long lines.
1348     if [[ -z "${wt-}" ]]; then
1349         set -eu
1350         warn "Testbed looks unreserved already. Trap removal failed before?"
1351     else
1352         ansible_playbook "cleanup" || true
1353         python3 "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
1354             die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
1355         }
1356         case "${TEST_CODE}" in
1357             *"1n-aws" | *"2n-aws" | *"3n-aws")
1358                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
1359                 terraform_destroy || die "Failed to call terraform destroy."
1360                 ;;
1361             *"2n-c6gn" | *"3n-c6gn")
1362                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1363                 terraform_destroy || die "Failed to call terraform destroy."
1364                 ;;
1365             *"2n-c7gn" | *"3n-c7gn")
1366                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1367                 terraform_destroy || die "Failed to call terraform destroy."
1368                 ;;
1369             *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
1370                 TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
1371                 terraform_destroy || die "Failed to call terraform destroy."
1372                 ;;
1373             *)
1374                 ;;
1375         esac
1376         WORKING_TOPOLOGY=""
1377         set -eu
1378     fi
1379 }
1380
1381
1382 function warn () {
1383
1384     # Print the message to standard error.
1385     #
1386     # Arguments:
1387     # - ${@} - The text of the message.
1388
1389     set -exuo pipefail
1390
1391     echo "$@" >&2
1392 }