Revert "fix(jobspec): Delete ipsec nfv density tests"
diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh
index c0aa5f9..4f104db 100644
@@ -1,5 +1,5 @@
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Copyright (c) 2020 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
+# Copyright (c) 2024 PANTHEON.tech and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -45,7 +45,7 @@ function activate_docker_topology () {
     device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "1n_skx" | "1n_tx2")
+        "1n_skx" | "1n_alt" | "1n_spr")
             # We execute reservation over csit-shim-dcr (ssh) which runs sourced
             # script's functions. Env variables are read from ssh output
             # back to localhost for further processing.
@@ -79,18 +79,26 @@ function activate_docker_topology () {
          die "Trap attempt failed, please cleanup manually. Aborting!"
     }
 
+    parse_env_variables || die "Parse of environment variables failed!"
+
     # Replace all variables in template with those in environment.
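+    # Sourcing the generated here-document script expands ${...} references
+    # with current environment values and writes the result to topo.yml.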
     source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
         die "Topology file create failed!"
     }
 
-    WORKING_TOPOLOGY="/tmp/topology.yaml"
+    WORKING_TOPOLOGY="${CSIT_DIR}/topologies/available/vpp_device.yaml"
     mv topo.yml "${WORKING_TOPOLOGY}" || {
         die "Topology move failed!"
     }
     cat ${WORKING_TOPOLOGY} | grep -v password || {
         die "Topology read failed!"
     }
+
+    # Subfunctions to update data that may depend on the reserved topology.
+    set_environment_variables || die
+    select_tags || die
+    compose_robot_arguments || die
+
 }
 
 
@@ -116,7 +124,7 @@ function activate_virtualenv () {
     env_dir="${root_path}/env"
     req_path=${2-$CSIT_DIR/requirements.txt}
     rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
-    pip3 install virtualenv==20.0.20 || {
+    pip3 install virtualenv==20.15.1 || {
         die "Virtualenv package install failed."
     }
     virtualenv --no-download --python=$(which python3) "${env_dir}" || {
@@ -135,19 +143,19 @@ function activate_virtualenv () {
 
 function archive_tests () {
 
-    # Create .tar.xz of generated/tests for archiving.
+    # Create .tar.gz of generated/tests for archiving.
     # To be run after generate_tests, kept separate to offer more flexibility.
 
     # Directory read:
     # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
     # File rewritten:
-    # - ${ARCHIVE_DIR}/tests.tar.xz - Archive of generated tests.
+    # - ${ARCHIVE_DIR}/generated_tests.tar.gz - Archive of generated tests.
 
     set -exuo pipefail
 
-    tar c "${GENERATED_DIR}/tests" | xz -3 > "${ARCHIVE_DIR}/tests.tar.xz" || {
-        die "Error creating archive of generated tests."
-    }
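+    # Write the tarball into ARCHIVE_DIR via pushd; "|| true" keeps archiving
+    # failures from aborting the job.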
+    pushd "${ARCHIVE_DIR}" || die
+    tar czf "generated_tests.tar.gz" "${GENERATED_DIR}/tests" || true
+    popd || die
 }
 
 
@@ -156,7 +164,7 @@ function check_download_dir () {
     # Fail if there are no files visible in ${DOWNLOAD_DIR}.
     #
     # Variables read:
-    # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+    # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
     # Directories read:
     # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
     # Functions called:
@@ -225,7 +233,7 @@ function common_dirs () {
     TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
         die "Readlink failed."
     }
-    JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/docs/job_specs") || {
+    JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
         die "Readlink failed."
     }
     RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
@@ -234,9 +242,6 @@ function common_dirs () {
     TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
         die "Readlink failed."
     }
-    DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
-        die "Readlink failed."
-    }
     PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
         die "Readlink failed."
     }
@@ -256,30 +261,37 @@ function common_dirs () {
 }
 
 
-function compose_pybot_arguments () {
+function compose_robot_arguments () {
 
+    # This function is called by the run_tests function.
+    # The reason is that some jobs (bisect) perform reservation multiple times,
+    # so WORKING_TOPOLOGY can be different each time.
+    #
     # Variables read:
     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
     # - DUT - CSIT test/ subdirectory, set while processing tags.
     # - TAGS - Array variable holding selected tag boolean expressions.
     # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
     # - TEST_CODE - The test selection string from environment or argument.
+    # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
     # Variables set:
-    # - PYBOT_ARGS - String holding part of all arguments for pybot.
-    # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.
+    # - ROBOT_ARGS - String holding part of all arguments for robot.
+    # - EXPANDED_TAGS - Array of robot argument strings compiled from tags.
 
     set -exuo pipefail
 
     # No explicit check needed with "set -u".
-    PYBOT_ARGS=("--loglevel" "TRACE")
-    PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
+    ROBOT_ARGS=("--loglevel" "TRACE")
+    ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
 
+    # TODO: The rest does not need to be recomputed on each reservation.
+    #       Refactor TEST_CODE so this part can be called only once.
     case "${TEST_CODE}" in
         *"device"*)
-            PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
+            ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
             ;;
-        *"perf"*)
-            PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
+        *"perf"* | *"bisect"*)
+            ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
             ;;
         *)
             die "Unknown specification: ${TEST_CODE}"
@@ -290,9 +302,17 @@ function compose_pybot_arguments () {
         if [[ ${tag} == "!"* ]]; then
             EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
         else
-            EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
+            if [[ ${SELECTION_MODE} == "--test" ]]; then
+                EXPANDED_TAGS+=("--test" "${tag}")
+            else
+                EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
+            fi
         fi
     done
+
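+    # In "--test" mode the topology tag is still applied via "--include",
+    # so only tests matching the reserved topology are executed.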
+    if [[ ${SELECTION_MODE} == "--test" ]]; then
+        EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}")
+    fi
 }
 
 
@@ -308,7 +328,7 @@ function deactivate_docker_topology () {
 
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "1n_skx" | "1n_tx2")
+        "1n_skx" | "1n_alt" | "1n_spr")
             ssh="ssh root@172.17.0.1 -p 6022"
             env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
             # The "declare -f" output is long and boring.
@@ -349,19 +369,19 @@ function die () {
 }
 
 
-function die_on_pybot_error () {
+function die_on_robot_error () {
 
     # Source this fragment if you want to abort on any failed test case.
     #
     # Variables read:
-    # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
+    # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
     # Functions called:
     # - die - Print to stderr and exit.
 
     set -exuo pipefail
 
-    if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
-        die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
+    if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
+        die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
     fi
 }
 
@@ -374,7 +394,7 @@ function generate_tests () {
     # within any subdirectory after copying.
 
     # This is a separate function, because this code is called
-    # both by autogen checker and entries calling run_pybot.
+    # both by the autogen checker and by entries calling run_robot.
 
     # Directories read:
     # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
@@ -420,50 +440,109 @@ function get_test_code () {
     fi
 
     case "${TEST_CODE}" in
-        *"1n-vbox"*)
+        *"1n-vbox")
             NODENESS="1n"
             FLAVOR="vbox"
             ;;
-        *"1n-skx"*)
+        *"1n-skx")
             NODENESS="1n"
             FLAVOR="skx"
             ;;
-       *"1n-tx2"*)
+        *"1n-spr")
             NODENESS="1n"
-            FLAVOR="tx2"
+            FLAVOR="spr"
+            ;;
+        *"1n-alt")
+            NODENESS="1n"
+            FLAVOR="alt"
             ;;
-        *"2n-skx"*)
+        *"1n-aws")
+            NODENESS="1n"
+            FLAVOR="aws"
+            ;;
+        *"2n-aws")
             NODENESS="2n"
-            FLAVOR="skx"
+            FLAVOR="aws"
             ;;
-        *"2n-zn2"*)
+        *"3n-aws")
+            NODENESS="3n"
+            FLAVOR="aws"
+            ;;
+        *"2n-c7gn")
             NODENESS="2n"
-            FLAVOR="zn2"
+            FLAVOR="c7gn"
             ;;
-        *"3n-skx"*)
+        *"3n-c7gn")
             NODENESS="3n"
-            FLAVOR="skx"
+            FLAVOR="c7gn"
+            ;;
+        *"1n-c6in")
+            NODENESS="1n"
+            FLAVOR="c6in"
+            ;;
+        *"2n-c6in")
+            NODENESS="2n"
+            FLAVOR="c6in"
+            ;;
+        *"3n-c6in")
+            NODENESS="3n"
+            FLAVOR="c6in"
             ;;
-        *"2n-clx"*)
+        *"2n-zn2")
+            NODENESS="2n"
+            FLAVOR="zn2"
+            ;;
+        *"2n-clx")
             NODENESS="2n"
             FLAVOR="clx"
             ;;
-        *"2n-dnv"*)
+        *"2n-icx")
             NODENESS="2n"
-            FLAVOR="dnv"
+            FLAVOR="icx"
+            ;;
+        *"2n-spr")
+            NODENESS="2n"
+            FLAVOR="spr"
+            ;;
+        *"3n-icx")
+            NODENESS="3n"
+            FLAVOR="icx"
             ;;
-        *"3n-dnv"*)
+        *"3na-spr")
+            NODENESS="3na"
+            FLAVOR="spr"
+            ;;
+        *"3nb-spr")
+            NODENESS="3nb"
+            FLAVOR="spr"
+            ;;
+        *"3n-snr")
+            NODENESS="3n"
+            FLAVOR="snr"
+            ;;
+        *"3n-icxd")
             NODENESS="3n"
-            FLAVOR="dnv"
+            FLAVOR="icxd"
+            ;;
+        *"2n-tx2")
+            NODENESS="2n"
+            FLAVOR="tx2"
             ;;
-        *"3n-tsh"*)
+        *"3n-tsh")
             NODENESS="3n"
             FLAVOR="tsh"
             ;;
-        *)
-            # Fallback to 3-node Haswell by default (backward compatibility)
+        *"3n-alt")
             NODENESS="3n"
-            FLAVOR="hsw"
+            FLAVOR="alt"
+            ;;
+        *"2n-x-"*)
+            NODENESS="2n"
+            FLAVOR="${TEST_CODE#*2n-}"
+            ;;
+        *"3n-x-"*)
+            NODENESS="3n"
+            FLAVOR="${TEST_CODE#*3n-}"
             ;;
     esac
 }
@@ -478,6 +557,10 @@ function get_test_tag_string () {
     # Variables set:
     # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
     #   May be empty, or even not set on event types not adding comment.
+    # - GIT_BISECT_FROM - If bisecttest, the commit hash to bisect from.
+    #   Else not set.
+    # Variables exported optionally:
+    # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger.
 
     # TODO: ci-management scripts no longer need to perform this.
 
@@ -485,6 +568,10 @@ function get_test_tag_string () {
 
     if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
         case "${TEST_CODE}" in
+            # Order matters, bisect job contains "perf" in its name.
+            *"bisect"*)
+                trigger="bisecttest"
+                ;;
             *"device"*)
                 trigger="devicetest"
                 ;;
@@ -505,10 +592,23 @@ function get_test_tag_string () {
         TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
         if [[ -z "${TEST_TAG_STRING-}" ]]; then
             # Probably we got a base64 encoded comment.
-            comment=$(base64 --decode <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
+            comment="${GERRIT_EVENT_COMMENT_TEXT}"
+            comment=$(base64 --decode <<< "${comment}" || true)
             comment=$(fgrep "${trigger}" <<< "${comment}" || true)
             TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
         fi
+        if [[ "${trigger}" == "bisecttest" ]]; then
+            # Intentionally without quotes, so spaces delimit elements.
+            test_tag_array=(${TEST_TAG_STRING}) || die "How could this fail?"
+            # First "argument" of bisecttest is a commit hash.
+            GIT_BISECT_FROM="${test_tag_array[0]}" || {
+                die "Bisect job requires commit hash."
+            }
+            # Update the tag string (tag expressions only, no commit hash).
+            TEST_TAG_STRING="${test_tag_array[@]:1}" || {
+                die "Bisect job needs a single test, no default."
+            }
+        fi
         if [[ -n "${TEST_TAG_STRING-}" ]]; then
             test_tag_array=(${TEST_TAG_STRING})
             if [[ "${test_tag_array[0]}" == "icl" ]]; then
@@ -517,9 +617,6 @@ function get_test_tag_string () {
             elif [[ "${test_tag_array[0]}" == "skx" ]]; then
                 export GRAPH_NODE_VARIANT="skx"
                 TEST_TAG_STRING="${test_tag_array[@]:1}" || true
-            elif [[ "${test_tag_array[0]}" == "hsw" ]]; then
-                export GRAPH_NODE_VARIANT="hsw"
-                TEST_TAG_STRING="${test_tag_array[@]:1}" || true
             fi
         fi
     fi
@@ -585,12 +682,65 @@ function move_archives () {
 }
 
 
+function prepare_topology () {
+
+    # Prepare virtual testbed topology if needed based on flavor.
+
+    # Variables read:
+    # - TEST_CODE - String affecting test selection, usually jenkins job name.
+    # - NODENESS - Node multiplicity of testbed, either "1n", "2n" or "3n".
+    # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
+    # Variables set:
+    # - TERRAFORM_MODULE_DIR - Terraform module directory.
+    # Functions called:
+    # - die - Print to stderr and exit.
+    # - terraform_init - Terraform init topology.
+    # - terraform_apply - Terraform apply topology.
+
+    set -exuo pipefail
+
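+    # For cloud flavors, terraform_destroy is registered as a trap before
+    # terraform_apply, so a failed provisioning still gets torn down.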
+    case_text="${NODENESS}_${FLAVOR}"
+    case "${case_text}" in
+        "1n_aws" | "2n_aws" | "3n_aws")
+            export TF_VAR_testbed_name="${TEST_CODE}"
+            TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+            terraform_init || die "Failed to call terraform init."
+            trap "terraform_destroy" ERR EXIT || {
+                die "Trap attempt failed, please cleanup manually. Aborting!"
+            }
+            terraform_apply || die "Failed to call terraform apply."
+            ;;
+        "2n_c7gn" | "3n_c7gn")
+            export TF_VAR_testbed_name="${TEST_CODE}"
+            TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn"
+            terraform_init || die "Failed to call terraform init."
+            trap "terraform_destroy" ERR EXIT || {
+                die "Trap attempt failed, please cleanup manually. Aborting!"
+            }
+            terraform_apply || die "Failed to call terraform apply."
+            ;;
+        "1n_c6in" | "2n_c6in" | "3n_c6in")
+            export TF_VAR_testbed_name="${TEST_CODE}"
+            TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
+            terraform_init || die "Failed to call terraform init."
+            trap "terraform_destroy" ERR EXIT || {
+                die "Trap attempt failed, please cleanup manually. Aborting!"
+            }
+            terraform_apply || die "Failed to call terraform apply."
+            ;;
+    esac
+}
+
+
 function reserve_and_cleanup_testbed () {
 
     # Reserve physical testbed, perform cleanup, register trap to unreserve.
     # When cleanup fails, remove from topologies and keep retrying
     # until all topologies are removed.
     #
+    # Multiple other functions are called from here,
+    # as they set variables that depend on reserved topology data.
+    #
     # Variables read:
     # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
     # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
@@ -628,7 +778,7 @@ function reserve_and_cleanup_testbed () {
                     }
                     die "Trap attempt failed, unreserve succeeded. Aborting."
                 }
-                # Cleanup + calibration checks.
+                # Cleanup + calibration checks
                 set +e
                 ansible_playbook "cleanup, calibration"
                 result="$?"
@@ -659,42 +809,50 @@ function reserve_and_cleanup_testbed () {
         echo "Sleeping ${sleep_time}"
         sleep "${sleep_time}" || die "Sleep failed."
     done
+
+    # Subfunctions to update data that may depend on the reserved topology.
+    set_environment_variables || die
+    select_tags || die
+    compose_robot_arguments || die
 }
 
 
-function run_pybot () {
+function run_robot () {
 
-    # Run pybot with options based on input variables. Create output_info.xml
+    # Run robot with options based on input variables.
+    #
+    # Testbed has to be reserved already,
+    # as some data may have changed between reservations,
+    # for example excluded NICs.
     #
     # Variables read:
     # - CSIT_DIR - Path to existing root of local CSIT git repository.
     # - ARCHIVE_DIR - Path to store robot result files in.
-    # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
+    # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.
     # - GENERATED_DIR - Tests are assumed to be generated under there.
+    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
+    # - DUT - CSIT test/ subdirectory, set while processing tags.
+    # - TAGS - Array variable holding selected tag boolean expressions.
+    # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
+    # - TEST_CODE - The test selection string from environment or argument.
     # Variables set:
-    # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
+    # - ROBOT_ARGS - String holding part of all arguments for robot.
+    # - EXPANDED_TAGS - Array of robot argument strings compiled from tags.
+    # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
     # Functions called:
     # - die - Print to stderr and exit.
 
     set -exuo pipefail
 
-    all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
-    all_options+=("--noncritical" "EXPECTED_FAILING")
+    all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
     all_options+=("${EXPANDED_TAGS[@]}")
 
     pushd "${CSIT_DIR}" || die "Change directory operation failed."
     set +e
     robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
-    PYBOT_EXIT_STATUS="$?"
+    ROBOT_EXIT_STATUS="$?"
     set -e
 
-    # Generate INFO level output_info.xml for post-processing.
-    all_options=("--loglevel" "INFO")
-    all_options+=("--log" "none")
-    all_options+=("--report" "none")
-    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
-    all_options+=("${ARCHIVE_DIR}/output.xml")
-    rebot "${all_options[@]}" || true
     popd || die "Change directory operation failed."
 }
 
@@ -710,23 +868,23 @@ function select_arch_os () {
 
     set -exuo pipefail
 
-    os_id=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g') || {
-        die "Get OS release failed."
-    }
+    source /etc/os-release || die "Get OS release failed."
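+    # /etc/os-release provides the ID and VERSION variables matched below.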
 
-    case "${os_id}" in
+    case "${ID}" in
         "ubuntu"*)
-            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
-            VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_BIONIC"
-            PKG_SUFFIX="deb"
-            ;;
-        "centos"*)
-            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_CENTOS"
-            VPP_VER_FILE="VPP_STABLE_VER_CENTOS"
-            PKG_SUFFIX="rpm"
+            case "${VERSION}" in
+                *"LTS (Jammy Jellyfish)"*)
+                    IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
+                    VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
+                    PKG_SUFFIX="deb"
+                    ;;
+                *)
+                    die "Unsupported Ubuntu version!"
+                    ;;
+            esac
             ;;
         *)
-            die "Unable to identify distro or os from ${os_id}"
+            die "Unsupported distro or OS!"
             ;;
     esac
 
@@ -746,6 +904,9 @@ function select_arch_os () {
 
 function select_tags () {
 
+    # Only to be called from the reservation function,
+    # as resulting tags may change based on topology data.
+    #
     # Variables read:
     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
     # - TEST_CODE - String affecting test selection, usually jenkins job name.
@@ -756,101 +917,179 @@ function select_tags () {
     # - BASH_FUNCTION_DIR - Directory with input files to process.
     # Variables set:
     # - TAGS - Array of processed tag boolean expressions.
+    # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
 
     set -exuo pipefail
 
     # NIC SELECTION
-    start_pattern='^  TG:'
+    case "${TEST_CODE}" in
+        *"1n-aws"* | *"1n-c6in"*)
+            start_pattern='^  SUT:'
+            ;;
+        *)
+            start_pattern='^  TG:'
+            ;;
+    esac
     end_pattern='^ \? \?[A-Za-z0-9]\+:'
-    # Remove the TG section from topology file
+    # Remove the TG (or SUT, for 1n testbeds) section from the topology file.
     sed_command="/${start_pattern}/,/${end_pattern}/d"
-    # All topologies DUT NICs
+    # All topologies NICs
     available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
                 | grep -hoP "model: \K.*" | sort -u)
-    # Selected topology DUT NICs
+    # Selected topology NICs
     reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
                | grep -hoP "model: \K.*" | sort -u)
-    # All topologies DUT NICs - Selected topology DUT NICs
+    # All topologies NICs - Selected topology NICs
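+    # "comm -13" prints lines unique to the second input, i.e. NIC models
+    # present in other topologies but missing from the reserved one.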
     exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
         die "Computation of excluded NICs failed."
     }
 
     # Select default NIC tag.
     case "${TEST_CODE}" in
-        *"3n-dnv"* | *"2n-dnv"*)
-            default_nic="nic_intel-x553"
+        *"3n-snr")
+            default_nic="nic_intel-e822cq"
             ;;
-        *"3n-tsh"*)
+        *"3n-icxd")
+            default_nic="nic_intel-e823c"
+            ;;
+        *"3n-tsh")
             default_nic="nic_intel-x520-da2"
             ;;
-        *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
+        *"3n-icx" | *"2n-icx")
+            default_nic="nic_intel-e810cq"
+            ;;
+        *"3na-spr")
+            default_nic="nic_mellanox-cx7veat"
+            ;;
+        *"3nb-spr")
+            default_nic="nic_intel-e810cq"
+            ;;
+        *"2n-spr")
+            default_nic="nic_intel-e810cq"
+            ;;
+        *"2n-clx" | *"2n-zn2")
             default_nic="nic_intel-xxv710"
             ;;
-        *"3n-hsw"* | *"mrr-daily-master")
+        *"2n-tx2" | *"3n-alt")
             default_nic="nic_intel-xl710"
             ;;
+        *"1n-aws" | *"2n-aws" | *"3n-aws")
+            default_nic="nic_amazon-nitro-50g"
+            ;;
+        *"2n-c7gn" | *"3n-c7gn")
+            default_nic="nic_amazon-nitro-100g"
+            ;;
+        *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+            default_nic="nic_amazon-nitro-200g"
+            ;;
+        *"2n-x-"* | *"3n-x-"*)
+            default_nic="nic_intel-e810cq"
+            ;;
         *)
             default_nic="nic_intel-x710"
             ;;
     esac
 
     sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
-    sed_nics_sub_cmd="sed -e s/ANDxxv710/ANDnic_intel-xxv710/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDx710/ANDnic_intel-x710/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDxl710/ANDnic_intel-xl710/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDx520-da2/ANDnic_intel-x520-da2/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDx553/ANDnic_intel-x553/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDcx556a/ANDnic_mellanox-cx556a/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDvic1227/ANDnic_cisco-vic-1227/"
-    sed_nics_sub_cmd+=" | sed -e s/ANDvic1385/ANDnic_cisco-vic-1385/"
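+    # The awk program below rewrites job spec lines into "--test" patterns:
+    # NIC shorthands are expanded to full codes (e.g. x710 -> 10ge2p1x710),
+    # a driver prefix is derived from the drv_* column, and the final print
+    # assembles the suite/test glob from the remaining columns.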
+    awk_nics_sub_cmd=""
+    awk_nics_sub_cmd+='gsub("xxv710","25ge2p1xxv710");'
+    awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
+    awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
+    awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
+    awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
+    awk_nics_sub_cmd+='gsub("2p1cx7veat","200ge2p1cx7veat");'
+    awk_nics_sub_cmd+='gsub("6p3cx7veat","200ge6p3cx7veat");'
+    awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
+    awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
+    awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");'
+    awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");'
+    awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
+    awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
+    awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
+    awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
+    awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
+    awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");'
+    awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
+    awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
+    awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
+    awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
+    awk_nics_sub_cmd+='else drv="";'
+    awk_nics_sub_cmd+='if ($1 =="-") cores="";'
+    awk_nics_sub_cmd+='else cores=$1;'
+    awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-" cores "-" drv $11"-"$5'
+
     # Tag file directory shorthand.
     tfd="${JOB_SPECS_DIR}"
     case "${TEST_CODE}" in
         # Select specific performance tests based on jenkins job type variable.
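+        # Comment lines are stripped from each job spec file and the awk
+        # program above turns every remaining line into a "--test" pattern.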
+        *"device"* )
+            readarray -t test_tag_array <<< $(grep -v "#" \
+                ${tfd}/vpp_device/${DUT}-${NODENESS}-${FLAVOR}.md |
+                awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
+            SELECTION_MODE="--test"
+            ;;
+        *"hoststack-daily"* )
+            readarray -t test_tag_array <<< $(grep -v "#" \
+                ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
+            ;;
         *"ndrpdr-weekly"* )
-            readarray -t test_tag_array <<< $(sed 's/ //g' \
-                ${tfd}/mlr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
-                eval ${sed_nics_sub_cmd} || echo "perftest") || die
+            readarray -t test_tag_array <<< $(grep -v "#" \
+                ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
             ;;
         *"mrr-daily"* )
-            readarray -t test_tag_array <<< $(sed 's/ //g' \
+            readarray -t test_tag_array <<< $(grep -v "#" \
                 ${tfd}/mrr_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
-                eval ${sed_nics_sub_cmd} || echo "perftest") || die
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
             ;;
         *"mrr-weekly"* )
-            readarray -t test_tag_array <<< $(sed 's/ //g' \
+            readarray -t test_tag_array <<< $(grep -v "#" \
                 ${tfd}/mrr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
-                eval ${sed_nics_sub_cmd} || echo "perftest") || die
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
+            ;;
+        *"soak-weekly"* )
+            readarray -t test_tag_array <<< $(grep -v "#" \
+                ${tfd}/soak_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
             ;;
         *"report-iterative"* )
             test_sets=(${TEST_TAG_STRING//:/ })
             # Run only one test set per run
             report_file=${test_sets[0]}.md
-            readarray -t test_tag_array <<< $(sed 's/ //g' \
+            readarray -t test_tag_array <<< $(grep -v "#" \
                 ${tfd}/report_iterative/${NODENESS}-${FLAVOR}/${report_file} |
-                eval ${sed_nics_sub_cmd} || echo "perftest") || die
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
             ;;
         *"report-coverage"* )
             test_sets=(${TEST_TAG_STRING//:/ })
             # Run only one test set per run
             report_file=${test_sets[0]}.md
-            readarray -t test_tag_array <<< $(sed 's/ //g' \
+            readarray -t test_tag_array <<< $(grep -v "#" \
                 ${tfd}/report_coverage/${NODENESS}-${FLAVOR}/${report_file} |
-                eval ${sed_nics_sub_cmd} || echo "perftest") || die
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
             ;;
         * )
             if [[ -z "${TEST_TAG_STRING-}" ]]; then
                 # If nothing is specified, we will run pre-selected tests
                 # defined by the following tags.
-                test_tag_array=("mrrAND${default_nic}AND1cAND64bANDip4base"
-                                "mrrAND${default_nic}AND1cAND78bANDip6base"
-                                "mrrAND${default_nic}AND1cAND64bANDl2bdbase"
-                                "mrrAND${default_nic}AND1cAND64bANDl2xcbase"
-                                "!dot1q" "!drv_avf")
+                test_tag_array=("mrrAND${default_nic}AND1cAND64bANDethip4-ip4base"
+                                "mrrAND${default_nic}AND1cAND78bANDethip6-ip6base"
+                                "mrrAND${default_nic}AND1cAND64bANDeth-l2bdbasemaclrn"
+                                "mrrAND${default_nic}AND1cAND64bANDeth-l2xcbase"
+                                "!drv_af_xdp" "!drv_avf")
             else
                 # If trigger contains tags, split them into array.
                 test_tag_array=(${TEST_TAG_STRING//:/ })
             fi
+            SELECTION_MODE="--include"
             ;;
     esac
 
@@ -858,53 +1097,58 @@ function select_tags () {
     #
     # Reasons for blacklisting:
     # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
-    # TODO: Add missing reasons here (if general) or where used (if specific).
     case "${TEST_CODE}" in
-        *"2n-skx"*)
-            test_tag_array+=("!ipsec")
+        *"1n-vbox")
+            test_tag_array+=("!avf")
+            test_tag_array+=("!vhost")
+            test_tag_array+=("!flow")
             ;;
-        *"3n-skx"*)
+        *"1n-alt")
+            test_tag_array+=("!flow")
+            ;;
+        *"2n-clx")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"2n-icx")
             test_tag_array+=("!ipsechw")
-            # Not enough nic_intel-xxv710 to support double link tests.
-            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
             ;;
-        *"2n-clx"*)
-            test_tag_array+=("!ipsec")
+        *"2n-spr")
             ;;
-        *"2n-zn2"*)
-            test_tag_array+=("!ipsec")
+        *"2n-tx2")
+            test_tag_array+=("!ipsechw")
             ;;
-        *"2n-dnv"*)
+        *"2n-zn2")
             test_tag_array+=("!ipsechw")
-            test_tag_array+=("!memif")
-            test_tag_array+=("!srv6_proxy")
-            test_tag_array+=("!vhost")
-            test_tag_array+=("!vts")
-            test_tag_array+=("!drv_avf")
             ;;
-        *"3n-dnv"*)
-            test_tag_array+=("!memif")
-            test_tag_array+=("!srv6_proxy")
-            test_tag_array+=("!vhost")
-            test_tag_array+=("!vts")
-            test_tag_array+=("!drv_avf")
+        *"3n-alt")
+            test_tag_array+=("!ipsechw")
             ;;
-        *"3n-tsh"*)
-            # 3n-tsh only has x520 NICs which don't work with AVF
-            test_tag_array+=("!drv_avf")
+        *"3n-icx")
             test_tag_array+=("!ipsechw")
+            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
             ;;
-        *"3n-hsw"*)
-            test_tag_array+=("!drv_avf")
-            # All cards have access to QAT. But only one card (xl710)
-            # resides in same NUMA as QAT. Other cards must go over QPI
-            # which we do not want to even run.
-            test_tag_array+=("!ipsechwNOTnic_intel-xl710")
+        *"3n-snr")
             ;;
-        *)
-            # Default to 3n-hsw due to compatibility.
+        *"3n-icxd")
+            ;;
+        *"3na-spr")
+            ;;
+        *"3nb-spr")
+            ;;
+        *"3n-tsh")
             test_tag_array+=("!drv_avf")
-            test_tag_array+=("!ipsechwNOTnic_intel-xl710")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"1n-aws" | *"2n-aws" | *"3n-aws")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"2n-c7gn" | *"3n-c7gn")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"2n-x-"* | *"3n-x-"*)
             ;;
     esac
 
@@ -912,20 +1156,18 @@ function select_tags () {
     test_tag_array+=("${exclude_nics[@]/#/!NIC_}")
 
     TAGS=()
-
-    # We will prefix with perftest to prevent running other tests
-    # (e.g. Functional).
-    prefix="perftestAND"
-    set +x
-    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
-        # Automatic prefixing for VPP jobs to limit the NIC used and
-        # traffic evaluation to MRR.
-        if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
-            prefix="${prefix}mrrAND"
-        else
-            prefix="${prefix}mrrAND${default_nic}AND"
+    prefix=""
+    if [[ "${TEST_CODE}" != *"daily"* ]]; then
+        if [[ "${TEST_CODE}" == "vpp-"* ]]; then
+            if [[ "${TEST_CODE}" != *"device"* ]]; then
+                # Automatic prefixing for VPP perf jobs to limit the NIC used.
+                if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then
+                    prefix="${default_nic}AND"
+                fi
+            fi
         fi
     fi
+    set +x
     for tag in "${test_tag_array[@]}"; do
         if [[ "${tag}" == "!"* ]]; then
             # Exclude tags are not prefixed.
@@ -957,7 +1199,7 @@ function select_topology () {
 
     # Variables read:
     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
-    # - FLAVOR - Node flavor string, currently either "hsw" or "skx".
+    # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
     # - CSIT_DIR - Path to existing root of local CSIT git repository.
     # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
     # Variables set:
@@ -970,50 +1212,105 @@ function select_topology () {
 
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        # TODO: Move tags to "# Blacklisting certain tags per topology" section.
-        # TODO: Double link availability depends on NIC used.
-        "1n_vbox")
+        "1n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
+            TOPOLOGIES_TAGS="1_node_single_link_topo"
+            ;;
+        "1n_c6in")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
+            TOPOLOGIES_TAGS="1_node_single_link_topo"
+            ;;
+        "1n_alt" | "1n_spr")
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "1n_skx" | "1n_tx2")
+        "1n_vbox")
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "2n_skx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
-            TOPOLOGIES_TAGS="2_node_*_link_topo"
+        "2n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "2n_zn2")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
-            TOPOLOGIES_TAGS="2_node_*_link_topo"
+        "2n_c7gn")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "3n_skx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
-            TOPOLOGIES_TAGS="3_node_*_link_topo"
+        "2n_c6in")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
         "2n_clx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml )
+            TOPOLOGIES_TAGS="2_node_*_link_topo"
+            ;;
+        "2n_icx")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml )
             TOPOLOGIES_TAGS="2_node_*_link_topo"
             ;;
-        "2n_dnv")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
+        "2n_spr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml )
+            TOPOLOGIES_TAGS="2_node_*_link_topo"
+            ;;
+        "2n_tx2")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "3n_dnv")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
+        "2n_zn2")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml )
+            TOPOLOGIES_TAGS="2_node_*_link_topo"
+            ;;
+        "3n_alt")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_c7gn")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_c6in")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_icx")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml )
+            # Trailing underscore is needed to distinguish from 3n_icxd.
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
+            ;;
+        "3n_icxd")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
-        "3n_hsw")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_hsw*.yaml )
+        "3n_snr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
         "3n_tsh")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3na_spr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml )
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
+            ;;
+        "3nb_spr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml )
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
+            ;;
+        "2n_x"*)
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_"${FLAVOR}"*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
+            ;;
+        "3n_x"*)
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_"${FLAVOR}"*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
         *)
-            # No falling back to 3n_hsw default, that should have been done
+            # No falling back to default, that should have been done
             # by the function which has set NODENESS and FLAVOR.
             die "Unknown specification: ${case_text}"
     esac
@@ -1024,65 +1321,54 @@ function select_topology () {
 }
 
 
-function select_vpp_device_tags () {
+function set_environment_variables () {
 
+    # Depending on testbed topology, overwrite defaults set in the
+    # resources/libraries/python/Constants.py file
+    #
+    # Only to be called from the reservation function,
+    # as resulting values may change based on topology data.
+    #
     # Variables read:
     # - TEST_CODE - String affecting test selection, usually jenkins job name.
-    # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
-    #   Can be unset.
     # Variables set:
-    # - TAGS - Array of processed tag boolean expressions.
+    # See the specific cases below.
 
     set -exuo pipefail
 
     case "${TEST_CODE}" in
-        # Select specific device tests based on jenkins job type variable.
-        * )
-            if [[ -z "${TEST_TAG_STRING-}" ]]; then
-                # If nothing is specified, we will run pre-selected tests by
-                # following tags. Items of array will be concatenated by OR
-                # in Robot Framework.
-                test_tag_array=()
-            else
-                # If trigger contains tags, split them into array.
-                test_tag_array=(${TEST_TAG_STRING//:/ })
-            fi
+        *"1n-aws" | *"2n-aws" | *"3n-aws")
+            export TREX_RX_DESCRIPTORS_COUNT=1024
+            export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+            export TREX_CORE_COUNT=6
+            # Settings to prevent duration stretching.
+            export PERF_TRIAL_STL_DELAY=0.1
             ;;
-    esac
-
-    # Blacklisting certain tags per topology.
-    #
-    # Reasons for blacklisting:
-    # - avf - AVF is not possible to run on enic driver of VirtualBox.
-    # - vhost - VirtualBox does not support nesting virtualization on Intel CPU.
-    case "${TEST_CODE}" in
-        *"1n-vbox"*)
-            test_tag_array+=("!avf")
-            test_tag_array+=("!vhost")
+        *"2n-c7gn" | *"3n-c7gn")
+            export TREX_RX_DESCRIPTORS_COUNT=1024
+            export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+            export TREX_CORE_COUNT=6
+            # Settings to prevent duration stretching.
+            export PERF_TRIAL_STL_DELAY=0.1
             ;;
-        *)
+        *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+            export TREX_RX_DESCRIPTORS_COUNT=1024
+            export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+            export TREX_CORE_COUNT=6
+            # Settings to prevent duration stretching.
+            export PERF_TRIAL_STL_DELAY=0.1
+            ;;
+        *"2n-zn2")
+            # Maciek's workaround for Zen2 with a lower number of cores.
+            export TREX_CORE_COUNT=14
+            ;;
+        *"2n-x-"* | *"3n-x-"* )
+            export TREX_CORE_COUNT=2
             ;;
     esac
-
-    TAGS=()
-
-    # We will prefix with devicetest to prevent running other tests
-    # (e.g. Functional).
-    prefix="devicetestAND"
-    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
-        # Automatic prefixing for VPP jobs to limit testing.
-        prefix="${prefix}"
-    fi
-    for tag in "${test_tag_array[@]}"; do
-        if [[ ${tag} == "!"* ]]; then
-            # Exclude tags are not prefixed.
-            TAGS+=("${tag}")
-        else
-            TAGS+=("${prefix}${tag}")
-        fi
-    done
 }
 
+
 function untrap_and_unreserve_testbed () {
 
     # Use this as a trap function to ensure testbed does not remain reserved.
@@ -1097,7 +1383,8 @@ function untrap_and_unreserve_testbed () {
     # Variables read (by inner function):
     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
     # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
-    # Variables written:
+    # Variables set:
+    # - TERRAFORM_MODULE_DIR - Terraform module directory.
     # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
     # Trap unregistered:
     # - EXIT - Failure to untrap is reported, but ignored otherwise.
@@ -1117,6 +1404,22 @@ function untrap_and_unreserve_testbed () {
         python3 "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
             die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
         }
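+        # Cloud (terraform-managed) testbeds are destroyed right after
+        # successful unreservation.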
+        case "${TEST_CODE}" in
+            *"1n-aws" | *"2n-aws" | *"3n-aws")
+                TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+                terraform_destroy || die "Failed to call terraform destroy."
+                ;;
+            *"2n-c7gn" | *"3n-c7gn")
+                TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
+                terraform_destroy || die "Failed to call terraform destroy."
+                ;;
+            *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+                TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
+                terraform_destroy || die "Failed to call terraform destroy."
+                ;;
+            *)
+                ;;
+        esac
         WORKING_TOPOLOGY=""
         set -eu
     fi