Revert "fix(jobspec): Delete ipsec nfv density tests"
[csit.git] / resources / libraries / bash / function / common.sh
index 39b2be4..4f104db 100644 (file)
@@ -1,5 +1,5 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Copyright (c) 2022 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
+# Copyright (c) 2024 PANTHEON.tech and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -45,7 +45,7 @@ function activate_docker_topology () {
     device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "1n_skx" | "1n_tx2")
+        "1n_skx" | "1n_alt" | "1n_spr")
             # We execute reservation over csit-shim-dcr (ssh) which runs sourced
             # script's functions. Env variables are read from ssh output
             # back to localhost for further processing.
@@ -93,6 +93,12 @@ function activate_docker_topology () {
     cat ${WORKING_TOPOLOGY} | grep -v password || {
         die "Topology read failed!"
     }
+
+    # Subfunctions to update data that may depend on topology reserved.
+    set_environment_variables || die
+    select_tags || die
+    compose_robot_arguments || die
+
 }
 
 
@@ -118,7 +124,7 @@ function activate_virtualenv () {
     env_dir="${root_path}/env"
     req_path=${2-$CSIT_DIR/requirements.txt}
     rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
-    pip3 install virtualenv==20.0.20 || {
+    pip3 install virtualenv==20.15.1 || {
         die "Virtualenv package install failed."
     }
     virtualenv --no-download --python=$(which python3) "${env_dir}" || {
@@ -158,7 +164,7 @@ function check_download_dir () {
     # Fail if there are no files visible in ${DOWNLOAD_DIR}.
     #
     # Variables read:
-    # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+    # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
     # Directories read:
     # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
     # Functions called:
@@ -227,7 +233,7 @@ function common_dirs () {
     TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
         die "Readlink failed."
     }
-    JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/docs/job_specs") || {
+    JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
         die "Readlink failed."
     }
     RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
@@ -236,9 +242,6 @@ function common_dirs () {
     TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
         die "Readlink failed."
     }
-    DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
-        die "Readlink failed."
-    }
     PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
         die "Readlink failed."
     }
@@ -258,8 +261,12 @@ function common_dirs () {
 }
 
 
-function compose_pybot_arguments () {
+function compose_robot_arguments () {
 
+    # This function is called by run_tests function.
+    # The reason is that some jobs (bisect) perform reservation multiple times,
+    # so WORKING_TOPOLOGY can be different each time.
+    #
     # Variables read:
     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
     # - DUT - CSIT test/ subdirectory, set while processing tags.
@@ -268,21 +275,23 @@ function compose_pybot_arguments () {
     # - TEST_CODE - The test selection string from environment or argument.
     # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
     # Variables set:
-    # - PYBOT_ARGS - String holding part of all arguments for pybot.
-    # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.
+    # - ROBOT_ARGS - String holding part of all arguments for robot.
+    # - EXPANDED_TAGS - Array of strings holding robot arguments compiled from tags.
 
     set -exuo pipefail
 
     # No explicit check needed with "set -u".
-    PYBOT_ARGS=("--loglevel" "TRACE")
-    PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
+    ROBOT_ARGS=("--loglevel" "TRACE")
+    ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
 
+    # TODO: The rest does not need to be recomputed on each reservation.
+    #       Refactor TEST_CODE so this part can be called only once.
     case "${TEST_CODE}" in
         *"device"*)
-            PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
+            ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
             ;;
-        *"perf"*)
-            PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
+        *"perf"* | *"bisect"*)
+            ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
             ;;
         *)
             die "Unknown specification: ${TEST_CODE}"
@@ -319,7 +328,7 @@ function deactivate_docker_topology () {
 
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "1n_skx" | "1n_tx2")
+        "1n_skx" | "1n_alt" | "1n_spr")
             ssh="ssh root@172.17.0.1 -p 6022"
             env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
             # The "declare -f" output is long and boring.
@@ -360,19 +369,19 @@ function die () {
 }
 
 
-function die_on_pybot_error () {
+function die_on_robot_error () {
 
     # Source this fragment if you want to abort on any failed test case.
     #
     # Variables read:
-    # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
+    # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
     # Functions called:
     # - die - Print to stderr and exit.
 
     set -exuo pipefail
 
-    if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
-        die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
+    if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
+        die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
     fi
 }
 
@@ -385,7 +394,7 @@ function generate_tests () {
     # within any subdirectory after copying.
 
     # This is a separate function, because this code is called
-    # both by autogen checker and entries calling run_pybot.
+    # both by autogen checker and entries calling run_robot.
 
     # Directories read:
     # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
@@ -431,66 +440,110 @@ function get_test_code () {
     fi
 
     case "${TEST_CODE}" in
-        *"1n-vbox"*)
+        *"1n-vbox")
             NODENESS="1n"
             FLAVOR="vbox"
             ;;
-        *"1n-skx"*)
+        *"1n-skx")
             NODENESS="1n"
             FLAVOR="skx"
             ;;
-       *"1n-tx2"*)
+        *"1n-spr")
             NODENESS="1n"
-            FLAVOR="tx2"
+            FLAVOR="spr"
+            ;;
+        *"1n-alt")
+            NODENESS="1n"
+            FLAVOR="alt"
+            ;;
+        *"1n-aws")
+            NODENESS="1n"
+            FLAVOR="aws"
             ;;
-        *"2n-aws"*)
+        *"2n-aws")
             NODENESS="2n"
             FLAVOR="aws"
             ;;
-        *"3n-aws"*)
+        *"3n-aws")
             NODENESS="3n"
             FLAVOR="aws"
             ;;
-        *"2n-skx"*)
+        *"2n-c7gn")
             NODENESS="2n"
-            FLAVOR="skx"
+            FLAVOR="c7gn"
             ;;
-        *"3n-skx"*)
+        *"3n-c7gn")
             NODENESS="3n"
-            FLAVOR="skx"
+            FLAVOR="c7gn"
+            ;;
+        *"1n-c6in")
+            NODENESS="1n"
+            FLAVOR="c6in"
             ;;
-        *"2n-zn2"*)
+        *"2n-c6in")
+            NODENESS="2n"
+            FLAVOR="c6in"
+            ;;
+        *"3n-c6in")
+            NODENESS="3n"
+            FLAVOR="c6in"
+            ;;
+        *"2n-zn2")
             NODENESS="2n"
             FLAVOR="zn2"
             ;;
-        *"2n-clx"*)
+        *"2n-clx")
             NODENESS="2n"
             FLAVOR="clx"
             ;;
-        *"2n-icx"*)
+        *"2n-icx")
             NODENESS="2n"
             FLAVOR="icx"
             ;;
-        *"3n-icx"*)
+        *"2n-spr")
+            NODENESS="2n"
+            FLAVOR="spr"
+            ;;
+        *"3n-icx")
             NODENESS="3n"
             FLAVOR="icx"
             ;;
-        *"2n-dnv"*)
-            NODENESS="2n"
-            FLAVOR="dnv"
+        *"3na-spr")
+            NODENESS="3na"
+            FLAVOR="spr"
             ;;
-        *"3n-dnv"*)
+        *"3nb-spr")
+            NODENESS="3nb"
+            FLAVOR="spr"
+            ;;
+        *"3n-snr")
+            NODENESS="3n"
+            FLAVOR="snr"
+            ;;
+        *"3n-icxd")
             NODENESS="3n"
-            FLAVOR="dnv"
+            FLAVOR="icxd"
             ;;
-        *"2n-tx2"*)
+        *"2n-tx2")
             NODENESS="2n"
             FLAVOR="tx2"
             ;;
-        *"3n-tsh"*)
+        *"3n-tsh")
             NODENESS="3n"
             FLAVOR="tsh"
             ;;
+        *"3n-alt")
+            NODENESS="3n"
+            FLAVOR="alt"
+            ;;
+        *"2n-x-"*)
+            NODENESS="2n"
+            FLAVOR="${TEST_CODE#*2n-}"
+            ;;
+        *"3n-x-"*)
+            NODENESS="3n"
+            FLAVOR="${TEST_CODE#*3n-}"
+            ;;
     esac
 }
 
@@ -504,6 +557,10 @@ function get_test_tag_string () {
     # Variables set:
     # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
     #   May be empty, or even not set on event types not adding comment.
+    # - GIT_BISECT_FROM - If bisecttest, the commit hash to bisect from.
+    #   Else not set.
+    # Variables exported optionally:
+    # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger.
 
     # TODO: ci-management scripts no longer need to perform this.
 
@@ -511,6 +568,10 @@ function get_test_tag_string () {
 
     if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
         case "${TEST_CODE}" in
+            # Order matters, bisect job contains "perf" in its name.
+            *"bisect"*)
+                trigger="bisecttest"
+                ;;
             *"device"*)
                 trigger="devicetest"
                 ;;
@@ -536,6 +597,18 @@ function get_test_tag_string () {
             comment=$(fgrep "${trigger}" <<< "${comment}" || true)
             TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
         fi
+        if [[ "${trigger}" == "bisecttest" ]]; then
+            # Intentionally without quotes, so spaces delimit elements.
+            test_tag_array=(${TEST_TAG_STRING}) || die "How could this fail?"
+            # First "argument" of bisecttest is a commit hash.
+            GIT_BISECT_FROM="${test_tag_array[0]}" || {
+                die "Bisect job requires commit hash."
+            }
+            # Update the tag string (tag expressions only, no commit hash).
+            TEST_TAG_STRING="${test_tag_array[@]:1}" || {
+                die "Bisect job needs a single test, no default."
+            }
+        fi
         if [[ -n "${TEST_TAG_STRING-}" ]]; then
             test_tag_array=(${TEST_TAG_STRING})
             if [[ "${test_tag_array[0]}" == "icl" ]]; then
@@ -609,42 +682,6 @@ function move_archives () {
 }
 
 
-function post_process_robot_outputs () {
-
-    # Generate INFO level output_info.xml by rebot.
-    # Archive UTI raw json outputs.
-    #
-    # Variables read:
-    # - ARCHIVE_DIR - Path to post-processed files.
-
-    set -exuo pipefail
-
-    # Compress raw json outputs, as they will never be post-processed.
-    pushd "${ARCHIVE_DIR}" || die
-    if [ -d "tests" ]; then
-        # Use deterministic order.
-        options+=("--sort=name")
-        # We are keeping info outputs where they are.
-        # Assuming we want to move anything but info files (and dirs).
-        options+=("--exclude=*.info.json")
-        tar czf "generated_output_raw.tar.gz" "${options[@]}" "tests" || true
-        # Tar can remove when archiving, but chokes (not deterministically)
-        # on attempting to remove dirs (not empty as info files are there).
-        # So we need to delete the raw files manually.
-        find "tests" -type f -name "*.raw.json" -delete || true
-    fi
-    popd || die
-
-    # Generate INFO level output_info.xml for post-processing.
-    all_options=("--loglevel" "INFO")
-    all_options+=("--log" "none")
-    all_options+=("--report" "none")
-    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
-    all_options+=("${ARCHIVE_DIR}/output.xml")
-    rebot "${all_options[@]}" || true
-}
-
-
 function prepare_topology () {
 
     # Prepare virtual testbed topology if needed based on flavor.
@@ -653,6 +690,8 @@ function prepare_topology () {
     # - TEST_CODE - String affecting test selection, usually jenkins job name.
     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
     # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
+    # Variables set:
+    # - TERRAFORM_MODULE_DIR - Terraform module directory.
     # Functions called:
     # - die - Print to stderr and exit.
     # - terraform_init - Terraform init topology.
@@ -662,14 +701,31 @@ function prepare_topology () {
 
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "2n_aws")
+        "1n_aws" | "2n_aws" | "3n_aws")
             export TF_VAR_testbed_name="${TEST_CODE}"
+            TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
             terraform_init || die "Failed to call terraform init."
+            trap "terraform_destroy" ERR EXIT || {
+                die "Trap attempt failed, please cleanup manually. Aborting!"
+            }
             terraform_apply || die "Failed to call terraform apply."
             ;;
-        "3n_aws")
+        "2n_c7gn" | "3n_c7gn")
             export TF_VAR_testbed_name="${TEST_CODE}"
+            TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn"
             terraform_init || die "Failed to call terraform init."
+            trap "terraform_destroy" ERR EXIT || {
+                die "Trap attempt failed, please cleanup manually. Aborting!"
+            }
+            terraform_apply || die "Failed to call terraform apply."
+            ;;
+        "1n_c6in" | "2n_c6in" | "3n_c6in")
+            export TF_VAR_testbed_name="${TEST_CODE}"
+            TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
+            terraform_init || die "Failed to call terraform init."
+            trap "terraform_destroy" ERR EXIT || {
+                die "Trap attempt failed, please cleanup manually. Aborting!"
+            }
             terraform_apply || die "Failed to call terraform apply."
             ;;
     esac
@@ -682,6 +738,9 @@ function reserve_and_cleanup_testbed () {
     # When cleanup fails, remove from topologies and keep retrying
     # until all topologies are removed.
     #
+    # Multiple other functions are called from here,
+    # as they set variables that depend on reserved topology data.
+    #
     # Variables read:
     # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
     # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
@@ -750,39 +809,50 @@ function reserve_and_cleanup_testbed () {
         echo "Sleeping ${sleep_time}"
         sleep "${sleep_time}" || die "Sleep failed."
     done
+
+    # Subfunctions to update data that may depend on topology reserved.
+    set_environment_variables || die
+    select_tags || die
+    compose_robot_arguments || die
 }
 
 
-function run_pybot () {
+function run_robot () {
 
-    # Run pybot with options based on input variables.
-    # Generate INFO level output_info.xml by rebot.
-    # Archive UTI raw json outputs.
+    # Run robot with options based on input variables.
+    #
+    # Testbed has to be reserved already,
+    # as some data may have changed between reservations,
+    # for example excluded NICs.
     #
     # Variables read:
     # - CSIT_DIR - Path to existing root of local CSIT git repository.
     # - ARCHIVE_DIR - Path to store robot result files in.
-    # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
+    # - ROBOT_ARGS, EXPANDED_TAGS - See the compose_robot_arguments function.
     # - GENERATED_DIR - Tests are assumed to be generated under there.
+    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
+    # - DUT - CSIT test/ subdirectory, set while processing tags.
+    # - TAGS - Array variable holding selected tag boolean expressions.
+    # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
+    # - TEST_CODE - The test selection string from environment or argument.
     # Variables set:
-    # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
+    # - ROBOT_ARGS - String holding part of all arguments for robot.
+    # - EXPANDED_TAGS - Array of strings holding robot arguments compiled from tags.
+    # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
     # Functions called:
     # - die - Print to stderr and exit.
 
     set -exuo pipefail
 
-    all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
-    all_options+=("--noncritical" "EXPECTED_FAILING")
+    all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
     all_options+=("${EXPANDED_TAGS[@]}")
 
     pushd "${CSIT_DIR}" || die "Change directory operation failed."
     set +e
     robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
-    PYBOT_EXIT_STATUS="$?"
+    ROBOT_EXIT_STATUS="$?"
     set -e
 
-    post_process_robot_outputs || die
-
     popd || die "Change directory operation failed."
 }
 
@@ -803,9 +873,9 @@ function select_arch_os () {
     case "${ID}" in
         "ubuntu"*)
             case "${VERSION}" in
-                *"LTS (Focal Fossa)"*)
-                    IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
-                    VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_FOCAL"
+                *"LTS (Jammy Jellyfish)"*)
+                    IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
+                    VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
                     PKG_SUFFIX="deb"
                     ;;
                 *)
@@ -834,6 +904,9 @@ function select_arch_os () {
 
 function select_tags () {
 
+    # Only to be called from the reservation function,
+    # as resulting tags may change based on topology data.
+    #
     # Variables read:
     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
     # - TEST_CODE - String affecting test selection, usually jenkins job name.
@@ -849,41 +922,69 @@ function select_tags () {
     set -exuo pipefail
 
     # NIC SELECTION
-    start_pattern='^  TG:'
+    case "${TEST_CODE}" in
+        *"1n-aws"* | *"1n-c6in"*)
+            start_pattern='^  SUT:'
+            ;;
+        *)
+            start_pattern='^  TG:'
+            ;;
+    esac
     end_pattern='^ \? \?[A-Za-z0-9]\+:'
-    # Remove the TG section from topology file
+    # Remove the sections from topology file
     sed_command="/${start_pattern}/,/${end_pattern}/d"
-    # All topologies DUT NICs
+    # All topologies NICs
     available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
                 | grep -hoP "model: \K.*" | sort -u)
-    # Selected topology DUT NICs
+    # Selected topology NICs
     reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
                | grep -hoP "model: \K.*" | sort -u)
-    # All topologies DUT NICs - Selected topology DUT NICs
+    # All topologies NICs - Selected topology NICs
     exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
         die "Computation of excluded NICs failed."
     }
 
     # Select default NIC tag.
     case "${TEST_CODE}" in
-        *"3n-dnv"* | *"2n-dnv"*)
-            default_nic="nic_intel-x553"
+        *"3n-snr")
+            default_nic="nic_intel-e822cq"
             ;;
-        *"3n-tsh"*)
+        *"3n-icxd")
+            default_nic="nic_intel-e823c"
+            ;;
+        *"3n-tsh")
             default_nic="nic_intel-x520-da2"
             ;;
-        *"3n-icx"* | *"2n-icx"*)
-            default_nic="nic_intel-xxv710"
+        *"3n-icx" | *"2n-icx")
+            default_nic="nic_intel-e810cq"
             ;;
-        *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
+        *"3na-spr")
+            default_nic="nic_mellanox-cx7veat"
+            ;;
+        *"3nb-spr")
+            default_nic="nic_intel-e810cq"
+            ;;
+        *"2n-spr")
+            default_nic="nic_intel-e810cq"
+            ;;
+        *"2n-clx" | *"2n-zn2")
             default_nic="nic_intel-xxv710"
             ;;
-        *"2n-tx2"* | *"mrr-daily-master")
+        *"2n-tx2" | *"3n-alt")
             default_nic="nic_intel-xl710"
             ;;
-        *"2n-aws"* | *"3n-aws"*)
+        *"1n-aws" | *"2n-aws" | *"3n-aws")
             default_nic="nic_amazon-nitro-50g"
             ;;
+        *"2n-c7gn" | *"3n-c7gn")
+            default_nic="nic_amazon-nitro-100g"
+            ;;
+        *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+            default_nic="nic_amazon-nitro-200g"
+            ;;
+        *"2n-x-"* | *"3n-x-"*)
+            default_nic="nic_intel-e810cq"
+            ;;
         *)
             default_nic="nic_intel-x710"
             ;;
@@ -895,14 +996,22 @@ function select_tags () {
     awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
     awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
     awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
-    awk_nics_sub_cmd+='gsub("x553","10ge2p1x553");'
     awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
+    awk_nics_sub_cmd+='gsub("2p1cx7veat","200ge2p1cx7veat");'
+    awk_nics_sub_cmd+='gsub("6p3cx7veat","200ge6p3cx7veat");'
+    awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
     awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
+    awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");'
+    awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");'
     awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
     awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
     awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
+    awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
+    awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
+    awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");'
     awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
     awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
+    awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
     awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
     awk_nics_sub_cmd+='else drv="";'
     awk_nics_sub_cmd+='if ($1 =="-") cores="";'
@@ -919,9 +1028,15 @@ function select_tags () {
                 awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
             SELECTION_MODE="--test"
             ;;
+        *"hoststack-daily"* )
+            readarray -t test_tag_array <<< $(grep -v "#" \
+                ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
+            ;;
         *"ndrpdr-weekly"* )
             readarray -t test_tag_array <<< $(grep -v "#" \
-                ${tfd}/mlr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+                ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
             SELECTION_MODE="--test"
             ;;
@@ -937,6 +1052,12 @@ function select_tags () {
                 awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
             SELECTION_MODE="--test"
             ;;
+        *"soak-weekly"* )
+            readarray -t test_tag_array <<< $(grep -v "#" \
+                ${tfd}/soak_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+            SELECTION_MODE="--test"
+            ;;
         *"report-iterative"* )
             test_sets=(${TEST_TAG_STRING//:/ })
             # Run only one test set per run
@@ -977,61 +1098,58 @@ function select_tags () {
     # Reasons for blacklisting:
     # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
     case "${TEST_CODE}" in
-        *"1n-vbox"*)
+        *"1n-vbox")
             test_tag_array+=("!avf")
             test_tag_array+=("!vhost")
             test_tag_array+=("!flow")
             ;;
-        *"1n_tx2"*)
+        *"1n-alt")
             test_tag_array+=("!flow")
             ;;
-        *"2n-skx"*)
+        *"2n-clx")
             test_tag_array+=("!ipsechw")
             ;;
-        *"3n-skx"*)
+        *"2n-icx")
             test_tag_array+=("!ipsechw")
-            # Not enough nic_intel-xxv710 to support double link tests.
-            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
             ;;
-        *"2n-clx"*)
+        *"2n-spr")
+            ;;
+        *"2n-tx2")
             test_tag_array+=("!ipsechw")
             ;;
-        *"2n-icx"*)
+        *"2n-zn2")
             test_tag_array+=("!ipsechw")
             ;;
-        *"3n-icx"*)
+        *"3n-alt")
             test_tag_array+=("!ipsechw")
-            # Not enough nic_intel-xxv710 to support double link tests.
-            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
             ;;
-        *"2n-zn2"*)
+        *"3n-icx")
             test_tag_array+=("!ipsechw")
+            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
             ;;
-        *"2n-dnv"*)
-            test_tag_array+=("!memif")
-            test_tag_array+=("!srv6_proxy")
-            test_tag_array+=("!vhost")
-            test_tag_array+=("!vts")
-            test_tag_array+=("!drv_avf")
+        *"3n-snr")
             ;;
-        *"2n-tx2"*)
-            test_tag_array+=("!ipsechw")
+        *"3n-icxd")
             ;;
-        *"3n-dnv"*)
-            test_tag_array+=("!memif")
-            test_tag_array+=("!srv6_proxy")
-            test_tag_array+=("!vhost")
-            test_tag_array+=("!vts")
-            test_tag_array+=("!drv_avf")
+        *"3na-spr")
+            ;;
+        *"3nb-spr")
             ;;
-        *"3n-tsh"*)
-            # 3n-tsh only has x520 NICs which don't work with AVF
+        *"3n-tsh")
             test_tag_array+=("!drv_avf")
             test_tag_array+=("!ipsechw")
             ;;
-        *"2n-aws"* | *"3n-aws"*)
+        *"1n-aws" | *"2n-aws" | *"3n-aws")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"2n-c7gn" | *"3n-c7gn")
             test_tag_array+=("!ipsechw")
             ;;
+        *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"2n-x-"* | *"3n-x-"*)
+            ;;
     esac
 
     # We will add excluded NICs.
@@ -1039,19 +1157,17 @@ function select_tags () {
 
     TAGS=()
     prefix=""
-
-    set +x
-    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
-        if [[ "${TEST_CODE}" != *"device"* ]]; then
-            # Automatic prefixing for VPP perf jobs to limit the NIC used and
-            # traffic evaluation to MRR.
-            if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
-                prefix="${prefix}mrrAND"
-            else
-                prefix="${prefix}mrrAND${default_nic}AND"
+    if [[ "${TEST_CODE}" != *"daily"* ]]; then
+        if [[ "${TEST_CODE}" == "vpp-"* ]]; then
+            if [[ "${TEST_CODE}" != *"device"* ]]; then
+                # Automatic prefixing for VPP perf jobs to limit the NIC used.
+                if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then
+                    prefix="${default_nic}AND"
+                fi
             fi
         fi
     fi
+    set +x
     for tag in "${test_tag_array[@]}"; do
         if [[ "${tag}" == "!"* ]]; then
             # Exclude tags are not prefixed.
@@ -1096,60 +1212,101 @@ function select_topology () {
 
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "1n_vbox")
+        "1n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
+            TOPOLOGIES_TAGS="1_node_single_link_topo"
+            ;;
+        "1n_c6in")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
+            TOPOLOGIES_TAGS="1_node_single_link_topo"
+            ;;
+        "1n_alt" | "1n_spr")
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "1n_skx" | "1n_tx2")
+        "1n_vbox")
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "2n_skx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
-            TOPOLOGIES_TAGS="2_node_*_link_topo"
-            ;;
-        "2n_zn2")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
-            TOPOLOGIES_TAGS="2_node_*_link_topo"
+        "2n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "3n_skx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
-            TOPOLOGIES_TAGS="3_node_*_link_topo"
+        "2n_c7gn")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "3n_icx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml )
-            TOPOLOGIES_TAGS="3_node_*_link_topo"
+        "2n_c6in")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
+            TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
         "2n_clx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml )
             TOPOLOGIES_TAGS="2_node_*_link_topo"
             ;;
         "2n_icx")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml )
             TOPOLOGIES_TAGS="2_node_*_link_topo"
             ;;
-        "2n_dnv")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
+        "2n_spr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml )
+            TOPOLOGIES_TAGS="2_node_*_link_topo"
+            ;;
+        "2n_tx2")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "3n_dnv")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
+        "2n_zn2")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml )
+            TOPOLOGIES_TAGS="2_node_*_link_topo"
+            ;;
+        "3n_alt")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_c7gn")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_c6in")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_icx")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml )
+            # Trailing underscore is needed to distinguish from 3n_icxd.
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
+            ;;
+        "3n_icxd")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "3n_snr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
         "3n_tsh")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
-        "2n_tx2")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
-            TOPOLOGIES_TAGS="2_node_single_link_topo"
+        "3na_spr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml )
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
             ;;
-        "2n_aws")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_aws*.yaml )
+        "3nb_spr")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml )
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
+            ;;
+        "2n_x"*)
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_"${FLAVOR}"*.yaml )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
-        "3n_aws")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_aws*.yaml )
+        "3n_x"*)
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_"${FLAVOR}"*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
         *)
@@ -1169,6 +1326,9 @@ function set_environment_variables () {
     # Depending on testbed topology, overwrite defaults set in the
     # resources/libraries/python/Constants.py file
     #
+    # Only to be called from the reservation function,
+    # as resulting values may change based on topology data.
+    #
     # Variables read:
     # - TEST_CODE - String affecting test selection, usually jenkins job name.
     # Variables set:
@@ -1177,14 +1337,34 @@ function set_environment_variables () {
     set -exuo pipefail
 
     case "${TEST_CODE}" in
-        *"2n-aws"* | *"3n-aws"*)
-            # T-Rex 2.88 workaround for ENA NICs
+        *"1n-aws" | *"2n-aws" | *"3n-aws")
             export TREX_RX_DESCRIPTORS_COUNT=1024
             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
             export TREX_CORE_COUNT=6
-            # Settings to prevent duration stretching
+            # Settings to prevent duration stretching.
             export PERF_TRIAL_STL_DELAY=0.1
             ;;
+        *"2n-c7gn" | *"3n-c7gn")
+            export TREX_RX_DESCRIPTORS_COUNT=1024
+            export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+            export TREX_CORE_COUNT=6
+            # Settings to prevent duration stretching.
+            export PERF_TRIAL_STL_DELAY=0.1
+            ;;
+        *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+            export TREX_RX_DESCRIPTORS_COUNT=1024
+            export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+            export TREX_CORE_COUNT=6
+            # Settings to prevent duration stretching.
+            export PERF_TRIAL_STL_DELAY=0.1
+            ;;
+        *"2n-zn2")
+            # Maciek's workaround for Zen2 with a lower number of cores.
+            export TREX_CORE_COUNT=14
+            ;;
+        *"2n-x-"* | *"3n-x-"* )
+            export TREX_CORE_COUNT=2
+            ;;
     esac
 }
 
@@ -1203,7 +1383,8 @@ function untrap_and_unreserve_testbed () {
     # Variables read (by inner function):
     # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
     # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
-    # Variables written:
+    # Variables set:
+    # - TERRAFORM_MODULE_DIR - Terraform module directory.
     # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
     # Trap unregistered:
     # - EXIT - Failure to untrap is reported, but ignored otherwise.
@@ -1224,7 +1405,16 @@ function untrap_and_unreserve_testbed () {
             die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
         }
         case "${TEST_CODE}" in
-            *"2n-aws"* | *"3n-aws"*)
+            *"1n-aws" | *"2n-aws" | *"3n-aws")
+                TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+                terraform_destroy || die "Failed to call terraform destroy."
+                ;;
+            *"2n-c7gn" | *"3n-c7gn")
+                TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
+                terraform_destroy || die "Failed to call terraform destroy."
+                ;;
+            *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+                TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
                 terraform_destroy || die "Failed to call terraform destroy."
                 ;;
             *)