X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Flibraries%2Fbash%2Ffunction%2Fcommon.sh;h=aa842017227d9d47751caefdaaab98d2b00637e6;hp=78a99dae0da66393eef0bb6bc931fcc8f58f1253;hb=HEAD;hpb=f80c4dc842b77e9231e443f7ef10e4a735f18e1f diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh index 78a99dae0d..4f104dbfd3 100644 --- a/resources/libraries/bash/function/common.sh +++ b/resources/libraries/bash/function/common.sh @@ -1,5 +1,5 @@ -# Copyright (c) 2023 Cisco and/or its affiliates. -# Copyright (c) 2023 PANTHEON.tech and/or its affiliates. +# Copyright (c) 2024 Cisco and/or its affiliates. +# Copyright (c) 2024 PANTHEON.tech and/or its affiliates. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: @@ -45,7 +45,7 @@ function activate_docker_topology () { device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})" case_text="${NODENESS}_${FLAVOR}" case "${case_text}" in - "1n_skx" | "1n_tx2" | "1n_spr") + "1n_skx" | "1n_alt" | "1n_spr") # We execute reservation over csit-shim-dcr (ssh) which runs sourced # script's functions. Env variables are read from ssh output # back to localhost for further processing. @@ -93,6 +93,12 @@ function activate_docker_topology () { cat ${WORKING_TOPOLOGY} | grep -v password || { die "Topology read failed!" } + + # Subfunctions to update data that may depend on topology reserved. + set_environment_variables || die + select_tags || die + compose_robot_arguments || die + } @@ -257,6 +263,10 @@ function common_dirs () { function compose_robot_arguments () { + # This function is called by run_tests function. + # The reason is that some jobs (bisect) perform reservation multiple times, + # so WORKING_TOPOLOGY can be different each time. + # # Variables read: # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed. # - DUT - CSIT test/ subdirectory, set while processing tags. @@ -274,11 +284,13 @@ function compose_robot_arguments () { ROBOT_ARGS=("--loglevel" "TRACE") ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}") + # TODO: The rest does not need to be recomputed on each reservation. + # Refactor TEST_CODE so this part can be called only once. case "${TEST_CODE}" in *"device"*) ROBOT_ARGS+=("--suite" "tests.${DUT}.device") ;; - *"perf"*) + *"perf"* | *"bisect"*) ROBOT_ARGS+=("--suite" "tests.${DUT}.perf") ;; *) @@ -316,7 +328,7 @@ function deactivate_docker_topology () { case_text="${NODENESS}_${FLAVOR}" case "${case_text}" in - "1n_skx" | "1n_tx2" | "1n_spr") + "1n_skx" | "1n_alt" | "1n_spr") ssh="ssh root@172.17.0.1 -p 6022" env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die # The "declare -f" output is long and boring. 
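The hunks above make activate_docker_topology finish by recomputing data that can depend on which testbed was actually reserved (reserve_and_cleanup_testbed gains the same three calls further down), since bisect jobs may perform reservation more than once and, for example, excluded NICs can differ between reservations. A minimal sketch of the intended ordering, reusing the function names from this file; the trailing comments are interpretation, not text from the patch:

    # Sketch only: topology-dependent data is recomputed after every
    # (re)reservation, once WORKING_TOPOLOGY points at the reserved testbed.
    set_environment_variables || die   # per-testbed overrides (e.g. TRex settings)
    select_tags || die                 # tag expressions, may depend on NICs present
    compose_robot_arguments || die     # --suite selection and TOPOLOGY_PATH for robot
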
@@ -428,102 +440,110 @@ function get_test_code () { fi case "${TEST_CODE}" in - *"1n-vbox"*) + *"1n-vbox") NODENESS="1n" FLAVOR="vbox" ;; - *"1n-skx"*) + *"1n-skx") NODENESS="1n" FLAVOR="skx" ;; - *"1n-spr"*) + *"1n-spr") NODENESS="1n" FLAVOR="spr" ;; - *"1n-tx2"*) + *"1n-alt") NODENESS="1n" - FLAVOR="tx2" + FLAVOR="alt" ;; - *"1n-aws"*) + *"1n-aws") NODENESS="1n" FLAVOR="aws" ;; - *"2n-aws"*) + *"2n-aws") NODENESS="2n" FLAVOR="aws" ;; - *"3n-aws"*) + *"3n-aws") NODENESS="3n" FLAVOR="aws" ;; - *"1n-c6gn"*) - NODENESS="1n" - FLAVOR="c6gn" - ;; - *"2n-c6gn"*) + *"2n-c7gn") NODENESS="2n" - FLAVOR="c6gn" + FLAVOR="c7gn" ;; - *"3n-c6gn"*) + *"3n-c7gn") NODENESS="3n" - FLAVOR="c6gn" + FLAVOR="c7gn" ;; - *"1n-c6in"*) + *"1n-c6in") NODENESS="1n" FLAVOR="c6in" ;; - *"2n-c6in"*) + *"2n-c6in") NODENESS="2n" FLAVOR="c6in" ;; - *"3n-c6in"*) + *"3n-c6in") NODENESS="3n" FLAVOR="c6in" ;; - *"2n-zn2"*) + *"2n-zn2") NODENESS="2n" FLAVOR="zn2" ;; - *"2n-clx"*) + *"2n-clx") NODENESS="2n" FLAVOR="clx" ;; - *"2n-icx"*) + *"2n-icx") NODENESS="2n" FLAVOR="icx" ;; - *"2n-spr"*) + *"2n-spr") NODENESS="2n" FLAVOR="spr" ;; - *"3n-icx"*) + *"3n-icx") NODENESS="3n" FLAVOR="icx" ;; - *"3na-spr"*) + *"3na-spr") NODENESS="3na" FLAVOR="spr" ;; - *"3nb-spr"*) + *"3nb-spr") NODENESS="3nb" FLAVOR="spr" ;; - *"3n-snr"*) + *"3n-snr") NODENESS="3n" FLAVOR="snr" ;; - *"2n-tx2"*) + *"3n-icxd") + NODENESS="3n" + FLAVOR="icxd" + ;; + *"2n-tx2") NODENESS="2n" FLAVOR="tx2" ;; - *"3n-tsh"*) + *"3n-tsh") NODENESS="3n" FLAVOR="tsh" ;; - *"3n-alt"*) + *"3n-alt") NODENESS="3n" FLAVOR="alt" ;; + *"2n-x-"*) + NODENESS="2n" + FLAVOR="${TEST_CODE#*2n-}" + ;; + *"3n-x-"*) + NODENESS="3n" + FLAVOR="${TEST_CODE#*3n-}" + ;; esac } @@ -537,6 +557,8 @@ function get_test_tag_string () { # Variables set: # - TEST_TAG_STRING - The string following trigger word in gerrit comment. # May be empty, or even not set on event types not adding comment. + # - GIT_BISECT_FROM - If bisecttest, the commit hash to bisect from. + # Else not set. # Variables exported optionally: # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger. @@ -546,6 +568,10 @@ function get_test_tag_string () { if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then case "${TEST_CODE}" in + # Order matters, bisect job contains "perf" in its name. + *"bisect"*) + trigger="bisecttest" + ;; *"device"*) trigger="devicetest" ;; @@ -571,6 +597,18 @@ function get_test_tag_string () { comment=$(fgrep "${trigger}" <<< "${comment}" || true) TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true) fi + if [[ "${trigger}" == "bisecttest" ]]; then + # Intentionally without quotes, so spaces delimit elements. + test_tag_array=(${TEST_TAG_STRING}) || die "How could this fail?" + # First "argument" of bisecttest is a commit hash. + GIT_BISECT_FROM="${test_tag_array[0]}" || { + die "Bisect job requires commit hash." + } + # Update the tag string (tag expressions only, no commit hash). + TEST_TAG_STRING="${test_tag_array[@]:1}" || { + die "Bisect job needs a single test, no default." + } + fi if [[ -n "${TEST_TAG_STRING-}" ]]; then test_tag_array=(${TEST_TAG_STRING}) if [[ "${test_tag_array[0]}" == "icl" ]]; then @@ -672,9 +710,9 @@ function prepare_topology () { } terraform_apply || die "Failed to call terraform apply." 
;; - "1n_c6gn" | "2n_c6gn" | "3n_c6gn") + "2n_c7gn" | "3n_c7gn") export TF_VAR_testbed_name="${TEST_CODE}" - TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6gn" + TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn" terraform_init || die "Failed to call terraform init." trap "terraform_destroy" ERR EXIT || { die "Trap attempt failed, please cleanup manually. Aborting!" @@ -700,6 +738,9 @@ function reserve_and_cleanup_testbed () { # When cleanup fails, remove from topologies and keep retrying # until all topologies are removed. # + # Multiple other functions are called from here, + # as they set variables that depend on reserved topology data. + # # Variables read: # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on. # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script. @@ -768,6 +809,11 @@ function reserve_and_cleanup_testbed () { echo "Sleeping ${sleep_time}" sleep "${sleep_time}" || die "Sleep failed." done + + # Subfunctions to update data that may depend on topology reserved. + set_environment_variables || die + select_tags || die + compose_robot_arguments || die } @@ -775,12 +821,23 @@ function run_robot () { # Run robot with options based on input variables. # + # Testbed has to be reserved already, + # as some data may have changed between reservations, + # for example excluded NICs. + # # Variables read: # - CSIT_DIR - Path to existing root of local CSIT git repository. # - ARCHIVE_DIR - Path to store robot result files in. # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.sh # - GENERATED_DIR - Tests are assumed to be generated under there. + # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed. + # - DUT - CSIT test/ subdirectory, set while processing tags. + # - TAGS - Array variable holding selected tag boolean expressions. + # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology. + # - TEST_CODE - The test selection string from environment or argument. # Variables set: + # - ROBOT_ARGS - String holding part of all arguments for robot. + # - EXPANDED_TAGS - Array of string robot arguments compiled from tags. # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation. # Functions called: # - die - Print to stderr and exit. @@ -847,6 +904,9 @@ function select_arch_os () { function select_tags () { + # Only to be called from the reservation function, + # as resulting tags may change based on topology data. + # # Variables read: # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed. # - TEST_CODE - String affecting test selection, usually jenkins job name. @@ -863,7 +923,7 @@ function select_tags () { # NIC SELECTION case "${TEST_CODE}" in - *"1n-aws"* | *"1n-c6gn"* | *"1n-c6in"*) + *"1n-aws"* | *"1n-c6in"*) start_pattern='^ SUT:' ;; *) @@ -886,39 +946,45 @@ function select_tags () { # Select default NIC tag. 
case "${TEST_CODE}" in - *"3n-snr"*) + *"3n-snr") default_nic="nic_intel-e822cq" ;; - *"3n-tsh"*) + *"3n-icxd") + default_nic="nic_intel-e823c" + ;; + *"3n-tsh") default_nic="nic_intel-x520-da2" ;; - *"3n-icx"* | *"2n-icx"*) + *"3n-icx" | *"2n-icx") default_nic="nic_intel-e810cq" ;; - *"3na-spr"*) + *"3na-spr") default_nic="nic_mellanox-cx7veat" ;; - *"3nb-spr"*) + *"3nb-spr") default_nic="nic_intel-e810cq" ;; - *"2n-spr"*) + *"2n-spr") default_nic="nic_intel-e810cq" ;; - *"2n-clx"* | *"2n-zn2"*) + *"2n-clx" | *"2n-zn2") default_nic="nic_intel-xxv710" ;; - *"2n-tx2"* | *"3n-alt"*) + *"2n-tx2" | *"3n-alt") default_nic="nic_intel-xl710" ;; - *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) + *"1n-aws" | *"2n-aws" | *"3n-aws") default_nic="nic_amazon-nitro-50g" ;; - *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + *"2n-c7gn" | *"3n-c7gn") default_nic="nic_amazon-nitro-100g" ;; - *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + *"1n-c6in" | *"2n-c6in" | *"3n-c6in") default_nic="nic_amazon-nitro-200g" ;; + *"2n-x-"* | *"3n-x-"*) + default_nic="nic_intel-e810cq" + ;; *) default_nic="nic_intel-x710" ;; @@ -931,14 +997,18 @@ function select_tags () { awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");' awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");' awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");' - awk_nics_sub_cmd+='gsub("cx7veat","200ge2p1cx7veat");' + awk_nics_sub_cmd+='gsub("2p1cx7veat","200ge2p1cx7veat");' + awk_nics_sub_cmd+='gsub("6p3cx7veat","200ge6p3cx7veat");' awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");' awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");' + awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");' + awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");' awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");' awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");' awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");' awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");' awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");' + awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");' awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";' awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";' awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";' @@ -982,6 +1052,12 @@ function select_tags () { awk {"$awk_nics_sub_cmd"} || echo "perftest") || die SELECTION_MODE="--test" ;; + *"soak-weekly"* ) + readarray -t test_tag_array <<< $(grep -v "#" \ + ${tfd}/soak_weekly/${DUT}-${NODENESS}-${FLAVOR}.md | + awk {"$awk_nics_sub_cmd"} || echo "perftest") || die + SELECTION_MODE="--test" + ;; *"report-iterative"* ) test_sets=(${TEST_TAG_STRING//:/ }) # Run only one test set per run @@ -1022,54 +1098,58 @@ function select_tags () { # Reasons for blacklisting: # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator. 
case "${TEST_CODE}" in - *"1n-vbox"*) + *"1n-vbox") test_tag_array+=("!avf") test_tag_array+=("!vhost") test_tag_array+=("!flow") ;; - *"1n_tx2"*) + *"1n-alt") test_tag_array+=("!flow") ;; - *"2n-clx"*) + *"2n-clx") test_tag_array+=("!ipsechw") ;; - *"2n-icx"*) + *"2n-icx") test_tag_array+=("!ipsechw") ;; - *"2n-spr"*) + *"2n-spr") ;; - *"2n-tx2"*) + *"2n-tx2") test_tag_array+=("!ipsechw") ;; - *"2n-zn2"*) + *"2n-zn2") test_tag_array+=("!ipsechw") ;; - *"3n-alt"*) + *"3n-alt") test_tag_array+=("!ipsechw") ;; - *"3n-icx"*) + *"3n-icx") test_tag_array+=("!ipsechw") test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710") ;; - *"3n-snr"*) + *"3n-snr") + ;; + *"3n-icxd") ;; - *"3na-spr"*) + *"3na-spr") ;; - *"3nb-spr"*) + *"3nb-spr") ;; - *"3n-tsh"*) + *"3n-tsh") test_tag_array+=("!drv_avf") test_tag_array+=("!ipsechw") ;; - *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) + *"1n-aws" | *"2n-aws" | *"3n-aws") test_tag_array+=("!ipsechw") ;; - *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + *"2n-c7gn" | *"3n-c7gn") test_tag_array+=("!ipsechw") ;; - *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + *"1n-c6in" | *"2n-c6in" | *"3n-c6in") test_tag_array+=("!ipsechw") ;; + *"2n-x-"* | *"3n-x-"*) + ;; esac # We will add excluded NICs. @@ -1077,11 +1157,13 @@ function select_tags () { TAGS=() prefix="" - if [[ "${TEST_CODE}" == "vpp-"* ]]; then - if [[ "${TEST_CODE}" != *"device"* ]]; then - # Automatic prefixing for VPP perf jobs to limit the NIC used. - if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then - prefix="${default_nic}AND" + if [[ "${TEST_CODE}" != *"daily"* ]]; then + if [[ "${TEST_CODE}" == "vpp-"* ]]; then + if [[ "${TEST_CODE}" != *"device"* ]]; then + # Automatic prefixing for VPP perf jobs to limit the NIC used. + if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then + prefix="${default_nic}AND" + fi fi fi fi @@ -1130,100 +1212,101 @@ function select_topology () { case_text="${NODENESS}_${FLAVOR}" case "${case_text}" in - "1n_vbox") + "1n_aws") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml ) + TOPOLOGIES_TAGS="1_node_single_link_topo" + ;; + "1n_c6in") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml ) + TOPOLOGIES_TAGS="1_node_single_link_topo" + ;; + "1n_alt" | "1n_spr") TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template ) TOPOLOGIES_TAGS="2_node_single_link_topo" ;; - "1n_skx" | "1n_tx2" | "1n_spr") + "1n_vbox") TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template ) TOPOLOGIES_TAGS="2_node_single_link_topo" ;; - "2n_skx") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml ) - TOPOLOGIES_TAGS="2_node_*_link_topo" - ;; - "2n_zn2") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml ) - TOPOLOGIES_TAGS="2_node_*_link_topo" - ;; - "3n_skx") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml ) - TOPOLOGIES_TAGS="3_node_*_link_topo" - ;; - "3n_icx") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml ) - TOPOLOGIES_TAGS="3_node_*_link_topo" + "2n_aws") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml ) + TOPOLOGIES_TAGS="2_node_single_link_topo" ;; - "3na_spr") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr*.yaml ) - TOPOLOGIES_TAGS="3_node_*_link_topo" + "2n_c7gn") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml ) + TOPOLOGIES_TAGS="2_node_single_link_topo" ;; - "3nb_spr") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr*.yaml ) - TOPOLOGIES_TAGS="3_node_*_link_topo" + "2n_c6in") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml ) + TOPOLOGIES_TAGS="2_node_single_link_topo" ;; "2n_clx") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml ) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml ) 
TOPOLOGIES_TAGS="2_node_*_link_topo" ;; "2n_icx") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml ) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml ) TOPOLOGIES_TAGS="2_node_*_link_topo" ;; "2n_spr") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr*.yaml ) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml ) TOPOLOGIES_TAGS="2_node_*_link_topo" ;; - "3n_snr") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr*.yaml ) - TOPOLOGIES_TAGS="3_node_single_link_topo" - ;; - "3n_tsh") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml ) - TOPOLOGIES_TAGS="3_node_single_link_topo" - ;; "2n_tx2") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml ) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml ) TOPOLOGIES_TAGS="2_node_single_link_topo" ;; + "2n_zn2") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml ) + TOPOLOGIES_TAGS="2_node_*_link_topo" + ;; "3n_alt") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt*.yaml ) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml ) TOPOLOGIES_TAGS="3_node_single_link_topo" ;; - "1n_aws") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml ) - TOPOLOGIES_TAGS="1_node_single_link_topo" - ;; - "2n_aws") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml ) - TOPOLOGIES_TAGS="2_node_single_link_topo" - ;; "3n_aws") TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml ) TOPOLOGIES_TAGS="3_node_single_link_topo" ;; - "1n_c6gn") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6gn*.yaml ) - TOPOLOGIES_TAGS="1_node_single_link_topo" + "3n_c7gn") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml ) + TOPOLOGIES_TAGS="3_node_single_link_topo" ;; - "2n_c6gn") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6gn*.yaml ) - TOPOLOGIES_TAGS="2_node_single_link_topo" + "3n_c6in") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml ) + TOPOLOGIES_TAGS="3_node_single_link_topo" + ;; + "3n_icx") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml ) + # Trailing underscore is needed to distinguish from 3n_icxd. + TOPOLOGIES_TAGS="3_node_*_link_topo" ;; - "3n_c6gn") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6gn*.yaml ) + "3n_icxd") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml ) TOPOLOGIES_TAGS="3_node_single_link_topo" ;; - "1n_c6in") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml ) - TOPOLOGIES_TAGS="1_node_single_link_topo" + "3n_snr") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml ) + TOPOLOGIES_TAGS="3_node_single_link_topo" ;; - "2n_c6in") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml ) + "3n_tsh") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml ) + TOPOLOGIES_TAGS="3_node_single_link_topo" + ;; + "3na_spr") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml ) + TOPOLOGIES_TAGS="3_node_*_link_topo" + ;; + "3nb_spr") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml ) + TOPOLOGIES_TAGS="3_node_*_link_topo" + ;; + "2n_x"*) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_"${FLAVOR}"*.yaml ) TOPOLOGIES_TAGS="2_node_single_link_topo" ;; - "3n_c6in") - TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml ) + "3n_x"*) + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_"${FLAVOR}"*.yaml ) TOPOLOGIES_TAGS="3_node_single_link_topo" ;; *) @@ -1243,6 +1326,9 @@ function set_environment_variables () { # Depending on testbed topology, overwrite defaults set in the # resources/libraries/python/Constants.py file # + # Only to be called from the reservation function, + # as resulting values may change based on topology data. + # # Variables read: # - TEST_CODE - String affecting test selection, usually jenkins job name. 
# Variables set: @@ -1251,30 +1337,34 @@ function set_environment_variables () { set -exuo pipefail case "${TEST_CODE}" in - *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) + *"1n-aws" | *"2n-aws" | *"3n-aws") export TREX_RX_DESCRIPTORS_COUNT=1024 export TREX_EXTRA_CMDLINE="--mbuf-factor 19" export TREX_CORE_COUNT=6 # Settings to prevent duration stretching. export PERF_TRIAL_STL_DELAY=0.1 ;; - *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + *"2n-c7gn" | *"3n-c7gn") export TREX_RX_DESCRIPTORS_COUNT=1024 export TREX_EXTRA_CMDLINE="--mbuf-factor 19" export TREX_CORE_COUNT=6 # Settings to prevent duration stretching. export PERF_TRIAL_STL_DELAY=0.1 ;; - *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + *"1n-c6in" | *"2n-c6in" | *"3n-c6in") export TREX_RX_DESCRIPTORS_COUNT=1024 export TREX_EXTRA_CMDLINE="--mbuf-factor 19" export TREX_CORE_COUNT=6 # Settings to prevent duration stretching. export PERF_TRIAL_STL_DELAY=0.1 ;; - *"2n-zn2"*) + *"2n-zn2") # Maciek's workaround for Zen2 with lower amount of cores. export TREX_CORE_COUNT=14 + ;; + *"2n-x-"* | *"3n-x-"* ) + export TREX_CORE_COUNT=2 + ;; esac } @@ -1315,15 +1405,15 @@ function untrap_and_unreserve_testbed () { die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2 } case "${TEST_CODE}" in - *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) + *"1n-aws" | *"2n-aws" | *"3n-aws") TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n" terraform_destroy || die "Failed to call terraform destroy." ;; - *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + *"2n-c7gn" | *"3n-c7gn") TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}" terraform_destroy || die "Failed to call terraform destroy." ;; - *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + *"1n-c6in" | *"2n-c6in" | *"3n-c6in") TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}" terraform_destroy || die "Failed to call terraform destroy." ;;
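
A change repeated throughout this diff is dropping the trailing wildcard from the TEST_CODE patterns (for example *"3n-icx"* becomes *"3n-icx"), so an arm matches only when the job name ends with that testbed designator. Because bash case statements take the first matching arm and *"3n-icx") is listed before the newly added *"3n-icxd"), the anchoring is presumably what keeps a 3n-icxd job from being classified as 3n-icx; the new *"2n-x-"* and *"3n-x-"* arms keep their trailing wildcard because the flavor is derived from the rest of the name (FLAVOR="${TEST_CODE#*2n-}"). A minimal sketch of the effect; classify is a throwaway helper for the demo and the job names are illustrative, neither comes from common.sh:

    # Sketch only: anchored patterns distinguish 3n-icx from 3n-icxd even
    # though the icx arm comes first; the old pattern *"3n-icx"* would have
    # matched "...-3n-icxd" as well.
    classify () {
        case "${1}" in
            *"3n-icx")  echo "icx"  ;;
            *"3n-icxd") echo "icxd" ;;
        esac
    }
    classify "csit-vpp-perf-mrr-daily-master-3n-icx"    # prints: icx
    classify "csit-vpp-perf-mrr-daily-master-3n-icxd"   # prints: icxd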