X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=resources%2Flibraries%2Fbash%2Ffunction%2Fcommon.sh;h=78a99dae0da66393eef0bb6bc931fcc8f58f1253;hb=f80c4dc842b77e9231e443f7ef10e4a735f18e1f;hp=d98db18149746d1ea1aa75724a1291920127af12;hpb=d66bbe4d85079621e13d6e2940c933da1297ba2b;p=csit.git diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh index d98db18149..78a99dae0d 100644 --- a/resources/libraries/bash/function/common.sh +++ b/resources/libraries/bash/function/common.sh @@ -45,7 +45,7 @@ function activate_docker_topology () { device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})" case_text="${NODENESS}_${FLAVOR}" case "${case_text}" in - "1n_skx" | "1n_tx2") + "1n_skx" | "1n_tx2" | "1n_spr") # We execute reservation over csit-shim-dcr (ssh) which runs sourced # script's functions. Env variables are read from ssh output # back to localhost for further processing. @@ -158,7 +158,7 @@ function check_download_dir () { # Fail if there are no files visible in ${DOWNLOAD_DIR}. # # Variables read: - # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from. + # - DOWNLOAD_DIR - Path to directory robot takes the build to test from. # Directories read: # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed. # Functions called: @@ -236,9 +236,6 @@ function common_dirs () { TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || { die "Readlink failed." } - DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || { - die "Readlink failed." - } PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || { die "Readlink failed." } @@ -258,7 +255,7 @@ function common_dirs () { } -function compose_pybot_arguments () { +function compose_robot_arguments () { # Variables read: # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed. @@ -268,21 +265,21 @@ function compose_pybot_arguments () { # - TEST_CODE - The test selection string from environment or argument. # - SELECTION_MODE - Selection criteria [test, suite, include, exclude]. # Variables set: - # - PYBOT_ARGS - String holding part of all arguments for pybot. - # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags. + # - ROBOT_ARGS - String holding part of all arguments for robot. + # - EXPANDED_TAGS - Array of strings robot arguments compiled from tags. set -exuo pipefail # No explicit check needed with "set -u". - PYBOT_ARGS=("--loglevel" "TRACE") - PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}") + ROBOT_ARGS=("--loglevel" "TRACE") + ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}") case "${TEST_CODE}" in *"device"*) - PYBOT_ARGS+=("--suite" "tests.${DUT}.device") + ROBOT_ARGS+=("--suite" "tests.${DUT}.device") ;; *"perf"*) - PYBOT_ARGS+=("--suite" "tests.${DUT}.perf") + ROBOT_ARGS+=("--suite" "tests.${DUT}.perf") ;; *) die "Unknown specification: ${TEST_CODE}" @@ -319,7 +316,7 @@ function deactivate_docker_topology () { case_text="${NODENESS}_${FLAVOR}" case "${case_text}" in - "1n_skx" | "1n_tx2") + "1n_skx" | "1n_tx2" | "1n_spr") ssh="ssh root@172.17.0.1 -p 6022" env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die # The "declare -f" output is long and boring. @@ -360,19 +357,19 @@ function die () { } -function die_on_pybot_error () { +function die_on_robot_error () { # Source this fragment if you want to abort on any failed test case. # # Variables read: - # - PYBOT_EXIT_STATUS - Set by a pybot running fragment. + # - ROBOT_EXIT_STATUS - Set by a robot running fragment. 
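+    # - ROBOT_EXIT_STATUS - Set by a robot running fragment.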
# Functions called: # - die - Print to stderr and exit. set -exuo pipefail - if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then - die "Test failures are present!" "${PYBOT_EXIT_STATUS}" + if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then + die "Test failures are present!" "${ROBOT_EXIT_STATUS}" fi } @@ -385,7 +382,7 @@ function generate_tests () { # within any subdirectory after copying. # This is a separate function, because this code is called - # both by autogen checker and entries calling run_pybot. + # both by autogen checker and entries calling run_robot. # Directories read: # - ${CSIT_DIR}/tests - Used as templates for the generated tests. @@ -439,6 +436,10 @@ function get_test_code () { NODENESS="1n" FLAVOR="skx" ;; + *"1n-spr"*) + NODENESS="1n" + FLAVOR="spr" + ;; *"1n-tx2"*) NODENESS="1n" FLAVOR="tx2" @@ -455,6 +456,30 @@ function get_test_code () { NODENESS="3n" FLAVOR="aws" ;; + *"1n-c6gn"*) + NODENESS="1n" + FLAVOR="c6gn" + ;; + *"2n-c6gn"*) + NODENESS="2n" + FLAVOR="c6gn" + ;; + *"3n-c6gn"*) + NODENESS="3n" + FLAVOR="c6gn" + ;; + *"1n-c6in"*) + NODENESS="1n" + FLAVOR="c6in" + ;; + *"2n-c6in"*) + NODENESS="2n" + FLAVOR="c6in" + ;; + *"3n-c6in"*) + NODENESS="3n" + FLAVOR="c6in" + ;; *"2n-zn2"*) NODENESS="2n" FLAVOR="zn2" @@ -475,8 +500,12 @@ function get_test_code () { NODENESS="3n" FLAVOR="icx" ;; - *"3n-spr"*) - NODENESS="3n" + *"3na-spr"*) + NODENESS="3na" + FLAVOR="spr" + ;; + *"3nb-spr"*) + NODENESS="3nb" FLAVOR="spr" ;; *"3n-snr"*) @@ -508,6 +537,8 @@ function get_test_tag_string () { # Variables set: # - TEST_TAG_STRING - The string following trigger word in gerrit comment. # May be empty, or even not set on event types not adding comment. + # Variables exported optionally: + # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger. # TODO: ci-management scripts no longer need to perform this. @@ -613,25 +644,6 @@ function move_archives () { } -function post_process_robot_outputs () { - - # Generate INFO level output_info.xml by rebot. - # - # Variables read: - # - ARCHIVE_DIR - Path to post-processed files. - - set -exuo pipefail - - # Generate INFO level output_info.xml for post-processing. - all_options=("--loglevel" "INFO") - all_options+=("--log" "none") - all_options+=("--report" "none") - all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml") - all_options+=("${ARCHIVE_DIR}/output.xml") - rebot "${all_options[@]}" || true -} - - function prepare_topology () { # Prepare virtual testbed topology if needed based on flavor. @@ -655,7 +667,25 @@ function prepare_topology () { export TF_VAR_testbed_name="${TEST_CODE}" TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n" terraform_init || die "Failed to call terraform init." - trap 'terraform_destroy' ERR || { + trap "terraform_destroy" ERR EXIT || { + die "Trap attempt failed, please cleanup manually. Aborting!" + } + terraform_apply || die "Failed to call terraform apply." + ;; + "1n_c6gn" | "2n_c6gn" | "3n_c6gn") + export TF_VAR_testbed_name="${TEST_CODE}" + TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6gn" + terraform_init || die "Failed to call terraform init." + trap "terraform_destroy" ERR EXIT || { + die "Trap attempt failed, please cleanup manually. Aborting!" + } + terraform_apply || die "Failed to call terraform apply." + ;; + "1n_c6in" | "2n_c6in" | "3n_c6in") + export TF_VAR_testbed_name="${TEST_CODE}" + TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in" + terraform_init || die "Failed to call terraform init." 
+ trap "terraform_destroy" ERR EXIT || { die "Trap attempt failed, please cleanup manually. Aborting!" } terraform_apply || die "Failed to call terraform apply." @@ -741,34 +771,31 @@ function reserve_and_cleanup_testbed () { } -function run_pybot () { +function run_robot () { - # Run pybot with options based on input variables. - # Generate INFO level output_info.xml by rebot. + # Run robot with options based on input variables. # # Variables read: # - CSIT_DIR - Path to existing root of local CSIT git repository. # - ARCHIVE_DIR - Path to store robot result files in. - # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh + # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.sh # - GENERATED_DIR - Tests are assumed to be generated under there. # Variables set: - # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation. + # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation. # Functions called: # - die - Print to stderr and exit. set -exuo pipefail - all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}") + all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}") all_options+=("${EXPANDED_TAGS[@]}") pushd "${CSIT_DIR}" || die "Change directory operation failed." set +e robot "${all_options[@]}" "${GENERATED_DIR}/tests/" - PYBOT_EXIT_STATUS="$?" + ROBOT_EXIT_STATUS="$?" set -e - post_process_robot_outputs || die - popd || die "Change directory operation failed." } @@ -836,7 +863,7 @@ function select_tags () { # NIC SELECTION case "${TEST_CODE}" in - *"1n-aws"*) + *"1n-aws"* | *"1n-c6gn"* | *"1n-c6in"*) start_pattern='^ SUT:' ;; *) @@ -866,9 +893,15 @@ function select_tags () { default_nic="nic_intel-x520-da2" ;; *"3n-icx"* | *"2n-icx"*) - default_nic="nic_intel-xxv710" + default_nic="nic_intel-e810cq" + ;; + *"3na-spr"*) + default_nic="nic_mellanox-cx7veat" + ;; + *"3nb-spr"*) + default_nic="nic_intel-e810cq" ;; - *"3n-spr"* | *"2n-spr"*) + *"2n-spr"*) default_nic="nic_intel-e810cq" ;; *"2n-clx"* | *"2n-zn2"*) @@ -880,6 +913,12 @@ function select_tags () { *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) default_nic="nic_amazon-nitro-50g" ;; + *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + default_nic="nic_amazon-nitro-100g" + ;; + *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + default_nic="nic_amazon-nitro-200g" + ;; *) default_nic="nic_intel-x710" ;; @@ -892,10 +931,14 @@ function select_tags () { awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");' awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");' awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");' + awk_nics_sub_cmd+='gsub("cx7veat","200ge2p1cx7veat");' + awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");' awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");' awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");' awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");' awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");' + awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");' + awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");' awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";' awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";' awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";' @@ -994,7 +1037,6 @@ function select_tags () { test_tag_array+=("!ipsechw") ;; *"2n-spr"*) - test_tag_array+=("!ipsechw") ;; *"2n-tx2"*) test_tag_array+=("!ipsechw") @@ -1011,8 +1053,9 @@ function select_tags () { ;; *"3n-snr"*) ;; - *"3n-spr"*) - test_tag_array+=("!ipsechw") + *"3na-spr"*) + ;; + *"3nb-spr"*) ;; *"3n-tsh"*) test_tag_array+=("!drv_avf") @@ -1021,6 +1064,12 @@ function select_tags () 
{ *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) test_tag_array+=("!ipsechw") ;; + *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + test_tag_array+=("!ipsechw") + ;; + *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + test_tag_array+=("!ipsechw") + ;; esac # We will add excluded NICs. @@ -1028,19 +1077,15 @@ function select_tags () { TAGS=() prefix="" - - set +x if [[ "${TEST_CODE}" == "vpp-"* ]]; then if [[ "${TEST_CODE}" != *"device"* ]]; then - # Automatic prefixing for VPP perf jobs to limit the NIC used and - # traffic evaluation to MRR. - if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then - prefix="${prefix}mrrAND" - else - prefix="${prefix}mrrAND${default_nic}AND" + # Automatic prefixing for VPP perf jobs to limit the NIC used. + if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then + prefix="${default_nic}AND" fi fi fi + set +x for tag in "${test_tag_array[@]}"; do if [[ "${tag}" == "!"* ]]; then # Exclude tags are not prefixed. @@ -1089,7 +1134,7 @@ function select_topology () { TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template ) TOPOLOGIES_TAGS="2_node_single_link_topo" ;; - "1n_skx" | "1n_tx2") + "1n_skx" | "1n_tx2" | "1n_spr") TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template ) TOPOLOGIES_TAGS="2_node_single_link_topo" ;; @@ -1109,6 +1154,14 @@ function select_topology () { TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml ) TOPOLOGIES_TAGS="3_node_*_link_topo" ;; + "3na_spr") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr*.yaml ) + TOPOLOGIES_TAGS="3_node_*_link_topo" + ;; + "3nb_spr") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr*.yaml ) + TOPOLOGIES_TAGS="3_node_*_link_topo" + ;; "2n_clx") TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml ) TOPOLOGIES_TAGS="2_node_*_link_topo" @@ -1149,6 +1202,30 @@ function select_topology () { TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml ) TOPOLOGIES_TAGS="3_node_single_link_topo" ;; + "1n_c6gn") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6gn*.yaml ) + TOPOLOGIES_TAGS="1_node_single_link_topo" + ;; + "2n_c6gn") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6gn*.yaml ) + TOPOLOGIES_TAGS="2_node_single_link_topo" + ;; + "3n_c6gn") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6gn*.yaml ) + TOPOLOGIES_TAGS="3_node_single_link_topo" + ;; + "1n_c6in") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml ) + TOPOLOGIES_TAGS="1_node_single_link_topo" + ;; + "2n_c6in") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml ) + TOPOLOGIES_TAGS="2_node_single_link_topo" + ;; + "3n_c6in") + TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml ) + TOPOLOGIES_TAGS="3_node_single_link_topo" + ;; *) # No falling back to default, that should have been done # by the function which has set NODENESS and FLAVOR. @@ -1175,7 +1252,20 @@ function set_environment_variables () { case "${TEST_CODE}" in *"1n-aws"* | *"2n-aws"* | *"3n-aws"*) - # T-Rex 2.88+ workaround for ENA NICs. + export TREX_RX_DESCRIPTORS_COUNT=1024 + export TREX_EXTRA_CMDLINE="--mbuf-factor 19" + export TREX_CORE_COUNT=6 + # Settings to prevent duration stretching. + export PERF_TRIAL_STL_DELAY=0.1 + ;; + *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + export TREX_RX_DESCRIPTORS_COUNT=1024 + export TREX_EXTRA_CMDLINE="--mbuf-factor 19" + export TREX_CORE_COUNT=6 + # Settings to prevent duration stretching. 
+ export PERF_TRIAL_STL_DELAY=0.1 + ;; + *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) export TREX_RX_DESCRIPTORS_COUNT=1024 export TREX_EXTRA_CMDLINE="--mbuf-factor 19" export TREX_CORE_COUNT=6 @@ -1229,6 +1319,14 @@ function untrap_and_unreserve_testbed () { TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n" terraform_destroy || die "Failed to call terraform destroy." ;; + *"1n-c6gn"* | *"2n-c6gn"* | *"3n-c6gn"*) + TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}" + terraform_destroy || die "Failed to call terraform destroy." + ;; + *"1n-c6in"* | *"2n-c6in"* | *"3n-c6in"*) + TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}" + terraform_destroy || die "Failed to call terraform destroy." + ;; *) ;; esac
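
What follows is not part of the patch: a minimal sketch of how a perf entry script is expected to chain the renamed helpers after this change. It is assembled from the function docstrings above rather than copied from an actual CSIT entry script, so the script name and location, the relative path to common.sh, the exact call order, and the omission of steps such as virtualenv activation and archiving are simplifying assumptions.

#!/usr/bin/env bash

# Hypothetical entry script, assumed to live in resources/libraries/bash/entry/
# next to the real entry scripts; the relative source path below relies on that.

set -exuo pipefail

source "$(dirname "${BASH_SOURCE[0]}")/../function/common.sh" || {
    echo "Source failed." >&2
    exit 1
}

common_dirs || die
get_test_code "${1-}" || die          # Sets NODENESS and FLAVOR, e.g. 3na/spr or 2n/c6in.
get_test_tag_string || die            # May set TEST_TAG_STRING from a gerrit comment.
set_environment_variables || die      # Exports T-Rex tuning for aws/c6gn/c6in flavors.
select_topology || die                # Fills TOPOLOGIES for the NODENESS_FLAVOR pair.
prepare_topology || die               # Runs terraform apply for aws/c6gn/c6in testbeds.
reserve_and_cleanup_testbed || die    # Sets WORKING_TOPOLOGY to the reserved yaml file.
select_tags || die                    # Builds TAGS; only the default NIC gets auto-prefixed now.
compose_robot_arguments || die        # Sets ROBOT_ARGS and EXPANDED_TAGS (was compose_pybot_arguments).
generate_tests || die                 # Populates GENERATED_DIR consumed by run_robot.
run_robot || die                      # Sets ROBOT_EXIT_STATUS; rebot INFO post-processing is gone.
untrap_and_unreserve_testbed || die   # Also runs terraform destroy for cloud flavors.
die_on_robot_error || die             # Aborts the job when ROBOT_EXIT_STATUS is non-zero.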