feat(pip): Upgrade
[csit.git] / resources / libraries / bash / function / common.sh
index ce0c526..b194c31 100644
@@ -1,5 +1,5 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Copyright (c) 2021 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Copyright (c) 2022 PANTHEON.tech and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -118,7 +118,7 @@ function activate_virtualenv () {
     env_dir="${root_path}/env"
     req_path=${2-$CSIT_DIR/requirements.txt}
     rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
-    pip3 install virtualenv==20.0.20 || {
+    pip3 install virtualenv==20.15.1 || {
         die "Virtualenv package install failed."
     }
     virtualenv --no-download --python=$(which python3) "${env_dir}" || {
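Only the virtualenv pin changes in this hunk (20.0.20 to 20.15.1). As a minimal sketch of the same flow outside the CI wrapper, assuming python3 and pip3 are on PATH and reusing the "env" directory name from the function above:

    set -euo pipefail
    pip3 install virtualenv==20.15.1                          # same pin as above
    virtualenv --no-download --python="$(which python3)" env  # mirrors the call above
    source env/bin/activate                                   # activation is the usual next step
    python3 -m pip --version                                  # sanity check inside the new env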
@@ -137,19 +137,19 @@ function activate_virtualenv () {
 
 function archive_tests () {
 
-    # Create .tar.xz of generated/tests for archiving.
+    # Create .tar.gz of generated/tests for archiving.
     # To be run after generate_tests, kept separate to offer more flexibility.
 
     # Directory read:
     # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
     # File rewritten:
-    # - ${ARCHIVE_DIR}/tests.tar.xz - Archive of generated tests.
+    # - ${ARCHIVE_DIR}/generated_tests.tar.gz - Archive of generated tests.
 
     set -exuo pipefail
 
-    tar c "${GENERATED_DIR}/tests" | xz -3 > "${ARCHIVE_DIR}/tests.tar.xz" || {
-        die "Error creating archive of generated tests."
-    }
+    pushd "${ARCHIVE_DIR}" || die
+    tar czf "generated_tests.tar.gz" "${GENERATED_DIR}/tests" || true
+    popd || die
 }
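With the switch to gzip and "|| true", a failed archive no longer kills the job, so a quick manual sanity check can be useful. The listing below is a sketch, with ARCHIVE_DIR assumed to be exported by the calling job as elsewhere in common.sh:

    ls -lh "${ARCHIVE_DIR}/generated_tests.tar.gz"
    tar tzf "${ARCHIVE_DIR}/generated_tests.tar.gz" | head -n 5

Because archive_tests passes the absolute "${GENERATED_DIR}/tests" path, GNU tar stores the members under that full path (minus the leading slash).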
 
 
@@ -439,10 +439,14 @@ function get_test_code () {
             NODENESS="1n"
             FLAVOR="skx"
             ;;
-       *"1n-tx2"*)
+        *"1n-tx2"*)
             NODENESS="1n"
             FLAVOR="tx2"
             ;;
+        *"1n-aws"*)
+            NODENESS="1n"
+            FLAVOR="aws"
+            ;;
         *"2n-aws"*)
             NODENESS="2n"
             FLAVOR="aws"
@@ -455,18 +459,26 @@ function get_test_code () {
             NODENESS="2n"
             FLAVOR="skx"
             ;;
-        *"2n-zn2"*)
-            NODENESS="2n"
-            FLAVOR="zn2"
-            ;;
         *"3n-skx"*)
             NODENESS="3n"
             FLAVOR="skx"
             ;;
+        *"2n-zn2"*)
+            NODENESS="2n"
+            FLAVOR="zn2"
+            ;;
         *"2n-clx"*)
             NODENESS="2n"
             FLAVOR="clx"
             ;;
+        *"2n-icx"*)
+            NODENESS="2n"
+            FLAVOR="icx"
+            ;;
+        *"3n-icx"*)
+            NODENESS="3n"
+            FLAVOR="icx"
+            ;;
         *"2n-dnv"*)
             NODENESS="2n"
             FLAVOR="dnv"
@@ -475,6 +487,10 @@ function get_test_code () {
             NODENESS="3n"
             FLAVOR="dnv"
             ;;
+        *"3n-snr"*)
+            NODENESS="3n"
+            FLAVOR="snr"
+            ;;
         *"2n-tx2"*)
             NODENESS="2n"
             FLAVOR="tx2"
@@ -483,6 +499,10 @@ function get_test_code () {
             NODENESS="3n"
             FLAVOR="tsh"
             ;;
+        *"3n-alt"*)
+            NODENESS="3n"
+            FLAVOR="alt"
+            ;;
     esac
 }
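All new branches (1n-aws, 2n-icx, 3n-icx, 3n-snr, 3n-alt) follow the same substring match on the job name. A small illustration; the job name below is made up and only its 2n-icx substring matters:

    TEST_CODE="csit-vpp-perf-mrr-weekly-2n-icx"   # hypothetical job name
    case "${TEST_CODE}" in
        *"2n-icx"*)
            NODENESS="2n"
            FLAVOR="icx"
            ;;
    esac
    echo "${NODENESS}_${FLAVOR}"                  # prints: 2n_icx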
 
@@ -601,11 +621,48 @@ function move_archives () {
 }
 
 
+function post_process_robot_outputs () {
+
+    # Generate INFO level output_info.xml by rebot.
+    # Archive UTI raw json outputs.
+    #
+    # Variables read:
+    # - ARCHIVE_DIR - Path to post-processed files.
+
+    set -exuo pipefail
+
+    # Compress raw json outputs, as they will never be post-processed.
+    pushd "${ARCHIVE_DIR}" || die
+    if [ -d "tests" ]; then
+        # Use deterministic order.
+        options=("--sort=name")
+        # Info outputs are kept where they are;
+        # everything else (the raw json outputs) gets archived and then removed.
+        options+=("--exclude=*.info.json")
+        tar czf "generated_output_raw.tar.gz" "${options[@]}" "tests" || true
+        # Tar can remove files while archiving, but chokes (non-deterministically)
+        # when asked to remove dirs (they are not empty, as info files remain).
+        # So the raw files need to be deleted manually.
+        find "tests" -type f -name "*.raw.json" -delete || true
+    fi
+    popd || die
+
+    # Generate INFO level output_info.xml for post-processing.
+    all_options=("--loglevel" "INFO")
+    all_options+=("--log" "none")
+    all_options+=("--report" "none")
+    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
+    all_options+=("${ARCHIVE_DIR}/output.xml")
+    rebot "${all_options[@]}" || true
+}
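The raw-output handling hinges on GNU tar's --exclude glob plus the later find -delete. A toy reproduction of that behaviour; all file names below are illustrative, only the globs and tar options come from the function:

    mkdir -p /tmp/uti_demo/tests/suite && cd /tmp/uti_demo
    touch tests/suite/setup.info.json tests/suite/teardown.raw.json
    tar czf generated_output_raw.tar.gz --sort=name --exclude="*.info.json" tests
    tar tzf generated_output_raw.tar.gz      # the raw.json member is in, info.json is not
    find tests -type f -name "*.raw.json" -delete
    ls tests/suite                           # only setup.info.json remains in place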
+
+
 function prepare_topology () {
 
     # Prepare virtual testbed topology if needed based on flavor.
 
     # Variables read:
+    # - TEST_CODE - String affecting test selection, usually jenkins job name.
     # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
     # - FLAVOR - Node flavor string, e.g. "clx" or "skx".
     # Functions called:
@@ -617,11 +674,8 @@ function prepare_topology () {
 
     case_text="${NODENESS}_${FLAVOR}"
     case "${case_text}" in
-        "2n_aws")
-            terraform_init || die "Failed to call terraform init."
-            terraform_apply || die "Failed to call terraform apply."
-            ;;
-        "3n_aws")
+        "1n_aws" | "2n_aws" | "3n_aws")
+            export TF_VAR_testbed_name="${TEST_CODE}"
             terraform_init || die "Failed to call terraform init."
             terraform_apply || die "Failed to call terraform apply."
             ;;
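The functional addition here is the TF_VAR_testbed_name export, which Terraform reads as the input variable testbed_name via its standard TF_VAR_ convention. terraform_init and terraform_apply are CSIT wrappers, so the direct calls below are only a sketch of what effectively happens:

    export TF_VAR_testbed_name="${TEST_CODE}"   # e.g. the Jenkins job name
    terraform init
    terraform apply -auto-approve               # var.testbed_name now carries TEST_CODE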
@@ -708,7 +762,9 @@ function reserve_and_cleanup_testbed () {
 
 function run_pybot () {
 
-    # Run pybot with options based on input variables. Create output_info.xml
+    # Run pybot with options based on input variables.
+    # Generate INFO level output_info.xml by rebot.
+    # Archive UTI raw json outputs.
     #
     # Variables read:
     # - CSIT_DIR - Path to existing root of local CSIT git repository.
@@ -723,7 +779,6 @@ function run_pybot () {
     set -exuo pipefail
 
     all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
-    all_options+=("--noncritical" "EXPECTED_FAILING")
     all_options+=("${EXPANDED_TAGS[@]}")
 
     pushd "${CSIT_DIR}" || die "Change directory operation failed."
@@ -732,13 +787,8 @@ function run_pybot () {
     PYBOT_EXIT_STATUS="$?"
     set -e
 
-    # Generate INFO level output_info.xml for post-processing.
-    all_options=("--loglevel" "INFO")
-    all_options+=("--log" "none")
-    all_options+=("--report" "none")
-    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
-    all_options+=("${ARCHIVE_DIR}/output.xml")
-    rebot "${all_options[@]}" || true
+    post_process_robot_outputs || die
+
     popd || die "Change directory operation failed."
 }
 
@@ -805,17 +855,24 @@ function select_tags () {
     set -exuo pipefail
 
     # NIC SELECTION
-    start_pattern='^  TG:'
+    case "${TEST_CODE}" in
+        *"1n-aws"*)
+            start_pattern='^  SUT:'
+            ;;
+        *)
+            start_pattern='^  TG:'
+            ;;
+    esac
     end_pattern='^ \? \?[A-Za-z0-9]\+:'
-    # Remove the TG section from topology file
+    # Remove the matched section from the topology file
     sed_command="/${start_pattern}/,/${end_pattern}/d"
-    # All topologies DUT NICs
+    # All topologies' NICs
     available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
                 | grep -hoP "model: \K.*" | sort -u)
-    # Selected topology DUT NICs
+    # Selected topology NICs
     reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
                | grep -hoP "model: \K.*" | sort -u)
-    # All topologies DUT NICs - Selected topology DUT NICs
+    # All topologies' NICs - Selected topology NICs
     exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
         die "Computation of excluded NICs failed."
     }
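The exclusion list is still a set difference computed by comm -13: models present somewhere under TOPOLOGIES_DIR but absent from the reserved topology. A worked example with made-up model names:

    reserved=$(printf 'Intel-E810CQ\nIntel-XXV710\n')                    # sorted, as in the function
    available=$(printf 'Amazon-Nitro-50G\nIntel-E810CQ\nIntel-XXV710\n')
    comm -13 <(echo "${reserved}") <(echo "${available}")
    # prints: Amazon-Nitro-50G  (only this model would end up in exclude_nics)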
@@ -825,16 +882,22 @@ function select_tags () {
         *"3n-dnv"* | *"2n-dnv"*)
             default_nic="nic_intel-x553"
             ;;
+        *"3n-snr"*)
+            default_nic="nic_intel-e810xxv"
+            ;;
         *"3n-tsh"*)
             default_nic="nic_intel-x520-da2"
             ;;
+        *"3n-icx"* | *"2n-icx"*)
+            default_nic="nic_intel-xxv710"
+            ;;
         *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
             default_nic="nic_intel-xxv710"
             ;;
-        *"2n-tx2"* | *"mrr-daily-master")
+        *"2n-tx2"* | *"3n-alt"* | *"mrr-daily-master")
             default_nic="nic_intel-xl710"
             ;;
-        *"2n-aws"* | *"3n-aws"*)
+        *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
             default_nic="nic_amazon-nitro-50g"
             ;;
         *)
@@ -858,13 +921,15 @@ function select_tags () {
     awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
     awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
     awk_nics_sub_cmd+='else drv="";'
-    awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-"$1"-" drv $11"-"$5'
+    awk_nics_sub_cmd+='if ($1 =="-") cores="";'
+    awk_nics_sub_cmd+='else cores=$1;'
+    awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-" cores "-" drv $11"-"$5'
 
     # Tag file directory shorthand.
     tfd="${JOB_SPECS_DIR}"
     case "${TEST_CODE}" in
         # Select specific performance tests based on jenkins job type variable.
-        *"vpp-device"* )
+        *"device"* )
             readarray -t test_tag_array <<< $(grep -v "#" \
                 ${tfd}/vpp_device/${DUT}-${NODENESS}-${FLAVOR}.md |
                 awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
@@ -931,6 +996,10 @@ function select_tags () {
         *"1n-vbox"*)
             test_tag_array+=("!avf")
             test_tag_array+=("!vhost")
+            test_tag_array+=("!flow")
+            ;;
+        *"1n_tx2"*)
+            test_tag_array+=("!flow")
             ;;
         *"2n-skx"*)
             test_tag_array+=("!ipsechw")
@@ -943,18 +1012,25 @@ function select_tags () {
         *"2n-clx"*)
             test_tag_array+=("!ipsechw")
             ;;
+        *"2n-icx"*)
+            test_tag_array+=("!ipsechw")
+            ;;
+        *"3n-icx"*)
+            test_tag_array+=("!ipsechw")
+            # Not enough nic_intel-xxv710 to support double link tests.
+            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
+            ;;
         *"2n-zn2"*)
             test_tag_array+=("!ipsechw")
             ;;
         *"2n-dnv"*)
-            test_tag_array+=("!ipsechw")
             test_tag_array+=("!memif")
             test_tag_array+=("!srv6_proxy")
             test_tag_array+=("!vhost")
             test_tag_array+=("!vts")
             test_tag_array+=("!drv_avf")
             ;;
-        *"2n-tx2"*)
+        *"2n-tx2"* | *"3n-alt"*)
             test_tag_array+=("!ipsechw")
             ;;
         *"3n-dnv"*)
@@ -964,12 +1040,14 @@ function select_tags () {
             test_tag_array+=("!vts")
             test_tag_array+=("!drv_avf")
             ;;
+        *"3n-snr"*)
+            ;;
         *"3n-tsh"*)
             # 3n-tsh only has x520 NICs which don't work with AVF
             test_tag_array+=("!drv_avf")
             test_tag_array+=("!ipsechw")
             ;;
-        *"2n-aws"* | *"3n-aws"*)
+        *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
             test_tag_array+=("!ipsechw")
             ;;
     esac
@@ -982,12 +1060,14 @@ function select_tags () {
 
     set +x
     if [[ "${TEST_CODE}" == "vpp-"* ]]; then
-        # Automatic prefixing for VPP jobs to limit the NIC used and
-        # traffic evaluation to MRR.
-        if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
-            prefix="${prefix}mrrAND"
-        else
-            prefix="${prefix}mrrAND${default_nic}AND"
+        if [[ "${TEST_CODE}" != *"device"* ]]; then
+            # Automatic prefixing for VPP perf jobs to limit the NIC used and
+            # traffic evaluation to MRR.
+            if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
+                prefix="${prefix}mrrAND"
+            else
+                prefix="${prefix}mrrAND${default_nic}AND"
+            fi
         fi
     fi
     for tag in "${test_tag_array[@]}"; do
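For VPP jobs the MRR/NIC prefix is now skipped for device jobs; for perf jobs it is prepended to every include tag. What that does to a single tag (values illustrative, the loop body itself sits outside this hunk):

    prefix="mrrANDnic_intel-xxv710AND"   # perf branch, default NIC, no nic_ override
    tag="ethip4-ip4base"                 # hypothetical entry of test_tag_array
    echo "${prefix}${tag}"               # -> mrrANDnic_intel-xxv710ANDethip4-ip4base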
@@ -1054,10 +1134,18 @@ function select_topology () {
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
             TOPOLOGIES_TAGS="3_node_*_link_topo"
             ;;
+        "3n_icx")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml )
+            TOPOLOGIES_TAGS="3_node_*_link_topo"
+            ;;
         "2n_clx")
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
             TOPOLOGIES_TAGS="2_node_*_link_topo"
             ;;
+        "2n_icx")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml )
+            TOPOLOGIES_TAGS="2_node_*_link_topo"
+            ;;
         "2n_dnv")
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
@@ -1074,12 +1162,20 @@ function select_topology () {
             TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
+        "3n_alt")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt*.yaml )
+            TOPOLOGIES_TAGS="3_node_single_link_topo"
+            ;;
+        "1n_aws")
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
+            TOPOLOGIES_TAGS="1_node_single_link_topo"
+            ;;
         "2n_aws")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_aws*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
             TOPOLOGIES_TAGS="2_node_single_link_topo"
             ;;
         "3n_aws")
-            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_aws*.yaml )
+            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
             TOPOLOGIES_TAGS="3_node_single_link_topo"
             ;;
         *)
@@ -1107,13 +1203,17 @@ function set_environment_variables () {
     set -exuo pipefail
 
     case "${TEST_CODE}" in
-        *"2n-aws"* | *"3n-aws"*)
-            # T-Rex 2.88 workaround for ENA NICs
+        *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+            # T-Rex 2.88+ workaround for ENA NICs.
             export TREX_RX_DESCRIPTORS_COUNT=1024
             export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
-            # Settings to prevent duration stretching
+            export TREX_CORE_COUNT=6
+            # Settings to prevent duration stretching.
             export PERF_TRIAL_STL_DELAY=0.1
             ;;
+        *"2n-zn2"*)
+            # Maciek's workaround for Zen2 testbeds with a lower core count.
+            export TREX_CORE_COUNT=14
+            ;;
     esac
 }
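TREX_CORE_COUNT is a plain environment override; whatever starts T-Rex downstream is expected to read it. A generic consumption sketch, where the fallback of 8 is an assumption and not taken from CSIT:

    core_count="${TREX_CORE_COUNT:-8}"   # assumed fallback when no override is exported
    echo "Would start T-Rex with ${core_count} cores."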
 
@@ -1153,7 +1253,7 @@ function untrap_and_unreserve_testbed () {
             die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
         }
         case "${TEST_CODE}" in
-            *"2n-aws"* | *"3n-aws"*)
+            *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
                 terraform_destroy || die "Failed to call terraform destroy."
                 ;;
             *)