1 # Copyright (c) 2020 Cisco and/or its affiliates.
2 # Copyright (c) 2020 PANTHEON.tech and/or its affiliates.
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at:
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
17 # This library defines functions used by multiple entry scripts.
18 # Keep functions ordered alphabetically, please.
20 # TODO: Add a link to bash style guide.
21 # TODO: Consider putting every die into a {} block,
22 # the code might become more readable (but longer).
function activate_docker_topology () {

    # Create virtual vpp-device topology. Output of the function is topology
    # file describing created environment saved to a file.
    #
    # Variables read:
    # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
    # - TOPOLOGIES - Available topologies.
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
    # - IMAGE_VER_FILE - Name of file that contains the image version.
    # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
    # Variables set:
    # - WORKING_TOPOLOGY - Path to topology file.
    # Traps registered:
    # - EXIT - Calls deactivate_docker_topology to remove the containers.
    # Functions called:
    # - die - Print to stderr and exit.

    # Bring in activate_wrapper and related helpers for vpp-device.
    source "${BASH_FUNCTION_DIR}/device.sh" || {
    # Read the container image version string from the version file.
    device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
            # We execute reservation over csit-shim-dcr (ssh) which runs sourced
            # script's functions. Env variables are read from ssh output
            # back to localhost for further processing.
            # Shim and Jenkins executor are in the same network on the same host
            # Connect to docker's default gateway IP and shim's exposed port
            ssh="ssh root@172.17.0.1 -p 6022"
            run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
            # The "declare -f" output is long and boring.
            # backtics to avoid https://midnight-commander.org/ticket/2142
            env_vars=`${ssh} "$(declare -f); ${run}"` || {
                die "Topology reservation via shim-dcr failed!"
            # Import remotely produced env variables, dropping docker
            # binary path noise from the captured ssh output.
            source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
            # We execute reservation on localhost. Sourced script automatically
            # sets environment variables for further processing.
            activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
            die "Unknown specification: ${case_text}!"

    # Ensure containers are removed even on abnormal exit.
    trap 'deactivate_docker_topology' EXIT || {
        die "Trap attempt failed, please cleanup manually. Aborting!"

    # Replace all variables in template with those in environment.
    source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
        die "Topology file create failed!"

    WORKING_TOPOLOGY="/tmp/topology.yaml"
    mv topo.yml "${WORKING_TOPOLOGY}" || {
        die "Topology move failed!"
    # Echo the topology (minus password lines) into the log for debugging.
    cat ${WORKING_TOPOLOGY} | grep -v password || {
        die "Topology read failed!"
function activate_virtualenv () {

    # Update virtualenv pip package, delete and create virtualenv directory,
    # activate the virtualenv, install requirements, set PYTHONPATH.
    #
    # Arguments:
    # - ${1} - Path to existing directory for creating virtualenv in.
    #   If missing or empty, ${CSIT_DIR} is used.
    # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
    # Variables read:
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # Variables exported:
    # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
    # Functions called:
    # - die - Print to stderr and exit.

    # NOTE(review): "${1-...}" (dash, not colon-dash) keeps an explicit empty
    # ${1}, while the header says empty falls back to CSIT_DIR — confirm.
    root_path="${1-$CSIT_DIR}"
    env_dir="${root_path}/env"
    req_path=${2-$CSIT_DIR/requirements.txt}
    # Wipe any previous virtualenv to guarantee a clean dependency set.
    rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
    # Pin virtualenv version for reproducible behavior across executors.
    pip3 install virtualenv==20.0.20 || {
        die "Virtualenv package install failed."
    virtualenv --no-download --python=$(which python3) "${env_dir}" || {
        die "Virtualenv creation for $(which python3) failed."
    source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
    pip3 install -r "${req_path}" || {
        die "Requirements installation failed."
    # Most CSIT Python scripts assume PYTHONPATH is set and exported.
    export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
function archive_tests () {

    # Create .tar.xz of generated/tests for archiving.
    # To be run after generate_tests, kept separate to offer more flexibility.
    #
    # Directories read:
    # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
    # Files updated:
    # - ${ARCHIVE_DIR}/tests.tar.xz - Archive of generated tests.
    # Functions called:
    # - die - Print to stderr and exit.

    # xz -3 trades some compression ratio for speed; the tree is small.
    tar c "${GENERATED_DIR}/tests" | xz -3 > "${ARCHIVE_DIR}/tests.tar.xz" || {
        die "Error creating archive of generated tests."
function check_download_dir () {

    # Fail if there are no files visible in ${DOWNLOAD_DIR}.
    #
    # Variables read:
    # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
    # Directories read:
    # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    # "ls -A" includes dotfiles but not "." / ".."; empty output means
    # nothing was downloaded and the test run cannot proceed.
    if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
        die "No artifacts downloaded!"
    fi
}
function check_prerequisites () {

    # Fail if prerequisites are not met.
    #
    # Functions called:
    # - installed - Check if application is installed/present in system.
    # - die - Print to stderr and exit.

    set -exuo pipefail

    # sshpass is required for non-interactive ssh to testbeds.
    if ! installed sshpass; then
        die "Please install sshpass before continue!"
    fi
}
function common_dirs () {

    # Set global variables, create some directories (without touching content).
    #
    # Variables set:
    # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # - JOB_SPECS_DIR - Path to existing directory with job test specifications.
    # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
    # - TOOLS_DIR - Path to existing resources subdirectory "tools".
    # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
    # - ARCHIVE_DIR - Path to created CSIT subdirectory "archives".
    #   The name is chosen to match what ci-management expects.
    # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
    # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
    # Directories created if not present:
    # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
    # Functions called:
    # - die - Print to stderr and exit.

    # "readlink -e" demands the path exists; "-f" (below) tolerates
    # not-yet-created dirs which are mkdir-ed right after.
    this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
        die "Some error during locating of this source file."
    BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
        die "Some error during dirname call."
    # Current working directory could be in a different repo, e.g. VPP.
    pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed"
    relative_csit_dir=$(git rev-parse --show-toplevel) || {
        die "Git rev-parse failed."
    CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
    popd || die "Popd failed."
    TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
        die "Readlink failed."
    JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/docs/job_specs") || {
        die "Readlink failed."
    RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
        die "Readlink failed."
    TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
        die "Readlink failed."
    DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
        die "Readlink failed."
    PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
        die "Readlink failed."

    ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archives") || {
        die "Readlink failed."
    mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
    DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
        die "Readlink failed."
    mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
    GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
        die "Readlink failed."
    mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
function compose_pybot_arguments () {

    # Variables read:
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - DUT - CSIT test/ subdirectory, set while processing tags.
    # - TAGS - Array variable holding selected tag boolean expressions.
    # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
    # - TEST_CODE - The test selection string from environment or argument.
    # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
    # Variables set:
    # - PYBOT_ARGS - String holding part of all arguments for pybot.
    # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.

    # No explicit check needed with "set -u".
    PYBOT_ARGS=("--loglevel" "TRACE")
    PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")

    # Device vs perf suite is chosen by the job type encoded in TEST_CODE.
    case "${TEST_CODE}" in
            PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
            PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
            die "Unknown specification: ${TEST_CODE}"

    for tag in "${TAGS[@]}"; do
        if [[ ${tag} == "!"* ]]; then
            # Leading "!" marks exclusion; strip it for the --exclude value.
            EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
            if [[ ${SELECTION_MODE} == "--test" ]]; then
                EXPANDED_TAGS+=("--test" "${tag}")
                # Include tags are ANDed with topology tags to stay in scope.
                EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")

    if [[ ${SELECTION_MODE} == "--test" ]]; then
        EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}")
function deactivate_docker_topology () {

    # Deactivate virtual vpp-device topology by removing containers.
    #
    # Variables read:
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.
    # Functions called:
    # - die - Print to stderr and exit.

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
            # Remote teardown via the shim container, mirroring activation.
            ssh="ssh root@172.17.0.1 -p 6022"
            # Forward only CSIT_ variables; the remote wrapper needs them.
            env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
            # The "declare -f" output is long and boring.
            ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
                die "Topology cleanup via shim-dcr failed!"
            # Local teardown; clean_environment comes from sourced device.sh.
            clean_environment || {
                die "Topology cleanup locally failed!"
            die "Unknown specification: ${case_text}!"
345 # Print the message to standard error end exit with error code specified
346 # by the second argument.
349 # - The default error message.
351 # - ${1} - The whole error message, be sure to quote. Optional
352 # - ${2} - the code to exit with, default: 1.
356 warn "${1:-Unspecified run-time error occurred!}"
function die_on_pybot_error () {

    # Source this fragment if you want to abort on any failed test case.
    #
    # Variables read:
    # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
    # Functions called:
    # - die - Print to stderr and exit.

    # Propagate pybot's own exit code so CI marks the run as failed.
    if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
        die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
function generate_tests () {

    # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
    # Any previously existing content of ${GENERATED_DIR}/tests is wiped before.
    # The generation is done by executing any *.py executable
    # within any subdirectory after copying.
    #
    # This is a separate function, because this code is called
    # both by autogen checker and entries calling run_pybot.
    #
    # Directories read:
    # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
    # Directories replaced:
    # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    rm -rf "${GENERATED_DIR}/tests" || die
    cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
    cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
    cmd_line+=("-executable" "-name" "*.py")
    # We sort the directories, so log output can be compared between runs.
    file_list=$("${cmd_line[@]}" | sort) || die

    # Unquoted expansion is intentional: paths come from the repo tree
    # and contain no whitespace.
    for gen in ${file_list}; do
        directory="$(dirname "${gen}")" || die
        filename="$(basename "${gen}")" || die
        pushd "${directory}" || die
        # Fix: run the discovered generator script itself; the previous
        # './"$(unknown)"' invoked a nonexistent command and ignored
        # the computed ${filename}.
        ./"${filename}" || die
        popd || die
    done
}
function get_test_code () {

    # Arguments:
    # - ${1} - Optional, argument of entry script (or empty as unset).
    #   Test code value to override job name from environment.
    # Variables read:
    # - JOB_NAME - String affecting test selection, default if not argument.
    # Variables set:
    # - TEST_CODE - The test selection string from environment or argument.
    # - NODENESS - Node multiplicity of desired testbed.
    # - FLAVOR - Node flavor string, usually describing the processor.

    TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
    if [[ -z "${TEST_CODE}" ]]; then
        TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."

    # Derive NODENESS and FLAVOR from substrings of the job name.
    case "${TEST_CODE}" in
            # Fallback to 3-node Haswell by default (backward compatibility)
function get_test_tag_string () {

    # Variables read:
    # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
    # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
    # - TEST_CODE - The test selection string from environment or argument.
    # Variables set:
    # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
    #   May be empty, or even not set on event types not adding comment.
    #
    # TODO: ci-management scripts no longer need to perform this.

    if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
        # The trigger word depends on the job type encoded in TEST_CODE.
        case "${TEST_CODE}" in
                die "Unknown specification: ${TEST_CODE}"
        # Ignore lines not containing the trigger word.
        comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
        # The vpp-csit triggers trail stuff we are not interested in.
        # Removing them and trigger word: https://unix.stackexchange.com/a/13472
        # (except relying on \s whitespace, \S non-whitespace and . both).
        # The last string is concatenated, only the middle part is expanded.
        cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
        # On parsing error, TEST_TAG_STRING probably stays empty.
        TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
        if [[ -z "${TEST_TAG_STRING-}" ]]; then
            # Probably we got a base64 encoded comment.
            comment=$(base64 --decode <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
            comment=$(fgrep "${trigger}" <<< "${comment}" || true)
            TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
        if [[ -n "${TEST_TAG_STRING-}" ]]; then
            # A leading node-variant token (icl/skx/hsw) is consumed here,
            # exported for graph node selection, and removed from the string.
            test_tag_array=(${TEST_TAG_STRING})
            if [[ "${test_tag_array[0]}" == "icl" ]]; then
                export GRAPH_NODE_VARIANT="icl"
                TEST_TAG_STRING="${test_tag_array[@]:1}" || true
            elif [[ "${test_tag_array[0]}" == "skx" ]]; then
                export GRAPH_NODE_VARIANT="skx"
                TEST_TAG_STRING="${test_tag_array[@]:1}" || true
            elif [[ "${test_tag_array[0]}" == "hsw" ]]; then
                export GRAPH_NODE_VARIANT="hsw"
                TEST_TAG_STRING="${test_tag_array[@]:1}" || true
function installed () {

    # Check if the given utility is installed. Fail if not installed.
    #
    # Duplicate of common.sh function, as this file is also used standalone.
    #
    # Arguments:
    # - ${1} - Utility to check.
    # Returns:
    # - 0 - If command is installed.
    # - 1 - If command is not installed.

    set -exuo pipefail

    # "command -v" is the POSIX-portable installed-check (unlike "which").
    command -v "${1}"
}
function move_archives () {

    # Move archive directory to top of workspace, if not already there.
    #
    # ARCHIVE_DIR is positioned relative to CSIT_DIR,
    # but in some jobs CSIT_DIR is not same as WORKSPACE
    # (e.g. under VPP_DIR). To simplify ci-management settings,
    # we want to move the data to the top. We do not want simple copy,
    # as ci-management is eager with recursive search.
    #
    # As some scripts may call this function multiple times,
    # the actual implementation use copying and deletion,
    # so the workspace gets "union" of contents (except overwrites on conflict).
    # The consequence is empty ARCHIVE_DIR remaining after this call.
    #
    # As the source directory is emptied,
    # the check for dirs being different is essential.
    #
    # Variables read:
    # - WORKSPACE - Jenkins workspace, move only if the value is not empty.
    #   Can be unset, then it speeds up manual testing.
    # - ARCHIVE_DIR - Path to directory with content to be moved.
    # Directories updated:
    # - ${WORKSPACE}/archives/ - Created if does not exist.
    #   Content of ${ARCHIVE_DIR}/ is moved.
    # Functions called:
    # - die - Print to stderr and exit.

    set -exuo pipefail

    if [[ -n "${WORKSPACE-}" ]]; then
        target=$(readlink -f "${WORKSPACE}/archives") || die "Readlink failed."
        if [[ "${target}" != "${ARCHIVE_DIR}" ]]; then
            mkdir -p "${target}" || die "Archives dir create failed."
            # Fix: guard against an already-emptied ARCHIVE_DIR. Previously
            # a repeated call died on the unmatched glob in "cp …/*",
            # contradicting the documented multiple-call support.
            # (Dotfiles are still not matched, as before.)
            if [[ -n "$(ls -A "${ARCHIVE_DIR}")" ]]; then
                cp -rf "${ARCHIVE_DIR}"/* "${target}" || die "Copy failed."
                rm -rf "${ARCHIVE_DIR}"/* || die "Delete failed."
            fi
        fi
    fi
}
function reserve_and_cleanup_testbed () {

    # Reserve physical testbed, perform cleanup, register trap to unreserve.
    # When cleanup fails, remove from topologies and keep retrying
    # until all topologies are removed.
    #
    # Variables read:
    # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
    # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
    # - BUILD_TAG - Any string suitable as filename, identifying
    #   test run executing this function. May be unset.
    # Variables set:
    # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # Functions called:
    # - die - Print to stderr and exit.
    # - ansible_playbook - Perform an action using ansible, see ansible.sh
    # Traps registered:
    # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.

    for topo in "${TOPOLOGIES[@]}"; do
        scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
        opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
        python3 "${scrpt}" "${opts[@]}"
        # NOTE(review): ${result} is read below but its assignment
        # (result="$?" guarded by set +e/-e) appears elided in this view.
        if [[ "${result}" == "0" ]]; then
            # Trap unreservation before cleanup check,
            # so multiple jobs showing failed cleanup improve chances
            # of humans to notice and fix.
            WORKING_TOPOLOGY="${topo}"
            echo "Reserved: ${WORKING_TOPOLOGY}"
            trap "untrap_and_unreserve_testbed" EXIT || {
                message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
                untrap_and_unreserve_testbed "${message}" || {
                    die "Teardown should have died, not failed."
                die "Trap attempt failed, unreserve succeeded. Aborting."
            # Cleanup + calibration checks.
            ansible_playbook "cleanup, calibration"
            if [[ "${result}" == "0" ]]; then
            # Cleanup failure: unreserve and drop this testbed from the pool.
            warn "Testbed cleanup failed: ${topo}"
            untrap_and_unreserve_testbed "Fail of unreserve after cleanup."
            # Else testbed is accessible but currently reserved, moving on.

        if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
            # Exit the infinite while loop if we made a reservation.
            warn "Reservation and cleanup successful."

        if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
            die "Run out of operational testbeds!"

        # Wait ~3minutes before next try.
        # NOTE(review): "$[ ]" arithmetic is deprecated; $(( )) is equivalent.
        sleep_time="$[ ( ${RANDOM} % 20 ) + 180 ]s" || {
            die "Sleep time calculation failed."
        echo "Sleeping ${sleep_time}"
        sleep "${sleep_time}" || die "Sleep failed."
function run_pybot () {

    # Run pybot with options based on input variables. Create output_info.xml
    #
    # Variables read:
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - ARCHIVE_DIR - Path to store robot result files in.
    # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
    # - GENERATED_DIR - Tests are assumed to be generated under there.
    # Variables set:
    # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
    # Functions called:
    # - die - Print to stderr and exit.

    all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
    all_options+=("--noncritical" "EXPECTED_FAILING")
    all_options+=("${EXPANDED_TAGS[@]}")

    pushd "${CSIT_DIR}" || die "Change directory operation failed."
    # NOTE(review): a "set +e"/"set -e" pair around robot appears elided
    # in this view; $? is captured so a failed run does not abort here.
    robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
    PYBOT_EXIT_STATUS="$?"

    # Generate INFO level output_info.xml for post-processing.
    all_options=("--loglevel" "INFO")
    all_options+=("--log" "none")
    all_options+=("--report" "none")
    all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
    all_options+=("${ARCHIVE_DIR}/output.xml")
    # rebot failure is tolerated; the main result is PYBOT_EXIT_STATUS.
    rebot "${all_options[@]}" || true
    popd || die "Change directory operation failed."
function select_arch_os () {

    # Set variables affected by local CPU architecture and operating system.
    #
    # Variables set:
    # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
    # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
    # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb."

    # Strip quotes from the ID value in /etc/os-release.
    os_id=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g') || {
        die "Get OS release failed."
            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
            VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_BIONIC"
            IMAGE_VER_FILE="VPP_DEVICE_IMAGE_CENTOS"
            VPP_VER_FILE="VPP_STABLE_VER_CENTOS"
            die "Unable to identify distro or os from ${os_id}"

    arch=$(uname -m) || {
        die "Get CPU architecture failed."
            # On aarch64 a separate ARM image is used.
            IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
function select_tags () {

    # Variables read:
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # - DUT - CSIT test/ subdirectory, set while processing tags.
    # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # - BASH_FUNCTION_DIR - Directory with input files to process.
    # Variables set:
    # - TAGS - Array of processed tag boolean expressions.
    # - SELECTION_MODE - Selection criteria [test, suite, include, exclude].

    # NIC SELECTION: compute the set of NICs to exclude.
    start_pattern='^ TG:'
    end_pattern='^ \? \?[A-Za-z0-9]\+:'
    # Remove the TG section from topology file
    sed_command="/${start_pattern}/,/${end_pattern}/d"
    # All topologies DUT NICs
    available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
                | grep -hoP "model: \K.*" | sort -u)
    # Selected topology DUT NICs
    reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
               | grep -hoP "model: \K.*" | sort -u)
    # All topologies DUT NICs - Selected topology DUT NICs
    exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
        die "Computation of excluded NICs failed."

    # Select default NIC tag.
    case "${TEST_CODE}" in
        *"3n-dnv"* | *"2n-dnv"*)
            default_nic="nic_intel-x553"
            default_nic="nic_intel-x520-da2"
        *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
            default_nic="nic_intel-xxv710"
        *"3n-hsw"* | *"2n-tx2"* | *"mrr-daily-master")
            default_nic="nic_intel-xl710"
            default_nic="nic_intel-x710"

    # Sub commands translate short NIC names in job-spec files into
    # full test-name fragments (and driver prefixes) used by --test.
    sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
    awk_nics_sub_cmd+='gsub("xxv710","25ge2p1xxv710");'
    awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
    awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
    awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
    awk_nics_sub_cmd+='gsub("x553","10ge2p1x553");'
    awk_nics_sub_cmd+='gsub("cx556a","10ge2p1cx556a");'
    awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
    awk_nics_sub_cmd+='gsub("vic1385","10ge2p1vic1385");'
    awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
    awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
    awk_nics_sub_cmd+='else drv="";'
    awk_nics_sub_cmd+='print "*"$7"-" drv $11"-"$5"."$3"-"$1"-" drv $11"-"$5'

    # Tag file directory shorthand.
    tfd="${JOB_SPECS_DIR}"
    case "${TEST_CODE}" in
        # Select specific performance tests based on jenkins job type variable.
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/mlr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/mrr_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/mrr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
        *"report-iterative"* )
            test_sets=(${TEST_TAG_STRING//:/ })
            # Run only one test set per run
            report_file=${test_sets[0]}.md
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/report_iterative/${NODENESS}-${FLAVOR}/${report_file} |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
        *"report-coverage"* )
            test_sets=(${TEST_TAG_STRING//:/ })
            # Run only one test set per run
            report_file=${test_sets[0]}.md
            readarray -t test_tag_array <<< $(grep -v "#" \
                ${tfd}/report_coverage/${NODENESS}-${FLAVOR}/${report_file} |
                awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
            SELECTION_MODE="--test"
            if [[ -z "${TEST_TAG_STRING-}" ]]; then
                # If nothing is specified, we will run pre-selected tests by
                test_tag_array=("mrrAND${default_nic}AND1cAND64bANDip4base"
                                "mrrAND${default_nic}AND1cAND78bANDip6base"
                                "mrrAND${default_nic}AND1cAND64bANDl2bdbase"
                                "mrrAND${default_nic}AND1cAND64bANDl2xcbase"
                # If trigger contains tags, split them into array.
                test_tag_array=(${TEST_TAG_STRING//:/ })
            SELECTION_MODE="--include"

    # Blacklisting certain tags per topology.
    #
    # Reasons for blacklisting:
    # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
    # TODO: Add missing reasons here (if general) or where used (if specific).
    case "${TEST_CODE}" in
            test_tag_array+=("!ipsec")
            test_tag_array+=("!ipsechw")
            # Not enough nic_intel-xxv710 to support double link tests.
            test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
            test_tag_array+=("!ipsec")
            test_tag_array+=("!ipsec")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!memif")
            test_tag_array+=("!srv6_proxy")
            test_tag_array+=("!vhost")
            test_tag_array+=("!vts")
            test_tag_array+=("!drv_avf")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!memif")
            test_tag_array+=("!srv6_proxy")
            test_tag_array+=("!vhost")
            test_tag_array+=("!vts")
            test_tag_array+=("!drv_avf")
            # 3n-tsh only has x520 NICs which don't work with AVF
            test_tag_array+=("!drv_avf")
            test_tag_array+=("!ipsechw")
            test_tag_array+=("!drv_avf")
            # All cards have access to QAT. But only one card (xl710)
            # resides in same NUMA as QAT. Other cards must go over QPI
            # which we do not want to even run.
            test_tag_array+=("!ipsechwNOTnic_intel-xl710")
            # Default to 3n-hsw due to compatibility.
            test_tag_array+=("!drv_avf")
            test_tag_array+=("!ipsechwNOTnic_intel-xl710")

    # We will add excluded NICs.
    test_tag_array+=("${exclude_nics[@]/#/!NIC_}")

    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
        # Automatic prefixing for VPP jobs to limit the NIC used and
        # traffic evaluation to MRR.
        if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
            prefix="${prefix}mrrAND"
            prefix="${prefix}mrrAND${default_nic}AND"
    for tag in "${test_tag_array[@]}"; do
        if [[ "${tag}" == "!"* ]]; then
            # Exclude tags are not prefixed.
        elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
            # Badly formed tag expressions can trigger way too much tests.
            warn "The following tag expression hints at bad trigger: ${tag}"
            warn "Possible cause: Multiple triggers in a single comment."
            die "Aborting to avoid triggering too many tests."
        elif [[ "${tag}" == *"OR"* ]]; then
            # If OR had higher precedence than AND, it would be useful here.
            # Some people think it does, thus triggering way too much tests.
            warn "The following tag expression hints at bad trigger: ${tag}"
            warn "Operator OR has lower precedence than AND. Use space instead."
            die "Aborting to avoid triggering too many tests."
        elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
            # Empty and comment lines are skipped.
            # Other lines are normal tags, they are to be prefixed.
            TAGS+=("${prefix}${tag}")
function select_topology () {

    # Variables read:
    # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
    # - FLAVOR - Node flavor string, currently either "hsw" or "skx".
    # - CSIT_DIR - Path to existing root of local CSIT git repository.
    # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
    # Variables set:
    # - TOPOLOGIES - Array of paths to suitable topology yaml files.
    # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
    # Functions called:
    # - die - Print to stderr and exit.

    case_text="${NODENESS}_${FLAVOR}"
    case "${case_text}" in
        # TODO: Move tags to "# Blacklisting certain tags per topology" section.
        # TODO: Double link availability depends on NIC used.
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
        "1n_skx" | "1n_tx2")
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
            TOPOLOGIES_TAGS="3_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
            TOPOLOGIES_TAGS="2_node_*_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_hsw*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
            TOPOLOGIES_TAGS="3_node_single_link_topo"
            TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
            TOPOLOGIES_TAGS="2_node_single_link_topo"
            # No falling back to 3n_hsw default, that should have been done
            # by the function which has set NODENESS and FLAVOR.
            die "Unknown specification: ${case_text}"

    # Unmatched glob would leave TOPOLOGIES unset; treat that as fatal.
    if [[ -z "${TOPOLOGIES-}" ]]; then
        die "No applicable topology found!"
function select_vpp_device_tags () {

    # Variables read:
    # - TEST_CODE - String affecting test selection, usually jenkins job name.
    # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
    # Variables set:
    # - TAGS - Array of processed tag boolean expressions.

    case "${TEST_CODE}" in
        # Select specific device tests based on jenkins job type variable.
            if [[ -z "${TEST_TAG_STRING-}" ]]; then
                # If nothing is specified, we will run pre-selected tests by
                # following tags. Items of array will be concatenated by OR
                # in Robot Framework.
                # If trigger contains tags, split them into array.
                test_tag_array=(${TEST_TAG_STRING//:/ })
            SELECTION_MODE="--include"

    # Blacklisting certain tags per topology.
    #
    # Reasons for blacklisting:
    # - avf - AVF is not possible to run on enic driver of VirtualBox.
    # - vhost - VirtualBox does not support nesting virtualization on Intel CPU.
    case "${TEST_CODE}" in
            test_tag_array+=("!avf")
            test_tag_array+=("!vhost")

    # We will prefix with devicetest to prevent running other tests
    # (e.g. Functional).
    prefix="devicetestAND"
    if [[ "${TEST_CODE}" == "vpp-"* ]]; then
        # Automatic prefixing for VPP jobs to limit testing.
    for tag in "${test_tag_array[@]}"; do
        if [[ ${tag} == "!"* ]]; then
            # Exclude tags are not prefixed.
            TAGS+=("${prefix}${tag}")
function untrap_and_unreserve_testbed () {

    # Use this as a trap function to ensure testbed does not remain reserved.
    # Perhaps call directly before script exit, to free testbed for other jobs.
    # This function is smart enough to avoid multiple unreservations (so safe).
    # Topo cleanup is executed (call it best practice), ignoring failures.
    #
    # Hardcoded:
    # - default message to die with if testbed might remain reserved.
    # Arguments:
    # - ${1} - Message to die with if unreservation fails. Default hardcoded.
    # Variables read (by inner function):
    # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
    # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
    # Variables written:
    # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
    # Trap unregistered:
    # - EXIT - Failure to untrap is reported, but ignored otherwise.
    # Functions called:
    # - die - Print to stderr and exit.
    # - ansible_playbook - Perform an action using ansible, see ansible.sh

    set +eu # We do not want to exit early in a "teardown" function.
    trap - EXIT || echo "Trap deactivation failed, continuing anyway."
    wt="${WORKING_TOPOLOGY}" # Just to avoid too long lines.
    if [[ -z "${wt-}" ]]; then
        warn "Testbed looks unreserved already. Trap removal failed before?"
        # NOTE(review): an "else" branch separating the lines below from the
        # warn above appears elided in this view — cleanup and unreserve run
        # only when WORKING_TOPOLOGY is still set.
        ansible_playbook "cleanup" || true
        # "-c" cancels (unreserves) the reservation made earlier.
        python3 "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
            die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
1160 # Print the message to standard error.
1163 # - ${@} - The text of the message.