1 # Copyright (c) 2019 Cisco and/or its affiliates.
2 # Copyright (c) 2019 PANTHEON.tech and/or its affiliates.
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at:
7 # http://www.apache.org/licenses/LICENSE-2.0
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
17 # This library defines functions used by multiple entry scripts.
18 # Keep functions ordered alphabetically, please.
20 # TODO: Add a link to bash style guide.
21 # TODO: Consider putting every die into a {} block,
22 # the code might become more readable (but longer).
25 function activate_docker_topology () {
27 # Create virtual vpp-device topology. Output of the function is topology
28 # file describing created environment saved to a file.
31 # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
32 # - TOPOLOGIES - Available topologies.
33 # - NODENESS - Node multiplicity of desired testbed.
34 # - FLAVOR - Node flavor string, usually describing the processor.
35 # - IMAGE_VER_FILE - Name of file that contains the image version.
36 # - CSIT_DIR - Directory where ${IMAGE_VER_FILE} is located.
38 # - WORKING_TOPOLOGY - Path to topology file.
42 source "${BASH_FUNCTION_DIR}/device.sh" || {
46 device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
47 case_text="${NODENESS}_${FLAVOR}"
48 case "${case_text}" in
50 # We execute reservation over csit-shim-dcr (ssh) which runs sourced
51 # script's functions. Env variables are read from ssh output
52 # back to localhost for further processing.
53 hostname=$(grep search /etc/resolv.conf | cut -d' ' -f3) || die
54 ssh="ssh root@${hostname} -p 6022"
55 run="activate_wrapper ${NODENESS} ${FLAVOR} ${device_image}"
56 # backticks to avoid https://midnight-commander.org/ticket/2142
57 env_vars=`${ssh} "$(declare -f); ${run}"` || {
58 die "Topology reservation via shim-dcr failed!"
# The grep below drops docker-binary path lines from the captured ssh
# output, so only the variable assignments get sourced locally.
61 source <(echo "$env_vars" | grep -v /usr/bin/docker) || {
67 # We execute reservation on localhost. Sourced script automatically
68 # sets environment variables for further processing.
69 activate_wrapper "${NODENESS}" "${FLAVOR}" "${device_image}" || die
72 die "Unknown specification: ${case_text}!"
75 trap 'deactivate_docker_topology' EXIT || {
76 die "Trap attempt failed, please cleanup manually. Aborting!"
79 # Replace all variables in template with those in environment.
80 source <(echo 'cat <<EOF >topo.yml'; cat ${TOPOLOGIES[0]}; echo EOF;) || {
81 die "Topology file create failed!"
84 WORKING_TOPOLOGY="/tmp/topology.yaml"
85 mv topo.yml "${WORKING_TOPOLOGY}" || {
86 die "Topology read failed!"
94 function activate_virtualenv () {
96 # Update virtualenv pip package, delete and create virtualenv directory,
97 # activate the virtualenv, install requirements, set PYTHONPATH.
100 # - ${1} - Path to existing directory for creating virtualenv in.
101 # If missing or empty, ${CSIT_DIR} is used.
102 # - ${2} - Path to requirements file, ${CSIT_DIR}/requirements.txt if empty.
104 # - CSIT_DIR - Path to existing root of local CSIT git repository.
105 # Variables exported:
106 # - PYTHONPATH - CSIT_DIR, as CSIT Python scripts usually need this.
108 # - die - Print to stderr and exit.
112 root_path="${1-$CSIT_DIR}"
113 env_dir="${root_path}/env"
114 req_path=${2-$CSIT_DIR/requirements.txt}
115 rm -rf "${env_dir}" || die "Failed to clean previous virtualenv."
116 pip3 install --upgrade virtualenv || {
117 die "Virtualenv package install failed."
119 virtualenv -p $(which python3) "${env_dir}" || {
# NOTE(review): message below says "python" while the env is created with
# python3 above — confirm and align the wording.
120 die "Virtualenv creation for $(which python) failed."
123 source "${env_dir}/bin/activate" || die "Virtualenv activation failed."
125 pip3 install --upgrade -r "${req_path}" || {
126 die "Requirements installation failed."
128 # Most CSIT Python scripts assume PYTHONPATH is set and exported.
129 export PYTHONPATH="${CSIT_DIR}" || die "Export failed."
133 function archive_tests () {
135 # Create .tar.xz of generated/tests for archiving.
136 # To be run after generate_tests, kept separate to offer more flexibility.
139 # - ${GENERATED_DIR}/tests - Tree of executed suites to archive.
141 # - ${ARCHIVE_DIR}/tests.tar.xz - Archive of generated tests.
# xz -9e maximizes compression; the tree is small so the extra time is fine.
145 tar c "${GENERATED_DIR}/tests" | xz -9e > "${ARCHIVE_DIR}/tests.tar.xz" || {
146 die "Error creating archive of generated tests."
151 function check_download_dir () {
153 # Fail if there are no files visible in ${DOWNLOAD_DIR}.
156 # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
158 # - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
160 # - die - Print to stderr and exit.
# "ls -A" lists everything except "." and "..", so dotfiles count too.
164 if [[ ! "$(ls -A "${DOWNLOAD_DIR}")" ]]; then
165 die "No artifacts downloaded!"
170 function check_prerequisites () {
172 # Fail if prerequisites are not met.
175 # - installed - Check if application is installed/present in system.
176 # - die - Print to stderr and exit.
# sshpass is required for non-interactive ssh password authentication.
180 if ! installed sshpass; then
181 die "Please install sshpass before continue!"
186 function common_dirs () {
188 # Set global variables, create some directories (without touching content).
191 # - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
192 # - CSIT_DIR - Path to existing root of local CSIT git repository.
193 # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
194 # - RESOURCES_DIR - Path to existing CSIT subdirectory "resources".
195 # - TOOLS_DIR - Path to existing resources subdirectory "tools".
196 # - PYTHON_SCRIPTS_DIR - Path to existing tools subdirectory "scripts".
197 # - ARCHIVE_DIR - Path to created CSIT subdirectory "archive".
198 # - DOWNLOAD_DIR - Path to created CSIT subdirectory "download_dir".
199 # - GENERATED_DIR - Path to created CSIT subdirectory "generated".
200 # Directories created if not present:
201 # ARCHIVE_DIR, DOWNLOAD_DIR, GENERATED_DIR.
203 # - die - Print to stderr and exit.
# "readlink -e" requires the path to exist (used for dirs that must already
# be present); "readlink -f" further below tolerates not-yet-created paths.
207 this_file=$(readlink -e "${BASH_SOURCE[0]}") || {
208 die "Some error during locating of this source file."
210 BASH_FUNCTION_DIR=$(dirname "${this_file}") || {
211 die "Some error during dirname call."
213 # Current working directory could be in a different repo, e.g. VPP.
214 pushd "${BASH_FUNCTION_DIR}" || die "Pushd failed"
215 relative_csit_dir=$(git rev-parse --show-toplevel) || {
216 die "Git rev-parse failed."
218 CSIT_DIR=$(readlink -e "${relative_csit_dir}") || die "Readlink failed."
219 popd || die "Popd failed."
220 TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
221 die "Readlink failed."
223 RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
224 die "Readlink failed."
226 TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
227 die "Readlink failed."
229 DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
230 die "Readlink failed."
232 PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
233 die "Readlink failed."
236 ARCHIVE_DIR=$(readlink -f "${CSIT_DIR}/archive") || {
237 die "Readlink failed."
239 mkdir -p "${ARCHIVE_DIR}" || die "Mkdir failed."
240 DOWNLOAD_DIR=$(readlink -f "${CSIT_DIR}/download_dir") || {
241 die "Readlink failed."
243 mkdir -p "${DOWNLOAD_DIR}" || die "Mkdir failed."
244 GENERATED_DIR=$(readlink -f "${CSIT_DIR}/generated") || {
245 die "Readlink failed."
247 mkdir -p "${GENERATED_DIR}" || die "Mkdir failed."
251 function compose_pybot_arguments () {
254 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
255 # - DUT - CSIT test/ subdirectory, set while processing tags.
256 # - TAGS - Array variable holding selected tag boolean expressions.
257 # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
258 # - TEST_CODE - The test selection string from environment or argument.
260 # - PYBOT_ARGS - String holding part of all arguments for pybot.
261 # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.
265 # No explicit check needed with "set -u".
266 PYBOT_ARGS=("--loglevel" "TRACE")
267 PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
269 case "${TEST_CODE}" in
271 PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
274 PYBOT_ARGS+=("--suite" "tests.${DUT}.func")
277 PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
280 die "Unknown specification: ${TEST_CODE}"
284 for tag in "${TAGS[@]}"; do
# Tags starting with "!" exclude tests; the "!" is stripped for --exclude.
285 if [[ ${tag} == "!"* ]]; then
286 EXPANDED_TAGS+=("--exclude" "${tag#$"!"}")
288 EXPANDED_TAGS+=("--include" "${TOPOLOGIES_TAGS}AND${tag}")
294 function copy_archives () {
296 # Create additional archive if workspace variable is set.
297 # This way if script is running in jenkins all will be
298 # automatically archived to logs.fd.io.
301 # - WORKSPACE - Jenkins workspace, copy only if the value is not empty.
302 # Can be unset, then it speeds up manual testing.
303 # - ARCHIVE_DIR - Path to directory with content to be copied.
304 # Directories updated:
305 # - ${WORKSPACE}/archives/ - Created if does not exist.
306 # Content of ${ARCHIVE_DIR}/ is copied here.
308 # - die - Print to stderr and exit.
# Silently do nothing when WORKSPACE is unset/empty (manual runs).
312 if [[ -n "${WORKSPACE-}" ]]; then
313 mkdir -p "${WORKSPACE}/archives/" || die "Archives dir create failed."
314 cp -rf "${ARCHIVE_DIR}"/* "${WORKSPACE}/archives" || die "Copy failed."
319 function deactivate_docker_topology () {
321 # Deactivate virtual vpp-device topology by removing containers.
324 # - NODENESS - Node multiplicity of desired testbed.
325 # - FLAVOR - Node flavor string, usually describing the processor.
329 case_text="${NODENESS}_${FLAVOR}"
330 case "${case_text}" in
# Remote cleanup: CSIT_ environment variables are forwarded over ssh so
# the wrapper on the shim container knows which containers to remove.
332 hostname=$(grep search /etc/resolv.conf | cut -d' ' -f3) || die
333 ssh="ssh root@${hostname} -p 6022"
334 env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
335 ${ssh} "$(declare -f); deactivate_wrapper ${env_vars}" || {
336 die "Topology cleanup via shim-dcr failed!"
341 clean_environment || {
342 die "Topology cleanup locally failed!"
347 die "Unknown specification: ${case_text}!"
354 # Print the message to standard error and exit with error code specified
355 # by the second argument.
358 # - The default error message.
360 # - ${1} - The whole error message, be sure to quote. Optional
361 # - ${2} - the code to exit with, default: 1.
# Reuses warn so the message goes to stderr, then exits with ${2:-1}.
365 warn "${1:-Unspecified run-time error occurred!}"
370 function die_on_pybot_error () {
372 # Source this fragment if you want to abort on any failed test case.
375 # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
377 # - die - Print to stderr and exit.
# Propagates pybot's exit code as the script's exit code via die's 2nd arg.
381 if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
382 die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
387 function generate_tests () {
389 # Populate ${GENERATED_DIR}/tests based on ${CSIT_DIR}/tests/.
390 # Any previously existing content of ${GENERATED_DIR}/tests is wiped before.
391 # The generation is done by executing any *.py executable
392 # within any subdirectory after copying.
394 # This is a separate function, because this code is called
395 # both by autogen checker and entries calling run_pybot.
398 # - ${CSIT_DIR}/tests - Used as templates for the generated tests.
399 # Directories replaced:
400 # - ${GENERATED_DIR}/tests - Overwritten by the generated tests.
404 rm -rf "${GENERATED_DIR}/tests" || die
405 cp -r "${CSIT_DIR}/tests" "${GENERATED_DIR}/tests" || die
406 cmd_line=("find" "${GENERATED_DIR}/tests" "-type" "f")
407 cmd_line+=("-executable" "-name" "*.py")
408 file_list=$("${cmd_line[@]}") || die
410 for gen in ${file_list}; do
411 directory="$(dirname "${gen}")" || die
412 filename="$(basename "${gen}")" || die
413 pushd "${directory}" || die
# NOTE(review): "$(unknown)" looks like a garbled/placeholder token; given
# the dirname/basename lines above, this should execute ./"${filename}" —
# confirm against the original file.
414 ./"$(unknown)" || die
420 function get_test_code () {
423 # - ${1} - Optional, argument of entry script (or empty as unset).
424 # Test code value to override job name from environment.
426 # - JOB_NAME - String affecting test selection, default if not argument.
428 # - TEST_CODE - The test selection string from environment or argument.
429 # - NODENESS - Node multiplicity of desired testbed.
430 # - FLAVOR - Node flavor string, usually describing the processor.
# Explicit argument wins; JOB_NAME (set by Jenkins) is the fallback.
434 TEST_CODE="${1-}" || die "Reading optional argument failed, somehow."
435 if [[ -z "${TEST_CODE}" ]]; then
436 TEST_CODE="${JOB_NAME-}" || die "Reading job name failed, somehow."
439 case "${TEST_CODE}" in
477 # Fallback to 3-node Haswell by default (backward compatibility)
485 function get_test_tag_string () {
488 # - GERRIT_EVENT_TYPE - Event type set by gerrit, can be unset.
489 # - GERRIT_EVENT_COMMENT_TEXT - Comment text, read for "comment-added" type.
490 # - TEST_CODE - The test selection string from environment or argument.
492 # - TEST_TAG_STRING - The string following trigger word in gerrit comment.
493 # May be empty, or even not set on event types not adding comment.
495 # TODO: ci-management scripts no longer need to perform this.
499 if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
500 case "${TEST_CODE}" in
508 die "Unknown specification: ${TEST_CODE}"
510 # Ignore lines not containing the trigger word.
511 comment=$(fgrep "${trigger}" <<< "${GERRIT_EVENT_COMMENT_TEXT}") || true
512 # The vpp-csit triggers trail stuff we are not interested in.
513 # Removing them and trigger word: https://unix.stackexchange.com/a/13472
514 # (except relying on \s whitespace, \S non-whitespace and . both).
515 # The last string is concatenated, only the middle part is expanded.
516 cmd=("grep" "-oP" '\S*'"${trigger}"'\S*\s\K.+$') || die "Unset trigger?"
517 # On parsing error, TEST_TAG_STRING probably stays empty.
518 TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}") || true
523 function installed () {
525 # Check if the given utility is installed. Fail if not installed.
527 # Duplicate of common.sh function, as this file is also used standalone.
530 # - ${1} - Utility to check.
532 # - 0 - If command is installed.
533 # - 1 - If command is not installed.
# NOTE(review): prefer the portable "command -v" builtin over "which"
# for this check, if not already used.
541 function reserve_and_cleanup_testbed () {
543 # Reserve physical testbed, perform cleanup, register trap to unreserve.
544 # When cleanup fails, remove from topologies and keep retrying
545 # until all topologies are removed.
548 # - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
549 # - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
550 # - BUILD_TAG - Any string suitable as filename, identifying
551 # test run executing this function. May be unset.
553 # - TOPOLOGIES - Array of paths to topologies, with failed cleanups removed.
554 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
556 # - die - Print to stderr and exit.
557 # - ansible_hosts - Perform an action using ansible, see ansible.sh
559 # - EXIT - Calls cancel_all for ${WORKING_TOPOLOGY}.
564 for topo in "${TOPOLOGIES[@]}"; do
566 scrpt="${PYTHON_SCRIPTS_DIR}/topo_reservation.py"
567 opts=("-t" "${topo}" "-r" "${BUILD_TAG:-Unknown}")
568 python "${scrpt}" "${opts[@]}"
571 if [[ "${result}" == "0" ]]; then
572 # Trap unreservation before cleanup check,
573 # so multiple jobs showing failed cleanup improve chances
574 # of humans to notice and fix.
575 WORKING_TOPOLOGY="${topo}"
576 echo "Reserved: ${WORKING_TOPOLOGY}"
577 trap "untrap_and_unreserve_testbed" EXIT || {
578 message="TRAP ATTEMPT AND UNRESERVE FAILED, FIX MANUALLY."
579 untrap_and_unreserve_testbed "${message}" || {
580 die "Teardown should have died, not failed."
582 die "Trap attempt failed, unreserve succeeded. Aborting."
586 ansible_hosts "cleanup"
589 if [[ "${result}" == "0" ]]; then
592 warn "Testbed cleanup failed: ${topo}"
593 untrap_and_unreserve_testbed "Fail of unreserve after cleanup."
595 # Else testbed is accessible but currently reserved, moving on.
598 if [[ -n "${WORKING_TOPOLOGY-}" ]]; then
599 # Exit the infinite while loop if we made a reservation.
600 warn "Reservation and cleanup successful."
604 if [[ "${#TOPOLOGIES[@]}" == "0" ]]; then
605 die "Run out of operational testbeds!"
608 # Wait ~3minutes before next try.
# NOTE(review): "$[ ... ]" is deprecated bash arithmetic; "$(( ... ))"
# is the supported equivalent. Randomized sleep avoids thundering herd.
609 sleep_time="$[ ( ${RANDOM} % 20 ) + 180 ]s" || {
610 die "Sleep time calculation failed."
612 echo "Sleeping ${sleep_time}"
613 sleep "${sleep_time}" || die "Sleep failed."
618 function run_pybot () {
620 # Run pybot with options based on input variables. Create output_info.xml
623 # - CSIT_DIR - Path to existing root of local CSIT git repository.
624 # - ARCHIVE_DIR - Path to store robot result files in.
625 # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
626 # - GENERATED_DIR - Tests are assumed to be generated under there.
628 # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
630 # - die - Print to stderr and exit.
634 all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
635 all_options+=("--noncritical" "EXPECTED_FAILING")
636 all_options+=("${EXPANDED_TAGS[@]}")
638 pushd "${CSIT_DIR}" || die "Change directory operation failed."
# Exit status is captured instead of dying, so callers decide how to react
# (see die_on_pybot_error).
641 robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
642 PYBOT_EXIT_STATUS="$?"
645 # Generate INFO level output_info.xml for post-processing.
646 all_options=("--loglevel" "INFO")
647 all_options+=("--log" "none")
648 all_options+=("--report" "none")
649 all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
650 all_options+=("${ARCHIVE_DIR}/output.xml")
# rebot failure is tolerated; the primary result is output.xml above.
651 rebot "${all_options[@]}" || true
652 popd || die "Change directory operation failed."
656 function select_arch_os () {
658 # Set variables affected by local CPU architecture and operating system.
661 # - VPP_VER_FILE - Name of file in CSIT dir containing vpp stable version.
662 # - IMAGE_VER_FILE - Name of file in CSIT dir containing the image name.
663 # - PKG_SUFFIX - Suffix of OS package file name, "rpm" or "deb."
# Parse the ID= line of /etc/os-release, stripping surrounding quotes.
667 os_id=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g') || {
668 die "Get OS release failed."
673 IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
674 VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_BIONIC"
678 IMAGE_VER_FILE="VPP_DEVICE_IMAGE_CENTOS"
679 VPP_VER_FILE="VPP_STABLE_VER_CENTOS"
683 die "Unable to identify distro or os from ${os_id}"
687 arch=$(uname -m) || {
688 die "Get CPU architecture failed."
# On ARM (aarch64) the device image name carries an _ARM suffix.
693 IMAGE_VER_FILE="${IMAGE_VER_FILE}_ARM"
701 function select_tags () {
704 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
705 # - TEST_CODE - String affecting test selection, usually jenkins job name.
706 # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
708 # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
709 # - BASH_FUNCTION_DIR - Directory with input files to process.
711 # - TAGS - Array of processed tag boolean expressions.
716 start_pattern='^ TG:'
717 end_pattern='^ \? \?[A-Za-z0-9]\+:'
718 # Remove the TG section from topology file
719 sed_command="/${start_pattern}/,/${end_pattern}/d"
720 # All topologies DUT NICs
721 available=$(sed "${sed_command}" "${TOPOLOGIES_DIR}"/* \
722 | grep -hoP "model: \K.*" | sort -u)
723 # Selected topology DUT NICs
724 reserved=$(sed "${sed_command}" "${WORKING_TOPOLOGY}" \
725 | grep -hoP "model: \K.*" | sort -u)
726 # All topologies DUT NICs - Selected topology DUT NICs
# comm -13 keeps lines unique to "available" (second input): NIC models
# present somewhere but absent from the reserved topology.
727 exclude_nics=($(comm -13 <(echo "${reserved}") <(echo "${available}"))) || {
728 die "Computation of excluded NICs failed."
731 # Select default NIC tag.
732 case "${TEST_CODE}" in
733 *"3n-dnv"* | *"2n-dnv"*)
734 default_nic="nic_intel-x553"
737 default_nic="nic_intel-x520-da2"
739 *"3n-skx"* | *"2n-skx"* | *"2n-clx"*)
740 default_nic="nic_intel-xxv710"
742 *"3n-hsw"* | *"mrr-daily-master")
743 default_nic="nic_intel-xl710"
746 default_nic="nic_intel-x710"
750 sed_nic_sub_cmd="sed s/\${default_nic}/${default_nic}/"
751 # Tag file directory shorthand.
752 tfd="${BASH_FUNCTION_DIR}"
753 case "${TEST_CODE}" in
754 # Select specific performance tests based on jenkins job type variable.
756 readarray -t test_tag_array < "${tfd}/mlr-weekly.txt" || die
759 readarray -t test_tag_array <<< $(${sed_nic_sub_cmd} \
760 ${tfd}/mrr-daily-${FLAVOR}.txt) || die
763 readarray -t test_tag_array <<< $(${sed_nic_sub_cmd} \
764 ${tfd}/mrr-weekly.txt) || die
767 if [[ -z "${TEST_TAG_STRING-}" ]]; then
768 # If nothing is specified, we will run pre-selected tests by
770 test_tag_array=("mrrAND${default_nic}AND1cAND64bANDip4base"
771 "mrrAND${default_nic}AND1cAND78bANDip6base"
772 "mrrAND${default_nic}AND1cAND64bANDl2bdbase"
773 "mrrAND${default_nic}AND1cAND64bANDl2xcbase"
776 # If trigger contains tags, split them into array.
777 test_tag_array=(${TEST_TAG_STRING//:/ })
782 # Blacklisting certain tags per topology.
784 # Reasons for blacklisting:
785 # - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
786 # TODO: Add missing reasons here (if general) or where used (if specific).
787 case "${TEST_CODE}" in
789 test_tag_array+=("!ipsechw")
792 test_tag_array+=("!ipsechw")
793 # Not enough nic_intel-xxv710 to support double link tests.
794 test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
797 test_tag_array+=("!ipsechw")
800 test_tag_array+=("!ipsechw")
801 test_tag_array+=("!memif")
802 test_tag_array+=("!srv6_proxy")
803 test_tag_array+=("!vhost")
804 test_tag_array+=("!vts")
805 test_tag_array+=("!drv_avf")
808 test_tag_array+=("!memif")
809 test_tag_array+=("!srv6_proxy")
810 test_tag_array+=("!vhost")
811 test_tag_array+=("!vts")
812 test_tag_array+=("!drv_avf")
815 # 3n-tsh only has x520 NICs which don't work with AVF
816 test_tag_array+=("!drv_avf")
817 test_tag_array+=("!ipsechw")
820 # TODO: Introduce NOIOMMU version of AVF tests.
821 # TODO: Make (both) AVF tests work on Haswell,
822 # or document why (some of) it is not possible.
823 # https://github.com/FDio/vpp/blob/master/src/plugins/avf/README.md
824 test_tag_array+=("!drv_avf")
825 # All cards have access to QAT. But only one card (xl710)
826 # resides in same NUMA as QAT. Other cards must go over QPI
827 # which we do not want to even run.
828 test_tag_array+=("!ipsechwNOTnic_intel-xl710")
831 # Default to 3n-hsw due to compatibility.
832 test_tag_array+=("!drv_avf")
833 test_tag_array+=("!ipsechwNOTnic_intel-xl710")
837 # We will add excluded NICs.
838 test_tag_array+=("${exclude_nics[@]/#/!NIC_}")
842 # We will prefix with perftest to prevent running other tests
846 if [[ "${TEST_CODE}" == "vpp-"* ]]; then
847 # Automatic prefixing for VPP jobs to limit the NIC used and
848 # traffic evaluation to MRR.
849 if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
850 prefix="${prefix}mrrAND"
852 prefix="${prefix}mrrAND${default_nic}AND"
855 for tag in "${test_tag_array[@]}"; do
856 if [[ "${tag}" == "!"* ]]; then
857 # Exclude tags are not prefixed.
859 elif [[ "${tag}" == " "* || "${tag}" == *"perftest"* ]]; then
860 # Badly formed tag expressions can trigger way too much tests.
862 warn "The following tag expression hints at bad trigger: ${tag}"
863 warn "Possible cause: Multiple triggers in a single comment."
864 die "Aborting to avoid triggering too many tests."
865 elif [[ "${tag}" != "" && "${tag}" != "#"* ]]; then
866 # Empty and comment lines are skipped.
867 # Other lines are normal tags, they are to be prefixed.
868 TAGS+=("${prefix}${tag}")
875 function select_topology () {
878 # - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
879 # - FLAVOR - Node flavor string, currently either "hsw" or "skx".
880 # - CSIT_DIR - Path to existing root of local CSIT git repository.
881 # - TOPOLOGIES_DIR - Path to existing directory with available topologies.
883 # - TOPOLOGIES - Array of paths to suitable topology yaml files.
884 # - TOPOLOGIES_TAGS - Tag expression selecting tests for the topology.
886 # - die - Print to stderr and exit.
890 case_text="${NODENESS}_${FLAVOR}"
891 case "${case_text}" in
892 # TODO: Move tags to "# Blacklisting certain tags per topology" section.
893 # TODO: Double link availability depends on NIC used.
895 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
896 TOPOLOGIES_TAGS="2_node_single_link_topo"
899 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
900 TOPOLOGIES_TAGS="2_node_single_link_topo"
903 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
904 TOPOLOGIES_TAGS="2_node_*_link_topo"
907 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
908 TOPOLOGIES_TAGS="3_node_*_link_topo"
911 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
912 TOPOLOGIES_TAGS="2_node_*_link_topo"
915 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
916 TOPOLOGIES_TAGS="2_node_single_link_topo"
919 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
920 TOPOLOGIES_TAGS="3_node_single_link_topo"
923 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_hsw*.yaml )
924 TOPOLOGIES_TAGS="3_node_single_link_topo"
927 TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
928 TOPOLOGIES_TAGS="3_node_single_link_topo"
931 # No falling back to 3n_hsw default, that should have been done
932 # by the function which has set NODENESS and FLAVOR.
933 die "Unknown specification: ${case_text}"
# NOTE(review): without nullglob an unmatched glob stays literal, so this
# emptiness check presumably relies on shell options set elsewhere — confirm.
936 if [[ -z "${TOPOLOGIES-}" ]]; then
937 die "No applicable topology found!"
942 function select_vpp_device_tags () {
945 # - TEST_CODE - String affecting test selection, usually jenkins job name.
946 # - TEST_TAG_STRING - String selecting tags, from gerrit comment.
949 # - TAGS - Array of processed tag boolean expressions.
953 case "${TEST_CODE}" in
954 # Select specific device tests based on jenkins job type variable.
956 if [[ -z "${TEST_TAG_STRING-}" ]]; then
957 # If nothing is specified, we will run pre-selected tests by
958 # following tags. Items of array will be concatenated by OR
959 # in Robot Framework.
962 # If trigger contains tags, split them into array.
963 test_tag_array=(${TEST_TAG_STRING//:/ })
968 # Blacklisting certain tags per topology.
970 # Reasons for blacklisting:
971 # - avf - AVF is not possible to run on enic driver of VirtualBox.
972 # - vhost - VirtualBox does not support nesting virtualization on Intel CPU.
973 case "${TEST_CODE}" in
975 test_tag_array+=("!avf")
976 test_tag_array+=("!vhost")
984 # We will prefix with devicetest to prevent running other tests
986 prefix="devicetestAND"
987 if [[ "${TEST_CODE}" == "vpp-"* ]]; then
988 # Automatic prefixing for VPP jobs to limit testing.
991 for tag in "${test_tag_array[@]}"; do
992 if [[ ${tag} == "!"* ]]; then
993 # Exclude tags are not prefixed.
996 TAGS+=("${prefix}${tag}")
1001 function untrap_and_unreserve_testbed () {
1003 # Use this as a trap function to ensure testbed does not remain reserved.
1004 # Perhaps call directly before script exit, to free testbed for other jobs.
1005 # This function is smart enough to avoid multiple unreservations (so safe).
1006 # Topo cleanup is executed (call it best practice), ignoring failures.
1009 # - default message to die with if testbed might remain reserved.
1011 # - ${1} - Message to die with if unreservation fails. Default hardcoded.
1012 # Variables read (by inner function):
1013 # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
1014 # - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
1015 # Variables written:
1016 # - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
1017 # Trap unregistered:
1018 # - EXIT - Failure to untrap is reported, but ignored otherwise.
1020 # - die - Print to stderr and exit.
1021 # - ansible_hosts - Perform an action using ansible, see ansible.sh
1024 set +eu # We do not want to exit early in a "teardown" function.
1025 trap - EXIT || echo "Trap deactivation failed, continuing anyway."
1026 wt="${WORKING_TOPOLOGY}" # Just to avoid too long lines.
1027 if [[ -z "${wt-}" ]]; then
1029 warn "Testbed looks unreserved already. Trap removal failed before?"
1031 ansible_hosts "cleanup" || true
# Exit code 2 distinguishes an unreserve failure from a generic die (1).
1032 python "${PYTHON_SCRIPTS_DIR}/topo_reservation.py" -c -t "${wt}" || {
1033 die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
1043 # Print the message to standard error.
1046 # - ${@} - The text of the message.