-# Copyright (c) 2018 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2023 PANTHEON.tech s.r.o.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
set -exuo pipefail
-# This library defines functions used mainly by "per_patch_perf.sh" entry script.
+# This library defines functions used mainly by per patch entry scripts.
# Generally, the functions assume "common.sh" library has been sourced already.
-
# Keep functions ordered alphabetically, please.
-# TODO: Add a link to bash style guide.
+function archive_test_results () {
+ # Arguments:
+ # - ${1}: Directory to archive to. Required. Parent has to exist.
+ # Variable set:
+ # - TARGET - Target directory.
+ # Variables read:
+ # - ARCHIVE_DIR - Path to where robot result files are created in.
+ # - VPP_DIR - Path to existing directory, root for relative paths.
+ # Directories updated:
+ # - ${1} - Created, and robot and parsing files are moved/created there.
+ # Functions called:
+ # - die - Print to stderr and exit, defined in common.sh
-function build_vpp_ubuntu_amd64 () {
+ set -exuo pipefail
+
+ cd "${VPP_DIR}" || die "Change directory command failed."
+ TARGET="$(readlink -f "$1")"
+ mkdir -p "${TARGET}" || die "Directory creation failed."
+ file_list=("output.xml" "log.html" "report.html" "tests")
+ for filename in "${file_list[@]}"; do
+ mv "${ARCHIVE_DIR}/${filename}" "${TARGET}/${filename}" || {
+ die "Attempt to move '${filename}' failed."
+ }
+ done
+}
+
+
+function archive_parse_test_results () {
+
+ # Arguments:
+ # - ${1}: Directory to archive to. Required. Parent has to exist.
+ # Variables read:
+ # - TARGET - Target directory.
+ # Functions called:
+ # - die - Print to stderr and exit, defined in common.sh
+ # - archive_test_results - Archiving results.
+ # - parse_results - See definition in this file.
set -exuo pipefail
- # TODO: Make sure whether this works on other distros/archs too.
+ archive_test_results "$1" || die
+ parse_results "${TARGET}" || {
+ die "The function should have died on error."
+ }
+}
+
+function build_vpp_ubuntu_amd64 () {
+
+ # This function is using make pkg-verify to build VPP with all dependencies
+ # that is ARCH/OS aware. VPP repo is SSOT for building mechanics and CSIT
+ # is consuming artifacts. This way if VPP will introduce change in building
+ # mechanics they will not be blocked by CSIT repo.
# Arguments:
# - ${1} - String identifier for echo, can be unset.
# Variables read:
+ # - MAKE_PARALLEL_FLAGS - Make flags when building VPP.
+ # - MAKE_PARALLEL_JOBS - Number of cores to use when building VPP.
# - VPP_DIR - Path to existing directory, parent to accessed directories.
# Directories updated:
# - ${VPP_DIR} - Whole subtree, many files (re)created by the build process.
- # - ${VPP_DIR}/build-root - Final build artifacts for CSIT end up here.
- # - ${VPP_DIR}/dpdk - The dpdk artifact is built, but moved to build-root/.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
+ set -exuo pipefail
+
cd "${VPP_DIR}" || die "Change directory command failed."
- echo 'Building using "make build-root/vagrant/build.sh"'
- # TODO: Do we want to support "${DRYRUN}" == "True"?
- make UNATTENDED=yes install-dep || die "Make install-dep failed."
- # The per_patch script calls this function twice, first for the new commit,
- # then for its parent commit. On Jenkins, no dpdk is installed at first,
- # locally it might have been installed. New dpdk is installed second call.
- # If make detects installed vpp-dpdk-dev with matching version,
- # it skips building vpp-dpdk-dkms entirely, but we need that file.
- # On the other hand, if parent uses different dpdk version,
- # The new vpp-dpdk-dkms is built, but the old one is not removed
- # from the build directory if present. (Further functions move both,
- # and during test dpkg decides on its own which version gets installed.)
- # As per_patch is too dumb (yet) to detect any of that,
- # the only safe solution is to clean build directory and force rebuild.
- # TODO: Make this function smarter and skip DPDK rebuilds if possible.
- cmd=("dpkg-query" "--showformat='$${Version}'" "--show" "vpp-dpdk-dev")
- installed_deb_ver="$(sudo "${cmd[@]}" || true)"
- if [[ -n "${installed_deb_ver}" ]]; then
- sudo dpkg --purge "vpp-dpdk-dev" || {
- die "Dpdk package uninstalation failed."
- }
+ if [ -n "${MAKE_PARALLEL_FLAGS-}" ]; then
+ echo "Building VPP. Number of cores for build set with" \
+ "MAKE_PARALLEL_FLAGS='${MAKE_PARALLEL_FLAGS}'."
+ elif [ -n "${MAKE_PARALLEL_JOBS-}" ]; then
+ echo "Building VPP. Number of cores for build set with" \
+ "MAKE_PARALLEL_JOBS='${MAKE_PARALLEL_JOBS}'."
+ else
+ echo "Building VPP. Number of cores not set, " \
+ "using build default ($(grep -c ^processor /proc/cpuinfo))."
fi
- make UNATTENDED=yes dpdk-install-dev || {
- die "Make dpdk-install-dev failed."
- }
- build-root/vagrant/"build.sh" || die "Vagrant VPP build script failed."
- # CSIT also needs the DPDK artifacts, which is not in build-root.
- mv -v "dpdk/vpp-dpdk-dkms"*".deb" "build-root"/ || {
- die "*.deb move failed."
- }
- echo "*******************************************************************"
+ make UNATTENDED=y pkg-verify || die "VPP build with make pkg-verify failed."
echo "* VPP ${1-} BUILD SUCCESSFULLY COMPLETED" || {
die "Argument not found."
}
- echo "*******************************************************************"
}
function compare_test_results () {
- set -exuo pipefail
-
# Variables read:
# - VPP_DIR - Path to directory with VPP git repo (at least built parts).
# - ARCHIVE_DIR - Path to where robot result files are created in.
# of parent build.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
- # - parse_bmrr_results - See definition in this file.
# Exit code:
# - 0 - If the comparison utility sees no regression (nor data error).
# - 1 - If the comparison utility sees a regression (or data error).
- cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "csit_parent" || die "Remove operation failed."
- mkdir -p "csit_parent" || die "Directory creation failed."
- for filename in "output.xml" "log.html" "report.html"; do
- mv "${ARCHIVE_DIR}/${filename}" "csit_parent/${filename}" || {
- die "Attempt to move '${filename}' failed."
- }
- done
- parse_bmrr_results "csit_parent" || {
- die "The function should have died on error."
- }
+ set -exuo pipefail
+ cd "${VPP_DIR}" || die "Change directory operation failed."
# Reusing CSIT main virtualenv.
- pip install -r "${PYTHON_SCRIPTS_DIR}/perpatch_requirements.txt" || {
- die "Perpatch Python requirements installation failed."
- }
- python "${PYTHON_SCRIPTS_DIR}/compare_perpatch.py"
+ python3 "${TOOLS_DIR}/integrated/compare_perpatch.py"
# The exit code determines the vote result.
}
-function download_builds () {
+function initialize_csit_dirs () {
set -exuo pipefail
- # This is mostly useful only for Sandbox testing, to avoid recompilation.
- #
- # Arguments:
- # - ${1} - URL to download VPP builds from.
# Variables read:
# - VPP_DIR - Path to WORKSPACE, parent of created directories.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
# Directories created:
- # - archive - Ends up empty, not to be confused with ${ARCHIVE_DIR}.
- # - build_new - Holding built artifacts of the patch under test (PUT).
- # - built_parent - Holding built artifacts of parent of PUT.
- # - csit_new - (Re)set to a symlink to archive robot results on failure.
+ # - csit_current - Holding test results of the patch under test (PUT).
+ # - csit_parent - Holding test results of parent of PUT.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
+ set -exuo pipefail
+
cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "build-root" "build_parent" "build_new" "archive" "csit_new" || {
- die "Directory removal failed."
+ rm -rf "csit_current" "csit_parent" || {
+ die "Directory deletion failed."
}
- wget -N --progress=dot:giga "${1}" || die "Wget download failed."
- unzip "archive.zip" || die "Archive extraction failed."
- mv "archive/build_parent" ./ || die "Move operation failed."
- mv "archive/build_new" ./ || die "Move operation failed."
- cp -r "build_new"/*".deb" "${DOWNLOAD_DIR}" || {
- die "Copy operation failed."
+ mkdir -p "csit_current" "csit_parent" || {
+ die "Directory creation failed."
}
- # Create symlinks so that if job fails on robot, results can be archived.
- ln -s "${ARCHIVE_DIR}" "csit_new" || die "Symbolic link creation failed."
}
-function parse_bmrr_results () {
+function parse_results () {
- set -exuo pipefail
-
- # Currently "parsing" is just two greps.
- # TODO: Re-use PAL parsing code, make parsing more general and centralized.
+ # Currently "parsing" is just few greps on output.xml.
+ # TODO: Parse json outputs properly.
+ #
+ # The current implementation attempts to parse for BMRR, PDR and passrate.
+ # If failures are present, they are reported as fake throughput values,
+ # enabling bisection to focus on the cause (or the fix) of the failures.
+ #
+ # The fake values are created with MRR multiplicity,
+ # otherwise jumpavg (which dislikes short groups) could misclassify them.
#
# Arguments:
# - ${1} - Path to (existing) directory holding robot output.xml result.
# - output.xml - From argument location.
# Files updated:
# - results.txt - (Re)created, in argument location.
+ # Variables read:
+ # - CSIT_PERF_TRIAL_MULTIPLICITY - To create fake results of this length.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
+ # - parse_results_mrr - See definition in this file.
+ # - parse_results_ndrpdr - See definition in this file.
+ # - parse_results_passrate - See definition in this file.
+ # - parse_results_soak - See definition in this file.
+
+ set -exuo pipefail
rel_dir="$(readlink -e "${1}")" || die "Readlink failed."
- in_file="${rel_dir}/output.xml"
- out_file="${rel_dir}/results.txt"
+ in_file="${rel_dir}/output.xml" || die
+ out_file="${rel_dir}/results.txt" || die
+ echo "Parsing ${in_file} putting results into ${out_file}" || die
+ # First attempt: (B)MRR.
+ if parse_results_mrr "${in_file}" "${out_file}"; then
+ return 0
+ fi
+ # BMRR parsing failed. Attempt PDR/NDR.
+ if parse_results_ndrpdr "${in_file}" "${out_file}"; then
+ return 0
+ fi
+ # PDR/NDR parsing failed. Attempt soak.
+ if parse_results_soak "${in_file}" "${out_file}"; then
+ return 0
+ fi
+ # Soak parsing failed.
+ # Probably not a perf test at all (or a failed one),
+ # but we can still bisect by passrate.
+ parse_results_passrate "${in_file}" "${out_file}" || die
+}
- # TODO: Do we need to check echo exit code explicitly?
- echo "Parsing ${in_file} putting results into ${out_file}"
- echo "TODO: Re-use parts of PAL when they support subsample test parsing."
- pattern='Maximum Receive Rate trial results in packets'
- pattern+=' per second: .*\]</status>'
- grep -o "${pattern}" "${in_file}" | grep -o '\[.*\]' > "${out_file}" || {
- die "Some parsing grep command has failed."
- }
+function parse_results_mrr () {
+
+ # Parse MRR test message(s) into JSON-readable output.
+ #
+ # Return non-zero if parsing fails.
+ #
+ # Arguments:
+ # - ${1} - Path to (existing) input file. Required.
+ # - ${2} - Path to (overwritten if exists) output file. Required.
+ # Files read:
+ # - output.xml - The input file from argument location.
+ # Files updated:
+ # - results.txt - (Re)created, in argument location.
+ # Functions called:
+ # - die - Print to stderr and exit, defined in common.sh
+
+ set -exuo pipefail
+
+ in_file="${1}" || die "Two arguments needed."
+ out_file="${2}" || die "Two arguments needed."
+ pattern='Maximum Receive Rate trial results in .*' || die
+ pattern+=' per second: .*\]</status>' || die
+ # RC of the following line is returned.
+ grep -o "${pattern}" "${in_file}" | grep -o '\[.*\]' > "${out_file}"
}
-function prepare_build_parent () {
+function parse_results_ndrpdr () {
+
+ # Parse NDRPDR test message(s) for PDR_LOWER, into JSON-readable output.
+ #
+ # Return non-zero if parsing fails.
+ # Parse for PDR, unless environment variable says NDR.
+ #
+ # Arguments:
+ # - ${1} - Path to (existing) input file. Required.
+ # - ${2} - Path to (overwritten if exists) output file. Required.
+ # Variables read:
+ # - FDIO_CSIT_PERF_PARSE_NDR - If defined and "yes", parse for NDR, not PDR.
+ # Files read:
+ # - output.xml - The input file from argument location.
+ # Files updated:
+ # - results.txt - (Re)created, in argument location.
+ # Functions called:
+ # - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
+ in_file="${1}" || die "Two arguments needed."
+ out_file="${2}" || die "Two arguments needed."
+ if [[ "${FDIO_CSIT_PERF_PARSE_NDR:-no}" == "yes" ]]; then
+ pattern1="Arguments: [ '\\nNDR_LOWER: " || die
+ else
+ pattern1="Arguments: [ '\\nPDR_LOWER: " || die
+ fi
+ # Adapted from https://superuser.com/a/377084
+ pattern2='(?<=R: ).*(?= pps)' || die
+ if fgrep "${pattern1}" "${in_file}" | grep -Po "${pattern2}" >> "${out_file}"
+ then
+ # Add bracket https://www.shellhacks.com/sed-awk-add-end-beginning-line/
+ sed -i 's/.*/[&]/' "${out_file}"
+ # Returns nonzero if fails.
+ return "$?"
+ fi
+ # Maybe it was CPS instead of pps?
+ pattern2='(?<=R: ).*(?= CPS)' || die
+ if fgrep "${pattern1}" "${in_file}" | grep -Po "${pattern2}" >> "${out_file}"
+ then
+ # Add bracket https://www.shellhacks.com/sed-awk-add-end-beginning-line/
+ sed -i 's/.*/[&]/' "${out_file}"
+ # Returns nonzero if fails.
+ return "$?"
+ else
+ return 1
+ fi
+}
+
+
+function parse_results_passrate () {
+
+ # Create fake values for failed tests.
+ #
+ # This function always passes (or dies).
+ #
+ # A non-zero but small value is chosen for failed run, to distinguish from
+ # real nonzero perf (which are big in general) and real zero values.
+ # A medium sized value is chosen for a passed run.
+ # This way bisect can search for breakages and fixes in device tests.
+ # At least in theory, as device tests are bootstrapped too differently.
+ #
+ # The fake value is repeated according to BMRR multiplicity,
+ # because a single value can be lost in high stdev data.
+ # (And it does not hurt for single value outputs such as NDR.)
+ #
+ # TODO: Count number of tests and generate fake results for every one.
+ # Currently that would interfere with test retry logic.
+ #
+ # Arguments:
+ # - ${1} - Path to (existing) input file. Required.
+ # - ${2} - Path to (overwritten if exists) output file. Required.
# Variables read:
- # - VPP_DIR - Path to existing directory, parent to accessed directories.
- # Directories read:
- # - build-root - Existing directory with built VPP artifacts (also DPDK).
- # Directories updated:
- # - ${VPP_DIR} - A local git repository, parent commit gets checked out.
- # - build_new - Old contents removed, content of build-root copied here.
+ # - CSIT_PERF_TRIAL_MULTIPLICITY - To create fake results of this length.
+ # Files read:
+ # - output.xml - The input file from argument location.
+ # Files updated:
+ # - results.txt - (Re)created, in argument location.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
- cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "build_new" || die "Remove operation failed."
- mkdir -p "build_new" || die "Directory creation failed."
- mv "build-root"/*".deb" "build_new"/ || die "Move operation failed."
- # The previous build could have left some incompatible leftovers,
- # e.g. DPDK artifacts of different version.
- # "make -C dpdk clean" does not actually remove such .deb file.
- # Also, there usually is a copy of dpdk artifact in build-root.
- git clean -dffx "dpdk"/ "build-root"/ || die "Git clean operation failed."
- # Finally, check out the parent commit.
- git checkout HEAD~ || die "Git checkout operation failed."
- # Display any other leftovers.
- git status || die "Git status operation failed."
+ set -exuo pipefail
+
+ in_file="${1}" || die "Two arguments needed."
+ out_file="${2}" || die "Two arguments needed."
+ # The last status is the top level (global) robot status.
+ # It only passes if there were no (critical) test failures.
+ if fgrep '<status status=' "${in_file}" | tail -n 1 | fgrep '"PASS"'; then
+ fake_value="30.0" || die
+ else
+ fake_value="2.0" || die
+ fi
+ out_arr=("[") || die
+ for i in `seq "${CSIT_PERF_TRIAL_MULTIPLICITY:-1}"`; do
+ out_arr+=("${fake_value}" ",") || die
+ done
+ # The Python part uses JSON parser, the last comma has to be removed.
+ # Requires Bash 4.3 https://stackoverflow.com/a/36978740
+ out_arr[-1]="]" || die
+ # TODO: Is it possible to avoid space separation by manipulating IFS?
+ echo "${out_arr[@]}" > "${out_file}" || die
}
-function prepare_test_new () {
+function parse_results_soak () {
+
+ # Parse soak test message(s) for lower bound, into JSON-readable output.
+ #
+ # Return non-zero if parsing fails.
+ #
+ # Arguments:
+ # - ${1} - Path to (existing) input file. Required.
+ # - ${2} - Path to (overwritten if exists) output file. Required.
+ # Files read:
+ # - output.xml - The input file from argument location.
+ # Files updated:
+ # - results.txt - (Re)created, in argument location.
+ # Functions called:
+ # - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
+ in_file="${1}" || die "Two arguments needed."
+ out_file="${2}" || die "Two arguments needed."
+ pattern1='PLRsearch lower bound: .*, .*<' || die
+ # Adapted from https://superuser.com/a/377084
+ pattern2='(?<=: ).*(?= pps)' || die
+ if grep "${pattern1}" "${in_file}" | grep -Po "${pattern2}" >> "${out_file}"
+ then
+ # Add bracket https://www.shellhacks.com/sed-awk-add-end-beginning-line/
+ sed -i 's/.*/[&]/' "${out_file}"
+ # Returns nonzero if fails.
+ else
+ return 1
+ fi
+}
+
+
+function select_build () {
+
+ # Arguments:
+ # - ${1} - Path to directory to copy VPP artifacts from. Required.
# Variables read:
- # - VPP_DIR - Path to existing directory, parent of accessed directories.
# - DOWNLOAD_DIR - Path to directory where Robot takes builds to test from.
- # - ARCHIVE_DIR - Path to where robot result files are created in.
+ # - VPP_DIR - Path to existing directory, root for relative paths.
# Directories read:
- # - build-root - Existing directory with built VPP artifacts (also DPDK).
+ # - ${1} - Existing directory with built new VPP artifacts (and DPDK).
# Directories updated:
- # - build_parent - Old directory removed, build-root moved to become this.
- # - ${DOWNLOAD_DIR} - Old content removed, files from build_new copied here.
- # - csit_new - Currently a symlink to to archive robot results on failure.
+ # - ${DOWNLOAD_DIR} - Old content removed, .deb files from ${1} copied here.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
- cd "${VPP_DIR}" || die "Change directory operationf failed."
- rm -rf "build_parent" "csit_new" "${DOWNLOAD_DIR}"/* || die "Remove failed."
- mkdir -p "build_parent" || die "Directory creation operation failed."
- mv "build-root"/*".deb" "build_parent"/ || die "Move operation failed."
- cp "build_new"/*".deb" "${DOWNLOAD_DIR}" || die "Copy operation failed."
- # Create symlinks so that if job fails on robot, results can be archived.
- ln -s "${ARCHIVE_DIR}" "csit_new" || die "Symbolic link creation failed."
+ set -exuo pipefail
+
+ cd "${VPP_DIR}" || die "Change directory operation failed."
+ source_dir="$(readlink -e "$1")"
+ rm -rf "${DOWNLOAD_DIR}"/* || die "Cleanup of download dir failed."
+ cp "${source_dir}"/*".deb" "${DOWNLOAD_DIR}" || die "Copy operation failed."
+ # TODO: Is there a nice way to create symlinks,
+ # so that if job fails on robot, results can be archived?
}
-function prepare_test_parent () {
+function set_aside_commit_build_artifacts () {
+
+ # Function is copying VPP built artifacts from actual checkout commit for
+ # further use and clean git.
+ # Variables read:
+ # - VPP_DIR - Path to existing directory, parent to accessed directories.
+ # Directories read:
+ # - build-root - Existing directory with built VPP artifacts (also DPDK).
+ # Directories updated:
+ # - ${VPP_DIR} - A local git repository, parent commit gets checked out.
+ # - build_current - Old contents removed, content of build-root copied here.
+ # Functions called:
+ # - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
+ cd "${VPP_DIR}" || die "Change directory operation failed."
+ rm -rf "build_current" || die "Remove operation failed."
+ mkdir -p "build_current" || die "Directory creation failed."
+ mv "build-root"/*".deb" "build_current"/ || die "Move operation failed."
+ # The previous build could have left some incompatible leftovers,
+ # e.g. DPDK artifacts of different version (in build/external).
+ # Also, there usually is a copy of dpdk artifact in build-root.
+ git clean -dffx "build"/ "build-root"/ || die "Git clean operation failed."
+ # Finally, check out the parent commit.
+ git checkout HEAD~ || die "Git checkout operation failed."
+ # Display any other leftovers.
+ git status || die "Git status operation failed."
+}
+
+
+function set_aside_parent_build_artifacts () {
+
+ # Function is copying VPP built artifacts from parent checkout commit for
+ # further use. Checkout to parent is not part of this function.
# Variables read:
# - VPP_DIR - Path to existing directory, parent of accessed directories.
- # - CSIT_DIR - Path to existing root of local CSIT git repository.
- # - ARCHIVE_DIR and DOWNLOAD_DIR - Paths to directories to update.
# Directories read:
- # - build_parent - Build artifacts (to test next) are copied from here.
+ # - build-root - Existing directory with built VPP artifacts (also DPDK).
# Directories updated:
- # - csit_new - Deleted, then recreated and latest robot results copied here.
- # - ${CSIT_DIR} - Subjected to git reset and git clean.
- # - ${ARCHIVE_DIR} - Created if not existing (if deleted by git clean).
- # - ${DOWNLOAD_DIR} - Created after git clean, parent build copied here.
- # - csit_parent - Currently a symlink to csit/ to archive robot results.
+ # - build_parent - Old directory removed, build-root debs moved here.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
- # - parse_bmrr_results - See definition in this file.
- cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "csit_new" "csit_parent" || die "Remove operation failed."
- mkdir -p "csit_new" || die "Create directory operation failed."
- for filename in "output.xml" "log.html" "report.html"; do
- mv "${ARCHIVE_DIR}/${filename}" "csit_new/${filename}" || {
- die "Move operation of '${filename}' failed."
- }
- done
- parse_bmrr_results "csit_new" || {
- die "The function should have died on error."
- }
-
- pushd "${CSIT_DIR}" || die "Change directory operation failed."
- git reset --hard HEAD || die "Git reset operation failed."
- git clean -dffx || die "Git clean operation failed."
- popd || die "Change directory operation failed."
- mkdir -p "${ARCHIVE_DIR}" "${DOWNLOAD_DIR}" || die "Dir creation failed."
+ set -exuo pipefail
- cp "build_parent"/*".deb" "${DOWNLOAD_DIR}"/ || die "Copy failed."
- # Create symlinks so that if job fails on robot, results can be archived.
- ln -s "${ARCHIVE_DIR}" "csit_parent" || die "Symlink creation failed."
+ cd "${VPP_DIR}" || die "Change directory operation failed."
+ rm -rf "build_parent" || die "Remove failed."
+ mkdir -p "build_parent" || die "Directory creation operation failed."
+ mv "build-root"/*".deb" "build_parent"/ || die "Move operation failed."
}
function set_perpatch_dut () {
- set -exuo pipefail
-
# Variables set:
# - DUT - CSIT test/ subdirectory containing suites to execute.
# TODO: Detect DUT from job name, when we have more than just VPP perpatch.
+ set -exuo pipefail
+
DUT="vpp"
}
function set_perpatch_vpp_dir () {
- set -exuo pipefail
-
# Variables read:
# - CSIT_DIR - Path to existing root of local CSIT git repository.
# Variables set:
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
+ set -exuo pipefail
+
# In perpatch, CSIT is cloned inside VPP clone.
VPP_DIR="$(readlink -e "${CSIT_DIR}/..")" || die "Readlink failed."
}