#!/bin/bash
-# Copyright (c) 2016 Cisco and/or its affiliates.
+# Copyright (c) 2018 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
# See the License for the specific language governing permissions and
# limitations under the License.
-set -x
+set -xo pipefail
# Space separated list of available testbeds, described by topology files
TOPOLOGIES="topologies/available/lf_testbed1.yaml \
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-VPP_STABLE_VER=$(cat ${SCRIPT_DIR}/VPP_STABLE_VER)
-VPP_REPO_URL=$(cat ${SCRIPT_DIR}/VPP_REPO_URL)
-
# Reservation dir
RESERVATION_DIR="/tmp/reservation_dir"
INSTALLATION_DIR="/tmp/install_dir"
-PYBOT_ARGS="-W 150 --noncritical PERFTEST --exclude SKIP_PATCH"
-
-ARCHIVE_ARTIFACTS=(log.html output.xml report.html output_perf_data.xml)
+JOB_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+LOG_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+JOB_ARCHIVE_DIR="archive"
+LOG_ARCHIVE_DIR="$WORKSPACE/archives"
+mkdir -p ${JOB_ARCHIVE_DIR}
+mkdir -p ${LOG_ARCHIVE_DIR}
# If we run this script from CSIT jobs we want to use stable vpp version
if [[ ${JOB_NAME} == csit-* ]] ;
then
mkdir vpp_download
cd vpp_download
- #download vpp build from nexus and set VPP_DEBS variable
- wget -q "${VPP_REPO_URL}/vpp/${VPP_STABLE_VER}/vpp-${VPP_STABLE_VER}.deb" || exit
- wget -q "${VPP_REPO_URL}/vpp-dbg/${VPP_STABLE_VER}/vpp-dbg-${VPP_STABLE_VER}.deb" || exit
- wget -q "${VPP_REPO_URL}/vpp-dev/${VPP_STABLE_VER}/vpp-dev-${VPP_STABLE_VER}.deb" || exit
- wget -q "${VPP_REPO_URL}/vpp-dpdk-dev/${VPP_STABLE_VER}/vpp-dpdk-dev-${VPP_STABLE_VER}.deb" || exit
- wget -q "${VPP_REPO_URL}/vpp-dpdk-dkms/${VPP_STABLE_VER}/vpp-dpdk-dkms-${VPP_STABLE_VER}.deb" || exit
- wget -q "${VPP_REPO_URL}/vpp-lib/${VPP_STABLE_VER}/vpp-lib-${VPP_STABLE_VER}.deb" || exit
- VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
- cd ..
+
+ if [[ ${TEST_TAG} == *DAILY ]] || \
+ [[ ${TEST_TAG} == *WEEKLY ]];
+ then
+ # Download the latest VPP build .deb install packages
+ echo Downloading VPP packages...
+ bash ${SCRIPT_DIR}/resources/tools/scripts/download_install_vpp_pkgs.sh --skip-install
+
+ VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
+        # Extract the VPP version from the first downloaded vpp package filename
+ VPP_STABLE_VER="$( expr match $(ls *.deb | head -n 1) 'vpp-\(.*\)-deb.deb' )"
+ else
+ DPDK_STABLE_VER=$(cat ${SCRIPT_DIR}/DPDK_STABLE_VER)_amd64
+ VPP_REPO_URL=$(cat ${SCRIPT_DIR}/VPP_REPO_URL_UBUNTU)
+ VPP_STABLE_VER=$(cat ${SCRIPT_DIR}/VPP_STABLE_VER_UBUNTU)
+ VPP_CLASSIFIER="-deb"
+ # Download vpp build from nexus and set VPP_DEBS variable
+ wget -q "${VPP_REPO_URL}/vpp/${VPP_STABLE_VER}/vpp-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
+ wget -q "${VPP_REPO_URL}/vpp-dbg/${VPP_STABLE_VER}/vpp-dbg-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
+ wget -q "${VPP_REPO_URL}/vpp-dev/${VPP_STABLE_VER}/vpp-dev-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
+ wget -q "${VPP_REPO_URL}/vpp-dpdk-dkms/${DPDK_STABLE_VER}/vpp-dpdk-dkms-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
+ wget -q "${VPP_REPO_URL}/vpp-lib/${VPP_STABLE_VER}/vpp-lib-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
+ wget -q "${VPP_REPO_URL}/vpp-plugins/${VPP_STABLE_VER}/vpp-plugins-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
+ VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
+ fi
+
+ cd ${SCRIPT_DIR}
# If we run this script from vpp project we want to use local build
elif [[ ${JOB_NAME} == vpp-* ]] ;
then
- #use local packages provided as argument list
+ # Use local packages provided as argument list
# Jenkins VPP deb paths (convert to full path)
VPP_DEBS="$( readlink -f $@ | tr '\n' ' ' )"
+    # Extract the VPP version from the vpp package filename
+ VPP_STABLE_VER="$( expr match $1 'vpp-\(.*\)-deb.deb' )"
else
echo "Unable to identify job type based on JOB_NAME variable: ${JOB_NAME}"
exit 1
while :; do
for TOPOLOGY in ${TOPOLOGIES};
do
- python ${SCRIPT_DIR}/resources/tools/topo_reservation.py -t ${TOPOLOGY}
+ python ${SCRIPT_DIR}/resources/tools/scripts/topo_reservation.py -t ${TOPOLOGY}
if [ $? -eq 0 ]; then
WORKING_TOPOLOGY=${TOPOLOGY}
echo "Reserved: ${WORKING_TOPOLOGY}"
done
function cancel_all {
- python ${SCRIPT_DIR}/resources/tools/topo_installation.py -c -d ${INSTALLATION_DIR} -t $1
- python ${SCRIPT_DIR}/resources/tools/topo_reservation.py -c -t $1
+ python ${SCRIPT_DIR}/resources/tools/scripts/topo_installation.py -c -d ${INSTALLATION_DIR} -t $1
+ python ${SCRIPT_DIR}/resources/tools/scripts/topo_reservation.py -c -t $1
}
# On script exit we cancel the reservation and installation and delete all vpp
# packages
trap "cancel_all ${WORKING_TOPOLOGY}" EXIT
-python ${SCRIPT_DIR}/resources/tools/topo_installation.py -t ${WORKING_TOPOLOGY} \
- -d ${INSTALLATION_DIR} \
- -p ${VPP_DEBS}
+python ${SCRIPT_DIR}/resources/tools/scripts/topo_installation.py \
+ -t ${WORKING_TOPOLOGY} -d ${INSTALLATION_DIR} -p ${VPP_DEBS}
if [ $? -eq 0 ]; then
echo "VPP Installed on hosts from: ${WORKING_TOPOLOGY}"
else
exit 1
fi
+# Identify the DUT (device under test) type from the job name
+if [[ ${JOB_NAME} == *hc2vpp* ]] ;
+then
+ DUT="hc2vpp"
+elif [[ ${JOB_NAME} == *vpp* ]] ;
+then
+ DUT="vpp"
+elif [[ ${JOB_NAME} == *ligato* ]] ;
+then
+ DUT="kubernetes"
+elif [[ ${JOB_NAME} == *dpdk* ]] ;
+then
+ DUT="dpdk"
+else
+ echo "Unable to identify dut type based on JOB_NAME variable: ${JOB_NAME}"
+ exit 1
+fi
+
+PYBOT_ARGS="--consolewidth 120 --loglevel TRACE --variable TOPOLOGY_PATH:${WORKING_TOPOLOGY} --suite tests.${DUT}.perf"
+
case "$TEST_TAG" in
- # run specific performance tests based on jenkins job type variable
- PERFTEST_LONG )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf" \
- -i perftest_long \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_SHORT )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf" \
- -i perftest_short \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_LONG_BRIDGE )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf.Long_Bridge_Domain*" \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_LONG_IPV4 )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf.Long_IPv4*" \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_LONG_IPV6 )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf.Long_IPv6*" \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_LONG_XCONNECT )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf.Long_Xconnect*" \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_LONG_XCONNECT_DOT1Q )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf.Long_Xconnect_Dot1q*" \
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_NDR )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf" -i NDR \
- tests/
- RETURN_STATUS=$(echo $?)
- ;;
- PERFTEST_PDR )
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf" -i PDR \
- tests/
- RETURN_STATUS=$(echo $?)
+    # Select specific performance tests based on the Jenkins job type variable
+ PERFTEST_DAILY )
+ TAGS=('ndrdiscANDnic_intel-x520-da2AND1t1c'
+ 'ndrdiscANDnic_intel-x520-da2AND2t2c'
+ 'ndrpdrANDnic_intel-x520-da2AND1t1c'
+ 'ndrpdrANDnic_intel-x520-da2AND2t2c'
+ 'ndrdiscAND1t1cANDipsec'
+ 'ndrdiscAND2t2cANDipsec')
+ ;;
+ PERFTEST_SEMI_WEEKLY )
+ TAGS=('ndrdiscANDnic_intel-x710AND1t1c'
+ 'ndrdiscANDnic_intel-x710AND2t2c'
+ 'ndrdiscANDnic_intel-xl710AND1t1c'
+ 'ndrdiscANDnic_intel-xl710AND2t2c')
+ ;;
+ PERFTEST_MRR_DAILY )
+ TAGS=('mrrAND64bAND1t1c'
+ 'mrrAND64bAND2t2c'
+ 'mrrAND64bAND4t4c'
+ 'mrrAND78bAND1t1c'
+ 'mrrAND78bAND2t2c'
+ 'mrrAND78bAND4t4c'
+ 'mrrANDimixAND1t1cANDvhost'
+ 'mrrANDimixAND2t2cANDvhost'
+ 'mrrANDimixAND4t4cANDvhost'
+ 'mrrANDimixAND1t1cANDmemif'
+ 'mrrANDimixAND2t2cANDmemif'
+ 'mrrANDimixAND4t4cANDmemif')
+ ;;
+ VERIFY-PERF-NDRDISC )
+ TAGS=('ndrdiscAND1t1c'
+ 'ndrdiscAND2t2c')
+ ;;
+ VERIFY-PERF-PDRDISC )
+ TAGS=('pdrdiscAND1t1c'
+ 'pdrdiscAND2t2c')
+ ;;
+ VERIFY-PERF-MRR )
+ TAGS=('mrrAND1t1c'
+ 'mrrAND2t2c')
+ ;;
+ VERIFY-PERF-IP4 )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip4base'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDip4fwdANDfib_2m')
+ ;;
+ VERIFY-PERF-IP6 )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip6base'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDip6fwdANDfib_2m')
+ ;;
+ VERIFY-PERF-L2 )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDl2xcbase'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDl2bdbase')
+ ;;
+ VERIFY-PERF-LISP )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDlisp')
+ ;;
+ VERIFY-PERF-VXLAN )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvxlan')
+ ;;
+ VERIFY-PERF-VHOST )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvhost')
+ ;;
+ VERIFY-PERF-MEMIF )
+ TAGS=('pdrdiscANDnic_intel-x520-da2AND1t1cANDmemif'
+ 'pdrdiscANDnic_intel-x520-da2AND2t2cANDmemif'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDmemif'
+ 'mrrANDnic_intel-x520-da2AND2t2cANDmemif')
+ ;;
+ VERIFY-PERF-IPSECHW )
+ TAGS=('pdrdiscANDnic_intel-xl710AND1t1cANDipsechw'
+ 'pdrdiscANDnic_intel-xl710AND2t2cANDipsechw'
+ 'mrrANDnic_intel-xl710AND1t1cANDipsechw'
+ 'mrrANDnic_intel-xl710AND2t2cANDipsechw')
+ ;;
+ VERIFY-PERF-SRV6 )
+ TAGS=('mrrANDsrv6AND1t1c'
+ 'mrrANDsrv6AND2t2c')
+ ;;
+ VPP-VERIFY-PERF-IP4 )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip4base'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDip4fwdANDfib_2m')
+ ;;
+ VPP-VERIFY-PERF-IP6 )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip6base'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDip6fwdANDfib_2m')
+ ;;
+ VPP-VERIFY-PERF-L2 )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDl2xcbase'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDl2bdbase'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDdot1q')
+ ;;
+ VPP-VERIFY-PERF-LISP )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDlisp')
+ ;;
+ VPP-VERIFY-PERF-VXLAN )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvxlan')
+ ;;
+ VPP-VERIFY-PERF-VHOST )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvhost')
+ ;;
+ VPP-VERIFY-PERF-MEMIF )
+ TAGS=('pdrdiscANDnic_intel-x520-da2AND1t1cANDmemif'
+ 'pdrdiscANDnic_intel-x520-da2AND2t2cANDmemif'
+ 'mrrANDnic_intel-x520-da2AND1t1cANDmemif'
+ 'mrrANDnic_intel-x520-da2AND2t2cANDmemif')
+ ;;
+ VPP-VERIFY-PERF-ACL )
+ TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDacl'
+ 'mrrANDnic_intel-x520-da2AND2t2cANDacl')
+ ;;
+ VPP-VERIFY-PERF-IPSECHW )
+ TAGS=('pdrdiscANDnic_intel-xl710AND1t1cANDipsechw'
+ 'pdrdiscANDnic_intel-xl710AND2t2cANDipsechw'
+ 'mrrANDnic_intel-xl710AND1t1cANDipsechw'
+ 'mrrANDnic_intel-xl710AND2t2cANDipsechw')
+ ;;
+ VPP-VERIFY-PERF-SRV6 )
+ TAGS=('mrrANDsrv6AND1t1c'
+ 'mrrANDsrv6AND2t2c')
;;
* )
- # run full performance test suite and exit on fail
- pybot ${PYBOT_ARGS} \
- -L TRACE \
- -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
- -s "tests.perf" \
- tests/
- RETURN_STATUS=$(echo $?)
+ TAGS=('perftest')
esac
-# Pybot output post-processing
-echo Post-processing test data...
+# Concatenate TAG selections; repeated --include options are OR-ed by Robot Framework
+printf -v INCLUDES " --include %s " "${TAGS[@]}"
-python ${SCRIPT_DIR}/resources/tools/robot_output_parser.py \
- -i ${SCRIPT_DIR}/output.xml \
- -o ${SCRIPT_DIR}/output_perf_data.xml \
- -v ${VPP_STABLE_VER}
-if [ ! $? -eq 0 ]; then
- echo "Parsing ${SCRIPT_DIR}/output.xml failed"
-fi
+# Execute the test
+pybot ${PYBOT_ARGS}${INCLUDES} tests/
+RETURN_STATUS=$(echo $?)
-# Archive artifacts
-mkdir archive
-for i in ${ARCHIVE_ARTIFACTS[@]}; do
- cp $( readlink -f ${i} | tr '\n' ' ' ) archive/
+# Archive JOB artifacts in jenkins
+for i in ${JOB_ARCHIVE_ARTIFACTS[@]}; do
+ cp $( readlink -f ${i} | tr '\n' ' ' ) ${JOB_ARCHIVE_DIR}/
+done
+# Archive JOB artifacts to logs.fd.io
+for i in ${LOG_ARCHIVE_ARTIFACTS[@]}; do
+ cp $( readlink -f ${i} | tr '\n' ' ' ) ${LOG_ARCHIVE_DIR}/
done
-
-echo Post-processing finished.
exit ${RETURN_STATUS}