X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=bootstrap-verify-perf.sh;h=a944ea3653b050cc0514302c349018719294d7e4;hp=e08a9234851d23e92a2c83177223d8b6703bf672;hb=6928c2b1620e5d020a19e944f416df6a1f4b85ad;hpb=e507c9603efd1a9377ff835d108e51ddb9a0b5c3

diff --git a/bootstrap-verify-perf.sh b/bootstrap-verify-perf.sh
index e08a923485..a944ea3653 100755
--- a/bootstrap-verify-perf.sh
+++ b/bootstrap-verify-perf.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright (c) 2016 Cisco and/or its affiliates.
+# Copyright (c) 2018 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -x
+set -xo pipefail
 
 # Space separated list of available testbeds, described by topology files
 TOPOLOGIES="topologies/available/lf_testbed1.yaml \
@@ -25,9 +25,12 @@ SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 RESERVATION_DIR="/tmp/reservation_dir"
 INSTALLATION_DIR="/tmp/install_dir"
 
-PYBOT_ARGS="-W 150"
-
-ARCHIVE_ARTIFACTS=(log.html output.xml report.html output_perf_data.xml)
+JOB_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+LOG_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+JOB_ARCHIVE_DIR="archive"
+LOG_ARCHIVE_DIR="$WORKSPACE/archives"
+mkdir -p ${JOB_ARCHIVE_DIR}
+mkdir -p ${LOG_ARCHIVE_DIR}
 
 # If we run this script from CSIT jobs we want to use stable vpp version
 if [[ ${JOB_NAME} == csit-* ]] ;
@@ -35,32 +38,32 @@ then
     mkdir vpp_download
     cd vpp_download
 
-    if [[ ${TEST_TAG} == "PERFTEST_NIGHTLY" ]] ;
+    if [[ ${TEST_TAG} == *DAILY ]] || \
+       [[ ${TEST_TAG} == *WEEKLY ]];
     then
         # Download the latest VPP build .deb install packages
         echo Downloading VPP packages...
-        bash ${SCRIPT_DIR}/resources/tools/download_install_vpp_pkgs.sh --skip-install
+        bash ${SCRIPT_DIR}/resources/tools/scripts/download_install_vpp_pkgs.sh --skip-install
 
         VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
         # Take vpp package and get the vpp version
         VPP_STABLE_VER="$( expr match $(ls *.deb | head -n 1) 'vpp-\(.*\)-deb.deb' )"
     else
-        DPDK_STABLE_VER=$(cat ${SCRIPT_DIR}/DPDK_STABLE_VER)
-        VPP_REPO_URL=$(cat ${SCRIPT_DIR}/VPP_REPO_URL)
-        VPP_STABLE_VER=$(cat ${SCRIPT_DIR}/VPP_STABLE_VER)
+        DPDK_STABLE_VER=$(cat ${SCRIPT_DIR}/DPDK_STABLE_VER)_amd64
+        VPP_REPO_URL=$(cat ${SCRIPT_DIR}/VPP_REPO_URL_UBUNTU)
+        VPP_STABLE_VER=$(cat ${SCRIPT_DIR}/VPP_STABLE_VER_UBUNTU)
         VPP_CLASSIFIER="-deb"
         # Download vpp build from nexus and set VPP_DEBS variable
         wget -q "${VPP_REPO_URL}/vpp/${VPP_STABLE_VER}/vpp-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
         wget -q "${VPP_REPO_URL}/vpp-dbg/${VPP_STABLE_VER}/vpp-dbg-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
         wget -q "${VPP_REPO_URL}/vpp-dev/${VPP_STABLE_VER}/vpp-dev-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        wget -q "${VPP_REPO_URL}/vpp-dpdk-dev/${DPDK_STABLE_VER}/vpp-dpdk-dev-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
         wget -q "${VPP_REPO_URL}/vpp-dpdk-dkms/${DPDK_STABLE_VER}/vpp-dpdk-dkms-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
         wget -q "${VPP_REPO_URL}/vpp-lib/${VPP_STABLE_VER}/vpp-lib-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
         wget -q "${VPP_REPO_URL}/vpp-plugins/${VPP_STABLE_VER}/vpp-plugins-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
         VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
     fi
-    cd ..
+    cd ${SCRIPT_DIR}
 
 # If we run this script from vpp project we want to use local build
 elif [[ ${JOB_NAME} == vpp-* ]] ;
@@ -70,14 +73,6 @@ then
     VPP_DEBS="$( readlink -f $@ | tr '\n' ' ' )"
     # Take vpp package and get the vpp version
     VPP_STABLE_VER="$( expr match $1 'vpp-\(.*\)-deb.deb' )"
-    # DPDK is not part of the vpp build
-    DPDK_STABLE_VER=$(cat ${SCRIPT_DIR}/DPDK_STABLE_VER)
-    VPP_REPO_URL=$(cat ${SCRIPT_DIR}/VPP_REPO_URL)
-    VPP_CLASSIFIER="-deb"
-    wget -q "${VPP_REPO_URL}/vpp-dpdk-dev/${DPDK_STABLE_VER}/vpp-dpdk-dev-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-    wget -q "${VPP_REPO_URL}/vpp-dpdk-dkms/${DPDK_STABLE_VER}/vpp-dpdk-dkms-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-    VPP_DEBS+=($( readlink -f vpp-dpdk-dev-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb ))
-    VPP_DEBS+=($( readlink -f vpp-dpdk-dkms-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb ))
 else
     echo "Unable to identify job type based on JOB_NAME variable: ${JOB_NAME}"
     exit 1
@@ -99,7 +94,7 @@ pip install -r requirements.txt
 while :; do
     for TOPOLOGY in ${TOPOLOGIES};
     do
-        python ${SCRIPT_DIR}/resources/tools/topo_reservation.py -t ${TOPOLOGY}
+        python ${SCRIPT_DIR}/resources/tools/scripts/topo_reservation.py -t ${TOPOLOGY}
         if [ $? -eq 0 ]; then
             WORKING_TOPOLOGY=${TOPOLOGY}
             echo "Reserved: ${WORKING_TOPOLOGY}"
@@ -119,17 +114,16 @@ while :; do
 done
 
 function cancel_all {
-    python ${SCRIPT_DIR}/resources/tools/topo_installation.py -c -d ${INSTALLATION_DIR} -t $1
-    python ${SCRIPT_DIR}/resources/tools/topo_reservation.py -c -t $1
+    python ${SCRIPT_DIR}/resources/tools/scripts/topo_installation.py -c -d ${INSTALLATION_DIR} -t $1
+    python ${SCRIPT_DIR}/resources/tools/scripts/topo_reservation.py -c -t $1
 }
 
 # On script exit we cancel the reservation and installation and delete all vpp
 # packages
 trap "cancel_all ${WORKING_TOPOLOGY}" EXIT
 
-python ${SCRIPT_DIR}/resources/tools/topo_installation.py -t ${WORKING_TOPOLOGY} \
-    -d ${INSTALLATION_DIR} \
-    -p ${VPP_DEBS}
+python ${SCRIPT_DIR}/resources/tools/scripts/topo_installation.py \
+    -t ${WORKING_TOPOLOGY} -d ${INSTALLATION_DIR} -p ${VPP_DEBS}
 if [ $? -eq 0 ]; then
     echo "VPP Installed on hosts from: ${WORKING_TOPOLOGY}"
 else
@@ -137,63 +131,170 @@ else
     exit 1
 fi
 
+# Based on job we will identify DUT
+if [[ ${JOB_NAME} == *hc2vpp* ]] ;
+then
+    DUT="hc2vpp"
+elif [[ ${JOB_NAME} == *vpp* ]] ;
+then
+    DUT="vpp"
+elif [[ ${JOB_NAME} == *ligato* ]] ;
+then
+    DUT="kubernetes"
+elif [[ ${JOB_NAME} == *dpdk* ]] ;
+then
+    DUT="dpdk"
+else
+    echo "Unable to identify dut type based on JOB_NAME variable: ${JOB_NAME}"
+    exit 1
+fi
+
+PYBOT_ARGS="--consolewidth 120 --loglevel TRACE --variable TOPOLOGY_PATH:${WORKING_TOPOLOGY} --suite tests.${DUT}.perf"
+
 case "$TEST_TAG" in
-    # run specific performance tests based on jenkins job type variable
-    PERFTEST_LONG )
-        pybot ${PYBOT_ARGS} \
-              -L TRACE \
-              -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-              -s "tests.perf" \
-              --exclude SKIP_PATCH \
-              -i NDRPDRDISC \
-              tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    PERFTEST_SHORT )
-        pybot ${PYBOT_ARGS} \
-              -L TRACE \
-              -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-              -s "tests.perf" \
-              -i NDRCHK \
-              tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    PERFTEST_NIGHTLY )
-        #run all available tests
-        pybot ${PYBOT_ARGS} \
-              -L TRACE \
-              -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-              -s "tests.perf" \
-              tests/
-        RETURN_STATUS=$(echo $?)
+    # select specific performance tests based on jenkins job type variable
+    PERFTEST_DAILY )
+        TAGS=('ndrdiscANDnic_intel-x520-da2AND1t1c'
+              'ndrdiscANDnic_intel-x520-da2AND2t2c'
+              'ndrpdrANDnic_intel-x520-da2AND1t1c'
+              'ndrpdrANDnic_intel-x520-da2AND2t2c'
+              'ndrdiscAND1t1cANDipsec'
+              'ndrdiscAND2t2cANDipsec')
+        ;;
+    PERFTEST_SEMI_WEEKLY )
+        TAGS=('ndrdiscANDnic_intel-x710AND1t1c'
+              'ndrdiscANDnic_intel-x710AND2t2c'
+              'ndrdiscANDnic_intel-xl710AND1t1c'
+              'ndrdiscANDnic_intel-xl710AND2t2c')
+        ;;
+    PERFTEST_MRR_DAILY )
+        TAGS=('mrrAND64bAND1t1c'
+              'mrrAND64bAND2t2c'
+              'mrrAND64bAND4t4c'
+              'mrrAND78bAND1t1c'
+              'mrrAND78bAND2t2c'
+              'mrrAND78bAND4t4c'
+              'mrrANDimixAND1t1cANDvhost'
+              'mrrANDimixAND2t2cANDvhost'
+              'mrrANDimixAND4t4cANDvhost'
+              'mrrANDimixAND1t1cANDmemif'
+              'mrrANDimixAND2t2cANDmemif'
+              'mrrANDimixAND4t4cANDmemif')
+        ;;
+    VERIFY-PERF-NDRDISC )
+        TAGS=('ndrdiscAND1t1c'
+              'ndrdiscAND2t2c')
+        ;;
+    VERIFY-PERF-PDRDISC )
+        TAGS=('pdrdiscAND1t1c'
+              'pdrdiscAND2t2c')
+        ;;
+    VERIFY-PERF-MRR )
+        TAGS=('mrrAND1t1c'
+              'mrrAND2t2c')
+        ;;
+    VERIFY-PERF-IP4 )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip4base'
+              'mrrANDnic_intel-x520-da2AND1t1cANDip4fwdANDfib_2m')
+        ;;
+    VERIFY-PERF-IP6 )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip6base'
+              'mrrANDnic_intel-x520-da2AND1t1cANDip6fwdANDfib_2m')
+        ;;
+    VERIFY-PERF-L2 )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDl2xcbase'
+              'mrrANDnic_intel-x520-da2AND1t1cANDl2bdbase')
+        ETAGS=('lbond_dpdk')
+        ;;
+    VERIFY-PERF-LISP )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDlisp')
+        ;;
+    VERIFY-PERF-VXLAN )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvxlan')
+        ;;
+    VERIFY-PERF-VHOST )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvhost')
+        ETAGS=('lbond_dpdk')
+        ;;
+    VERIFY-PERF-MEMIF )
+        TAGS=('pdrdiscANDnic_intel-x520-da2AND1t1cANDmemif'
+              'pdrdiscANDnic_intel-x520-da2AND2t2cANDmemif'
+              'mrrANDnic_intel-x520-da2AND1t1cANDmemif'
+              'mrrANDnic_intel-x520-da2AND2t2cANDmemif')
+        ;;
+    VERIFY-PERF-IPSECHW )
+        TAGS=('pdrdiscANDnic_intel-xl710AND1t1cANDipsechw'
+              'pdrdiscANDnic_intel-xl710AND2t2cANDipsechw'
+              'mrrANDnic_intel-xl710AND1t1cANDipsechw'
+              'mrrANDnic_intel-xl710AND2t2cANDipsechw')
+        ;;
+    VERIFY-PERF-SRV6 )
+        TAGS=('mrrANDsrv6AND1t1c'
+              'mrrANDsrv6AND2t2c')
+        ;;
+    VPP-VERIFY-PERF-IP4 )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip4base'
+              'mrrANDnic_intel-x520-da2AND1t1cANDip4fwdANDfib_2m')
+        ;;
+    VPP-VERIFY-PERF-IP6 )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDip6base'
+              'mrrANDnic_intel-x520-da2AND1t1cANDip6fwdANDfib_2m')
+        ;;
+    VPP-VERIFY-PERF-L2 )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDl2xcbase'
+              'mrrANDnic_intel-x520-da2AND1t1cANDl2bdbase'
+              'mrrANDnic_intel-x520-da2AND1t1cANDdot1q')
+        ETAGS=('lbond_dpdk')
+        ;;
+    VPP-VERIFY-PERF-LISP )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDlisp')
+        ;;
+    VPP-VERIFY-PERF-VXLAN )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvxlan')
+        ;;
+    VPP-VERIFY-PERF-VHOST )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDvhost')
+        ETAGS=('lbond_dpdk')
+        ;;
+    VPP-VERIFY-PERF-MEMIF )
+        TAGS=('pdrdiscANDnic_intel-x520-da2AND1t1cANDmemif'
+              'pdrdiscANDnic_intel-x520-da2AND2t2cANDmemif'
+              'mrrANDnic_intel-x520-da2AND1t1cANDmemif'
+              'mrrANDnic_intel-x520-da2AND2t2cANDmemif')
+        ;;
+    VPP-VERIFY-PERF-ACL )
+        TAGS=('mrrANDnic_intel-x520-da2AND1t1cANDacl'
+              'mrrANDnic_intel-x520-da2AND2t2cANDacl')
+        ;;
+    VPP-VERIFY-PERF-IPSECHW )
+        TAGS=('pdrdiscANDnic_intel-xl710AND1t1cANDipsechw'
+              'pdrdiscANDnic_intel-xl710AND2t2cANDipsechw'
+              'mrrANDnic_intel-xl710AND1t1cANDipsechw'
+              'mrrANDnic_intel-xl710AND2t2cANDipsechw')
+        ;;
+    VPP-VERIFY-PERF-SRV6 )
+        TAGS=('mrrANDsrv6AND1t1c'
+              'mrrANDsrv6AND2t2c')
         ;;
     * )
-        # run full performance test suite and exit on fail
-        pybot ${PYBOT_ARGS} \
-              -L TRACE \
-              -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-              -s "tests.perf" \
-              tests/
-        RETURN_STATUS=$(echo $?)
+        TAGS=('perftest')
 esac
 
-# Pybot output post-processing
-echo Post-processing test data...
+# Catenate TAG selections by 'OR'
+if [[ ! -z "$TAGS" ]]; then printf -v INCLUDES " --include %s " "${TAGS[@]}"; fi
+if [[ ! -z "$ETAGS" ]]; then printf -v EXCLUDES " --exclude %s " "${ETAGS[@]}"; fi
 
-python ${SCRIPT_DIR}/resources/tools/robot_output_parser.py \
-       -i ${SCRIPT_DIR}/output.xml \
-       -o ${SCRIPT_DIR}/output_perf_data.xml \
-       -v ${VPP_STABLE_VER}
-if [ ! $? -eq 0 ]; then
-    echo "Parsing ${SCRIPT_DIR}/output.xml failed"
-fi
+# Execute the test
+pybot ${PYBOT_ARGS}${INCLUDES}${EXCLUDES} tests/
+RETURN_STATUS=$(echo $?)
 
-# Archive artifacts
-mkdir archive
-for i in ${ARCHIVE_ARTIFACTS[@]}; do
-    cp $( readlink -f ${i} | tr '\n' ' ' ) archive/
+# Archive JOB artifacts in jenkins
+for i in ${JOB_ARCHIVE_ARTIFACTS[@]}; do
+    cp $( readlink -f ${i} | tr '\n' ' ' ) ${JOB_ARCHIVE_DIR}/
+done
+# Archive JOB artifacts to logs.fd.io
+for i in ${LOG_ARCHIVE_ARTIFACTS[@]}; do
+    cp $( readlink -f ${i} | tr '\n' ' ' ) ${LOG_ARCHIVE_DIR}/
 done
-
-echo Post-processing finished.
 
 exit ${RETURN_STATUS}
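Note on the new test-selection logic above (this note and the sketch below are not part of the patch): the rewritten case block only fills the TAGS and ETAGS arrays, and the two printf -v lines then expand them into repeated --include / --exclude Robot Framework options that are appended to the pybot command line. A minimal bash sketch of that expansion, using hypothetical tag names:

TAGS=('mrrAND1t1c' 'mrrAND2t2c')                  # hypothetical tag selection
printf -v INCLUDES " --include %s " "${TAGS[@]}"  # format string is reused for each array element
echo "${INCLUDES}"
# prints: " --include mrrAND1t1c  --include mrrAND2t2c "

Because ${PYBOT_ARGS}${INCLUDES}${EXCLUDES} is expanded unquoted, word splitting turns each tag expression into its own --include (or --exclude) argument for pybot.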