X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=bootstrap-vpp-verify-semiweekly.sh;h=af56e9f4268171f073eaa3ab3b557ef2c4b7b60d;hp=4f7c67333f93950649e16d53ed682268c1accef3;hb=83aaeda489a47d4cf03ee84e2894e52aa9c9fcc8;hpb=8a38a84dceac843151d3e5c9367667999638ab10

diff --git a/bootstrap-vpp-verify-semiweekly.sh b/bootstrap-vpp-verify-semiweekly.sh
index 4f7c67333f..af56e9f426 100644
--- a/bootstrap-vpp-verify-semiweekly.sh
+++ b/bootstrap-vpp-verify-semiweekly.sh
@@ -21,36 +21,66 @@ RETURN_STATUS=0
 cat /etc/hostname
 cat /etc/hosts
-export DEBIAN_FRONTEND=noninteractive
-sudo apt-get -y update
-sudo apt-get -y install libpython2.7-dev python-virtualenv
-
 PYBOT_ARGS="--noncritical MULTI_THREAD"
-ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+JOB_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+LOG_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
+JOB_ARCHIVE_DIR="archive"
+LOG_ARCHIVE_DIR="$WORKSPACE/archives"
+mkdir -p ${JOB_ARCHIVE_DIR}
+mkdir -p ${LOG_ARCHIVE_DIR}
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 export PYTHONPATH=${SCRIPT_DIR}
+# Create tmp dir
+mkdir ${SCRIPT_DIR}/tmp
+
+# Use tmp dir to store log files
+LOG_PATH="${SCRIPT_DIR}/tmp"
+
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+
+if [ "$OS_ID" == "centos" ]; then
+    DISTRO="CENTOS"
+    PACKAGE="rpm"
+    sudo yum install -y python-devel python-virtualenv openssh-clients sshpass
+elif [ "$OS_ID" == "ubuntu" ]; then
+    DISTRO="UBUNTU"
+    PACKAGE="deb"
+    export DEBIAN_FRONTEND=noninteractive
+    sudo apt-get -y update
+    sudo apt-get -y install libpython2.7-dev python-virtualenv
+else
+    echo "$OS_ID is not yet supported."
+    exit 1
+fi
 # 1st step: Download and prepare VPP packages
 # Temporarily download VPP packages from nexus.fd.io
-rm -f *.deb
 if [ "${#}" -ne "0" ]; then
     arr=(${@})
     echo ${arr[0]}
 else
-    # Download the latest VPP build .deb install packages
-    echo Downloading VPP packages...
-    bash ${SCRIPT_DIR}/resources/tools/download_install_vpp_pkgs.sh --skip-install
+    # Download the latest VPP build install packages
+    CSIT_DIR=${SCRIPT_DIR}
+    source "${SCRIPT_DIR}/resources/libraries/bash/function/artifacts.sh"
+    download_artifacts
+    # Need to revert -euo as the rest of script is not optimized for this.
+    set +euo pipefail
 fi
-VPP_DEBS=(*.deb)
-echo ${VPP_DEBS[@]}
+VIRL_DIR_LOC="/tmp/"
+VPP_PKGS=(*vpp*.$PACKAGE)
+VPP_PKGS_FULL=("${VPP_PKGS[@]/#/${VIRL_DIR_LOC}}")
+echo ${VPP_PKGS[@]}
-VPP_VER=$(echo ${VPP_DEBS#vpp-})
-VPP_VER=$(echo ${VPP_VER%-deb.deb})
+VIRL_TOPOLOGY=$(cat ${SCRIPT_DIR}/VIRL_TOPOLOGY_${DISTRO})
+VIRL_RELEASE=$(cat ${SCRIPT_DIR}/VIRL_RELEASE_${DISTRO})
+
+VPP_VER="$( expr match $(ls vpp*.${PACKAGE} | head -n 1) 'vpp[-|_]\(.*\)'.${PACKAGE} )"
 set +x
 echo "****************************************************************************************************************************************"
@@ -86,6 +116,7 @@ VIRL_USERNAME=jenkins-in
 VIRL_PKEY=priv_key
 VIRL_SERVER_STATUS_FILE="status"
 VIRL_SERVER_EXPECTED_STATUS="PRODUCTION"
+VIRL_SESSION_EXPIRY="620"
 SSH_OPTIONS="-i ${VIRL_PKEY} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o LogLevel=error"
@@ -161,20 +192,8 @@ do
     fi
 done
-
-VIRL_DIR_LOC="/tmp"
-VPP_DEBS_VIRL=(${VPP_DEBS[@]})
-
-# Prepend directory location at remote host to deb file list
-for index in "${!VPP_DEBS_VIRL[@]}"; do
-    VPP_DEBS_VIRL[${index}]=${VIRL_DIR_LOC}/${VPP_DEBS_VIRL[${index}]}
-done
-
-echo "Updated file names: " ${VPP_DEBS_VIRL[@]}
-
-cat ${VIRL_PKEY}
 # Copy the files to VIRL host
-scp ${SSH_OPTIONS} *.deb \
+scp ${SSH_OPTIONS} ${VPP_PKGS[@]} \
     ${VIRL_USERNAME}@${VIRL_SERVER}:${VIRL_DIR_LOC}/
 result=$?
@@ -194,9 +213,11 @@ function stop_virl_simulation {
 VIRL_SID=$(ssh ${SSH_OPTIONS} \
     ${VIRL_USERNAME}@${VIRL_SERVER} \
-    "start-testcase -c double-ring-nested ${VPP_DEBS_VIRL[@]}")
+    "start-testcase -vv --copy ${VIRL_TOPOLOGY} \
+    --expiry ${VIRL_SESSION_EXPIRY} \
+    --release ${VIRL_RELEASE} ${VPP_PKGS_FULL[@]}")
 retval=$?
-if [ "$?" -ne "0" ]; then
+if [ ${retval} -ne "0" ]; then
     echo "VIRL simulation start failed"
     exit ${retval}
 fi
@@ -218,7 +239,7 @@ scp ${SSH_OPTIONS} \
     topologies/enabled/topology_VIRL.yaml
 retval=$?
-if [ "$?" -ne "0" ]; then
+if [ ${retval} -ne "0" ]; then
     echo "Failed to copy topology file from VIRL simulation"
     exit ${retval}
 fi
@@ -239,7 +260,8 @@ echo Running functional tests on the VIRL system...
 # There are used three iterations of functional tests there
 # to check the stability and reliability of the results.
-for test_set in 1 2 3
+partial_logs=""
+for test_set in 1 2
 do
     echo
     echo Functional test loop: ${test_set}
@@ -247,14 +269,17 @@ do
     pybot -L TRACE -W 136\
         -v TOPOLOGY_PATH:${SCRIPT_DIR}/topologies/enabled/topology_VIRL.yaml \
-        --suite "tests.func" \
+        --suite "tests.vpp.func" \
         --include vm_envAND3_node_single_link_topo \
         --include vm_envAND3_node_double_link_topo \
         --exclude PERFTEST \
+        --exclude SOFTWIRE \
+        --exclude SKIP_TEST \
         --noncritical EXPECTED_FAILING \
-        --output log_func_test_set${test_set} \
+        --output ${LOG_PATH}/output_func_test_set${test_set} \
         tests/
     PARTIAL_RC=$(echo $?)
+    partial_logs="${partial_logs} ${LOG_PATH}/output_func_test_set${test_set}.xml"
     if [ ${PARTIAL_RC} -eq 250 ]; then
         MORE_FAILS=1
     fi
@@ -312,16 +337,18 @@ echo Post-processing test data...
 # Rebot output post-processing
 rebot --noncritical EXPECTED_FAILING \
-    --output output.xml \
-    ./log_func_test_set1.xml ./log_func_test_set2.xml ./log_func_test_set3.xml
+    --output output.xml ${partial_logs}
 # Remove unnecessary files
-rm -f ./log_test_set1.xml ./log_test_set2.xml ./log_test_set3.xml
+rm -f ${partial_logs}
-# Archive artifacts
-mkdir archive
-for i in ${ARCHIVE_ARTIFACTS[@]}; do
-    cp $( readlink -f ${i} | tr '\n' ' ' ) archive/
+# Archive JOB artifacts in jenkins
+for i in ${JOB_ARCHIVE_ARTIFACTS[@]}; do
+    cp $( readlink -f ${i} | tr '\n' ' ' ) ${JOB_ARCHIVE_DIR}/
+done
+# Archive JOB artifacts to logs.fd.io
+for i in ${LOG_ARCHIVE_ARTIFACTS[@]}; do
+    cp $( readlink -f ${i} | tr '\n' ' ' ) ${LOG_ARCHIVE_DIR}/
 done
 echo Post-processing finished.
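
The distro branch added at the top of the patch drives the rest of the job: PACKAGE selects the artifact glob (*vpp*.deb or *vpp*.rpm) and DISTRO selects the VIRL_TOPOLOGY_* and VIRL_RELEASE_* files. Below is a minimal standalone sketch of that pattern, written as an equivalent case statement rather than the script's exact if/elif chain; the trailing echo lines are added here only for illustration and are not part of the patch.

#!/bin/bash
# Sketch: detect the distro and pick the VPP package type, as the patched
# bootstrap script does. Assumes a Linux host with /etc/os-release.
set -euo pipefail

OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/"//g')

case "${OS_ID}" in
    centos)
        DISTRO="CENTOS"
        PACKAGE="rpm"    # VPP artifacts are expected as .rpm on CentOS
        ;;
    ubuntu)
        DISTRO="UBUNTU"
        PACKAGE="deb"    # and as .deb on Ubuntu
        ;;
    *)
        echo "${OS_ID} is not yet supported." >&2
        exit 1
        ;;
esac

# Everything downstream is keyed off these two variables: the artifact glob
# and the per-distro VIRL topology/release selector files.
echo "DISTRO=${DISTRO} PACKAGE=${PACKAGE}"
echo "package glob: *vpp*.${PACKAGE}"
echo "selectors:    VIRL_TOPOLOGY_${DISTRO}, VIRL_RELEASE_${DISTRO}"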
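
The test loop and the post-processing changes follow a collect-then-merge pattern: each pybot iteration writes its own output XML under LOG_PATH, the file names are accumulated in partial_logs, and a single rebot call merges them into the final output.xml that gets archived. Below is a small sketch of that flow, assuming Robot Framework's pybot and rebot are on PATH and a tests/ directory exists; the temporary directory and the || true guard are illustrative and not taken from the job script.

#!/bin/bash
# Sketch: run two pybot iterations, collect their output files, then merge
# them with rebot into one combined report.
set -euo pipefail

LOG_PATH="$(mktemp -d)"
partial_logs=""

for test_set in 1 2; do
    # pybot exits non-zero when tests fail; keep looping and let rebot decide.
    pybot --output "${LOG_PATH}/output_func_test_set${test_set}" tests/ || true
    partial_logs="${partial_logs} ${LOG_PATH}/output_func_test_set${test_set}.xml"
done

# Merge the per-iteration outputs into a single output.xml/log.html/report.html.
rebot --noncritical EXPECTED_FAILING --output output.xml ${partial_logs}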