X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=bootstrap-vpp-verify-weekly.sh;h=6aea26f7bddb85989c91d5fb39b8413db09088fe;hp=5c94e89e0570017e731859e599854a8bfd8d79a1;hb=5df1c3270217408b1928bcc30e5283635112055c;hpb=022cc77a6848eca746b20408856897a4e7cb5947

diff --git a/bootstrap-vpp-verify-weekly.sh b/bootstrap-vpp-verify-weekly.sh
index 5c94e89e05..6aea26f7bd 100644
--- a/bootstrap-vpp-verify-weekly.sh
+++ b/bootstrap-vpp-verify-weekly.sh
@@ -17,20 +17,72 @@ set -x
 cat /etc/hostname
 cat /etc/hosts
 
-export DEBIAN_FRONTEND=noninteractive
-sudo apt-get -y update
-sudo apt-get -y install libpython2.7-dev python-virtualenv
+VIRL_SERVERS=("10.30.51.28" "10.30.51.29" "10.30.51.30")
+VIRL_SERVER=""
+
+VIRL_USERNAME=jenkins-in
+VIRL_PKEY=priv_key
+VIRL_SERVER_STATUS_FILE="status"
+VIRL_SERVER_EXPECTED_STATUS="PRODUCTION"
+VIRL_SESSION_EXPIRY="620"
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+export PYTHONPATH=${SCRIPT_DIR}
+
+# Create tmp dir
+mkdir ${SCRIPT_DIR}/tmp
+
+# Use tmp dir to store log files
+LOG_PATH="${SCRIPT_DIR}/tmp"
+
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+
+if [ "$OS_ID" == "centos" ]; then
+    DISTRO="CENTOS"
+    PACKAGE="rpm"
+    sudo yum install -y python-devel python-virtualenv openssh-clients sshpass
+elif [ "$OS_ID" == "ubuntu" ]; then
+    DISTRO="UBUNTU"
+    PACKAGE="deb"
+    export DEBIAN_FRONTEND=noninteractive
+    sudo apt-get -y update
+    sudo apt-get -y install libpython2.7-dev python-virtualenv
+else
+    echo "$OS_ID is not yet supported."
+    exit 1
+fi
+
+# Temporarily download VPP packages from nexus.fd.io
+if [ "${#}" -ne "0" ]; then
+    arr=(${@})
+    echo ${arr[0]}
+else
+    # Download the specific VPP build install packages
+    VPP_VERSION=$(< ${SCRIPT_DIR}/VPP_STABLE_VER_${DISTRO})
+    CSIT_DIR=${SCRIPT_DIR}
+    source "${SCRIPT_DIR}/resources/libraries/bash/function/artifacts.sh"
+    download_artifacts
+    # Need to revert -euo as the rest of script is not optimized for this.
+    set +euo pipefail
+fi
+
+VIRL_DIR_LOC="/tmp/"
+VPP_PKGS=(*vpp*.$PACKAGE)
+VPP_PKGS_FULL=("${VPP_PKGS[@]/#/${VIRL_DIR_LOC}}")
+echo ${VPP_PKGS[@]}
+
+VIRL_TOPOLOGY=$(cat ${SCRIPT_DIR}/VIRL_TOPOLOGY_${DISTRO})
+VIRL_RELEASE=$(cat ${SCRIPT_DIR}/VIRL_RELEASE_${DISTRO})
+
+SSH_OPTIONS="-i ${VIRL_PKEY} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o LogLevel=error"
 
 function ssh_do() {
     echo
     echo "### " ssh $@
-    ssh -i priv_key -o StrictHostKeyChecking=no $@
+    ssh ${SSH_OPTIONS} $@
 }
 
-VIRL_SERVER=10.30.51.28
-VIRL_USERNAME=jenkins-in
-VIRL_PKEY=priv_key
-
 rm -f ${VIRL_PKEY}
 cat > ${VIRL_PKEY} <<EOF
[...]
+    virl_server_status=$(ssh ${SSH_OPTIONS} ${VIRL_USERNAME}@${virl_server_candidate} cat $VIRL_SERVER_STATUS_FILE 2>&1)
+    echo VIRL HOST $virl_server_candidate status is \"$virl_server_status\"
+    if [ "$virl_server_status" == "$VIRL_SERVER_EXPECTED_STATUS" ]
+    then
+        # Candidate is in good status. Select this server.
+        VIRL_SERVER="$virl_server_candidate"
+    else
+        # Candidate is in bad status. Remove from array.
+        VIRL_SERVERS=("${VIRL_SERVERS[@]:0:$element}" "${VIRL_SERVERS[@]:$[$element+1]}")
+    fi
 done
 
-echo "Updated file names: " ${VPP_DEBS_FULL[@]}
-
 # Copy the files to VIRL host
-scp -i ${VIRL_PKEY} -o StrictHostKeyChecking=no *.deb \
-    ${VIRL_USERNAME}@${VIRL_SERVER}:${VIRL_DIR_LOC}/
+scp ${SSH_OPTIONS} ${VPP_PKGS[@]} \
+    ${VIRL_USERNAME}@${VIRL_SERVER}:${VIRL_DIR_LOC}
 
 result=$?
if [ "${result}" -ne "0" ]; then @@ -106,15 +164,17 @@ fi echo "Starting simulation on VIRL server" function stop_virl_simulation { - ssh -i priv_key -o StrictHostKeyChecking=no ${VIRL_USERNAME}@${VIRL_SERVER}\ - "/home/jenkins-in/testcase-infra/bin/stop-testcase ${VIRL_SID}" + ssh ${SSH_OPTIONS} ${VIRL_USERNAME}@${VIRL_SERVER}\ + "stop-testcase ${VIRL_SID}" } -VIRL_SID=$(ssh -i priv_key -o StrictHostKeyChecking=no \ +VIRL_SID=$(ssh ${SSH_OPTIONS} \ ${VIRL_USERNAME}@${VIRL_SERVER} \ - "/home/jenkins-in/testcase-infra/bin/start-testcase -c double-ring-nested ${VPP_DEBS_FULL[@]}") + "start-testcase -vv --copy ${VIRL_TOPOLOGY} \ + --expiry ${VIRL_SESSION_EXPIRY} \ + --release ${VIRL_RELEASE} ${VPP_PKGS_FULL[@]}") retval=$? -if [ "$?" -ne "0" ]; then +if [ ${retval} -ne "0" ]; then echo "VIRL simulation start failed" exit ${retval} fi @@ -131,40 +191,103 @@ echo ${VIRL_SID} ssh_do ${VIRL_USERNAME}@${VIRL_SERVER} cat /scratch/${VIRL_SID}/topology.yaml # Download the topology file from virl session -scp -i ${VIRL_PKEY} -o StrictHostKeyChecking=no \ +scp ${SSH_OPTIONS} \ ${VIRL_USERNAME}@${VIRL_SERVER}:/scratch/${VIRL_SID}/topology.yaml \ topologies/enabled/topology.yaml retval=$? -if [ "$?" -ne "0" ]; then +if [ ${retval} -ne "0" ]; then echo "Failed to copy topology file from VIRL simulation" exit ${retval} fi -virtualenv env +virtualenv --system-site-packages env . env/bin/activate echo pip install -pip install -r requirements.txt - +pip install -r ${SCRIPT_DIR}/requirements.txt # There are used three iterations of tests there to check # the stability and reliability of the results -for test_set in 1 2 3 + +RC=0 +MORE_FAILS=0 + +partial_logs="" +for test_set in 1 2 do echo echo ${test_set}. test loop - PYTHONPATH=`pwd` pybot -L TRACE \ - -v TOPOLOGY_PATH:topologies/enabled/topology.yaml \ + PYTHONPATH=`pwd` pybot -L TRACE -W 136\ + -v TOPOLOGY_PATH:${SCRIPT_DIR}/topologies/enabled/topology.yaml \ + --suite "tests.vpp.func" \ --include vm_envAND3_node_single_link_topo \ --include vm_envAND3_node_double_link_topo \ --exclude PERFTEST \ + --exclude SOFTWIRE \ + --exclude SKIP_TEST \ --noncritical EXPECTED_FAILING \ - --output log_test_set${test_set} \ + --output ${LOG_PATH}/output_test_set${test_set} \ tests/ + PARTIAL_RC=$(echo $?) + partial_logs="${partial_logs} ${LOG_PATH}/output_test_set${test_set}.xml" + if [ ${PARTIAL_RC} -eq 250 ]; then + MORE_FAILS=1 + fi + RC=$((RC+PARTIAL_RC)) done -rebot --output output.xml ./log_test_set1.xml ./log_test_set2.xml ./log_test_set3.xml +# Log the final result +if [ ${RC} -eq 0 ]; then + set +x + echo + echo "========================================================================================================================================" + echo "Final result of all test loops: | PASS |" + echo "All critical tests have passed." + echo "========================================================================================================================================" + echo + set -x +elif [ ${MORE_FAILS} -eq 0 ]; then + if [ ${RC} -eq 1 ]; then + HLP_STR="test has" + else + HLP_STR="tests have" + fi + set +x + echo + echo "========================================================================================================================================" + echo "Final result of all test loops: | FAIL |" + echo "${RC} critical ${HLP_STR} failed." 
+ echo "========================================================================================================================================" + echo + set -x +else + set +x + echo + echo "========================================================================================================================================" + echo "Final result of all test loops: | FAIL |" + echo "More then 250 critical tests have failed in one test loop." + echo "========================================================================================================================================" + echo + set -x +fi + +echo Post-processing test data... + +# Rebot output post-processing +rebot --noncritical EXPECTED_FAILING \ + --output output.xml ${partial_logs} + +# Remove unnecessary log files +rm -f ${partial_logs} + +echo Post-processing finished. + +if [ ${RC} -eq 0 ]; then + RETURN_STATUS=0 +else + RETURN_STATUS=1 +fi -rm -f ./log_test_set1.xml ./log_test_set2.xml ./log_test_set3.xml +exit ${RETURN_STATUS}