X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=bootstrap-verify-perf-ligato.sh;h=1ab881b6b37d4b2fc1ff708ae3de878b5cb63cbc;hp=44ebd8d9f38b30a97d1ac7ca6e4c26b8ad5d6fbc;hb=974d52d1976f802c99738712bbfe56e16d56d650;hpb=36da555ef3da4346ceaec1cd1651658ac539e689

diff --git a/bootstrap-verify-perf-ligato.sh b/bootstrap-verify-perf-ligato.sh
index 44ebd8d9f3..1ab881b6b3 100644
--- a/bootstrap-verify-perf-ligato.sh
+++ b/bootstrap-verify-perf-ligato.sh
@@ -15,18 +15,18 @@ set -xo pipefail
 
 # Space separated list of available testbeds, described by topology files
-TOPOLOGIES="topologies/available/lf_testbed1.yaml \
-            topologies/available/lf_testbed2.yaml \
-            topologies/available/lf_testbed3.yaml"
+TOPOLOGIES="topologies/available/lf_3n_hsw_testbed1.yaml \
+            topologies/available/lf_3n_hsw_testbed2.yaml \
+            topologies/available/lf_3n_hsw_testbed3.yaml"
 
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+export PYTHONPATH=${SCRIPT_DIR}
+export DEBIAN_FRONTEND=noninteractive
 
 # Reservation dir
 RESERVATION_DIR="/tmp/reservation_dir"
 INSTALLATION_DIR="/tmp/install_dir"
 
-PYBOT_ARGS="-W 150 -L TRACE"
-
 JOB_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
 LOG_ARCHIVE_ARTIFACTS=(log.html output.xml report.html)
 JOB_ARCHIVE_DIR="archive"
@@ -37,77 +37,55 @@ mkdir -p ${LOG_ARCHIVE_DIR}
 
 # If we run this script from CSIT jobs we want to use stable vpp version
 if [[ ${JOB_NAME} == csit-* ]] ; then
-    mkdir -p vpp/build-root
-    cd vpp/build-root
+    mkdir -p vpp_download
+    cd vpp_download
 
     if [[ ${TEST_TAG} == *DAILY ]] || \
        [[ ${TEST_TAG} == *WEEKLY ]]; then
-        # Download the latest VPP build .deb install packages
-        echo Downloading VPP packages...
-        bash ${SCRIPT_DIR}/resources/tools/scripts/download_install_vpp_pkgs.sh --skip-install
-
-        VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
-        # Take vpp package and get the vpp version
-        VPP_STABLE_VER="$( expr match $(ls *.deb | head -n 1) 'vpp-\(.*\)-deb.deb' )"
+        echo Downloading latest VPP packages from NEXUS...
+        bash ${SCRIPT_DIR}/resources/tools/scripts/download_install_vpp_pkgs.sh \
+            --skip-install
     else
-        DPDK_STABLE_VER=$(cat ${SCRIPT_DIR}/DPDK_STABLE_VER)_amd64
-        VPP_REPO_URL=$(cat ${SCRIPT_DIR}/VPP_REPO_URL_UBUNTU)
+        echo Downloading VPP packages of specific version from NEXUS...
+        DPDK_STABLE_VER=$(cat ${SCRIPT_DIR}/DPDK_STABLE_VER)
         VPP_STABLE_VER=$(cat ${SCRIPT_DIR}/VPP_STABLE_VER_UBUNTU)
-        VPP_CLASSIFIER="-deb"
-        # Download vpp build from nexus and set VPP_DEBS variable
-        wget -q "${VPP_REPO_URL}/vpp/${VPP_STABLE_VER}/vpp-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        wget -q "${VPP_REPO_URL}/vpp-dbg/${VPP_STABLE_VER}/vpp-dbg-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        wget -q "${VPP_REPO_URL}/vpp-dev/${VPP_STABLE_VER}/vpp-dev-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        # Temporary disable using dpdk
-        # wget -q "${VPP_REPO_URL}/vpp-dpdk-dkms/${DPDK_STABLE_VER}/vpp-dpdk-dkms-${DPDK_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        wget -q "${VPP_REPO_URL}/vpp-lib/${VPP_STABLE_VER}/vpp-lib-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        wget -q "${VPP_REPO_URL}/vpp-plugins/${VPP_STABLE_VER}/vpp-plugins-${VPP_STABLE_VER}${VPP_CLASSIFIER}.deb" || exit
-        VPP_DEBS="$( readlink -f *.deb | tr '\n' ' ' )"
+        #Temporary if arch will not be removed from VPP_STABLE_VER_UBUNTU
+        #VPP_STABLE_VER=${VPP_STABLE_VER%_amd64}
+        bash ${SCRIPT_DIR}/resources/tools/scripts/download_install_vpp_pkgs.sh \
+            --skip-install --vpp ${VPP_STABLE_VER} --dkms ${DPDK_STABLE_VER}
     fi
-
-    # Temporary workaround as ligato docker file requires specific file name
-    rename -v 's/^(.*)-(\d.*)-deb.deb/$1_$2.deb/' *.deb
+    # Jenkins VPP deb paths (convert to full path)
+    VPP_DEBS="$( readlink -f vpp*.deb | tr '\n' ' ' )"
     cd ${SCRIPT_DIR}
 
 # If we run this script from vpp project we want to use local build
 elif [[ ${JOB_NAME} == vpp-* ]] ; then
-    mkdir -p vpp/build-root
     # Use local packages provided as argument list
     # Jenkins VPP deb paths (convert to full path)
     VPP_DEBS="$( readlink -f $@ | tr '\n' ' ' )"
-    # Take vpp package and get the vpp version
-    VPP_STABLE_VER="$( expr match $1 'vpp-\(.*\)-deb.deb' )"
-    # Move files to build-root for packing
-    for deb in ${VPP_DEBS}; do mv ${deb} vpp/build-root/; done
 else
     echo "Unable to identify job type based on JOB_NAME variable: ${JOB_NAME}"
     exit 1
 fi
 
 # Extract VPP API to specific folder
-dpkg -x vpp/build-root/vpp_${VPP_STABLE_VER}.deb /tmp/vpp
-# Compress all VPP debs and remove temporary directory
-tar -zcvf ${SCRIPT_DIR}/vpp.tar.gz vpp/* && rm -R vpp
+dpkg -x vpp_download/vpp_*.deb /tmp/vpp
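
Note: both job-type branches above publish the collected packages through VPP_DEBS, and the dpkg -x step immediately unpacks the vpp package for its API definitions. A small guard along the following lines (an illustrative sketch, not part of the patch) would fail fast when the download or the argument list produced no packages at all:

# Sketch only: abort early if no VPP .deb packages were collected above.
if [ -z "${VPP_DEBS// /}" ]; then
    echo "No VPP .deb packages found; cannot continue."
    exit 1
fi
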
 
-LIGATO_REPO_URL=$(cat ${SCRIPT_DIR}/LIGATO_REPO_URL)
+LIGATO_REPO_URL='https://github.com/ligato/'
 VPP_AGENT_STABLE_VER=$(cat ${SCRIPT_DIR}/VPP_AGENT_STABLE_VER)
 DOCKER_DEB="docker-ce_18.03.0~ce-0~ubuntu_amd64.deb"
 
 # Clone & checkout stable vnf-agent
-cd .. && git clone ${LIGATO_REPO_URL}/vpp-agent
+cd .. && git clone -b ${VPP_AGENT_STABLE_VER} --single-branch \
+    ${LIGATO_REPO_URL}/vpp-agent vpp-agent
 # If the git clone fails, complain clearly and exit
 if [ $? != 0 ]; then
-    echo "Failed to run: git clone --depth 1 ${LIGATO_REPO_URL}/vpp-agent"
-    exit 1
-fi
-cd vpp-agent && git checkout tags/${VPP_AGENT_STABLE_VER}
-# If the git checkout fails, complain clearly and exit
-if [ $? != 0 ]; then
-    echo "Failed to run: git checkout ${VPP_AGENT_STABLE_VER}"
+    echo "Failed to run: git clone ${LIGATO_REPO_URL}/vpp-agent"
     exit 1
 fi
+cd vpp-agent
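
Note: the clone now fetches the stable tag directly as a single branch and reuses the pre-existing `if [ $? != 0 ]` guard, which here tests the combined `cd .. && git clone ...` pipeline. A behaviour-equivalent, slightly more direct formulation is sketched below purely as an illustration of the same intent; it is not part of the change:

# Sketch: attach the error handling to the command itself.
cd .. || exit 1
git clone -b "${VPP_AGENT_STABLE_VER}" --single-branch \
    "${LIGATO_REPO_URL}/vpp-agent" vpp-agent || {
        echo "Failed to run: git clone ${LIGATO_REPO_URL}/vpp-agent"
        exit 1
    }
cd vpp-agent || exit 1
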
 
 # Install Docker
 wget -q https://download.docker.com/linux/ubuntu/dists/xenial/pool/stable/amd64/${DOCKER_DEB}
@@ -125,31 +103,30 @@ sudo docker tag ligato/dev-vpp-agent:${VPP_AGENT_STABLE_VER}\
 
 # Start dev_vpp_agent container as daemon
 sudo docker run --rm -itd --name agentcnt dev_vpp_agent bash
+
 # Copy latest vpp api into running container
 sudo docker cp /tmp/vpp/usr/share/vpp/api agentcnt:/usr/share/vpp
+for f in ${SCRIPT_DIR}/vpp_download/*; do
+    sudo docker cp $f agentcnt:/opt/vpp-agent/dev/vpp/build-root/
+done
+
 # Recompile vpp-agent
 sudo docker exec -i agentcnt \
-    script -qec '. ~/.bashrc; cd /root/go/src/github.com/ligato/vpp-agent && make generate && make install'
+    script -qec '. ~/.bashrc; cd /go/src/github.com/ligato/vpp-agent && make generate && make install'
 if [ $? != 0 ]; then
     echo "Failed to build vpp-agent in Docker image."
     exit 1
 fi
-# Extract vpp-agent
-rm -rf agent
-mkdir -p agent
-sudo docker cp agentcnt:/root/go/bin/vpp-agent agent/
-sudo docker cp agentcnt:/root/go/bin/vpp-agent-ctl agent/
-sudo docker cp agentcnt:/root/go/bin/agentctl agent/
-tar -zcvf ${SCRIPT_DIR}/../vpp-agent/docker/prod_vpp_agent/agent.tar.gz agent
-# Kill running container
-sudo docker rm -f agentcnt
+# Save container state
+sudo docker commit `sudo docker ps -q` dev_vpp_agent:latest
 
 # Build prod_vpp_agent docker image
-cd ${SCRIPT_DIR}/../vpp-agent/docker/prod_vpp_agent/ &&\
-    mv ${SCRIPT_DIR}/vpp.tar.gz . &&\
-    sudo docker build -t prod_vpp_agent --no-cache .
+cd docker/prod/ &&\
+    sudo docker build --tag prod_vpp_agent --no-cache .
 # Export Docker image
 sudo docker save prod_vpp_agent | gzip > prod_vpp_agent.tar.gz
+# Kill running agentcnt container
+sudo docker rm -f agentcnt
 # If image build fails, complain clearly and exit
 if [ $? != 0 ]; then
     echo "Failed to build vpp-agent Docker image."
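
Note: in the hunk above, by the time the `if [ $? != 0 ]` guard runs, `docker save` and `docker rm -f agentcnt` have already executed, so the status being tested is that of the last of those commands rather than of `docker build`. If the intent is to abort on a failed image build, capturing the build status immediately, as in this sketch (same commands, reordered; not part of the patch), would be more robust:

# Sketch: test the build result before exporting and cleaning up.
cd docker/prod/ || exit 1
sudo docker build --tag prod_vpp_agent --no-cache .
BUILD_STATUS=$?
if [ ${BUILD_STATUS} -ne 0 ]; then
    echo "Failed to build vpp-agent Docker image."
    exit 1
fi
sudo docker save prod_vpp_agent | gzip > prod_vpp_agent.tar.gz
sudo docker rm -f agentcnt
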
@@ -160,7 +137,6 @@ DOCKER_IMAGE="$( readlink -f prod_vpp_agent.tar.gz | tr '\n' ' ' )"
 cd ${SCRIPT_DIR}
 
 WORKING_TOPOLOGY=""
-export PYTHONPATH=${SCRIPT_DIR}
 
 sudo apt-get -y update
 sudo apt-get -y install libpython2.7-dev python-virtualenv
@@ -171,6 +147,10 @@ virtualenv --system-site-packages env
 echo pip install
 pip install -r requirements.txt
 
+if [ -z "${TOPOLOGIES}" ]; then
+    echo "No applicable topology found!"
+    exit 1
+fi
 # We iterate over available topologies and wait until we reserve topology
 while :; do
     for TOPOLOGY in ${TOPOLOGIES};
@@ -230,223 +210,86 @@ else
     exit 1
 fi
 
+PYBOT_ARGS="--consolewidth 100 --loglevel TRACE --variable TOPOLOGY_PATH:${WORKING_TOPOLOGY} --suite tests.${DUT}.perf"
+
 case "$TEST_TAG" in
-    # run specific performance tests based on jenkins job type variable
+    # select specific performance tests based on jenkins job type variable
     PERFTEST_DAILY )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cORndrdiscANDnic_intel-x520-da2AND2t2c \
-            --include ndrdiscAND1t1cANDipsecORndrdiscAND2t2cANDipsec \
-            tests/
-        RETURN_STATUS=$(echo $?)
+        TAGS=('ndrdiscANDnic_intel-x520-da2AND1c'
+              'ndrdiscANDnic_intel-x520-da2AND2c'
+              'ndrdiscAND1cANDipsec'
+              'ndrdiscAND2cANDipsec')
         ;;
     PERFTEST_SEMI_WEEKLY )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x710AND1t1cORndrdiscANDnic_intel-x710AND2t2cORndrdiscANDnic_intel-xl710AND1t1cORndrdiscANDnic_intel-xl710AND2t2c \
-            tests/
-        RETURN_STATUS=$(echo $?)
+        TAGS=('ndrdiscANDnic_intel-x710AND1c'
+              'ndrdiscANDnic_intel-x710AND2c'
+              'ndrdiscANDnic_intel-xl710AND1c'
+              'ndrdiscANDnic_intel-xl710AND2c')
         ;;
     PERFTEST_MRR_DAILY )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include mrrAND64bAND1t1c \
-            --include mrrAND64bAND2t2c \
-            --include mrrAND64bAND4t4c \
-            --include mrrAND78bAND1t1c \
-            --include mrrAND78bAND2t2c \
-            --include mrrAND78bAND4t4c \
-            --include mrrANDimixAND1t1cANDvhost \
-            --include mrrANDimixAND2t2cANDvhost \
-            --include mrrANDimixAND4t4cANDvhost \
-            --include mrrANDimixAND1t1cANDmemif \
-            --include mrrANDimixAND2t2cANDmemif \
-            --include mrrANDimixAND4t4cANDmemif \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-NDRDISC )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscAND1t1cORndrdiscAND2t2c \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-PDRDISC )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include pdrdiscAND1t1cORpdrdiscAND2t2c \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-MRR )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include mrrAND1t1cORmrrAND2t2c \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-IP4 )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDip4baseORndrdiscANDnic_intel-x520-da2AND1t1cANDip4fwdANDfib_2m \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-IP6 )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDip6baseORndrdiscANDnic_intel-x520-da2AND1t1cANDip6fwdANDfib_2m \
-            tests/
-        RETURN_STATUS=$(echo $?)
+        TAGS=('mrrAND64bAND1c'
+              'mrrAND64bAND2c'
+              'mrrAND64bAND4c'
+              'mrrAND78bAND1c'
+              'mrrAND78bAND2c'
+              'mrrAND78bAND4c'
+              'mrrANDimixAND1cANDvhost'
+              'mrrANDimixAND2cANDvhost'
+              'mrrANDimixAND4cANDvhost'
+              'mrrANDimixAND1cANDmemif'
+              'mrrANDimixAND2cANDmemif'
+              'mrrANDimixAND4cANDmemif')
         ;;
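
Note: each array element above is a Robot Framework tag expression (tags joined with AND), and the loop near the end of the file turns every element into its own --include option, so elements effectively combine as OR. The renamed tags also drop the thread notation (1t1c, 2t2c) in favour of core counts (1c, 2c). For illustration only, two of the PERFTEST_MRR_DAILY entries end up selecting tests roughly as:

# Illustration of the resulting selection (not the literal command in the script):
pybot --loglevel TRACE --include mrrAND64bAND1c --include mrrAND64bAND2c tests/
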
-    VERIFY-PERF-L2 )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDl2xcbaseORndrdiscANDnic_intel-x520-da2AND1t1cANDl2bdbase \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-LISP )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDlisp \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-VXLAN )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDvxlan \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-VHOST )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDvhost \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-MEMIF )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include ndrdiscANDnic_intel-x520-da2AND1t1cANDmemif \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VERIFY-PERF-IPSECHW )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf.crypto" \
-            --include ndrdiscANDnic_intel-xl710AND1t1cANDipsechw \
-            --include ndrdiscANDnic_intel-xl710AND2t2cANDipsechw \
-            --include mrrANDnic_intel-xl710AND1t1cANDipsechw \
-            --include mrrANDnic_intel-xl710AND2t2cANDipsechw \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-IP4 )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include mrrANDnic_intel-x520-da2AND1t1cANDip4baseORmrrANDnic_intel-x520-da2AND1t1cANDip4fwdANDfib_2m \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-IP6 )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include mrrANDnic_intel-x520-da2AND1t1cANDip6baseORmrrANDnic_intel-x520-da2AND1t1cANDip6fwdANDfib_2m \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-L2 )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include mrrANDnic_intel-x520-da2AND1t1cANDl2xcbaseORmrrANDnic_intel-x520-da2AND1t1cANDl2bdbase \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-LISP )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include pdrchkANDnic_intel-x520-da2AND1t1cANDlisp \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-VXLAN )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include pdrchkANDnic_intel-x520-da2AND1t1cANDvxlan \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-VHOST )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include pdrdiscANDnic_intel-x520-da2AND1t1cANDvhost \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-MEMIF )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include pdrdiscANDnic_intel-x520-da2AND1t1cANDmemif \
-            --include pdrdiscANDnic_intel-x520-da2AND2t2cANDmemif \
-            --include mrrANDnic_intel-x520-da2AND1t1cANDmemif \
-            --include mrrANDnic_intel-x520-da2AND2t2cANDmemif \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-ACL )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            --include pdrdiscANDnic_intel-x520-da2AND1t1cANDacl \
-            --include pdrdiscANDnic_intel-x520-da2AND2t2cANDacl \
-            tests/
-        RETURN_STATUS=$(echo $?)
-        ;;
-    VPP-VERIFY-PERF-IPSECHW )
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf.crypto" \
-            --include pdrdiscANDnic_intel-xl710AND1t1cANDipsechw \
-            --include pdrdiscANDnic_intel-xl710AND2t2cANDipsechw \
-            --include mrrANDnic_intel-xl710AND1t1cANDipsechw \
-            --include mrrANDnic_intel-xl710AND2t2cANDipsechw \
-            tests/
-        RETURN_STATUS=$(echo $?)
+    VERIFY-PERF-PATCH )
+        if [[ -z "$TEST_TAG_STRING" ]]; then
+            # If nothing is specified, we will run pre-selected tests by
+            # following tags. Items of array will be concatenated by OR in Robot
+            # Framework.
+            TEST_TAG_ARRAY=('mrrANDnic_intel-x710AND1cAND64bANDip4base'
+                            'mrrANDnic_intel-x710AND1cAND78bANDip6base'
+                            'mrrANDnic_intel-x710AND1cAND64bANDl2bdbase')
+        else
+            # If trigger contains tags, split them into array.
+            TEST_TAG_ARRAY=(${TEST_TAG_STRING//:/ })
+        fi
+
+        TAGS=()
+
+        for TAG in "${TEST_TAG_ARRAY[@]}"; do
+            if [[ ${TAG} == "!"* ]]; then
+                # Exclude tags are not prefixed.
+                TAGS+=("${TAG}")
+            else
+                # We will prefix with perftest to prevent running other tests
+                # (e.g. Functional).
+                prefix="perftestAND"
+                if [[ ${JOB_NAME} == vpp-* ]] ; then
+                    # Automatic prefixing for VPP jobs to limit the NIC used and
+                    # traffic evaluation to MRR.
+                    prefix="${prefix}mrrANDnic_intel-x710AND"
+                fi
+                TAGS+=("$prefix${TAG}")
+            fi
+        done
         ;;
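
Note: the VERIFY-PERF-PATCH arm is the one driven by a per-patch trigger. TEST_TAG_STRING arrives colon-separated, is split into TEST_TAG_ARRAY, and every non-exclude entry is prefixed with perftest (plus an mrr/x710 prefix for vpp-* jobs). A worked example with a made-up trigger string, traced through the logic above:

# Hypothetical trigger string (illustrative only):
TEST_TAG_STRING="1cAND64bANDip4base:!vhost"
TEST_TAG_ARRAY=(${TEST_TAG_STRING//:/ })
# -> TEST_TAG_ARRAY[0]="1cAND64bANDip4base", TEST_TAG_ARRAY[1]="!vhost"
# For a vpp-* job the loop then produces:
#   TAGS[0]="perftestANDmrrANDnic_intel-x710AND1cAND64bANDip4base"
#   TAGS[1]="!vhost"
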
     * )
-        # run full performance test suite and exit on fail
-        pybot ${PYBOT_ARGS} \
-            -v TOPOLOGY_PATH:${WORKING_TOPOLOGY} \
-            -s "tests.${DUT}.perf" \
-            tests/
-        RETURN_STATUS=$(echo $?)
+        TAGS=('perftest')
 esac
 
+# Catenate TAG selections
+EXPANDED_TAGS=()
+for TAG in "${TAGS[@]}"; do
+    if [[ ${TAG} == "!"* ]]; then
+        EXPANDED_TAGS+=(" --exclude ${TAG#$"!"} ")
+    else
+        EXPANDED_TAGS+=(" --include ${TAG} ")
+    fi
+done
+
+# Execute the test
+pybot ${PYBOT_ARGS}${EXPANDED_TAGS[@]} tests/
+RETURN_STATUS=$(echo $?)
+
 # Archive JOB artifacts in jenkins
 for i in ${JOB_ARCHIVE_ARTIFACTS[@]}; do
     cp $( readlink -f ${i} | tr '\n' ' ' ) ${JOB_ARCHIVE_DIR}/
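
Note: PYBOT_ARGS and EXPANDED_TAGS are expanded unquoted in the final pybot call, so the embedded spaces are what split them back into individual options; this works as long as the tag expressions themselves contain no whitespace. For TEST_TAG=PERFTEST_DAILY the composed command comes out roughly as the sketch below (wrapped for readability; the topology path and DUT value are placeholders taken from earlier in the script):

# Approximate composed invocation for TEST_TAG=PERFTEST_DAILY (illustrative only):
pybot --consolewidth 100 --loglevel TRACE \
    --variable TOPOLOGY_PATH:topologies/available/lf_3n_hsw_testbed1.yaml \
    --suite tests.${DUT}.perf \
    --include ndrdiscANDnic_intel-x520-da2AND1c \
    --include ndrdiscANDnic_intel-x520-da2AND2c \
    --include ndrdiscAND1cANDipsec \
    --include ndrdiscAND2cANDipsec \
    tests/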