- Use the same bucket path as the logs bucket so that the docs can be
viewed by substituting 's3-docs-7day' for 's3-logs' in the URL after
selecting the logs URL from the Jenkins job page.
- Also, fix the error output of the get_gerrit_refspec bash function.
Change-Id: I73e8b7a1f310dbfb031afe9d164b114021c2cfe3
Signed-off-by: Dave Wallace <dwallacelf@gmail.com>
local query="$(ssh -p 29418 gerrit.fd.io gerrit query status:merged project:$project branch:$branch limit:1 --format=JSON --current-patch-set | tr ',' '\n' | grep refs | cut -d'"' -f4)"
if [ -z "$query" ] ; then
local query="$(ssh -p 29418 gerrit.fd.io gerrit query status:merged project:$project branch:$branch limit:1 --format=JSON --current-patch-set | tr ',' '\n' | grep refs | cut -d'"' -f4)"
if [ -z "$query" ] ; then
- echo "ERROR: Invalid project ($1) or branch ($2)"
+ echo "ERROR: Invalid argument(s): branch ($1) project ($2)"
+ echo "Usage: $0 <branch> <project>"
- publisher:
name: fdio-infra-publish-docs
- publisher:
name: fdio-infra-publish-docs
- # macro to finish up a build.
+ # macro to finish up a docs build.
#
# Handles the following:
#
# Handles the following:
- # - Shipping docs S3 logs repository
+ # - Mapping docs S3 bucket credentials for merge job docs upload
# - Cleanup workspace
publishers:
- postbuildscript:
# - Cleanup workspace
publishers:
- postbuildscript:
- "**/*.jenkins-trigger"
fail-build: false
- "**/*.jenkins-trigger"
fail-build: false
+- publisher:
+ name: fdio-infra-publish-docs-7day
+ # macro to finish up a verify docs build.
+ #
+ # Handles the following:
+ # - Mapping 7-day retention S3 bucket credentials for verify job docs upload
+ # - Cleanup workspace
+ publishers:
+ - postbuildscript:
+ builders:
+ - role: BOTH
+ build-on:
+ - ABORTED
+ - FAILURE
+ - NOT_BUILT
+ - SUCCESS
+ - UNSTABLE
+ build-steps:
+ - fdio-infra-ship-docs-7day
+ mark-unstable-if-failed: true
+ - workspace-cleanup:
+ exclude:
+ # Do not clean up *.jenkins-trigger files for jobs that use a
+ # properties file as input for triggering another build.
+ - "**/*.jenkins-trigger"
+ fail-build: false
+
- publisher:
name: fdio-infra-publish
# macro to finish up a build.
- publisher:
name: fdio-infra-publish
# macro to finish up a build.
- shell: !include-raw:
- ../global-jjb/shell/logs-clear-credentials.sh
- shell: !include-raw:
- ../global-jjb/shell/logs-clear-credentials.sh
+- builder:
+ name: fdio-infra-ship-docs-7day
+ builders:
+ - config-file-provider:
+ files:
+ - file-id: "jenkins-s3-vpp-docs-ship"
+ target: $HOME/.aws/credentials
+ - shell: !include-raw:
+ - scripts/terraform_s3_docs_ship.sh
+ - shell: !include-raw:
+ - scripts/publish_docs.sh
+ - shell: !include-raw:
+ - ../global-jjb/shell/logs-clear-credentials.sh
+
- builder:
name: fdio-infra-ship-logs
builders:
- builder:
name: fdio-infra-ship-logs
builders:
- shell: !include-raw-escape: ../scripts/hicn/docs.sh
publishers:
- shell: !include-raw-escape: ../scripts/hicn/docs.sh
publishers:
+ - fdio-infra-publish-docs-7day
- fdio-infra-publish
- job-template:
- fdio-infra-publish
- job-template:
bash scripts/build-packages.sh sphinx
bash scripts/build-packages.sh sphinx
-if [[ "${JOB_NAME}" == *merge* ]]; then
- mkdir -p "${SITE_DIR_ROOT}"
- mv -f "${DOC_DIR}" "${SITE_DIR}"
- find "${SITE_DIR}" -type f '(' -name '*.md5' -o -name '*.dot' -o -name '*.map' ')' -delete
-fi
+mkdir -p "${SITE_DIR_ROOT}"
+mv -f "${DOC_DIR}" "${SITE_DIR}"
+find "${SITE_DIR}" -type f '(' -name '*.md5' -o -name '*.dot' -o -name '*.map' ')' -delete
-if [[ "${SILO}" != "production" ]] ; then
- echo "WARNING: Doc upload not supported on Jenkins '${SILO}'..."
- exit 0
-fi
-
+bucket="fdio-docs-s3-cloudfront-index"
if [[ ${JOB_NAME} == *merge* ]]; then
case "${JOB_NAME}" in
if [[ ${JOB_NAME} == *merge* ]]; then
case "${JOB_NAME}" in
*)
die "Unknown job: ${JOB_NAME}"
esac
*)
die "Unknown job: ${JOB_NAME}"
esac
+elif [[ ${JOB_NAME} == *verify* ]]; then
+ bucket="vpp-docs-7day-retention"
+ # Use the same bucket path as logs so that the docs can be viewed by
+ # s/s3-logs/s3-docs-7day/ in the URL after selecting the logs URL from
+ # the jenkins job page.
+ bucket_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
+ case "${JOB_NAME}" in
+ *"hicn-docs"*)
+ workspace_dir="${WORKSPACE}/build/doc/deploy-site"
+ ;;
+ *"vpp-docs"*)
+ CDN_URL="s3-docs-7day.fd.io"
+ workspace_dir="${WORKSPACE}/build-root/docs/html"
+ ;;
+ *)
+ die "Unknown job: ${JOB_NAME}"
+ esac
+else
+ die "Unknown job: ${JOB_NAME}"
+fi
- export TF_VAR_workspace_dir=$workspace_dir
- export TF_VAR_bucket_path=$bucket_path
- export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials
- export AWS_DEFAULT_REGION="us-east-1"
+export TF_VAR_workspace_dir="$workspace_dir"
+export TF_VAR_bucket_path="$bucket_path"
+export TF_VAR_bucket="$bucket"
+export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials
+export AWS_DEFAULT_REGION="us-east-1"
- echo "INFO: archiving docs to S3"
- pushd ..
- terraform init -no-color
- terraform apply -no-color -auto-approve
- popd
+echo "INFO: archiving docs to S3 bucket '$bucket'"
+pushd ..
+terraform init -no-color
+terraform apply -no-color -auto-approve
+popd
- echo "S3 docs: <a href=\"https://${CDN_URL}${bucket_path}\">https://${CDN_URL}${bucket_path}</a>"
-fi
+echo "S3 docs: <a href=\"https://${CDN_URL}${bucket_path}\">https://${CDN_URL}${bucket_path}</a>"
provider "aws" {
region = "us-east-1"
profile = "default"
provider "aws" {
region = "us-east-1"
profile = "default"
- s3_force_path_style = false
+ s3_use_path_style = false
skip_credentials_validation = true
skip_metadata_api_check = true
skip_requesting_account_id = true
skip_credentials_validation = true
skip_metadata_api_check = true
skip_requesting_account_id = true
variable "bucket" {
description = "S3 bucket name"
type = string
variable "bucket" {
description = "S3 bucket name"
type = string
- default = "fdio-docs-s3-cloudfront-index"
}
variable "bucket_path" {
}
variable "bucket_path" {
- ../scripts/vpp/docs.sh
publishers:
- ../scripts/vpp/docs.sh
publishers:
+ - fdio-infra-publish-docs-7day
- fdio-infra-publish
- job-template:
- fdio-infra-publish
- job-template: