Docs deploy via Terraform 35/33735/7
author    pmikus <pmikus@cisco.com>
          Thu, 16 Sep 2021 11:07:49 +0000 (11:07 +0000)
committer Peter Mikus <pmikus@cisco.com>
          Thu, 23 Sep 2021 11:09:38 +0000 (11:09 +0000)
+ This also fixes a problem with content-type detection.

Signed-off-by: pmikus <pmikus@cisco.com>
Change-Id: I5fb89668a1792c4497fce1367279657c415d8a32
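
For context, the content-type detection mentioned above is done in Terraform as a
map lookup keyed on the file extension (see the aws_s3_bucket_object resource in the
new terraform_s3_docs_ship.sh below). A minimal standalone sketch of that lookup,
assuming Terraform 0.12+, with a reduced mime_types map and a hard-coded file name
used purely for illustration:

    # Illustrative sketch only: extension-based content-type lookup,
    # same pattern as the aws_s3_bucket_object resource in the diff below.
    locals {
      mime_types = {
        html = "text/html"
        css  = "text/css"
      }
    }

    output "detected_content_type" {
      # "index.html" -> "text/html"; unknown extensions fall back to a
      # generic binary type instead of relying on S3 content-type guessing.
      value = lookup(
        local.mime_types,
        regex("\\.(?P<extension>[A-Za-z0-9]+)$", "index.html").extension,
        "application/octet-stream"
      )
    }

Dropped into an empty directory, terraform init && terraform apply prints the
resolved type as an output.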

jjb/global-macros.yaml
jjb/scripts/publish_docs.sh
jjb/scripts/terraform_s3_docs_ship.sh [new file with mode: 0644]

diff --git a/jjb/global-macros.yaml b/jjb/global-macros.yaml
index e00d42d..0d3e934 100644
             - file-id: "jenkins-s3-docs-ship"
               target: $HOME/.aws/credentials
       - shell: !include-raw:
-          - scripts/publish_library_py.sh
+          - scripts/terraform_s3_docs_ship.sh
       - shell: !include-raw:
           - scripts/publish_docs.sh
       - shell: !include-raw:
diff --git a/jjb/scripts/publish_docs.sh b/jjb/scripts/publish_docs.sh
index 7246ffc..86963b6 100644
@@ -17,32 +17,36 @@ echo "---> publish_docs.sh"
 
 set -exuo pipefail
 
-S3_BUCKET="fdio-docs-s3-cloudfront-index"
 CDN_URL="s3-docs.fd.io"
-PYTHON_SCRIPT="/w/workspace/publish_library.py"
 
 if [[ ${JOB_NAME} == *merge* ]]; then
     case "${JOB_NAME}" in
         *"csit-trending"*)
-            SITE_DIR="${WORKSPACE}/resources/tools/presentation/_build"
-            s3_path="csit/${GERRIT_BRANCH}/trending"
+            workspace_dir="${WORKSPACE}/resources/tools/presentation/_build"
+            bucket_path="/csit/${GERRIT_BRANCH}/trending/"
             ;;
         *"csit-report"*)
-            SITE_DIR="${WORKSPACE}/resources/tools/presentation/_build"
-            s3_path="csit/${GERRIT_BRANCH}/report"
+            workspace_dir="${WORKSPACE}/resources/tools/presentation/_build"
+            bucket_path="/csit/${GERRIT_BRANCH}/report/"
             ;;
         *"csit-docs"*)
-            SITE_DIR="${WORKSPACE}/resources/tools/doc_gen/_build"
-            s3_path="csit/${GERRIT_BRANCH}/docs"
+            workspace_dir="${WORKSPACE}/resources/tools/doc_gen/_build"
+            bucket_path="/csit/${GERRIT_BRANCH}/docs/"
             ;;
         *)
             die "Unknown job: ${JOB_NAME}"
     esac
 
-    echo "INFO: S3 path $s3_path"
+    export TF_VAR_workspace_dir=$workspace_dir
+    export TF_VAR_bucket_path=$bucket_path
+    export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials
+    export AWS_DEFAULT_REGION="us-east-1"
 
     echo "INFO: archiving docs to S3"
-    python3 $PYTHON_SCRIPT deploy_docs "$S3_BUCKET" "$s3_path" "$SITE_DIR"
+    pushd ..
+    terraform init -no-color
+    terraform apply -no-color -auto-approve
+    popd
 
-    echo "S3 docs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
+    echo "S3 docs: <a href=\"https://$CDN_URL/$bucket_path\">https://$CDN_URL/$bucket_path</a>"
 fi
diff --git a/jjb/scripts/terraform_s3_docs_ship.sh b/jjb/scripts/terraform_s3_docs_ship.sh
new file mode 100644
index 0000000..5989e3e
--- /dev/null
+++ b/jjb/scripts/terraform_s3_docs_ship.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> terraform_s3_docs_ship.sh"
+
+set -exuo pipefail
+
+cat >"/w/workspace/main.tf" <<'END_OF_TERRAFORM_SCRIPT'
+provider "aws" {
+  region                      = "us-east-1"
+  profile                     = "default"
+  s3_force_path_style         = false
+  skip_credentials_validation = true
+  skip_metadata_api_check     = true
+  skip_requesting_account_id  = true
+}
+
+locals {
+  mime_types = {
+    xml   = "application/xml",
+    html  = "text/html",
+    txt   = "text/plain",
+    log   = "text/plain",
+    css   = "text/css",
+    md    = "text/markdown",
+    rst   = "text/x-rst",
+    csv   = "text/csv",
+    svg   = "image/svg+xml",
+    jpg   = "image/jpeg",
+    png   = "image/png",
+    gif   = "image/gif",
+    js    = "application/javascript",
+    pdf   = "application/pdf",
+    json  = "application/json",
+    otf   = "font/otf",
+    ttf   = "font/ttf",
+    woff  = "font/woff",
+    woff2 = "font/woff2"
+  }
+}
+
+variable "workspace_dir" {
+  description = "Workspace base directory"
+  type        = string
+}
+
+variable "file_match_pattern" {
+  description = "File matching pattern"
+  type        = string
+  default     = "**/*"
+}
+
+variable "bucket" {
+  description = "S3 bucket name"
+  type        = string
+  default     = "fdio-docs-s3-cloudfront-index"
+}
+
+variable "bucket_path" {
+  description = "S3 bucket path to key"
+  type        = string
+}
+
+resource "aws_s3_bucket_object" "object" {
+  for_each = fileset(var.workspace_dir, var.file_match_pattern)
+
+  bucket = var.bucket
+  key    = "${var.bucket_path}${each.value}"
+  source = "${var.workspace_dir}/${each.value}"
+
+  cache_control = "no-store,max-age=0,s-maxage=0"
+  etag          = filemd5("${var.workspace_dir}/${each.value}")
+  content_type = lookup(
+    local.mime_types,
+    regex("\\.(?P<extension>[A-Za-z0-9]+)$", each.value).extension,
+    "application/octet-stream"
+  )
+}
+END_OF_TERRAFORM_SCRIPT