ci(gha): Add reusable flows 86/43786/1
author    Peter Mikus <[email protected]>
          Mon, 29 Sep 2025 09:17:24 +0000 (11:17 +0200)
committer Peter Mikus <[email protected]>
          Mon, 29 Sep 2025 09:17:24 +0000 (11:17 +0200)
Signed-off-by: Peter Mikus <[email protected]>
Change-Id: I5c237c998a51a9a8f0a12bf05465bccc1d137f54

.github/actions/aws_s3_publish_logs/README.md [new file with mode: 0644]
.github/actions/aws_s3_publish_logs/action.yml [new file with mode: 0644]
.github/actions/setup_executor_env/README.md
.github/workflows/gerrit-csit-perf-mrr-daily.yml
.github/workflows/gerrit-csit-tox-verify.yml

diff --git a/.github/actions/aws_s3_publish_logs/README.md b/.github/actions/aws_s3_publish_logs/README.md
new file mode 100644 (file)
index 0000000..9de762d
--- /dev/null
@@ -0,0 +1,32 @@
+# 🛠️ AWS S3 Publish Logs
+
+Uploads logs from the archives directory into an AWS S3 bucket. Each log is
+gzipped before upload.
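+
+Each file is compressed individually and uploaded with
+`Content-Encoding: gzip`, so it can be served to browsers transparently. A
+minimal sketch of the per-file upload the action performs (bucket and key
+below are illustrative):
+
+<!-- markdownlint-disable MD013 -->
+```bash
+gzip -c results.log > results.log.gz
+aws s3 cp results.log.gz "s3://fdio-logs-s3-cloudfront-index/some/path/results.log.gz" \
+    --content-type "text/plain" \
+    --content-encoding "gzip"
+```
+<!-- markdownlint-enable MD013 -->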
+
+## Usage Example
+
+An example workflow step using this action:
+
+<!-- markdownlint-disable MD013 -->
+```yaml
+- name: AWS S3 Publish Logs
+  uses: fdio/csit/.github/actions/aws_s3_publish_logs@master
+```
+<!-- markdownlint-enable MD013 -->
+
+## Inputs
+
+<!-- markdownlint-disable MD013 -->
+
+| Variable Name | Description                                      |
+| ------------- | ------------------------------------------------ |
+| S3_BUCKET     | Name of the Amazon S3 bucket.                    |
+| S3_PATH       | Path within the Amazon S3 bucket.                |
+| ARCHIVES_PATH | Source directory containing log files to upload. |
+
+<!-- markdownlint-enable MD013 -->
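+
+To override the defaults, pass the inputs explicitly. A minimal sketch; the
+values shown are the action's defaults:
+
+<!-- markdownlint-disable MD013 -->
+```yaml
+- name: AWS S3 Publish Logs
+  uses: fdio/csit/.github/actions/aws_s3_publish_logs@master
+  with:
+    S3_BUCKET: "fdio-logs-s3-cloudfront-index"
+    S3_PATH: "vex-yul-rot-jenkins-1/${{ github.job }}/${{ github.run_id }}"
+    ARCHIVES_PATH: "${{ github.workspace }}/archives"
+```
+<!-- markdownlint-enable MD013 -->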
+
+## Requirements/Dependencies
+
+The gzip command-line tool must be available in the environment for the action
+to succeed. When the AWS CLI is not pre-installed, curl and unzip are also
+required to download and install it.
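+
+A quick way to check for the required tools up front, mirroring the action's
+own `command -v aws` probe (illustrative):
+
+```bash
+for tool in gzip curl unzip; do
+  command -v "$tool" >/dev/null || echo "missing: $tool"
+done
+```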
diff --git a/.github/actions/aws_s3_publish_logs/action.yml b/.github/actions/aws_s3_publish_logs/action.yml
new file mode 100644 (file)
index 0000000..addf6d9
--- /dev/null
@@ -0,0 +1,121 @@
+---
+name: "🛠️ AWS S3 Publish Logs"
+description: |
+  Gzips logs from the archives directory and uploads them to an AWS S3 bucket.
+
+inputs:
+  S3_BUCKET:
+    description: "Name of the Amazon S3 bucket."
+    required: false
+    default: "fdio-logs-s3-cloudfront-index"
+  S3_PATH:
+    description: "Path within the Amazon S3 bucket."
+    required: false
+    default: "vex-yul-rot-jenkins-1/${{ github.job }}/${{ github.run_id }}"
+  ARCHIVES_PATH:
+    description: "Source directory containing log files to upload."
+    required: false
+    default: "${{ github.workspace }}/archives"
+
+runs:
+  using: "composite"
+  steps:
+    - name: Check if AWS CLI is pre-installed
+      id: aws-binary-check
+      shell: bash
+      run: |
+        if command -v aws >/dev/null 2>&1; then
+            echo "AWS CLI is already installed. Skipping install."
+            echo "AWS_CLI_PREINSTALLED=true" >> "$GITHUB_OUTPUT"
+            aws --version
+            exit 0
+        fi
+        echo "AWS_CLI_PREINSTALLED=false" >> "$GITHUB_OUTPUT"
+
+    - name: Cache or Restore the zip
+      if: ${{ steps.aws-binary-check.outputs.AWS_CLI_PREINSTALLED == 'false' }}
+      uses: actions/cache@v4
+      id: aws-cli-cache
+      with:
+        path: ${{ runner.temp }}/aws-cli-cache/*.zip
+        key: ${{ runner.os }}-${{ runner.arch }}-aws-cli-v2-zip
+
+    - name: Install AWS CLI
+      shell: bash
+      if: ${{ steps.aws-binary-check.outputs.AWS_CLI_PREINSTALLED == 'false' }}
+      env:
+        CACHE_HIT: ${{ steps.aws-cli-cache.outputs.cache-hit == 'true' }}
+        CACHE_PATH: ${{ runner.temp }}/aws-cli-cache
+      run: |
+        AWS_PACKAGE="awscli-exe-linux-$(uname -m).zip"
+
+        mkdir -p "$CACHE_PATH"
+        cd "$CACHE_PATH"
+
+        if [ "$CACHE_HIT" = "false" ]; then
+            curl -fsSL \
+                --retry 3 \
+                --retry-delay 5 \
+                --connect-timeout 15 \
+                --max-time 60 \
+                -o "$AWS_PACKAGE" "https://awscli.amazonaws.com/$AWS_PACKAGE"
+        fi
+
+        unzip -o -q "$AWS_PACKAGE"
+        sudo ./aws/install --update
+        rm -rf ./aws
+
+    - name: Upload archives directory content to S3
+      shell: bash
+      run: |
+        get_content_type() {
+          local file_ext="${1##*.}"
+          case "$file_ext" in
+            xml)      echo "application/xml" ;;
+            html)     echo "text/html" ;;
+            txt|log)  echo "text/plain" ;;
+            css)      echo "text/css" ;;
+            md)       echo "text/markdown" ;;
+            rst)      echo "text/x-rst" ;;
+            csv)      echo "text/csv" ;;
+            svg)      echo "image/svg+xml" ;;
+            jpg|jpeg) echo "image/jpeg" ;;
+            png)      echo "image/png" ;;
+            gif)      echo "image/gif" ;;
+            js)       echo "application/javascript" ;;
+            pdf)      echo "application/pdf" ;;
+            json)     echo "application/json" ;;
+            otf)      echo "font/otf" ;;
+            ttf)      echo "font/ttf" ;;
+            woff)     echo "font/woff" ;;
+            woff2)    echo "font/woff2" ;;
+            *)        echo "application/octet-stream" ;;
+          esac
+        }
+
+        pushd "${{ inputs.ARCHIVES_PATH }}"
+
+        # Traverse and upload each file gzipped, preserving relative paths.
+        find . -type f | while read -r file; do
+          rel_path="${file#./}"              # path relative to archives dir
+          tmp_file="$(mktemp --suffix=.gz)"  # temporary gzip file
+          gzip -c "${file}" > "${tmp_file}"  # compress
+
+          content_type=$(get_content_type "${file}")
+
+          S3_URI="s3://${{ inputs.S3_BUCKET }}/${{ inputs.S3_PATH }}/${rel_path}.gz"
+
+          echo "Uploading ${rel_path}.gz -> ${S3_URI} (Content-Type: $content_type, gzip)"
+          aws s3 cp "${tmp_file}" "${S3_URI}" \
+            --content-type "$content_type" \
+            --content-encoding "gzip"
+
+          rm -f "${tmp_file}"
+        done
+
+        popd
+
diff --git a/.github/actions/setup_executor_env/README.md b/.github/actions/setup_executor_env/README.md
index 0b7c645..038532d 100644 (file)
@@ -1,17 +1,18 @@
 # 🛠️ Setup Executor Environment
 
-Action to setup FD.io Nomad executor environment inside a GitHub action/workflow
-
-## setup_executor_env
+Action to set up the FD.io Nomad executor environment inside a GitHub
+action/workflow.
 
 ## Usage Example
 
 Sets the OS details used for Git operations inside other actions/workflows.
 
+<!-- markdownlint-disable MD013 -->
 ```yaml
 - name: "Setup Environment"
   uses: fdio/csit/.github/actions/setup_executor_env@master
 ```
+<!-- markdownlint-enable MD013 -->
 
 ## Outputs
 
diff --git a/.github/workflows/gerrit-csit-perf-mrr-daily.yml b/.github/workflows/gerrit-csit-perf-mrr-daily.yml
index 778b5d3..ed9190e 100644 (file)
@@ -93,4 +93,7 @@ jobs:
         with:
           name: ${{ env.JOB_NAME }}-${{ github.run_number }}
           path: archives/
-          if-no-files-found: "ignore"
\ No newline at end of file
+          if-no-files-found: "ignore"
+
+      - name: AWS S3 Publish Logs
+        uses: fdio/csit/.github/actions/aws_s3_publish_logs@master
diff --git a/.github/workflows/gerrit-csit-tox-verify.yml b/.github/workflows/gerrit-csit-tox-verify.yml
index 64e9bee..41a2af2 100644 (file)
@@ -71,11 +71,11 @@ jobs:
         # yamllint disable-line rule:line-length
         uses: lfit/checkout-gerrit-change-action@54d751e8bd167bc91f7d665dabe33fae87aaaa63 # v0.9
         with:
-          gerrit-refspec: ${{ inputs.GERRIT_REFSPEC }}
-          gerrit-project: ${{ inputs.GERRIT_PROJECT }}
+          gerrit-refspec: ${{ github.event.inputs.GERRIT_REFSPEC }}
+          gerrit-project: ${{ github.event.inputs.GERRIT_PROJECT }}
           gerrit-url: ${{ vars.GERRIT_URL }}
           delay: "30s"
-          ref: refs/heads/${{ inputs.GERRIT_BRANCH }}
+          ref: refs/heads/${{ github.event.inputs.GERRIT_BRANCH }}
 
       - name: "Retrieve GIT commit message"
         # yamllint disable-line rule:line-length
@@ -94,4 +94,7 @@ jobs:
         with:
           name: ${{ env.JOB_NAME }}-${{ github.run_number }}
           path: archives/
-          if-no-files-found: "ignore"
\ No newline at end of file
+          if-no-files-found: "ignore"
+
+      - name: AWS S3 Publish Logs
+        uses: fdio/csit/.github/actions/aws_s3_publish_logs@master