Global: Rework archive artifacts 60/33060/29
author pmikus <pmikus@cisco.com>
Thu, 8 Jul 2021 12:34:33 +0000 (12:34 +0000)
committer Peter Mikus <pmikus@cisco.com>
Thu, 5 Aug 2021 14:45:31 +0000 (14:45 +0000)
This patch removes archive-artifacts-parameter macro
and ARCHIVE_ARTIFACTS env var from csit and vpp projects.

All project specific artifacts to be uploaded with the
log files SHOULD BE copied to $WORKSPACE/archives.

The next step once this is merged will be to remove NEXUS
entirely via JCasC.

+ Remove archive-artifacts from all csit/vpp yaml files.
+ Add fdio-infra-ship-backup-logs macro
+ Remove unused jjb/include-raw-deploy-archives.sh
+ CSIT:
  - copy job artifacts to $WORKSPACE/archives
+ HC2VPP:
  - remove CSIT
+ TLDK:
  - remove CSIT

Signed-off-by: pmikus <pmikus@cisco.com>
Signed-off-by: Dave Wallace <dwallacelf@gmail.com>
Change-Id: Iada020cf269714c34f9ce32d764d991827e3b003

23 files changed:
jjb/csit/csit-perf.yaml
jjb/csit/csit-tox.yaml
jjb/csit/csit-vpp-device.yaml
jjb/csit/csit.yaml
jjb/global-macros.yaml
jjb/hc2vpp/hc2vpp-csit.yaml [deleted file]
jjb/hc2vpp/include-raw-hc2vpp-csit-integration-odl.sh [deleted file]
jjb/hc2vpp/include-raw-hc2vpp-csit-integration.sh [deleted file]
jjb/hc2vpp/include-raw-hc2vpp-csit-perf.sh [deleted file]
jjb/hc2vpp/include-raw-hc2vpp-csit-verify-odl.sh [deleted file]
jjb/hc2vpp/include-raw-hc2vpp-csit-verify-prebuild.sh [deleted file]
jjb/hc2vpp/include-raw-hc2vpp-csit-verify.sh [deleted file]
jjb/include-raw-deploy-archives.sh [deleted file]
jjb/scripts/backup_upload_archives.sh [deleted file]
jjb/scripts/csit/device-semiweekly.sh
jjb/scripts/csit/perf-timed.sh
jjb/scripts/csit/tldk-functional-virl.sh [deleted file]
jjb/scripts/logs_publish.sh [new file with mode: 0644]
jjb/scripts/post_build_deploy_archives.sh
jjb/scripts/vpp/csit-device.sh
jjb/scripts/vpp/csit-perf.sh
jjb/tldk/tldk.yaml
jjb/vpp/vpp.yaml

index 3c5b213..d29cea3 100644 (file)
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
       # This is a manually-triggered verify job, part of API coverage.
       # CRC checking is needed to ensure vpp crc job breaks only when intended.
       # Can be removed when this job is no longer needed for full API coverage.
           - ../scripts/csit/perf-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: false
-    archive-artifacts: 'csit/archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     wrappers:
       - fdio-infra-wrappers-non-activity-timeout:
           - ../scripts/csit/perf-timed.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: false
-    archive-artifacts: 'csit/archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     wrappers:
       - fdio-infra-wrappers-non-activity-timeout:
           - ../scripts/csit/perf-timed.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: false
-    archive-artifacts: 'csit/archive/*.*'
     latest-only: false
 
     build-discarder:
           project: '{project}'
       - gerrit-parameter:
           branch: '{branch}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     wrappers:
       - fdio-infra-wrappers-non-activity-timeout:
           - ../scripts/csit/perf-timed.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           - ../scripts/csit/perf-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: false
-    archive-artifacts: 'csit/archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     wrappers:
       - fdio-infra-wrappers-non-activity-timeout:
           - ../scripts/csit/perf-timed.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           - ../scripts/csit/perf-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           - ../scripts/csit/perf-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           - ../scripts/csit/perf-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           csit-perf-trial-duration: "{csit-perf-trial-duration}"
       - csit-perf-trial-multiplicity-parameter:
           csit-perf-trial-multiplicity: "{csit-perf-trial-multiplicity}"
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           - ../scripts/csit/perf-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
-
       - fdio-infra-publish
index 725d2ff..fd2d1cc 100644 (file)
@@ -25,8 +25,6 @@
     stream:
       - master:
           branch: 'master'
-      - '2009':
-          branch: 'rls2009'
       - '2009_lts':
           branch: 'rls2009_lts'
       - '2101':
@@ -40,7 +38,6 @@
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: '*.log'
     latest-only: false
 
     build-discarder:
       - gerrit-parameter:
           branch: '{branch}'
       - gerrit-refspec-parameter
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           - ../scripts/csit/tox.sh
 
     publishers:
-      # TODO: Remove this when a voting pylint difference checker
-      # with nice enough output is implemented.
-      - violations:
-          pylint:
-            min: 10
-            max: 250
-            unstable: 250
-            pattern: 'pylint.log'
       - fdio-infra-publish
index 6e61932..b84ef73 100644 (file)
@@ -80,7 +80,6 @@
     project-type: freestyle
     node: 'csit-builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
       - gerrit-refspec-parameter
       - gerrit-event-type-parameter
       - gerrit-event-comment-text-parameter
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
       # This is an automatic verify job, part of API coverage.
       # CRC checking is needed to ensure vpp crc job breaks only when intended.
       # Unlikely to be removed, as this job is primary source of API coverage.
           - ../scripts/csit/device-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'csit-builder-{os}-prod-{executor-arch}'
     concurrent: false
-    archive-artifacts: 'csit/archive/*.*'
     latest-only: false
 
     build-discarder:
           make-parallel-jobs: '{make-parallel-jobs}'
       - os-parameter:
           os: '{os}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     wrappers:
       - fdio-infra-wrappers-non-activity-timeout:
           - ../scripts/csit/device-semiweekly.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
       - fdio-infra-publish
 
 - job-template:
     project-type: freestyle
     node: 'csit-builder-{os}-prod-{executor-arch}'
     concurrent: false
-    archive-artifacts: 'archive/*.*'
     latest-only: false
 
     build-discarder:
           make-parallel-jobs: '{make-parallel-jobs}'
       - os-parameter:
           os: '{os}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - git-scm:
           - ../scripts/csit/device-verify.sh
 
     publishers:
-      - robot:
-          output-path: 'archives'
-          other-files:
-            - '*.*'
       - fdio-infra-publish
index 9658292..204194a 100644 (file)
           branch: '{branch}'
       - maven-exec:
           maven-version: 'mvn33-new'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
           branch: '{branch}'
       - maven-exec:
           maven-version: 'mvn33-new'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
 
     scm:
       - gerrit-trigger-scm:
index 2fcc411..eed5c0b 100644 (file)
               build-steps:
                 - shell: !include-raw: scripts/post_build_executor_info.sh
                 - shell: !include-raw: scripts/post_build_deploy_archives.sh
-                - shell: !include-raw: scripts/backup_upload_archives.sh
+                - fdio-infra-ship-backup-logs
                 - fdio-infra-ship-logs
           mark-unstable-if-failed: true
       - workspace-cleanup:
           days-to-keep: '{build-days-to-keep}'
           num-to-keep: '{build-num-to-keep}'
 
+- builder:
+    name: fdio-infra-ship-backup-logs
+    builders:
+      - config-file-provider:
+          files:
+            - file-id: "jenkins-s3-log-ship"
+              variable: $HOME/.aws/credentials
+      - shell: !include-raw:
+          - scripts/logs_publish.sh
+      - shell: !include-raw:
+          - ../global-jjb/shell/logs-clear-credentials.sh
+
 - builder:
     name: fdio-infra-ship-logs
     builders:
       - shell: !include-raw:
           - ../global-jjb/shell/logs-clear-credentials.sh
       - description-setter:
-          regexp: "(^(Nexus|S3) build logs: .*)"
+          regexp: "(^Nexus build logs: .*)"
 
 - builder:
     name: packer-validate
diff --git a/jjb/hc2vpp/hc2vpp-csit.yaml b/jjb/hc2vpp/hc2vpp-csit.yaml
deleted file mode 100644 (file)
index 76ef874..0000000
+++ /dev/null
@@ -1,421 +0,0 @@
----
-- project:
-    name: hc2vpp-csit
-    description: 'jenkins jobs to test Hc2vpp features.'
-    jobs:
-      - 'hc2vpp-csit-integration-{stream}-{os}'
-      - 'hc2vpp-csit-verify-{stream}-{os}'
-      - 'hc2vpp-csit-integration-odl-{odl}-{stream}-{os}'
-      - 'hc2vpp-csit-verify-odl-{odl}-{stream}-{os}'
-    project: 'hc2vpp'
-    build-artifact-num-to-keep: 10
-    stream:
-      - master:
-          branch: 'master'
-          repo-stream-part: 'master'
-      - '1904':
-          branch: 'stable/1904'
-          repo-stream-part: 'stable.1904'
-
-    os:
-      - ubuntu1804:
-          repo-os-part: 'ubuntu.bionic.main'
-
-    odl: 'neon'
-
-- project:
-    name: csit-hc2vpp-perf
-    description: 'jenkins jobs to test Hc2vpp performance.'
-    jobs:
-      - 'hc2vpp-csit-perf-{stream}-{os}'
-    project: 'hc2vpp'
-    build-artifact-num-to-keep: 10
-    stream:
-      - master:
-          branch: 'master'
-          repo-stream-part: 'master'
-
-    os:
-      - ubuntu1804:
-          repo-os-part: 'ubuntu.bionic.main'
-
-    odl: 'neon'
-
-- job-template:
-    name: 'hc2vpp-csit-integration-{stream}-{os}'
-
-    project-type: freestyle
-    node: '{os}-builder-4c-4g'
-    concurrent: true
-    archive-artifacts: '**/csit/archive/*.*'
-    latest-only: false
-
-
-    build-discarder:
-      daysToKeep: '{build-days-to-keep}'
-      numToKeep: '{build-num-to-keep}'
-      artifactDaysToKeep: '{build-artifact-days-to-keep}'
-      artifactNumToKeep: '{build-artifact-num-to-keep}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-      - os-parameter:
-          os: '{os}'
-      - stream-parameter:
-          stream: '{stream}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
-
-    scm:
-      - git-scm:
-          credentials-id: 'jenkins-gerrit-credentials'
-          branch: '{branch}'
-
-    wrappers:
-      - fdio-infra-wrappers:
-          build-timeout: '{build-timeout}'
-
-    triggers:
-      - reverse:
-          jobs: 'hc2vpp-integration-{stream}-{os}'
-          result: 'success'
-
-    builders:
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-integration.sh
-
-    publishers:
-      - email-notification:
-          email-prefix: '[hc2vpp]'
-
-      - robot-report:
-          output-path: 'csit/archive'
-
-      - lf-infra-publish
-
-- job-template:
-    name: 'hc2vpp-csit-verify-{stream}-{os}'
-
-    project-type: maven
-    node: '{os}-builder-4c-4g'
-    jdk: openjdk8-{os}
-    concurrent: true
-    archive-artifacts: '**/csit/archive/*.*'
-    latest-only: false
-
-    build-discarder:
-      daysToKeep: '{build-days-to-keep}'
-      numToKeep: '{build-num-to-keep}'
-      artifactDaysToKeep: '{build-artifact-days-to-keep}'
-      artifactNumToKeep: '{build-artifact-num-to-keep}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-      - gerrit-parameter:
-          branch: '{branch}'
-      - os-parameter:
-          os: '{os}'
-      - maven-project-parameter:
-          maven: 'mvn36'
-      - maven-exec:
-          maven-version: 'mvn36'
-      - stream-parameter:
-          stream: '{stream}'
-      - odl-parameter:
-          odl: '{odl}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
-
-    scm:
-      - gerrit-trigger-scm:
-          credentials-id: 'jenkins-gerrit-credentials'
-          refspec: '$GERRIT_REFSPEC'
-          choosing-strategy: 'gerrit'
-
-    wrappers:
-      - fdio-infra-wrappers:
-          build-timeout: '{build-timeout}'
-
-    triggers:
-      - gerrit:
-          server-name: 'Primary'
-          trigger-on:
-            - comment-added-contains-event:
-                comment-contains-value: 'verify-csit'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-          skip-vote:
-            successful: true
-            failed: true
-            unstable: true
-            notbuilt: true
-
-    prebuilders:
-      - provide-maven-settings:
-          settings-file: 'honeycomb-settings'
-          global-settings-file: 'global-settings'
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-verify-prebuild.sh
-
-    maven:
-      maven-name: 'mvn36'
-      root-pom: 'pom.xml'
-      # yamllint disable-line rule:line-length
-      goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -DskipTests -Dcheckstyle.skip=true'
-      maven-opts: '-Xmx2048m -Dmaven.compile.fork=true'
-      settings: 'hc2vpp-settings'
-      settings-type: cfp
-      global-settings: 'global-settings'
-      global-settings-type: cfp
-
-    postbuilders:
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-verify.sh
-
-    publishers:
-      - email-notification:
-          email-prefix: '[hc2vpp]'
-
-      - robot-report:
-          output-path: 'csit/archive'
-
-      - lf-infra-publish
-
-- job-template:
-    name: 'hc2vpp-csit-integration-odl-{odl}-{stream}-{os}'
-
-    project-type: freestyle
-    node: '{os}-builder-4c-4g'
-    concurrent: true
-    archive-artifacts: '**/csit/archive/*.*'
-    latest-only: false
-
-
-    build-discarder:
-      daysToKeep: '{build-days-to-keep}'
-      numToKeep: '{build-num-to-keep}'
-      artifactDaysToKeep: '{build-artifact-days-to-keep}'
-      artifactNumToKeep: '{build-artifact-num-to-keep}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-      - os-parameter:
-          os: '{os}'
-      - stream-parameter:
-          stream: '{stream}'
-      - odl-parameter:
-          odl: '{odl}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
-
-    scm:
-      - git-scm:
-          credentials-id: 'jenkins-gerrit-credentials'
-          branch: '{branch}'
-
-    wrappers:
-      - fdio-infra-wrappers:
-          build-timeout: '{build-timeout}'
-
-    triggers:
-      - reverse:
-          jobs: 'hc2vpp-integration-{stream}-{os}'
-          result: 'success'
-
-    builders:
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-integration-odl.sh
-
-    publishers:
-      - email-notification:
-          email-prefix: '[hc2vpp]'
-
-      - robot-report:
-          output-path: 'csit/archive'
-
-      - lf-infra-publish
-
-- job-template:
-    name: 'hc2vpp-csit-verify-odl-{odl}-{stream}-{os}'
-
-    project-type: maven
-    node: '{os}-builder-4c-4g'
-    jdk: openjdk8-{os}
-    concurrent: true
-    archive-artifacts: '**/csit/archive/*.*'
-    latest-only: false
-
-    build-discarder:
-      daysToKeep: '{build-days-to-keep}'
-      numToKeep: '{build-num-to-keep}'
-      artifactDaysToKeep: '{build-artifact-days-to-keep}'
-      artifactNumToKeep: '{build-artifact-num-to-keep}'
-    parameters:
-      - project-parameter:
-          project: '{project}'
-      - gerrit-parameter:
-          branch: '{branch}'
-      - os-parameter:
-          os: '{os}'
-      - maven-project-parameter:
-          maven: 'mvn36'
-      - maven-exec:
-          maven-version: 'mvn36'
-      - stream-parameter:
-          stream: '{stream}'
-      - odl-parameter:
-          odl: '{odl}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
-
-    scm:
-      - gerrit-trigger-scm:
-          credentials-id: 'jenkins-gerrit-credentials'
-          refspec: '$GERRIT_REFSPEC'
-          choosing-strategy: 'gerrit'
-
-    wrappers:
-      - fdio-infra-wrappers:
-          build-timeout: '{build-timeout}'
-
-    triggers:
-      - gerrit:
-          server-name: 'Primary'
-          trigger-on:
-            - comment-added-contains-event:
-                comment-contains-value: 'verify-csit'
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-          skip-vote:
-            successful: true
-            failed: true
-            unstable: true
-            notbuilt: true
-
-    prebuilders:
-      - provide-maven-settings:
-          settings-file: 'honeycomb-settings'
-          global-settings-file: 'global-settings'
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-verify-prebuild.sh
-
-    maven:
-      maven-name: 'mvn36'
-      root-pom: 'pom.xml'
-      # yamllint disable-line rule:line-length
-      goals: 'clean install -V -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -DskipTests -Dcheckstyle.skip=true'
-      maven-opts: '-Xmx2048m -Dmaven.compile.fork=true'
-      settings: 'hc2vpp-settings'
-      settings-type: cfp
-      global-settings: 'global-settings'
-      global-settings-type: cfp
-
-    postbuilders:
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-verify-odl.sh
-
-    publishers:
-      - email-notification:
-          email-prefix: '[hc2vpp]'
-
-      - robot-report:
-          output-path: 'csit/archive'
-
-      - lf-infra-publish
-
-- job-template:
-    name: 'hc2vpp-csit-perf-{stream}-{os}'
-
-    project-type: freestyle
-    node: '{os}-builder-4c-4g'
-    concurrent: false
-    archive-artifacts: '**/csit/archive/*.*'
-    latest-only: false
-
-
-    build-discarder:
-      daysToKeep: '{build-days-to-keep}'
-      numToKeep: '{build-num-to-keep}'
-      artifactDaysToKeep: '{build-artifact-days-to-keep}'
-      artifactNumToKeep: '{build-artifact-num-to-keep}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-      - os-parameter:
-          os: '{os}'
-      - stream-parameter:
-          stream: '{stream}'
-      - odl-parameter:
-          odl: '{odl}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
-    scm:
-      - gerrit-trigger-scm:
-          credentials-id: 'jenkins-gerrit-credentials'
-          refspec: '$GERRIT_REFSPEC'
-          choosing-strategy: 'gerrit'
-
-    wrappers:
-      - fdio-infra-wrappers:
-          build-timeout: '{build-timeout}'
-
-    triggers:
-      - timed: '@weekly'
-
-      - gerrit:
-          server-name: 'Primary'
-          trigger-on:
-            - comment-added-contains-event:
-                comment-contains-value: 'verify-perf'
-
-          projects:
-            - project-compare-type: 'ANT'
-              project-pattern: '{project}'
-              branches:
-                - branch-compare-type: 'ANT'
-                  branch-pattern: '**/{branch}'
-          skip-vote:
-            successful: false
-            failed: false
-            unstable: false
-            notbuilt: false
-
-    builders:
-      - shell:
-          !include-raw-escape: include-raw-hc2vpp-csit-perf.sh
-
-    publishers:
-      - robot-report:
-          output-path: 'csit/archive'
-
-      - lf-infra-publish
-
-- parameter:
-    name: odl-parameter
-    parameters:
-      - string:
-          name: ODL
-          default: '{odl}'
-          description: "ODL version parameter."
diff --git a/jjb/hc2vpp/include-raw-hc2vpp-csit-integration-odl.sh b/jjb/hc2vpp/include-raw-hc2vpp-csit-integration-odl.sh
deleted file mode 100644 (file)
index d21d193..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-set -xeu -o pipefail
-
-# Get CSIT branch
-if [[ -f csit-test-branch ]]; then
-    chmod +x csit-test-branch
-    CSIT_BRANCH=`./csit-test-branch`
-else
-    CSIT_BRANCH='master'
-fi
-
-# Clone csit
-git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}
-
-# If the git clone fails, complain clearly and exit
-if [[ $? != 0 ]]; then
-    echo "Failed to run: git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}"
-    exit 1
-fi
-
-cd csit
-# execute csit bootstrap script if it exists
-if [[ ! -e bootstrap-hc2vpp-integration-odl.sh ]]
-then
-    echo 'ERROR: No bootstrap-hc2vpp-integration.sh found'
-    exit 1
-else
-    # make sure that bootstrap.sh is executable
-    chmod +x bootstrap-hc2vpp-integration-odl.sh
-    # run the script
-    ./bootstrap-hc2vpp-integration-odl.sh ${STREAM} ${OS} ${ODL}
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/hc2vpp/include-raw-hc2vpp-csit-integration.sh b/jjb/hc2vpp/include-raw-hc2vpp-csit-integration.sh
deleted file mode 100644 (file)
index 84a3818..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-set -xeu -o pipefail
-
-# Get CSIT branch
-if [[ -f csit-test-branch ]]; then
-    chmod +x csit-test-branch
-    CSIT_BRANCH=`./csit-test-branch`
-else
-    CSIT_BRANCH='master'
-fi
-
-# Clone csit
-git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}
-
-# If the git clone fails, complain clearly and exit
-if [[ $? != 0 ]]; then
-    echo "Failed to run: git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}"
-    exit 1
-fi
-
-cd csit
-# execute csit bootstrap script if it exists
-if [[ ! -e bootstrap-hc2vpp-integration.sh ]]
-then
-    echo 'ERROR: No bootstrap-hc2vpp-integration.sh found'
-    exit 1
-else
-    # make sure that bootstrap.sh is executable
-    chmod +x bootstrap-hc2vpp-integration.sh
-    # run the script
-    ./bootstrap-hc2vpp-integration.sh ${STREAM} ${OS}
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/hc2vpp/include-raw-hc2vpp-csit-perf.sh b/jjb/hc2vpp/include-raw-hc2vpp-csit-perf.sh
deleted file mode 100644 (file)
index bf46af5..0000000
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-set -xeu -o pipefail
-
-# Get CSIT branch
-if [[ -f csit-test-branch ]]; then
-    chmod +x csit-test-branch
-    CSIT_BRANCH=`./csit-test-branch`
-else
-    CSIT_BRANCH='master'
-fi
-
-# Clone csit
-git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}
-
-# If the git clone fails, complain clearly and exit
-if [[ $? != 0 ]]; then
-    echo "Failed to run: git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}"
-    exit 1
-fi
-
-cd csit
-# execute csit bootstrap script if it exists
-if [[ ! -e bootstrap-hc2vpp-perf.sh ]]
-then
-    echo 'ERROR: No bootstrap-hc2vpp-perf.sh found'
-    exit 1
-else
-    # make sure that bootstrap.sh is executable
-    chmod +x bootstrap-hc2vpp-perf.sh
-    # run the script
-    ./bootstrap-hc2vpp-perf.sh ${STREAM} ${OS} ${ODL}
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/hc2vpp/include-raw-hc2vpp-csit-verify-odl.sh b/jjb/hc2vpp/include-raw-hc2vpp-csit-verify-odl.sh
deleted file mode 100644 (file)
index 271668d..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-set -xeu -o pipefail
-
-# Figure out what system we are running on
-if [[ -f /etc/lsb-release ]];then
-    . /etc/lsb-release
-elif [[ -f /etc/redhat-release ]];then
-    sudo yum install -y redhat-lsb
-    DISTRIB_ID=`lsb_release -si`
-    DISTRIB_RELEASE=`lsb_release -sr`
-    DISTRIB_CODENAME=`lsb_release -sc`
-    DISTRIB_DESCRIPTION=`lsb_release -sd`
-fi
-echo "----- OS INFO -----"
-echo DISTRIB_ID: ${DISTRIB_ID}
-echo DISTRIB_RELEASE: ${DISTRIB_RELEASE}
-echo DISTRIB_CODENAME: ${DISTRIB_CODENAME}
-echo DISTRIB_DESCRIPTION: ${DISTRIB_DESCRIPTION}
-DISTRIB_ID="Ubuntu"
-if [[ "$DISTRIB_ID" != "Ubuntu" ]]; then
-    echo 'ERROR: Only Ubuntu is supported currently.'
-    exit 2
-fi
-
-# create HC .deb packages
-./packaging/deb/${DISTRIB_CODENAME}/debuild.sh
-cp ./packaging/deb/${DISTRIB_CODENAME}/*.deb ${WORKSPACE}/csit
-
-cd ${WORKSPACE}/csit
-# execute csit bootstrap script if it exists
-if [[ ! -e bootstrap-hc2vpp-verify.sh ]]
-then
-    echo 'ERROR: No bootstrap-hc2vpp-verify.sh found'
-    exit 1
-else
-    # make sure that bootstrap.sh is executable
-    chmod +x bootstrap-hc2vpp-verify.sh
-    # run the script
-    ./bootstrap-hc2vpp-verify-odl.sh ${OS} ${ODL}
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
\ No newline at end of file
diff --git a/jjb/hc2vpp/include-raw-hc2vpp-csit-verify-prebuild.sh b/jjb/hc2vpp/include-raw-hc2vpp-csit-verify-prebuild.sh
deleted file mode 100644 (file)
index 8e0c4ef..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/bin/bash
-set -xe -o pipefail
-
-# Parse optional arguments from gerrit comment trigger
-for i in ${GERRIT_EVENT_COMMENT_TEXT}; do
-    case ${i} in
-        *honeycomb=*)
-            hc_commit_id=`echo "${i}" | cut -d = -f2-`
-        ;;
-        *jvpp=*)
-            jvpp_commit_id=`echo "${i}" | cut -d = -f2-`
-        ;;
-        *vpp=*)
-            vpp_commit_id=`echo "${i}" | cut -d = -f2-`
-        ;;
-        *nsh_sfc=*)
-            nsh_commit_id=`echo "${i}" | cut -d = -f2-`
-        ;;
-        *csit=*)
-            csit_commit_id=`echo "${i}" | cut -d = -f2-`
-        ;;
-        *)
-        ;;
-    esac
-done
-
-# If HC variable is set, clone and build Honeycomb infra from the specified commit
-# Otherwise skip this step, hc2vpp will use Honeycomb snapshots from Nexus
-if [[ -n "${hc_commit_id}" ]]; then
-    git clone https://gerrit.fd.io/r/honeycomb
-    cd honeycomb
-    ref=`git ls-remote -q | grep ${hc_commit_id} | awk '{print $2}'`
-    git fetch origin ${ref} && git checkout FETCH_HEAD
-    mvn clean install -DskipTests -Dcheckstyle.skip -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}"
-    if [[ $? != 0 ]]; then
-        echo "Honeycomb infra build failed."
-        exit 1
-    fi
-    cd ${WORKSPACE}
-    # Clean up when done. Leftover build files interfere with building hc2vpp.
-    rm -rf honeycomb
-fi
-
-# TODO: Add option to build custom VPP and NSH packages
-
-# Get CSIT branch from which to test from
-if [[ -f csit-test-branch ]]; then
-    chmod +x csit-test-branch
-    CSIT_BRANCH=`./csit-test-branch`
-else
-    CSIT_BRANCH='master'
-fi
-
-# Clone csit
-git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}
-
-# If the git clone fails, complain clearly and exit
-if [[ $? != 0 ]]; then
-    echo "Failed to run: git clone https://gerrit.fd.io/r/csit --branch ${CSIT_BRANCH}"
-    exit 1
-fi
-
-cd csit
-
-# If CSIT commit ID is given, checkout the specified commit
-if [[ -n "${csit_commit_id}" ]]; then
-    # Example:
-    # ...
-    # e8f326efebb58e28dacb9ebb653baf95aad1448c refs/changes/08/11808/1
-    # ...
-    ref=`git ls-remote -q | grep ${csit_commit_id} | awk '{print $2}'`
-    git fetch origin ${ref} && git checkout FETCH_HEAD
-fi
-
-# Download VPP packages
-if [[ "1807 1810 1901" =~ .*$STREAM.* ]]; then
-    # add stable prefix for branches which have older version of package download script
-    # This can be removed when support for 1901 branch ends.
-    if [[ -n "${jvpp_commit_id}" ]]; then
-        echo "Error: Specifying jvpp custom commit is not supported for 1807,1810,1901 stable branches"
-        exit 1
-    fi
-    ./resources/tools/scripts/download_hc_build_pkgs.sh 'stable.'${STREAM} ${OS}
-else
-    # master and 1904+ branches use new package-cloud download script
-    ./resources/tools/scripts/download_hc_build_pkgs.sh ${STREAM} ${OS} ${jvpp_commit_id}
-fi
-
-cd ${WORKSPACE}
diff --git a/jjb/hc2vpp/include-raw-hc2vpp-csit-verify.sh b/jjb/hc2vpp/include-raw-hc2vpp-csit-verify.sh
deleted file mode 100644 (file)
index 21dc80c..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-set -xeu -o pipefail
-
-# Figure out what system we are running on
-if [[ -f /etc/lsb-release ]];then
-    . /etc/lsb-release
-elif [[ -f /etc/redhat-release ]];then
-    sudo yum install -y redhat-lsb
-    DISTRIB_ID=`lsb_release -si`
-    DISTRIB_RELEASE=`lsb_release -sr`
-    DISTRIB_CODENAME=`lsb_release -sc`
-    DISTRIB_DESCRIPTION=`lsb_release -sd`
-fi
-echo "----- OS INFO -----"
-echo DISTRIB_ID: ${DISTRIB_ID}
-echo DISTRIB_RELEASE: ${DISTRIB_RELEASE}
-echo DISTRIB_CODENAME: ${DISTRIB_CODENAME}
-echo DISTRIB_DESCRIPTION: ${DISTRIB_DESCRIPTION}
-
-if [[ "$DISTRIB_ID" != "Ubuntu" ]]; then
-    echo 'ERROR: Only Ubuntu is supported currently.'
-    exit 2
-fi
-
-# create HC .deb packages
-./packaging/deb/${DISTRIB_CODENAME}/debuild.sh
-cp ./packaging/deb/${DISTRIB_CODENAME}/*.deb ${WORKSPACE}/csit
-
-cd ${WORKSPACE}/csit
-# execute csit bootstrap script if it exists
-if [[ ! -e bootstrap-hc2vpp-verify.sh ]]
-then
-    echo 'ERROR: No bootstrap-hc2vpp-verify.sh found'
-    exit 1
-else
-    # make sure that bootstrap.sh is executable
-    chmod +x bootstrap-hc2vpp-verify.sh
-    # run the script
-    ./bootstrap-hc2vpp-verify.sh ${OS}
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
\ No newline at end of file
diff --git a/jjb/include-raw-deploy-archives.sh b/jjb/include-raw-deploy-archives.sh
deleted file mode 100644 (file)
index c51312a..0000000
+++ /dev/null
@@ -1,121 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-echo "---> jjb/include-raw-deploy-archives.sh"
-
-set +e  # Do not affect the build result if some part of archiving fails.
-
-ARCHIVES_DIR="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER"
-[ "$LOGS_SERVER" ] || LOGS_SERVER="https://logs.fd.io"
-[ "$LOGS_REPO_URL" ] || LOGS_REPO_URL="https://nexus.fd.io/service/local/repositories/logs"
-
-echo "Build logs: <a href=\"$LOGS_SERVER/$SILO/$ARCHIVES_DIR\">$LOGS_SERVER/$SILO/$ARCHIVES_DIR</a>"
-
-mkdir .archives
-cd .archives/
-
-cat > deploy-archives.xml <<EOF
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>logs</groupId>
-  <artifactId>logs</artifactId>
-  <version>1.0.0</version>
-  <packaging>pom</packaging>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-deploy-plugin</artifactId>
-        <version>2.8.2</version>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.sonatype.plugins</groupId>
-        <artifactId>maven-upload-plugin</artifactId>
-        <version>0.0.1</version>
-        <executions>
-          <execution>
-            <id>publish-site</id>
-            <phase>deploy</phase>
-            <goals>
-              <goal>upload-file</goal>
-            </goals>
-            <configuration>
-              <serverId>logs</serverId>
-              <repositoryUrl>$LOGS_REPO_URL/content-compressed</repositoryUrl>
-              <file>archives.zip</file>
-              <repositoryPath>$SILO</repositoryPath>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-</project>
-EOF
-
-mkdir -p $ARCHIVES_DIR
-mkdir -p $WORKSPACE/archives
-if [ ! -z "${{ARCHIVE_ARTIFACTS}}" ]; then
-    pushd $WORKSPACE
-    shopt -s globstar  # Enable globstar to copy archives
-    archive_artifacts=$(echo ${{ARCHIVE_ARTIFACTS}})
-    for f in $archive_artifacts; do
-        echo "Archiving $f"
-        mkdir -p $WORKSPACE/archives/$(dirname $f)
-        mv $f $WORKSPACE/archives/$f
-    done
-    shopt -u globstar  # Disable globstar once archives are copied
-    popd
-fi
-
-
-# Ignore logging if archives doesn't exist
-mv $WORKSPACE/archives/ $ARCHIVES_DIR > /dev/null 2>&1
-touch $ARCHIVES_DIR/_build-details.txt
-echo "build-url: ${{BUILD_URL}}" >> $ARCHIVES_DIR/_build-details.txt
-env > $ARCHIVES_DIR/_build-enviroment-variables.txt
-
-# capture system info
-touch $ARCHIVES_DIR/_sys-info.txt
-{{
-    echo -e "uname -a:\n `uname -a` \n"
-    echo -e "df -h:\n `df -h` \n"
-    echo -e "free -m:\n `free -m` \n"
-    echo -e "nproc:\n `nproc` \n"
-    echo -e "lscpu:\n `lscpu` \n"
-    echo -e "ip addr:\n  `/sbin/ip addr` \n"
-}} 2>&1 | tee -a $ARCHIVES_DIR/_sys-info.txt
-
-# Magic string used to trim console logs at the appropriate level during wget
-echo "-----END_OF_BUILD-----"
-wget -q --timeout=60 -O $ARCHIVES_DIR/console.log ${{BUILD_URL}}consoleText
-wget -q --timeout=60 -O $ARCHIVES_DIR/console-timestamp.log ${{BUILD_URL}}/timestamps?time=HH:mm:ss\&appendLog
-sed -i '/^-----END_OF_BUILD-----$/,$d' $ARCHIVES_DIR/console.log
-sed -i '/^.*-----END_OF_BUILD-----$/,$d' $ARCHIVES_DIR/console-timestamp.log
-
-gzip $ARCHIVES_DIR/*.txt $ARCHIVES_DIR/*.log
-# find and gzip any 'text' files
-find $ARCHIVES_DIR -type f -print0 \
-                | xargs -0r file \
-                | egrep -e ':.*text.*' \
-                | cut -d: -f1 \
-                | xargs -d'\n' -r gzip
-
-zip -r archives.zip $JENKINS_HOSTNAME/
-du -sh archives.zip
diff --git a/jjb/scripts/backup_upload_archives.sh b/jjb/scripts/backup_upload_archives.sh
deleted file mode 100755 (executable)
index 6cedc80..0000000
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-echo "---> jjb/scripts/backup_upload_archives.sh"
-
-PYTHON_SCRIPT="/w/workspace/test-logs/artifact.py"
-
-# This script uploads the artifacts to a backup upload location
-if [ -f "$PYTHON_SCRIPT" ]; then
-    echo "WARNING: $PYTHON_SCRIPT already exists - assume backup archive upload already done"
-    exit 0
-fi
-
-# the Python code below needs boto3 installed
-python3 -m pip install boto3
-mkdir -p $(dirname "$PYTHON_SCRIPT")
-
-cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
-#!/usr/bin/python3
-
-"""Storage utilities library."""
-
-import argparse
-import gzip
-import os
-from mimetypes import MimeTypes
-
-from boto3 import resource
-from botocore.client import Config
-
-ENDPOINT_URL = u"http://storage.service.consul:9000"
-AWS_ACCESS_KEY_ID = u"storage"
-AWS_SECRET_ACCESS_KEY = u"Storage1234"
-REGION_NAME = u"yul1"
-COMPRESS_MIME = (
-    u"text/html",
-    u"text/xml",
-    u"application/octet-stream"
-)
-
-
-def compress(src_fpath):
-    """Compress a single file.
-
-    :param src_fpath: Input file path.
-    :type src_fpath: str
-    """
-    with open(src_fpath, u"rb") as orig_file:
-        with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
-            zipped_file.writelines(orig_file)
-
-
-def upload(storage, bucket, src_fpath, dst_fpath):
-    """Upload single file to destination bucket.
-
-    :param storage: S3 storage resource.
-    :param bucket: S3 bucket name.
-    :param src_fpath: Input file path.
-    :param dst_fpath: Destination file path on remote storage.
-    :type storage: Object
-    :type bucket: str
-    :type src_fpath: str
-    :type dst_fpath: str
-    """
-    mime_guess = MimeTypes().guess_type(src_fpath)
-    mime = mime_guess[0]
-    encoding = mime_guess[1]
-    if not mime:
-        mime = "application/octet-stream"
-
-    if mime in COMPRESS_MIME and bucket in "logs" and encoding != "gzip":
-        compress(src_fpath)
-        src_fpath = src_fpath + ".gz"
-        dst_fpath = dst_fpath + ".gz"
-
-    extra_args = dict()
-    extra_args['ContentType'] = mime
-
-    storage.Bucket(bucket + ".fd.io").upload_file(
-        src_fpath,
-        dst_fpath,
-        ExtraArgs=extra_args
-    )
-    print("https://" + bucket + ".nginx.service.consul/" + dst_fpath)
-
-
-def upload_recursive(storage, bucket, src_fpath):
-    """Recursively uploads input folder to destination.
-
-    Example:
-      - bucket: logs
-      - src_fpath: /home/user
-      - dst_fpath: logs.fd.io/home/user
-
-    :param storage: S3 storage resource.
-    :param bucket: S3 bucket name.
-    :param src_fpath: Input folder path.
-    :type storage: Object
-    :type bucket: str
-    :type src_fpath: str
-    """
-    for path, _, files in os.walk(src_fpath):
-        for file in files:
-            _path = path.replace(src_fpath, u"")
-            _dir = src_fpath[1:] if src_fpath[0] == "/" else src_fpath
-            _dst_fpath = os.path.normpath(_dir + "/" + _path + "/" + file)
-            _src_fpath = os.path.join(path, file)
-            upload(storage, bucket, _src_fpath, _dst_fpath)
-
-
-def main():
-    """Main function for storage manipulation."""
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        u"-d", u"--dir", required=True, type=str,
-        help=u"Directory to upload to storage."
-    )
-    parser.add_argument(
-        u"-b", u"--bucket", required=True, type=str,
-        help=u"Target bucket on storage."
-    )
-    args = parser.parse_args()
-
-    # Create main storage resource.
-    storage = resource(
-        u"s3",
-        endpoint_url=ENDPOINT_URL,
-        aws_access_key_id=AWS_ACCESS_KEY_ID,
-        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
-        config=Config(
-            signature_version=u"s3v4"
-        ),
-        region_name=REGION_NAME
-    )
-
-    upload_recursive(
-        storage=storage,
-        bucket=args.bucket,
-        src_fpath=args.dir
-    )
-
-
-if __name__ == u"__main__":
-    main()
-
-END_OF_PYTHON_SCRIPT
-
-WS_ARCHIVES_DIR="$WORKSPACE/archives"
-TMP_ARCHIVES_DIR="/tmp/archives"
-JENKINS_BUILD_ARCHIVE_DIR="$TMP_ARCHIVES_DIR/$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER"
-
-mkdir -p $JENKINS_BUILD_ARCHIVE_DIR
-
-if [ -e "$WS_ARCHIVES_DIR" ]; then
-    echo "Found $WS_ARCHIVES_DIR, uploading its contents"
-    cp -r $WS_ARCHIVES_DIR/* $JENKINS_BUILD_ARCHIVE_DIR
-else
-    echo "No $WS_ARCHIVES_DIR found. Creating a dummy file."
-    echo "No archives found while doing backup upload" > "$JENKINS_BUILD_ARCHIVE_DIR/no-archives-found.txt"
-fi
-
-console_log="$JENKINS_BUILD_ARCHIVE_DIR/console.log"
-echo "Retrieving Jenkins console log to '$console_log'"
-wget -qO "$console_log" "$BUILD_URL/consoleText"
-
-console_log="$JENKINS_BUILD_ARCHIVE_DIR/console-timestamp.log"
-echo "Retrieving Jenkins console timestamp log to '$console_log'"
-wget -qO "$console_log" "$BUILD_URL/timestamps?time=HH:mm:ss&appendLog"
-
-pushd $TMP_ARCHIVES_DIR
-echo "Contents of the archives dir '$TMP_ARCHIVES_DIR':"
-ls -alR $TMP_ARCHIVES_DIR
-archive_cmd="python3 $PYTHON_SCRIPT -d . -b logs"
-echo -e "\nRunning uploader script '$archive_cmd':\n"
-$archive_cmd || echo "Failed to upload logs"
-popd
index ff82cb5..ea405b2 100644 (file)
@@ -47,3 +47,4 @@ git checkout "${BRANCH_NAME}"
 popd
 csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
 source "${csit_entry_dir}/bootstrap_vpp_device.sh"
+cp -R "${WORKSPACE}/csit/archives" "${WORKSPACE}/archives" || true
index 9356655..6d31f9e 100644 (file)
@@ -40,3 +40,4 @@ fi
 popd
 csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
 source "${csit_entry_dir}/with_oper_for_vpp.sh" "bootstrap_verify_perf.sh"
+cp -R "${WORKSPACE}/csit/archives" "${WORKSPACE}/archives" || true
diff --git a/jjb/scripts/csit/tldk-functional-virl.sh b/jjb/scripts/csit/tldk-functional-virl.sh
deleted file mode 100644 (file)
index 5f30901..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-echo "---> jjb/scripts/csit/tldk-functional-virl.sh"
-
-set -xeu -o pipefail
-
-# Clone tldk and start tests
-git clone https://gerrit.fd.io/r/tldk
-
-# If the git clone fails, complain clearly and exit
-if [ $? != 0 ]; then
-    echo "Failed to run: git clone https://gerrit.fd.io/r/tldk"
-    exit 1
-fi
-
-# execute tldk bootstrap script if it exists
-if [ -e bootstrap-TLDK.sh ]
-then
-    # make sure that bootstrap-TLDK.sh is executable
-    chmod +x bootstrap-TLDK.sh
-    # run the script
-    ./bootstrap-TLDK.sh
-else
-    echo 'ERROR: No bootstrap-TLDK.sh found'
-    exit 1
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/scripts/logs_publish.sh b/jjb/scripts/logs_publish.sh
new file mode 100644 (file)
index 0000000..da3593c
--- /dev/null
@@ -0,0 +1,291 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> logs_publish.sh"
+
+CDN_URL="logs.nginx.service.consul"
+export AWS_ENDPOINT_URL="http://storage.service.consul:9000"
+
+# FIXME: s3 config (until migrated to config provider, then pwd will be reset)
+mkdir -p ${HOME}/.aws
+echo "[default]
+aws_access_key_id = storage
+aws_secret_access_key = Storage1234" >> "$HOME/.aws/credentials"
+
+PYTHON_SCRIPT="/w/workspace/test-logs/logs_publish.py"
+
+# This script uploads the artifacts to a backup upload location
+if [ -f "$PYTHON_SCRIPT" ]; then
+    echo "WARNING: $PYTHON_SCRIPT already exists - assume backup archive upload already done"
+    exit 0
+fi
+
+pip3 install boto3
+mkdir -p $(dirname "$PYTHON_SCRIPT")
+
+cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
+#!/usr/bin/python3
+
+"""Storage utilities library."""
+
+import gzip
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from mimetypes import MimeTypes
+
+import boto3
+from botocore.exceptions import ClientError
+import requests
+import six
+
+
+logging.basicConfig(
+    format=u"%(levelname)s: %(message)s",
+    stream=sys.stdout,
+    level=logging.INFO
+)
+logging.getLogger(u"botocore").setLevel(logging.INFO)
+
+COMPRESS_MIME = (
+    u"text/html",
+    u"text/xml",
+    u"text/plain",
+    u"application/octet-stream"
+)
+
+
+def compress(src_fpath):
+    """Compress a single file.
+
+    :param src_fpath: Input file path.
+    :type src_fpath: str
+    """
+    with open(src_fpath, u"rb") as orig_file:
+        with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
+            zipped_file.writelines(orig_file)
+
+
+def copy_archives(workspace):
+    """Copy files or directories in a $WORKSPACE/archives to the current
+    directory.
+
+    :param workspace: Workspace directory with archives directory.
+    :type workspace: str
+    """
+    archives_dir = os.path.join(workspace, u"archives")
+    dest_dir = os.getcwd()
+
+    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)
+
+    if os.path.exists(archives_dir):
+        if os.path.isfile(archives_dir):
+            logging.error(u"Target is a file, not a directory.")
+            raise RuntimeError(u"Not a directory.")
+        else:
+            logging.debug("Archives dir {} does exist.".format(archives_dir))
+            for file_or_dir in os.listdir(archives_dir):
+                f = os.path.join(archives_dir, file_or_dir)
+                try:
+                    logging.debug(u"Copying " + f)
+                    shutil.copy(f, dest_dir)
+                except shutil.Error as e:
+                    logging.error(e)
+                    raise RuntimeError(u"Could not copy " + f)
+    else:
+        logging.error(u"Archives dir does not exist.")
+        raise RuntimeError(u"Missing directory " + archives_dir)
+
+
+def upload(s3_resource, s3_bucket, src_fpath, s3_path):
+    """Upload single file to destination bucket.
+
+    :param s3_resource: S3 storage resource.
+    :param s3_bucket: S3 bucket name.
+    :param src_fpath: Input file path.
+    :param s3_path: Destination file path on remote storage.
+    :type s3_resource: Object
+    :type s3_bucket: str
+    :type src_fpath: str
+    :type s3_path: str
+    """
+    mime_guess = MimeTypes().guess_type(src_fpath)
+    mime = mime_guess[0]
+    encoding = mime_guess[1]
+    if not mime:
+        mime = u"application/octet-stream"
+
+    if s3_bucket not in u"docs.fd.io":
+        if mime in COMPRESS_MIME and encoding != u"gzip":
+            compress(src_fpath)
+            src_fpath = src_fpath + u".gz"
+            s3_path = s3_path + u".gz"
+
+    extra_args = {u"ContentType": mime}
+
+    try:
+        logging.info(u"Attempting to upload file " + src_fpath)
+        s3_resource.Bucket(s3_bucket).upload_file(
+            src_fpath, s3_path, ExtraArgs=extra_args
+        )
+        logging.info(u"Successfully uploaded to " + s3_path)
+    except ClientError as e:
+        logging.error(e)
+
+
+def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
+    """Recursively uploads input folder to destination.
+
+    Example:
+      - s3_bucket: logs.fd.io
+      - src_fpath: /workspace/archives.
+      - s3_path: /hostname/job/id/
+
+    :param s3_resource: S3 storage resource.
+    :param s3_bucket: S3 bucket name.
+    :param src_fpath: Input folder path.
+    :param s3_path: S3 destination path.
+    :type s3_resource: Object
+    :type s3_bucket: str
+    :type src_fpath: str
+    :type s3_path: str
+    """
+    for path, _, files in os.walk(src_fpath):
+        for file in files:
+            _path = path.replace(src_fpath, u"")
+            _src_fpath = path + u"/" + file
+            _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
+            upload(
+                s3_resource=s3_resource,
+                s3_bucket=s3_bucket,
+                src_fpath=_src_fpath,
+                s3_path=_s3_path
+            )
+
+
+def deploy_s3(s3_bucket, s3_path, build_url, workspace):
+    """Add logs and archives to temp directory to be shipped to S3 bucket.
+    Fetches logs and system information and pushes them and archives to S3
+    for log archiving.
+    Requires the s3 bucket to exist.
+
+    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
+    :param s3_path: Path on S3 bucket to place the logs and archives. Eg:
+        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
+    :param build_url: URL of the Jenkins build. Jenkins typically provides this
+        via the $BUILD_URL environment variable.
+    :param workspace: Directory in which to search, typically in Jenkins this is
+        $WORKSPACE
+    :type s3_bucket: Object
+    :type s3_path: str
+    :type build_url: str
+    :type workspace: str
+    """
+    s3_resource = boto3.resource(
+        u"s3",
+        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
+    )
+
+    previous_dir = os.getcwd()
+    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
+    os.chdir(work_dir)
+
+    # Copy archive files to tmp dir.
+    copy_archives(workspace)
+
+    # Create additional build logs.
+    with open(u"_build-details.log", u"w+") as f:
+        f.write(u"build-url: " + build_url)
+
+    with open(u"_sys-info.log", u"w+") as f:
+        sys_cmds = []
+
+        logging.debug(u"Platform: " + sys.platform)
+        if sys.platform == u"linux" or sys.platform == u"linux2":
+            sys_cmds = [
+                [u"uname", u"-a"],
+                [u"lscpu"],
+                [u"nproc"],
+                [u"df", u"-h"],
+                [u"free", u"-m"],
+                [u"ip", u"addr"],
+                [u"sar", u"-b", u"-r", u"-n", u"DEV"],
+                [u"sar", u"-P", u"ALL"],
+            ]
+
+        for c in sys_cmds:
+            try:
+                output = subprocess.check_output(c).decode(u"utf-8")
+            except FileNotFoundError:
+                logging.debug(u"Command not found: " + c)
+                continue
+
+            cmd = u" ".join(c)
+            output = u"---> " + cmd + "\n" + output + "\n"
+            f.write(output)
+            logging.info(output)
+
+    # Magic string used to trim console logs at the appropriate level during
+    # wget.
+    MAGIC_STRING = u"-----END_OF_BUILD-----"
+    logging.info(MAGIC_STRING)
+
+    resp = requests.get(build_url + u"/consoleText")
+    with open(u"console.log", u"w+", encoding=u"utf-8") as f:
+        f.write(
+            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
+        )
+
+    query = u"time=HH:mm:ss&appendLog"
+    resp = requests.get(build_url + u"/timestamps?" + query)
+    with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
+        f.write(
+            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
+        )
+
+    upload_recursive(
+        s3_resource=s3_resource,
+        s3_bucket=s3_bucket,
+        src_fpath=work_dir,
+        s3_path=s3_path
+    )
+
+    os.chdir(previous_dir)
+    shutil.rmtree(work_dir)
+
+
+if __name__ == u"__main__":
+    globals()[sys.argv[1]](*sys.argv[2:])
+
+END_OF_PYTHON_SCRIPT
+
+# The 'deploy_s3' command below expects the archives
+# directory to exist.  Normally lf-infra-sysstat or similar would
+# create it and add content, but to make sure this script is
+# self-contained, we ensure it exists here.
+mkdir -p "$WORKSPACE/archives"
+
+s3_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
+echo "INFO: S3 path $s3_path"
+
+echo "INFO: archiving backup logs to S3"
+# shellcheck disable=SC2086
+python3 $PYTHON_SCRIPT deploy_s3 "logs.fd.io" "$s3_path" \
+    "$BUILD_URL" "$WORKSPACE"
+
+echo "S3 build backup logs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
index 355d6fd..02a56f1 100755 (executable)
@@ -75,39 +75,6 @@ generate_vpp_stacktrace_and_delete_core() {
 
 mkdir -p "$WS_ARCHIVES_DIR"
 
-# Log the build environment variables
-echo "Logging build environment variables in '$BUILD_ENV_LOG'..."
-env > $BUILD_ENV_LOG
-
-echo "ARCHIVE_ARTIFACTS = '$ARCHIVE_ARTIFACTS'"
-if [ -n "${ARCHIVE_ARTIFACTS:-}" ] ; then
-    pushd "$WORKSPACE"
-    shopt -s globstar  # Enable globstar to copy archives
-    for file in $ARCHIVE_ARTIFACTS ; do
-        if [ -f "$file" ] ; then
-            echo "Archiving '$file' to '$destfile'"
-            destfile="$WS_ARCHIVE_DIR$file"
-            destdir="$(dirname $destfile)"
-            mkdir -p $destdir
-            mv -f $file $destfile
-        else
-            echo "Not archiving '$file'"
-            if ! grep -qe '*' <<<"$file" ; then
-                echo "WARNING: No artifacts detected in ARCHIVE_ARTIFACTS '$ARCHIVE_ARTIFACTS'!"
-            fi
-        fi
-    done
-    shopt -u globstar  # Disable globstar
-    popd
-fi
-
-# find and gzip any 'text' files
-find $WS_ARCHIVES_DIR -type f -print0 \
-                | xargs -0r file \
-                | egrep -e ':.*text.*' \
-                | cut -d: -f1 \
-                | xargs -d'\n' -r gzip
-
 # generate stack trace for VPP core files for upload instead of core file.
 if [ -d "$WORKSPACE/build-root" ] ; then
     for file in $(find $WS_ARCHIVES_DIR -type f -name 'core*.gz') ; do
index 6d4beb8..8c76d71 100644 (file)
@@ -38,3 +38,4 @@ fi
 popd
 csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
 source "${csit_entry_dir}/with_oper_for_vpp.sh" "per_patch_device.sh"
+cp -R "${WORKSPACE}/csit_current/archives/"* "${WORKSPACE}/archives/" || true
index e573066..cb13557 100644 (file)
@@ -38,3 +38,5 @@ fi
 popd
 csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
 source "${csit_entry_dir}/with_oper_for_vpp.sh" "per_patch_perf.sh"
+cp -R "${WORKSPACE}/csit_current/"* "${WORKSPACE}/archives/" || true
+cp -R "${WORKSPACE}/csit_parent/"* "${WORKSPACE}/archives/" || true
index 191b592..45cb354 100644 (file)
@@ -18,7 +18,6 @@
       - '{project}-verify-{stream}-{os}'
       - '{project}-merge-{stream}-{os}'
       - '{project}-verify-image-{stream}-{os}'
-      - 'tldk-csit-verify-func-{stream}-{os}-virl'
 
     project: 'tldk'
     os:
     publishers:
       - lf-infra-publish
 
-- job-template:
-    name: 'tldk-csit-verify-func-{stream}-{os}-virl'
-
-    project-type: freestyle
-    node: '{os}-builder-4c-4g'
-    concurrent: true
-    archive-artifacts: >
-      **/csit/report.html
-      **/csit/log.html
-      **/csit/output.xml
-    latest-only: false
-
-    build-discarder:
-      daysToKeep: '{build-days-to-keep}'
-      numToKeep: '{build-num-to-keep}'
-      artifactDaysToKeep: '{build-artifact-days-to-keep}'
-      artifactNumToKeep: '{build-artifact-num-to-keep}'
-
-    parameters:
-      - project-parameter:
-          project: '{project}'
-      - gerrit-parameter:
-          branch: '{branch}'
-      - string:
-          name: ARCHIVE_ARTIFACTS
-          default: '{archive-artifacts}'
-          description: Artifacts to archive to the logs server.
-
-    scm:
-      - gerrit-trigger-scm:
-          credentials-id: 'jenkins-gerrit-credentials'
-          refspec: '$GERRIT_REFSPEC'
-          choosing-strategy: 'gerrit'
-
-    wrappers:
-      - fdio-infra-wrappers:
-          build-timeout: '{build-timeout}'
-
-    triggers:
-      - gerrit-trigger-manually-triggered:
-          name: '{project}'
-          branch: '{branch}'
-          comment-trigger-value: 'verify-tldk-func'
-
-    builders:
-      - shell:
-          !include-raw-escape: include-raw-tldk-csit-functional-virl.sh
-
-    publishers:
-      - robot-report:
-          output-path: ''
-      - lf-infra-publish
-
 - project:
     name: tldk-info
     project-name: tldk
index 68788c7..3fddfae 100644 (file)
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       - gerrit-parameter:
           branch: '{branch}'
       - gerrit-refspec-parameter:
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       - gerrit-parameter:
           branch: '{branch}'
       - gerrit-refspec-parameter:
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       - gerrit-parameter:
           branch: '{branch}'
       - gerrit-refspec-parameter:
           repo-name: '{repo-stream-part}.{repo-os-part}'
       - stream-parameter:
           stream: '{stream}'
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
 
     scm:
       - gerrit-trigger-scm:
           repo-name: '{repo-stream-part}.{repo-os-part}'
       - stream-parameter:
           stream: '{stream}'
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
 
     scm:
       - gerrit-trigger-scm:
     project-type: freestyle
     node: 'csit-builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: '**/csit_current/**/*.*'
     latest-only: false
 
     build-discarder:
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       - gerrit-csit-refspec-parameter
       - gerrit-event-comment-text-parameter
       - gerrit-event-type-parameter
           - ../scripts/vpp/csit-device.sh
 
     publishers:
-      - robot-report:
-          output-path: 'archives'
-
       - fdio-infra-publish
 # [end] VPP-CSIT-VERIFY-DEVICE-PERIODIC JOB TEMPLATE
 
     project-type: freestyle
     node: 'csit-builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: '**/csit_current/**/*.*'
     latest-only: false
 
     build-discarder:
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       - gerrit-csit-refspec-parameter
       - gerrit-event-comment-text-parameter
       - gerrit-event-type-parameter
           - ../scripts/vpp/csit-device.sh
 
     publishers:
-      - robot-report:
-          output-path: 'archives'
-
       - fdio-infra-publish
 # [end] VPP-CSIT-VERIFY-DEVICE-PERPATCH JOB TEMPLATE
 
     project-type: freestyle
     node: 'builder-{os}-prod-{executor-arch}'
     concurrent: true
-    archive-artifacts: >
-      **/csit_current/**/*.*
-      **/csit_parent/**/*.*
     latest-only: false
 
     build-discarder:
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       - csit-perf-trial-duration-parameter:
           csit-perf-trial-duration: "10.0"
       - csit-perf-trial-multiplicity-parameter:
 
     # Please keep parameters in alphabetical order
     parameters:
-      - archive-artifacts-parameter:
-          artifacts: '{archive-artifacts}'
       # Not sure whether not failing has any useful usage,
       # but it does not hurt to have some flexibility for future.
       - csit-fail-on-crc-mismatch-parameter: