# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE(review): every line below carries a stray leading number fused in from
# an old numbered listing, and interior lines are missing (the guard's
# `else`/`fi` are not visible in this chunk) — reconstruct before running.
# Announce which JJB script is executing.
16 echo "---> jjb/scripts/backup_upload_archives.sh"
# Path the embedded uploader is written to; its existence doubles as a
# "backup upload already done" sentinel (see the guard below).
18 PYTHON_SCRIPT="/w/workspace/test-logs/artifact.py"
20 # This script uploads the artifacts to a backup upload location
# Skip duplicate work when the sentinel exists; presumably an `else` branch
# (on lines missing from this chunk) installs deps and writes the script —
# TODO confirm against the full file.
21 if [ -f "$PYTHON_SCRIPT" ]; then
22 echo "WARNING: $PYTHON_SCRIPT already exists - assume backup archive upload already done"
26 # the Python code below needs boto3 installed
27 python3 -m pip install boto3
# Ensure the destination directory exists before writing the uploader.
28 mkdir -p $(dirname "$PYTHON_SCRIPT")
# Quoted heredoc delimiter => no shell expansion inside the Python payload.
# NOTE(review): the terminating END_OF_PYTHON_SCRIPT line is not visible in
# this chunk.
30 cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
33 """Storage utilities library."""
# NOTE(review): the import block is incomplete in this chunk — gzip, os and
# argparse are used below but their import lines are not visible.
38 from mimetypes import MimeTypes
40 from boto3 import resource
41 from botocore.client import Config
# S3-compatible storage endpoint and credentials for the internal consul
# service. SECURITY NOTE(review): credentials are hard-coded in a script
# written to disk on the build machine — confirm they are intentionally
# internal-only/non-secret.
43 ENDPOINT_URL = u"http://storage.service.consul:9000"
44 AWS_ACCESS_KEY_ID = u"storage"
45 AWS_SECRET_ACCESS_KEY = u"Storage1234"
# Fragment of the COMPRESS_MIME sequence (MIME types to gzip before upload);
# its opening line(s) are missing from this chunk.
50 u"application/octet-stream"
# compress(): gzip-compress src_fpath to src_fpath + ".gz", streaming the
# original file's lines into the gzip stream and leaving the original file
# in place. NOTE(review): the docstring's `:type`/closing-quote lines fall
# on lines missing from this chunk, so no comments are inserted inside the
# function body here.
54 def compress(src_fpath):
55 """Compress a single file.
57 :param src_fpath: Input file path.
60 with open(src_fpath, u"rb") as orig_file:
61 with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
62 zipped_file.writelines(orig_file)
# upload(): push one file into the "<bucket>.fd.io" bucket, gzip-compressing
# compressible content headed for the logs bucket first, then print the
# public nginx URL of the result. NOTE(review): several lines are missing
# from this chunk — the docstring close, the `mime = mime_guess[0]` /
# `extra_args` assignments feeding the branches below, the compress() call,
# and the upload_file() argument list. Do not restructure until the full
# file is in view.
65 def upload(storage, bucket, src_fpath, dst_fpath):
66 """Upload single file to destination bucket.
68 :param storage: S3 storage resource.
69 :param bucket: S3 bucket name.
70 :param src_fpath: Input file path.
71 :param dst_fpath: Destination file path on remote storage.
77 mime_guess = MimeTypes().guess_type(src_fpath)
79 encoding = mime_guess[1]
# Fallback content type when the guess yields nothing — presumably guarded
# by an `if mime is None:` on a missing line. TODO confirm.
81 mime = "application/octet-stream"
# NOTE(review): `bucket in "logs"` is a substring test ("lo" would match),
# not membership — likely intended `bucket == "logs"` or a tuple membership.
# Flagging only; surrounding lines are missing so intent can't be confirmed.
83 if mime in COMPRESS_MIME and bucket in "logs" and encoding != "gzip":
# Point both paths at the gzipped artifact (created on a missing line).
85 src_fpath = src_fpath + ".gz"
86 dst_fpath = dst_fpath + ".gz"
89 extra_args['ContentType'] = mime
91 storage.Bucket(bucket + ".fd.io").upload_file(
# Emit the browsable URL of the uploaded artifact for the build log.
96 print("https://" + bucket + ".nginx.service.consul/" + dst_fpath)
# upload_recursive(): walk src_fpath and upload every file via upload(),
# mapping each local path to a bucket key with any leading "/" stripped.
# NOTE(review): the inner `for file in files:` loop line (which binds the
# `file` name used below) and the docstring's `:type`/closing lines are
# missing from this chunk.
99 def upload_recursive(storage, bucket, src_fpath):
100 """Recursively uploads input folder to destination.
104 - src_fpath: /home/user
105 - dst_fpath: logs.fd.io/home/user
107 :param storage: S3 storage resource.
108 :param bucket: S3 bucket name.
109 :param src_fpath: Input folder path.
110 :type storage: Object
114 for path, _, files in os.walk(src_fpath):
# Relative portion of the current directory under the upload root.
116 _path = path.replace(src_fpath, u"")
# Strip a leading "/" so the resulting bucket key is relative.
117 _dir = src_fpath[1:] if src_fpath[0] == "/" else src_fpath
# `file` is bound by an inner loop over `files` on a line missing here.
118 _dst_fpath = os.path.normpath(_dir + "/" + _path + "/" + file)
119 _src_fpath = os.path.join(path, file)
120 upload(storage, bucket, _src_fpath, _dst_fpath)
# Body of main(): parse -d/--dir and -b/--bucket, build the boto3 S3
# resource against the consul endpoint, then (on lines missing from this
# chunk) call upload_recursive. NOTE(review): the `def main():` line itself,
# the `parser.add_argument(` openers, the `resource(` opener, the Config(
# wrapper, and the final upload_recursive call are all on missing lines;
# REGION_NAME is referenced but not defined in the visible chunk.
124 """Main function for storage manipulation."""
126 parser = argparse.ArgumentParser()
128 u"-d", u"--dir", required=True, type=str,
129 help=u"Directory to upload to storage."
132 u"-b", u"--bucket", required=True, type=str,
133 help=u"Target bucket on storage."
135 args = parser.parse_args()
137 # Create main storage resource.
140 endpoint_url=ENDPOINT_URL,
141 aws_access_key_id=AWS_ACCESS_KEY_ID,
142 aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
# s3v4 signing is required by MinIO-style S3-compatible endpoints —
# presumably the storage service here; TODO confirm.
144 signature_version=u"s3v4"
146 region_name=REGION_NAME
# Standard script entry guard; the `main()` call on its body line is missing
# from this chunk.
156 if __name__ == u"__main__":
# Stage workspace archives under /tmp/archives/<host>/<job>/<build> and run
# the generated uploader against the logs bucket. NOTE(review): stray fused
# line numbers and missing lines (the `else`/`fi` of the archives check and
# any `popd`) — reconstruct before running.
161 WS_ARCHIVES_DIR="$WORKSPACE/archives"
# Per-build subtree so concurrent jobs don't collide under /tmp/archives.
162 JENKINS_BUILD_ARCHIVE_DIR="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER"
164 TMP_ARCHIVES_DIR="/tmp/archives"
165 mkdir -p $TMP_ARCHIVES_DIR
166 pushd $TMP_ARCHIVES_DIR
168 mkdir -p $JENKINS_BUILD_ARCHIVE_DIR
169 if [ -e "$WS_ARCHIVES_DIR" ]; then
170 echo "Found $WS_ARCHIVES_DIR, uploading its contents"
171 cp -r $WS_ARCHIVES_DIR/* $JENKINS_BUILD_ARCHIVE_DIR
# NOTE(review): the `else` keyword for this branch is on a missing line;
# when no archives exist, a placeholder file is uploaded instead.
173 echo "No $WS_ARCHIVES_DIR found. Creating a dummy file."
174 echo "No archives found while doing backup upload" > "$JENKINS_BUILD_ARCHIVE_DIR/no-archives-found.txt"
177 echo "Contents of the archives dir:"
178 ls -alR $TMP_ARCHIVES_DIR
179 echo "Running uploader script $PYTHON_SCRIPT:"
# Best-effort: an upload failure is reported but does not fail the build.
180 python3 $PYTHON_SCRIPT -d . -b logs || echo "Failed to upload logs"