Merge "Nexus retirement CI job clean up"
[ci-management.git] / jjb / scripts / publish_library_py.sh
#!/bin/bash

# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

echo "---> publish_library_py.sh"

set -exuo pipefail

PYTHON_SCRIPT="/w/workspace/publish_library.py"

pip3 install boto3 requests six
mkdir -p "$(dirname "$PYTHON_SCRIPT")"

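# Write the helper library to disk. The quoted heredoc delimiter keeps the
# shell from expanding anything inside the embedded Python source.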
cat >"$PYTHON_SCRIPT" <<'END_OF_PYTHON_SCRIPT'
#!/usr/bin/python3

"""S3 publish library."""

import glob
import gzip
import logging
import os
import shutil
import sys
import tempfile

import boto3
from botocore.exceptions import ClientError
import requests
import six


logging.basicConfig(
    format=u"%(levelname)s: %(message)s",
    stream=sys.stdout,
    level=logging.INFO
)
logging.getLogger(u"botocore").setLevel(logging.INFO)


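# Map of file extensions to the Content-Type set on upload; anything not
# listed is uploaded as application/octet-stream (see upload() below).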
FILE_TYPE = {
    u"xml": u"application/xml",
    u"html": u"text/html",
    u"txt": u"text/plain",
    u"log": u"text/plain",
    u"css": u"text/css",
    u"md": u"text/markdown",
    u"rst": u"text/x-rst",
    u"csv": u"text/csv",
    u"svg": u"image/svg+xml",
    u"jpg": u"image/jpeg",
    u"png": u"image/png",
    u"gif": u"image/gif",
    u"js": u"application/javascript",
    u"pdf": u"application/pdf",
    u"json": u"application/json",
    u"otf": u"font/otf",
    u"ttf": u"font/ttf",
    u"woff": u"font/woff",
    u"woff2": u"font/woff2"
}


def compress_text(src_dpath):
    """Recursively compress all text files in a directory.

    :param src_dpath: Input directory path.
    :type src_dpath: str
    """
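    # Each matching file is gzipped in place (foo.log -> foo.log.gz) and the
    # original removed; upload() below tags such files with
    # Content-Encoding: gzip so browsers decompress them transparently.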
    save_dir = os.getcwd()
    os.chdir(src_dpath)

    compress_types = [
        "**/*.html",
        "**/*.log",
        "**/*.txt",
        "**/*.xml",
        "**/*.json"
    ]
    paths = []
    for _type in compress_types:
        search = os.path.join(src_dpath, _type)
        paths.extend(glob.glob(search, recursive=True))

    for _file in paths:
        # glob may follow symlink paths that open can't find
        if os.path.exists(_file):
            gz_file = u"{}.gz".format(_file)
            with open(_file, "rb") as src, gzip.open(gz_file, "wb") as dest:
                shutil.copyfileobj(src, dest)
            os.remove(_file)

    os.chdir(save_dir)


def copy_archives(workspace):
    """Copy files and directories from $WORKSPACE/archives to the current
    directory.

    :param workspace: Workspace directory containing an archives directory.
    :type workspace: str
    """
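    # Note: copytree(symlinks=False) dereferences symlinks, so the staged
    # copy holds real file contents rather than links that would dangle
    # once uploaded.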
    archives_dir = os.path.join(workspace, u"archives")
    dest_dir = os.getcwd()

    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)

    if os.path.exists(archives_dir):
        if os.path.isfile(archives_dir):
            logging.error(u"Target is a file, not a directory.")
            raise RuntimeError(u"Not a directory.")
        else:
            logging.debug("Archives dir {} does exist.".format(archives_dir))
            for item in os.listdir(archives_dir):
                src = os.path.join(archives_dir, item)
                dst = os.path.join(dest_dir, item)
                try:
                    if os.path.isdir(src):
                        shutil.copytree(src, dst, symlinks=False, ignore=None)
                    else:
                        shutil.copy2(src, dst)
                except shutil.Error as e:
                    logging.error(e)
                    raise RuntimeError(u"Could not copy " + src)
    else:
        logging.error(u"Archives dir does not exist.")
        raise RuntimeError(u"Missing directory " + archives_dir)


def upload(s3_resource, s3_bucket, src_fpath, s3_path):
    """Upload single file to destination bucket.

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param s3_path: Destination file path on remote storage.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
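    # Every gzip stream begins with the two magic bytes 0x1f 0x8b, so
    # reading them is enough to tell whether the file was compressed by
    # compress_text() above.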
    def is_gzip_file(filepath):
        with open(filepath, u"rb") as test_f:
            return test_f.read(2) == b"\x1f\x8b"

    if os.path.isdir(src_fpath):
        return
    if not os.path.isfile(src_fpath):
        # Skip anything that is neither a regular file nor a directory
        # (e.g. a broken symlink); extra_args below would otherwise be
        # undefined.
        return

    file_name, file_extension = os.path.splitext(src_fpath)
    content_encoding = u""
    if is_gzip_file(src_fpath):
        # Strip the .gz suffix so the Content-Type reflects the inner file.
        file_name, file_extension = os.path.splitext(file_name)
        content_encoding = u"gzip"
    content_type = FILE_TYPE.get(
        file_extension.strip("."),
        u"application/octet-stream"
    )

    extra_args = dict()
    extra_args[u"ContentType"] = content_type
    if content_encoding:
        extra_args[u"ContentEncoding"] = content_encoding

    try:
        s3_resource.Bucket(s3_bucket).upload_file(
            src_fpath, s3_path, ExtraArgs=extra_args
        )
        logging.info(u"Successfully uploaded to " + s3_path)
    except ClientError as e:
        logging.error(e)


def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
    """Recursively upload an input folder to the destination.

    Example:
      - s3_bucket: logs.fd.io
      - src_fpath: /workspace/archives
      - s3_path: /hostname/job/id/

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :param s3_path: S3 destination path.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
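    # Mirror the local tree onto S3: strip the src_fpath prefix from each
    # walked directory and re-root the remainder under s3_path.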
    for path, _, files in os.walk(src_fpath):
        for file in files:
            _path = path.replace(src_fpath, u"")
            _src_fpath = path + u"/" + file
            _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
            upload(
                s3_resource=s3_resource,
                s3_bucket=s3_bucket,
                src_fpath=_src_fpath,
                s3_path=_s3_path
            )


def deploy_docs(s3_bucket, s3_path, docs_dir):
    """Ship docs dir content to S3 bucket. Requires the S3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the docs. Eg:
        csit/${GERRIT_BRANCH}/report
    :param docs_dir: Directory in which to recursively upload content.
    :type s3_bucket: str
    :type s3_path: str
    :type docs_dir: str
    """
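    # Prefer an explicitly configured endpoint (e.g. a non-AWS S3 service)
    # when AWS_ENDPOINT_URL is set; otherwise fall back to the boto3
    # default endpoint resolution.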
    try:
        s3_resource = boto3.resource(
            u"s3",
            endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
        )
    except KeyError:
        s3_resource = boto3.resource(u"s3")

    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=docs_dir,
        s3_path=s3_path
    )


def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    """Add logs and archives to a temp directory to be shipped to the S3
    bucket. Fetches logs and system information and pushes them, together
    with the archives, to S3 for log archiving. Requires the S3 bucket to
    exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the logs and archives. Eg:
        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
    :param build_url: URL of the Jenkins build. Jenkins typically provides
        this via the $BUILD_URL environment variable.
    :param workspace: Directory in which to search, typically in Jenkins
        this is $WORKSPACE
    :type s3_bucket: str
    :type s3_path: str
    :type build_url: str
    :type workspace: str
    """
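    # As in deploy_docs(): honor AWS_ENDPOINT_URL when set, otherwise use
    # the boto3 default endpoint.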
    try:
        s3_resource = boto3.resource(
            u"s3",
            endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
        )
    except KeyError:
        s3_resource = boto3.resource(u"s3")

    previous_dir = os.getcwd()
    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
    os.chdir(work_dir)

    # Copy archive files to tmp dir.
    copy_archives(workspace)

    # Create additional build logs.
    with open(u"_build-details.log", u"w+") as f:
        f.write(u"build-url: " + build_url)

    # Log a magic string now so the console log fetched below can be
    # trimmed at this point, keeping the upload steps themselves out of
    # the archived log.
    MAGIC_STRING = u"-----END_OF_BUILD-----"
    logging.info(MAGIC_STRING)

    resp = requests.get(build_url + u"/consoleText")
    with open(u"console.log", u"w+", encoding=u"utf-8") as f:
        f.write(
            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
        )

    query = u"time=HH:mm:ss&appendLog"
    resp = requests.get(build_url + u"/timestamps?" + query)
    with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
        f.write(
            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
        )

    compress_text(work_dir)

    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=work_dir,
        s3_path=s3_path
    )

    os.chdir(previous_dir)
    shutil.rmtree(work_dir)


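# Dispatch: the first CLI argument names one of the functions above and the
# remaining arguments are passed through positionally, e.g. (hypothetical
# values):
#   python3 publish_library.py deploy_s3 logs.fd.io \
#       jenkins.example.org/job-name/42 "$BUILD_URL" "$WORKSPACE"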
if __name__ == u"__main__":
    globals()[sys.argv[1]](*sys.argv[2:])

END_OF_PYTHON_SCRIPT