Merge "Add terraform to docker CI executor images."
[ci-management.git] / jjb / scripts / publish_library_py.sh
#!/bin/bash

# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

echo "---> publish_library_py.sh"

set -exuo pipefail

PYTHON_SCRIPT="/w/workspace/publish_library.py"

pip3 install boto3
mkdir -p "$(dirname "$PYTHON_SCRIPT")"

cat >"$PYTHON_SCRIPT" <<'END_OF_PYTHON_SCRIPT'
#!/usr/bin/python3

"""S3 publish library."""

import gzip
import logging
import os
import shutil
import subprocess
import sys
import tempfile
from mimetypes import MimeTypes

import boto3
from botocore.exceptions import ClientError
import requests
import six


logging.basicConfig(
    format=u"%(levelname)s: %(message)s",
    stream=sys.stdout,
    level=logging.INFO
)
logging.getLogger(u"botocore").setLevel(logging.INFO)

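# MIME types that upload() gzip-compresses before pushing files to a logs
# bucket.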
COMPRESS_MIME = (
    u"text/html",
    u"text/xml",
    u"text/plain",
    u"application/octet-stream"
)


def compress(src_fpath):
    """Compress a single file.

    :param src_fpath: Input file path.
    :type src_fpath: str
    """
    with open(src_fpath, u"rb") as orig_file:
        with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
            zipped_file.writelines(orig_file)


def copy_archives(workspace):
    """Copy files or directories from $WORKSPACE/archives to the current
    directory.

    :param workspace: Workspace directory containing the archives directory.
    :type workspace: str
    """
    archives_dir = os.path.join(workspace, u"archives")
    dest_dir = os.getcwd()

    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)

    if os.path.exists(archives_dir):
        if os.path.isfile(archives_dir):
            logging.error(u"Target is a file, not a directory.")
            raise RuntimeError(u"Not a directory.")
        else:
            logging.debug(u"Archives dir {} exists.".format(archives_dir))
            for file_or_dir in os.listdir(archives_dir):
                f = os.path.join(archives_dir, file_or_dir)
                try:
                    logging.debug(u"Copying " + f)
                    if os.path.isdir(f):
                        # shutil.copy only handles regular files, so copy
                        # directories with copytree.
                        shutil.copytree(
                            f, os.path.join(dest_dir, file_or_dir)
                        )
                    else:
                        shutil.copy(f, dest_dir)
                except (shutil.Error, OSError) as e:
                    logging.error(e)
                    raise RuntimeError(u"Could not copy " + f)
    else:
        logging.error(u"Archives dir does not exist.")
        raise RuntimeError(u"Missing directory " + archives_dir)


def upload(s3_resource, s3_bucket, src_fpath, s3_path):
    """Upload a single file to the destination bucket.

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param s3_path: Destination file path on remote storage.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    mime_guess = MimeTypes().guess_type(src_fpath)
    mime = mime_guess[0]
    encoding = mime_guess[1]
    if not mime:
        mime = u"application/octet-stream"

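    # For "logs" buckets, gzip-compress eligible text files so the stored
    # object is the .gz variant of the source file.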
    if u"logs" in s3_bucket:
        if mime in COMPRESS_MIME and encoding != u"gzip":
            compress(src_fpath)
            src_fpath = src_fpath + u".gz"
            s3_path = s3_path + u".gz"

    extra_args = {u"ContentType": mime}

    try:
        logging.info(u"Attempting to upload file " + src_fpath)
        s3_resource.Bucket(s3_bucket).upload_file(
            src_fpath, s3_path, ExtraArgs=extra_args
        )
        logging.info(u"Successfully uploaded to " + s3_path)
    except ClientError as e:
        logging.error(e)


def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
    """Recursively upload an input folder to the destination.

    Example:
      - s3_bucket: logs.fd.io
      - src_fpath: /workspace/archives
      - s3_path: /hostname/job/id/

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :param s3_path: S3 destination path.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    for path, _, files in os.walk(src_fpath):
        for file in files:
            _path = path.replace(src_fpath, u"")
            _src_fpath = path + u"/" + file
            _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
            upload(
                s3_resource=s3_resource,
                s3_bucket=s3_bucket,
                src_fpath=_src_fpath,
                s3_path=_s3_path
            )


def deploy_docs(s3_bucket, s3_path, docs_dir):
    """Ship docs dir content to S3 bucket. Requires the S3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the docs. Eg:
        csit/${GERRIT_BRANCH}/report
    :param docs_dir: Directory in which to recursively upload content.
    :type s3_bucket: str
    :type s3_path: str
    :type docs_dir: str
    """
    s3_resource = boto3.resource(u"s3")

    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=docs_dir,
        s3_path=s3_path
    )


def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    """Add logs and archives to a temp directory and ship them to the S3
    bucket for log archiving. Fetches the console logs and system
    information in addition to the archived files.
    Requires the S3 bucket to exist and the AWS_ENDPOINT_URL environment
    variable to be set.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the logs and archives. Eg:
        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
    :param build_url: URL of the Jenkins build. Jenkins typically provides this
        via the $BUILD_URL environment variable.
    :param workspace: Directory in which to search, typically in Jenkins this is
        $WORKSPACE
    :type s3_bucket: str
    :type s3_path: str
    :type build_url: str
    :type workspace: str
    """
    s3_resource = boto3.resource(
        u"s3",
        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
    )

    previous_dir = os.getcwd()
    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
    os.chdir(work_dir)

    # Copy archive files to tmp dir.
    copy_archives(workspace)

    # Create additional build logs.
    with open(u"_build-details.log", u"w+") as f:
        f.write(u"build-url: " + build_url)

    with open(u"_sys-info.log", u"w+") as f:
        sys_cmds = []

        logging.debug(u"Platform: " + sys.platform)
        if sys.platform == u"linux" or sys.platform == u"linux2":
            sys_cmds = [
                [u"uname", u"-a"],
                [u"lscpu"],
                [u"nproc"],
                [u"df", u"-h"],
                [u"free", u"-m"],
                [u"ip", u"addr"],
                [u"sar", u"-b", u"-r", u"-n", u"DEV"],
                [u"sar", u"-P", u"ALL"],
            ]

        for c in sys_cmds:
            try:
                output = subprocess.check_output(c).decode(u"utf-8")
            except FileNotFoundError:
                logging.debug(u"Command not found: " + u" ".join(c))
                continue

            cmd = u" ".join(c)
            output = u"---> " + cmd + "\n" + output + "\n"
            f.write(output)
            logging.info(output)

    # Magic string used to trim console logs at the appropriate point when
    # they are fetched below.
    MAGIC_STRING = u"-----END_OF_BUILD-----"
    logging.info(MAGIC_STRING)

    resp = requests.get(build_url + u"/consoleText")
    with open(u"console.log", u"w+", encoding=u"utf-8") as f:
        f.write(
            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
        )

    query = u"time=HH:mm:ss&appendLog"
    resp = requests.get(build_url + u"/timestamps?" + query)
    with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
        f.write(
            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
        )

    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=work_dir,
        s3_path=s3_path
    )

    os.chdir(previous_dir)
    shutil.rmtree(work_dir)


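# Minimal dispatch: the first command line argument selects one of the
# functions above (e.g. deploy_s3 or deploy_docs) and the remaining
# arguments are passed through as its positional parameters.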
if __name__ == u"__main__":
    globals()[sys.argv[1]](*sys.argv[2:])

END_OF_PYTHON_SCRIPT
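
# Illustrative invocation only (not executed here): downstream job scripts
# are expected to call the generated library with the target function name
# as the first argument, for example:
#
#   python3 "$PYTHON_SCRIPT" deploy_s3 "logs.fd.io" \
#       "$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER" "$BUILD_URL" "$WORKSPACE"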