3 # Copyright (c) 2021 Cisco and/or its affiliates.
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at:
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
echo "---> publish_library_py.sh"

# Location on the build node where the embedded Python helper is written.
PYTHON_SCRIPT="/w/workspace/publish_library.py"

# Make sure the parent directory exists before writing the script.
mkdir -p $(dirname "$PYTHON_SCRIPT")

# Quoted heredoc delimiter: the Python source below is written verbatim,
# with no shell parameter expansion applied to its contents.
cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
"""S3 publish library."""

# NOTE(review): further imports (glob, gzip, logging, os, shutil, sys,
# tempfile, boto3, requests, six, ...) are on lines elided from this
# excerpt; the functions below reference those names.
from mimetypes import MimeTypes

from botocore.exceptions import ClientError

    # Part of a logging.basicConfig(...) call whose opening and closing
    # lines are elided from this excerpt.
    format=u"%(levelname)s: %(message)s",
# Quiet botocore's verbose logging; INFO and above still pass through.
logging.getLogger(u"botocore").setLevel(logging.INFO)
def compress_text(src_dpath):
    """Compress all text files in directory.

    Matched files are gzip-compressed into sibling "<name>.gz" files.

    :param src_dpath: Input dir path.
    :type src_dpath: str
    """
    # Remember the caller's working directory so it can be restored later
    # (the chdir/restore statements are on lines elided from this excerpt).
    save_dir = os.getcwd()

    # Collect every file matching the glob patterns in compress_types.
    # NOTE(review): the compress_types pattern list and the paths accumulator
    # are defined on lines elided from this excerpt.
    for _type in compress_types:
        search = os.path.join(src_dpath, _type)
        paths.extend(glob.glob(search, recursive=True))

    # glob may follow symlink paths that open can't find
    # NOTE(review): the enclosing loop that binds _file (iterating over
    # paths) is on a line elided from this excerpt.
    if os.path.exists(_file):
        gz_file = u"{}.gz".format(_file)
        # Stream the source through a gzip writer; both handles are closed
        # by the with-block.
        with open(_file, "rb") as src, gzip.open(gz_file, "wb") as dest:
            shutil.copyfileobj(src, dest)
def copy_archives(workspace):
    """Copy files or directories in a $WORKSPACE/archives to the current
    working directory.

    :param workspace: Workspace directory with archives directory.
    :type workspace: str
    :raises RuntimeError: If the archives path is a plain file, if it is
        missing, or if a copy fails.
    """
    archives_dir = os.path.join(workspace, u"archives")
    # Entries are copied into the caller's current working directory.
    dest_dir = os.getcwd()

    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)

    if os.path.exists(archives_dir):
        if os.path.isfile(archives_dir):
            # The archives path must be a directory, not a plain file.
            logging.error(u"Target is a file, not a directory.")
            raise RuntimeError(u"Not a directory.")
        logging.debug("Archives dir {} does exist.".format(archives_dir))
        # Copy each top-level entry of archives_dir into dest_dir.
        for file_or_dir in os.listdir(archives_dir):
            f = os.path.join(archives_dir, file_or_dir)
                # NOTE(review): the try: opener for the handler below is on
                # a line elided from this excerpt.
                logging.debug(u"Copying " + f)
                shutil.copy(f, dest_dir)
            except shutil.Error as e:
                raise RuntimeError(u"Could not copy " + f)
        # NOTE(review): the else: header for the missing-directory branch is
        # on a line elided from this excerpt.
        logging.error(u"Archives dir does not exist.")
        raise RuntimeError(u"Missing directory " + archives_dir)
def upload(s3_resource, s3_bucket, src_fpath, s3_path):
    """Upload single file to destination bucket.

    ExtraArgs (ContentType/ContentEncoding) are chosen from the MIME type
    guessed from the file name, falling back to plain text.

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param s3_path: Destination file path on remote storage.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    # Fallback ExtraArgs for files with no recognisable type.
    # NOTE(review): the "extra_args = {" opener and the dict closers in this
    # function are on lines elided from this excerpt.
        u"ContentType": u"text/plain"
    # HTML content; ContentEncoding (e.g. "gzip" for *.gz) comes from the
    # file-name guess.
    text_html_extra_args = {
        u"ContentType": u"text/html",
        u"ContentEncoding": MimeTypes().guess_type(src_fpath)[1]
    text_plain_extra_args = {
        u"ContentType": u"text/plain",
        u"ContentEncoding": MimeTypes().guess_type(src_fpath)[1]
    app_xml_extra_args = {
        u"ContentType": u"application/xml",
        u"ContentEncoding": MimeTypes().guess_type(src_fpath)[1]

    # Guess (mime, encoding) once from the file name.
    mime = MimeTypes().guess_type(src_fpath)[0]
    encoding = MimeTypes().guess_type(src_fpath)[1]

    # Select ExtraArgs by guessed MIME type.
    # NOTE(review): "mime in u'text/html'" is a substring test, not an
    # equality test — it works because guessed types match exactly, but ==
    # would state the intent more clearly.
    if mime is None and encoding is None:
        extra_args = extra_args
    elif mime is None or mime in u"text/plain":
        extra_args = text_plain_extra_args
    elif mime in u"text/html":
        extra_args = text_html_extra_args
    elif mime in u"application/xml":
        extra_args = app_xml_extra_args
        # NOTE(review): the else: header for this fallback assignment is on
        # a line elided from this excerpt.
        extra_args = extra_args

    # NOTE(review): the try: opener for the ClientError handler below is on
    # a line elided from this excerpt.
    s3_resource.Bucket(s3_bucket).upload_file(
        src_fpath, s3_path, ExtraArgs=extra_args
    logging.info(u"Successfully uploaded to " + s3_path)
    except ClientError as e:
def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
    """Recursively uploads input folder to destination.

    Example:
    - s3_bucket: logs.fd.io
    - src_fpath: /workspace/archives.
    - s3_path: /hostname/job/id/

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :param s3_path: S3 destination path.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    # Walk the tree, mirroring the directory layout under s3_path.
    for path, _, files in os.walk(src_fpath):
            # NOTE(review): the inner "for file in files:" header is on a
            # line elided from this excerpt.
            # Directory of the current file, relative to the upload root.
            _path = path.replace(src_fpath, u"")
            _src_fpath = path + u"/" + file
            # normpath collapses the duplicate slashes introduced by the
            # string concatenation above.
            _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
            # NOTE(review): the upload( call opener and its remaining
            # keyword arguments are on lines elided from this excerpt.
            s3_resource=s3_resource,
            src_fpath=_src_fpath,
def deploy_docs(s3_bucket, s3_path, docs_dir):
    """Ship docs dir content to S3 bucket. Requires the s3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the docs. Eg:
        csit/${GERRIT_BRANCH}/report
    :param docs_dir: Directory in which to recursively upload content.
    :type s3_bucket: Object
    :type s3_path: str
    :type docs_dir: str
    """
    # NOTE(review): the branch selecting between the two boto3.resource()
    # calls below (and their remaining arguments) is on lines elided from
    # this excerpt; the first variant targets a custom endpoint taken from
    # the AWS_ENDPOINT_URL environment variable.
    s3_resource = boto3.resource(
        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
    s3_resource = boto3.resource(

    # Recursively upload docs_dir.
    # NOTE(review): the upload_recursive( call opener and its remaining
    # keyword arguments are on lines elided from this excerpt.
        s3_resource=s3_resource,
def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    """Add logs and archives to temp directory to be shipped to S3 bucket.
    Fetches logs and system information and pushes them and archives to S3
    for archiving.
    Requires the s3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket place the logs and archives. Eg:
        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
    :param build_url: URL of the Jenkins build. Jenkins typically provides this
        via the $BUILD_URL environment variable.
    :param workspace: Directory in which to search, typically in Jenkins this is
        $WORKSPACE.
    :type s3_bucket: Object
    :type s3_path: str
    :type build_url: str
    :type workspace: str
    """
    # NOTE(review): as in deploy_docs, the branch selecting between the two
    # boto3.resource() calls (and their remaining arguments) is on lines
    # elided from this excerpt.
    s3_resource = boto3.resource(
        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
    s3_resource = boto3.resource(

    # Stage everything in a throwaway temp dir so only intended files ship.
    previous_dir = os.getcwd()
    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
    # NOTE(review): an os.chdir(work_dir) presumably follows here on an
    # elided line — the relative open() calls below and the final
    # os.chdir(previous_dir) imply it; confirm against the full source.

    # Copy archive files to tmp dir.
    copy_archives(workspace)

    # Create additional build logs.
    with open(u"_build-details.log", u"w+") as f:
        f.write(u"build-url: " + build_url)

    # Magic string used to trim console logs at the appropriate level during
    # retrieval: it is logged here, then everything after its first
    # occurrence is discarded by the split() calls below.
    MAGIC_STRING = u"-----END_OF_BUILD-----"
    logging.info(MAGIC_STRING)

    # Fetch the Jenkins console log and truncate it at the magic string.
    resp = requests.get(build_url + u"/consoleText")
    with open(u"console.log", u"w+", encoding=u"utf-8") as f:
        # NOTE(review): the f.write( opener wrapping this value is on a line
        # elided from this excerpt.
        six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])

    # Timestamped console log (Jenkins timestamps plugin), same truncation.
    query = u"time=HH:mm:ss&appendLog"
    resp = requests.get(build_url + u"/timestamps?" + query)
    with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
        six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])

    # gzip text artifacts before upload.
    compress_text(work_dir)

    # Recursively upload the staged tree.
    # NOTE(review): the upload_recursive( call opener and its remaining
    # keyword arguments are on lines elided from this excerpt.
        s3_resource=s3_resource,

    # Restore the working directory and remove the staging area.
    os.chdir(previous_dir)
    shutil.rmtree(work_dir)
# CLI dispatch: argv[1] names one of the functions defined in this module
# (e.g. deploy_s3, deploy_docs) and the remaining argv entries are forwarded
# to it as positional arguments.
if __name__ == u"__main__":
    entry_point = globals()[sys.argv[1]]
    entry_point(*sys.argv[2:])