3 # Copyright (c) 2021 Cisco and/or its affiliates.
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at:
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
echo "---> publish_library_py.sh"

PYTHON_SCRIPT="/w/workspace/publish_library.py"

# Ensure the destination directory exists; quote expansions so paths with
# spaces or glob characters survive word splitting (shellcheck SC2046/SC2086).
mkdir -p "$(dirname "$PYTHON_SCRIPT")"

# Quoted heredoc delimiter: the Python below is written verbatim,
# with no shell expansion inside the script body.
cat >"$PYTHON_SCRIPT" <<'END_OF_PYTHON_SCRIPT'
"""S3 publish library."""

import glob
import gzip
import logging
import os
import shutil
import sys
import tempfile

import boto3
from botocore.exceptions import ClientError
import requests
import six

# Compact single-line log format on stdout; INFO and above.
logging.basicConfig(
    format=u"%(levelname)s: %(message)s",
    stream=sys.stdout,
    level=logging.INFO
)
# botocore is very chatty at DEBUG; cap its logger at INFO.
logging.getLogger(u"botocore").setLevel(logging.INFO)
# Mapping from (lowercase) file extension to the Content-Type header that
# should be set on the uploaded S3 object. Anything not listed here falls
# back to "application/octet-stream" (see upload()).
FILE_TYPE = {
    u"xml": u"application/xml",
    u"html": u"text/html",
    u"txt": u"text/plain",
    u"log": u"text/plain",
    u"css": u"text/css",
    u"md": u"text/markdown",
    u"rst": u"text/x-rst",
    u"csv": u"text/csv",
    u"svg": u"image/svg+xml",
    u"jpg": u"image/jpeg",
    u"png": u"image/png",
    u"gif": u"image/gif",
    u"js": u"application/javascript",
    u"pdf": u"application/pdf",
    u"json": u"application/json",
    u"otf": u"font/otf",
    u"ttf": u"font/ttf",
    u"woff": u"font/woff",
    u"woff2": u"font/woff2"
}
def compress_text(src_dpath):
    """Compress all text files in directory.

    Each matching file is gzip-compressed next to the original (``name.gz``)
    and the uncompressed original is removed.

    :param src_dpath: Input dir path.
    :type src_dpath: str
    """
    save_dir = os.getcwd()
    os.chdir(src_dpath)
    try:
        # Text-like artifacts worth compressing before upload.
        compress_types = [
            "**/*.html",
            "**/*.log",
            "**/*.txt",
            "**/*.xml",
            "**/*.json"
        ]
        paths = []
        for _type in compress_types:
            search = os.path.join(src_dpath, _type)
            paths.extend(glob.glob(search, recursive=True))

        for _file in paths:
            # glob may follow symlink paths that open can't find
            if os.path.exists(_file):
                gz_file = u"{}.gz".format(_file)
                with open(_file, "rb") as src, gzip.open(gz_file, "wb") as dest:
                    shutil.copyfileobj(src, dest)
                # Keep only the compressed copy.
                os.remove(_file)
    finally:
        # Restore the caller's working directory even if compression fails.
        os.chdir(save_dir)
def copy_archives(workspace):
    """Copy files or directories in a $WORKSPACE/archives to the current
    directory.

    :params workspace: Workspace directery with archives directory.
    :type workspace: str
    :raises RuntimeError: If the archives path is a file, is missing, or a
        copy operation fails.
    """
    archives_dir = os.path.join(workspace, u"archives")
    dest_dir = os.getcwd()

    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)

    if os.path.exists(archives_dir):
        if os.path.isfile(archives_dir):
            logging.error(u"Target is a file, not a directory.")
            raise RuntimeError(u"Not a directory.")
        else:
            logging.debug("Archives dir {} does exist.".format(archives_dir))
            for item in os.listdir(archives_dir):
                src = os.path.join(archives_dir, item)
                dst = os.path.join(dest_dir, item)
                try:
                    if os.path.isdir(src):
                        shutil.copytree(src, dst, symlinks=False, ignore=None)
                    else:
                        shutil.copy2(src, dst)
                except shutil.Error as e:
                    logging.error(e)
                    raise RuntimeError(u"Could not copy " + src)
    else:
        logging.error(u"Archives dir does not exist.")
        raise RuntimeError(u"Missing directory " + archives_dir)
def upload(s3_resource, s3_bucket, src_fpath, s3_path):
    """Upload single file to destination bucket.

    Directories are skipped. Gzip-compressed files are uploaded with
    ``ContentEncoding: gzip`` and the content type of the file underneath
    the ``.gz`` suffix, so browsers can display them inline.

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param s3_path: Destination file path on remote storage.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    def is_gzip_file(filepath):
        # A gzip stream always starts with the two-byte magic 1f 8b.
        with open(filepath, u"rb") as test_f:
            return test_f.read(2) == b"\x1f\x8b"

    if os.path.isdir(src_fpath):
        return

    # Initialize unconditionally so the upload below never sees an
    # undefined extra_args (e.g. for special files that are not regular
    # files nor directories).
    extra_args = dict()
    if os.path.isfile(src_fpath):
        file_name, file_extension = os.path.splitext(src_fpath)
        content_encoding = u""
        content_type = u"application/octet-stream"
        if is_gzip_file(src_fpath):
            # Strip the ".gz" and classify by the inner extension, so e.g.
            # "console.log.gz" is served as text/plain with gzip encoding.
            file_name, file_extension = os.path.splitext(file_name)
            content_encoding = "gzip"
        content_type = FILE_TYPE.get(
            file_extension.strip("."),
            u"application/octet-stream"
        )
        extra_args[u"ContentType"] = content_type
        if content_encoding:
            extra_args[u"ContentEncoding"] = content_encoding

    try:
        s3_resource.Bucket(s3_bucket).upload_file(
            src_fpath, s3_path, ExtraArgs=extra_args
        )
        logging.info(u"Successfully uploaded to " + s3_path)
    except ClientError as e:
        # Best-effort: log the failure and let the caller continue with
        # the remaining files.
        logging.error(e)
def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
    """Recursively uploads input folder to destination.

    Example:
      - s3_bucket: logs.fd.io
      - src_fpath: /workspace/archives.
      - s3_path: /hostname/job/id/

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :param s3_path: S3 destination path.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    for path, _, files in os.walk(src_fpath):
        for file in files:
            # Mirror the directory layout below src_fpath on the S3 side.
            _path = path.replace(src_fpath, u"")
            _src_fpath = path + u"/" + file
            _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
            upload(
                s3_resource=s3_resource,
                s3_bucket=s3_bucket,
                src_fpath=_src_fpath,
                s3_path=_s3_path
            )
def deploy_docs(s3_bucket, s3_path, docs_dir):
    """Ship docs dir content to S3 bucket. Requires the s3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the docs. Eg:
        csit/${GERRIT_BRANCH}/report
    :param docs_dir: Directory in which to recursively upload content.
    :type s3_bucket: str
    :type s3_path: str
    :type docs_dir: str
    """
    try:
        # Prefer an explicit endpoint (e.g. a non-AWS S3 gateway) when the
        # environment provides one; fall back to the default AWS endpoint.
        s3_resource = boto3.resource(
            u"s3",
            endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
        )
    except KeyError:
        s3_resource = boto3.resource(u"s3")

    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=docs_dir,
        s3_path=s3_path
    )
def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    """Add logs and archives to temp directory to be shipped to S3 bucket.
    Fetches logs and system information and pushes them and archives to S3
    for log archiving.
    Requires the s3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket place the logs and archives. Eg:
        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
    :param build_url: URL of the Jenkins build. Jenkins typically provides this
        via the $BUILD_URL environment variable.
    :param workspace: Directory in which to search, typically in Jenkins this is
        $WORKSPACE
    :type s3_bucket: str
    :type s3_path: str
    :type build_url: str
    :type workspace: str
    """
    try:
        # Prefer an explicit endpoint (e.g. a non-AWS S3 gateway) when the
        # environment provides one; fall back to the default AWS endpoint.
        s3_resource = boto3.resource(
            u"s3",
            endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
        )
    except KeyError:
        s3_resource = boto3.resource(u"s3")

    previous_dir = os.getcwd()
    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
    os.chdir(work_dir)
    try:
        # Copy archive files to tmp dir.
        copy_archives(workspace)

        # Create additional build logs.
        with open(u"_build-details.log", u"w+") as f:
            f.write(u"build-url: " + build_url)

        # Magic string used to trim console logs at the appropriate level
        # during retrieval below.
        MAGIC_STRING = u"-----END_OF_BUILD-----"
        logging.info(MAGIC_STRING)

        # Fetch the console log, truncated at the magic string so the log
        # of this very publish step is not included.
        resp = requests.get(build_url + u"/consoleText")
        with open(u"console.log", u"w+", encoding=u"utf-8") as f:
            f.write(
                six.text_type(
                    resp.content.decode(u"utf-8").split(MAGIC_STRING)[0]
                )
            )

        # Fetch the timestamped console log the same way.
        query = u"time=HH:mm:ss&appendLog"
        resp = requests.get(build_url + u"/timestamps?" + query)
        with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
            f.write(
                six.text_type(
                    resp.content.decode(u"utf-8").split(MAGIC_STRING)[0]
                )
            )

        compress_text(work_dir)

        upload_recursive(
            s3_resource=s3_resource,
            s3_bucket=s3_bucket,
            src_fpath=work_dir,
            s3_path=s3_path
        )
    finally:
        # Always restore the caller's working directory and remove the
        # temporary staging area, even if fetching or uploading failed.
        os.chdir(previous_dir)
        shutil.rmtree(work_dir)
if __name__ == u"__main__":
    # CLI dispatch: the first argument names a function defined in this
    # module; all remaining arguments are forwarded to it positionally.
    func_name = sys.argv[1]
    func_args = sys.argv[2:]
    globals()[func_name](*func_args)