3 # Copyright (c) 2021 Cisco and/or its affiliates.
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at:
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
echo "---> publish_library_py.sh"
# Destination for the embedded Python helper on the Jenkins worker.
PYTHON_SCRIPT="/w/workspace/publish_library.py"
# Make sure the parent directory exists before writing the script.
mkdir -p $(dirname "$PYTHON_SCRIPT")
# Quoted heredoc delimiter ('END_OF_PYTHON_SCRIPT'): the Python body below is
# written verbatim, with no shell variable expansion.
cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
28 """S3 publish library."""
37 from mimetypes import MimeTypes
40 from botocore.exceptions import ClientError
46 format=u"%(levelname)s: %(message)s",
50 logging.getLogger(u"botocore").setLevel(logging.INFO)
56 u"application/octet-stream"
def compress(src_fpath):
    """Compress a single file with gzip, writing ``<src_fpath>.gz``.

    The original file is left in place; only the compressed copy is created.

    :param src_fpath: Input file path.
    :type src_fpath: str
    """
    with open(src_fpath, u"rb") as orig_file:
        with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
            # copyfileobj streams fixed-size chunks; iterating a binary file
            # with writelines() splits on newline bytes, which is slow and
            # pointless for arbitrary binary content.
            shutil.copyfileobj(orig_file, zipped_file)
def copy_archives(workspace):
    """Copy files or directories in a $WORKSPACE/archives to the current
    working directory.

    :param workspace: Workspace directory with an ``archives`` directory.
    :type workspace: str
    :raises RuntimeError: If the archives path is missing, is a plain file,
        or an entry cannot be copied.
    """
    archives_dir = os.path.join(workspace, u"archives")
    dest_dir = os.getcwd()

    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)

    if not os.path.exists(archives_dir):
        logging.error(u"Archives dir does not exist.")
        raise RuntimeError(u"Missing directory " + archives_dir)
    if os.path.isfile(archives_dir):
        logging.error(u"Target is a file, not a directory.")
        raise RuntimeError(u"Not a directory.")

    logging.debug("Archives dir {} does exist.".format(archives_dir))
    for file_or_dir in os.listdir(archives_dir):
        f = os.path.join(archives_dir, file_or_dir)
        try:
            logging.debug(u"Copying " + f)
            # shutil.copy() cannot copy directories; use copytree() so
            # nested archive directories are preserved as documented.
            if os.path.isdir(f):
                shutil.copytree(f, os.path.join(dest_dir, file_or_dir))
            else:
                shutil.copy(f, dest_dir)
        except (shutil.Error, OSError) as e:
            logging.error(e)
            raise RuntimeError(u"Could not copy " + f)
def upload(s3_resource, s3_bucket, src_fpath, s3_path):
    """Upload single file to destination bucket.

    Log-bucket uploads of compressible mime types are gzipped first; upload
    errors are logged and swallowed (best-effort semantics).

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param s3_path: Destination file path on remote storage.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    mime, encoding = MimeTypes().guess_type(src_fpath)
    if mime is None:
        # Default whenever the type is unknown — even if an encoding was
        # guessed (e.g. a bare ".gz" file yields (None, "gzip")) — so
        # ContentType is never None.
        mime = u"application/octet-stream"

    if u"logs" in s3_bucket:
        if mime in COMPRESS_MIME and encoding != u"gzip":
            compress(src_fpath)
            src_fpath = src_fpath + u".gz"
            s3_path = s3_path + u".gz"

    extra_args = {u"ContentType": mime}

    try:
        logging.info(u"Attempting to upload file " + src_fpath)
        s3_resource.Bucket(s3_bucket).upload_file(
            src_fpath, s3_path, ExtraArgs=extra_args
        )
        logging.info(u"Successfully uploaded to " + s3_path)
    except ClientError as e:
        logging.error(e)
def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
    """Recursively uploads input folder to destination.

    Example:
    - s3_bucket: logs.fd.io
    - src_fpath: /workspace/archives.
    - s3_path: /hostname/job/id/

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :param s3_path: S3 destination path.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    for path, _, files in os.walk(src_fpath):
        # relpath (rather than str.replace, which replaces EVERY occurrence
        # of the root string anywhere in the walked path) keeps destination
        # keys correct even when a subdirectory repeats the root's name.
        rel_dir = os.path.relpath(path, src_fpath)
        for file_name in files:
            _src_fpath = path + u"/" + file_name
            _s3_path = os.path.normpath(
                s3_path + u"/" + rel_dir + u"/" + file_name
            )
            upload(
                s3_resource=s3_resource,
                s3_bucket=s3_bucket,
                src_fpath=_src_fpath,
                s3_path=_s3_path,
            )
def deploy_docs(s3_bucket, s3_path, docs_dir):
    """Ship docs dir content to S3 bucket. Requires the s3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the docs. Eg:
        csit/${GERRIT_BRANCH}/report
    :param docs_dir: Directory in which to recursively upload content.
    :type s3_bucket: Object
    :type s3_path: str
    :type docs_dir: str
    """
    # Thin wrapper: obtain the resource handle, then hand off the whole
    # directory tree to the recursive uploader.
    s3_resource = boto3.resource(u"s3")
    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=docs_dir,
        s3_path=s3_path,
    )
def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    """Add logs and archives to temp directory to be shipped to S3 bucket.
    Fetches logs and system information and pushes them and archives to S3
    for archiving.
    Requires the s3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket place the logs and archives. Eg:
        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
    :param build_url: URL of the Jenkins build. Jenkins typically provides this
        via the $BUILD_URL environment variable.
    :param workspace: Directory in which to search, typically in Jenkins this is
        $WORKSPACE
    :type s3_bucket: Object
    :type s3_path: str
    :type build_url: str
    :type workspace: str
    """
    s3_resource = boto3.resource(
        u"s3",
        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
    )

    previous_dir = os.getcwd()
    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
    try:
        os.chdir(work_dir)

        # Copy archive files to tmp dir.
        copy_archives(workspace)

        # Create additional build logs.
        with open(u"_build-details.log", u"w+") as f:
            f.write(u"build-url: " + build_url)

        with open(u"_sys-info.log", u"w+") as f:
            sys_cmds = []
            logging.debug(u"Platform: " + sys.platform)
            if sys.platform == u"linux" or sys.platform == u"linux2":
                # NOTE(review): only the sar entries are visible in the
                # reviewed copy; confirm this command list against the
                # canonical script.
                sys_cmds = [
                    [u"uname", u"-a"],
                    [u"lscpu"],
                    [u"nproc"],
                    [u"df", u"-h"],
                    [u"free", u"-m"],
                    [u"ip", u"addr"],
                    [u"sar", u"-b", u"-r", u"-n", u"DEV"],
                    [u"sar", u"-P", u"ALL"],
                ]
            for c in sys_cmds:
                try:
                    output = subprocess.check_output(c).decode(u"utf-8")
                except FileNotFoundError:
                    # c is a list; joining it avoids the str + list
                    # TypeError the original raised inside this handler.
                    logging.debug(u"Command not found: " + u" ".join(c))
                    continue
                cmd = u" ".join(c)
                output = u"---> " + cmd + "\n" + output + "\n"
                f.write(output)

        # Magic string used to trim console logs at the appropriate level during
        # console-log retrieval.
        MAGIC_STRING = u"-----END_OF_BUILD-----"
        logging.info(MAGIC_STRING)

        resp = requests.get(build_url + u"/consoleText")
        with open(u"console.log", u"w+", encoding=u"utf-8") as f:
            f.write(
                six.text_type(
                    resp.content.decode(u"utf-8").split(MAGIC_STRING)[0]
                )
            )

        query = u"time=HH:mm:ss&appendLog"
        resp = requests.get(build_url + u"/timestamps?" + query)
        with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
            f.write(
                six.text_type(
                    resp.content.decode(u"utf-8").split(MAGIC_STRING)[0]
                )
            )

        upload_recursive(
            s3_resource=s3_resource,
            s3_bucket=s3_bucket,
            src_fpath=work_dir,
            s3_path=s3_path,
        )
    finally:
        # Always restore the previous cwd and remove the temp tree, even if
        # copying/fetching/uploading fails, so reruns don't leak state.
        os.chdir(previous_dir)
        shutil.rmtree(work_dir)
if __name__ == u"__main__":
    # CLI dispatch: argv[1] names one of the functions defined above
    # (e.g. "deploy_s3"); the remaining arguments are passed through as
    # its positional parameters.
    globals()[sys.argv[1]](*sys.argv[2:])