# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
echo "---> logs_publish.sh"

CDN_URL="logs.nginx.service.consul"
export AWS_ENDPOINT_URL="http://storage.service.consul:9000"

# FIXME: s3 config (until migrated to config provider, then pwd will be reset)
mkdir -p "$HOME/.aws"
echo "[default]
aws_access_key_id = storage
aws_secret_access_key = Storage1234" >> "$HOME/.aws/credentials"
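
# Optional sanity check, a minimal sketch assuming the AWS CLI is installed
# on the executor (not guaranteed here); uncomment to verify the credentials:
# aws --endpoint-url "$AWS_ENDPOINT_URL" s3 ls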

PYTHON_SCRIPT="/w/workspace/test-logs/logs_publish.py"

# This script uploads the artifacts to a backup upload location.
if [ -f "$PYTHON_SCRIPT" ]; then
    echo "WARNING: $PYTHON_SCRIPT already exists - assume backup archive upload already done"
    exit 0
fi

mkdir -p "$(dirname "$PYTHON_SCRIPT")"

cat >"$PYTHON_SCRIPT" <<'END_OF_PYTHON_SCRIPT'
41 """Storage utilities library."""
50 from mimetypes import MimeTypes
53 from botocore.exceptions import ClientError
59 format=u"%(levelname)s: %(message)s",
63 logging.getLogger(u"botocore").setLevel(logging.INFO)
69 u"application/octet-stream"


def compress(src_fpath):
    """Compress a single file.

    :param src_fpath: Input file path.
    :type src_fpath: str
    """
    with open(src_fpath, u"rb") as orig_file:
        with gzip.open(src_fpath + ".gz", u"wb") as zipped_file:
            zipped_file.writelines(orig_file)
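
# Usage sketch (hypothetical path): compress(u"/tmp/console.log") writes a
# gzipped copy to /tmp/console.log.gz and leaves the original file in place.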


def copy_archives(workspace):
    """Copy files or directories in a $WORKSPACE/archives to the current
    directory.

    :param workspace: Workspace directory with archives directory.
    :type workspace: str
    """
    archives_dir = os.path.join(workspace, u"archives")
    dest_dir = os.getcwd()

    logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)

    if os.path.exists(archives_dir):
        if os.path.isfile(archives_dir):
            logging.error(u"Target is a file, not a directory.")
            raise RuntimeError(u"Not a directory.")
        logging.debug(u"Archives dir {} does exist.".format(archives_dir))
        for file_or_dir in os.listdir(archives_dir):
            f = os.path.join(archives_dir, file_or_dir)
            try:
                logging.debug(u"Copying " + f)
                shutil.copy(f, dest_dir)
            except shutil.Error as e:
                logging.error(e)
                raise RuntimeError(u"Could not copy " + f)
    else:
        logging.error(u"Archives dir does not exist.")
        raise RuntimeError(u"Missing directory " + archives_dir)


def upload(s3_resource, s3_bucket, src_fpath, s3_path):
    """Upload single file to destination bucket.

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param s3_path: Destination file path on remote storage.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    mime_guess = MimeTypes().guess_type(src_fpath)
    mime = mime_guess[0]
    encoding = mime_guess[1]
    if not mime:
        mime = u"application/octet-stream"

    if s3_bucket != u"docs.fd.io":
        if mime in COMPRESS_MIME and encoding != u"gzip":
            compress(src_fpath)
            src_fpath = src_fpath + u".gz"
            s3_path = s3_path + u".gz"

    extra_args = {u"ContentType": mime}

    try:
        logging.info(u"Attempting to upload file " + src_fpath)
        s3_resource.Bucket(s3_bucket).upload_file(
            src_fpath, s3_path, ExtraArgs=extra_args
        )
        logging.info(u"Successfully uploaded to " + s3_path)
    except ClientError as e:
        logging.error(e)
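
# Usage sketch (hypothetical arguments): upload(resource, u"logs.fd.io",
# u"/tmp/report.txt", u"host/job/1/report.txt") gzips the file first, since
# text/plain is in COMPRESS_MIME, and stores it as host/job/1/report.txt.gz.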


def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
    """Recursively uploads input folder to destination.

    Example:
      - s3_bucket: logs.fd.io
      - src_fpath: /workspace/archives.
      - s3_path: /hostname/job/id/

    :param s3_resource: S3 storage resource.
    :param s3_bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :param s3_path: S3 destination path.
    :type s3_resource: Object
    :type s3_bucket: str
    :type src_fpath: str
    :type s3_path: str
    """
    for path, _, files in os.walk(src_fpath):
        for file in files:
            _path = path.replace(src_fpath, u"")
            _src_fpath = path + u"/" + file
            _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
            upload(
                s3_resource=s3_resource,
                s3_bucket=s3_bucket,
                src_fpath=_src_fpath,
                s3_path=_s3_path
            )
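
# Usage sketch (hypothetical layout): if src_fpath=u"/tmp/backup-s3.x" holds
# logs/console.log, then upload_recursive(..., s3_path=u"host/job/7") uploads
# it under the key host/job/7/logs/console.log (gzipped when eligible).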


def deploy_s3(s3_bucket, s3_path, build_url, workspace):
    """Add logs and archives to temp directory to be shipped to S3 bucket.
    Fetches logs and system information and pushes them and archives to S3
    for log archiving.

    Requires the S3 bucket to exist.

    :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
    :param s3_path: Path on S3 bucket to place the logs and archives. Eg:
        $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
    :param build_url: URL of the Jenkins build. Jenkins typically provides this
        via the $BUILD_URL environment variable.
    :param workspace: Directory in which to search, typically in Jenkins this is
        $WORKSPACE
    :type s3_bucket: str
    :type s3_path: str
    :type build_url: str
    :type workspace: str
    """
    s3_resource = boto3.resource(
        u"s3",
        endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
    )

    previous_dir = os.getcwd()
    work_dir = tempfile.mkdtemp(prefix="backup-s3.")
    os.chdir(work_dir)

    # Copy archive files to tmp dir.
    copy_archives(workspace)

    # Create additional build logs.
    with open(u"_build-details.log", u"w+") as f:
        f.write(u"build-url: " + build_url)

    with open(u"_sys-info.log", u"w+") as f:
        sys_cmds = []

        logging.debug(u"Platform: " + sys.platform)
        if sys.platform == u"linux" or sys.platform == u"linux2":
            sys_cmds = [
                [u"uname", u"-a"],
                [u"lscpu"],
                [u"nproc"],
                [u"df", u"-h"],
                [u"free", u"-m"],
                [u"ip", u"addr"],
                [u"sar", u"-b", u"-r", u"-n", u"DEV"],
                [u"sar", u"-P", u"ALL"],
            ]

        for c in sys_cmds:
            try:
                output = subprocess.check_output(c).decode(u"utf-8")
            except FileNotFoundError:
                logging.debug(u"Command not found: " + u" ".join(c))
                continue

            cmd = u" ".join(c)
            output = u"---> " + cmd + u"\n" + output + u"\n"
            f.write(output)
            logging.info(output)

    # Magic string used to trim console logs at the appropriate level during
    # wget.
    MAGIC_STRING = u"-----END_OF_BUILD-----"
    logging.info(MAGIC_STRING)

    resp = requests.get(build_url + u"/consoleText")
    with open(u"console.log", u"w+", encoding=u"utf-8") as f:
        f.write(
            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
        )

    query = u"time=HH:mm:ss&appendLog"
    resp = requests.get(build_url + u"/timestamps?" + query)
    with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
        f.write(
            six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
        )

    upload_recursive(
        s3_resource=s3_resource,
        s3_bucket=s3_bucket,
        src_fpath=work_dir,
        s3_path=s3_path,
    )

    os.chdir(previous_dir)
    shutil.rmtree(work_dir)


if __name__ == u"__main__":
    globals()[sys.argv[1]](*sys.argv[2:])
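
# The line above is a small CLI dispatcher: sys.argv[1] names a function in
# this module and the remaining arguments are passed through, so the wrapper
# script's "deploy_s3 ..." invocation is equivalent to calling, e.g.:
#   deploy_s3(u"logs.fd.io", u"host/job/42/", build_url, workspace)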
END_OF_PYTHON_SCRIPT

# The 'deploy_s3' command below expects the archives
# directory to exist. Normally lf-infra-sysstat or similar would
# create it and add content, but to make sure this script is
# self-contained, we ensure it exists here.
mkdir -p "$WORKSPACE/archives"

s3_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
echo "INFO: S3 path $s3_path"

echo "INFO: archiving backup logs to S3"
# shellcheck disable=SC2086
python3 $PYTHON_SCRIPT deploy_s3 "logs.fd.io" "$s3_path" \
    "$BUILD_URL" "$WORKSPACE"

echo "S3 build backup logs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"