3 # Copyright (c) 2020 Cisco and/or its affiliates.
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at:
8 # http://www.apache.org/licenses/LICENSE-2.0
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
16 """Storage utilities library."""
import argparse
import gzip
import os

from mimetypes import MimeTypes

from boto3 import resource
from botocore.client import Config
# Internal Consul-advertised S3-compatible (Minio) endpoint and credentials.
ENDPOINT_URL = u"http://storage.service.consul:9000"
AWS_ACCESS_KEY_ID = u"storage"
AWS_SECRET_ACCESS_KEY = u"Storage1234"
# Region name passed to boto3; required by the S3 API even for Minio.
REGION_NAME = u"yul1"
# MIME types that are gzip-compressed before upload to the "logs" bucket.
COMPRESS_MIME = (
    u"text/html",
    u"text/xml",
    u"application/octet-stream"
)
def compress(src_fpath):
    """Compress a single file with gzip.

    The compressed copy is written next to the original as
    ``<src_fpath>.gz``; the original file is left untouched.

    :param src_fpath: Input file path.
    :type src_fpath: str
    """
    with open(src_fpath, u"rb") as orig_file:
        with gzip.open(f"{src_fpath}.gz", u"wb") as zipped_file:
            # writelines on a binary file object streams it chunk by chunk.
            zipped_file.writelines(orig_file)
def upload(storage, bucket, src_fpath, dst_fpath):
    """Upload a single file to the destination bucket.

    Compressible files headed for the "logs" bucket are gzipped first,
    and both the local source and remote destination paths get a ``.gz``
    suffix. The public download URL is printed on success.

    :param storage: S3 storage resource.
    :param bucket: S3 bucket name.
    :param src_fpath: Input file path.
    :param dst_fpath: Destination file path on remote storage.
    :type storage: Object
    :type bucket: str
    :type src_fpath: str
    :type dst_fpath: str
    """
    mime = MimeTypes().guess_type(src_fpath)[0]
    # guess_type returns None for unknown extensions; fall back to a
    # generic binary type instead of sending ContentType=None.
    if not mime:
        mime = u"application/octet-stream"

    # NOTE(review): the original tested `bucket in "logs"` — a substring
    # check that also matched e.g. "log" or "ogs". Equality is the
    # intended semantics.
    if mime in COMPRESS_MIME and bucket == u"logs":
        compress(src_fpath)
        src_fpath = f"{src_fpath}.gz"
        dst_fpath = f"{dst_fpath}.gz"

    storage.Bucket(f"{bucket}.fd.io").upload_file(
        src_fpath, dst_fpath,
        ExtraArgs={u"ContentType": mime}
    )
    print(f"https://{bucket}.nginx.service.consul/{dst_fpath}")
def upload_recursive(storage, bucket, src_fpath):
    """Recursively upload an input folder to the destination bucket.

    Example:
    - bucket: logs
    - src_fpath: /home/user
    - dst_fpath: logs.fd.io/home/user

    :param storage: S3 storage resource.
    :param bucket: S3 bucket name.
    :param src_fpath: Input folder path.
    :type storage: Object
    :type bucket: str
    :type src_fpath: str
    """
    for path, _, files in os.walk(src_fpath):
        for file_name in files:
            # Remote path mirrors the local tree; strip the walk root
            # and any leading "/" so the key is relative.
            _path = path.replace(src_fpath, u"")
            _dir = src_fpath[1:] if src_fpath.startswith(u"/") else src_fpath
            _dst_fpath = os.path.normpath(f"{_dir}/{_path}/{file_name}")
            _src_fpath = os.path.join(path, file_name)
            upload(storage, bucket, _src_fpath, _dst_fpath)
def main():
    """Main function for storage manipulation.

    Parses the command line (``--dir`` to upload, target ``--bucket``),
    builds the S3 resource against the internal endpoint and uploads the
    directory recursively.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        u"-d", u"--dir", required=True, type=str,
        help=u"Directory to upload to storage."
    )
    parser.add_argument(
        u"-b", u"--bucket", required=True, type=str,
        help=u"Target bucket on storage."
    )
    args = parser.parse_args()

    # Create main storage resource.
    storage = resource(
        u"s3",
        endpoint_url=ENDPOINT_URL,
        aws_access_key_id=AWS_ACCESS_KEY_ID,
        aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
        # Minio requires signature v4.
        config=Config(
            signature_version=u"s3v4"
        ),
        region_name=REGION_NAME
    )

    upload_recursive(
        storage=storage,
        bucket=args.bucket,
        src_fpath=args.dir
    )


if __name__ == u"__main__":
    main()