feat(etl): Release pipelines

Bump the device coverage job from release 2402 to 2406: filter input on
"report-coverage-2406" paths and write parquet under coverage_rls2406.
Make the S3_LOGS_BUCKET and S3_DOCS_BUCKET names overridable via
environment variables, and fall back to the default boto3 session when
the OUT_AWS_* output credentials are not set.

diff --git a/csit.infra.etl/coverage_device_rls2402.py b/csit.infra.etl/coverage_device_rls2406.py
similarity index 89%
rename from csit.infra.etl/coverage_device_rls2402.py
rename to csit.infra.etl/coverage_device_rls2406.py
index 2db8081..04f4135 100644
@@ -1,6 +1,6 @@
 #!/usr/bin/env python3
 
-# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -29,8 +29,8 @@ from pyspark.sql.functions import col, lit, regexp_replace
 from pyspark.sql.types import StructType
 
 
-S3_LOGS_BUCKET="fdio-logs-s3-cloudfront-index"
-S3_DOCS_BUCKET="fdio-docs-s3-cloudfront-index"
+S3_LOGS_BUCKET=environ.get("S3_LOGS_BUCKET", "fdio-logs-s3-cloudfront-index")
+S3_DOCS_BUCKET=environ.get("S3_DOCS_BUCKET", "fdio-docs-s3-cloudfront-index")
 PATH=f"s3://{S3_LOGS_BUCKET}/vex-yul-rot-jenkins-1/csit-vpp-device-*"
 SUFFIX="info.json.gz"
 IGNORE_SUFFIX=[
@@ -141,7 +141,7 @@ paths = wr.s3.list_objects(
     ignore_empty=True
 )
 
-filtered_paths = [path for path in paths if "report-coverage-2402" in path]
+filtered_paths = [path for path in paths if "report-coverage-2406" in path]
 
 out_sdf = process_json_to_dataframe("device", filtered_paths)
 out_sdf.printSchema()
@@ -151,20 +151,26 @@ out_sdf = out_sdf \
     .withColumn("day", lit(datetime.now().day)) \
     .repartition(1)
 
+try:
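+    # Any missing OUT_AWS_* variable raises KeyError, triggering the fallback.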
+    boto3_session = session.Session(
+        aws_access_key_id=environ["OUT_AWS_ACCESS_KEY_ID"],
+        aws_secret_access_key=environ["OUT_AWS_SECRET_ACCESS_KEY"],
+        region_name=environ["OUT_AWS_DEFAULT_REGION"]
+    )
+except KeyError:
+    boto3_session = session.Session()
+
 try:
     wr.s3.to_parquet(
         df=out_sdf.toPandas(),
-        path=f"s3://{S3_DOCS_BUCKET}/csit/parquet/coverage_rls2402",
+        path=f"s3://{S3_DOCS_BUCKET}/csit/parquet/coverage_rls2406",
         dataset=True,
         partition_cols=["test_type", "year", "month", "day"],
         compression="snappy",
         use_threads=True,
         mode="overwrite_partitions",
-        boto3_session=session.Session(
-            aws_access_key_id=environ["OUT_AWS_ACCESS_KEY_ID"],
-            aws_secret_access_key=environ["OUT_AWS_SECRET_ACCESS_KEY"],
-            region_name=environ["OUT_AWS_DEFAULT_REGION"]
-        )
+        boto3_session=boto3_session
     )
 except EmptyDataFrame:
     pass