C-Dash: Add VPP Device coverage data 97/38697/2
author Tibor Frank <tifrank@cisco.com>
Tue, 18 Apr 2023 10:04:49 +0000 (12:04 +0200)
committer Tibor Frank <tifrank@cisco.com>
Tue, 18 Apr 2023 12:06:14 +0000 (12:06 +0000)
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Change-Id: Ib083d287b8483c8b5b1be14ef3ce6b798eb04352

csit.infra.dash/app/cdash/coverage/layout.py
csit.infra.dash/app/cdash/coverage/tables.py
csit.infra.dash/app/cdash/data/data.py
csit.infra.dash/app/cdash/data/data.yaml
csit.infra.dash/app/cdash/report/layout.py
csit.infra.dash/app/cdash/trending/layout.py
csit.infra.dash/app/cdash/utils/constants.py

csit.infra.dash/app/cdash/coverage/layout.py
index 03d2da7..f519f5a 100644
@@ -92,7 +92,7 @@ class Layout:
             if dut == "dpdk":
                 area = "dpdk"
             else:
-                area = "-".join(lst_test_id[3:-2])
+                area = ".".join(lst_test_id[3:-2])
             suite = lst_test_id[-2].replace("2n1l-", "").replace("1n1l-", "").\
                 replace("2n-", "")
             test = lst_test_id[-1]
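The switch from "-" to "." when joining the area fields makes the derived key match the dot-separated entries added to the labels dictionary in csit.infra.dash/app/cdash/utils/constants.py (last diff below). A minimal, self-contained sketch of the parsing, with a hypothetical device test ID:

    test_id = "tests.vpp.device.crypto.ethip4.2n1l-10ge2p1x710-suite.test"  # hypothetical ID
    lst_test_id = test_id.split(".")

    area = ".".join(lst_test_id[3:-2])  # "crypto.ethip4" instead of "crypto-ethip4"
    suite = lst_test_id[-2].replace("2n1l-", "").replace("1n1l-", "").replace("2n-", "")
    test = lst_test_id[-1]
    print(area, suite, test)  # crypto.ethip4 10ge2p1x710-suite test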
csit.infra.dash/app/cdash/coverage/tables.py
index a773a22..31b227e 100644
@@ -75,8 +75,10 @@ def select_coverage_data(
                 inplace=True
             )
 
+    ttype = df["test_type"].to_list()[0]
+
     # Prepare the coverage data
-    def _latency(hdrh_string: str, percentile: float) -> int:
+    def _laten(hdrh_string: str, percentile: float) -> int:
         """Get latency from HDRH string for given percentile.
 
         :param hdrh_string: Encoded HDRH string.
@@ -105,109 +107,118 @@ def select_coverage_data(
         return test_id.split(".")[-1].replace("-ndrpdr", "")
 
     cov = pd.DataFrame()
-    cov["suite"] = df.apply(lambda row: _get_suite(row["test_id"]), axis=1)
+    cov["Suite"] = df.apply(lambda row: _get_suite(row["test_id"]), axis=1)
     cov["Test Name"] = df.apply(lambda row: _get_test(row["test_id"]), axis=1)
-    cov["Throughput_Unit"] = df["result_pdr_lower_rate_unit"]
-    cov["Throughput_NDR"] = df.apply(
-        lambda row: row["result_ndr_lower_rate_value"] / 1e6, axis=1
-    )
-    cov["Throughput_NDR_Mbps"] = df.apply(
-        lambda row: row["result_ndr_lower_bandwidth_value"] /1e9, axis=1
-    )
-    cov["Throughput_PDR"] = \
-        df.apply(lambda row: row["result_pdr_lower_rate_value"] / 1e6, axis=1)
-    cov["Throughput_PDR_Mbps"] = df.apply(
-        lambda row: row["result_pdr_lower_bandwidth_value"] /1e9, axis=1
-    )
-    cov["Latency Forward [us]_10% PDR_P50"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_10_hdrh"], 50.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_10% PDR_P90"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_10_hdrh"], 90.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_10% PDR_P99"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_10_hdrh"], 99.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_50% PDR_P50"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_50_hdrh"], 50.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_50% PDR_P90"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_50_hdrh"], 90.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_50% PDR_P99"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_50_hdrh"], 99.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_90% PDR_P50"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_90_hdrh"], 50.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_90% PDR_P90"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_90_hdrh"], 90.0),
-        axis=1
-    )
-    cov["Latency Forward [us]_90% PDR_P99"] = df.apply(
-        lambda row: _latency(row["result_latency_forward_pdr_90_hdrh"], 99.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_10% PDR_P50"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_10_hdrh"], 50.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_10% PDR_P90"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_10_hdrh"], 90.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_10% PDR_P99"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_10_hdrh"], 99.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_50% PDR_P50"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_50_hdrh"], 50.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_50% PDR_P90"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_50_hdrh"], 90.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_50% PDR_P99"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_50_hdrh"], 99.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_90% PDR_P50"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_90_hdrh"], 50.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_90% PDR_P90"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_90_hdrh"], 90.0),
-        axis=1
-    )
-    cov["Latency Reverse [us]_90% PDR_P99"] = df.apply(
-        lambda row: _latency(row["result_latency_reverse_pdr_90_hdrh"], 99.0),
-        axis=1
-    )
+
+    if ttype == "device":
+        cov = cov.assign(Result="PASS")
+    else:
+        cov["Throughput_Unit"] = df["result_pdr_lower_rate_unit"]
+        cov["Throughput_NDR"] = df.apply(
+            lambda row: row["result_ndr_lower_rate_value"] / 1e6, axis=1
+        )
+        cov["Throughput_NDR_Mbps"] = df.apply(
+            lambda row: row["result_ndr_lower_bandwidth_value"] /1e9, axis=1
+        )
+        cov["Throughput_PDR"] = df.apply(
+            lambda row: row["result_pdr_lower_rate_value"] / 1e6, axis=1
+        )
+        cov["Throughput_PDR_Mbps"] = df.apply(
+            lambda row: row["result_pdr_lower_bandwidth_value"] /1e9, axis=1
+        )
+        cov["Latency Forward [us]_10% PDR_P50"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_10_hdrh"], 50.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_10% PDR_P90"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_10_hdrh"], 90.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_10% PDR_P99"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_10_hdrh"], 99.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_50% PDR_P50"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_50_hdrh"], 50.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_50% PDR_P90"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_50_hdrh"], 90.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_50% PDR_P99"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_50_hdrh"], 99.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_90% PDR_P50"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_90_hdrh"], 50.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_90% PDR_P90"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_90_hdrh"], 90.0),
+            axis=1
+        )
+        cov["Latency Forward [us]_90% PDR_P99"] = df.apply(
+            lambda row: _laten(row["result_latency_forward_pdr_90_hdrh"], 99.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_10% PDR_P50"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_10_hdrh"], 50.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_10% PDR_P90"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_10_hdrh"], 90.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_10% PDR_P99"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_10_hdrh"], 99.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_50% PDR_P50"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_50_hdrh"], 50.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_50% PDR_P90"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_50_hdrh"], 90.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_50% PDR_P99"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_50_hdrh"], 99.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_90% PDR_P50"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_90_hdrh"], 50.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_90% PDR_P90"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_90_hdrh"], 90.0),
+            axis=1
+        )
+        cov["Latency Reverse [us]_90% PDR_P99"] = df.apply(
+            lambda row: _laten(row["result_latency_reverse_pdr_90_hdrh"], 99.0),
+            axis=1
+        )
 
     if csv:
         return cov
 
-    # Split data into tabels depending on the test suite.
-    for suite in cov["suite"].unique().tolist():
-        df_suite = pd.DataFrame(cov.loc[(cov["suite"] == suite)])
-        unit = df_suite["Throughput_Unit"].tolist()[0]
-        df_suite.rename(
-            columns={
-                "Throughput_NDR": f"Throughput_NDR_M{unit}",
-                "Throughput_PDR": f"Throughput_PDR_M{unit}"
-            },
-            inplace=True
-        )
-        df_suite.drop(["suite", "Throughput_Unit"], axis=1, inplace=True)
+    # Split data into tables depending on the test suite.
+    for suite in cov["Suite"].unique().tolist():
+        df_suite = pd.DataFrame(cov.loc[(cov["Suite"] == suite)])
+
+        if ttype !="device":
+            unit = df_suite["Throughput_Unit"].tolist()[0]
+            df_suite.rename(
+                columns={
+                    "Throughput_NDR": f"Throughput_NDR_M{unit}",
+                    "Throughput_PDR": f"Throughput_PDR_M{unit}"
+                },
+                inplace=True
+            )
+            df_suite.drop(["Suite", "Throughput_Unit"], axis=1, inplace=True)
+
         l_data.append((suite, df_suite, ))
+
     return l_data
 
 
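The latency helper (renamed from _latency() to _laten(), apparently so the re-indented call sites stay within the line-length limit) decodes a compressed HDRH string as its docstring describes. A minimal sketch of such a helper using the hdrh package; the real failure handling is not visible in this hunk:

    from hdrh.histogram import HdrHistogram

    def _laten(hdrh_string: str, percentile: float) -> int:
        """Decode an HDRH string, return the latency at the given percentile."""
        try:
            # HdrHistogram.decode() parses the base64/zlib-encoded histogram.
            histogram = HdrHistogram.decode(hdrh_string)
            return histogram.get_value_at_percentile(percentile)
        except Exception:  # assumption: the real helper guards decode errors
            return None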
@@ -224,34 +235,59 @@ def coverage_tables(data: pd.DataFrame, selected: dict) -> list:
 
     accordion_items = list()
     for suite, cov_data in select_coverage_data(data, selected):
-        cols = list()
-        for idx, col in enumerate(cov_data.columns):
-            if idx == 0:
-                cols.append({
-                    "name": ["", "", col],
+        if len(cov_data.columns) == 3:  # VPP Device
+            cols = [
+                {
+                    "name": col,
                     "id": col,
                     "deletable": False,
                     "selectable": False,
                     "type": "text"
-                })
-            elif idx < 5:
-                cols.append({
-                    "name": col.split("_"),
-                    "id": col,
-                    "deletable": False,
-                    "selectable": False,
-                    "type": "numeric",
-                    "format": Format(precision=2, scheme=Scheme.fixed)
-                })
-            else:
-                cols.append({
-                    "name": col.split("_"),
-                    "id": col,
-                    "deletable": False,
-                    "selectable": False,
-                    "type": "numeric",
-                    "format": Format(precision=0, scheme=Scheme.fixed)
-                })
+                } for col in cov_data.columns
+            ]
+            style_cell={"textAlign": "left"}
+            style_cell_conditional=[
+                {
+                    "if": {"column_id": "Result"},
+                    "textAlign": "right"
+                }
+            ]
+        else:  # Performance
+            cols = list()
+            for idx, col in enumerate(cov_data.columns):
+                if idx == 0:
+                    cols.append({
+                        "name": ["", "", col],
+                        "id": col,
+                        "deletable": False,
+                        "selectable": False,
+                        "type": "text"
+                    })
+                elif idx < 5:
+                    cols.append({
+                        "name": col.split("_"),
+                        "id": col,
+                        "deletable": False,
+                        "selectable": False,
+                        "type": "numeric",
+                        "format": Format(precision=2, scheme=Scheme.fixed)
+                    })
+                else:
+                    cols.append({
+                        "name": col.split("_"),
+                        "id": col,
+                        "deletable": False,
+                        "selectable": False,
+                        "type": "numeric",
+                        "format": Format(precision=0, scheme=Scheme.fixed)
+                    })
+            style_cell={"textAlign": "right"}
+            style_cell_conditional=[
+                {
+                    "if": {"column_id": "Test Name"},
+                    "textAlign": "left"
+                }
+            ]
 
         accordion_items.append(
             dbc.AccordionItem(
@@ -267,18 +303,14 @@ def coverage_tables(data: pd.DataFrame, selected: dict) -> list:
                     selected_columns=[],
                     selected_rows=[],
                     page_action="none",
-                    style_cell={"textAlign": "right"},
-                    style_cell_conditional=[{
-                        "if": {"column_id": "Test Name"},
-                        "textAlign": "left"
-                    }]
+                    style_cell=style_cell,
+                    style_cell_conditional=style_cell_conditional
                 )
             )
         )
-
     return dbc.Accordion(
-            children=accordion_items,
-            class_name="gy-2 p-0",
-            start_collapsed=True,
-            always_open=True
-        )
+        children=accordion_items,
+        class_name="gy-1 p-0",
+        start_collapsed=True,
+        always_open=True
+    )
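coverage_tables() now branches on the table shape: a three-column frame (Suite, Test Name, Result) is VPP Device data with left-aligned text, while anything wider is performance data with stacked multi-row headers. The list-valued "name" entries rely on Dash DataTable turning them into header rows; a small self-contained sketch with made-up data:

    from dash import dash_table

    # "Throughput_NDR_Mbps".split("_") renders as a stacked three-row header
    # (Throughput / NDR / Mbps); merge_duplicate_headers joins repeated cells.
    table = dash_table.DataTable(
        columns=[
            {"name": ["", "", "Test Name"], "id": "Test Name", "type": "text"},
            {"name": "Throughput_NDR_Mbps".split("_"),
             "id": "Throughput_NDR_Mbps", "type": "numeric"}
        ],
        data=[{"Test Name": "example-test", "Throughput_NDR_Mbps": 12.34}],
        merge_duplicate_headers=True,
        style_cell={"textAlign": "right"},
        style_cell_conditional=[
            {"if": {"column_id": "Test Name"}, "textAlign": "left"}
        ]
    )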
csit.infra.dash/app/cdash/data/data.py
index c8d5907..a0d698e 100644
@@ -122,7 +122,6 @@ class Data:
     def _create_dataframe_from_parquet(
             path, partition_filter=None,
             columns=None,
-            categories=None,
             validate_schema=False,
             last_modified_begin=None,
             last_modified_end=None,
@@ -141,8 +140,6 @@ class Data:
             extracted from S3. This function MUST return a bool, True to read
             the partition or False to ignore it. Ignored if dataset=False.
         :param columns: Names of columns to read from the file(s).
-        :param categories: List of columns names that should be returned as
-            pandas.Categorical.
         :param validate_schema: Check that individual file schemas are all the
             same / compatible. Schemas within a folder prefix should all be the
             same. Disable if you have schemas that are different and want to
@@ -156,7 +153,6 @@ class Data:
         :type path: Union[str, List[str]]
         :type partition_filter: Callable[[Dict[str, str]], bool], optional
         :type columns: List[str], optional
-        :type categories: List[str], optional
         :type validate_schema: bool, optional
         :type last_modified_begin: datetime, optional
         :type last_modified_end: datetime, optional
@@ -177,7 +173,6 @@ class Data:
                 use_threads=True,
                 dataset=True,
                 columns=columns,
-                categories=categories,
                 partition_filter=partition_filter,
                 last_modified_begin=last_modified_begin,
                 last_modified_end=last_modified_end
@@ -234,7 +229,6 @@ class Data:
                 path=data_set["path"],
                 partition_filter=partition_filter,
                 columns=data_set.get("columns", None),
-                categories=data_set.get("categories", None),
                 days=time_period
             )
 
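With the categories pass-through removed, awswrangler returns ordinary (non-categorical) dtypes. A stripped-down equivalent of the wrapped read call, using the real path from data.yaml and an illustrative column subset:

    import awswrangler as wr

    df = wr.s3.read_parquet(
        path="s3://fdio-docs-s3-cloudfront-index/csit/parquet/coverage_rls2302",
        dataset=True,
        use_threads=True,
        columns=["job", "build", "passed", "test_id"],  # illustrative subset
        # Per the docstring above, the filter receives the partition
        # key/value strings and must return a bool.
        partition_filter=lambda partition: partition["test_type"] == "device"
    )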
csit.infra.dash/app/cdash/data/data.yaml
index 975241b..8beee0b 100644
@@ -7,9 +7,6 @@
     - build
     - start_time
     - duration
-  categories:
-    - job
-    - build
 - data_type: trending
   partition: test_type
   partition_name: mrr
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
     - telemetry
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: trending
   partition: test_type
   partition_name: ndrpdr
     - result_latency_forward_pdr_10_hdrh
     - result_latency_forward_pdr_0_hdrh
     - telemetry
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: trending
   partition: test_type
   partition_name: hoststack
     - telemetry
     - test_id
     - version
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - tg_type
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: mrr
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
     - result_receive_rate_rate_values
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: mrr
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
     - result_receive_rate_rate_values
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: mrr
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
     - result_receive_rate_rate_values
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: ndrpdr
     - result_latency_forward_pdr_50_unit
     - result_latency_forward_pdr_10_hdrh
     - result_latency_forward_pdr_0_hdrh
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: ndrpdr
     - result_latency_forward_pdr_50_unit
     - result_latency_forward_pdr_10_hdrh
     - result_latency_forward_pdr_0_hdrh
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: ndrpdr
     - result_latency_forward_pdr_50_unit
     - result_latency_forward_pdr_10_hdrh
     - result_latency_forward_pdr_0_hdrh
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - version
 - data_type: iterative
   partition: test_type
   partition_name: hoststack
     - telemetry
     - test_id
     - version
-  categories:
-    - job
-    - build
-    - dut_type
-    - dut_version
-    - tg_type
-    - version
 - data_type: coverage
   partition: test_type
   partition_name: ndrpdr
     - result_latency_forward_pdr_90_hdrh
     - result_latency_forward_pdr_50_hdrh
     - result_latency_forward_pdr_10_hdrh
-  categories:
+- data_type: coverage
+  partition: test_type
+  partition_name: device
+  release: rls2302
+  path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/coverage_rls2302
+  columns:
     - job
     - build
     - dut_type
     - dut_version
-    - tg_type
+    - passed
+    - test_id
     - version
-# - data_type: coverage
-#   partition: test_type
-#   partition_name: device
-#   release: rls2302
-#   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/coverage_rls2302
-#   columns:
-#     - job
-#     - build
-#     - dut_type
-#     - dut_version
-#     - start_time
-#     - passed
-#     - test_id
-#     - version
-#   categories:
-#     - job
-#     - build
-#     - dut_type
-#     - dut_version
-#     - version
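The new device entry replaces the commented-out block (note it also drops the start_time column). Since data.yaml is a top-level list of data-set specs, picking out the entry that feeds the VPP Device coverage tables is a plain filter; a sketch:

    import yaml

    with open("data.yaml") as f:  # path relative to the cdash/data package
        data_spec = yaml.safe_load(f)

    device_sets = [
        ds for ds in data_spec
        if ds["data_type"] == "coverage" and ds["partition_name"] == "device"
    ]
    for data_set in device_sets:
        print(data_set["release"], data_set["path"], data_set["columns"])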
csit.infra.dash/app/cdash/report/layout.py
index 8dbaea3..1e79b68 100644
@@ -125,7 +125,7 @@ class Layout:
             if dut == "dpdk":
                 area = "dpdk"
             else:
-                area = "-".join(lst_test_id[3:-2])
+                area = ".".join(lst_test_id[3:-2])
             suite = lst_test_id[-2].replace("2n1l-", "").replace("1n1l-", "").\
                 replace("2n-", "")
             test = lst_test_id[-1]
@@ -1312,7 +1312,7 @@ class Layout:
             Input("plot-btn-download", "n_clicks"),
             prevent_initial_call=True
         )
-        def _download_trending_data(store_sel, _):
+        def _download_iterative_data(store_sel, _):
             """Download the data
 
             :param store_sel: List of tests selected by user stored in the
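Only the callback's name changes, to match the report (iterative) context. For orientation, a hedged sketch of the shape of such a download callback; apart from plot-btn-download, the component ids and the helper are stand-ins, and "app" is assumed to be in scope:

    from dash import dcc, Input, Output

    @app.callback(
        Output("download-data", "data"),         # stand-in component id
        Input("store-selected-tests", "data"),   # stand-in store id
        Input("plot-btn-download", "n_clicks"),  # id taken from the hunk
        prevent_initial_call=True
    )
    def _download_iterative_data(store_sel, _):
        # Turn the stored selection into a DataFrame and stream it as CSV;
        # filter_iterative_data() is a hypothetical stand-in for the body.
        df = filter_iterative_data(store_sel)
        return dcc.send_data_frame(df.to_csv, "iterative_data.csv")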
csit.infra.dash/app/cdash/trending/layout.py
index 4110614..005d1dc 100644
@@ -119,7 +119,7 @@ class Layout:
             if dut == "dpdk":
                 area = "dpdk"
             else:
-                area = "-".join(lst_test[3:-2])
+                area = ".".join(lst_test[3:-2])
             suite = lst_test[-2].replace("2n1l-", "").replace("1n1l-", "").\
                 replace("2n-", "")
             test = lst_test[-1]
csit.infra.dash/app/cdash/utils/constants.py
index e9c08d3..6ab80d0 100644
@@ -124,12 +124,23 @@ class Constants:
         "lb": "Load Balancer",
         "srv6": "SRv6 Routing",
         "vm_vhost": "VMs vhost-user",
-        "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec",
-        "nfv_density-vm_vhost-chain_dot1qip4vxlan":"VNF Service Chains Tunnels",
-        "nfv_density-vm_vhost-chain": "VNF Service Chains Routing",
-        "nfv_density-dcr_memif-pipeline": "CNF Service Pipelines Routing",
-        "nfv_density-dcr_memif-chain": "CNF Service Chains Routing",
-        "hoststack": "Hoststack"
+        "nfv_density.dcr_memif.chain_ipsec": "CNF Service Chains Routing IPSec",
+        "nfv_density.vm_vhost.chain_dot1qip4vxlan":"VNF Service Chains Tunnels",
+        "nfv_density.vm_vhost.chain": "VNF Service Chains Routing",
+        "nfv_density.dcr_memif.pipeline": "CNF Service Pipelines Routing",
+        "nfv_density.dcr_memif.chain": "CNF Service Chains Routing",
+        "hoststack": "Hoststack",
+        "flow": "Flow",
+        "l2bd": "L2 Bridge Domain",
+        "crypto.ethip4": "IPSec IPv4 Routing",
+        "crypto.ethip6": "IPSec IPv6 Routing",
+        "interfaces": "Interfaces",
+        "ip4_tunnels.lisp": "IPv4 Tunnels LISP",
+        "ip6_tunnels.lisp": "IPv6 Tunnels LISP",
+        "l2patch": "L2 Patch",
+        "l2xc": "L2 Cross Connect",
+        "vm_vhost.ethip4": "VMs vhost-user IPv4 Routing",
+        "vm_vhost.ethip6": "VMs vhost-user IPv6 Routing"
     }
 
     # URL style.
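With the dot-joined keys above, resolving a human-readable label for an area derived in the layout classes becomes a plain dictionary lookup; the dictionary's attribute name is not visible in this hunk, so LABELS below is a stand-in:

    LABELS = {
        "crypto.ethip4": "IPSec IPv4 Routing",
        "l2bd": "L2 Bridge Domain"
    }

    area = "crypto.ethip4"  # as derived in the layout sketch above
    print(LABELS.get(area, area))  # falls back to the raw key if unmapped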