From: Tibor Frank Date: Wed, 12 Apr 2023 05:55:52 +0000 (+0200) Subject: C-Dash: Add coverage tables X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=commitdiff_plain;h=273821dc854ba53015e022600574655160ce1a40 C-Dash: Add coverage tables Signed-off-by: Tibor Frank Change-Id: I4f20936db02dc4b974134c676c368c479e931038 --- diff --git a/csit.infra.dash/app/cdash/__init__.py b/csit.infra.dash/app/cdash/__init__.py index 0bc8bf7a19..dd1dc20169 100644 --- a/csit.infra.dash/app/cdash/__init__.py +++ b/csit.infra.dash/app/cdash/__init__.py @@ -99,6 +99,12 @@ def init_app(): data_iterative=data["iterative"] ) + from .coverage.coverage import init_coverage + app = init_coverage( + app, + data_coverage=data["coverage"] + ) + return app diff --git a/csit.infra.dash/app/cdash/comparisons/tables.py b/csit.infra.dash/app/cdash/comparisons/tables.py index 2cafc9fa51..31e268c6f0 100644 --- a/csit.infra.dash/app/cdash/comparisons/tables.py +++ b/csit.infra.dash/app/cdash/comparisons/tables.py @@ -18,6 +18,7 @@ import pandas as pd from numpy import mean, std from copy import deepcopy + from ..utils.constants import Constants as C from ..utils.utils import relative_change_stdev diff --git a/csit.infra.dash/app/cdash/coverage/__init__.py b/csit.infra.dash/app/cdash/coverage/__init__.py new file mode 100644 index 0000000000..f0d52c25b6 --- /dev/null +++ b/csit.infra.dash/app/cdash/coverage/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) 2023 Cisco and/or its affiliates. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/csit.infra.dash/app/cdash/coverage/coverage.py b/csit.infra.dash/app/cdash/coverage/coverage.py new file mode 100644 index 0000000000..4dfd7a80de --- /dev/null +++ b/csit.infra.dash/app/cdash/coverage/coverage.py @@ -0,0 +1,50 @@ +# Copyright (c) 2023 Cisco and/or its affiliates. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Instantiate the Coverage Dash application. +""" +import dash +import pandas as pd + +from ..utils.constants import Constants as C +from .layout import Layout + + +def init_coverage( + server, + data_coverage: pd.DataFrame + ) -> dash.Dash: + """Create a Plotly Dash dashboard. + + :param server: Flask server. + :type server: Flask + :returns: Dash app server. 
+    :rtype: Dash
+    """
+
+    dash_app = dash.Dash(
+        server=server,
+        routes_pathname_prefix=C.COVERAGE_ROUTES_PATHNAME_PREFIX,
+        external_stylesheets=C.EXTERNAL_STYLESHEETS,
+        title=C.COVERAGE_TITLE
+    )
+
+    layout = Layout(
+        app=dash_app,
+        data_coverage=data_coverage,
+        html_layout_file=C.HTML_LAYOUT_FILE
+    )
+    dash_app.index_string = layout.html_layout
+    dash_app.layout = layout.add_content()
+
+    return dash_app.server
diff --git a/csit.infra.dash/app/cdash/coverage/layout.py b/csit.infra.dash/app/cdash/coverage/layout.py
new file mode 100644
index 0000000000..03d2da7fb7
--- /dev/null
+++ b/csit.infra.dash/app/cdash/coverage/layout.py
@@ -0,0 +1,663 @@
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Plotly Dash HTML layout override.
+"""
+
+
+import pandas as pd
+import dash_bootstrap_components as dbc
+
+from flask import Flask
+from dash import dcc
+from dash import html
+from dash import callback_context, no_update, ALL
+from dash import Input, Output, State
+from dash.exceptions import PreventUpdate
+from ast import literal_eval
+
+from ..utils.constants import Constants as C
+from ..utils.control_panel import ControlPanel
+from ..utils.trigger import Trigger
+from ..utils.utils import label, gen_new_url, generate_options
+from ..utils.url_processing import url_decode
+from .tables import coverage_tables, select_coverage_data
+
+
+# Control panel parameters and their default values.
+CP_PARAMS = {
+    "rls-val": str(),
+    "dut-opt": list(),
+    "dut-dis": True,
+    "dut-val": str(),
+    "dutver-opt": list(),
+    "dutver-dis": True,
+    "dutver-val": str(),
+    "phy-opt": list(),
+    "phy-dis": True,
+    "phy-val": str(),
+    "area-opt": list(),
+    "area-dis": True,
+    "area-val": str()
+}
+
+
+class Layout:
+    """The layout of the dash app and the callbacks.
+    """
+
+    def __init__(
+        self,
+        app: Flask,
+        data_coverage: pd.DataFrame,
+        html_layout_file: str
+    ) -> None:
+        """Initialization:
+        - save the input parameters,
+        - prepare data for the control panel,
+        - read the HTML layout file.
+
+        :param app: Flask application running the dash application.
+        :param data_coverage: Coverage data to be presented in the tables.
+        :param html_layout_file: Path and name of the file specifying the HTML
+            layout of the dash application.
+        :type app: Flask
+        :type data_coverage: pandas.DataFrame
+        :type html_layout_file: str
+        """
+
+        # Inputs
+        self._app = app
+        self._html_layout_file = html_layout_file
+        self._data = data_coverage
+
+        # Get structure of tests:
+        tbs = dict()
+        cols = ["job", "test_id", "dut_version", "release", ]
+        for _, row in self._data[cols].drop_duplicates().iterrows():
+            rls = row["release"]
+            lst_job = row["job"].split("-")
+            dut = lst_job[1]
+            d_ver = row["dut_version"]
+            tbed = "-".join(lst_job[-2:])
+            lst_test_id = row["test_id"].split(".")
+            if dut == "dpdk":
+                area = "dpdk"
+            else:
+                area = "-".join(lst_test_id[3:-2])
+            suite = lst_test_id[-2].replace("2n1l-", "").replace("1n1l-", "").\
+                replace("2n-", "")
+            test = lst_test_id[-1]
+            nic = suite.split("-")[0]
+            for drv in C.DRIVERS:
+                if drv in test:
+                    driver = drv.replace("-", "_")
+                    test = test.replace(f"{drv}-", "")
+                    break
+            else:
+                driver = "dpdk"
+            infra = "-".join((tbed, nic, driver))
+
+            if tbs.get(rls, None) is None:
+                tbs[rls] = dict()
+            if tbs[rls].get(dut, None) is None:
+                tbs[rls][dut] = dict()
+            if tbs[rls][dut].get(d_ver, None) is None:
+                tbs[rls][dut][d_ver] = dict()
+            if tbs[rls][dut][d_ver].get(infra, None) is None:
+                tbs[rls][dut][d_ver][infra] = list()
+            if area not in tbs[rls][dut][d_ver][infra]:
+                tbs[rls][dut][d_ver][infra].append(area)
+
+        self._spec_tbs = tbs
+
+        # Read from files:
+        self._html_layout = str()
+
+        try:
+            with open(self._html_layout_file, "r") as file_read:
+                self._html_layout = file_read.read()
+        except IOError as err:
+            raise RuntimeError(
+                f"Not possible to open the file {self._html_layout_file}\n{err}"
+            )
+
+        # Callbacks:
+        if self._app is not None and hasattr(self, "callbacks"):
+            self.callbacks(self._app)
+
+    @property
+    def html_layout(self):
+        return self._html_layout
+
+    def add_content(self):
+        """Top level method which generates the web page.
+
+        It generates:
+        - Store for user input data,
+        - Navigation bar,
+        - Main area with control panel and plotting area.
+
+        If no HTML layout is provided, an error message is displayed instead.
+
+        :returns: The HTML div with the whole page.
+        :rtype: html.Div
+        """
+
+        if self.html_layout and self._spec_tbs:
+            return html.Div(
+                id="div-main",
+                className="small",
+                children=[
+                    dbc.Row(
+                        id="row-navbar",
+                        class_name="g-0",
+                        children=[
+                            self._add_navbar()
+                        ]
+                    ),
+                    dbc.Row(
+                        id="row-main",
+                        class_name="g-0",
+                        children=[
+                            dcc.Store(id="store-selected-tests"),
+                            dcc.Store(id="store-control-panel"),
+                            dcc.Location(id="url", refresh=False),
+                            self._add_ctrl_col(),
+                            self._add_plotting_col()
+                        ]
+                    )
+                ]
+            )
+        else:
+            return html.Div(
+                id="div-main-error",
+                children=[
+                    dbc.Alert(
+                        [
+                            "An Error Occurred"
+                        ],
+                        color="danger"
+                    )
+                ]
+            )
+
+    def _add_navbar(self):
+        """Add nav element with navigation panel. It is placed at the top.
+
+        :returns: Navigation bar.
+        :rtype: dbc.NavbarSimple
+        """
+        return dbc.NavbarSimple(
+            id="navbarsimple-main",
+            children=[
+                dbc.NavItem(
+                    dbc.NavLink(
+                        C.COVERAGE_TITLE,
+                        disabled=True,
+                        external_link=True,
+                        href="#"
+                    )
+                )
+            ],
+            brand=C.BRAND,
+            brand_href="/",
+            brand_external_link=True,
+            class_name="p-2",
+            fluid=True
+        )
+
+    def _add_ctrl_col(self) -> dbc.Col:
+        """Add column with controls. It is placed on the left side.
+
+        :returns: Column with the control panel.
+        :rtype: dbc.Col
+        """
+        return dbc.Col([
+            html.Div(
+                children=self._add_ctrl_panel(),
+                className="sticky-top"
+            )
+        ])
+
+    def _add_plotting_col(self) -> dbc.Col:
+        """Add column with plots. It is placed on the right side.
+
+        :returns: Column with plots.
+ :rtype: dbc.Col + """ + return dbc.Col( + id="col-plotting-area", + children=[ + dbc.Spinner( + children=[ + dbc.Row( + id="plotting-area", + class_name="g-0 p-0", + children=[ + C.PLACEHOLDER + ] + ) + ] + ) + ], + width=9 + ) + + def _add_ctrl_panel(self) -> list: + """Add control panel. + + :returns: Control panel. + :rtype: list + """ + return [ + dbc.Row( + class_name="g-0 p-1", + children=[ + dbc.InputGroup( + [ + dbc.InputGroupText("CSIT Release"), + dbc.Select( + id={"type": "ctrl-dd", "index": "rls"}, + placeholder="Select a Release...", + options=sorted( + [ + {"label": k, "value": k} \ + for k in self._spec_tbs.keys() + ], + key=lambda d: d["label"] + ) + ) + ], + size="sm" + ) + ] + ), + dbc.Row( + class_name="g-0 p-1", + children=[ + dbc.InputGroup( + [ + dbc.InputGroupText("DUT"), + dbc.Select( + id={"type": "ctrl-dd", "index": "dut"}, + placeholder="Select a Device under Test..." + ) + ], + size="sm" + ) + ] + ), + dbc.Row( + class_name="g-0 p-1", + children=[ + dbc.InputGroup( + [ + dbc.InputGroupText("DUT Version"), + dbc.Select( + id={"type": "ctrl-dd", "index": "dutver"}, + placeholder=\ + "Select a Version of Device under Test..." + ) + ], + size="sm" + ) + ] + ), + dbc.Row( + class_name="g-0 p-1", + children=[ + dbc.InputGroup( + [ + dbc.InputGroupText("Infra"), + dbc.Select( + id={"type": "ctrl-dd", "index": "phy"}, + placeholder=\ + "Select a Physical Test Bed Topology..." + ) + ], + size="sm" + ) + ] + ), + dbc.Row( + class_name="g-0 p-1", + children=[ + dbc.InputGroup( + [ + dbc.InputGroupText("Area"), + dbc.Select( + id={"type": "ctrl-dd", "index": "area"}, + placeholder="Select an Area..." + ) + ], + size="sm" + ) + ] + ) + ] + + def _get_plotting_area(self, selected: dict, url: str) -> list: + """Generate the plotting area with all its content. + + :param selected: Selected parameters of tests. + :param url: URL to be displayed in the modal window. + :type selected: dict + :type url: str + :returns: List of rows with elements to be displayed in the plotting + area. + :rtype: list + """ + if not selected: + return C.PLACEHOLDER + + return [ + dbc.Row( + children=coverage_tables(self._data, selected), + class_name="g-0 p-0", + ), + dbc.Row( + children=C.PLACEHOLDER, + class_name="g-0 p-1" + ), + dbc.Row( + [ + dbc.Col([html.Div( + [ + dbc.Button( + id="plot-btn-url", + children="Show URL", + class_name="me-1", + color="info", + style={ + "text-transform": "none", + "padding": "0rem 1rem" + } + ), + dbc.Modal( + [ + dbc.ModalHeader(dbc.ModalTitle("URL")), + dbc.ModalBody(url) + ], + id="plot-mod-url", + size="xl", + is_open=False, + scrollable=True + ), + dbc.Button( + id="plot-btn-download", + children="Download Data", + class_name="me-1", + color="info", + style={ + "text-transform": "none", + "padding": "0rem 1rem" + } + ), + dcc.Download(id="download-iterative-data") + ], + className=\ + "d-grid gap-0 d-md-flex justify-content-md-end" + )]) + ], + class_name="g-0 p-0" + ), + dbc.Row( + children=C.PLACEHOLDER, + class_name="g-0 p-1" + ) + ] + + def callbacks(self, app): + """Callbacks for the whole application. + + :param app: The application. 
+ :type app: Flask + """ + + @app.callback( + [ + Output("store-control-panel", "data"), + Output("store-selected-tests", "data"), + Output("plotting-area", "children"), + Output({"type": "ctrl-dd", "index": "rls"}, "value"), + Output({"type": "ctrl-dd", "index": "dut"}, "options"), + Output({"type": "ctrl-dd", "index": "dut"}, "disabled"), + Output({"type": "ctrl-dd", "index": "dut"}, "value"), + Output({"type": "ctrl-dd", "index": "dutver"}, "options"), + Output({"type": "ctrl-dd", "index": "dutver"}, "disabled"), + Output({"type": "ctrl-dd", "index": "dutver"}, "value"), + Output({"type": "ctrl-dd", "index": "phy"}, "options"), + Output({"type": "ctrl-dd", "index": "phy"}, "disabled"), + Output({"type": "ctrl-dd", "index": "phy"}, "value"), + Output({"type": "ctrl-dd", "index": "area"}, "options"), + Output({"type": "ctrl-dd", "index": "area"}, "disabled"), + Output({"type": "ctrl-dd", "index": "area"}, "value"), + ], + [ + State("store-control-panel", "data"), + State("store-selected-tests", "data") + ], + [ + Input("url", "href"), + Input({"type": "ctrl-dd", "index": ALL}, "value") + ] + ) + def _update_application( + control_panel: dict, + selected: dict, + href: str, + *_ + ) -> tuple: + """Update the application when the event is detected. + """ + + ctrl_panel = ControlPanel(CP_PARAMS, control_panel) + plotting_area = no_update + on_draw = False + if selected is None: + selected = dict() + + # Parse the url: + parsed_url = url_decode(href) + if parsed_url: + url_params = parsed_url["params"] + else: + url_params = None + + trigger = Trigger(callback_context.triggered) + + if trigger.type == "url" and url_params: + try: + selected = literal_eval(url_params["selection"][0]) + except (KeyError, IndexError): + pass + if selected: + ctrl_panel.set({ + "rls-val": selected["rls"], + "dut-val": selected["dut"], + "dut-opt": generate_options( + self._spec_tbs[selected["rls"]].keys() + ), + "dut-dis": False, + "dutver-val": selected["dutver"], + "dutver-opt": generate_options( + self._spec_tbs[selected["rls"]]\ + [selected["dut"]].keys() + ), + "dutver-dis": False, + "phy-val": selected["phy"], + "phy-opt": generate_options( + self._spec_tbs[selected["rls"]][selected["dut"]]\ + [selected["dutver"]].keys() + ), + "phy-dis": False, + "area-val": selected["area"], + "area-opt": [ + {"label": label(v), "value": v} for v in sorted( + self._spec_tbs[selected["rls"]]\ + [selected["dut"]][selected["dutver"]]\ + [selected["phy"]] + ) + ], + "area-dis": False + }) + on_draw = True + elif trigger.type == "ctrl-dd": + if trigger.idx == "rls": + try: + options = generate_options( + self._spec_tbs[trigger.value].keys() + ) + disabled = False + except KeyError: + options = list() + disabled = True + ctrl_panel.set({ + "rls-val": trigger.value, + "dut-val": str(), + "dut-opt": options, + "dut-dis": disabled, + "dutver-val": str(), + "dutver-opt": list(), + "dutver-dis": True, + "phy-val": str(), + "phy-opt": list(), + "phy-dis": True, + "area-val": str(), + "area-opt": list(), + "area-dis": True + }) + elif trigger.idx == "dut": + try: + rls = ctrl_panel.get("rls-val") + dut = self._spec_tbs[rls][trigger.value] + options = generate_options(dut.keys()) + disabled = False + except KeyError: + options = list() + disabled = True + ctrl_panel.set({ + "dut-val": trigger.value, + "dutver-val": str(), + "dutver-opt": options, + "dutver-dis": disabled, + "phy-val": str(), + "phy-opt": list(), + "phy-dis": True, + "area-val": str(), + "area-opt": list(), + "area-dis": True + }) + elif trigger.idx == "dutver": + try: 
+                        rls = ctrl_panel.get("rls-val")
+                        dut = ctrl_panel.get("dut-val")
+                        dutver = self._spec_tbs[rls][dut][trigger.value]
+                        options = generate_options(dutver.keys())
+                        disabled = False
+                    except KeyError:
+                        options = list()
+                        disabled = True
+                    ctrl_panel.set({
+                        "dutver-val": trigger.value,
+                        "phy-val": str(),
+                        "phy-opt": options,
+                        "phy-dis": disabled,
+                        "area-val": str(),
+                        "area-opt": list(),
+                        "area-dis": True
+                    })
+                elif trigger.idx == "phy":
+                    try:
+                        rls = ctrl_panel.get("rls-val")
+                        dut = ctrl_panel.get("dut-val")
+                        dutver = ctrl_panel.get("dutver-val")
+                        phy = self._spec_tbs[rls][dut][dutver][trigger.value]
+                        options = [
+                            {"label": label(v), "value": v} for v in sorted(phy)
+                        ]
+                        disabled = False
+                    except KeyError:
+                        options = list()
+                        disabled = True
+                    ctrl_panel.set({
+                        "phy-val": trigger.value,
+                        "area-val": str(),
+                        "area-opt": options,
+                        "area-dis": disabled
+                    })
+                elif trigger.idx == "area":
+                    ctrl_panel.set({"area-val": trigger.value})
+                    selected = {
+                        "rls": ctrl_panel.get("rls-val"),
+                        "dut": ctrl_panel.get("dut-val"),
+                        "dutver": ctrl_panel.get("dutver-val"),
+                        "phy": ctrl_panel.get("phy-val"),
+                        "area": ctrl_panel.get("area-val"),
+                    }
+                    on_draw = True
+
+            if on_draw:
+                if selected:
+                    plotting_area = self._get_plotting_area(
+                        selected,
+                        gen_new_url(parsed_url, {"selection": selected})
+                    )
+                else:
+                    plotting_area = C.PLACEHOLDER
+                    selected = dict()
+
+            ret_val = [
+                ctrl_panel.panel,
+                selected,
+                plotting_area,
+            ]
+            ret_val.extend(ctrl_panel.values)
+            return ret_val
+
+        @app.callback(
+            Output("plot-mod-url", "is_open"),
+            [Input("plot-btn-url", "n_clicks")],
+            [State("plot-mod-url", "is_open")],
+        )
+        def toggle_plot_mod_url(n, is_open):
+            """Toggle the modal window with url.
+            """
+            if n:
+                return not is_open
+            return is_open
+
+        @app.callback(
+            Output("download-iterative-data", "data"),
+            State("store-selected-tests", "data"),
+            Input("plot-btn-download", "n_clicks"),
+            prevent_initial_call=True
+        )
+        def _download_coverage_data(selection, _):
+            """Download the data.
+
+            :param selection: Tests selected by the user and stored in the
+                browser.
+            :type selection: dict
+            :returns: dict of data frame content (base64 encoded) and meta data
+                used by the Download component.
+            :rtype: dict
+            """
+
+            if not selection:
+                raise PreventUpdate
+
+            df = select_coverage_data(self._data, selection, csv=True)
+
+            return dcc.send_data_frame(df.to_csv, C.COVERAGE_DOWNLOAD_FILE_NAME)
diff --git a/csit.infra.dash/app/cdash/coverage/tables.py b/csit.infra.dash/app/cdash/coverage/tables.py
new file mode 100644
index 0000000000..a773a2280c
--- /dev/null
+++ b/csit.infra.dash/app/cdash/coverage/tables.py
@@ -0,0 +1,287 @@
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""The coverage data tables.
+""" + +import hdrh.histogram +import hdrh.codec +import pandas as pd +import dash_bootstrap_components as dbc + +from dash import dash_table +from dash.dash_table.Format import Format, Scheme + +from ..utils.constants import Constants as C + + +def select_coverage_data( + data: pd.DataFrame, + selected: dict, + csv: bool=False + ) -> list: + """Select coverage data for the tables and generate tables as pandas data + frames. + + :param data: Coverage data. + :param selected: Dictionary with user selection. + :param csv: If True, pandas data frame with selected coverage data is + returned for "Download Data" feature. + :type data: pandas.DataFrame + :type selected: dict + :type csv: bool + :returns: List of tuples with suite name (str) and data (pandas dataframe) + or pandas dataframe if csv is True. + :rtype: list[tuple[str, pandas.DataFrame], ] or pandas.DataFrame + """ + + l_data = list() + + # Filter data selected by the user. + phy = selected["phy"].split("-") + if len(phy) == 4: + topo, arch, nic, drv = phy + drv = "" if drv == "dpdk" else drv.replace("_", "-") + else: + return l_data + + df = pd.DataFrame(data.loc[( + (data["passed"] == True) & + (data["dut_type"] == selected["dut"]) & + (data["dut_version"] == selected["dutver"]) & + (data["release"] == selected["rls"]) + )]) + df = df[ + (df.job.str.endswith(f"{topo}-{arch}")) & + (df.test_id.str.contains( + f"^.*\.{selected['area']}\..*{nic}.*{drv}.*$", + regex=True + )) + ] + if drv == "dpdk": + for driver in C.DRIVERS: + df.drop( + df[df.test_id.str.contains(f"-{driver}-")].index, + inplace=True + ) + + # Prepare the coverage data + def _latency(hdrh_string: str, percentile: float) -> int: + """Get latency from HDRH string for given percentile. + + :param hdrh_string: Encoded HDRH string. + :param percentile: Given percentile. + :type hdrh_string: str + :type percentile: float + :returns: The latency value for the given percentile from the encoded + HDRH string. + :rtype: int + """ + try: + hdr_lat = hdrh.histogram.HdrHistogram.decode(hdrh_string) + return hdr_lat.get_value_at_percentile(percentile) + except (hdrh.codec.HdrLengthException, TypeError): + return None + + def _get_suite(test_id: str) -> str: + """Get the suite name from the test ID. + """ + return test_id.split(".")[-2].replace("2n1l-", "").\ + replace("1n1l-", "").replace("2n-", "").replace("-ndrpdr", "") + + def _get_test(test_id: str) -> str: + """Get the test name from the test ID. 
+ """ + return test_id.split(".")[-1].replace("-ndrpdr", "") + + cov = pd.DataFrame() + cov["suite"] = df.apply(lambda row: _get_suite(row["test_id"]), axis=1) + cov["Test Name"] = df.apply(lambda row: _get_test(row["test_id"]), axis=1) + cov["Throughput_Unit"] = df["result_pdr_lower_rate_unit"] + cov["Throughput_NDR"] = df.apply( + lambda row: row["result_ndr_lower_rate_value"] / 1e6, axis=1 + ) + cov["Throughput_NDR_Mbps"] = df.apply( + lambda row: row["result_ndr_lower_bandwidth_value"] /1e9, axis=1 + ) + cov["Throughput_PDR"] = \ + df.apply(lambda row: row["result_pdr_lower_rate_value"] / 1e6, axis=1) + cov["Throughput_PDR_Mbps"] = df.apply( + lambda row: row["result_pdr_lower_bandwidth_value"] /1e9, axis=1 + ) + cov["Latency Forward [us]_10% PDR_P50"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_10_hdrh"], 50.0), + axis=1 + ) + cov["Latency Forward [us]_10% PDR_P90"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_10_hdrh"], 90.0), + axis=1 + ) + cov["Latency Forward [us]_10% PDR_P99"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_10_hdrh"], 99.0), + axis=1 + ) + cov["Latency Forward [us]_50% PDR_P50"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_50_hdrh"], 50.0), + axis=1 + ) + cov["Latency Forward [us]_50% PDR_P90"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_50_hdrh"], 90.0), + axis=1 + ) + cov["Latency Forward [us]_50% PDR_P99"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_50_hdrh"], 99.0), + axis=1 + ) + cov["Latency Forward [us]_90% PDR_P50"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_90_hdrh"], 50.0), + axis=1 + ) + cov["Latency Forward [us]_90% PDR_P90"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_90_hdrh"], 90.0), + axis=1 + ) + cov["Latency Forward [us]_90% PDR_P99"] = df.apply( + lambda row: _latency(row["result_latency_forward_pdr_90_hdrh"], 99.0), + axis=1 + ) + cov["Latency Reverse [us]_10% PDR_P50"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_10_hdrh"], 50.0), + axis=1 + ) + cov["Latency Reverse [us]_10% PDR_P90"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_10_hdrh"], 90.0), + axis=1 + ) + cov["Latency Reverse [us]_10% PDR_P99"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_10_hdrh"], 99.0), + axis=1 + ) + cov["Latency Reverse [us]_50% PDR_P50"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_50_hdrh"], 50.0), + axis=1 + ) + cov["Latency Reverse [us]_50% PDR_P90"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_50_hdrh"], 90.0), + axis=1 + ) + cov["Latency Reverse [us]_50% PDR_P99"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_50_hdrh"], 99.0), + axis=1 + ) + cov["Latency Reverse [us]_90% PDR_P50"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_90_hdrh"], 50.0), + axis=1 + ) + cov["Latency Reverse [us]_90% PDR_P90"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_90_hdrh"], 90.0), + axis=1 + ) + cov["Latency Reverse [us]_90% PDR_P99"] = df.apply( + lambda row: _latency(row["result_latency_reverse_pdr_90_hdrh"], 99.0), + axis=1 + ) + + if csv: + return cov + + # Split data into tabels depending on the test suite. 
+ for suite in cov["suite"].unique().tolist(): + df_suite = pd.DataFrame(cov.loc[(cov["suite"] == suite)]) + unit = df_suite["Throughput_Unit"].tolist()[0] + df_suite.rename( + columns={ + "Throughput_NDR": f"Throughput_NDR_M{unit}", + "Throughput_PDR": f"Throughput_PDR_M{unit}" + }, + inplace=True + ) + df_suite.drop(["suite", "Throughput_Unit"], axis=1, inplace=True) + l_data.append((suite, df_suite, )) + return l_data + + +def coverage_tables(data: pd.DataFrame, selected: dict) -> list: + """Generate an accordion with coverage tables. + + :param data: Coverage data. + :param selected: Dictionary with user selection. + :type data: pandas.DataFrame + :type selected: dict + :returns: Accordion with suite names (titles) and tables. + :rtype: dash_bootstrap_components.Accordion + """ + + accordion_items = list() + for suite, cov_data in select_coverage_data(data, selected): + cols = list() + for idx, col in enumerate(cov_data.columns): + if idx == 0: + cols.append({ + "name": ["", "", col], + "id": col, + "deletable": False, + "selectable": False, + "type": "text" + }) + elif idx < 5: + cols.append({ + "name": col.split("_"), + "id": col, + "deletable": False, + "selectable": False, + "type": "numeric", + "format": Format(precision=2, scheme=Scheme.fixed) + }) + else: + cols.append({ + "name": col.split("_"), + "id": col, + "deletable": False, + "selectable": False, + "type": "numeric", + "format": Format(precision=0, scheme=Scheme.fixed) + }) + + accordion_items.append( + dbc.AccordionItem( + title=suite, + children=dash_table.DataTable( + columns=cols, + data=cov_data.to_dict("records"), + merge_duplicate_headers=True, + editable=True, + filter_action="none", + sort_action="native", + sort_mode="multi", + selected_columns=[], + selected_rows=[], + page_action="none", + style_cell={"textAlign": "right"}, + style_cell_conditional=[{ + "if": {"column_id": "Test Name"}, + "textAlign": "left" + }] + ) + ) + ) + + return dbc.Accordion( + children=accordion_items, + class_name="gy-2 p-0", + start_collapsed=True, + always_open=True + ) diff --git a/csit.infra.dash/app/cdash/data/data.py b/csit.infra.dash/app/cdash/data/data.py index 8537cd8db1..c8d5907200 100644 --- a/csit.infra.dash/app/cdash/data/data.py +++ b/csit.infra.dash/app/cdash/data/data.py @@ -48,7 +48,12 @@ class Data: self._data_spec = list() # Data frame to keep the data: - self._data = pd.DataFrame() + self._data = { + "statistics": pd.DataFrame(), + "trending": pd.DataFrame(), + "iterative": pd.DataFrame(), + "coverage": pd.DataFrame() + } # Read from files: try: @@ -69,11 +74,13 @@ class Data: def data(self): return self._data - def _get_list_of_files(self, - path, - last_modified_begin=None, - last_modified_end=None, - days=None) -> list: + @staticmethod + def _get_list_of_files( + path, + last_modified_begin=None, + last_modified_end=None, + days=None + ) -> list: """Get list of interested files stored in S3 compatible storage and returns it. @@ -111,11 +118,11 @@ class Data: return file_list + @staticmethod def _create_dataframe_from_parquet( - self, path, partition_filter=None, columns=None, - categories=list(), + categories=None, validate_schema=False, last_modified_begin=None, last_modified_end=None, @@ -157,7 +164,7 @@ class Data: :returns: Pandas DataFrame or None if DataFrame cannot be fetched. 
:rtype: DataFrame """ - df = None + df = pd.DataFrame() start = time() if days: last_modified_begin = datetime.now(tz=UTC) - timedelta(days=days) @@ -170,7 +177,7 @@ class Data: use_threads=True, dataset=True, columns=columns, - # categories=categories, + categories=categories, partition_filter=partition_filter, last_modified_begin=last_modified_begin, last_modified_end=last_modified_end @@ -180,9 +187,19 @@ class Data: f"\nCreation of dataframe {path} took: {time() - start}\n" ) except NoFilesFound as err: - logging.error(f"No parquets found.\n{err}") + logging.error( + f"No parquets found in specified time period.\n" + f"Nr of days: {days}\n" + f"last_modified_begin: {last_modified_begin}\n" + f"{err}" + ) except EmptyDataFrame as err: - logging.error(f"No data.\n{err}") + logging.error( + f"No data in parquets in specified time period.\n" + f"Nr of days: {days}\n" + f"last_modified_begin: {last_modified_begin}\n" + f"{err}" + ) return df @@ -197,11 +214,9 @@ class Data: :rtype: dict(str: pandas.DataFrame) """ - self._data = dict() - self._data["trending"] = pd.DataFrame() - self._data["iterative"] = pd.DataFrame() lst_trending = list() lst_iterative = list() + lst_coverage = list() for data_set in self._data_spec: logging.info( @@ -211,13 +226,16 @@ class Data: partition_filter = lambda part: True \ if part[data_set["partition"]] == data_set["partition_name"] \ else False - - data = self._create_dataframe_from_parquet( + if data_set["data_type"] in ("trending", "statistics"): + time_period = days + else: + time_period = None + data = Data._create_dataframe_from_parquet( path=data_set["path"], partition_filter=partition_filter, - columns=data_set.get("columns", list()), - categories=data_set.get("categories", list()), - days=None if data_set["data_type"] == "iterative" else days + columns=data_set.get("columns", None), + categories=data_set.get("categories", None), + days=time_period ) if data_set["data_type"] == "statistics": @@ -228,6 +246,10 @@ class Data: data["release"] = data_set["release"] data["release"] = data["release"].astype("category") lst_iterative.append(data) + elif data_set["data_type"] == "coverage": + data["release"] = data_set["release"] + data["release"] = data["release"].astype("category") + lst_coverage.append(data) else: raise NotImplementedError( f"The data type {data_set['data_type']} is not implemented." 
@@ -243,6 +265,11 @@ class Data: ignore_index=True, copy=False ) + self._data["coverage"] = pd.concat( + lst_coverage, + ignore_index=True, + copy=False + ) for key in self._data.keys(): logging.info( diff --git a/csit.infra.dash/app/cdash/data/data.yaml b/csit.infra.dash/app/cdash/data/data.yaml index 4e78323fe7..975241b84e 100644 --- a/csit.infra.dash/app/cdash/data/data.yaml +++ b/csit.infra.dash/app/cdash/data/data.yaml @@ -90,7 +90,6 @@ - passed - telemetry - test_id - - test_type - version categories: - job @@ -301,14 +300,41 @@ - passed - telemetry - test_id - - test_type - version categories: - job - build - dut_type - dut_version + - tg_type + - version +- data_type: coverage + partition: test_type + partition_name: ndrpdr + release: rls2302 + path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/coverage_rls2302 + columns: + - job + - build + - dut_type + - dut_version + - tg_type + - hosts + - start_time + - passed + - test_id - version + - result_pdr_lower_rate_unit + - result_pdr_lower_rate_value + - result_ndr_lower_rate_value + - result_pdr_lower_bandwidth_value + - result_ndr_lower_bandwidth_value + - result_latency_reverse_pdr_90_hdrh + - result_latency_reverse_pdr_50_hdrh + - result_latency_reverse_pdr_10_hdrh + - result_latency_forward_pdr_90_hdrh + - result_latency_forward_pdr_50_hdrh + - result_latency_forward_pdr_10_hdrh categories: - job - build @@ -316,3 +342,23 @@ - dut_version - tg_type - version +# - data_type: coverage +# partition: test_type +# partition_name: device +# release: rls2302 +# path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/coverage_rls2302 +# columns: +# - job +# - build +# - dut_type +# - dut_version +# - start_time +# - passed +# - test_id +# - version +# categories: +# - job +# - build +# - dut_type +# - dut_version +# - version diff --git a/csit.infra.dash/app/cdash/routes.py b/csit.infra.dash/app/cdash/routes.py index 71c13edd6d..301738c643 100644 --- a/csit.infra.dash/app/cdash/routes.py +++ b/csit.infra.dash/app/cdash/routes.py @@ -32,5 +32,6 @@ def home(): report_title=C.REPORT_TITLE, comp_title=C.COMP_TITLE, stats_title=C.STATS_TITLE, - news_title=C.NEWS_TITLE + news_title=C.NEWS_TITLE, + cov_title=C.COVERAGE_TITLE ) diff --git a/csit.infra.dash/app/cdash/templates/base_layout.jinja2 b/csit.infra.dash/app/cdash/templates/base_layout.jinja2 index b799bda3a1..72504804d6 100644 --- a/csit.infra.dash/app/cdash/templates/base_layout.jinja2 +++ b/csit.infra.dash/app/cdash/templates/base_layout.jinja2 @@ -35,6 +35,12 @@ {{ comp_title }}
           </a>
         </li>
+        <li class="nav-item">
+          <a class="nav-link" href="/coverage/">
+            {{ cov_title }}
+          </a>
+        </li>
         <li class="nav-item">
           <a class="nav-link" href="/stats/">
{{ stats_title }} diff --git a/csit.infra.dash/app/cdash/utils/constants.py b/csit.infra.dash/app/cdash/utils/constants.py index a9c7c455fd..e9c08d36e3 100644 --- a/csit.infra.dash/app/cdash/utils/constants.py +++ b/csit.infra.dash/app/cdash/utils/constants.py @@ -117,9 +117,11 @@ class Constants: "container_memif": "LXC/DRC Container Memif", "crypto": "IPSec IPv4 Routing", "ip4": "IPv4 Routing", - "ip6": "IPv6 Routing", "ip4_tunnels": "IPv4 Tunnels", + "ip6": "IPv6 Routing", + "ip6_tunnels": "IPv6 Tunnels", "l2": "L2 Ethernet Switching", + "lb": "Load Balancer", "srv6": "SRv6 Routing", "vm_vhost": "VMs vhost-user", "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec", @@ -336,3 +338,17 @@ class Constants: # Default name of downloaded file with selected data. TREND_DOWNLOAD_FILE_NAME = "trending_data.csv" + + ############################################################################ + # Coverage data. + + # The title. + COVERAGE_TITLE = "Per Release Coverage Data" + + # The pathname prefix for the application. + COVERAGE_ROUTES_PATHNAME_PREFIX = "/coverage/" + + # Default name of downloaded file with selected data. + COVERAGE_DOWNLOAD_FILE_NAME = "coverage_data.csv" + + ############################################################################ \ No newline at end of file
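---

Note on the selection round-trip: the whole coverage page is driven by a small
selection dictionary that _update_application() stores in the browser and that
gen_new_url() embeds in the page URL, so a bookmarked link reproduces the same
tables. A minimal standard-library sketch of that flow follows; the concrete
values (release, DUT version, testbed, NIC, driver) are illustrative only, and
gen_new_url() / url_decode() from cdash.utils are approximated with urllib.

    from ast import literal_eval
    from urllib.parse import parse_qs, urlencode, urlparse

    # Keys mirror the control-panel dropdowns in coverage/layout.py.
    selected = {
        "rls": "rls2302",            # CSIT release
        "dut": "vpp",                # device under test
        "dutver": "23.02-release",   # DUT version (illustrative)
        "phy": "2n-icx-e810cq-avf",  # topology-arch-NIC-driver
        "area": "ip4",               # test area
    }

    # gen_new_url() embeds the dict as the "selection" query parameter:
    url = f"/coverage/?{urlencode({'selection': str(selected)})}"

    # _update_application() recovers it with ast.literal_eval():
    params = parse_qs(urlparse(url).query)
    assert literal_eval(params["selection"][0]) == selected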