1 # Copyright (c) 2023 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """The coverage data tables.
21 import dash_bootstrap_components as dbc
23 from dash import dash_table
24 from dash.dash_table.Format import Format, Scheme
26 from ..utils.constants import Constants as C
def select_coverage_data(
    show_latency: bool=True
    """Select coverage data for the tables and generate tables as pandas data

    :param data: Coverage data.
    :param selected: Dictionary with user selection.
    :param csv: If True, pandas data frame with selected coverage data is
        returned for "Download Data" feature.
    :param show_latency: If True, latency is displayed in the tables.
    :type data: pandas.DataFrame
    :type show_latency: bool
    :returns: List of tuples with suite name (str) and data (pandas dataframe)
        or pandas dataframe if csv is True.
    :rtype: list[tuple[str, pandas.DataFrame], ] or pandas.DataFrame
    """
    # Filter data selected by the user.
    # NOTE(review): "phy" looks like "<topo>-<arch>-<nic>-<driver>" since it
    # is unpacked into exactly four parts below -- confirm against callers.
    phy = selected["phy"].split("-")
    topo, arch, nic, drv = phy
    # DPDK tests carry no driver tag in the test ID, hence the empty match
    # string; other drivers use "-" instead of "_" inside test IDs.
    drv_str = "" if drv == "dpdk" else drv.replace("_", "-")
    # Keep only passed tests for the selected DUT, DUT version and release.
    df = pd.DataFrame(data.loc[(
        (data["passed"] == True) &
        (data["dut_type"] == selected["dut"]) &
        (data["dut_version"] == selected["dutver"]) &
        (data["release"] == selected["rls"])
        # Narrow further to the selected testbed (job suffix) and to test IDs
        # matching the selected area, NIC and driver.
        (df.job.str.endswith(f"{topo}-{arch}")) &
        (df.test_id.str.contains(
            # NOTE(review): consider an rf-string here; "\." inside a non-raw
            # literal is an invalid escape sequence (SyntaxWarning in 3.12+).
            f"^.*\.{selected['area']}\..*{nic}.*{drv_str}.*$",
    # Drop rows tagged with any known driver other than the selected one.
    for driver in C.DRIVERS:
        df[df.test_id.str.contains(f"-{driver}-")].index,
    # After filtering, all remaining rows presumably share one test type
    # (e.g. "device", "mrr", "ndrpdr") -- only the first row is consulted.
    ttype = df["test_type"].to_list()[0]

    # Prepare the coverage data
    def _latency(hdrh_string: str, percentile: float) -> int:
        """Get latency from HDRH string for given percentile.

        :param hdrh_string: Encoded HDRH string.
        :param percentile: Given percentile.
        :type hdrh_string: str
        :type percentile: float
        :returns: The latency value for the given percentile from the encoded
            HDRH string.
        :rtype: int
        """
            hdr_lat = hdrh.histogram.HdrHistogram.decode(hdrh_string)
            return hdr_lat.get_value_at_percentile(percentile)
        # TypeError covers rows where the HDRH field is missing/None;
        # HdrLengthException covers malformed encoded strings.
        except (hdrh.codec.HdrLengthException, TypeError):

    def _get_suite(test_id: str) -> str:
        """Get the suite name from the test ID.

        :param test_id: Dotted test ID.
        :type test_id: str
        :returns: Suite name without topology prefixes and type suffix.
        :rtype: str
        """
        # Suite is the second-to-last dotted component; strip the topology
        # prefixes ("2n1l-", "1n1l-", "2n-") and the "-ndrpdr" suffix.
        return test_id.split(".")[-2].replace("2n1l-", "").\
            replace("1n1l-", "").replace("2n-", "").replace("-ndrpdr", "")

    def _get_test(test_id: str) -> str:
        """Get the test name from the test ID.

        :param test_id: Dotted test ID.
        :type test_id: str
        :returns: Test name (last dotted component, type suffix stripped).
        :rtype: str
        """
        return test_id.split(".")[-1].replace("-ndrpdr", "")

    # Build the presentation data frame column by column.
    cov["Suite"] = df.apply(lambda row: _get_suite(row["test_id"]), axis=1)
    cov["Test Name"] = df.apply(lambda row: _get_test(row["test_id"]), axis=1)

    if ttype == "device":
        # VPP Device tests carry no performance numbers, only pass/fail;
        # everything surviving the "passed" filter above is a PASS.
        cov = cov.assign(Result="PASS")
        # MRR branch: receive-rate average and stdev, scaled by 1e9
        # (presumably to Gpps/Gbps depending on the unit column).
        cov["Throughput_Unit"] = df["result_receive_rate_rate_unit"]
        cov["Throughput_AVG"] = df.apply(
            lambda row: row["result_receive_rate_rate_avg"] / 1e9, axis=1
        cov["Throughput_STDEV"] = df.apply(
            lambda row: row["result_receive_rate_rate_stdev"] / 1e9, axis=1
        # NDRPDR branch: NDR/PDR rates scaled by 1e6 and bandwidths by 1e9.
        cov["Throughput_Unit"] = df["result_pdr_lower_rate_unit"]
        cov["Throughput_NDR"] = df.apply(
            lambda row: row["result_ndr_lower_rate_value"] / 1e6, axis=1
        cov["Throughput_NDR_Gbps"] = df.apply(
            lambda row: row["result_ndr_lower_bandwidth_value"] / 1e9, axis=1
        cov["Throughput_PDR"] = df.apply(
            lambda row: row["result_pdr_lower_rate_value"] / 1e6, axis=1
        cov["Throughput_PDR_Gbps"] = df.apply(
            lambda row: row["result_pdr_lower_bandwidth_value"] / 1e9, axis=1
        # Latency columns: both directions, at 10/50/90% of PDR load, with
        # P50/P90/P99 percentiles decoded from the per-row HDRH strings.
        # Underscores in the column name mark multi-row header levels.
        for way in ("Forward", "Reverse"):
            for pdr in (10, 50, 90):
                for perc in (50, 90, 99):
                    latency = f"result_latency_{way.lower()}_pdr_{pdr}_hdrh"
                    cov[f"Latency {way} [us]_{pdr}% PDR_P{perc}"] = \
                        lambda row: _latency(row[latency], perc),

    # Split data into tables depending on the test suite.
    for suite in cov["Suite"].unique().tolist():
        df_suite = pd.DataFrame(cov.loc[(cov["Suite"] == suite)])
        # All rows of one suite share the same unit; rename the generic
        # throughput columns so the concrete unit shows in the header.
        unit = df_suite["Throughput_Unit"].tolist()[0]
                "Throughput_NDR": f"Throughput_NDR_M{unit}",
                "Throughput_PDR": f"Throughput_PDR_M{unit}",
                "Throughput_AVG": f"Throughput_G{unit}_AVG",
                "Throughput_STDEV": f"Throughput_G{unit}_STDEV"
        # Helper columns are not shown to the user.
        df_suite.drop(["Suite", "Throughput_Unit"], axis=1, inplace=True)
        l_data.append((suite, df_suite, ))
    show_latency: bool=True
    """Generate an accordion with coverage tables.

    :param data: Coverage data.
    :param selected: Dictionary with user selection.
    :param show_latency: If True, latency is displayed in the tables.
    :type data: pandas.DataFrame
    :type show_latency: bool
    :returns: Accordion with suite names (titles) and tables.
    :rtype: dash_bootstrap_components.Accordion
    """
    accordion_items = list()
    # Selection yields per-suite (name, dataframe) tuples plus the test type,
    # which drives the table layout chosen below.
        select_coverage_data(data, selected, show_latency=show_latency)
    # One accordion item (suite title + DataTable) per test suite.
    for suite, cov_data in sel_data:
        if ttype == "device": # VPP Device
            # Device tables: flat single-row headers, left-aligned text,
            # with a conditional style keyed on the "Result" column.
                } for col in cov_data.columns
            style_cell={"textAlign": "left"}
            style_cell_conditional=[
                    "if": {"column_id": "Result"},
        elif ttype == "mrr": # MRR
            # MRR tables: multi-row headers built by splitting column names
            # on "_", numeric cells fixed to 2 decimals, right-aligned.
            for idx, col in enumerate(cov_data.columns):
                    "name": ["", "", col],
                    "name": col.split("_"),
                    "format": Format(precision=2, scheme=Scheme.fixed)
            style_cell={"textAlign": "right"}
            style_cell_conditional=[
                    "if": {"column_id": "Test Name"},
        else: # Performance NDRPDR
            # NDRPDR tables: throughput columns with 2 decimals, latency
            # columns with 0 decimals (integer microseconds), right-aligned.
            for idx, col in enumerate(cov_data.columns):
                    "name": ["", "", col],
                    "name": col.split("_"),
                    "format": Format(precision=2, scheme=Scheme.fixed)
                    "name": col.split("_"),
                    "format": Format(precision=0, scheme=Scheme.fixed)
            style_cell={"textAlign": "right"}
            style_cell_conditional=[
                    "if": {"column_id": "Test Name"},
        # Wrap the suite's rows in a read-only, natively sortable DataTable;
        # merge_duplicate_headers collapses the split "_" header levels.
        accordion_items.append(
                children=dash_table.DataTable(
                    data=cov_data.to_dict("records"),
                    merge_duplicate_headers=True,
                    filter_action="none",
                    sort_action="native",
                    style_cell=style_cell,
                    style_cell_conditional=style_cell_conditional
    # All items start collapsed; the user expands one suite at a time.
    return dbc.Accordion(
        children=accordion_items,
        class_name="gy-1 p-0",
        start_collapsed=True,