# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The coverage data tables.
"""
import hdrh.codec
import hdrh.histogram
import pandas as pd
import dash_bootstrap_components as dbc

from dash import dash_table
from dash.dash_table.Format import Format, Scheme

from ..utils.constants import Constants as C
def select_coverage_data(
        data: pd.DataFrame,
        selected: dict,
        csv: bool = False
    ) -> list:
    """Select coverage data for the tables and generate tables as pandas data
    frames.

    NOTE(review): This block arrived as a numbered listing with lines dropped;
    the gaps were reconstructed around the surviving lines - verify against
    the upstream repository.

    :param data: Coverage data.
    :param selected: Dictionary with user selection.
    :param csv: If True, pandas data frame with selected coverage data is
        returned for "Download Data" feature.
    :type data: pandas.DataFrame
    :type selected: dict
    :type csv: bool
    :returns: List of tuples with suite name (str) and data (pandas dataframe)
        or pandas dataframe if csv is True.
    :rtype: list[tuple[str, pandas.DataFrame], ] or pandas.DataFrame
    """

    l_data = list()

    # Filter data selected by the user.
    phy = selected["phy"].split("-")
    if len(phy) == 4:
        topo, arch, nic, drv = phy
        # The default (dpdk) driver is not encoded in test names; any other
        # driver appears with dashes instead of underscores.
        drv = "" if drv == "dpdk" else drv.replace("_", "-")
    else:
        return l_data

    # Only passed tests of the selected DUT, DUT version and release.
    # "== True" is pandas element-wise comparison, not an identity test.
    df = pd.DataFrame(data.loc[(
        (data["passed"] == True) &
        (data["dut_type"] == selected["dut"]) &
        (data["dut_version"] == selected["dutver"]) &
        (data["release"] == selected["rls"])
    )])
    # Narrow down to the selected testbed topology/architecture, test area,
    # NIC and driver.
    df = df[
        (df.job.str.endswith(f"{topo}-{arch}")) &
        (df.test_id.str.contains(
            f"^.*\.{selected['area']}\..*{nic}.*{drv}.*$",
            regex=True
        ))
    ]
    if drv == "":
        # Tests with the default driver carry no driver substring in the test
        # ID, so drop every test that names an explicit driver.
        for driver in C.DRIVERS:
            df.drop(
                df[df.test_id.str.contains(f"-{driver}-")].index,
                inplace=True
            )

    # Guard added in review: an empty selection would otherwise raise
    # IndexError on the [0] below.
    if df.empty:
        return pd.DataFrame() if csv else l_data

    # All tests in one selection share the same test type.
    ttype = df["test_type"].to_list()[0]

    # Prepare the coverage data
    def _laten(hdrh_string: str, percentile: float) -> int:
        """Get latency from HDRH string for given percentile.

        :param hdrh_string: Encoded HDRH string.
        :param percentile: Given percentile.
        :type hdrh_string: str
        :type percentile: float
        :returns: The latency value for the given percentile from the encoded
            HDRH string.
        :rtype: int
        """
        try:
            hdr_lat = hdrh.histogram.HdrHistogram.decode(hdrh_string)
            return hdr_lat.get_value_at_percentile(percentile)
        except (hdrh.codec.HdrLengthException, TypeError):
            # Missing or corrupted HDRH data: no value to present.
            return None

    def _get_suite(test_id: str) -> str:
        """Get the suite name from the test ID.
        """
        return test_id.split(".")[-2].replace("2n1l-", "").\
            replace("1n1l-", "").replace("2n-", "").replace("-ndrpdr", "")

    def _get_test(test_id: str) -> str:
        """Get the test name from the test ID.
        """
        return test_id.split(".")[-1].replace("-ndrpdr", "")

    cov = pd.DataFrame()
    cov["Suite"] = df.apply(lambda row: _get_suite(row["test_id"]), axis=1)
    cov["Test Name"] = df.apply(lambda row: _get_test(row["test_id"]), axis=1)

    if ttype == "device":
        cov = cov.assign(Result="PASS")
    else:
        cov["Throughput_Unit"] = df["result_pdr_lower_rate_unit"]
        cov["Throughput_NDR"] = df.apply(
            lambda row: row["result_ndr_lower_rate_value"] / 1e6, axis=1
        )
        cov["Throughput_NDR_Mbps"] = df.apply(
            lambda row: row["result_ndr_lower_bandwidth_value"] / 1e9, axis=1
        )
        cov["Throughput_PDR"] = df.apply(
            lambda row: row["result_pdr_lower_rate_value"] / 1e6, axis=1
        )
        cov["Throughput_PDR_Mbps"] = df.apply(
            lambda row: row["result_pdr_lower_bandwidth_value"] / 1e9, axis=1
        )
        # Latency percentiles (P50/P90/P99) for both directions at 10/50/90 %
        # of PDR load. The originally hand-written 18 apply blocks are folded
        # into one loop; column order (Forward then Reverse, loads ascending,
        # percentiles ascending) is preserved. Defaults on the lambda bind the
        # loop variables eagerly (avoids the late-binding closure pitfall).
        for direction in ("Forward", "Reverse"):
            d_key = direction.lower()
            for load in (10, 50, 90):
                hdrh_col = f"result_latency_{d_key}_pdr_{load}_hdrh"
                for ptile in (50.0, 90.0, 99.0):
                    col = f"Latency {direction} [us]_{load}% PDR_P{int(ptile)}"
                    cov[col] = df.apply(
                        lambda row, c=hdrh_col, p=ptile: _laten(row[c], p),
                        axis=1
                    )

    if csv:
        return cov

    # Split data into tables depending on the test suite.
    for suite in cov["Suite"].unique().tolist():
        df_suite = pd.DataFrame(cov.loc[(cov["Suite"] == suite)])

        if ttype != "device":
            # Put the real throughput unit (e.g. pps, cps) into the column
            # names so the table headers are self-describing.
            unit = df_suite["Throughput_Unit"].tolist()[0]
            df_suite.rename(
                columns={
                    "Throughput_NDR": f"Throughput_NDR_M{unit}",
                    "Throughput_PDR": f"Throughput_PDR_M{unit}"
                },
                inplace=True
            )
            df_suite.drop(["Suite", "Throughput_Unit"], axis=1, inplace=True)

        l_data.append((suite, df_suite, ))

    return l_data
def coverage_tables(data: pd.DataFrame, selected: dict) -> list:
    """Generate an accordion with coverage tables.

    NOTE(review): This block arrived as a numbered listing with lines dropped;
    the gaps (AccordionItem/DataTable options) were reconstructed around the
    surviving lines - verify against the upstream repository.

    :param data: Coverage data.
    :param selected: Dictionary with user selection.
    :type data: pandas.DataFrame
    :type selected: dict
    :returns: Accordion with suite names (titles) and tables.
    :rtype: dash_bootstrap_components.Accordion
    """

    accordion_items = list()
    for suite, cov_data in select_coverage_data(data, selected):
        if len(cov_data.columns) == 3:  # VPP Device
            # Single header row; all three columns are plain text.
            cols = [
                {
                    "name": col,
                    "id": col,
                    "deletable": False,
                    "selectable": False,
                    "type": "text"
                } for col in cov_data.columns
            ]
            style_cell = {"textAlign": "left"}
            style_cell_conditional = [
                {
                    "if": {"column_id": "Result"},
                    "textAlign": "right"
                }
            ]
        else:  # Performance
            cols = list()
            for idx, col in enumerate(cov_data.columns):
                if idx == 0:
                    # "Test Name" - text column under a three-row header.
                    cols.append({
                        "name": ["", "", col],
                        "id": col,
                        "deletable": False,
                        "selectable": False,
                        "type": "text"
                    })
                elif idx < 5:
                    # Throughput columns - two decimal places.
                    cols.append({
                        "name": col.split("_"),
                        "id": col,
                        "deletable": False,
                        "selectable": False,
                        "type": "numeric",
                        "format": Format(precision=2, scheme=Scheme.fixed)
                    })
                else:
                    # Latency columns - integer microseconds.
                    cols.append({
                        "name": col.split("_"),
                        "id": col,
                        "deletable": False,
                        "selectable": False,
                        "type": "numeric",
                        "format": Format(precision=0, scheme=Scheme.fixed)
                    })
            style_cell = {"textAlign": "right"}
            style_cell_conditional = [
                {
                    "if": {"column_id": "Test Name"},
                    "textAlign": "left"
                }
            ]

        accordion_items.append(
            dbc.AccordionItem(
                title=suite,
                children=dash_table.DataTable(
                    columns=cols,
                    data=cov_data.to_dict("records"),
                    merge_duplicate_headers=True,
                    # Options below were in the dropped lines - reconstructed,
                    # confirm against upstream.
                    editable=False,
                    filter_action="none",
                    sort_action="native",
                    sort_mode="multi",
                    page_action="none",
                    style_cell=style_cell,
                    style_cell_conditional=style_cell_conditional
                )
            )
        )
    return dbc.Accordion(
        children=accordion_items,
        class_name="gy-1 p-0",
        start_collapsed=True,
        always_open=True
    )