# C-Dash: Add search in tests
# [csit.git] / csit.infra.dash / app / cdash / coverage / tables.py
1 # Copyright (c) 2024 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
5 #
6 #     http://www.apache.org/licenses/LICENSE-2.0
7 #
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
13
14 """The coverage data tables.
15 """
16
17
18 import hdrh.histogram
19 import hdrh.codec
20 import pandas as pd
21 import dash_bootstrap_components as dbc
22
23 from dash import dash_table
24 from dash.dash_table.Format import Format, Scheme
25
26 from ..utils.constants import Constants as C
27
28
def select_coverage_data(
        data: pd.DataFrame,
        selected: dict,
        csv: bool = False,
        show_latency: bool = True
    ) -> tuple:
    """Select coverage data for the tables and generate tables as pandas data
    frames.

    :param data: Coverage data.
    :param selected: Dictionary with user selection.
    :param csv: If True, pandas data frame with selected coverage data is
        returned for "Download Data" feature.
    :param show_latency: If True, latency is displayed in the tables.
    :type data: pandas.DataFrame
    :type selected: dict
    :type csv: bool
    :type show_latency: bool
    :returns: Tuple of a list of tuples (suite name, data frame) and the test
        type; if csv is True, only the selected coverage data is returned as a
        pandas data frame. On invalid selection or no matching data, the tuple
        (empty list, None) is returned.
    :rtype: tuple[list[tuple[str, pandas.DataFrame]], str] or pandas.DataFrame
    """

    l_data = list()

    # The "phy" selection must be a full "topo-arch-nic-driver" string,
    # otherwise there is nothing to filter on.
    phy = selected["phy"].split("-")
    if len(phy) != 4:
        return l_data, None
    topo, arch, nic, drv = phy
    # "dpdk" is the implicit default driver and is not encoded in test IDs.
    drv_str = "" if drv == "dpdk" else drv.replace("_", "-")

    # Filter data selected by the user.
    df = pd.DataFrame(data.loc[(
        (data["passed"] == True) &
        (data["dut_type"] == selected["dut"]) &
        (data["dut_version"] == selected["dutver"]) &
        (data["release"] == selected["rls"])
    )])
    # Raw string: the pattern contains regex escapes ("\.") which must not be
    # interpreted as (invalid) string escape sequences.
    df = df[
        (df.job.str.endswith(f"{topo}-{arch}")) &
        (df.test_id.str.contains(
            rf"^.*\.{selected['area']}\..*{nic}.*{drv_str}.*$",
            regex=True
        ))
    ]
    if drv == "dpdk":
        # The default driver has no marker in the test ID, so drop all tests
        # that explicitly name any other driver.
        for driver in C.DRIVERS:
            df.drop(
                df[df.test_id.str.contains(f"-{driver}-")].index,
                inplace=True
            )
    try:
        ttype = df["test_type"].to_list()[0]
    except IndexError:
        # Nothing left after filtering.
        return l_data, None

    # Prepare the coverage data
    def _latency(hdrh_string: str, percentile: float) -> int:
        """Get latency from HDRH string for given percentile.

        :param hdrh_string: Encoded HDRH string.
        :param percentile: Given percentile.
        :type hdrh_string: str
        :type percentile: float
        :returns: The latency value for the given percentile from the encoded
            HDRH string, or None if the string cannot be decoded.
        :rtype: int
        """
        try:
            hdr_lat = hdrh.histogram.HdrHistogram.decode(hdrh_string)
            return hdr_lat.get_value_at_percentile(percentile)
        except (hdrh.codec.HdrLengthException, TypeError):
            return None

    def _get_suite(test_id: str) -> str:
        """Get the suite name from the test ID.
        """
        return test_id.split(".")[-2].replace("2n1l-", "").\
            replace("1n1l-", "").replace("2n-", "").replace("-ndrpdr", "")

    def _get_test(test_id: str) -> str:
        """Get the test name from the test ID.
        """
        return test_id.split(".")[-1].replace("-ndrpdr", "")

    cov = pd.DataFrame()
    cov["Suite"] = df["test_id"].apply(_get_suite)
    cov["Test Name"] = df["test_id"].apply(_get_test)

    if ttype == "device":
        cov = cov.assign(Result="PASS")
    elif ttype == "mrr":
        cov["Throughput_Unit"] = df["result_receive_rate_rate_unit"]
        # Scale rates to Giga-units for readability.
        cov["Throughput_AVG"] = df["result_receive_rate_rate_avg"] / 1e9
        cov["Throughput_STDEV"] = df["result_receive_rate_rate_stdev"] / 1e9
    else:  # NDRPDR
        # Rates in Mega-units, bandwidth in Giga-units.
        cov["Throughput_Unit"] = df["result_pdr_lower_rate_unit"]
        cov["Throughput_NDR"] = df["result_ndr_lower_rate_value"] / 1e6
        cov["Throughput_NDR_Gbps"] = \
            df["result_ndr_lower_bandwidth_value"] / 1e9
        cov["Throughput_PDR"] = df["result_pdr_lower_rate_value"] / 1e6
        cov["Throughput_PDR_Gbps"] = \
            df["result_pdr_lower_bandwidth_value"] / 1e9
        if show_latency:
            for way in ("Forward", "Reverse"):
                for pdr in (10, 50, 90):
                    for perc in (50, 90, 99):
                        latency = f"result_latency_{way.lower()}_pdr_{pdr}_hdrh"
                        cov[f"Latency {way} [us]_{pdr}% PDR_P{perc}"] = \
                            df[latency].apply(_latency, args=(perc, ))

    if csv:
        return cov

    # Split data into tables depending on the test suite.
    for suite in cov["Suite"].unique().tolist():
        df_suite = pd.DataFrame(cov.loc[(cov["Suite"] == suite)])

        if ttype != "device":
            # Encode the measurement unit into the throughput column names
            # and drop the helper columns.
            unit = df_suite["Throughput_Unit"].tolist()[0]
            df_suite.rename(
                columns={
                    "Throughput_NDR": f"Throughput_NDR_M{unit}",
                    "Throughput_PDR": f"Throughput_PDR_M{unit}",
                    "Throughput_AVG": f"Throughput_G{unit}_AVG",
                    "Throughput_STDEV": f"Throughput_G{unit}_STDEV"
                },
                inplace=True
            )
            df_suite.drop(["Suite", "Throughput_Unit"], axis=1, inplace=True)

        l_data.append((suite, df_suite, ))

    return l_data, ttype
177
178
def coverage_tables(
        data: pd.DataFrame,
        selected: dict,
        show_latency: bool = True,
        start_collapsed: bool = True
    ) -> dbc.Accordion:
    """Generate an accordion with coverage tables.

    :param data: Coverage data.
    :param selected: Dictionary with user selection.
    :param show_latency: If True, latency is displayed in the tables.
    :param start_collapsed: If True, the accordion with tables is collapsed
        when displayed.
    :type data: pandas.DataFrame
    :type selected: dict
    :type show_latency: bool
    :type start_collapsed: bool
    :returns: Accordion with suite names (titles) and tables.
    :rtype: dash_bootstrap_components.Accordion
    """

    def _columns(ttype: str, columns: list) -> list:
        """Build DataTable column specifications for the given test type.

        Device tables are all-text. Performance tables (mrr, ndrpdr) have a
        textual first column followed by numeric ones; for ndrpdr, the
        latency columns (sixth onwards) are displayed without decimals.

        :param ttype: Test type ("device", "mrr" or ndrpdr).
        :param columns: Column names of the table.
        :type ttype: str
        :type columns: list
        :returns: Column specifications for dash DataTable.
        :rtype: list
        """
        if ttype == "device":  # VPP Device
            return [
                {
                    "name": col,
                    "id": col,
                    "deletable": False,
                    "selectable": False,
                    "type": "text"
                } for col in columns
            ]
        cols = list()
        for idx, col in enumerate(columns):
            if idx == 0:  # Test name column.
                cols.append({
                    "name": ["", "", col],
                    "id": col,
                    "deletable": False,
                    "selectable": False,
                    "type": "text"
                })
            else:
                # MRR and the first four NDRPDR throughput columns use two
                # decimal places, NDRPDR latency columns use none.
                precision = 2 if ttype == "mrr" or idx < 5 else 0
                cols.append({
                    "name": col.split("_"),
                    "id": col,
                    "deletable": False,
                    "selectable": False,
                    "type": "numeric",
                    "format": Format(precision=precision, scheme=Scheme.fixed)
                })
        return cols

    accordion_items = list()
    sel_data, ttype = \
        select_coverage_data(data, selected, show_latency=show_latency)

    # Cell alignment depends only on the test type, so compute it once.
    if ttype == "device":
        style_cell = {"textAlign": "left"}
        style_cell_conditional = [
            {
                "if": {"column_id": "Result"},
                "textAlign": "right"
            }
        ]
    else:
        style_cell = {"textAlign": "right"}
        style_cell_conditional = [
            {
                "if": {"column_id": "Test Name"},
                "textAlign": "left"
            }
        ]

    for suite, cov_data in sel_data:
        accordion_items.append(
            dbc.AccordionItem(
                title=suite,
                children=dash_table.DataTable(
                    columns=_columns(ttype, list(cov_data.columns)),
                    data=cov_data.to_dict("records"),
                    merge_duplicate_headers=True,
                    editable=False,
                    filter_action="none",
                    sort_action="native",
                    sort_mode="multi",
                    selected_columns=[],
                    selected_rows=[],
                    page_action="none",
                    style_cell=style_cell,
                    style_cell_conditional=style_cell_conditional
                )
            )
        )
    if not accordion_items:
        # Show a placeholder item instead of an empty accordion.
        accordion_items.append(dbc.AccordionItem(
            title="No data.",
            children="No data."
        ))
        start_collapsed = True
    return dbc.Accordion(
        children=accordion_items,
        class_name="gy-1 p-0",
        start_collapsed=start_collapsed,
        always_open=True
    )