1 # Copyright (c) 2022 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
18 import plotly.graph_objects as go
24 from datetime import datetime
25 from numpy import isnan
27 from ..jumpavg import classify
# ---------------------------------------------------------------------------
# Module-level constants.
# NOTE(review): this listing is elided — original line numbers are embedded
# at the start of each line and several definitions (the contents of
# _FREQURENCY, the openings of _VALUE and _UNIT, and the _ANOMALY_COLOR /
# _COLORSCALE_* tables referenced later) fall on lines not visible here.
# ---------------------------------------------------------------------------
# Reference CPU frequency; results from testbeds running at a different
# frequency are scaled by _NORM_FREQUENCY / _FREQURENCY[topo-arch]
# (see norm_factor in graph_trending).
30 _NORM_FREQUENCY = 2.0 # [GHz]
# TODO(review): "_FREQURENCY" looks like a typo for "_FREQUENCY"; it is used
# under this exact name in graph_trending, so renaming requires changing all
# usages in one commit. Maps "topo-arch" strings to testbed CPU frequency.
31 _FREQURENCY = { # [GHz]
# Colorbar tick labels for throughput anomaly markers
# (order matches the 0.167/0.500/0.833 tickvals used below).
60 _TICK_TEXT_TPUT = ["Regression", "Normal", "Progression"]
# Latency labels are reversed: for latency, a drop is a progression.
69 _TICK_TEXT_LAT = ["Progression", "Normal", "Regression"]
# Test type -> DataFrame column holding the measured value
# (opening "_VALUE = {" is on an elided line).
71 "mrr": "result_receive_rate_rate_avg",
72 "ndr": "result_ndr_lower_rate_value",
73 "pdr": "result_pdr_lower_rate_value",
74 "pdr-lat": "result_latency_forward_pdr_50_avg"
# Test type -> DataFrame column holding the unit of the value
# (opening "_UNIT = {" is on an elided line).
77 "mrr": "result_receive_rate_rate_unit",
78 "ndr": "result_ndr_lower_rate_unit",
79 "pdr": "result_pdr_lower_rate_unit",
80 "pdr-lat": "result_latency_forward_pdr_50_unit"
# Column names of compressed HDR histogram data, forward/reverse pairs for
# each load level. _get_hdrh_latencies and graph_hdrh_latency rely on this
# exact ordering.
82 _LAT_HDRH = ( # Do not change the order
83 "result_latency_forward_pdr_0_hdrh",
84 "result_latency_reverse_pdr_0_hdrh",
85 "result_latency_forward_pdr_10_hdrh",
86 "result_latency_reverse_pdr_10_hdrh",
87 "result_latency_forward_pdr_50_hdrh",
88 "result_latency_reverse_pdr_50_hdrh",
89 "result_latency_forward_pdr_90_hdrh",
90 "result_latency_reverse_pdr_90_hdrh",
# Cap for the percentile axis in graph_hdrh_latency.
92 # This value depends on latency stream rate (9001 pps) and duration (5s).
93 # Keep it slightly higher to ensure rounding errors do not remove tick mark.
94 PERCENTILE_MAX = 99.999501
# Human-readable legend/hover descriptions for each HDRH column.
96 _GRAPH_LAT_HDRH_DESC = {
97 "result_latency_forward_pdr_0_hdrh": "No-load.",
98 "result_latency_reverse_pdr_0_hdrh": "No-load.",
99 "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.",
100 "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.",
101 "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.",
102 "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.",
103 "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.",
104 "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR."
108 def _get_color(idx: int) -> str:
112 "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A",
113 "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285",
114 "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF",
115 "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051"
117 return _COLORS[idx % len(_COLORS)]
120 def _get_hdrh_latencies(row: pd.Series, name: str) -> dict:
# Collect the HDRH latency strings of one test run into a dict keyed by the
# column names in _LAT_HDRH, plus "name" for display purposes.
# NOTE(review): original lines 121-123, 126 and 128+ are elided in this
# view — presumably the docstring, error handling around the row lookup,
# and the final return of the latencies dict; confirm against full source.
124 latencies = {"name": name}
125 for key in _LAT_HDRH:
127 latencies[key] = row[key]
134 def _classify_anomalies(data):
135 """Process the data and return anomalies and trending values.
137 Gather data into groups with average as trend value.
138 Decorate values within groups to be normal,
139 the first value of changed average as a regression, or a progression.
141 :param data: Full data set with unavailable samples replaced by nan.
142 :type data: OrderedDict
143 :returns: Classification and trend values
144 :rtype: 3-tuple, list of strings, list of floats and list of floats
146 # NaN means something went wrong.
147 # Use 0.0 to cause that being reported as a severe regression.
148 bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
149 # TODO: Make BitCountingGroupList a subclass of list again?
150 group_list = classify(bare_data).group_list
# Reversed so that .pop() below yields groups in chronological order.
151 group_list.reverse() # Just to use .pop() for FIFO.
152 classification = list()
# NOTE(review): lines 153-158 are elided — presumably initialization of the
# avgs/stdevs result lists and of the per-group state (active_group,
# values_left, avg, stdv) used below; confirm against full source.
159 for sample in data.values():
# NaN sample: marked as "outlier" (the isnan guard on line 160 is elided).
161 classification.append("outlier")
163 stdevs.append(sample)
# Current group exhausted: advance to the next non-empty group and emit its
# comment (regression/progression/normal) for the group's first sample.
165 if values_left < 1 or active_group is None:
167 while values_left < 1: # Ignore empty groups (should not happen).
168 active_group = group_list.pop()
169 values_left = len(active_group.run_list)
170 avg = active_group.stats.avg
171 stdv = active_group.stats.stdev
172 classification.append(active_group.comment)
# NOTE(review): lines 173-176 and 178-180 are elided — presumably the
# avgs/stdevs appends, the values_left bookkeeping and continue statements.
# Samples inside an already-started group are classified as "normal".
177 classification.append("normal")
181 return classification, avgs, stdevs
184 def select_trending_data(data: pd.DataFrame, itm:dict) -> pd.DataFrame:
# Filter the full results DataFrame down to the rows matching one selected
# test item (itm): physical testbed, DUT, core count, frame size, test name
# and test type. Returns rows sorted by start_time.
# NOTE(review): several lines are elided in this view (docstring, the
# openings of the boolean filter expressions around lines 203-214, and the
# final return); confirm against full source.
# "phy" encodes topology-architecture-nic-driver, e.g. "2n-icx-e810cq-dpdk".
188 phy = itm["phy"].split("-")
190 topo, arch, nic, drv = phy
195 drv = drv.replace("_", "-")
# TRex tests have no core count in the test id.
199 core = str() if itm["dut"] == "trex" else f"{itm['core']}"
# Both ndr and pdr results come from the same "ndrpdr" test runs.
200 ttype = "ndrpdr" if itm["testtype"] in ("ndr", "pdr") else itm["testtype"]
# Schema v1.0.0 stored TRex runs with dut_type "none"; v1.0.1 uses "trex".
201 dut_v100 = "none" if itm["dut"] == "trex" else itm["dut"]
202 dut_v101 = itm["dut"]
207 (data["version"] == "1.0.0") &
208 (data["dut_type"].str.lower() == dut_v100)
211 (data["version"] == "1.0.1") &
212 (data["dut_type"].str.lower() == dut_v101)
215 (data["test_type"] == ttype) &
216 (data["passed"] == True)
# Keep only jobs run on the selected topology/architecture.
218 df = df[df.job.str.endswith(f"{topo}-{arch}")]
# Match the exact test by its id (nic, frame size, cores, driver, name).
219 df = df[df.test_id.str.contains(
220 f"^.*[.|-]{nic}.*{itm['framesize']}-{core}-{drv}{itm['test']}-{ttype}$",
222 )].sort_values(by="start_time", ignore_index=True)
227 def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame,
228 start: datetime, end: datetime, color: str, norm_factor: float) -> list:
# Build the list of plotly traces for one test: raw samples, trend line and
# anomaly markers. Values are scaled by norm_factor (CPU-frequency
# normalization) and restricted to the [start, end] time window.
# NOTE(review): many lines are elided in this view (docstring, hover/
# customdata list initialization, most go.Scatter keyword arguments and the
# assembly/return of the trace list); confirm against full source.
# Drop runs with no value for this test type, then apply the time window.
232 df = df.dropna(subset=[_VALUE[ttype], ])
235 df = df.loc[((df["start_time"] >= start) & (df["start_time"] <= end))]
239 x_axis = df["start_time"].tolist()
240 y_data = [itm * norm_factor for itm in df[_VALUE[ttype]].tolist()]
# Classify each (timestamp, value) sample and get per-sample trend stats.
242 anomalies, trend_avg, trend_stdev = _classify_anomalies(
243 {k: v for k, v in zip(x_axis, y_data)}
# Hover text for the raw-sample trace, one entry per run.
248 for _, row in df.iterrows():
# dut_type "none" marks TRex runs in schema v1.0.0.
249 d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
251 f"date: {row['start_time'].strftime('%Y-%m-%d %H:%M:%S')}<br>"
252 f"<prop> [{row[_UNIT[ttype]]}]: {row[_VALUE[ttype]]:,.0f}<br>"
254 f"{d_type}-ref: {row['dut_version']}<br>"
255 f"csit-ref: {row['job']}/{row['build']}<br>"
256 f"hosts: {', '.join(row['hosts'])}"
# Stdev line is only meaningful for mrr results (guard is on an elided line).
260 f"stdev [{row['result_receive_rate_rate_unit']}]: "
261 f"{row['result_receive_rate_rate_stdev']:,.0f}<br>"
# Resolve the <prop>/<stdev> placeholders used in the template above.
265 hover_itm = hover_itm.replace(
266 "<prop>", "latency" if ttype == "pdr-lat" else "average"
267 ).replace("<stdev>", stdev)
268 hover.append(hover_itm)
# Latency points carry their HDRH data so clicking can show the histogram.
269 if ttype == "pdr-lat":
270 customdata.append(_get_hdrh_latencies(row, name))
# Hover text for the trend-line trace.
273 for avg, stdev, (_, row) in zip(trend_avg, trend_stdev, df.iterrows()):
274 d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
276 f"date: {row['start_time'].strftime('%Y-%m-%d %H:%M:%S')}<br>"
277 f"trend [pps]: {avg:,.0f}<br>"
278 f"stdev [pps]: {stdev:,.0f}<br>"
279 f"{d_type}-ref: {row['dut_version']}<br>"
280 f"csit-ref: {row['job']}/{row['build']}<br>"
281 f"hosts: {', '.join(row['hosts'])}"
282 # Latency is shown in microseconds, not packets per second.
283 if ttype == "pdr-lat":
284 hover_itm = hover_itm.replace("[pps]", "[us]")
285 hover_trend.append(hover_itm)
288 go.Scatter( # Samples
299 hoverinfo="text+name",
302 customdata=customdata
304 go.Scatter( # Trend line
315 hoverinfo="text+name",
# Collect coordinates, colors and hover text for anomaly markers only.
324 anomaly_color = list()
326 for idx, anomaly in enumerate(anomalies):
327 if anomaly in ("regression", "progression"):
328 anomaly_x.append(x_axis[idx])
329 anomaly_y.append(trend_avg[idx])
330 anomaly_color.append(_ANOMALY_COLOR[anomaly])
332 f"date: {x_axis[idx].strftime('%Y-%m-%d %H:%M:%S')}<br>"
333 f"trend [pps]: {trend_avg[idx]:,.0f}<br>"
334 f"classification: {anomaly}"
336 if ttype == "pdr-lat":
337 hover_itm = hover_itm.replace("[pps]", "[us]")
338 hover.append(hover_itm)
# Ensure all three colorscale stops exist so the colorbar renders fully.
339 anomaly_color.extend([0.0, 0.5, 1.0])
346 hoverinfo="text+name",
352 "symbol": "circle-open",
# Latency uses a reversed colorscale (lower latency = better).
353 "color": anomaly_color,
354 "colorscale": _COLORSCALE_LAT \
355 if ttype == "pdr-lat" else _COLORSCALE_TPUT,
363 "title": "Circles Marking Data Classification",
364 "titleside": "right",
# Tick positions are the midpoints of the three colorscale bands.
366 "tickvals": [0.167, 0.500, 0.833],
367 "ticktext": _TICK_TEXT_LAT \
368 if ttype == "pdr-lat" else _TICK_TEXT_TPUT,
381 def graph_trending(data: pd.DataFrame, sel:dict, layout: dict,
382 start: datetime, end: datetime, normalize: bool) -> tuple:
# Build the throughput figure and (for pdr tests) the latency figure for all
# selected test items. Returns (fig_tput, fig_lat).
# NOTE(review): lines are elided in this view (docstring, initialization of
# fig_tput/fig_lat to None, the "continue" on empty data, and the handling
# of the `normalize` flag around line 402); confirm against full source.
391 for idx, itm in enumerate(sel):
393 df = select_trending_data(data, itm)
394 if df is None or df.empty:
# Unique per-item name used for trace legends and HDRH lookups.
397 name = "-".join((itm["dut"], itm["phy"], itm["framesize"], itm["core"],
398 itm["test"], itm["testtype"], ))
# Derive the "topo-arch" key for the frequency table; an unexpected phy
# format falls back to no normalization (factor 1.0).
400 phy = itm["phy"].split("-")
401 topo_arch = f"{phy[0]}-{phy[1]}" if len(phy) == 4 else str()
402 norm_factor = (_NORM_FREQUENCY / _FREQURENCY[topo_arch]) \
403 if topo_arch else 1.0
# Each item gets its own color; the same color is reused for its
# latency traces so throughput and latency graphs correspond visually.
406 traces = _generate_trending_traces(
407 itm["testtype"], name, df, start, end, _get_color(idx), norm_factor
411 fig_tput = go.Figure()
412 fig_tput.add_traces(traces)
# Latency graph exists only for pdr test results.
414 if itm["testtype"] == "pdr":
415 traces = _generate_trending_traces(
416 "pdr-lat", name, df, start, end, _get_color(idx), norm_factor
420 fig_lat = go.Figure()
421 fig_lat.add_traces(traces)
# Apply the app-wide layout definitions, if provided.
424 fig_tput.update_layout(layout.get("plot-trending-tput", dict()))
426 fig_lat.update_layout(layout.get("plot-trending-lat", dict()))
428 return fig_tput, fig_lat
431 def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure:
# Build a latency-vs-percentile figure from compressed HDR histogram
# strings, one trace per direction and load level.
# NOTE(review): this definition is elided in this view AND continues past
# the last visible line (no "return fig" shown); docstring, per-trace
# xaxis/yaxis/hovertext initialization, the go.Scatter construction and the
# figure creation are on missing lines; confirm against full source.
438 for idx, (lat_name, lat_hdrh) in enumerate(data.items()):
440 decoded = hdrh.histogram.HdrHistogram.decode(lat_hdrh)
# Undecodable or absent histograms are skipped (handler body elided).
441 except (hdrh.codec.HdrLengthException, TypeError) as err:
448 for item in decoded.get_recorded_iterator():
449 # The real value is "percentile".
450 # For 100%, we cut that down to "x_perc" to avoid
# infinity on the log-like x transform below (comment continues on an
# elided line).
452 percentile = item.percentile_level_iterated_to
453 x_perc = min(percentile, PERCENTILE_MAX)
# Each bucket contributes two points (previous_x and next_x) so the
# plot renders as horizontal steps at each latency value.
454 xaxis.append(previous_x)
455 yaxis.append(item.value_iterated_to)
457 f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
# Even indices are forward (W-E), odd are reverse (E-W) per _LAT_HDRH order.
458 f"Direction: {('W-E', 'E-W')[idx % 2]}<br>"
459 f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
460 f"Latency: {item.value_iterated_to}uSec"
# x axis is 100 / (100 - percentile): each decade covers "one more nine".
462 next_x = 100.0 / (100.0 - x_perc)
464 yaxis.append(item.value_iterated_to)
466 f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
467 f"Direction: {('W-E', 'E-W')[idx % 2]}<br>"
468 f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
469 f"Latency: {item.value_iterated_to}uSec"
472 prev_perc = percentile
478 name=_GRAPH_LAT_HDRH_DESC[lat_name],
# Forward/reverse pair shares one legend entry; only one is shown.
480 legendgroup=_GRAPH_LAT_HDRH_DESC[lat_name],
481 showlegend=bool(idx % 2),
# Same color for both directions of a load level; reverse drawn thinner.
483 color=_get_color(int(idx/2)),
485 width=1 if idx % 2 else 2
493 fig.add_traces(traces)
494 layout_hdrh = layout.get("plot-hdrh-latency", None)
496 fig.update_layout(layout_hdrh)