1 # Copyright (c) 2022 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
18 import plotly.graph_objects as go
24 from datetime import datetime
25 from numpy import isnan
27 from ..jumpavg import classify
# --- Module-level constants for the trending graphs -------------------------
# NOTE(review): the embedded listing numbers jump (30, 31, 60, 69, 71, ...),
# so several original lines are missing from this excerpt, including the
# bodies/openers of some containers below (e.g. the contents of _FREQURENCY
# and the `_VALUE = {` / `_UNIT = {` opening lines). The code shown here is
# not syntactically complete on its own.

# Reference CPU frequency the results are normalized to.
30 _NORM_FREQUENCY = 2.0 # [GHz]
# Per-testbed CPU frequencies; keyed by "<topo>-<arch>" strings
# (see graph_trending, which looks up _FREQURENCY[topo_arch]).
31 _FREQURENCY = { # [GHz]
# Tick labels for the anomaly colour bar. Latency uses the reversed order,
# presumably because for latency a lower value is the improvement -- confirm
# against the missing _COLORSCALE_* definitions.
60 _TICK_TEXT_TPUT = ["Regression", "Normal", "Progression"]
69 _TICK_TEXT_LAT = ["Progression", "Normal", "Regression"]
# Dataframe column holding the measured value for each test type
# (these entries presumably belong to a `_VALUE = {...}` dict whose opening
# line is not visible in this excerpt).
71 "mrr": "result_receive_rate_rate_avg",
72 "ndr": "result_ndr_lower_rate_value",
73 "pdr": "result_pdr_lower_rate_value",
74 "pdr-lat": "result_latency_forward_pdr_50_avg"
# Dataframe column holding the unit for each test type
# (presumably part of a `_UNIT = {...}` dict -- opener not visible here).
77 "mrr": "result_receive_rate_rate_unit",
78 "ndr": "result_ndr_lower_rate_unit",
79 "pdr": "result_pdr_lower_rate_unit",
80 "pdr-lat": "result_latency_forward_pdr_50_unit"
# HdrHistogram columns: forward/reverse direction at 0/10/50/90 % PDR load.
# The order is relied upon elsewhere -- do not reorder.
82 _LAT_HDRH = ( # Do not change the order
83 "result_latency_forward_pdr_0_hdrh",
84 "result_latency_reverse_pdr_0_hdrh",
85 "result_latency_forward_pdr_10_hdrh",
86 "result_latency_reverse_pdr_10_hdrh",
87 "result_latency_forward_pdr_50_hdrh",
88 "result_latency_reverse_pdr_50_hdrh",
89 "result_latency_forward_pdr_90_hdrh",
90 "result_latency_reverse_pdr_90_hdrh",
# Upper clamp for percentile values used in graph_hdrh_latency, keeping the
# x-axis transform 100/(100-x) finite at the top percentile.
92 # This value depends on latency stream rate (9001 pps) and duration (5s).
93 # Keep it slightly higher to ensure rounding errors do not remove the tick mark.
94 PERCENTILE_MAX = 99.999501
# Human-readable description per HdrHistogram column; used in hover texts,
# trace names and legend groups in graph_hdrh_latency.
96 _GRAPH_LAT_HDRH_DESC = {
97 "result_latency_forward_pdr_0_hdrh": "No-load.",
98 "result_latency_reverse_pdr_0_hdrh": "No-load.",
99 "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.",
100 "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.",
101 "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.",
102 "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.",
103 "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.",
104 "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR."
# Return a hex colour string for the given index, cycling through a fixed
# palette (the modulo on the last line makes any non-negative index valid).
# NOTE(review): per the embedded listing numbers, original lines 109-111 and
# 116 are missing from this excerpt -- presumably the docstring, the
# `_COLORS = [` opening line and the list's closing bracket.
108 def _get_color(idx: int) -> str:
112 "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A",
113 "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285",
114 "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF",
115 "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051"
# Wrap around so more tests than palette entries still get a colour.
117 return _COLORS[idx % len(_COLORS)]
# Collect the HdrHistogram latency entries of one result row into a dict:
# the "name" key carries the test name, the remaining keys are the _LAT_HDRH
# columns copied from the row.
# NOTE(review): original lines 121-123, 126 and 128+ are missing from this
# excerpt -- the docstring, a likely per-key try/guard (line 126) and the
# final `return latencies` are not visible here.
120 def _get_hdrh_latencies(row: pd.Series, name: str) -> dict:
124 latencies = {"name": name}
125 for key in _LAT_HDRH:
127 latencies[key] = row[key]
# NOTE(review): this excerpt omits several original lines (145, 153-158,
# 160, 162, 164, 166, 173-176, 178-180 per the embedded numbering),
# including the docstring terminator, the initialisation of `avgs`,
# `stdevs`, `active_group` and `values_left`, and the loop's branch
# conditions. The code below is therefore not complete as shown.
134 def _classify_anomalies(data):
135 """Process the data and return anomalies and trending values.
137 Gather data into groups with average as trend value.
138 Decorate values within groups to be normal,
139 the first value of changed average as a regression, or a progression.
141 :param data: Full data set with unavailable samples replaced by nan.
142 :type data: OrderedDict
143 :returns: Classification and trend values
144 :rtype: 3-tuple, list of strings, list of floats and list of floats
146 # NaN means something went wrong.
147 # Use 0.0 to cause that being reported as a severe regression.
148 bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
# Group the samples with the jumpavg classifier; each group carries its
# average, stdev and a comment (normal/regression/progression).
149 # TODO: Make BitCountingGroupList a subclass of list again?
150 group_list = classify(bare_data).group_list
151 group_list.reverse() # Just to use .pop() for FIFO.
152 classification = list()
153 for sample in data.values():
# NOTE(review): the condition guarding this "outlier" branch (original
# line 160, presumably an isnan() check) is not visible here.
155 classification.append("outlier")
157 stdevs.append(sample)
# Current group exhausted (or not started yet) -- advance to the next
# non-empty group and take its stats.
159 if values_left < 1 or active_group is None:
161 while values_left < 1: # Ignore empty groups (should not happen).
162 active_group = group_list.pop()
163 values_left = len(active_group.run_list)
164 avg = active_group.stats.avg
165 stdv = active_group.stats.stdev
# The first value of a new group is decorated with the group's comment
# (regression/progression); subsequent values get "normal" below.
166 classification.append(active_group.comment)
168 classification.append("normal")
170 return classification, avgs, stdevs
# Filter the full results dataframe down to the rows matching one selected
# test item (testbed "phy", dut, core count, framesize, test and test type),
# sorted by start_time.
# NOTE(review): several original lines are missing from this excerpt (e.g.
# 189, 191-194, 196-198, 203-206, 209-210, 213-214, 217, 221, 223-226 per
# the embedded numbering), including the guard for a malformed "phy", the
# composition of the boolean masks into `df`, the str.contains flags and
# the final return statement.
184 def select_trending_data(data: pd.DataFrame, itm:dict) -> pd.DataFrame:
# "phy" is expected to split into "<topo>-<arch>-<nic>-<drv>".
188 phy = itm["phy"].split("-")
190 topo, arch, nic, drv = phy
195 drv = drv.replace("_", "-")
# TRex results carry no core count in the test id.
199 core = str() if itm["dut"] == "trex" else f"{itm['core']}"
# "ndr" and "pdr" selections both come from "ndrpdr" test runs.
200 ttype = "ndrpdr" if itm["testtype"] in ("ndr", "pdr") else itm["testtype"]
# Data model 1.0.0 stored TRex runs with dut_type "none"; 1.0.1 uses the
# dut name directly.
201 dut_v100 = "none" if itm["dut"] == "trex" else itm["dut"]
202 dut_v101 = itm["dut"]
207 (data["version"] == "1.0.0") &
208 (data["dut_type"].str.lower() == dut_v100)
211 (data["version"] == "1.0.1") &
212 (data["dut_type"].str.lower() == dut_v101)
215 (data["test_type"] == ttype) &
216 (data["passed"] == True)
# Keep only jobs that ran on the selected topology/architecture.
218 df = df[df.job.str.endswith(f"{topo}-{arch}")]
# Match the full test id: nic, framesize, core count, driver, test, type.
219 df = df[df.test_id.str.contains(
220 f"^.*[.|-]{nic}.*{itm['framesize']}-{core}-{drv}{itm['test']}-{ttype}$",
222 )].sort_values(by="start_time", ignore_index=True)
# Build the plotly Scatter traces for one test: the measured samples, the
# trend line and circles marking classified anomalies.
# :param ttype: Test type key ("mrr", "ndr", "pdr" or "pdr-lat").
# :param name: Test name used in hover data / customdata.
# :param df: Pre-filtered dataframe (see select_trending_data).
# :param start: Start of the plotted time window.
# :param end: End of the plotted time window.
# :param color: Line/marker colour for this test.
# :param norm_factor: CPU-frequency normalisation factor.
# :returns: List of go.Scatter traces.
# NOTE(review): many original lines are omitted from this excerpt (per the
# embedded numbering), including list initialisations (hover, customdata,
# hover_trend, anomaly_x/y), most go.Scatter keyword arguments, and the
# final return statement.
227 def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame,
228 start: datetime, end: datetime, color: str, norm_factor: float) -> list:
# Drop rows without a measured value, then clip to the requested window.
232 df = df.dropna(subset=[_VALUE[ttype], ])
235 df = df.loc[((df["start_time"] >= start) & (df["start_time"] <= end))]
239 x_axis = df["start_time"].tolist()
# Latency values are divided by the normalisation factor, throughput
# values are multiplied by it.
240 if ttype == "pdr-lat":
241 y_data = [(itm / norm_factor) for itm in df[_VALUE[ttype]].tolist()]
243 y_data = [(itm * norm_factor) for itm in df[_VALUE[ttype]].tolist()]
# Classify per-sample anomalies and get the trend average/stdev series.
245 anomalies, trend_avg, trend_stdev = _classify_anomalies(
246 {k: v for k, v in zip(x_axis, y_data)}
# Hover text for each individual sample; "<prop>" and "<stdev>" are
# placeholders substituted below.
251 for idx, (_, row) in enumerate(df.iterrows()):
252 d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
254 f"date: {row['start_time'].strftime('%Y-%m-%d %H:%M:%S')}<br>"
255 f"<prop> [{row[_UNIT[ttype]]}]: {y_data[idx]:,.0f}<br>"
257 f"{d_type}-ref: {row['dut_version']}<br>"
258 f"csit-ref: {row['job']}/{row['build']}<br>"
259 f"hosts: {', '.join(row['hosts'])}"
# Extra stdev line, presumably only for mrr results (the branch guarding
# it is among the missing lines -- confirm).
263 f"stdev [{row['result_receive_rate_rate_unit']}]: "
264 f"{row['result_receive_rate_rate_stdev']:,.0f}<br>"
268 hover_itm = hover_itm.replace(
269 "<prop>", "latency" if ttype == "pdr-lat" else "average"
270 ).replace("<stdev>", stdev)
271 hover.append(hover_itm)
# Latency samples also carry the HdrHistogram data (for click-through
# to the percentile graph).
272 if ttype == "pdr-lat":
273 customdata.append(_get_hdrh_latencies(row, name))
# Hover text for the trend line; units are swapped to [us] for latency.
276 for avg, stdev, (_, row) in zip(trend_avg, trend_stdev, df.iterrows()):
277 d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
279 f"date: {row['start_time'].strftime('%Y-%m-%d %H:%M:%S')}<br>"
280 f"trend [pps]: {avg:,.0f}<br>"
281 f"stdev [pps]: {stdev:,.0f}<br>"
282 f"{d_type}-ref: {row['dut_version']}<br>"
283 f"csit-ref: {row['job']}/{row['build']}<br>"
284 f"hosts: {', '.join(row['hosts'])}"
286 if ttype == "pdr-lat":
287 hover_itm = hover_itm.replace("[pps]", "[us]")
288 hover_trend.append(hover_itm)
291 go.Scatter( # Samples
302 hoverinfo="text+name",
305 customdata=customdata
307 go.Scatter( # Trend line
318 hoverinfo="text+name",
# Collect the anomalous points (regressions/progressions) with their
# x/y position, colour and hover text.
327 anomaly_color = list()
329 for idx, anomaly in enumerate(anomalies):
330 if anomaly in ("regression", "progression"):
331 anomaly_x.append(x_axis[idx])
332 anomaly_y.append(trend_avg[idx])
333 anomaly_color.append(_ANOMALY_COLOR[anomaly])
335 f"date: {x_axis[idx].strftime('%Y-%m-%d %H:%M:%S')}<br>"
336 f"trend [pps]: {trend_avg[idx]:,.0f}<br>"
337 f"classification: {anomaly}"
339 if ttype == "pdr-lat":
340 hover_itm = hover_itm.replace("[pps]", "[us]")
341 hover.append(hover_itm)
# Extra sentinel values presumably force the marker colour scale to span
# the whole [0, 1] range -- confirm against the colorscale definitions.
342 anomaly_color.extend([0.0, 0.5, 1.0])
349 hoverinfo="text+name",
355 "symbol": "circle-open",
356 "color": anomaly_color,
# Latency and throughput use different colour scales and tick labels,
# matching the reversed _TICK_TEXT_LAT ordering.
357 "colorscale": _COLORSCALE_LAT \
358 if ttype == "pdr-lat" else _COLORSCALE_TPUT,
366 "title": "Circles Marking Data Classification",
367 "titleside": "right",
369 "tickvals": [0.167, 0.500, 0.833],
370 "ticktext": _TICK_TEXT_LAT \
371 if ttype == "pdr-lat" else _TICK_TEXT_TPUT,
# Build the throughput trending figure and (for pdr tests) the latency
# trending figure for all selected test items.
# :param data: Full results dataframe.
# :param sel: List of selection dicts (see select_trending_data).
# :param layout: Layout dict with "plot-trending-tput"/"plot-trending-lat".
# :param start: Start of the plotted time window.
# :param end: End of the plotted time window.
# :param normalize: Presumably toggles the frequency normalisation; it is
#     not used in the lines visible in this excerpt -- confirm against the
#     missing lines.
# :returns: Tuple (fig_tput, fig_lat).
# NOTE(review): missing lines include the initial fig_tput/fig_lat values
# (the figures are created lazily at lines 414/423, implying a None init)
# and the guards around lines 407-428.
384 def graph_trending(data: pd.DataFrame, sel:dict, layout: dict,
385 start: datetime, end: datetime, normalize: bool) -> tuple:
394 for idx, itm in enumerate(sel):
396 df = select_trending_data(data, itm)
# Skip items with no matching data.
397 if df is None or df.empty:
400 name = "-".join((itm["dut"], itm["phy"], itm["framesize"], itm["core"],
401 itm["test"], itm["testtype"], ))
# Normalisation factor from the testbed's CPU frequency; "phy" strings
# with fewer than 4 parts get no normalisation (factor 1.0).
403 phy = itm["phy"].split("-")
404 topo_arch = f"{phy[0]}-{phy[1]}" if len(phy) == 4 else str()
405 norm_factor = (_NORM_FREQUENCY / _FREQURENCY[topo_arch]) \
406 if topo_arch else 1.0
409 traces = _generate_trending_traces(
410 itm["testtype"], name, df, start, end, _get_color(idx), norm_factor
# Create the throughput figure lazily, on the first usable trace list.
414 fig_tput = go.Figure()
415 fig_tput.add_traces(traces)
# pdr tests additionally produce a latency figure.
417 if itm["testtype"] == "pdr":
418 traces = _generate_trending_traces(
419 "pdr-lat", name, df, start, end, _get_color(idx), norm_factor
423 fig_lat = go.Figure()
424 fig_lat.add_traces(traces)
427 fig_tput.update_layout(layout.get("plot-trending-tput", dict()))
429 fig_lat.update_layout(layout.get("plot-trending-lat", dict()))
431 return fig_tput, fig_lat
# Build a latency-vs-percentile graph from encoded HdrHistogram data.
# :param data: Dict mapping latency names (see _GRAPH_LAT_HDRH_DESC keys)
#     to encoded HdrHistogram strings (as collected by _get_hdrh_latencies).
# :param layout: Layout dict with an optional "plot-hdrh-latency" entry.
# :returns: go.Figure.
# NOTE(review): this excerpt omits lines, including the `try:` opener that
# must precede the decode at line 443, the xaxis/yaxis/hovertext inits, the
# go.Scatter construction around lines 476-495, and the trailing
# `return fig` past the last visible line.
434 def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure:
# Even/odd indices are the two traffic directions of the same load level.
441 for idx, (lat_name, lat_hdrh) in enumerate(data.items()):
443 decoded = hdrh.histogram.HdrHistogram.decode(lat_hdrh)
444 except (hdrh.codec.HdrLengthException, TypeError) as err:
451 for item in decoded.get_recorded_iterator():
452 # The real value is "percentile".
453 # For 100%, we cut that down to "x_perc" to avoid
# (continuation of the comment above is missing; presumably it explains
# avoiding a division by zero in 100/(100-x) at the 100th percentile.)
455 percentile = item.percentile_level_iterated_to
456 x_perc = min(percentile, PERCENTILE_MAX)
457 xaxis.append(previous_x)
458 yaxis.append(item.value_iterated_to)
460 f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
461 f"Direction: {('W-E', 'E-W')[idx % 2]}<br>"
462 f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
463 f"Latency: {item.value_iterated_to}uSec"
# X axis is 100/(100-percentile), stretching the high percentiles;
# each bucket contributes two points (step-like plot).
465 next_x = 100.0 / (100.0 - x_perc)
467 yaxis.append(item.value_iterated_to)
469 f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
470 f"Direction: {('W-E', 'E-W')[idx % 2]}<br>"
471 f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
472 f"Latency: {item.value_iterated_to}uSec"
475 prev_perc = percentile
# Both directions of a load level share a colour and legend group; only
# one of the pair is shown in the legend, the other drawn thinner.
481 name=_GRAPH_LAT_HDRH_DESC[lat_name],
483 legendgroup=_GRAPH_LAT_HDRH_DESC[lat_name],
484 showlegend=bool(idx % 2),
486 color=_get_color(int(idx/2)),
488 width=1 if idx % 2 else 2
496 fig.add_traces(traces)
497 layout_hdrh = layout.get("plot-hdrh-latency", None)
499 fig.update_layout(layout_hdrh)