1 # Copyright (c) 2022 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
17 import plotly.graph_objects as go
24 from datetime import datetime
25 from numpy import isnan
27 from ..jumpavg import classify
# NOTE(review): this excerpt is missing lines (the embedded original line
# numbers are not contiguous); several literals below lack their opening
# assignment or closing bracket lines.

# Tail of the color palette used to distinguish individual test traces
# (the opening line, presumably "_COLORS = (", is not visible here).
u"#1A1110", u"#DA2647", u"#214FC6", u"#01786F", u"#BD8260", u"#FFD12A",
u"#A6E7FF", u"#738276", u"#C95A49", u"#FC5A8D", u"#CEC8EF", u"#391285",
u"#6F2DA8", u"#FF878D", u"#45A27D", u"#FFD0B9", u"#FD5240", u"#DB91EF",
u"#44D7A8", u"#4F86F7", u"#84DE02", u"#FFCFF1", u"#614051"
# Colorbar tick labels for throughput graphs (low value means regression).
_TICK_TEXT_TPUT = [u"Regression", u"Normal", u"Progression"]
# Colorbar tick labels for latency graphs (reversed: lower latency is better).
_TICK_TEXT_LAT = [u"Progression", u"Normal", u"Regression"]
# Test type -> data column holding the measured value
# (the dict opener, presumably "_VALUE = {", is not visible here).
"mrr": "result_receive_rate_rate_avg",
"ndr": "result_ndr_lower_rate_value",
"pdr": "result_pdr_lower_rate_value",
"pdr-lat": "result_latency_forward_pdr_50_avg"
# Test type -> data column holding the unit of the measured value
# (the dict opener, presumably "_UNIT = {", is not visible here).
"mrr": "result_receive_rate_rate_unit",
"ndr": "result_ndr_lower_rate_unit",
"pdr": "result_pdr_lower_rate_unit",
"pdr-lat": "result_latency_forward_pdr_50_unit"
# Data columns with raw hdrh latency strings, consumed by
# _get_hdrh_latencies and graph_hdrh_latency.
_LAT_HDRH = ( # Do not change the order
    "result_latency_forward_pdr_0_hdrh",
    "result_latency_reverse_pdr_0_hdrh",
    "result_latency_forward_pdr_10_hdrh",
    "result_latency_reverse_pdr_10_hdrh",
    "result_latency_forward_pdr_50_hdrh",
    "result_latency_reverse_pdr_50_hdrh",
    "result_latency_forward_pdr_90_hdrh",
    "result_latency_reverse_pdr_90_hdrh",
# This value depends on latency stream rate (9001 pps) and duration (5s).
# Keep it slightly higher to ensure rounding errors do not remove the
# tick mark.
PERCENTILE_MAX = 99.999501
# Human-readable load-level descriptions per hdrh column, used as trace
# names and hover headers in graph_hdrh_latency.
_GRAPH_LAT_HDRH_DESC = {
    u"result_latency_forward_pdr_0_hdrh": u"No-load.",
    u"result_latency_reverse_pdr_0_hdrh": u"No-load.",
    u"result_latency_forward_pdr_10_hdrh": u"Low-load, 10% PDR.",
    u"result_latency_reverse_pdr_10_hdrh": u"Low-load, 10% PDR.",
    u"result_latency_forward_pdr_50_hdrh": u"Mid-load, 50% PDR.",
    u"result_latency_reverse_pdr_50_hdrh": u"Mid-load, 50% PDR.",
    u"result_latency_forward_pdr_90_hdrh": u"High-load, 90% PDR.",
    u"result_latency_reverse_pdr_90_hdrh": u"High-load, 90% PDR."
def _get_hdrh_latencies(row: pd.Series, name: str) -> dict:
    """Collect the raw hdrh latency strings of one test run.

    :param row: One row (one test run) of trending data.
    :param name: Test name, stored under the "name" key of the result.
    :type row: pd.Series
    :type name: str
    :returns: Test name plus one entry per _LAT_HDRH column, copied
        verbatim from the row.
        NOTE(review): the return statement itself is not visible in this
        excerpt -- presumably ``return latencies`` follows the loop.
    :rtype: dict
    """
    latencies = {"name": name}
    for key in _LAT_HDRH:
        # NOTE(review): no guard for a missing/NaN column is visible here;
        # presumably the upstream data always carries all _LAT_HDRH keys.
        latencies[key] = row[key]
def _classify_anomalies(data):
    """Process the data and return anomalies and trending values.

    Gather data into groups with average as trend value.
    Decorate values within groups to be normal,
    the first value of changed average as a regression, or a progression.

    :param data: Full data set with unavailable samples replaced by nan.
    :type data: OrderedDict
    :returns: Classification and trend values
    :rtype: 3-tuple, list of strings, list of floats and list of floats
    """
    # NaN means something went wrong.
    # Use 0.0 to cause that being reported as a severe regression.
    bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
    # TODO: Make BitCountingGroupList a subclass of list again?
    group_list = classify(bare_data).group_list
    group_list.reverse()  # Just to use .pop() for FIFO.
    classification = list()
    # NOTE(review): initializations of `avgs`, `stdevs`, `values_left` and
    # `active_group` are not visible in this excerpt, yet they are used
    # below and in the return statement -- lines appear to be missing.
    for sample in data.values():
        classification.append(u"outlier")
        stdevs.append(sample)
        if values_left < 1 or active_group is None:
            # Advance to the next non-empty group and cache its stats.
            while values_left < 1:  # Ignore empty groups (should not happen).
                active_group = group_list.pop()
                values_left = len(active_group.run_list)
            avg = active_group.stats.avg
            stdv = active_group.stats.stdev
            # First value of a new group gets the group's comment
            # (e.g. regression / progression).
            classification.append(active_group.comment)
        classification.append(u"normal")
    return classification, avgs, stdevs
def select_trending_data(data: pd.DataFrame, itm:dict) -> pd.DataFrame:
    """Select the trending data rows matching one selected test item.

    :param itm: Selected test; the visible code reads the keys "phy",
        "dut", "core", "testtype", "framesize" and "test".
    :param data: Full trending data.
    :type data: pd.DataFrame
    :type itm: dict
    :returns: Matching rows sorted by "start_time".
        NOTE(review): no return statement is visible in this excerpt.
    :rtype: pd.DataFrame
    """
    # "phy" encodes topology, architecture, NIC and driver.
    phy = itm["phy"].split("-")
    topo, arch, nic, drv = phy
    # Driver names use dashes in test ids.
    drv = drv.replace("_", "-")
    # TRex tests carry no DUT core count in the test id.
    core = str() if itm["dut"] == "trex" else f"{itm['core']}"
    ttype = "ndrpdr" if itm["testtype"] in ("ndr", "pdr") else itm["testtype"]
    dut = "none" if itm["dut"] == "trex" else itm["dut"].upper()
    # NOTE(review): the opening of this boolean filter (presumably
    # "df = data.loc[(") is not visible in this excerpt.
    (data["dut_type"] == dut) &
    (data["test_type"] == ttype) &
    (data["passed"] == True)
    # Narrow down by job (topology-architecture) and by the full test id.
    df = df[df.job.str.endswith(f"{topo}-{arch}")]
    df = df[df.test_id.str.contains(
        f"^.*[.|-]{nic}.*{itm['framesize']}-{core}-{drv}{itm['test']}-{ttype}$",
    )].sort_values(by="start_time", ignore_index=True)
def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame,
        start: datetime, end: datetime, color: str) -> list:
    """Generate the trending traces (samples, trend line, anomaly markers)
    for one test.

    :param ttype: Test type ("mrr", "ndr", "pdr" or "pdr-lat").
    :param name: Test name used for the traces.
    :param df: Trending data of this test.
    :param start: First date to include.
    :param end: Last date to include.
    :param color: Color of the traces.
    :type ttype: str
    :type name: str
    :type df: pd.DataFrame
    :type start: datetime
    :type end: datetime
    :type color: str
    :returns: List of plotly traces.
    :rtype: list

    NOTE(review): many lines of this function are not visible in this
    excerpt (hover/customdata list initializations, most go.Scatter
    arguments, closing brackets); the comments below describe only the
    visible code.
    """
    # Drop runs without a measured value, then limit to the time window.
    df = df.dropna(subset=[_VALUE[ttype], ])
    df = df.loc[((df["start_time"] >= start) & (df["start_time"] <= end))]
    x_axis = df["start_time"].tolist()
    # Classify every sample against the jumpavg trend.
    anomalies, trend_avg, trend_stdev = _classify_anomalies(
        {k: v for k, v in zip(x_axis, df[_VALUE[ttype]])}
    # Per-sample hover texts (plus hdrh customdata for latency graphs).
    for _, row in df.iterrows():
        f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
        f"<prop> [{row[_UNIT[ttype]]}]: {row[_VALUE[ttype]]}<br>"
        f"{row['dut_type']}-ref: {row['dut_version']}<br>"
        f"csit-ref: {row['job']}/{row['build']}<br>"
        f"hosts: {', '.join(row['hosts'])}"
        f"stdev [{row['result_receive_rate_rate_unit']}]: "
        f"{row['result_receive_rate_rate_stdev']}<br>"
        # Fill the <prop>/<stdev> placeholders depending on test type.
        hover_itm = hover_itm.replace(
            "<prop>", "latency" if ttype == "pdr-lat" else "average"
        ).replace("<stdev>", stdev)
        hover.append(hover_itm)
        if ttype == "pdr-lat":
            customdata.append(_get_hdrh_latencies(row, name))
    # Hover texts for the trend line.
    for avg, stdev, (_, row) in zip(trend_avg, trend_stdev, df.iterrows()):
        f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
        f"trend [pps]: {avg}<br>"
        f"stdev [pps]: {stdev}<br>"
        f"{row['dut_type']}-ref: {row['dut_version']}<br>"
        f"csit-ref: {row['job']}/{row['build']}<br>"
        f"hosts: {', '.join(row['hosts'])}"
        if ttype == "pdr-lat":
            # Latency values are in microseconds, not packets per second.
            hover_itm = hover_itm.replace("[pps]", "[us]")
        hover_trend.append(hover_itm)
    go.Scatter(  # Samples
        u"symbol": u"circle",
        hoverinfo=u"text+name",
        customdata=customdata
    go.Scatter(  # Trend line
        hoverinfo=u"text+name",
    # Collect markers for regressions and progressions only.
    anomaly_color = list()
    for idx, anomaly in enumerate(anomalies):
        if anomaly in (u"regression", u"progression"):
            anomaly_x.append(x_axis[idx])
            anomaly_y.append(trend_avg[idx])
            anomaly_color.append(_ANOMALY_COLOR[anomaly])
            f"date: {x_axis[idx].strftime('%d-%m-%Y %H:%M:%S')}<br>"
            f"trend [pps]: {trend_avg[idx]}<br>"
            f"classification: {anomaly}"
            if ttype == "pdr-lat":
                hover_itm = hover_itm.replace("[pps]", "[us]")
            hover.append(hover_itm)
    # Presumably forces all three colorbar ticks to be rendered even when
    # few anomaly classes occur -- TODO confirm against full source.
    anomaly_color.extend([0.0, 0.5, 1.0])
    hoverinfo=u"text+name",
    u"symbol": u"circle-open",
    u"color": anomaly_color,
    # Latency graphs use the reversed color scale (lower is better).
    u"colorscale": _COLORSCALE_LAT \
        if ttype == "pdr-lat" else _COLORSCALE_TPUT,
    u"title": u"Circles Marking Data Classification",
    u"titleside": u"right",
    u"tickmode": u"array",
    u"tickvals": [0.167, 0.500, 0.833],
    u"ticktext": _TICK_TEXT_LAT \
        if ttype == "pdr-lat" else _TICK_TEXT_TPUT,
def graph_trending(data: pd.DataFrame, sel:dict, layout: dict,
        start: datetime, end: datetime) -> tuple:
    """Generate the trending throughput graph, and the latency graph for
    PDR tests, for all selected tests.

    :param data: Full trending data.
    :param sel: Selected tests; each item is passed to
        select_trending_data.
    :param layout: Layout of plot.ly graphs.
    :param start: First date to plot.
    :param end: Last date to plot.
    :type data: pd.DataFrame
    :type sel: dict
    :type layout: dict
    :type start: datetime
    :type end: datetime
    :returns: Throughput figure and latency figure.
    :rtype: tuple

    NOTE(review): lines are missing from this excerpt (e.g. the guard
    body below and any None-checks around the figures); comments describe
    only the visible code.
    """
    for idx, itm in enumerate(sel):
        df = select_trending_data(data, itm)
        # Skip tests without data.
        if df is None or df.empty:
        name = "-".join((itm["dut"], itm["phy"], itm["framesize"], itm["core"],
            itm["test"], itm["testtype"], ))
        # Colors cycle when there are more tests than palette entries.
        traces = _generate_trending_traces(
            itm["testtype"], name, df, start, end, _COLORS[idx % len(_COLORS)]
        fig_tput = go.Figure()
        fig_tput.add_traces(traces)
        if itm["testtype"] == "pdr":
            # PDR tests additionally get a latency graph.
            traces = _generate_trending_traces(
                "pdr-lat", name, df, start, end, _COLORS[idx % len(_COLORS)]
            fig_lat = go.Figure()
            fig_lat.add_traces(traces)
    fig_tput.update_layout(layout.get("plot-trending-tput", dict()))
    fig_lat.update_layout(layout.get("plot-trending-lat", dict()))
    return fig_tput, fig_lat
def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure:
    """Generate the hdrh latency percentile graph.

    :param data: Raw hdrh latency strings keyed by _LAT_HDRH column name.
    :param layout: Layout of plot.ly graphs.
    :type data: dict
    :type layout: dict
    :returns: Latency percentile figure.
    :rtype: go.Figure

    NOTE(review): lines are missing from this excerpt (including the
    ``try:`` matched by the ``except`` below, the trace construction and
    the final return); comments describe only the visible code.
    """
    for idx, (lat_name, lat_hdrh) in enumerate(data.items()):
        decoded = hdrh.histogram.HdrHistogram.decode(lat_hdrh)
        except (hdrh.codec.HdrLengthException, TypeError) as err:
        for item in decoded.get_recorded_iterator():
            # The real value is "percentile".
            # For 100%, we cut that down to "x_perc" to avoid a division
            # by zero in the x-axis transform below.
            percentile = item.percentile_level_iterated_to
            x_perc = min(percentile, PERCENTILE_MAX)
            xaxis.append(previous_x)
            yaxis.append(item.value_iterated_to)
            f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
            f"Direction: {(u'W-E', u'E-W')[idx % 2]}<br>"
            f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
            f"Latency: {item.value_iterated_to}uSec"
            # X axis stretches the tail: 100/(100-percentile).
            next_x = 100.0 / (100.0 - x_perc)
            yaxis.append(item.value_iterated_to)
            f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
            f"Direction: {(u'W-E', u'E-W')[idx % 2]}<br>"
            f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
            f"Latency: {item.value_iterated_to}uSec"
            prev_perc = percentile
        name=_GRAPH_LAT_HDRH_DESC[lat_name],
        legendgroup=_GRAPH_LAT_HDRH_DESC[lat_name],
        # Forward/reverse directions share a legend group; show the
        # legend entry only once per pair.
        showlegend=bool(idx % 2),
        color=_COLORS[int(idx/2)],
        width=1 if idx % 2 else 2
    fig.add_traces(traces)
    layout_hdrh = layout.get("plot-hdrh-latency", None)
    fig.update_layout(layout_hdrh)