1 # Copyright (c) 2022 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
17 import plotly.graph_objects as go
24 from datetime import datetime
25 from numpy import isnan
27 from ..jumpavg import classify
# Color palette used to distinguish individual test traces in the
# trending graphs.
# NOTE(review): the "_COLORS = (" opening line is not visible in this
# chunk — confirm against the full file.
u"#1A1110", u"#DA2647", u"#214FC6", u"#01786F", u"#BD8260", u"#FFD12A",
u"#A6E7FF", u"#738276", u"#C95A49", u"#FC5A8D", u"#CEC8EF", u"#391285",
u"#6F2DA8", u"#FF878D", u"#45A27D", u"#FFD0B9", u"#FD5240", u"#DB91EF",
u"#44D7A8", u"#4F86F7", u"#84DE02", u"#FFCFF1", u"#614051"

# Colorbar tick labels for throughput graphs: for throughput a lower
# value is worse, so the lowest tick marks a regression.
_TICK_TEXT_TPUT = [u"Regression", u"Normal", u"Progression"]
# Colorbar tick labels for latency graphs: for latency a lower value is
# better, so the ordering is inverted with respect to throughput.
_TICK_TEXT_LAT = [u"Progression", u"Normal", u"Regression"]

# Test type -> DataFrame column holding the measured value.
# NOTE(review): the "_VALUE = {" opening line is not visible in this
# chunk.
"mrr": "result_receive_rate_rate_avg",
"ndr": "result_ndr_lower_rate_value",
"pdr": "result_pdr_lower_rate_value",
"pdr-lat": "result_latency_forward_pdr_50_avg"

# Test type -> DataFrame column holding the measurement unit.
# NOTE(review): the "_UNIT = {" opening line is not visible in this
# chunk.
"mrr": "result_receive_rate_rate_unit",
"ndr": "result_ndr_lower_rate_unit",
"pdr": "result_pdr_lower_rate_unit",
"pdr-lat": "result_latency_forward_pdr_50_unit"

# DataFrame columns holding hdrh-encoded latency histograms,
# forward/reverse pairs ordered by load level (0/10/50/90 % PDR).
_LAT_HDRH = ( # Do not change the order
    "result_latency_forward_pdr_0_hdrh",
    "result_latency_reverse_pdr_0_hdrh",
    "result_latency_forward_pdr_10_hdrh",
    "result_latency_reverse_pdr_10_hdrh",
    "result_latency_forward_pdr_50_hdrh",
    "result_latency_reverse_pdr_50_hdrh",
    "result_latency_forward_pdr_90_hdrh",
    "result_latency_reverse_pdr_90_hdrh",
# NOTE(review): the closing ")" of _LAT_HDRH is not visible in this
# chunk.

# Upper clamp for percentile values on the hdrh graph x-axis.
# This value depends on latency stream rate (9001 pps) and duration (5s).
# Keep it slightly higher to ensure rounding errors to not remove tick mark.
PERCENTILE_MAX = 99.999501

# Human-readable description of each hdrh latency column; used for
# hover text, trace names and legend grouping in graph_hdrh_latency.
_GRAPH_LAT_HDRH_DESC = {
    u"result_latency_forward_pdr_0_hdrh": u"No-load.",
    u"result_latency_reverse_pdr_0_hdrh": u"No-load.",
    u"result_latency_forward_pdr_10_hdrh": u"Low-load, 10% PDR.",
    u"result_latency_reverse_pdr_10_hdrh": u"Low-load, 10% PDR.",
    u"result_latency_forward_pdr_50_hdrh": u"Mid-load, 50% PDR.",
    u"result_latency_reverse_pdr_50_hdrh": u"Mid-load, 50% PDR.",
    u"result_latency_forward_pdr_90_hdrh": u"High-load, 90% PDR.",
    u"result_latency_reverse_pdr_90_hdrh": u"High-load, 90% PDR."
# NOTE(review): the closing "}" of _GRAPH_LAT_HDRH_DESC is not visible
# in this chunk.
def _get_hdrh_latencies(row: pd.Series, name: str) -> dict:
    """Collect the hdrh-encoded latency histograms for one test run.

    :param row: One row (test run) of trending data; must contain the
        _LAT_HDRH columns.
    :param name: Test name, stored under the "name" key of the result.
    :type row: pd.Series
    :type name: str
    :returns: Mapping with the test name and one entry per _LAT_HDRH
        column, copied verbatim from the row.
    :rtype: dict
    """
    latencies = {"name": name}
    for key in _LAT_HDRH:
        # NOTE(review): lines are elided around here in this chunk —
        # presumably a try/except guarding the column access, and a
        # trailing "return latencies"; confirm against the full file.
        latencies[key] = row[key]
def _classify_anomalies(data):
    """Process the data and return anomalies and trending values.

    Gather data into groups with average as trend value.
    Decorate values within groups to be normal,
    the first value of changed average as a regression, or a progression.

    :param data: Full data set with unavailable samples replaced by nan.
    :type data: OrderedDict
    :returns: Classification and trend values
    :rtype: 3-tuple, list of strings, list of floats and list of floats
    """
    # NaN means something went wrong.
    # Use 0.0 to cause that being reported as a severe regression.
    bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
    # TODO: Make BitCountingGroupList a subclass of list again?
    group_list = classify(bare_data).group_list
    group_list.reverse()  # Just to use .pop() for FIFO.
    classification = list()
    # NOTE(review): lines are elided in this chunk — initialization of
    # avgs/stdevs accumulators, values_left, active_group, and the
    # per-sample bookkeeping (nan check, avgs.append, values_left
    # decrement) between the statements below; confirm against the
    # full file.
    for sample in data.values():
            # Unavailable sample: mark as outlier.
            classification.append(u"outlier")
            stdevs.append(sample)
        if values_left < 1 or active_group is None:
            # Advance to the next non-empty group and emit its comment
            # (regression/progression) for the first sample in it.
            while values_left < 1:  # Ignore empty groups (should not happen).
                active_group = group_list.pop()
                values_left = len(active_group.run_list)
            avg = active_group.stats.avg
            stdv = active_group.stats.stdev
            classification.append(active_group.comment)
        # Remaining samples of the current group are normal.
        classification.append(u"normal")
    return classification, avgs, stdevs
def select_trending_data(data: pd.DataFrame, itm: dict) -> pd.DataFrame:
    """Select the rows of trending data matching one selected test.

    :param itm: Selection; the keys "phy" (topo-arch-nic-drv), "test",
        "framesize", "core" and "testtype" are read here.
    :param data: Full trending data set.
    :type data: pd.DataFrame
    :type itm: dict
    :returns: Rows whose job and test_id match the selection, sorted by
        start time.
    :rtype: pd.DataFrame
    """
    phy = itm["phy"].split("-")
    # NOTE(review): lines are elided around here in this chunk —
    # presumably a length check on phy before unpacking.
    topo, arch, nic, drv = phy
    if drv in ("dpdk", "ixgbe"):
        drv = drv.replace("_", "-")
    # mrr tests run daily (except on aws); other test types run weekly.
    # NOTE(review): the "cadence =" and "sel_topo_arch =" assignment
    # openings are not visible in this chunk.
        "weekly" if (arch == "aws" or itm["testtype"] != "mrr") else "daily"
        f"{itm['testtype'] if itm['testtype'] == 'mrr' else 'ndrpdr'}-"
        f"{cadence}-master-{topo}-{arch}"
    df_sel = data.loc[(data["job"] == sel_topo_arch)]
    # NOTE(review): "\." in this non-raw f-string is an invalid escape
    # sequence (DeprecationWarning in CPython); should be "\\." or the
    # pattern built as a raw string. The "regex =" opening is not
    # visible in this chunk.
        f"^.*{nic}.*\.{itm['framesize']}-{itm['core']}-{drv}{itm['test']}-"
        f"{'mrr' if itm['testtype'] == 'mrr' else 'ndrpdr'}$"
    # Keep only rows whose test_id matches the regex.
    df_sel["test_id"].apply(
        lambda x: True if re.search(regex, x) else False
    ].sort_values(by="start_time", ignore_index=True)
def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame,
        start: datetime, end: datetime, color: str) -> list:
    """Generate the plotly traces (samples, trend line, anomalies) for
    one test.

    :param ttype: Test type ("mrr", "ndr", "pdr" or "pdr-lat").
    :param name: Test name used in trace names and hover text.
    :param df: Trending data for this single test.
    :param start: First date (inclusive) to include on the x-axis.
    :param end: Last date (inclusive) to include on the x-axis.
    :param color: Color for the sample and trend traces.
    :type ttype: str
    :type name: str
    :type df: pd.DataFrame
    :type start: datetime
    :type end: datetime
    :type color: str
    :returns: List of plotly traces.
    :rtype: list
    """
    # Drop runs without a value for the selected test type.
    df = df.dropna(subset=[_VALUE[ttype], ])
    # NOTE(review): lines are elided throughout this chunk (hover list
    # initializations, "hover_itm = (" openings, most go.Scatter
    # keyword arguments and the final return); confirm against the
    # full file.
    x_axis = [d for d in df["start_time"] if d >= start and d <= end]
    # Classify each sample and compute the trend average/stdev series.
    anomalies, trend_avg, trend_stdev = _classify_anomalies(
        {k: v for k, v in zip(x_axis, df[_VALUE[ttype]])}
    # Build per-sample hover text (and hdrh customdata for latency).
    for _, row in df.iterrows():
            f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
            f"<prop> [{row[_UNIT[ttype]]}]: {row[_VALUE[ttype]]}<br>"
            f"{row['dut_type']}-ref: {row['dut_version']}<br>"
            f"csit-ref: {row['job']}/{row['build']}<br>"
            f"hosts: {', '.join(row['hosts'])}"
            # stdev line is only present for mrr results.
                f"stdev [{row['result_receive_rate_rate_unit']}]: "
                f"{row['result_receive_rate_rate_stdev']}<br>"
        # Fill in the <prop>/<stdev> placeholders of the template.
        hover_itm = hover_itm.replace(
            "<prop>", "latency" if ttype == "pdr-lat" else "average"
        ).replace("<stdev>", stdev)
        hover.append(hover_itm)
        if ttype == "pdr-lat":
            customdata.append(_get_hdrh_latencies(row, name))
    # Build per-point hover text for the trend line.
    for avg, stdev, (_, row) in zip(trend_avg, trend_stdev, df.iterrows()):
            f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
            f"trend [pps]: {avg}<br>"
            f"stdev [pps]: {stdev}<br>"
            f"{row['dut_type']}-ref: {row['dut_version']}<br>"
            f"csit-ref: {row['job']}/{row['build']}<br>"
            f"hosts: {', '.join(row['hosts'])}"
        if ttype == "pdr-lat":
            # Latency is reported in microseconds, not pps.
            hover_itm = hover_itm.replace("[pps]", "[us]")
        hover_trend.append(hover_itm)
        go.Scatter( # Samples
                u"symbol": u"circle",
            hoverinfo=u"text+name",
            customdata=customdata
        go.Scatter( # Trend line
            hoverinfo=u"text+name",
    # Collect coordinates and colors of anomalous points only.
    anomaly_color = list()
    for idx, anomaly in enumerate(anomalies):
        if anomaly in (u"regression", u"progression"):
            anomaly_x.append(x_axis[idx])
            anomaly_y.append(trend_avg[idx])
            anomaly_color.append(_ANOMALY_COLOR[anomaly])
    # Extra values so the colorbar spans the full 0..1 scale.
    anomaly_color.extend([0.0, 0.5, 1.0])
            name=f"{name}-anomalies",
                u"symbol": u"circle-open",
                u"color": anomaly_color,
                # Latency uses an inverted scale: lower is better.
                u"colorscale": _COLORSCALE_LAT \
                    if ttype == "pdr-lat" else _COLORSCALE_TPUT,
                    u"title": u"Circles Marking Data Classification",
                    u"titleside": u"right",
                    u"tickmode": u"array",
                    u"tickvals": [0.167, 0.500, 0.833],
                    u"ticktext": _TICK_TEXT_LAT \
                        if ttype == "pdr-lat" else _TICK_TEXT_TPUT,
def graph_trending(data: pd.DataFrame, sel: list, layout: dict,
        start: datetime, end: datetime) -> tuple:
    """Generate the trending throughput and latency figures for the
    selected tests.

    :param data: Full trending data set.
    :param sel: List of selected tests; each item is a dict with "phy",
        "framesize", "core", "test" and "testtype" keys.
        NOTE(review): annotation changed from dict to list — the value
        is iterated with enumerate and each item is indexed by string
        keys, so it cannot be a plain dict of selections.
    :param layout: Layouts of the graphs, keyed by graph name.
    :param start: First date (inclusive) to show.
    :param end: Last date (inclusive) to show.
    :type data: pd.DataFrame
    :type sel: list
    :type layout: dict
    :type start: datetime
    :type end: datetime
    :returns: Throughput figure and latency figure.
    :rtype: tuple
    """
    # NOTE(review): lines are elided in this chunk (fig initialization,
    # empty-df guard, "name =" opening, add_traces branches); confirm
    # against the full file.
    for idx, itm in enumerate(sel):
        df = select_trending_data(data, itm)
            f"{itm['phy']}-{itm['framesize']}-{itm['core']}-"
            f"{itm['test']}-{itm['testtype']}"
        # Throughput traces; colors cycle through the palette.
        traces = _generate_trending_traces(
            itm["testtype"], name, df, start, end, _COLORS[idx % len(_COLORS)]
            fig_tput = go.Figure()
        fig_tput.add_traces(traces)
        # pdr tests additionally get a latency graph.
        if itm["testtype"] == "pdr":
            traces = _generate_trending_traces(
                "pdr-lat", name, df, start, end, _COLORS[idx % len(_COLORS)]
                fig_lat = go.Figure()
            fig_lat.add_traces(traces)
    fig_tput.update_layout(layout.get("plot-trending-tput", dict()))
    fig_lat.update_layout(layout.get("plot-trending-lat", dict()))
    return fig_tput, fig_lat
def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure:
    """Generate a graph of latency percentile curves decoded from hdrh
    histograms.

    :param data: Mapping of hdrh column name -> encoded histogram
        string, plus a "name" key with the graph title (as produced by
        _get_hdrh_latencies).
    :param layout: Layouts of the graphs, keyed by graph name.
    :type data: dict
    :type layout: dict
    :returns: Plotly figure with one trace per histogram.
    :rtype: go.Figure
    """
    # NOTE(review): lines are elided throughout this chunk (fig/traces
    # initialization, try: lines, hover list handling, the go.Scatter
    # construction and the final "return fig"); confirm against the
    # full file.
        name = data.pop("name")
    except (KeyError, AttributeError):
    for idx, (lat_name, lat_hdrh) in enumerate(data.items()):
            decoded = hdrh.histogram.HdrHistogram.decode(lat_hdrh)
        except (hdrh.codec.HdrLengthException, TypeError) as err:
        for item in decoded.get_recorded_iterator():
            # The real value is "percentile".
            # For 100%, we cut that down to "x_perc" to avoid
            percentile = item.percentile_level_iterated_to
            # Clamp at PERCENTILE_MAX so 100% does not blow up the
            # 100/(100-x) transform below.
            x_perc = min(percentile, PERCENTILE_MAX)
            xaxis.append(previous_x)
            yaxis.append(item.value_iterated_to)
                f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
                f"Direction: {(u'W-E', u'E-W')[idx % 2]}<br>"
                f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
                f"Latency: {item.value_iterated_to}uSec"
            # Log-like x coordinate: distance from 100th percentile.
            next_x = 100.0 / (100.0 - x_perc)
            yaxis.append(item.value_iterated_to)
                f"<b>{_GRAPH_LAT_HDRH_DESC[lat_name]}</b><br>"
                f"Direction: {(u'W-E', u'E-W')[idx % 2]}<br>"
                f"Percentile: {prev_perc:.5f}-{percentile:.5f}%<br>"
                f"Latency: {item.value_iterated_to}uSec"
            prev_perc = percentile
            name=_GRAPH_LAT_HDRH_DESC[lat_name],
            # Forward/reverse pair of each load level shares a legend
            # group; only the reverse (odd) trace shows the legend.
            legendgroup=_GRAPH_LAT_HDRH_DESC[lat_name],
            showlegend=bool(idx % 2),
                color=_COLORS[int(idx/2)],
                width=1 if idx % 2 else 2
    fig.add_traces(traces)
    layout_hdrh = layout.get("plot-hdrh-latency", None)
        layout_hdrh["title"]["text"] = f"<b>{name}</b>"
    fig.update_layout(layout_hdrh)