# Copyright (c) 2018 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
14 """Generation of Continuous Performance Trending and Analysis.
import multiprocessing
import os
import logging
import csv
import prettytable
import pandas as pd

import plotly.offline as ploff
import plotly.graph_objs as plgo
import plotly.exceptions as plerr
from collections import OrderedDict
from datetime import datetime

from utils import split_outliers, archive_input_data, execute_command,\
    classify_anomalies, Worker

# Command to build the html format of the report
HTML_BUILDER = 'sphinx-build -v -c conf_cpta -a ' \
               '-D version="{date}" ' \
               '{working_dir} ' \
               '{build_dir}/'
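
# For illustration only: with hypothetical paths, the rendered command is
#   sphinx-build -v -c conf_cpta -a -D version="06/25/2018 10:15 UTC" \
#       _tmp/src _build/html/
# where the placeholders are filled in generate_cpta() below.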

# .css file for the html format of the report
THEME_OVERRIDES = """/* override table width restrictions */
.wy-nav-content {
    max-width: 1200px !important;
}
"""

COLORS = ["SkyBlue", "Olive", "Purple", "Coral", "Indigo", "Pink",
          "Chocolate", "Brown", "Magenta", "Cyan", "Orange", "Black",
          "Violet", "Blue", "Yellow"]


def generate_cpta(spec, data):
    """Generate all formats and versions of the Continuous Performance
    Trending and Analysis.

    :param spec: Specification read from the specification file.
    :param data: Full data set.
    :type spec: Specification
    :type data: InputData
    """

    logging.info("Generating the Continuous Performance Trending and Analysis "
                 "...")

    ret_code = _generate_all_charts(spec, data)

    cmd = HTML_BUILDER.format(
        date=datetime.utcnow().strftime('%m/%d/%Y %H:%M UTC'),
        working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
        build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
    execute_command(cmd)

    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
            css_file:
        css_file.write(THEME_OVERRIDES)

    with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE2]"], "w") as \
            css_file:
        css_file.write(THEME_OVERRIDES)

    archive_input_data(spec)

    return ret_code


def _generate_trending_traces(in_data, build_info, moving_win_size=10,
                              show_trend_line=True, name="", color=""):
    """Generate the trending traces:
     - samples,
     - trimmed moving median (trending line),
     - outliers, regress, progress.

    :param in_data: Full data set.
    :param build_info: Information about the builds.
    :param moving_win_size: Window size.
    :param show_trend_line: Show moving median (trending plot).
    :param name: Name of the plot.
    :param color: Name of the color for the plot.
    :type in_data: OrderedDict
    :type build_info: dict
    :type moving_win_size: int
    :type show_trend_line: bool
    :type name: str
    :type color: str
    :returns: Generated traces (list) and the evaluated result.
    :rtype: tuple(traces, result)
    """

    data_x = list(in_data.keys())
    data_y = list(in_data.values())

    hover_text = list()
    xaxis = list()
    for idx in data_x:
        hover_text.append("vpp-ref: {0}<br>csit-ref: mrr-daily-build-{1}".
                          format(build_info[str(idx)][1].rsplit('~', 1)[0],
                                 idx))
        # The "generated" stamp is assumed to look like "YYYYMMDD HH:MM"
        # (e.g. "20180625 10:15"); the slices pick the datetime fields:
        date = build_info[str(idx)][0]
        xaxis.append(datetime(int(date[0:4]), int(date[4:6]), int(date[6:8]),
                              int(date[9:11]), int(date[12:])))

    data_pd = pd.Series(data_y, index=xaxis)

    t_data, outliers = split_outliers(data_pd, outlier_const=1.5,
                                      window=moving_win_size)
    anomaly_classification = classify_anomalies(t_data, window=moving_win_size)
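    # classify_anomalies() is expected to return one label per sample; the
    # labels consumed below are "outlier", "regression", "normal" and
    # "progression" (they match the colorbar tick texts further down).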

    anomalies = pd.Series()
    anomalies_colors = list()
    # Position of each classification on the colorscale of the anomaly trace:
    anomaly_color = {
        "outlier": 0.0,
        "regression": 0.33,
        "normal": 0.66,
        "progression": 1.0
    }
    if anomaly_classification:
        for idx, item in enumerate(data_pd.items()):
            if anomaly_classification[idx] in \
                    ("outlier", "regression", "progression"):
                anomalies = anomalies.append(pd.Series([item[1], ],
                                                       index=[item[0], ]))
                anomalies_colors.append(
                    anomaly_color[anomaly_classification[idx]])
        # Make sure the whole colorscale is always present:
        anomalies_colors.extend([0.0, 0.33, 0.66, 1.0])

    # Create traces
    trace_samples = plgo.Scatter(
        x=xaxis,
        y=data_y,
        mode='markers',
        marker={"size": 5, "color": color, "symbol": "circle"},
        name="{name}-thput".format(name=name),
        text=hover_text,
        hoverinfo="x+y+text+name"
    )
    traces = [trace_samples, ]

    trace_anomalies = plgo.Scatter(
        x=anomalies.keys(), y=anomalies.values, mode='markers',
        hoverinfo="none", showlegend=True,
        name="{name}-anomalies".format(name=name),
        marker={
            "size": 15,
            "symbol": "circle-open",
            "color": anomalies_colors,
            # Four discrete bands: grey, red, white, green:
            "colorscale": [[0.00, "grey"], [0.25, "grey"],
                           [0.25, "red"], [0.50, "red"],
                           [0.50, "white"], [0.75, "white"],
                           [0.75, "green"], [1.00, "green"]],
            "showscale": True,
            "colorbar": {
                "title": "Circles Marking Data Classification",
                "titleside": 'right',
                "tickmode": 'array',
                "tickvals": [0.125, 0.375, 0.625, 0.875],
                "ticktext": ["Outlier", "Regression", "Normal", "Progression"]
            }
        }
    )
    traces.append(trace_anomalies)

    if show_trend_line:
        data_trend = t_data.rolling(window=moving_win_size,
                                    min_periods=2).median()
        trace_trend = plgo.Scatter(
            x=data_trend.keys(),
            y=data_trend.tolist(),
            mode='lines',
            line={"width": 1, "color": color},
            name='{name}-trend'.format(name=name)
        )
        traces.append(trace_trend)

    return traces, anomaly_classification[-1]
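
# A minimal, illustrative call (hypothetical data; the build numbers must be
# present as string keys in build_info):
#
#     in_data = OrderedDict([(1, 12.5e6), (2, 12.7e6), (3, 9.8e6)])
#     build_info = {"1": ("20180623 10:15", "18.07-rc0~121~b11"),
#                   "2": ("20180624 10:15", "18.07-rc0~122~b12"),
#                   "3": ("20180625 10:15", "18.07-rc0~123~b13")}
#     traces, last_classification = _generate_trending_traces(
#         in_data, build_info, name="64b-1t1c-ip4base", color="SkyBlue")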


def _generate_all_charts(spec, input_data):
    """Generate all charts specified in the specification file.

    :param spec: Specification.
    :param input_data: Full data set.
    :type spec: Specification
    :type input_data: InputData
    """

    def _generate_chart(_, data_q, graph):
        """Generates the chart.
        """

        logs = list()

        logging.info("  Generating the chart '{0}' ...".
                     format(graph.get("title", "")))
        logs.append(("INFO", "  Generating the chart '{0}' ...".
                     format(graph.get("title", ""))))

        job_name = spec.cpta["data"].keys()[0]

        csv_tbl = list()
        res = list()

        # Transform the data:
        logs.append(("INFO", "    Creating the data set for the {0} '{1}'.".
                     format(graph.get("type", ""), graph.get("title", ""))))
        data = input_data.filter_data(graph, continue_on_error=True)
        if data is None:
            logging.error("No data.")
            return

        chart_data = dict()
        for job in data:
            for index, bld in job.items():
                for test_name, test in bld.items():
                    if chart_data.get(test_name, None) is None:
                        chart_data[test_name] = OrderedDict()
                    try:
                        chart_data[test_name][int(index)] = \
                            test["result"]["throughput"]
                    except (KeyError, TypeError):
                        pass

        # Add items to the csv table:
        for tst_name, tst_data in chart_data.items():
            tst_lst = list()
            for bld in builds_lst:
                itm = tst_data.get(int(bld), '')
                tst_lst.append(str(itm))
            csv_tbl.append("{0},".format(tst_name) + ",".join(tst_lst) + '\n')
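        # A produced row looks like, e.g. (hypothetical test and values):
        #   "tests...ip4base-mrr,12500000.0,12700000.0,,9800000.0\n"
        # with one column per build in builds_lst; missing builds stay empty.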

        # Generate traces:
        traces = list()
        win_size = 14
        index = 0
        for test_name, test_data in chart_data.items():
            if not test_data:
                logs.append(("WARNING", "No data for the test '{0}'".
                             format(test_name)))
                continue
            test_name = test_name.split('.')[-1]
            trace, rslt = _generate_trending_traces(
                test_data,
                build_info=build_info,
                moving_win_size=win_size,
                name='-'.join(test_name.split('-')[3:-1]),
                color=COLORS[index])
            traces.extend(trace)
            res.append(rslt)
            index += 1

        # Generate the chart:
        graph["layout"]["xaxis"]["title"] = \
            graph["layout"]["xaxis"]["title"].format(job=job_name)
        name_file = "{0}-{1}{2}".format(spec.cpta["output-file"],
                                        graph["output-file-name"],
                                        spec.cpta["output-file-type"])

        logs.append(("INFO", "    Writing the file '{0}' ...".
                     format(name_file)))
        plpl = plgo.Figure(data=traces, layout=graph["layout"])
        try:
            ploff.plot(plpl, show_link=False, auto_open=False,
                       filename=name_file)
        except plerr.PlotlyEmptyDataError:
            logs.append(("WARNING", "No data for the plot. Skipped."))
325 "csv_table": csv_tbl,

    job_name = spec.cpta["data"].keys()[0]

    builds_lst = list()
    for build in spec.input["builds"][job_name]:
        status = build["status"]
        if status not in ("failed", "not found"):
            builds_lst.append(str(build["build"]))
339 # Get "build ID": "date" dict:
340 build_info = OrderedDict()
341 for build in builds_lst:
343 build_info[build] = (
344 input_data.metadata(job_name, build)["generated"][:14],
345 input_data.metadata(job_name, build)["version"]
348 build_info[build] = ("", "")
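    # Each entry is assumed to be a ("generated" stamp, VPP version) tuple,
    # e.g. ("20180625 10:15", "18.07-rc0~123~b45"); the stamp is later
    # sliced into datetime fields in _generate_trending_traces().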

    work_queue = multiprocessing.JoinableQueue()
    manager = multiprocessing.Manager()
    data_queue = manager.Queue()
    cpus = multiprocessing.cpu_count()

    workers = list()
    for cpu in range(cpus):
        worker = Worker(work_queue,
                        data_queue,
                        _generate_chart)
        worker.daemon = True
        worker.start()
        workers.append(worker)
        os.system("taskset -p -c {0} {1} > /dev/null 2>&1".
                  format(cpu, worker.pid))
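    # Each worker is pinned to its own core via taskset so chart jobs do not
    # contend for one CPU; the command's output and return code are discarded.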

    for chart in spec.cpta["plots"]:
        work_queue.put((chart, ))
    # Wait until all queued charts are processed:
    work_queue.join()

    anomaly_classifications = list()
374 header = "Build Number:," + ",".join(builds_lst) + '\n'
375 csv_table.append(header)
376 build_dates = [x[0] for x in build_info.values()]
377 header = "Build Date:," + ",".join(build_dates) + '\n'
378 csv_table.append(header)
379 vpp_versions = [x[1] for x in build_info.values()]
380 header = "VPP Version:," + ",".join(vpp_versions) + '\n'
381 csv_table.append(header)
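    # With the assumed metadata format, the header block renders as, e.g.:
    #   Build Number:,1,2,3
    #   Build Date:,20180623 10:15,20180624 10:15,20180625 10:15
    #   VPP Version:,18.07-rc0~121,18.07-rc0~122,18.07-rc0~123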

    while not data_queue.empty():
        result = data_queue.get()

        anomaly_classifications.extend(result["results"])
        csv_table.extend(result["csv_table"])

        for item in result["logs"]:
            if item[0] == "INFO":
                logging.info(item[1])
            elif item[0] == "ERROR":
                logging.error(item[1])
            elif item[0] == "DEBUG":
                logging.debug(item[1])
            elif item[0] == "CRITICAL":
                logging.critical(item[1])
            elif item[0] == "WARNING":
                logging.warning(item[1])
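    # The workers only collect (level, message) tuples; replaying them here
    # keeps the output of all processes in the parent's log.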

    # Terminate all workers
    for worker in workers:
        worker.terminate()
        worker.join()

    file_name = spec.cpta["output-file"] + "-trending"
    with open("{0}.csv".format(file_name), 'w') as file_handler:
        file_handler.writelines(csv_table)

    txt_table = None
    with open("{0}.csv".format(file_name), 'rb') as csv_file:
        csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
        line_nr = 0
        for row in csv_content:
            if txt_table is None:
                txt_table = prettytable.PrettyTable(row)
            else:
                if line_nr > 1:
                    # Data rows: show throughput in Mpps instead of pps:
                    for idx, item in enumerate(row):
                        try:
                            row[idx] = str(round(float(item) / 1000000, 2))
                        except ValueError:
                            pass
                try:
                    txt_table.add_row(row)
                except Exception as err:
                    logging.warning("Error occurred while generating TXT "
                                    "table:\n{0}".format(err))
            line_nr += 1
        txt_table.align["Build Number:"] = "l"
    with open("{0}.txt".format(file_name), "w") as txt_file:
        txt_file.write(str(txt_table))

    # Evaluate the overall result:
    if anomaly_classifications:
        result = "PASS"
        for classification in anomaly_classifications:
            if classification in ("regression", "outlier"):
                result = "FAIL"
                break
    else:
        result = "FAIL"

    logging.info("Partial results: {0}".format(anomaly_classifications))
    logging.info("Result: {0}".format(result))

    return result