1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Algorithms to generate tables.
22 from string import replace
24 from errors import PresentationError
25 from utils import mean, stdev, relative_change, remove_outliers
def generate_tables(spec, data):
    """Generate all tables specified in the specification file.

    :param spec: Specification read from the specification file.
    :param data: Data to process.
    :type spec: Specification
    :type data: InputData
    """

    logging.info("Generating the tables ...")
    for table in spec.tables:
        # Resolve the algorithm by a plain namespace lookup instead of
        # eval(): eval() would execute arbitrary expressions coming from the
        # specification file, and its NameError handling also masked
        # NameErrors raised *inside* the dispatched algorithm.
        generator = globals().get(table["algorithm"])
        if generator is None:
            logging.error("The algorithm '{0}' is not defined.".
                          format(table["algorithm"]))
            continue
        generator(table, data)
    logging.info("Done.")
def table_details(table, input_data):
    """Generate the table(s) with algorithm: table_detailed_test_results
    specified in the specification file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """

    logging.info("  Generating the table {0} ...".
                 format(table.get("title", "")))

    # Transform the data
    data = input_data.filter_data(table)

    # Prepare the header of the tables: column titles, CSV-quoted
    # (embedded double quotes are doubled per RFC 4180).
    header = list()
    for column in table["columns"]:
        header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))

    # Generate the data for the table according to the model in the table
    # specification.  Only the first job and its first build are used.
    # list(...) around keys() keeps this working on both Python 2 and 3;
    # the original dict.keys()[0] is Python-2-only.
    job = list(table["data"].keys())[0]
    build = str(table["data"][job][0])
    try:
        suites = input_data.suites(job, build)
    except KeyError:
        logging.error("  No data available. The table will not be generated.")
        return None

    for suite_longname, suite in suites.items():
        # Generate data
        suite_name = suite["name"]
        table_lst = list()
        for test in data[job][build].keys():
            if data[job][build][test]["parent"] in suite_name:
                row_lst = list()
                for column in table["columns"]:
                    try:
                        col_data = str(data[job][build][test][column["data"].
                                       split(" ")[1]]).replace('"', '""')
                        if column["data"].split(" ")[1] in ("vat-history",
                                                            "show-run"):
                            # Drop the leading line break and wrap the cell
                            # in |prein| / |preout| markers for rst rendering.
                            col_data = col_data.replace(" |br| ", "", 1)
                            col_data = " |prein| {0} |preout| ".\
                                format(col_data[:-5])
                        row_lst.append('"{0}"'.format(col_data))
                    except KeyError:
                        row_lst.append("No data")
                table_lst.append(row_lst)

        # Write the data to file
        if table_lst:
            file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
                                            table["output-file-ext"])
            logging.info("      Writing file: '{}'".format(file_name))
            with open(file_name, "w") as file_handler:
                file_handler.write(",".join(header) + "\n")
                for item in table_lst:
                    file_handler.write(",".join(item) + "\n")

    logging.info("  Done.")
def table_merged_details(table, input_data):
    """Generate the table(s) with algorithm: table_merged_details
    specified in the specification file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """

    logging.info("  Generating the table {0} ...".
                 format(table.get("title", "")))

    # Transform the data: merge all builds into one data set.
    data = input_data.filter_data(table)
    data = input_data.merge_data(data)
    data.sort_index(inplace=True)

    suites = input_data.filter_data(table, data_set="suites")
    suites = input_data.merge_data(suites)

    # Prepare the header of the tables: column titles, CSV-quoted
    # (embedded double quotes are doubled per RFC 4180).
    header = list()
    for column in table["columns"]:
        header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))

    # items() instead of Python-2-only iteritems() keeps this working on
    # both Python 2 and 3.
    for _, suite in suites.items():
        # Generate data
        suite_name = suite["name"]
        table_lst = list()
        for test in data.keys():
            if data[test]["parent"] in suite_name:
                row_lst = list()
                for column in table["columns"]:
                    try:
                        col_data = str(data[test][column["data"].
                                       split(" ")[1]]).replace('"', '""')
                        if column["data"].split(" ")[1] in ("vat-history",
                                                            "show-run"):
                            # Drop the leading line break and wrap the cell
                            # in |prein| / |preout| markers for rst rendering.
                            col_data = col_data.replace(" |br| ", "", 1)
                            col_data = " |prein| {0} |preout| ".\
                                format(col_data[:-5])
                        row_lst.append('"{0}"'.format(col_data))
                    except KeyError:
                        row_lst.append("No data")
                table_lst.append(row_lst)

        # Write the data to file
        if table_lst:
            file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
                                            table["output-file-ext"])
            logging.info("      Writing file: '{}'".format(file_name))
            with open(file_name, "w") as file_handler:
                file_handler.write(",".join(header) + "\n")
                for item in table_lst:
                    file_handler.write(",".join(item) + "\n")

    logging.info("  Done.")
def table_performance_improvements(table, input_data):
    """Generate the table(s) with algorithm: table_performance_improvements
    specified in the specification file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """

    def _write_line_to_file(file_handler, data):
        """Write a line to the .csv file.

        :param file_handler: File handler for the csv file. It must be open
            for writing.
        :param data: Item to be written to the file.
        :type file_handler: BinaryIO
        :type data: list
        """

        line_lst = list()
        for item in data:
            if isinstance(item["data"], str):
                # Remove -?drdisc from the end (8 chars: "-ndrdisc"/"-pdrdisc")
                if item["data"].endswith("drdisc"):
                    item["data"] = item["data"][:-8]
                line_lst.append(item["data"])
            elif isinstance(item["data"], float):
                line_lst.append("{:.1f}".format(item["data"]))
            elif item["data"] is None:
                line_lst.append("")
        file_handler.write(",".join(line_lst) + "\n")

    logging.info("  Generating the table {0} ...".
                 format(table.get("title", "")))

    # Read the template
    file_name = table.get("template", None)
    if file_name:
        try:
            tmpl = _read_csv_template(file_name)
        except PresentationError:
            logging.error("  The template '{0}' does not exist. Skipping the "
                          "table.".format(file_name))
            return None
    else:
        logging.error("The template is not defined. Skipping the table.")
        return None

    # Transform the data
    data = input_data.filter_data(table)

    # Prepare the header of the tables
    header = list()
    for column in table["columns"]:
        header.append(column["title"])

    # Generate the data for the table according to the model in the table
    # specification
    tbl_lst = list()
    for tmpl_item in tmpl:
        tbl_item = list()
        for column in table["columns"]:
            cmd = column["data"].split(" ")[0]
            args = column["data"].split(" ")[1:]
            if cmd == "template":
                try:
                    val = float(tmpl_item[int(args[0])])
                except ValueError:
                    val = tmpl_item[int(args[0])]
                tbl_item.append({"data": val})
            elif cmd == "data":
                job = args[0]
                operation = args[1]
                data_lst = list()
                for build in data[job]:
                    try:
                        data_lst.append(float(build[tmpl_item[0]]
                                              ["throughput"]["value"]))
                    except (KeyError, TypeError):
                        # No valid throughput for this build; skip it.
                        pass
                if data_lst:
                    # Resolve the operation (e.g. "mean") by a plain
                    # namespace lookup instead of eval(): eval() would
                    # execute arbitrary expressions from the specification.
                    tbl_item.append({"data": (globals()[operation](data_lst)) /
                                             1000000})
                else:
                    tbl_item.append({"data": None})
            elif cmd == "operation":
                operation = args[0]
                try:
                    nr1 = float(tbl_item[int(args[1])]["data"])
                    nr2 = float(tbl_item[int(args[2])]["data"])
                    if nr1 and nr2:
                        # Same eval() -> namespace-lookup replacement as above
                        # (e.g. operation == "relative_change").
                        tbl_item.append(
                            {"data": globals()[operation](nr1, nr2)})
                    else:
                        tbl_item.append({"data": None})
                except (IndexError, ValueError, TypeError):
                    logging.error("No data for {0}".format(tbl_item[0]["data"]))
                    tbl_item.append({"data": None})
            else:
                logging.error("Not supported command {0}. Skipping the table.".
                              format(cmd))
                return None
        tbl_lst.append(tbl_item)

    # Sort the table according to the relative change
    tbl_lst.sort(key=lambda rel: rel[-1]["data"], reverse=True)

    # Create the tables and write them to the files
    file_names = [
        "{0}_ndr_top{1}".format(table["output-file"], table["output-file-ext"]),
        "{0}_pdr_top{1}".format(table["output-file"], table["output-file-ext"]),
        "{0}_ndr_low{1}".format(table["output-file"], table["output-file-ext"]),
        "{0}_pdr_low{1}".format(table["output-file"], table["output-file-ext"])
    ]

    for file_name in file_names:
        logging.info("    Writing the file '{0}'".format(file_name))
        with open(file_name, "w") as file_handler:
            file_handler.write(",".join(header) + "\n")
            for item in tbl_lst:
                if isinstance(item[-1]["data"], float):
                    rel_change = round(item[-1]["data"], 1)
                else:
                    rel_change = item[-1]["data"]
                # Split the rows into top (>= 10 % improvement) and low
                # tables, separately for NDR and PDR test cases.
                if "ndr_top" in file_name \
                        and "ndr" in item[0]["data"] \
                        and rel_change >= 10.0:
                    _write_line_to_file(file_handler, item)
                elif "pdr_top" in file_name \
                        and "pdr" in item[0]["data"] \
                        and rel_change >= 10.0:
                    _write_line_to_file(file_handler, item)
                elif "ndr_low" in file_name \
                        and "ndr" in item[0]["data"] \
                        and rel_change < 10.0:
                    _write_line_to_file(file_handler, item)
                elif "pdr_low" in file_name \
                        and "pdr" in item[0]["data"] \
                        and rel_change < 10.0:
                    _write_line_to_file(file_handler, item)

    logging.info("  Done.")
321 def _read_csv_template(file_name):
322 """Read the template from a .csv file.
324 :param file_name: Name / full path / relative path of the file to read.
326 :returns: Data from the template as list (lines) of lists (items on line).
328 :raises: PresentationError if it is not possible to read the file.
332 with open(file_name, 'r') as csv_file:
334 for line in csv_file:
335 tmpl_data.append(line[:-1].split(","))
337 except IOError as err:
338 raise PresentationError(str(err), level="ERROR")
341 def table_performance_comparison(table, input_data):
342 """Generate the table(s) with algorithm: table_performance_comparison
343 specified in the specification file.
345 :param table: Table to generate.
346 :param input_data: Data to process.
347 :type table: pandas.Series
348 :type input_data: InputData
351 logging.info(" Generating the table {0} ...".
352 format(table.get("title", "")))
355 data = input_data.filter_data(table)
357 # Prepare the header of the tables
359 header = ["Test case",
360 "{0} Throughput [Mpps]".format(table["reference"]["title"]),
361 "{0} stdev [Mpps]".format(table["reference"]["title"]),
362 "{0} Throughput [Mpps]".format(table["compare"]["title"]),
363 "{0} stdev [Mpps]".format(table["compare"]["title"]),
365 header_str = ",".join(header) + "\n"
366 except (AttributeError, KeyError) as err:
367 logging.error("The model is invalid, missing parameter: {0}".
371 # Prepare data to the table:
373 for job, builds in table["reference"]["data"].items():
375 for tst_name, tst_data in data[job][str(build)].iteritems():
376 if tbl_dict.get(tst_name, None) is None:
377 name = "{0}-{1}".format(tst_data["parent"].split("-")[0],
378 "-".join(tst_data["name"].
380 tbl_dict[tst_name] = {"name": name,
384 tbl_dict[tst_name]["ref-data"].\
385 append(tst_data["throughput"]["value"])
387 pass # No data in output.xml for this test
389 for job, builds in table["compare"]["data"].items():
391 for tst_name, tst_data in data[job][str(build)].iteritems():
393 tbl_dict[tst_name]["cmp-data"].\
394 append(tst_data["throughput"]["value"])
398 tbl_dict.pop(tst_name, None)
401 for tst_name in tbl_dict.keys():
402 item = [tbl_dict[tst_name]["name"], ]
403 if tbl_dict[tst_name]["ref-data"]:
404 item.append(round(mean(remove_outliers(
405 tbl_dict[tst_name]["ref-data"],
406 table["outlier-const"])) / 1000000, 2))
407 item.append(round(stdev(remove_outliers(
408 tbl_dict[tst_name]["ref-data"],
409 table["outlier-const"])) / 1000000, 2))
411 item.extend([None, None])
412 if tbl_dict[tst_name]["cmp-data"]:
413 item.append(round(mean(remove_outliers(
414 tbl_dict[tst_name]["cmp-data"],
415 table["outlier-const"])) / 1000000, 2))
416 item.append(round(stdev(remove_outliers(
417 tbl_dict[tst_name]["cmp-data"],
418 table["outlier-const"])) / 1000000, 2))
420 item.extend([None, None])
421 if item[1] is not None and item[3] is not None:
422 item.append(int(relative_change(float(item[1]), float(item[3]))))
426 # Sort the table according to the relative change
427 tbl_lst.sort(key=lambda rel: rel[-1], reverse=True)
431 tbl_names = ["{0}-ndr-1t1c-full{1}".format(table["output-file"],
432 table["output-file-ext"]),
433 "{0}-ndr-2t2c-full{1}".format(table["output-file"],
434 table["output-file-ext"]),
435 "{0}-ndr-4t4c-full{1}".format(table["output-file"],
436 table["output-file-ext"]),
437 "{0}-pdr-1t1c-full{1}".format(table["output-file"],
438 table["output-file-ext"]),
439 "{0}-pdr-2t2c-full{1}".format(table["output-file"],
440 table["output-file-ext"]),
441 "{0}-pdr-4t4c-full{1}".format(table["output-file"],
442 table["output-file-ext"])
444 for file_name in tbl_names:
445 logging.info(" Writing file: '{}'".format(file_name))
446 with open(file_name, "w") as file_handler:
447 file_handler.write(header_str)
449 if (file_name.split("-")[-3] in test[0] and # NDR vs PDR
450 file_name.split("-")[-2] in test[0]): # cores
451 test[0] = "-".join(test[0].split("-")[:-1])
452 file_handler.write(",".join([str(item) for item in test]) +
456 tbl_names_txt = ["{0}-ndr-1t1c-full.txt".format(table["output-file"]),
457 "{0}-ndr-2t2c-full.txt".format(table["output-file"]),
458 "{0}-ndr-4t4c-full.txt".format(table["output-file"]),
459 "{0}-pdr-1t1c-full.txt".format(table["output-file"]),
460 "{0}-pdr-2t2c-full.txt".format(table["output-file"]),
461 "{0}-pdr-4t4c-full.txt".format(table["output-file"])
464 for i, txt_name in enumerate(tbl_names_txt):
466 logging.info(" Writing file: '{}'".format(txt_name))
467 with open(tbl_names[i], 'rb') as csv_file:
468 csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
469 for row in csv_content:
470 if txt_table is None:
471 txt_table = prettytable.PrettyTable(row)
473 txt_table.add_row(row)
474 txt_table.align["Test case"] = "l"
475 with open(txt_name, "w") as txt_file:
476 txt_file.write(str(txt_table))
478 # Selected tests in csv:
479 input_file = "{0}-ndr-1t1c-full{1}".format(table["output-file"],
480 table["output-file-ext"])
481 with open(input_file, "r") as in_file:
486 output_file = "{0}-ndr-1t1c-top{1}".format(table["output-file"],
487 table["output-file-ext"])
488 logging.info(" Writing file: '{}'".format(output_file))
489 with open(output_file, "w") as out_file:
490 out_file.write(header_str)
491 for i, line in enumerate(lines[1:]):
492 if i == table["nr-of-tests-shown"]:
496 output_file = "{0}-ndr-1t1c-bottom{1}".format(table["output-file"],
497 table["output-file-ext"])
498 logging.info(" Writing file: '{}'".format(output_file))
499 with open(output_file, "w") as out_file:
500 out_file.write(header_str)
501 for i, line in enumerate(lines[-1:0:-1]):
502 if i == table["nr-of-tests-shown"]:
506 input_file = "{0}-pdr-1t1c-full{1}".format(table["output-file"],
507 table["output-file-ext"])
508 with open(input_file, "r") as in_file:
513 output_file = "{0}-pdr-1t1c-top{1}".format(table["output-file"],
514 table["output-file-ext"])
515 logging.info(" Writing file: '{}'".format(output_file))
516 with open(output_file, "w") as out_file:
517 out_file.write(header_str)
518 for i, line in enumerate(lines[1:]):
519 if i == table["nr-of-tests-shown"]:
523 output_file = "{0}-pdr-1t1c-bottom{1}".format(table["output-file"],
524 table["output-file-ext"])
525 logging.info(" Writing file: '{}'".format(output_file))
526 with open(output_file, "w") as out_file:
527 out_file.write(header_str)
528 for i, line in enumerate(lines[-1:0:-1]):
529 if i == table["nr-of-tests-shown"]: