1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Algorithms to generate tables.
22 from string import replace
23 from pprint import pformat
25 from errors import PresentationError
26 from utils import mean, stdev, relative_change
# NOTE(review): numbered excerpt -- gaps in the embedded line numbers
# (31, 35-37, 40, 42) mean original source lines are missing here,
# presumably the docstring closer and a try/except guarding the eval()
# dispatch. Confirm against the complete file before editing.
# Purpose: iterate the tables declared in the specification and dispatch
# each one to the generator function named in table["algorithm"].
29 def generate_tables(spec, data):
30 """Generate all tables specified in the specification file.
32 :param spec: Specification read from the specification file.
33 :param data: Data to process.
34 :type spec: Specification
38 logging.info("Generating the tables ...")
39 for table in spec.tables:
# eval() resolves the algorithm name to a function in this module; only
# safe because the specification file is trusted input.
41 eval(table["algorithm"])(table, data)
# Logged when the named algorithm cannot be resolved (the except line
# itself is one of the missing lines -- TODO confirm).
43 logging.error("The algorithm '{0}' is not defined.".
44 format(table["algorithm"]))
# NOTE(review): numbered excerpt with interior lines absent (e.g. the
# initialisations of `header`, `table_lst`, `row_lst`, and the try/except
# around per-column data extraction). Annotations below describe only the
# visible code; confirm details against the complete file.
# Purpose: write one CSV file of detailed test results per suite, for a
# single (job, build) pair taken from the table specification.
48 def table_details(table, input_data):
49 """Generate the table(s) with algorithm: table_detailed_test_results
50 specified in the specification file.
52 :param table: Table to generate.
53 :param input_data: Data to process.
54 :type table: pandas.Series
55 :type input_data: InputData
58 logging.info("  Generating the table {0} ...".
59 format(table.get("title", "")))
62 data = input_data.filter_data(table)
64 # Prepare the header of the tables
# CSV-escape column titles by doubling embedded double quotes.
66 for column in table["columns"]:
67 header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))
69 # Generate the data for the table according to the model in the table
# Only the first job and its first build are used -- presumably by
# design for this algorithm; TODO confirm.
71 job = table["data"].keys()[0]
72 build = str(table["data"][job][0])
74 suites = input_data.suites(job, build)
76 logging.error("      No data available. The table will not be generated.")
79 for suite_longname, suite in suites.iteritems():
81 suite_name = suite["name"]
83 for test in data[job][build].keys():
84 if data[job][build][test]["parent"] in suite_name:
86 for column in table["columns"]:
# column["data"] looks like "<cmd> <field>"; the second token names the
# test-data field to emit. Values are CSV-escaped ('"' -> '""').
88 col_data = str(data[job][build][test][column["data"].
89 split(" ")[1]]).replace('"', '""')
90 if column["data"].split(" ")[1] in ("vat-history",
# string.replace (Python 2 module function) strips " |br| " markers and
# wraps the value in |prein|/|preout| rST substitution markers.
92 col_data = replace(col_data, " |br| ", "",
94 col_data = " |prein| {0} |preout| ".\
96 row_lst.append('"{0}"'.format(col_data))
# Fallback when the field lookup fails (except line not visible here).
98 row_lst.append("No data")
99 table_lst.append(row_lst)
101 # Write the data to file
103 file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
104 table["output-file-ext"])
105 logging.info("      Writing file: '{}'".format(file_name))
106 with open(file_name, "w") as file_handler:
107 file_handler.write(",".join(header) + "\n")
108 for item in table_lst:
109 file_handler.write(",".join(item) + "\n")
111 logging.info("  Done.")
# NOTE(review): numbered excerpt with interior lines absent (header/row
# list initialisations, try/except lines). Same structure as
# table_details above, but operating on data merged across builds.
114 def table_merged_details(table, input_data):
115 """Generate the table(s) with algorithm: table_merged_details
116 specified in the specification file.
118 :param table: Table to generate.
119 :param input_data: Data to process.
120 :type table: pandas.Series
121 :type input_data: InputData
124 logging.info("  Generating the table {0} ...".
125 format(table.get("title", "")))
# Merge filtered data across jobs/builds into one flat mapping keyed by
# test name, then sort for deterministic output order.
128 data = input_data.filter_data(table)
129 data = input_data.merge_data(data)
130 data.sort_index(inplace=True)
132 suites = input_data.filter_data(table, data_set="suites")
133 suites = input_data.merge_data(suites)
135 # Prepare the header of the tables
# CSV-escape column titles by doubling embedded double quotes.
137 for column in table["columns"]:
138 header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))
140 for _, suite in suites.iteritems():
142 suite_name = suite["name"]
144 for test in data.keys():
145 if data[test]["parent"] in suite_name:
147 for column in table["columns"]:
# Second token of column["data"] names the per-test field to emit.
149 col_data = str(data[test][column["data"].
150 split(" ")[1]]).replace('"', '""')
151 if column["data"].split(" ")[1] in ("vat-history",
# Strip " |br| " markers; [:-5] presumably drops a trailing marker
# fragment before wrapping in |prein|/|preout| -- TODO confirm.
153 col_data = replace(col_data, " |br| ", "",
155 col_data = " |prein| {0} |preout| ".\
156 format(col_data[:-5])
157 row_lst.append('"{0}"'.format(col_data))
159 row_lst.append("No data")
160 table_lst.append(row_lst)
162 # Write the data to file
164 file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
165 table["output-file-ext"])
166 logging.info("      Writing file: '{}'".format(file_name))
167 with open(file_name, "w") as file_handler:
168 file_handler.write(",".join(header) + "\n")
169 for item in table_lst:
170 file_handler.write(",".join(item) + "\n")
172 logging.info("  Done.")
# NOTE(review): numbered excerpt; many interior lines are absent (the
# `operation = args[0]` assignments, try/except lines, list
# initialisations, the divisor on line 257, the file_names list opener).
# Annotations describe only what is visible.
# Purpose: fill a CSV template with measured throughput values and
# computed deltas, then split rows into top/low NDR/PDR output files
# using a 10.0 relative-change threshold.
175 def table_performance_improvements(table, input_data):
176 """Generate the table(s) with algorithm: table_performance_improvements
177 specified in the specification file.
179 :param table: Table to generate.
180 :param input_data: Data to process.
181 :type table: pandas.Series
182 :type input_data: InputData
# Nested helper: serialize one table row (list of {"data": ...} cells)
# as a CSV line. str -> verbatim, float -> 1 decimal place, None ->
# (handling not visible here).
185 def _write_line_to_file(file_handler, data):
186 """Write a line to the .csv file.
188 :param file_handler: File handler for the csv file. It must be open for
190 :param data: Item to be written to the file.
191 :type file_handler: BinaryIO
197 if isinstance(item["data"], str):
198 line_lst.append(item["data"])
199 elif isinstance(item["data"], float):
200 line_lst.append("{:.1f}".format(item["data"]))
201 elif item["data"] is None:
203 file_handler.write(",".join(line_lst) + "\n")
205 logging.info("  Generating the table {0} ...".
206 format(table.get("title", "")))
# Read the CSV template; skip the whole table if it is missing/invalid.
209 file_name = table.get("template", None)
212 tmpl = _read_csv_template(file_name)
213 except PresentationError:
214 logging.error("  The template '{0}' does not exist. Skipping the "
215 "table.".format(file_name))
218 logging.error("The template is not defined. Skipping the table.")
222 data = input_data.filter_data(table)
224 # Prepare the header of the tables
226 for column in table["columns"]:
227 header.append(column["title"])
229 # Generate the data for the table according to the model in the table
232 for tmpl_item in tmpl:
234 for column in table["columns"]:
# column["data"] = "<cmd> <args...>"; cmd selects how the cell is built.
235 cmd = column["data"].split(" ")[0]
236 args = column["data"].split(" ")[1:]
237 if cmd == "template":
# Copy a template column; numeric conversion attempted first, the
# non-float fallback (line 241) keeps the raw string.
239 val = float(tmpl_item[int(args[0])])
241 val = tmpl_item[int(args[0])]
242 tbl_item.append({"data": val})
# (cmd == "data" branch opener not visible.) Collect throughput values
# for this test across all builds of the job.
248 for build in data[job]:
250 data_lst.append(float(build[tmpl_item[0]]
251 ["throughput"]["value"]))
252 except (KeyError, TypeError):
# eval(operation) applies the aggregation named in the spec (e.g. mean);
# trusted-spec input only. Divisor on the following original line is not
# visible here.
256 tbl_item.append({"data": (eval(operation)(data_lst)) /
259 tbl_item.append({"data": None})
260 elif cmd == "operation":
# Apply a named binary operation (again via eval) to two earlier cells.
263 nr1 = float(tbl_item[int(args[1])]["data"])
264 nr2 = float(tbl_item[int(args[2])]["data"])
266 tbl_item.append({"data": eval(operation)(nr1, nr2)})
268 tbl_item.append({"data": None})
269 except (IndexError, ValueError, TypeError):
270 logging.error("No data for {0}".format(tbl_item[1]["data"]))
271 tbl_item.append({"data": None})
274 logging.error("Not supported command {0}. Skipping the table.".
277 tbl_lst.append(tbl_item)
279 # Sort the table according to the relative change
# Last cell of each row holds the relative change; sort descending.
280 tbl_lst.sort(key=lambda rel: rel[-1]["data"], reverse=True)
282 # Create the tables and write them to the files
284 "{0}_ndr_top{1}".format(table["output-file"], table["output-file-ext"]),
285 "{0}_pdr_top{1}".format(table["output-file"], table["output-file-ext"]),
286 "{0}_ndr_low{1}".format(table["output-file"], table["output-file-ext"]),
287 "{0}_pdr_low{1}".format(table["output-file"], table["output-file-ext"])
290 for file_name in file_names:
291 logging.info("      Writing the file '{0}'".format(file_name))
292 with open(file_name, "w") as file_handler:
293 file_handler.write(",".join(header) + "\n")
# Route each row: NDR vs PDR (matched against item[1], the test name)
# and top (rel_change >= 10.0) vs low (< 10.0).
295 if isinstance(item[-1]["data"], float):
296 rel_change = round(item[-1]["data"], 1)
298 rel_change = item[-1]["data"]
299 if "ndr_top" in file_name \
300 and "ndr" in item[1]["data"] \
301 and rel_change >= 10.0:
302 _write_line_to_file(file_handler, item)
303 elif "pdr_top" in file_name \
304 and "pdr" in item[1]["data"] \
305 and rel_change >= 10.0:
306 _write_line_to_file(file_handler, item)
307 elif "ndr_low" in file_name \
308 and "ndr" in item[1]["data"] \
309 and rel_change < 10.0:
310 _write_line_to_file(file_handler, item)
311 elif "pdr_low" in file_name \
312 and "pdr" in item[1]["data"] \
313 and rel_change < 10.0:
314 _write_line_to_file(file_handler, item)
316 logging.info("  Done.")
# NOTE(review): numbered excerpt; the `tmpl_data` initialisation, the
# `try:` opener, and the `return` statement are among the lines absent
# from this view -- confirm against the complete file.
# Purpose: load a CSV template, splitting each line on commas; line[:-1]
# drops the trailing newline before the split.
319 def _read_csv_template(file_name):
320 """Read the template from a .csv file.
322 :param file_name: Name / full path / relative path of the file to read.
324 :returns: Data from the template as list (lines) of lists (items on line).
326 :raises: PresentationError if it is not possible to read the file.
330 with open(file_name, 'r') as csv_file:
332 for line in csv_file:
333 tmpl_data.append(line[:-1].split(","))
# Wrap I/O failures in the project's PresentationError for callers.
335 except IOError as err:
336 raise PresentationError(str(err), level="ERROR")
# NOTE(review): numbered excerpt; interior lines are absent throughout
# (try/except lines, `tbl_dict`/`tbl_lst` initialisations, loop openers
# over builds, `break` statements, file writes inside the top/bottom
# loops) and the function continues past the end of this view (original
# line 527 is mid-body). Annotations cover only the visible code.
# Purpose: compare reference vs compare throughput per test, emit full
# CSV/TXT comparison tables per NDR/PDR x core count, plus top/bottom
# "selected tests" CSVs.
339 def table_performance_comparison(table, input_data):
340 """Generate the table(s) with algorithm: table_performance_comparison
341 specified in the specification file.
343 :param table: Table to generate.
344 :param input_data: Data to process.
345 :type table: pandas.Series
346 :type input_data: InputData
349 logging.info("  Generating the table {0} ...".
350 format(table.get("title", "")))
353 data = input_data.filter_data(table)
356 # Prepare the header of the tables
358 header = ["Test case",
359 "{0} Throughput [Mpps]".format(table["reference"]["title"]),
360 "{0} stdev [Mpps]".format(table["reference"]["title"]),
361 "{0} Throughput [Mpps]".format(table["compare"]["title"]),
362 "{0} stdev [Mpps]".format(table["compare"]["title"]),
364 header_str = ",".join(header) + "\n"
365 except (AttributeError, KeyError) as err:
366 logging.error("The model is invalid, missing parameter: {0}".
370 # Prepare data to the table:
# Pass 1: collect reference throughput samples per test into tbl_dict.
372 for job, builds in table["reference"]["data"].items():
374 for tst_name, tst_data in data[job][str(build)].iteritems():
375 if tbl_dict.get(tst_name, None) is None:
# Display name = first dash-token of the parent suite + the test name
# (tail of the split not visible on line 378).
376 name = "{0}-{1}".format(tst_data["parent"].split("-")[0],
377 "-".join(tst_data["name"].
379 tbl_dict[tst_name] = {"name": name,
383 tbl_dict[tst_name]["ref-data"].\
384 append(tst_data["throughput"]["value"])
386 pass  # No data in output.xml for this test
387 logging.info(pformat(tbl_dict))
# Pass 2: collect compare-side samples; tests absent from the compare
# data are dropped from the dict (line 398).
389 for job, builds in table["compare"]["data"].items():
391 for tst_name, tst_data in data[job][str(build)].iteritems():
393 tbl_dict[tst_name]["cmp-data"].\
394 append(tst_data["throughput"]["value"])
398 tbl_dict.pop(tst_name, None)
400 logging.info(pformat(tbl_dict))
# Build rows: name, ref mean/stdev [Mpps], cmp mean/stdev [Mpps],
# relative change (%). Missing sides become None placeholders.
403 for tst_name in tbl_dict.keys():
404 item = [tbl_dict[tst_name]["name"], ]
405 if tbl_dict[tst_name]["ref-data"]:
406 item.append(round(mean(tbl_dict[tst_name]["ref-data"]) / 1000000,
408 item.append(round(stdev(tbl_dict[tst_name]["ref-data"]) / 1000000,
411 item.extend([None, None])
412 if tbl_dict[tst_name]["cmp-data"]:
413 item.append(round(mean(tbl_dict[tst_name]["cmp-data"]) / 1000000,
415 item.append(round(stdev(tbl_dict[tst_name]["cmp-data"]) / 1000000,
418 item.extend([None, None])
419 if item[1] is not None and item[3] is not None:
420 item.append(int(relative_change(float(item[1]), float(item[3]))))
424 # Sort the table according to the relative change
425 tbl_lst.sort(key=lambda rel: rel[-1], reverse=True)
426 logging.info(pformat(tbl_lst))
# One full CSV per (ndr|pdr) x (1t1c|2t2c|4t4c) combination.
430 tbl_names = ["{0}-ndr-1t1c-full{1}".format(table["output-file"],
431 table["output-file-ext"]),
432 "{0}-ndr-2t2c-full{1}".format(table["output-file"],
433 table["output-file-ext"]),
434 "{0}-ndr-4t4c-full{1}".format(table["output-file"],
435 table["output-file-ext"]),
436 "{0}-pdr-1t1c-full{1}".format(table["output-file"],
437 table["output-file-ext"]),
438 "{0}-pdr-2t2c-full{1}".format(table["output-file"],
439 table["output-file-ext"]),
440 "{0}-pdr-4t4c-full{1}".format(table["output-file"],
441 table["output-file-ext"])
443 for file_name in tbl_names:
444 logging.info("      Writing file: '{}'".format(file_name))
445 with open(file_name, "w") as file_handler:
446 file_handler.write(header_str)
# Row belongs to this file when the file name's ndr/pdr token and core
# token both occur in the test name; the trailing name segment is then
# stripped before writing.
448 if (file_name.split("-")[-3] in test[0] and  # NDR vs PDR
449 file_name.split("-")[-2] in test[0]):  # cores
450 test[0] = "-".join(test[0].split("-")[:-1])
451 file_handler.write(",".join([str(item) for item in test]) +
# Convert each CSV into a PrettyTable-rendered .txt twin.
455 tbl_names_txt = ["{0}-ndr-1t1c-full.txt".format(table["output-file"]),
456 "{0}-ndr-2t2c-full.txt".format(table["output-file"]),
457 "{0}-ndr-4t4c-full.txt".format(table["output-file"]),
458 "{0}-pdr-1t1c-full.txt".format(table["output-file"]),
459 "{0}-pdr-2t2c-full.txt".format(table["output-file"]),
460 "{0}-pdr-4t4c-full.txt".format(table["output-file"])
463 for i, txt_name in enumerate(tbl_names_txt):
465 logging.info("      Writing file: '{}'".format(txt_name))
466 with open(tbl_names[i], 'rb') as csv_file:
467 csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
468 for row in csv_content:
# First CSV row seeds the PrettyTable header; the rest become rows.
469 if txt_table is None:
470 txt_table = prettytable.PrettyTable(row)
472 txt_table.add_row(row)
473 with open(txt_name, "w") as txt_file:
474 txt_file.write(str(txt_table))
476 # Selected tests in csv:
# Re-read the 1t1c full tables and emit top/bottom slices limited to
# table["nr-of-tests-shown"] rows (the write inside each loop is not
# visible in this excerpt).
477 input_file = "{0}-ndr-1t1c-full{1}".format(table["output-file"],
478 table["output-file-ext"])
479 with open(input_file, "r") as in_file:
484 output_file = "{0}-ndr-1t1c-top{1}".format(table["output-file"],
485 table["output-file-ext"])
486 logging.info("      Writing file: '{}'".format(output_file))
487 with open(output_file, "w") as out_file:
488 out_file.write(header_str)
489 for i, line in enumerate(lines[1:]):
490 if i == table["nr-of-tests-shown"]:
494 output_file = "{0}-ndr-1t1c-bottom{1}".format(table["output-file"],
495 table["output-file-ext"])
496 logging.info("      Writing file: '{}'".format(output_file))
497 with open(output_file, "w") as out_file:
498 out_file.write(header_str)
# lines[-1:0:-1] walks the data rows in reverse, skipping the header.
499 for i, line in enumerate(lines[-1:0:-1]):
500 if i == table["nr-of-tests-shown"]:
504 input_file = "{0}-pdr-1t1c-full{1}".format(table["output-file"],
505 table["output-file-ext"])
506 with open(input_file, "r") as in_file:
511 output_file = "{0}-pdr-1t1c-top{1}".format(table["output-file"],
512 table["output-file-ext"])
513 logging.info("      Writing file: '{}'".format(output_file))
514 with open(output_file, "w") as out_file:
515 out_file.write(header_str)
516 for i, line in enumerate(lines[1:]):
517 if i == table["nr-of-tests-shown"]:
521 output_file = "{0}-pdr-1t1c-bottom{1}".format(table["output-file"],
522 table["output-file-ext"])
523 logging.info("      Writing file: '{}'".format(output_file))
524 with open(output_file, "w") as out_file:
525 out_file.write(header_str)
526 for i, line in enumerate(lines[-1:0:-1]):
527 if i == table["nr-of-tests-shown"]: