1 # Copyright (c) 2017 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Algorithms to generate tables.
import logging

from string import replace

from errors import PresentationError
from utils import mean, stdev, relative_change
def generate_tables(spec, data):
    """Generate all tables specified in the specification file.

    Each table specification names its generator algorithm; the algorithm is
    looked up among this module's top-level functions and called with the
    table specification and the input data.

    :param spec: Specification read from the specification file.
    :param data: Data to process.
    :type spec: Specification
    :type data: InputData
    """
    logging.info("Generating the tables ...")
    for table in spec.tables:
        # Look the algorithm up by name instead of eval()-ing a string taken
        # from the specification file: same dispatch for every algorithm
        # defined in this module, but no arbitrary-expression evaluation.
        algorithm = globals().get(table["algorithm"])
        if callable(algorithm):
            algorithm(table, data)
        else:
            logging.error("The algorithm '{0}' is not defined.".
                          format(table["algorithm"]))
def table_details(table, input_data):
    """Generate the table(s) with algorithm: table_detailed_test_results
    specified in the specification file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """
    # NOTE(review): several source lines (the initialisations of `header`,
    # `table_lst` and `row_lst`, guarding try/except blocks, and the closing
    # parts of some multi-line statements) appear to be elided from this view
    # of the file; the comments below describe only the visible logic.
    logging.info(" Generating the table {0} ...".
        format(table.get("title", "")))

    # Narrow the input data down to what this table's specification selects.
    data = input_data.filter_data(table)

    # Prepare the header of the tables
    # Each title is CSV-quoted; embedded double quotes are doubled ("").
    for column in table["columns"]:
        header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))

    # Generate the data for the table according to the model in the table
    # Only one job / its first build is used (Python 2 dict.keys() indexing;
    # presumably table["data"] is a single-entry mapping -- confirm).
    job = table["data"].keys()[0]
    build = str(table["data"][job][0])

    suites = input_data.suites(job, build)
    # Logged when the suite data could not be obtained (the guarding
    # try/except lines are not visible here).
    logging.error(" No data available. The table will not be generated.")

    for suite_longname, suite in suites.iteritems():  # Python 2 iteration
        suite_name = suite["name"]
        for test in data[job][build].keys():
            # A test belongs to this suite when its "parent" name is a
            # substring of the suite name.
            if data[job][build][test]["parent"] in suite_name:
                for column in table["columns"]:
                    # column["data"] is a space-separated spec string; its
                    # second word selects the per-test field to display.
                    col_data = str(data[job][build][test][column["data"].
                        split(" ")[1]]).replace('"', '""')
                    if column["data"].split(" ")[1] in ("vat-history",
                        # Long text fields: drop the first " |br| " marker
                        # and wrap the cell in |prein| / |preout| rST roles.
                        # `replace` here is the (deprecated) Python 2
                        # string.replace function, not str.replace.
                        col_data = replace(col_data, " |br| ", "",
                        col_data = " |prein| {0} |preout| ".\
                    row_lst.append('"{0}"'.format(col_data))
                    # Fallback cell used when the requested field is missing.
                    row_lst.append("No data")
            table_lst.append(row_lst)

        # Write the data to file
        # One CSV per suite: <output-file>_<suite><output-file-ext>.
        file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
            table["output-file-ext"])
        logging.info(" Writing file: '{}'".format(file_name))
        with open(file_name, "w") as file_handler:
            file_handler.write(",".join(header) + "\n")
            for item in table_lst:
                file_handler.write(",".join(item) + "\n")

    logging.info(" Done.")
def table_merged_details(table, input_data):
    """Generate the table(s) with algorithm: table_merged_details
    specified in the specification file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """
    # NOTE(review): some source lines (the initialisations of `header`,
    # `table_lst` and `row_lst`, try/except blocks and the closing parts of
    # some multi-line statements) appear to be elided from this view of the
    # file; the comments below describe only the visible logic.
    logging.info(" Generating the table {0} ...".
        format(table.get("title", "")))

    # Unlike table_details, the data from all selected jobs/builds is merged
    # into one flat mapping keyed by test, then sorted for stable output.
    data = input_data.filter_data(table)
    data = input_data.merge_data(data)
    data.sort_index(inplace=True)

    # Suites are filtered and merged the same way as the test data.
    suites = input_data.filter_data(table, data_set="suites")
    suites = input_data.merge_data(suites)

    # Prepare the header of the tables
    # Each title is CSV-quoted; embedded double quotes are doubled ("").
    for column in table["columns"]:
        header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))

    for _, suite in suites.iteritems():  # Python 2 iteration
        suite_name = suite["name"]
        for test in data.keys():
            # A test belongs to this suite when its "parent" name is a
            # substring of the suite name.
            if data[test]["parent"] in suite_name:
                for column in table["columns"]:
                    # Second word of column["data"] selects the per-test
                    # field to display.
                    col_data = str(data[test][column["data"].
                        split(" ")[1]]).replace('"', '""')
                    if column["data"].split(" ")[1] in ("vat-history",
                        # Long text fields: drop the first " |br| " marker,
                        # wrap in |prein| / |preout| and trim the last five
                        # characters (presumably a trailing line-break
                        # marker remnant -- confirm).
                        col_data = replace(col_data, " |br| ", "",
                        col_data = " |prein| {0} |preout| ".\
                            format(col_data[:-5])
                    row_lst.append('"{0}"'.format(col_data))
                    # Fallback cell used when the requested field is missing.
                    row_lst.append("No data")
            table_lst.append(row_lst)

        # Write the data to file
        # One CSV per suite: <output-file>_<suite><output-file-ext>.
        file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
            table["output-file-ext"])
        logging.info(" Writing file: '{}'".format(file_name))
        with open(file_name, "w") as file_handler:
            file_handler.write(",".join(header) + "\n")
            for item in table_lst:
                file_handler.write(",".join(item) + "\n")

    logging.info(" Done.")
def table_performance_improvements(table, input_data):
    """Generate the table(s) with algorithm: table_performance_improvements
    specified in the specification file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """
    # NOTE(review): many source lines (try/else branches, list
    # initialisations, `return` statements, loop headers and closing
    # parentheses) appear to be elided from this view of the file; the
    # comments below describe only the logic that is visible.

    def _write_line_to_file(file_handler, data):
        """Write a line to the .csv file.

        :param file_handler: File handler for the csv file. It must be open for
        :param data: Item to be written to the file.
        :type file_handler: BinaryIO
        """
        # NOTE(review): the loop header binding `item` (presumably
        # `for item in data:`) and the `line_lst` initialisation are not
        # visible here.
        if isinstance(item["data"], str):
            line_lst.append(item["data"])
        elif isinstance(item["data"], float):
            # Floats are rendered with one decimal place.
            line_lst.append("{:.1f}".format(item["data"]))
        elif item["data"] is None:
        file_handler.write(",".join(line_lst) + "\n")

    logging.info(" Generating the table {0} ...".
        format(table.get("title", "")))

    # Read the template; without a template the table cannot be generated.
    file_name = table.get("template", None)
    try:
        tmpl = _read_csv_template(file_name)
    except PresentationError:
        logging.error(" The template '{0}' does not exist. Skipping the "
            "table.".format(file_name))
    # Reached when "template" is missing from the table specification.
    logging.error("The template is not defined. Skipping the table.")

    data = input_data.filter_data(table)

    # Prepare the header of the tables
    for column in table["columns"]:
        header.append(column["title"])

    # Generate the data for the table according to the model in the table
    # Each template row produces one table row; each column is driven by a
    # "<command> <args...>" spec string.
    for tmpl_item in tmpl:
        for column in table["columns"]:
            cmd = column["data"].split(" ")[0]
            args = column["data"].split(" ")[1:]
            if cmd == "template":
                # Copy a cell straight from the template; numeric cells are
                # converted to float, others kept verbatim (the try/except
                # around the float() conversion is not visible here).
                val = float(tmpl_item[int(args[0])])
                val = tmpl_item[int(args[0])]
                tbl_item.append({"data": val})
            # Visible fragment of the "data" command: collect throughput
            # values for this test over all builds of the job.
            for build in data[job]:
                data_lst.append(float(build[tmpl_item[0]]
                    ["throughput"]["value"]))
            except (KeyError, TypeError):
            # Aggregate the collected values; `operation` (e.g. "mean") is
            # eval-ed by name. The divisor of this expression is not visible
            # -- presumably a unit conversion; confirm against the original.
            tbl_item.append({"data": (eval(operation)(data_lst)) /
            tbl_item.append({"data": None})
            elif cmd == "operation":
                # Apply a two-argument operation (e.g. relative_change) to
                # two previously computed cells of this row.
                nr1 = float(tbl_item[int(args[1])]["data"])
                nr2 = float(tbl_item[int(args[2])]["data"])
                tbl_item.append({"data": eval(operation)(nr1, nr2)})
                tbl_item.append({"data": None})
            except (IndexError, ValueError, TypeError):
                logging.error("No data for {0}".format(tbl_item[1]["data"]))
                tbl_item.append({"data": None})
            # Unknown command: log and skip the table.
            logging.error("Not supported command {0}. Skipping the table.".
        tbl_lst.append(tbl_item)

    # Sort the table according to the relative change
    # (the last cell of each row holds the relative-change value).
    tbl_lst.sort(key=lambda rel: rel[-1]["data"], reverse=True)

    # Create the tables and write them to the files
    # NOTE(review): the opening `file_names = [` line is not visible here.
    "{0}_ndr_top{1}".format(table["output-file"], table["output-file-ext"]),
    "{0}_pdr_top{1}".format(table["output-file"], table["output-file-ext"]),
    "{0}_ndr_low{1}".format(table["output-file"], table["output-file-ext"]),
    "{0}_pdr_low{1}".format(table["output-file"], table["output-file-ext"])

    for file_name in file_names:
        logging.info(" Writing the file '{0}'".format(file_name))
        with open(file_name, "w") as file_handler:
            file_handler.write(",".join(header) + "\n")
            # NOTE(review): the `for item in tbl_lst:` loop header appears
            # to be elided here.
            if isinstance(item[-1]["data"], float):
                rel_change = round(item[-1]["data"], 1)
            rel_change = item[-1]["data"]
            # Rows are routed to one of four files by test type ("ndr"/"pdr"
            # appearing in the test-name cell) and by whether the relative
            # change reaches the 10.0 threshold.
            if "ndr_top" in file_name \
                and "ndr" in item[1]["data"] \
                and rel_change >= 10.0:
                _write_line_to_file(file_handler, item)
            elif "pdr_top" in file_name \
                and "pdr" in item[1]["data"] \
                and rel_change >= 10.0:
                _write_line_to_file(file_handler, item)
            elif "ndr_low" in file_name \
                and "ndr" in item[1]["data"] \
                and rel_change < 10.0:
                _write_line_to_file(file_handler, item)
            elif "pdr_low" in file_name \
                and "pdr" in item[1]["data"] \
                and rel_change < 10.0:
                _write_line_to_file(file_handler, item)

    logging.info(" Done.")
316 def _read_csv_template(file_name):
317 """Read the template from a .csv file.
319 :param file_name: Name / full path / relative path of the file to read.
321 :returns: Data from the template as list (lines) of lists (items on line).
323 :raises: PresentationError if it is not possible to read the file.
327 with open(file_name, 'r') as csv_file:
329 for line in csv_file:
330 tmpl_data.append(line[:-1].split(","))
332 except IOError as err:
333 raise PresentationError(str(err), level="ERROR")