X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Fgenerator_tables.py;h=78569902f98e31fa7aabc1c3e973ca4efd82896f;hb=0f70d751c7a31dc81bc7a1dc9854a07ff8c4b1b8;hp=0afbf87ac04a357386647e5d1c2b6f7d69bc09d7;hpb=d6c14f8b47849a4885aac6dad5c8d19baad4b9c3;p=csit.git diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py index 0afbf87ac0..78569902f9 100644 --- a/resources/tools/presentation/generator_tables.py +++ b/resources/tools/presentation/generator_tables.py @@ -34,7 +34,7 @@ from pal_utils import mean, stdev, relative_change, classify_anomalies, \ convert_csv_to_pretty_txt, relative_change_stdev -REGEX_NIC = re.compile(r'\d*ge\dp\d\D*\d*') +REGEX_NIC = re.compile(r'(\d*ge\dp\d\D*\d*[a-z]*)') def generate_tables(spec, data): @@ -47,7 +47,6 @@ def generate_tables(spec, data): """ generator = { - u"table_details": table_details, u"table_merged_details": table_merged_details, u"table_perf_comparison": table_perf_comparison, u"table_perf_comparison_nic": table_perf_comparison_nic, @@ -57,7 +56,8 @@ def generate_tables(spec, data): u"table_perf_trending_dash_html": table_perf_trending_dash_html, u"table_last_failed_tests": table_last_failed_tests, u"table_failed_tests": table_failed_tests, - u"table_failed_tests_html": table_failed_tests_html + u"table_failed_tests_html": table_failed_tests_html, + u"table_oper_data_html": table_oper_data_html } logging.info(u"Generating the tables ...") @@ -72,8 +72,8 @@ def generate_tables(spec, data): logging.info(u"Done.") -def table_details(table, input_data): - """Generate the table(s) with algorithm: table_detailed_test_results +def table_oper_data_html(table, input_data): + """Generate the table(s) with algorithm: html_table_oper_data specified in the specification file. :param table: Table to generate. @@ -83,66 +83,186 @@ def table_details(table, input_data): """ logging.info(f" Generating the table {table.get(u'title', u'')} ...") - # Transform the data logging.info( f" Creating the data set for the {table.get(u'type', u'')} " f"{table.get(u'title', u'')}." ) - data = input_data.filter_data(table) + data = input_data.filter_data( + table, + params=[u"name", u"parent", u"show-run", u"type"], + continue_on_error=True + ) + if data.empty: + return + data = input_data.merge_data(data) + data.sort_index(inplace=True) - # Prepare the header of the tables - header = list() - for column in table[u"columns"]: - header.append( - u'"{0}"'.format(str(column[u"title"]).replace(u'"', u'""')) + suites = input_data.filter_data( + table, + continue_on_error=True, + data_set=u"suites" + ) + if suites.empty: + return + suites = input_data.merge_data(suites) + + def _generate_html_table(tst_data): + """Generate an HTML table with operational data for the given test. + + :param tst_data: Test data to be used to generate the table. + :type tst_data: pandas.Series + :returns: HTML table with operational data. 
+ :rtype: str + """ + + colors = { + u"header": u"#7eade7", + u"empty": u"#ffffff", + u"body": (u"#e9f1fb", u"#d4e4f7") + } + + tbl = ET.Element(u"table", attrib=dict(width=u"100%", border=u"0")) + + trow = ET.SubElement(tbl, u"tr", attrib=dict(bgcolor=colors[u"header"])) + thead = ET.SubElement( + trow, u"th", attrib=dict(align=u"left", colspan=u"6") ) + thead.text = tst_data[u"name"] - # Generate the data for the table according to the model in the table - # specification - job = list(table[u"data"].keys())[0] - build = str(table[u"data"][job][0]) - try: - suites = input_data.suites(job, build) - except KeyError: - logging.error( - u" No data available. The table will not be generated." + trow = ET.SubElement(tbl, u"tr", attrib=dict(bgcolor=colors[u"empty"])) + thead = ET.SubElement( + trow, u"th", attrib=dict(align=u"left", colspan=u"6") ) - return + thead.text = u"\t" - for suite in suites.values: - # Generate data - suite_name = suite[u"name"] - table_lst = list() - for test in data[job][build].keys(): - if data[job][build][test][u"parent"] not in suite_name: + if tst_data.get(u"show-run", u"No Data") == u"No Data": + trow = ET.SubElement( + tbl, u"tr", attrib=dict(bgcolor=colors[u"header"]) + ) + tcol = ET.SubElement( + trow, u"td", attrib=dict(align=u"left", colspan=u"6") + ) + tcol.text = u"No Data" + return str(ET.tostring(tbl, encoding=u"unicode")) + + tbl_hdr = ( + u"Name", + u"Nr of Vectors", + u"Nr of Packets", + u"Suspends", + u"Cycles per Packet", + u"Average Vector Size" + ) + + for dut_name, dut_data in tst_data[u"show-run"].items(): + trow = ET.SubElement( + tbl, u"tr", attrib=dict(bgcolor=colors[u"header"]) + ) + tcol = ET.SubElement( + trow, u"td", attrib=dict(align=u"left", colspan=u"6") + ) + if dut_data.get(u"threads", None) is None: + tcol.text = u"No Data" continue - row_lst = list() - for column in table[u"columns"]: - try: - col_data = str(data[job][build][test][column[ - u"data"].split(" ")[1]]).replace(u'"', u'""') - if column[u"data"].split(u" ")[1] in \ - (u"conf-history", u"show-run"): - col_data = col_data.replace(u" |br| ", u"", 1) - col_data = f" |prein| {col_data[:-5]} |preout| " - row_lst.append(f'"{col_data}"') - except KeyError: - row_lst.append(u"No data") - table_lst.append(row_lst) + bold = ET.SubElement(tcol, u"b") + bold.text = dut_name - # Write the data to file - if table_lst: - file_name = ( - f"{table[u'output-file']}_{suite_name}" - f"{table[u'output-file-ext']}" + trow = ET.SubElement( + tbl, u"tr", attrib=dict(bgcolor=colors[u"body"][0]) ) - logging.info(f" Writing file: {file_name}") - with open(file_name, u"wt") as file_handler: - file_handler.write(u",".join(header) + u"\n") - for item in table_lst: - file_handler.write(u",".join(item) + u"\n") + tcol = ET.SubElement( + trow, u"td", attrib=dict(align=u"left", colspan=u"6") + ) + bold = ET.SubElement(tcol, u"b") + bold.text = ( + f"Host IP: {dut_data.get(u'host', '')}, " + f"Socket: {dut_data.get(u'socket', '')}" + ) + trow = ET.SubElement( + tbl, u"tr", attrib=dict(bgcolor=colors[u"empty"]) + ) + thead = ET.SubElement( + trow, u"th", attrib=dict(align=u"left", colspan=u"6") + ) + thead.text = u"\t" + + for thread_nr, thread in dut_data[u"threads"].items(): + trow = ET.SubElement( + tbl, u"tr", attrib=dict(bgcolor=colors[u"header"]) + ) + tcol = ET.SubElement( + trow, u"td", attrib=dict(align=u"left", colspan=u"6") + ) + bold = ET.SubElement(tcol, u"b") + bold.text = u"main" if thread_nr == 0 else f"worker_{thread_nr}" + trow = ET.SubElement( + tbl, u"tr", 
attrib=dict(bgcolor=colors[u"header"])
+                )
+                for idx, col in enumerate(tbl_hdr):
+                    tcol = ET.SubElement(
+                        trow, u"td",
+                        attrib=dict(align=u"right" if idx else u"left")
+                    )
+                    font = ET.SubElement(
+                        tcol, u"font", attrib=dict(size=u"2")
+                    )
+                    bold = ET.SubElement(font, u"b")
+                    bold.text = col
+                for row_nr, row in enumerate(thread):
+                    trow = ET.SubElement(
+                        tbl, u"tr",
+                        attrib=dict(bgcolor=colors[u"body"][row_nr % 2])
+                    )
+                    for idx, col in enumerate(row):
+                        tcol = ET.SubElement(
+                            trow, u"td",
+                            attrib=dict(align=u"right" if idx else u"left")
+                        )
+                        font = ET.SubElement(
+                            tcol, u"font", attrib=dict(size=u"2")
+                        )
+                        if isinstance(col, float):
+                            font.text = f"{col:.2f}"
+                        else:
+                            font.text = str(col)
+                trow = ET.SubElement(
+                    tbl, u"tr", attrib=dict(bgcolor=colors[u"empty"])
+                )
+                thead = ET.SubElement(
+                    trow, u"th", attrib=dict(align=u"left", colspan=u"6")
+                )
+                thead.text = u"\t"
+        trow = ET.SubElement(tbl, u"tr", attrib=dict(bgcolor=colors[u"empty"]))
+        thead = ET.SubElement(
+            trow, u"th", attrib=dict(align=u"left", colspan=u"6")
+        )
+        font = ET.SubElement(
+            thead, u"font", attrib=dict(size=u"12px", color=u"#ffffff")
+        )
+        font.text = u"."
+
+        return str(ET.tostring(tbl, encoding=u"unicode"))
+
+    for suite in suites.values:
+        html_table = str()
+        for test_data in data.values:
+            if test_data[u"parent"] not in suite[u"name"]:
+                continue
+            html_table += _generate_html_table(test_data)
+        if not html_table:
+            continue
+        try:
+            file_name = f"{table[u'output-file']}_{suite[u'name']}.rst"
+            with open(f"{file_name}", u'w') as html_file:
+                logging.info(f"  Writing file: {file_name}")
+                html_file.write(u".. raw:: html\n\n\t")
+                html_file.write(html_table)
+                html_file.write(u"\n\t<p><br><br></p>
\n") + except KeyError: + logging.warning(u"The output file is not defined.") + return logging.info(u" Done.") @@ -196,7 +316,17 @@ def table_merged_details(table, input_data): col_data = col_data.replace( u"No Data", u"Not Captured " ) - if column[u"data"].split(u" ")[1] in \ + if column[u"data"].split(u" ")[1] in (u"name", ): + if len(col_data) > 30: + col_data_lst = col_data.split(u"-") + half = int(len(col_data_lst) / 2) + col_data = f"{u'-'.join(col_data_lst[:half])}" \ + f"- |br| " \ + f"{u'-'.join(col_data_lst[half:])}" + col_data = f" |prein| {col_data} |preout| " + elif column[u"data"].split(u" ")[1] in (u"msg", ): + col_data = f" |prein| {col_data} |preout| " + elif column[u"data"].split(u" ")[1] in \ (u"conf-history", u"show-run"): col_data = col_data.replace(u" |br| ", u"", 1) col_data = f" |prein| {col_data[:-5]} |preout| " @@ -207,10 +337,7 @@ def table_merged_details(table, input_data): # Write the data to file if table_lst: - file_name = ( - f"{table[u'output-file']}_{suite_name}" - f"{table[u'output-file-ext']}" - ) + file_name = f"{table[u'output-file']}_{suite_name}.csv" logging.info(f" Writing file: {file_name}") with open(file_name, u"wt") as file_handler: file_handler.write(u",".join(header) + u"\n")