def _tpc_modify_test_name(test_name):
    """Normalize a test name for use in comparison tables.

    Removes the test-type suffixes, collapses thread/core tags
    (e.g. ``2t1c``) to core-only tags (``1c``) and strips the NIC part
    matched by the module-level ``REGEX_NIC`` pattern.

    :param test_name: Original test name.
    :type test_name: str
    :returns: Normalized test name.
    :rtype: str
    """
    # Order matters: longer suffixes must be removed before their
    # substrings (e.g. "-ndrpdr" before "-pdr" and "-ndr").
    for suffix in ("-ndrpdrdisc", "-ndrpdr", "-pdrdisc",
                   "-ndrdisc", "-pdr", "-ndr"):
        test_name = test_name.replace(suffix, "")
    for old_tag, new_tag in (("1t1c", "1c"), ("2t1c", "1c"),
                             ("2t2c", "2c"), ("4t2c", "2c"),
                             ("4t4c", "4c"), ("8t4c", "4c")):
        test_name = test_name.replace(old_tag, new_tag)
    return re.sub(REGEX_NIC, "", test_name)
+
+
+def _tpc_modify_displayed_test_name(test_name):
+ return test_name.replace("1t1c", "1c").replace("2t1c", "1c"). \
+ replace("2t2c", "2c").replace("4t2c", "2c"). \
+ replace("4t4c", "4c").replace("8t4c", "4c")
+
+
+def _tpc_insert_data(target, src, include_tests):
+ try:
+ if include_tests == "MRR":
+ target.append(src["result"]["receive-rate"].avg)
+ elif include_tests == "PDR":
+ target.append(src["throughput"]["PDR"]["LOWER"])
+ elif include_tests == "NDR":
+ target.append(src["throughput"]["NDR"]["LOWER"])
+ except (KeyError, TypeError):
+ pass
+
+
+def _tpc_sort_table(table):
+ # Sort the table:
+ # 1. New in CSIT-XXXX
+ # 2. See footnote
+ # 3. Delta
+ tbl_new = list()
+ tbl_see = list()
+ tbl_delta = list()
+ for item in table:
+ if isinstance(item[-1], str):
+ if "New in CSIT" in item[-1]:
+ tbl_new.append(item)
+ elif "See footnote" in item[-1]:
+ tbl_see.append(item)
+ else:
+ tbl_delta.append(item)
+
+ # Sort the tables:
+ tbl_new.sort(key=lambda rel: rel[0], reverse=False)
+ tbl_see.sort(key=lambda rel: rel[0], reverse=False)
+ tbl_see.sort(key=lambda rel: rel[-1], reverse=False)
+ tbl_delta.sort(key=lambda rel: rel[-1], reverse=True)
+
+ # Put the tables together:
+ table = list()
+ table.extend(tbl_new)
+ table.extend(tbl_see)
+ table.extend(tbl_delta)
+
+ return table
+
+
def _tpc_generate_html_table(header, data, output_file_name):
    """Generate html table from input data with simple sorting possibility.

    :param header: Table header.
    :param data: Input data to be included in the table. It is a list of
        lists. Inner lists are rows in the table. All inner lists must be of
        the same length. The length of these lists must be the same as the
        length of the header.
    :param output_file_name: The name (relative or full path) where the
        generated html table is written.
    :type header: list
    :type data: list of lists
    :type output_file_name: str
    """

    frame = pd.DataFrame(data, columns=header)

    # One pre-sorted frame per column: all ascending variants first, then
    # all descending ones. This order must match the order of the dropdown
    # menu items built below (one table trace per menu item).
    sorted_frames = list()
    for descending in (False, True):
        for key in header:
            if key == header[0]:
                ascending = [descending, True]
            else:
                ascending = [not descending, True]
            sorted_frames.append(
                frame.sort_values(by=[key, header[0]], ascending=ascending)
            )

    # Alternating row background colors.
    row_colors = [["#d4e4f7" if idx % 2 else "#e9f1fb"
                   for idx in range(len(frame))]]
    table_header = dict(
        values=[f"<b>{item}</b>" for item in header],
        fill_color="#7eade7",
        align=["left", "center"]
    )

    fig = go.Figure()
    for tbl in sorted_frames:
        fig.add_trace(
            go.Table(
                columnwidth=[30, 10],
                header=table_header,
                cells=dict(
                    values=[tbl.get(col) for col in header],
                    fill_color=row_colors,
                    align=["left", "right"]
                )
            )
        )

    # Dropdown buttons: each one makes exactly one table trace visible.
    menu_items = [f"<b>{itm}</b> (ascending)" for itm in header]
    menu_items += [f"<b>{itm}</b> (descending)" for itm in header]
    buttons = list()
    for idx, label in enumerate(menu_items):
        buttons.append(
            dict(
                label=label.replace(" [Mpps]", ""),
                method="update",
                args=[{"visible": [pos == idx for pos in range(len(menu_items))]}],
            )
        )

    fig.update_layout(
        updatemenus=[
            go.layout.Updatemenu(
                type="dropdown",
                direction="down",
                x=0.03,
                xanchor="left",
                y=1.045,
                yanchor="top",
                active=len(menu_items) - 1,
                buttons=buttons
            )
        ],
        annotations=[
            go.layout.Annotation(
                text="<b>Sort by:</b>",
                x=0,
                xref="paper",
                y=1.035,
                yref="paper",
                align="left",
                showarrow=False
            )
        ]
    )

    ploff.plot(fig, show_link=False, auto_open=False, filename=output_file_name)
+
+