Trending: Add aws test beds
[csit.git] / resources / tools / presentation / generator_tables.py
index fe0eaaa..a995711 100644 (file)
@@ -17,6 +17,7 @@
 
 import logging
 import csv
 
 import logging
 import csv
+import math
 import re
 
 from collections import OrderedDict
 import re
 
 from collections import OrderedDict
@@ -24,11 +25,11 @@ from xml.etree import ElementTree as ET
 from datetime import datetime as dt
 from datetime import timedelta
 from copy import deepcopy
 from datetime import datetime as dt
 from datetime import timedelta
 from copy import deepcopy
-from json import loads
 
 import plotly.graph_objects as go
 import plotly.offline as ploff
 import pandas as pd
 
 import plotly.graph_objects as go
 import plotly.offline as ploff
 import pandas as pd
+import prettytable
 
 from numpy import nan, isnan
 from yaml import load, FullLoader, YAMLError
 
 from numpy import nan, isnan
 from yaml import load, FullLoader, YAMLError
@@ -59,7 +60,8 @@ def generate_tables(spec, data):
         u"table_failed_tests_html": table_failed_tests_html,
         u"table_oper_data_html": table_oper_data_html,
         u"table_comparison": table_comparison,
         u"table_failed_tests_html": table_failed_tests_html,
         u"table_oper_data_html": table_oper_data_html,
         u"table_comparison": table_comparison,
-        u"table_weekly_comparison": table_weekly_comparison
+        u"table_weekly_comparison": table_weekly_comparison,
+        u"table_job_spec_duration": table_job_spec_duration
     }
 
     logging.info(u"Generating the tables ...")
     }
 
     logging.info(u"Generating the tables ...")
@@ -76,6 +78,96 @@ def generate_tables(spec, data):
     logging.info(u"Done.")
 
 
     logging.info(u"Done.")
 
 
def table_job_spec_duration(table, input_data):
    """Generate the table(s) with algorithm: table_job_spec_duration
    specified in the specification file.

    For an "iterative" job-spec, durations are collected from all builds of
    each job in the line's data-set and mean/stdev are computed. For a
    "coverage" job-spec, a single build's duration is used (stdev is NaN).
    The result is written as a prettytable text file.

    :param table: Table to generate.
    :param input_data: Data to process.
    :type table: pandas.Series
    :type input_data: InputData
    """

    logging.info(f"  Generating the table {table.get(u'title', u'')} ...")

    jb_type = table.get(u"jb-type", None)

    tbl_lst = list()
    if jb_type == u"iterative":
        for line in table.get(u"lines", tuple()):
            tbl_itm = {
                u"name": line.get(u"job-spec", u""),
                u"data": list()
            }
            for job, builds in line.get(u"data-set", dict()).items():
                for build_nr in builds:
                    try:
                        # "elapsedtime" is in milliseconds; convert to whole
                        # minutes.
                        minutes = input_data.metadata(
                            job, str(build_nr)
                        )[u"elapsedtime"] // 60000
                    except (KeyError, IndexError, ValueError, AttributeError):
                        # Metadata missing or malformed for this build;
                        # best-effort: skip it.
                        continue
                    tbl_itm[u"data"].append(minutes)
            if not tbl_itm[u"data"]:
                # No build yielded a duration; mean()/stdev() would raise
                # StatisticsError on an empty sample.
                continue
            tbl_itm[u"mean"] = mean(tbl_itm[u"data"])
            # stdev() requires at least two data points; use NaN for a
            # single-sample line so it renders as mean only (see below).
            tbl_itm[u"stdev"] = (
                stdev(tbl_itm[u"data"]) if len(tbl_itm[u"data"]) > 1
                else float(u"nan")
            )
            tbl_lst.append(tbl_itm)
    elif jb_type == u"coverage":
        job = table.get(u"data", None)
        if not job:
            return
        for line in table.get(u"lines", tuple()):
            try:
                tbl_itm = {
                    u"name": line.get(u"job-spec", u""),
                    u"mean": input_data.metadata(
                        list(job.keys())[0], str(line[u"build"])
                    )[u"elapsedtime"] // 60000,
                    u"stdev": float(u"nan")
                }
                tbl_itm[u"data"] = [tbl_itm[u"mean"], ]
            except (KeyError, IndexError, ValueError, AttributeError):
                continue
            tbl_lst.append(tbl_itm)
    else:
        logging.warning(f"Wrong type of job-spec: {jb_type}. Skipping.")
        return

    if not tbl_lst:
        # Nothing collected; do not write an empty table file.
        return

    # Format durations as HH:MM; a NaN stdev renders as an empty string.
    for line in tbl_lst:
        line[u"mean"] = \
            f"{int(line[u'mean'] // 60):02d}:{int(line[u'mean'] % 60):02d}"
        if math.isnan(line[u"stdev"]):
            line[u"stdev"] = u""
        else:
            line[u"stdev"] = \
                f"{int(line[u'stdev'] // 60):02d}:{int(line[u'stdev'] % 60):02d}"

    rows = list()
    for itm in tbl_lst:
        rows.append([
            itm[u"name"],
            f"{len(itm[u'data'])}",
            f"{itm[u'mean']} +- {itm[u'stdev']}"
            if itm[u"stdev"] != u"" else f"{itm[u'mean']}"
        ])

    txt_table = prettytable.PrettyTable(
        [u"Job Specification", u"Nr of Runs", u"Duration [HH:MM]"]
    )
    for row in rows:
        txt_table.add_row(row)
    txt_table.align = u"r"
    txt_table.align[u"Job Specification"] = u"l"

    file_name = f"{table.get(u'output-file', u'')}.txt"
    with open(file_name, u"wt", encoding='utf-8') as txt_file:
        txt_file.write(str(txt_table))
+
 def table_oper_data_html(table, input_data):
     """Generate the table(s) with algorithm: html_table_oper_data
     specified in the specification file.
 def table_oper_data_html(table, input_data):
     """Generate the table(s) with algorithm: html_table_oper_data
     specified in the specification file.
@@ -1019,6 +1111,8 @@ def _generate_url(testbed, test_name):
         nic = u"x553"
     elif u"cx556" in test_name or u"cx556a" in test_name:
         nic = u"cx556a"
         nic = u"x553"
     elif u"cx556" in test_name or u"cx556a" in test_name:
         nic = u"cx556a"
+    elif u"ena" in test_name:
+        nic = u"nitro50g"
     else:
         nic = u""
 
     else:
         nic = u""
 
@@ -1051,15 +1145,18 @@ def _generate_url(testbed, test_name):
         cores = u"4t4c"
     elif u"2t1c" in test_name or \
          (u"-1c-" in test_name and
         cores = u"4t4c"
     elif u"2t1c" in test_name or \
          (u"-1c-" in test_name and
-          testbed in (u"2n-skx", u"3n-skx", u"2n-clx", u"2n-zn2")):
+          testbed in
+          (u"2n-skx", u"3n-skx", u"2n-clx", u"2n-zn2", u"2n-aws", u"3n-aws")):
         cores = u"2t1c"
     elif u"4t2c" in test_name or \
          (u"-2c-" in test_name and
         cores = u"2t1c"
     elif u"4t2c" in test_name or \
          (u"-2c-" in test_name and
-          testbed in (u"2n-skx", u"3n-skx", u"2n-clx", u"2n-zn2")):
+          testbed in
+          (u"2n-skx", u"3n-skx", u"2n-clx", u"2n-zn2", u"2n-aws", u"3n-aws")):
         cores = u"4t2c"
     elif u"8t4c" in test_name or \
          (u"-4c-" in test_name and
         cores = u"4t2c"
     elif u"8t4c" in test_name or \
          (u"-4c-" in test_name and
-          testbed in (u"2n-skx", u"3n-skx", u"2n-clx", u"2n-zn2")):
+          testbed in
+          (u"2n-skx", u"3n-skx", u"2n-clx", u"2n-zn2", u"2n-aws", u"3n-aws")):
         cores = u"8t4c"
     else:
         cores = u""
         cores = u"8t4c"
     else:
         cores = u""
@@ -1074,6 +1171,8 @@ def _generate_url(testbed, test_name):
         driver = u"rdma"
     elif u"dnv" in testbed or u"tsh" in testbed:
         driver = u"ixgbe"
         driver = u"rdma"
     elif u"dnv" in testbed or u"tsh" in testbed:
         driver = u"ixgbe"
+    elif u"ena" in test_name:
+        driver = u"ena"
     else:
         driver = u"dpdk"
 
     else:
         driver = u"dpdk"
 
@@ -1374,7 +1473,11 @@ def table_last_failed_tests(table, input_data):
                 if not groups:
                     continue
                 nic = groups.group(0)
                 if not groups:
                     continue
                 nic = groups.group(0)
-                failed_tests.append(f"{nic}-{tst_data[u'name']}")
+                msg = tst_data[u'msg'].replace(u"\n", u"")
+                msg = re.sub(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})',
+                             'xxx.xxx.xxx.xxx', msg)
+                msg = msg.split(u'Also teardown failed')[0]
+                failed_tests.append(f"{nic}-{tst_data[u'name']}###{msg}")
             tbl_list.append(passed)
             tbl_list.append(failed)
             tbl_list.append(duration)
             tbl_list.append(passed)
             tbl_list.append(failed)
             tbl_list.append(duration)
@@ -1809,7 +1912,7 @@ def table_comparison(table, input_data):
                         )
                     except ZeroDivisionError:
                         break
                         )
                     except ZeroDivisionError:
                         break
-                    if delta in (None, float(u"nan"), u"nan", u"NaN"):
+                    if delta is None or math.isnan(delta):
                         break
                     new_row.append({
                         u"mean": delta * 1e6,
                         break
                     new_row.append({
                         u"mean": delta * 1e6,