Report: Detailed results tables
[csit.git] resources/tools/presentation/input_data_parser.py
index eeabaa8..1ee5753 100644
@@ -30,6 +30,8 @@ from datetime import datetime as dt
 from datetime import timedelta
 from json import loads
 
+import hdrh.histogram
+import hdrh.codec
 import prettytable
 import pandas as pd
 
@@ -201,17 +203,39 @@ class ExecutionChecker(ResultVisitor):
     .. note:: ID is the lowercase full path to the test.
     """
 
-    REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::?\s(\d+.\d+).*\n'
-                                r'PLRsearch upper bound::?\s(\d+.\d+)')
-
-    REGEX_NDRPDR_RATE = re.compile(r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
-                                   r'NDR_UPPER:\s(\d+.\d+).*\n'
-                                   r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
-                                   r'PDR_UPPER:\s(\d+.\d+)')
-
-    REGEX_NDRPDR_LAT = re.compile(r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
-                                  r'LATENCY.*\[\'(.*)\', \'(.*)\'\]')
+    REGEX_PLR_RATE = re.compile(
+        r'PLRsearch lower bound::?\s(\d+.\d+).*\n'
+        r'PLRsearch upper bound::?\s(\d+.\d+)'
+    )
+    REGEX_NDRPDR_RATE = re.compile(
+        r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
+        r'NDR_UPPER:\s(\d+.\d+).*\n'
+        r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
+        r'PDR_UPPER:\s(\d+.\d+)'
+    )
+    REGEX_PERF_MSG_INFO = re.compile(
+        r'NDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
+        r'PDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
+        r'Latency at 90% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
+        r'Latency at 50% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
+        r'Latency at 10% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
+    )
+    REGEX_MRR_MSG_INFO = re.compile(r'.*\[(.*)\]')
 
+    # TODO: Remove when not needed
+    REGEX_NDRPDR_LAT_BASE = re.compile(
+        r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
+        r'LATENCY.*\[\'(.*)\', \'(.*)\'\]'
+    )
+    REGEX_NDRPDR_LAT = re.compile(
+        r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
+        r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
+        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+        r'Latency.*\[\'(.*)\', \'(.*)\'\]'
+    )
+    # TODO: Remove when not needed
     REGEX_NDRPDR_LAT_LONG = re.compile(
         r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
         r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
@@ -223,26 +247,30 @@ class ExecutionChecker(ResultVisitor):
         r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
         r'Latency.*\[\'(.*)\', \'(.*)\'\]'
     )
-
-    REGEX_TOLERANCE = re.compile(r'^[\D\d]*LOSS_ACCEPTANCE:\s(\d*\.\d*)\s'
-                                 r'[\D\d]*')
-
-    REGEX_VERSION_VPP = re.compile(r"(return STDOUT Version:\s*|"
-                                   r"VPP Version:\s*|VPP version:\s*)(.*)")
-
-    REGEX_VERSION_DPDK = re.compile(r"(DPDK version:\s*|DPDK Version:\s*)(.*)")
-
-    REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s(\d*).*$')
-
-    REGEX_MRR = re.compile(r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
-                           r'tx\s(\d*),\srx\s(\d*)')
-
-    REGEX_BMRR = re.compile(r'Maximum Receive Rate trial results'
-                            r' in packets per second: \[(.*)\]')
-
-    REGEX_RECONF_LOSS = re.compile(r'Packets lost due to reconfig: (\d*)')
-    REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.[\de-]*)')
-
+    REGEX_VERSION_VPP = re.compile(
+        r"(return STDOUT Version:\s*|"
+        r"VPP Version:\s*|VPP version:\s*)(.*)"
+    )
+    REGEX_VERSION_DPDK = re.compile(
+        r"(DPDK version:\s*|DPDK Version:\s*)(.*)"
+    )
+    REGEX_TCP = re.compile(
+        r'Total\s(rps|cps|throughput):\s(\d*).*$'
+    )
+    REGEX_MRR = re.compile(
+        r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
+        r'tx\s(\d*),\srx\s(\d*)'
+    )
+    REGEX_BMRR = re.compile(
+        r'Maximum Receive Rate trial results'
+        r' in packets per second: \[(.*)\]'
+    )
+    REGEX_RECONF_LOSS = re.compile(
+        r'Packets lost due to reconfig: (\d*)'
+    )
+    REGEX_RECONF_TIME = re.compile(
+        r'Implied time lost: (\d*.[\de-]*)'
+    )
     REGEX_TC_TAG = re.compile(r'\d+[tT]\d+[cC]')
 
     REGEX_TC_NAME_OLD = re.compile(r'-\d+[tT]\d+[cC]-')
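The regex constants above do all of the test-message parsing. A minimal, self-contained sketch of how REGEX_NDRPDR_RATE yields the four throughput groups; the message text is hypothetical, not taken from a real test run:

    import re

    REGEX_NDRPDR_RATE = re.compile(
        r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
        r'NDR_UPPER:\s(\d+.\d+).*\n'
        r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
        r'PDR_UPPER:\s(\d+.\d+)'
    )

    msg = (  # hypothetical NDRPDR result message; the LOWER bounds carry an
             # extra bandwidth line, matching the additional .*\n in the regex
        "NDR_LOWER: 12345678.5 pps (64B)\n"
        "NDR_LOWER: 6.32 Gbps\n"
        "NDR_UPPER: 12399999.5 pps\n"
        "PDR_LOWER: 13000000.0 pps\n"
        "PDR_LOWER: 6.66 Gbps\n"
        "PDR_UPPER: 13100000.0 pps"
    )
    groups = re.search(REGEX_NDRPDR_RATE, msg)
    print(groups.group(1), groups.group(2), groups.group(3), groups.group(4))
    # 12345678.5 12399999.5 13000000.0 13100000.0
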
@@ -288,13 +316,9 @@ class ExecutionChecker(ResultVisitor):
         # 0 - no message
         # 1 - PAPI History of DUT1
         # 2 - PAPI History of DUT2
-        self._lookup_kw_nr = 0
         self._conf_history_lookup_nr = 0
 
-        # Number of Show Running messages found
-        # 0 - no message
-        # 1 - Show run message found
-        self._show_run_lookup_nr = 0
+        self._sh_run_counter = 0
 
         # Test ID of currently processed test- the lowercase full path to the
         # test
@@ -333,6 +357,132 @@ class ExecutionChecker(ResultVisitor):
         """
         return self._data
 
+    def _get_data_from_mrr_test_msg(self, msg):
+        """Get info from message of MRR performance tests.
+
+        :param msg: Message to be processed.
+        :type msg: str
+        :returns: Processed message or original message if a problem occurs.
+        :rtype: str
+        """
+
+        groups = re.search(self.REGEX_MRR_MSG_INFO, msg)
+        if not groups or groups.lastindex != 1:
+            return msg
+
+        try:
+            data = groups.group(1).split(u", ")
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return msg
+
+        out_str = u"["
+        try:
+            for item in data:
+                out_str += f"{(float(item) / 1e6):.2f}, "
+            return out_str[:-2] + u"]"
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return msg
+
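For illustration, the conversion performed by _get_data_from_mrr_test_msg() on a hypothetical BMRR message (trial rates in pps become Mpps with two decimals):

    import re

    msg = (u"Maximum Receive Rate trial results in packets per second: "
           u"[22355556.0, 22161111.0, 21958333.0]")
    # REGEX_MRR_MSG_INFO (r'.*\[(.*)\]') captures the list body, which the
    # method splits, scales and reformats:
    rates = re.search(r'.*\[(.*)\]', msg).group(1).split(u", ")
    print(u"[" + u", ".join(f"{float(rate) / 1e6:.2f}" for rate in rates) + u"]")
    # [22.36, 22.16, 21.96]
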
+    def _get_data_from_perf_test_msg(self, msg):
+        """Get info from message of NDRPDR performance tests.
+
+        :param msg: Message to be processed.
+        :type msg: str
+        :returns: Processed message or original message if a problem occurs.
+        :rtype: str
+        """
+
+        groups = re.search(self.REGEX_PERF_MSG_INFO, msg)
+        if not groups or groups.lastindex != 10:
+            return msg
+
+        try:
+            data = {
+                u"ndr_low": float(groups.group(1)),
+                u"ndr_low_b": float(groups.group(2)),
+                u"pdr_low": float(groups.group(3)),
+                u"pdr_low_b": float(groups.group(4)),
+                u"pdr_lat_90_1": groups.group(5),
+                u"pdr_lat_90_2": groups.group(6),
+                u"pdr_lat_50_1": groups.group(7),
+                u"pdr_lat_50_2": groups.group(8),
+                u"pdr_lat_10_1": groups.group(9),
+                u"pdr_lat_10_2": groups.group(10),
+            }
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return msg
+
+        def _process_lat(in_str_1, in_str_2):
+            """Extract min, avg, max values from latency string.
+
+            :param in_str_1: Latency string for one direction produced by robot
+                framework.
+            :param in_str_2: Latency string for second direction produced by
+                robot framework.
+            :type in_str_1: str
+            :type in_str_2: str
+            :returns: Processed latency string or empty string if a problem
+                occurs.
+            :rtype: tuple(str, str)
+            """
+            in_list_1 = in_str_1.split('/', 3)
+            in_list_2 = in_str_2.split('/', 3)
+
+            if len(in_list_1) != 4 and len(in_list_2) != 4:
+                return u""
+
+            in_list_1[3] += u"=" * (len(in_list_1[3]) % 4)
+            try:
+                hdr_lat_1 = hdrh.histogram.HdrHistogram.decode(in_list_1[3])
+            except hdrh.codec.HdrLengthException:
+                return u""
+
+            in_list_2[3] += u"=" * (len(in_list_2[3]) % 4)
+            try:
+                hdr_lat_2 = hdrh.histogram.HdrHistogram.decode(in_list_2[3])
+            except hdrh.codec.HdrLengthException:
+                return u""
+
+            if hdr_lat_1 and hdr_lat_2:
+                hdr_lat_1_50 = hdr_lat_1.get_value_at_percentile(50.0)
+                hdr_lat_1_90 = hdr_lat_1.get_value_at_percentile(90.0)
+                hdr_lat_1_99 = hdr_lat_1.get_value_at_percentile(99.0)
+                hdr_lat_2_50 = hdr_lat_2.get_value_at_percentile(50.0)
+                hdr_lat_2_90 = hdr_lat_2.get_value_at_percentile(90.0)
+                hdr_lat_2_99 = hdr_lat_2.get_value_at_percentile(99.0)
+
+                if (hdr_lat_1_50 + hdr_lat_1_90 + hdr_lat_1_99 +
+                        hdr_lat_2_50 + hdr_lat_2_90 + hdr_lat_2_99):
+                    return (
+                        f"{hdr_lat_1_50} {hdr_lat_1_90} {hdr_lat_1_99}      "
+                        f"{hdr_lat_2_50} {hdr_lat_2_90} {hdr_lat_2_99}"
+                    )
+
+            return u""
+
+        try:
+            pdr_lat_10 = _process_lat(data[u'pdr_lat_10_1'],
+                                      data[u'pdr_lat_10_2'])
+            pdr_lat_50 = _process_lat(data[u'pdr_lat_50_1'],
+                                      data[u'pdr_lat_50_2'])
+            pdr_lat_90 = _process_lat(data[u'pdr_lat_90_1'],
+                                      data[u'pdr_lat_90_2'])
+            pdr_lat_10 = f"\n3. {pdr_lat_10}" if pdr_lat_10 else u""
+            pdr_lat_50 = f"\n4. {pdr_lat_50}" if pdr_lat_50 else u""
+            pdr_lat_90 = f"\n5. {pdr_lat_90}" if pdr_lat_90 else u""
+
+            return (
+                f"1. {(data[u'ndr_low'] / 1e6):.2f}      "
+                f"{data[u'ndr_low_b']:.2f}"
+                f"\n2. {(data[u'pdr_low'] / 1e6):.2f}      "
+                f"{data[u'pdr_low_b']:.2f}"
+                f"{pdr_lat_10}"
+                f"{pdr_lat_50}"
+                f"{pdr_lat_90}"
+            )
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return msg
+
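A standalone sketch (not part of the patch) of the hdrh calls that _process_lat() relies on, assuming the hdrhistogram Python package's encode()/decode() round trip: latency samples are recorded, encoded into the base64 payload carried as the fourth field of the "min/avg/max/hdrh" latency string, then decoded back and read at the same 50/90/99 percentiles:

    import hdrh.histogram

    # 1 usec .. 60 sec range, 2 significant digits (values are illustrative)
    hist = hdrh.histogram.HdrHistogram(1, 60 * 1000 * 1000, 2)
    for value in (110, 120, 130, 500, 900):  # hypothetical one-way latencies [usec]
        hist.record_value(value)

    encoded = hist.encode()  # base64 payload, as found after the third '/'
    decoded = hdrh.histogram.HdrHistogram.decode(encoded)
    print(decoded.get_value_at_percentile(50.0),
          decoded.get_value_at_percentile(90.0),
          decoded.get_value_at_percentile(99.0))
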
     def _get_testbed(self, msg):
         """Called when extraction of testbed IP is required.
         The testbed is identified by TG node IP address.
@@ -454,96 +604,74 @@ class ExecutionChecker(ResultVisitor):
         :returns: Nothing.
         """
 
+        if not msg.message.count(u"stats runtime"):
+            return
+
+        # Temporary solution
+        if self._sh_run_counter > 1:
+            return
+
         if u"show-run" not in self._data[u"tests"][self._test_id].keys():
-            self._data[u"tests"][self._test_id][u"show-run"] = str()
+            self._data[u"tests"][self._test_id][u"show-run"] = dict()
 
-        if msg.message.count(u"stats runtime") or \
-                msg.message.count(u"Runtime"):
-            try:
-                host = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).
-                           group(1))
-            except (AttributeError, IndexError):
-                host = self._data[u"tests"][self._test_id][u"show-run"].\
-                           count(u"DUT:") + 1
-            try:
-                socket = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).
-                             group(2))
-                socket = f"/{socket}"
-            except (AttributeError, IndexError):
-                socket = u""
-            runtime = loads(
-                str(msg.message).
-                replace(u' ', u'').
-                replace(u'\n', u'').
-                replace(u"'", u'"').
-                replace(u'b"', u'"').
-                replace(u'u"', u'"').
-                split(u":", 1)[1]
-            )
-            try:
-                threads_nr = len(runtime[0][u"clocks"])
-            except (IndexError, KeyError):
-                return
-            tbl_hdr = [
-                u"Name",
-                u"Calls",
-                u"Vectors",
-                u"Suspends",
-                u"Clocks",
-                u"Vectors/Calls"
-            ]
-            table = [[tbl_hdr, ] for _ in range(threads_nr)]
-            for item in runtime:
-                for idx in range(threads_nr):
-                    name = format(item[u"name"])
-                    calls = format(item[u"calls"][idx])
-                    vectors = format(item[u"vectors"][idx])
-                    suspends = format(item[u"suspends"][idx])
-                    if item[u"vectors"][idx] > 0:
-                        clocks = format(
-                            item[u"clocks"][idx]/item[u"vectors"][idx], u".2e")
-                    elif item[u"calls"][idx] > 0:
-                        clocks = format(
-                            item[u"clocks"][idx]/item[u"calls"][idx], u".2e")
-                    elif item[u"suspends"][idx] > 0:
-                        clocks = format(
-                            item[u"clocks"][idx]/item[u"suspends"][idx], u".2e")
-                    else:
-                        clocks = 0
-                    if item[u"calls"][idx] > 0:
-                        vectors_call = format(
-                            item[u"vectors"][idx]/item[u"calls"][idx], u".2f")
-                    else:
-                        vectors_call = format(0, u".2f")
-                    if int(calls) + int(vectors) + int(suspends):
-                        table[idx].append([
-                            name, calls, vectors, suspends, clocks, vectors_call
-                        ])
-            text = ""
+        groups = re.search(self.REGEX_TC_PAPI_CLI, msg.message)
+        if not groups:
+            return
+        try:
+            host = groups.group(1)
+        except (AttributeError, IndexError):
+            host = u""
+        try:
+            sock = groups.group(2)
+        except (AttributeError, IndexError):
+            sock = u""
+
+        runtime = loads(str(msg.message).replace(u' ', u'').replace(u'\n', u'').
+                        replace(u"'", u'"').replace(u'b"', u'"').
+                        replace(u'u"', u'"').split(u":", 1)[1])
+
+        try:
+            threads_nr = len(runtime[0][u"clocks"])
+        except (IndexError, KeyError):
+            return
+
+        dut = u"DUT{nr}".format(
+            nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
+
+        oper = {
+            u"host": host,
+            u"socket": sock,
+            u"threads": OrderedDict({idx: list() for idx in range(threads_nr)})
+        }
+
+        for item in runtime:
             for idx in range(threads_nr):
-                text += f"Thread {idx} "
-                text += u"vpp_main\n" if idx == 0 else f"vpp_wk_{idx-1}\n"
-                txt_table = None
-                for row in table[idx]:
-                    if txt_table is None:
-                        txt_table = prettytable.PrettyTable(row)
-                    else:
-                        if any(row[1:]):
-                            txt_table.add_row(row)
-                txt_table.set_style(prettytable.MSWORD_FRIENDLY)
-                txt_table.align[u"Name"] = u"l"
-                txt_table.align[u"Calls"] = u"r"
-                txt_table.align[u"Vectors"] = u"r"
-                txt_table.align[u"Suspends"] = u"r"
-                txt_table.align[u"Clocks"] = u"r"
-                txt_table.align[u"Vectors/Calls"] = u"r"
-
-                text += txt_table.get_string(sortby=u"Name") + u'\n'
-            text = f"\n**DUT: {host}{socket}**\n{text}".\
-                replace(u'\n', u' |br| ').\
-                replace(u'\r', u'').\
-                replace(u'"', u"'")
-            self._data[u"tests"][self._test_id][u"show-run"] += text
+                if item[u"vectors"][idx] > 0:
+                    clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
+                elif item[u"calls"][idx] > 0:
+                    clocks = item[u"clocks"][idx] / item[u"calls"][idx]
+                elif item[u"suspends"][idx] > 0:
+                    clocks = item[u"clocks"][idx] / item[u"suspends"][idx]
+                else:
+                    clocks = 0.0
+
+                if item[u"calls"][idx] > 0:
+                    vectors_call = item[u"vectors"][idx] / item[u"calls"][idx]
+                else:
+                    vectors_call = 0.0
+
+                if int(item[u"calls"][idx]) + int(item[u"vectors"][idx]) + \
+                        int(item[u"suspends"][idx]):
+                    oper[u"threads"][idx].append([
+                        item[u"name"],
+                        item[u"calls"][idx],
+                        item[u"vectors"][idx],
+                        item[u"suspends"][idx],
+                        clocks,
+                        vectors_call
+                    ])
+
+        self._data[u'tests'][self._test_id][u'show-run'][dut] = copy.copy(oper)
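For illustration, the derived per-node values stored above, computed from one hypothetical runtime entry for a single thread:

    item = {  # hypothetical node entry from the parsed "stats runtime" message
        u"name": u"ethernet-input",
        u"calls": [500],
        u"vectors": [12800],
        u"suspends": [0],
        u"clocks": [1.2e6],
    }
    idx = 0
    clocks = item[u"clocks"][idx] / item[u"vectors"][idx]       # 93.75 clocks per packet
    vectors_call = item[u"vectors"][idx] / item[u"calls"][idx]  # 25.6 vectors per call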
 
     def _get_ndrpdr_throughput(self, msg):
         """Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test
@@ -628,18 +756,6 @@ class ExecutionChecker(ResultVisitor):
                 u"direction1": copy.copy(latency_default),
                 u"direction2": copy.copy(latency_default)
             },
-            u"NDR10": {
-                u"direction1": copy.copy(latency_default),
-                u"direction2": copy.copy(latency_default)
-            },
-            u"NDR50": {
-                u"direction1": copy.copy(latency_default),
-                u"direction2": copy.copy(latency_default)
-            },
-            u"NDR90": {
-                u"direction1": copy.copy(latency_default),
-                u"direction2": copy.copy(latency_default)
-            },
             u"PDR10": {
                 u"direction1": copy.copy(latency_default),
                 u"direction2": copy.copy(latency_default)
@@ -654,10 +770,12 @@ class ExecutionChecker(ResultVisitor):
             },
         }
 
-        groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+        # TODO: Rewrite when long and base are not needed
+        groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg)
         if groups is None:
-            groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg)
-
+            groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+        if groups is None:
+            groups = re.search(self.REGEX_NDRPDR_LAT_BASE, msg)
         if groups is None:
             return latency, u"FAIL"
 
@@ -697,6 +815,33 @@ class ExecutionChecker(ResultVisitor):
         except (IndexError, ValueError):
             pass
 
+        try:
+            latency[u"PDR90"][u"direction1"] = process_latency(groups.group(5))
+            latency[u"PDR90"][u"direction2"] = process_latency(groups.group(6))
+            latency[u"PDR50"][u"direction1"] = process_latency(groups.group(7))
+            latency[u"PDR50"][u"direction2"] = process_latency(groups.group(8))
+            latency[u"PDR10"][u"direction1"] = process_latency(groups.group(9))
+            latency[u"PDR10"][u"direction2"] = process_latency(groups.group(10))
+            latency[u"LAT0"][u"direction1"] = process_latency(groups.group(11))
+            latency[u"LAT0"][u"direction2"] = process_latency(groups.group(12))
+            if groups.lastindex == 12:
+                return latency, u"PASS"
+        except (IndexError, ValueError):
+            pass
+
+        # TODO: Remove when not needed
+        latency[u"NDR10"] = {
+            u"direction1": copy.copy(latency_default),
+            u"direction2": copy.copy(latency_default)
+        }
+        latency[u"NDR50"] = {
+            u"direction1": copy.copy(latency_default),
+            u"direction2": copy.copy(latency_default)
+        }
+        latency[u"NDR90"] = {
+            u"direction1": copy.copy(latency_default),
+            u"direction2": copy.copy(latency_default)
+        }
         try:
             latency[u"LAT0"][u"direction1"] = process_latency(groups.group(5))
             latency[u"LAT0"][u"direction2"] = process_latency(groups.group(6))
@@ -789,6 +934,8 @@ class ExecutionChecker(ResultVisitor):
         :returns: Nothing.
         """
 
+        self._sh_run_counter = 0
+
         longname_orig = test.longname.lower()
 
         # Check the ignore list
@@ -865,6 +1012,11 @@ class ExecutionChecker(ResultVisitor):
 
         if test.status == u"PASS":
             if u"NDRPDR" in tags:
+                test_result[u"msg"] = self._get_data_from_perf_test_msg(
+                    test.message). \
+                    replace(u'\n', u' |br| '). \
+                    replace(u'\r', u''). \
+                    replace(u'"', u"'")
                 test_result[u"type"] = u"NDRPDR"
                 test_result[u"throughput"], test_result[u"status"] = \
                     self._get_ndrpdr_throughput(test.message)
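The formatted message is made rST-friendly before it is stored; a minimal illustration of the replace chain above on a hypothetical two-line message:

    raw = u'1. 18.64      7.48\n2. 18.64      14.96'
    print(raw.replace(u'\n', u' |br| ').replace(u'\r', u'').replace(u'"', u"'"))
    # 1. 18.64      7.48 |br| 2. 18.64      14.96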
@@ -879,6 +1031,11 @@ class ExecutionChecker(ResultVisitor):
                 groups = re.search(self.REGEX_TCP, test.message)
                 test_result[u"result"] = int(groups.group(2))
             elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
+                test_result[u"msg"] = self._get_data_from_mrr_test_msg(
+                    test.message). \
+                    replace(u'\n', u' |br| '). \
+                    replace(u'\r', u''). \
+                    replace(u'"', u"'")
                 if u"MRR" in tags:
                     test_result[u"type"] = u"MRR"
                 else:
@@ -909,6 +1066,8 @@ class ExecutionChecker(ResultVisitor):
                     }
                 except (AttributeError, IndexError, ValueError, TypeError):
                     test_result[u"status"] = u"FAIL"
+            elif u"DEVICETEST" in tags:
+                test_result[u"type"] = u"DEVICETEST"
             else:
                 test_result[u"status"] = u"FAIL"
                 self._data[u"tests"][self._test_id] = test_result
@@ -945,10 +1104,8 @@ class ExecutionChecker(ResultVisitor):
             if keyword.type == u"setup":
                 self.visit_setup_kw(keyword)
             elif keyword.type == u"teardown":
-                self._lookup_kw_nr = 0
                 self.visit_teardown_kw(keyword)
             else:
-                self._lookup_kw_nr = 0
                 self.visit_test_kw(keyword)
         except AttributeError:
             pass
@@ -984,9 +1141,8 @@ class ExecutionChecker(ResultVisitor):
         """
         if test_kw.name.count(u"Show Runtime On All Duts") or \
                 test_kw.name.count(u"Show Runtime Counters On All Duts"):
-            self._lookup_kw_nr += 1
-            self._show_run_lookup_nr = 0
             self._msg_type = u"test-show-runtime"
+            self._sh_run_counter += 1
         elif test_kw.name.count(u"Install Dpdk Test") and not self._version:
             self._msg_type = u"dpdk-version"
         else:
@@ -1449,7 +1605,9 @@ class InputData:
         """
 
         try:
-            if element[u"filter"] in (u"all", u"template"):
+            if data_set == "suites":
+                cond = u"True"
+            elif element[u"filter"] in (u"all", u"template"):
                 cond = u"True"
             else:
                 cond = InputData._condition(element[u"filter"])
@@ -1631,3 +1789,46 @@ class InputData:
                     merged_data[item_id] = item_data
 
         return merged_data
+
+    def print_all_oper_data(self):
+        """Print all operational data to console.
+        """
+
+        tbl_hdr = (
+            u"Name",
+            u"Nr of Vectors",
+            u"Nr of Packets",
+            u"Suspends",
+            u"Cycles per Packet",
+            u"Average Vector Size"
+        )
+
+        for job in self._input_data.values:
+            for build in job.values:
+                for test_id, test_data in build[u"tests"].items():
+                    print(f"{test_id}")
+                    if test_data.get(u"show-run", None) is None:
+                        continue
+                    for dut_name, data in test_data[u"show-run"].items():
+                        if data.get(u"threads", None) is None:
+                            continue
+                        print(f"Host IP: {data.get(u'host', '')}, "
+                              f"Socket: {data.get(u'socket', '')}")
+                        for thread_nr, thread in data[u"threads"].items():
+                            txt_table = prettytable.PrettyTable(tbl_hdr)
+                            avg = 0.0
+                            for row in thread:
+                                txt_table.add_row(row)
+                                avg += row[-1]
+                            if len(thread) == 0:
+                                avg = u""
+                            else:
+                                avg = f", Average Vector Size per Node: " \
+                                      f"{(avg / len(thread)):.2f}"
+                            th_name = u"main" if thread_nr == 0 \
+                                else f"worker_{thread_nr}"
+                            print(f"{dut_name}, {th_name}{avg}")
+                            txt_table.float_format = u".2"
+                            txt_table.align = u"r"
+                            txt_table.align[u"Name"] = u"l"
+                            print(f"{txt_table.get_string()}\n")
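A standalone sketch of the prettytable formatting used by print_all_oper_data(), filled with one hypothetical node row (the values are illustrative only):

    import prettytable

    tbl_hdr = (
        u"Name",
        u"Nr of Vectors",
        u"Nr of Packets",
        u"Suspends",
        u"Cycles per Packet",
        u"Average Vector Size"
    )
    txt_table = prettytable.PrettyTable(tbl_hdr)
    txt_table.add_row([u"ethernet-input", 500, 12800, 0, 93.75, 25.6])
    txt_table.float_format = u".2"   # two decimals for the float columns
    txt_table.align = u"r"           # numbers right-aligned,
    txt_table.align[u"Name"] = u"l"  # node names left-aligned
    print(txt_table.get_string())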