Python3: resources and libraries
[csit.git] resources/tools/presentation/input_data_parser.py
index 670cb32..46c8b9d 100644
@@ -19,6 +19,7 @@
 - filter the data using tags,
 """
 
+import copy
 import re
 import resource
 import pandas as pd
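The new "copy" import serves the latency refactoring further down in this patch, where one default dict is duplicated per direction. A minimal stand-alone sketch of the aliasing problem it avoids (all names below are illustrative, not part of the patch):

    import copy

    latency_default = {"min": -1.0, "avg": -1.0, "max": -1.0, "hdrh": ""}

    # Without copying, both directions alias the very same dict object ...
    aliased = {"direction1": latency_default, "direction2": latency_default}
    assert aliased["direction1"] is aliased["direction2"]

    # ... so a shallow copy.copy() per direction keeps the defaults independent.
    independent = {"direction1": copy.copy(latency_default),
                   "direction2": copy.copy(latency_default)}
    independent["direction1"]["min"] = 10.0
    assert independent["direction2"]["min"] == -1.0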
@@ -30,7 +31,6 @@ from robot import errors
 from collections import OrderedDict
 from string import replace
 from os import remove
-from os.path import join
 from datetime import datetime as dt
 from datetime import timedelta
 from json import loads
@@ -98,24 +98,28 @@ class ExecutionChecker(ResultVisitor):
                         "direction1": {
                             "min": float,
                             "avg": float,
                         "direction1": {
                             "min": float,
                             "avg": float,
-                            "max": float
+                            "max": float,
+                            "hdrh": str
                         },
                         "direction2": {
                             "min": float,
                             "avg": float,
-                            "max": float
+                            "max": float,
+                            "hdrh": str
                         }
                     },
                     "PDR": {
                         "direction1": {
                             "min": float,
                             "avg": float,
-                            "max": float
+                            "max": float,
+                            "hdrh": str
                         },
                         "direction2": {
                             "min": float,
                             "avg": float,
-                            "max": float
+                            "max": float,
+                            "hdrh": str
                         }
                     }
                 }
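With the "hdrh" field added, one parsed latency entry under the updated schema would look roughly like the sketch below; the numeric values and the compressed-histogram string are placeholders, not real measurements:

    # Hypothetical NDR latency entry; "hdrh" holds the compressed HdrHistogram
    # payload as a string (shortened here to a placeholder).
    example_ndr_latency = {
        "direction1": {"min": 10.0, "avg": 15.0, "max": 40.0, "hdrh": "HISTFAAAA..."},
        "direction2": {"min": 11.0, "avg": 16.0, "max": 42.0, "hdrh": "HISTFAAAA..."},
    }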
@@ -147,60 +151,6 @@ class ExecutionChecker(ResultVisitor):
                 }
             }
 
-            # TODO: Remove when definitely no NDRPDRDISC tests are used:
-            # NDRPDRDISC tests:
-            "ID": {
-                "name": "Test name",
-                "parent": "Name of the parent of the test",
-                "doc": "Test documentation",
-                "msg": "Test message",
-                "tags": ["tag 1", "tag 2", "tag n"],
-                "type": "PDR" | "NDR",
-                "status": "PASS" | "FAIL",
-                "throughput": {  # Only type: "PDR" | "NDR"
-                    "value": int,
-                    "unit": "pps" | "bps" | "percentage"
-                },
-                "latency": {  # Only type: "PDR" | "NDR"
-                    "direction1": {
-                        "100": {
-                            "min": int,
-                            "avg": int,
-                            "max": int
-                        },
-                        "50": {  # Only for NDR
-                            "min": int,
-                            "avg": int,
-                            "max": int
-                        },
-                        "10": {  # Only for NDR
-                            "min": int,
-                            "avg": int,
-                            "max": int
-                        }
-                    },
-                    "direction2": {
-                        "100": {
-                            "min": int,
-                            "avg": int,
-                            "max": int
-                        },
-                        "50": {  # Only for NDR
-                            "min": int,
-                            "avg": int,
-                            "max": int
-                        },
-                        "10": {  # Only for NDR
-                            "min": int,
-                            "avg": int,
-                            "max": int
-                        }
-                    }
-                },
-                "lossTolerance": "lossTolerance",  # Only type: "PDR"
-                "conf-history": "DUT1 and DUT2 VAT History"
-                "show-run": "Show Run"
-            },
             "ID" {
                 # next test
             }
             "ID" {
                 # next test
             }
@@ -259,19 +209,6 @@ class ExecutionChecker(ResultVisitor):
                                    r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
                                    r'PDR_UPPER:\s(\d+.\d+)')
 
-    # TODO: Remove when definitely no NDRPDRDISC tests are used:
-    REGEX_LAT_NDR = re.compile(r'^[\D\d]*'
-                               r'LAT_\d+%NDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
-                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\]\s\n'
-                               r'LAT_\d+%NDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
-                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\]\s\n'
-                               r'LAT_\d+%NDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
-                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\]')
-
-    REGEX_LAT_PDR = re.compile(r'^[\D\d]*'
-                               r'LAT_\d+%PDR:\s\[\'(-?\d+/-?\d+/-?\d+)\','
-                               r'\s\'(-?\d+/-?\d+/-?\d+)\'\][\D\d]*')
-
     REGEX_NDRPDR_LAT = re.compile(r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
                                   r'LATENCY.*\[\'(.*)\', \'(.*)\'\]')
 
@@ -283,7 +220,7 @@ class ExecutionChecker(ResultVisitor):
 
     REGEX_VERSION_DPDK = re.compile(r"(DPDK version:\s*|DPDK Version:\s*)(.*)")
 
-    REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s([0-9]*).*$')
+    REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s(\d*).*$')
 
     REGEX_MRR = re.compile(r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
                            r'tx\s(\d*),\srx\s(\d*)')
@@ -292,7 +229,7 @@ class ExecutionChecker(ResultVisitor):
                             r' in packets per second: \[(.*)\]')
 
     REGEX_RECONF_LOSS = re.compile(r'Packets lost due to reconfig: (\d*)')
-    REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.\d*)')
+    REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.[\de-]*)')
 
     REGEX_TC_TAG = re.compile(r'\d+[tT]\d+[cC]')
 
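The REGEX_TCP change ([0-9] to \d) is purely cosmetic, but widening REGEX_RECONF_TIME to [\de-]* lets the capture keep values reported in scientific notation. A quick stand-alone check with made-up inputs:

    import re

    REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.[\de-]*)')

    print(REGEX_RECONF_TIME.search("Implied time lost: 0.012").group(1))
    # 0.012
    print(REGEX_RECONF_TIME.search("Implied time lost: 5.5e-07").group(1))
    # 5.5e-07 -- the old (\d*.\d*) pattern would have stopped at "5.5"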
@@ -300,7 +237,9 @@ class ExecutionChecker(ResultVisitor):
 
     REGEX_TC_NAME_NEW = re.compile(r'-\d+[cC]-')
 
-    REGEX_TC_NUMBER = re.compile(r'tc[0-9]{2}-')
+    REGEX_TC_NUMBER = re.compile(r'tc\d{2}-')
+
+    REGEX_TC_PAPI_CLI = re.compile(r'.*\((\d+.\d+.\d+.\d+.) - (.*)\)')
 
     def __init__(self, metadata, mapping, ignore):
         """Initialisation.
@@ -460,7 +399,7 @@ class ExecutionChecker(ResultVisitor):
                 self._data["tests"][self._test_ID]["conf-history"] = str()
             else:
                 self._msg_type = None
                 self._data["tests"][self._test_ID]["conf-history"] = str()
             else:
                 self._msg_type = None
-            text = re.sub("[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3} "
+            text = re.sub("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
                           "VAT command history:", "", msg.message, count=1). \
                 replace("\n\n", "\n").replace('\n', ' |br| ').\
                 replace('\r', '').replace('"', "'")
                           "VAT command history:", "", msg.message, count=1). \
                 replace("\n\n", "\n").replace('\n', ' |br| ').\
                 replace('\r', '').replace('"', "'")
@@ -482,7 +421,7 @@ class ExecutionChecker(ResultVisitor):
                 self._data["tests"][self._test_ID]["conf-history"] = str()
             else:
                 self._msg_type = None
                 self._data["tests"][self._test_ID]["conf-history"] = str()
             else:
                 self._msg_type = None
-            text = re.sub("[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3} "
+            text = re.sub("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
                           "PAPI command history:", "", msg.message, count=1). \
                 replace("\n\n", "\n").replace('\n', ' |br| ').\
                 replace('\r', '').replace('"', "'")
                           "PAPI command history:", "", msg.message, count=1). \
                 replace("\n\n", "\n").replace('\n', ' |br| ').\
                 replace('\r', '').replace('"', "'")
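Functionally, the VAT and PAPI history hunks only swap [0-9]{1,3} for \d{1,3}; the surrounding re.sub() still strips the leading "ip-address ... command history:" prefix as before. A stand-alone sketch with a made-up message (the pattern is written as a raw string here only to avoid the invalid-escape warning):

    import re

    message = "10.30.51.45 PAPI command history: sw_interface_dump |br| show_version"
    text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
                  "PAPI command history:", "", message, count=1)
    print(text)  # " sw_interface_dump |br| show_version"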
@@ -499,108 +438,77 @@ class ExecutionChecker(ResultVisitor):
         :type msg: Message
         :returns: Nothing.
         """
-        if msg.message.count("Runtime:"):
-            self._show_run_lookup_nr += 1
-            if self._lookup_kw_nr == 1 and self._show_run_lookup_nr == 1:
-                self._data["tests"][self._test_ID]["show-run"] = str()
-            if self._lookup_kw_nr > 1:
-                self._msg_type = None
-            if self._show_run_lookup_nr > 0:
-                message = str(msg.message).replace(' ', '').replace('\n', '').\
-                    replace("'", '"').replace('b"', '"').replace('u"', '"')[8:]
-                runtime = loads(message)
-                try:
-                    threads_nr = len(runtime[0]["clocks"])
-                except (IndexError, KeyError):
-                    return
-                tbl_hdr = ["Name", "Calls", "Vectors", "Suspends", "Clocks"]
-                table = [[tbl_hdr, ] for _ in range(threads_nr)]
-                for item in runtime:
-                    for idx in range(threads_nr):
+        if not "show-run" in self._data["tests"][self._test_ID].keys():
+            self._data["tests"][self._test_ID]["show-run"] = str()
+
+        if msg.message.count("stats runtime"):
+            host = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).\
+                       group(1))
+            socket = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).\
+                         group(2))
+            message = str(msg.message).replace(' ', '').replace('\n', '').\
+                replace("'", '"').replace('b"', '"').replace('u"', '"').\
+                split(":",1)[1]
+            runtime = loads(message)
+            try:
+                threads_nr = len(runtime[0]["clocks"])
+            except (IndexError, KeyError):
+                return
+            tbl_hdr = ["Name", "Calls", "Vectors", "Suspends", "Clocks",
+                       "Vectors/Calls"]
+            table = [[tbl_hdr, ] for _ in range(threads_nr)]
+            for item in runtime:
+                for idx in range(threads_nr):
+                    name = format(item["name"])
+                    calls = format(item["calls"][idx])
+                    vectors = format(item["vectors"][idx])
+                    suspends = format(item["suspends"][idx])
+                    if item["vectors"][idx] > 0:
+                        clocks = format(
+                            item["clocks"][idx]/item["vectors"][idx], ".2e")
+                    elif item["calls"][idx] > 0:
+                        clocks = format(
+                            item["clocks"][idx]/item["calls"][idx], ".2e")
+                    elif item["suspends"][idx] > 0:
+                        clocks = format(
+                            item["clocks"][idx]/item["suspends"][idx], ".2e")
+                    else:
+                        clocks = 0
+                    if item["calls"][idx] > 0:
+                        vectors_call = format(
+                            item["vectors"][idx]/item["calls"][idx], ".2f")
+                    else:
+                        vectors_call = format(0, ".2f")
+                    if int(calls) + int(vectors) + int(suspends):
                         table[idx].append([
-                            item["name"],
-                            item["calls"][idx],
-                            item["vectors"][idx],
-                            item["suspends"][idx],
-                            item["clocks"][idx]
+                            name, calls, vectors, suspends, clocks, vectors_call
                         ])
-                text = ""
-                for idx in range(threads_nr):
-                    text += "Thread {idx} ".format(idx=idx)
-                    text += "vpp_main\n" if idx == 0 else \
-                        "vpp_wk_{idx}\n".format(idx=idx-1)
-                    txt_table = None
-                    for row in table[idx]:
-                        if txt_table is None:
-                            txt_table = prettytable.PrettyTable(row)
-                        else:
-                            if any(row[1:]):
-                                txt_table.add_row(row)
-                    txt_table.set_style(prettytable.MSWORD_FRIENDLY)
-                    txt_table.align["Name"] = "l"
-                    txt_table.align["Calls"] = "r"
-                    txt_table.align["Vectors"] = "r"
-                    txt_table.align["Suspends"] = "r"
-                    txt_table.align["Clocks"] = "r"
-
-                    text += txt_table.get_string(sortby="Name") + '\n'
-
-                text = text.replace('\n', ' |br| ').replace('\r', '').\
-                    replace('"', "'")
-                try:
-                    self._data["tests"][self._test_ID]["show-run"] += " |br| "
-                    self._data["tests"][self._test_ID]["show-run"] += \
-                        "**DUT" + str(self._show_run_lookup_nr) + ":** |br| " \
-                        + text
-                except KeyError:
-                    pass
-
-    # TODO: Remove when definitely no NDRPDRDISC tests are used:
-    def _get_latency(self, msg, test_type):
-        """Get the latency data from the test message.
-
-        :param msg: Message to be parsed.
-        :param test_type: Type of the test - NDR or PDR.
-        :type msg: str
-        :type test_type: str
-        :returns: Latencies parsed from the message.
-        :rtype: dict
-        """
-
-        if test_type == "NDR":
-            groups = re.search(self.REGEX_LAT_NDR, msg)
-            groups_range = range(1, 7)
-        elif test_type == "PDR":
-            groups = re.search(self.REGEX_LAT_PDR, msg)
-            groups_range = range(1, 3)
-        else:
-            return {}
-
-        latencies = list()
-        for idx in groups_range:
-            try:
-                lat = [int(item) for item in str(groups.group(idx)).split('/')]
-            except (AttributeError, ValueError):
-                lat = [-1, -1, -1]
-            latencies.append(lat)
-
-        keys = ("min", "avg", "max")
-        latency = {
-            "direction1": {
-            },
-            "direction2": {
-            }
-        }
-
-        latency["direction1"]["100"] = dict(zip(keys, latencies[0]))
-        latency["direction2"]["100"] = dict(zip(keys, latencies[1]))
-        if test_type == "NDR":
-            latency["direction1"]["50"] = dict(zip(keys, latencies[2]))
-            latency["direction2"]["50"] = dict(zip(keys, latencies[3]))
-            latency["direction1"]["10"] = dict(zip(keys, latencies[4]))
-            latency["direction2"]["10"] = dict(zip(keys, latencies[5]))
-
-        return latency
+            text = ""
+            for idx in range(threads_nr):
+                text += "Thread {idx} ".format(idx=idx)
+                text += "vpp_main\n" if idx == 0 else \
+                    "vpp_wk_{idx}\n".format(idx=idx-1)
+                txt_table = None
+                for row in table[idx]:
+                    if txt_table is None:
+                        txt_table = prettytable.PrettyTable(row)
+                    else:
+                        if any(row[1:]):
+                            txt_table.add_row(row)
+                txt_table.set_style(prettytable.MSWORD_FRIENDLY)
+                txt_table.align["Name"] = "l"
+                txt_table.align["Calls"] = "r"
+                txt_table.align["Vectors"] = "r"
+                txt_table.align["Suspends"] = "r"
+                txt_table.align["Clocks"] = "r"
+                txt_table.align["Vectors/Calls"] = "r"
+
+                text += txt_table.get_string(sortby="Name") + '\n'
+            text = (" \n **DUT: {host}/{socket}** \n {text}".
+                    format(host=host, socket=socket, text=text))
+            text = text.replace('\n', ' |br| ').replace('\r', '').\
+                replace('"', "'")
+            self._data["tests"][self._test_ID]["show-run"] += text
 
     def _get_ndrpdr_throughput(self, msg):
         """Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test
@@ -666,31 +574,52 @@ class ExecutionChecker(ResultVisitor):
         :returns: Parsed data as a dict and the status (PASS/FAIL).
         :rtype: tuple(dict, str)
         """
-
+        latency_default = {"min": -1.0, "avg": -1.0, "max": -1.0, "hdrh": ""}
         latency = {
             "NDR": {
-                "direction1": {"min": -1.0, "avg": -1.0, "max": -1.0},
-                "direction2": {"min": -1.0, "avg": -1.0, "max": -1.0}
+                "direction1": copy.copy(latency_default),
+                "direction2": copy.copy(latency_default)
             },
             "PDR": {
-                "direction1": {"min": -1.0, "avg": -1.0, "max": -1.0},
-                "direction2": {"min": -1.0, "avg": -1.0, "max": -1.0}
+                "direction1": copy.copy(latency_default),
+                "direction2": copy.copy(latency_default)
             }
         }
         status = "FAIL"
         groups = re.search(self.REGEX_NDRPDR_LAT, msg)
 
+        def process_latency(in_str):
+            """Return object with parsed latency values.
+
+            TODO: Define class for the return type.
+
+            :param in_str: Input string, min/avg/max/hdrh format.
+            :type in_str: str
+            :returns: Dict with corresponding keys, except hdrh float values.
+            :rtype dict:
+            :throws IndexError: If in_str does not have enough substrings.
+            :throws ValueError: If a substring does not convert to float.
+            """
+            in_list = in_str.split('/')
+
+            rval = {
+                "min": float(in_list[0]),
+                "avg": float(in_list[1]),
+                "max": float(in_list[2]),
+                "hdrh": ""
+            }
+
+            if len(in_list) == 4:
+                rval["hdrh"] = str(in_list[3])
+
+            return rval
+
         if groups is not None:
-            keys = ("min", "avg", "max")
             try:
-                latency["NDR"]["direction1"] = dict(
-                    zip(keys, [float(l) for l in groups.group(1).split('/')]))
-                latency["NDR"]["direction2"] = dict(
-                    zip(keys, [float(l) for l in groups.group(2).split('/')]))
-                latency["PDR"]["direction1"] = dict(
-                    zip(keys, [float(l) for l in groups.group(3).split('/')]))
-                latency["PDR"]["direction2"] = dict(
-                    zip(keys, [float(l) for l in groups.group(4).split('/')]))
+                latency["NDR"]["direction1"] = process_latency(groups.group(1))
+                latency["NDR"]["direction2"] = process_latency(groups.group(2))
+                latency["PDR"]["direction1"] = process_latency(groups.group(3))
+                latency["PDR"]["direction2"] = process_latency(groups.group(4))
                 status = "PASS"
             except (IndexError, ValueError):
                 pass
                 status = "PASS"
             except (IndexError, ValueError):
                 pass
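The new process_latency() helper accepts both the legacy min/avg/max triple and the extended min/avg/max/hdrh quadruple, so older test messages keep parsing. A stand-alone copy for illustration (the hdrh payload is a shortened placeholder):

    # Stand-alone copy of the helper added above, shown with both input shapes.
    def process_latency(in_str):
        in_list = in_str.split('/')
        rval = {"min": float(in_list[0]), "avg": float(in_list[1]),
                "max": float(in_list[2]), "hdrh": ""}
        if len(in_list) == 4:
            rval["hdrh"] = str(in_list[3])
        return rval

    print(process_latency("1.0/2.0/9.0"))
    # {'min': 1.0, 'avg': 2.0, 'max': 9.0, 'hdrh': ''}
    print(process_latency("1.0/2.0/9.0/HISTFAAAA..."))
    # {'min': 1.0, 'avg': 2.0, 'max': 9.0, 'hdrh': 'HISTFAAAA...'}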
@@ -1130,7 +1059,7 @@ class ExecutionChecker(ResultVisitor):
         pass
 
 
-class InputData(object):
+class InputData:
     """Input data
 
     The data is extracted from output.xml files generated by Jenkins jobs and
     """Input data
 
     The data is extracted from output.xml files generated by Jenkins jobs and
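Dropping the explicit (object) base is safe under Python 3, where every class is already new-style; a one-line check:

    class InputData:
        pass

    # In Python 3 the implicit base is still object, so behaviour is unchanged.
    assert issubclass(InputData, object)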
@@ -1312,23 +1241,6 @@ class InputData(object):
                             ("INFO",
                              "    The build {job}/{build} is outdated, will be "
                              "removed".format(job=job, build=build["build"])))
                             ("INFO",
                              "    The build {job}/{build} is outdated, will be "
                              "removed".format(job=job, build=build["build"])))
-                        file_name = self._cfg.input["file-name"]
-                        full_name = join(
-                            self._cfg.environment["paths"]["DIR[WORKING,DATA]"],
-                            "{job}{sep}{build}{sep}{name}".format(
-                                job=job,
-                                sep=SEPARATOR,
-                                build=build["build"],
-                                name=file_name))
-                        try:
-                            remove(full_name)
-                            logs.append(("INFO",
-                                         "    The file {name} has been removed".
-                                         format(name=full_name)))
-                        except OSError as err:
-                            logs.append(("ERROR",
-                                         "Cannot remove the file '{0}': {1}".
-                                         format(full_name, repr(err))))
         logs.append(("INFO", "  Done."))
 
         for level, line in logs:
         logs.append(("INFO", "  Done."))
 
         for level, line in logs: