PAL: Do not read sh run for trending 43/32843/2
authorTibor Frank <tifrank@cisco.com>
Tue, 22 Jun 2021 11:49:05 +0000 (13:49 +0200)
committerTibor Frank <tifrank@cisco.com>
Tue, 22 Jun 2021 12:30:05 +0000 (14:30 +0200)
Change-Id: I0e142c1c3bb48ddeb0bcd2bfa11c0323461c1368
Signed-off-by: Tibor Frank <tifrank@cisco.com>
resources/tools/presentation/convert_xml_json.py
resources/tools/presentation/generator_tables.py
resources/tools/presentation/input_data_parser.py
resources/tools/presentation/pal.py

index 61c6e84..f1994df 100644 (file)
@@ -28,6 +28,7 @@ import gzip
 from os.path import join
 from shutil import rmtree
 from copy import deepcopy
+from json import loads
 
 from pal_utils import get_files
 
@@ -207,7 +208,8 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
                 u"msg": u"show_runtime",
                 u"data": list()
             }
-            for item in val.get(u"runtime", list()):
+            runtime = loads(val.get(u"runtime", list()))
+            for item in runtime:
                 for metric, m_data in item.items():
                     if metric == u"name":
                         continue
index d66a8fc..8218084 100644 (file)
@@ -24,6 +24,7 @@ from xml.etree import ElementTree as ET
 from datetime import datetime as dt
 from datetime import timedelta
 from copy import deepcopy
+from json import loads
 
 import plotly.graph_objects as go
 import plotly.offline as ploff
@@ -187,14 +188,16 @@ def table_oper_data_html(table, input_data):
                 tcol.text = u"No Data"
                 continue
 
+            runtime = loads(dut_data[u"runtime"])
+
             try:
-                threads_nr = len(dut_data[u"runtime"][0][u"clocks"])
+                threads_nr = len(runtime[0][u"clocks"])
             except (IndexError, KeyError):
                 tcol.text = u"No Data"
                 continue
 
             threads = OrderedDict({idx: list() for idx in range(threads_nr)})
-            for item in dut_data[u"runtime"]:
+            for item in runtime:
                 for idx in range(threads_nr):
                     if item[u"vectors"][idx] > 0:
                         clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
index 94f8e96..9151cf2 100644 (file)
@@ -295,7 +295,7 @@ class ExecutionChecker(ResultVisitor):
 
     REGEX_TC_PAPI_CLI = re.compile(r'.*\((\d+.\d+.\d+.\d+.) - (.*)\)')
 
-    def __init__(self, metadata, mapping, ignore):
+    def __init__(self, metadata, mapping, ignore, for_output):
         """Initialisation.
 
         :param metadata: Key-value pairs to be included in "metadata" part of
@@ -303,9 +303,11 @@ class ExecutionChecker(ResultVisitor):
         :param mapping: Mapping of the old names of test cases to the new
             (actual) one.
         :param ignore: List of TCs to be ignored.
+        :param for_output: Output to be generated from downloaded data.
         :type metadata: dict
         :type mapping: dict
         :type ignore: list
+        :type for_output: str
         """
 
         # Type of message to parse out from the test messages
@@ -326,6 +328,8 @@ class ExecutionChecker(ResultVisitor):
         # Ignore list
         self._ignore = ignore
 
+        self._for_output = for_output
+
         # Number of PAPI History messages found:
         # 0 - no message
         # 1 - PAPI History of DUT1
@@ -669,10 +673,6 @@ class ExecutionChecker(ResultVisitor):
         except (AttributeError, IndexError):
             sock = u""
 
-        runtime = loads(str(msg.message).replace(u' ', u'').replace(u'\n', u'').
-                        replace(u"'", u'"').replace(u'b"', u'"').
-                        replace(u'u"', u'"').split(u":", 1)[1])
-
         dut = u"dut{nr}".format(
             nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
 
@@ -681,7 +681,10 @@
                 {
                     u"host": host,
                     u"socket": sock,
-                    u"runtime": runtime,
+                    u"runtime": str(msg.message).replace(u' ', u'').
+                                replace(u'\n', u'').replace(u"'", u'"').
+                                replace(u'b"', u'"').replace(u'u"', u'"').
+                                split(u":", 1)[1]
                 }
             )
 
@@ -1225,9 +1228,10 @@ class ExecutionChecker(ResultVisitor):
         :type test_kw: Keyword
         :returns: Nothing.
         """
-        if test_kw.name.count(u"Show Runtime On All Duts") or \
-                test_kw.name.count(u"Show Runtime Counters On All Duts") or \
-                test_kw.name.count(u"Vpp Show Runtime On All Duts"):
+        if ((self._for_output != u"trending") and
+            (test_kw.name.count(u"Show Runtime On All Duts") or
+             test_kw.name.count(u"Show Runtime Counters On All Duts") or
+             test_kw.name.count(u"Vpp Show Runtime On All Duts"))):
             self._msg_type = u"test-show-runtime"
             self._sh_run_counter += 1
         else:
@@ -1366,16 +1370,20 @@ class InputData:
           (as described in ExecutionChecker documentation)
     """
 
-    def __init__(self, spec):
+    def __init__(self, spec, for_output):
         """Initialization.
 
         :param spec: Specification.
+        :param for_output: Output to be generated from downloaded data.
         :type spec: Specification
+        :type for_output: str
         """
 
         # Specification:
         self._cfg = spec
 
+        self._for_output = for_output
+
         # Data store:
         self._input_data = pd.Series()
 
@@ -1450,7 +1458,7 @@ class InputData:
                 )
                 return None
         checker = ExecutionChecker(
-            metadata, self._cfg.mapping, self._cfg.ignore
+            metadata, self._cfg.mapping, self._cfg.ignore, self._for_output
         )
         result.visit(checker)
 
@@ -1998,13 +2006,14 @@ class InputData:
                     for dut_name, data in test_data[u"show-run"].items():
                         if data.get(u"runtime", None) is None:
                             continue
+                        runtime = loads(data[u"runtime"])
                         try:
-                            threads_nr = len(data[u"runtime"][0][u"clocks"])
+                            threads_nr = len(runtime[0][u"clocks"])
                         except (IndexError, KeyError):
                             continue
                         threads = OrderedDict(
                             {idx: list() for idx in range(threads_nr)})
-                        for item in data[u"runtime"]:
+                        for item in runtime:
                             for idx in range(threads_nr):
                                 if item[u"vectors"][idx] > 0:
                                     clocks = item[u"clocks"][idx] / \
index 7e2d9a8..4f9b24f 100644 (file)
@@ -148,7 +148,7 @@ def main():
 
         prepare_static_content(spec)
 
-        data = InputData(spec)
+        data = InputData(spec, spec.output[u"output"])
        if args.input_file:
             data.process_local_file(args.input_file)
         elif args.input_directory: