X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=8747f938a90fbabd3b8a9e1694ce24ab6d87dd46;hp=00c2380fdc8d17388944ea45a731374184b725db;hb=7bd576415bbc25c00d9f5dd1b4cd19482432718f;hpb=9789d21f6be4959e9eecc870cf46478960957463

diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 00c2380fdc..8747f938a9 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -287,8 +287,6 @@ class ExecutionChecker(ResultVisitor):
     )
 
     REGEX_TC_TAG = re.compile(r'\d+[tT]\d+[cC]')
-    REGEX_TC_NAME_OLD = re.compile(r'-\d+[tT]\d+[cC]-')
-    REGEX_TC_NAME_NEW = re.compile(r'-\d+[cC]-')
 
     REGEX_TC_NUMBER = re.compile(r'tc\d{2}-')
 
@@ -362,7 +360,6 @@ class ExecutionChecker(ResultVisitor):
         # Dictionary defining the methods used to parse different types of
         # messages
         self.parse_msg = {
-            u"timestamp": self._get_timestamp,
             u"vpp-version": self._get_vpp_version,
             u"dpdk-version": self._get_dpdk_version,
             u"teardown-papi-history": self._get_papi_history,
@@ -614,18 +611,6 @@ class ExecutionChecker(ResultVisitor):
         finally:
             self._msg_type = None
 
-    def _get_timestamp(self, msg):
-        """Called when extraction of timestamp is required.
-
-        :param msg: Message to process.
-        :type msg: Message
-        :returns: Nothing.
-        """
-
-        self._timestamp = msg.timestamp[:14]
-        self._data[u"metadata"][u"generated"] = self._timestamp
-        self._msg_type = None
-
     def _get_papi_history(self, msg):
         """Called when extraction of PAPI command history is required.
 
@@ -1136,38 +1121,36 @@ class ExecutionChecker(ResultVisitor):
         else:
             test_result[u"msg"] = test.message
 
-        if u"PERFTEST" in tags:
+        if u"PERFTEST" in tags and u"TREX" not in tags:
             # Replace info about cores (e.g. -1c-) with the info about threads
             # and cores (e.g. -1t1c-) in the long test case names and in the
             # test case names if necessary.
-            groups = re.search(self.REGEX_TC_NAME_OLD, self._test_id)
-            if not groups:
-                tag_count = 0
-                tag_tc = str()
-                for tag in test_result[u"tags"]:
-                    groups = re.search(self.REGEX_TC_TAG, tag)
-                    if groups:
-                        tag_count += 1
-                        tag_tc = tag
-
-                if tag_count == 1:
-                    self._test_id = re.sub(
-                        self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
-                        self._test_id, count=1
-                    )
-                    test_result[u"name"] = re.sub(
-                        self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
-                        test_result["name"], count=1
-                    )
-                else:
-                    test_result[u"status"] = u"FAIL"
-                    self._data[u"tests"][self._test_id] = test_result
-                    logging.debug(
-                        f"The test {self._test_id} has no or more than one "
-                        f"multi-threading tags.\n"
-                        f"Tags: {test_result[u'tags']}"
-                    )
-                    return
+            tag_count = 0
+            tag_tc = str()
+            for tag in test_result[u"tags"]:
+                groups = re.search(self.REGEX_TC_TAG, tag)
+                if groups:
+                    tag_count += 1
+                    tag_tc = tag
+
+            if tag_count == 1:
+                self._test_id = re.sub(
+                    self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
+                    self._test_id, count=1
+                )
+                test_result[u"name"] = re.sub(
+                    self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
+                    test_result["name"], count=1
+                )
+            else:
+                test_result[u"status"] = u"FAIL"
+                self._data[u"tests"][self._test_id] = test_result
+                logging.debug(
+                    f"The test {self._test_id} has no or more than one "
+                    f"multi-threading tags.\n"
+                    f"Tags: {test_result[u'tags']}"
+                )
+                return
 
         if u"DEVICETEST" in tags:
             test_result[u"type"] = u"DEVICETEST"
@@ -1355,9 +1338,6 @@ class ExecutionChecker(ResultVisitor):
         elif setup_kw.name.count(u"Install Dpdk Framework On All Duts") and \
                 not self._version:
             self._msg_type = u"dpdk-version"
-        elif setup_kw.name.count(u"Set Global Variable") \
-                and not self._timestamp:
-            self._msg_type = u"timestamp"
        elif setup_kw.name.count(u"Setup Framework") and not self._testbed:
             self._msg_type = u"testbed"
         else:
@@ -1544,6 +1524,15 @@ class InputData:
         )
         result.visit(checker)
 
+        checker.data[u"metadata"][u"tests_total"] = \
+            result.statistics.total.all.total
+        checker.data[u"metadata"][u"tests_passed"] = \
+            result.statistics.total.all.passed
+        checker.data[u"metadata"][u"tests_failed"] = \
+            result.statistics.total.all.failed
+        checker.data[u"metadata"][u"elapsedtime"] = result.suite.elapsedtime
+        checker.data[u"metadata"][u"generated"] = result.suite.endtime[:14]
+
         return checker.data
 
     def _download_and_parse_build(self, job, build, repeat, pid=10000):
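The metadata values added in the last hunk are read directly from the Robot Framework result object rather than from a parsed "timestamp" log message. Below is a minimal sketch, not part of the change, of reading the same fields with robot.api; the "output.xml" path is only a placeholder, and the statistics.total.all attribute path is the Robot Framework 3.x API that the patch itself uses.

from robot.api import ExecutionResult

# Parse a Robot output file; the file name is a placeholder.
result = ExecutionResult(u"output.xml")

# Cumulative counters for the whole run, as stored into metadata above.
stats = result.statistics.total.all
print(stats.total, stats.passed, stats.failed)

# Elapsed time of the top-level suite, in milliseconds.
print(result.suite.elapsedtime)

# End timestamp such as u"20210615 12:34:56.789"; the first 14 characters
# (u"20210615 12:34") are what the patch keeps as the "generated" value.
print(result.suite.endtime[:14])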