X-Git-Url: https://gerrit.fd.io/r/gitweb?a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=35ec69d07daa0a5d0b60fe4f1c7f1391be5c65a3;hb=73ec7448c57b8d7632ed90c1dbdcf1a32b748ccd;hp=f47f1bc6df62cc8e4ffee078f8506d7043e0746b;hpb=144ebec26ff0a32000283eddfac393e497e01164;p=csit.git

diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index f47f1bc6df..35ec69d07d 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -251,8 +251,8 @@ class ExecutionChecker(ResultVisitor):
     # TODO: Remove when definitely no NDRPDRDISC tests are used:
     REGEX_RATE = re.compile(r'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)\s(\w+)')
 
-    REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::\s(\d+.\d+).*\n'
-                                r'PLRsearch upper bound::\s(\d+.\d+)')
+    REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::?\s(\d+.\d+).*\n'
+                                r'PLRsearch upper bound::?\s(\d+.\d+)')
 
     REGEX_NDRPDR_RATE = re.compile(r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
                                    r'NDR_UPPER:\s(\d+.\d+).*\n'
@@ -291,6 +291,9 @@ class ExecutionChecker(ResultVisitor):
     REGEX_BMRR = re.compile(r'Maximum Receive Rate trial results'
                             r' in packets per second: \[(.*)\]')
 
+    REGEX_RECONF_LOSS = re.compile(r'Packets lost due to reconfig: (\d*)')
+    REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.\d*)')
+
     REGEX_TC_TAG = re.compile(r'\d+[tT]\d+[cC]')
 
     REGEX_TC_NAME_OLD = re.compile(r'-\d+[tT]\d+[cC]-')
@@ -502,7 +505,7 @@ class ExecutionChecker(ResultVisitor):
                 self._data["tests"][self._test_ID]["show-run"] = str()
             if self._lookup_kw_nr > 1:
                 self._msg_type = None
-            if self._show_run_lookup_nr == 1:
+            if self._show_run_lookup_nr > 0:
                 message = str(msg.message).replace(' ', '').replace('\n', '').\
                     replace("'", '"').replace('b"', '"').replace('u"', '"')[8:]
                 runtime = loads(message)
@@ -547,7 +550,8 @@ class ExecutionChecker(ResultVisitor):
             try:
                 self._data["tests"][self._test_ID]["show-run"] += " |br| "
                 self._data["tests"][self._test_ID]["show-run"] += \
-                    "**DUT" + str(self._lookup_kw_nr) + ":** |br| " + text
+                    "**DUT" + str(self._show_run_lookup_nr) + ":** |br| " \
+                    + text
             except KeyError:
                 pass
 
@@ -801,6 +805,7 @@ class ExecutionChecker(ResultVisitor):
             groups = re.search(self.REGEX_TC_NAME_OLD, self._test_ID)
             if not groups:
                 tag_count = 0
+                tag_tc = str()
                 for tag in test_result["tags"]:
                     groups = re.search(self.REGEX_TC_TAG, tag)
                     if groups:
@@ -829,7 +834,8 @@ class ExecutionChecker(ResultVisitor):
                 "SOAK" in tags or
                 "TCP" in tags or
                 "MRR" in tags or
-                "BMRR" in tags):
+                "BMRR" in tags or
+                "RECONF" in tags):
             # TODO: Remove when definitely no NDRPDRDISC tests are used:
             if "NDRDISC" in tags:
                 test_result["type"] = "NDR"
@@ -846,6 +852,8 @@ class ExecutionChecker(ResultVisitor):
                 test_result["type"] = "MRR"
             elif "FRMOBL" in tags or "BMRR" in tags:
                 test_result["type"] = "BMRR"
+            elif "RECONF" in tags:
+                test_result["type"] = "RECONF"
             else:
                 test_result["status"] = "FAIL"
                 self._data["tests"][self._test_ID] = test_result
@@ -907,6 +915,18 @@ class ExecutionChecker(ResultVisitor):
                     AvgStdevMetadataFactory.from_data([
                         float(groups.group(3)) / float(groups.group(1)), ])
 
+        elif test_result["type"] == "RECONF":
+            test_result["result"] = None
+            try:
+                grps_loss = re.search(self.REGEX_RECONF_LOSS, test.message)
+                grps_time = re.search(self.REGEX_RECONF_TIME, test.message)
+                test_result["result"] = {
+                    "loss": int(grps_loss.group(1)),
+                    "time": float(grps_time.group(1))
+                }
+            except (AttributeError, IndexError, ValueError, TypeError):
+                test_result["status"] = "FAIL"
+
         self._data["tests"][self._test_ID] = test_result
 
     def end_test(self, test):
@@ -1299,7 +1319,7 @@ class InputData(object):
                                     job=job,
                                     sep=SEPARATOR,
                                     build=build["build"],
-                                    name=file_name))
+                                    name=file_name.replace(".gz", "")))
             try:
                 remove(full_name)
                 logs.append(("INFO",
@@ -1405,7 +1425,7 @@ class InputData(object):
             index += 1
         tag_filter = tag_filter[:index] + " in tags" + tag_filter[index:]
 
-    def filter_data(self, element, params=None, data_set="tests",
+    def filter_data(self, element, params=None, data=None, data_set="tests",
                     continue_on_error=False):
         """Filter required data from the given jobs and builds.
 
@@ -1428,13 +1448,16 @@ class InputData(object):
 
         :param element: Element which will use the filtered data.
         :param params: Parameters which will be included in the output. If None,
-        all parameters are included.
+            all parameters are included.
+        :param data: If not None, this data is used instead of data specified
+            in the element.
         :param data_set: The set of data to be filtered: tests, suites,
-        metadata.
+            metadata.
         :param continue_on_error: Continue if there is error while reading the
-        data. The Item will be empty then
+            data. The Item will be empty then
         :type element: pandas.Series
         :type params: list
+        :type data: dict
        :type data_set: str
        :type continue_on_error: bool
        :returns: Filtered data.
@@ -1456,9 +1479,10 @@ class InputData(object):
         if params:
             params.append("type")
 
+        data_to_filter = data if data else element["data"]
         data = pd.Series()
         try:
-            for job, builds in element["data"].items():
+            for job, builds in data_to_filter.items():
                 data[job] = pd.Series()
                 for build in builds:
                     data[job][str(build)] = pd.Series()
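
Note on the added RECONF branch: it only extracts two numbers from the test
message. Below is a minimal standalone sketch of what that branch computes,
assuming test.message matches the two added regexes; the sample message is
invented for illustration, not taken from a real test run:

    import re

    # The two patterns added by this change, copied verbatim.
    REGEX_RECONF_LOSS = re.compile(r'Packets lost due to reconfig: (\d*)')
    REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.\d*)')

    # Invented sample message, assumed to match the format the regexes target.
    msg = ("Packets lost due to reconfig: 43076\n"
           "Implied time lost: 0.0107690080")

    grps_loss = re.search(REGEX_RECONF_LOSS, msg)
    grps_time = re.search(REGEX_RECONF_TIME, msg)
    result = {
        "loss": int(grps_loss.group(1)),    # 43076
        "time": float(grps_time.group(1)),  # 0.010769008
    }

If either pattern does not match, re.search() returns None, .group(1) raises
AttributeError, and the except clause in the diff marks the test "FAIL"; a
malformed number fails the same way via ValueError/TypeError.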
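Note on the new "data" argument of filter_data(): it lets a caller filter a
prebuilt {job: [build, ...]} mapping instead of the builds listed in
element["data"]; with data=None the old behavior is kept, per the added line
data_to_filter = data if data else element["data"] (mind the truthiness test:
an empty dict also falls back to element["data"]). A usage sketch; the
instance name, job name, build numbers and params below are invented for
illustration:

    filtered = input_data.filter_data(
        element,
        params=["name", "type", "result"],                    # keys to keep
        data={"csit-vpp-perf-verify-master-3n-hsw": [2, 3]},  # hypothetical
        data_set="tests",
        continue_on_error=True)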