X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=e1db03660d21d075bdc7640a03e10a845db0b757;hp=fcaf4a0d8ea13f3bee2f4ad44ce059a31cd27737;hb=37ea2ceb606bdfc338cc76330cb9289c12f63852;hpb=0df92678006f857a1ed051d9077868270ec1f543 diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py index fcaf4a0d8e..e1db03660d 100644 --- a/resources/tools/presentation/input_data_parser.py +++ b/resources/tools/presentation/input_data_parser.py @@ -1,4 +1,4 @@ -# Copyright (c) 2020 Cisco and/or its affiliates. +# Copyright (c) 2021 Cisco and/or its affiliates. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: @@ -229,9 +229,17 @@ class ExecutionChecker(ResultVisitor): r'Latency at 50% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n' r'Latency at 10% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n' ) + REGEX_CPS_MSG_INFO = re.compile( + r'NDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*\n' + r'PDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*' + ) + REGEX_PPS_MSG_INFO = re.compile( + r'NDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n' + r'PDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*' + ) REGEX_MRR_MSG_INFO = re.compile(r'.*\[(.*)\]') - # TODO: Remove when not needed + # Needed for CPS and PPS tests REGEX_NDRPDR_LAT_BASE = re.compile( r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n' r'LATENCY.*\[\'(.*)\', \'(.*)\'\]' @@ -244,18 +252,7 @@ class ExecutionChecker(ResultVisitor): r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' r'Latency.*\[\'(.*)\', \'(.*)\'\]' ) - # TODO: Remove when not needed - REGEX_NDRPDR_LAT_LONG = re.compile( - r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n' - r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n' - r'Latency.*\[\'(.*)\', \'(.*)\'\]' - ) + REGEX_VERSION_VPP = re.compile( r"(return STDOUT Version:\s*|" r"VPP Version:\s*|VPP version:\s*)(.*)" @@ -271,8 +268,7 @@ class ExecutionChecker(ResultVisitor): r'tx\s(\d*),\srx\s(\d*)' ) REGEX_BMRR = re.compile( - r'Maximum Receive Rate trial results' - r' in packets per second: \[(.*)\]' + r'.*trial results.*: \[(.*)\]' ) REGEX_RECONF_LOSS = re.compile( r'Packets lost due to reconfig: (\d*)' @@ -392,12 +388,56 @@ class ExecutionChecker(ResultVisitor): except (AttributeError, IndexError, ValueError, KeyError): return u"Test Failed." + def _get_data_from_cps_test_msg(self, msg): + """Get info from message of NDRPDR CPS tests. + + :param msg: Message to be processed. + :type msg: str + :returns: Processed message or "Test Failed." if a problem occurs. + :rtype: str + """ + + groups = re.search(self.REGEX_CPS_MSG_INFO, msg) + if not groups or groups.lastindex != 2: + return u"Test Failed." + + try: + return ( + f"1. {(float(groups.group(1)) / 1e6):5.2f}\n" + f"2. {(float(groups.group(2)) / 1e6):5.2f}" + ) + except (AttributeError, IndexError, ValueError, KeyError): + return u"Test Failed." + + def _get_data_from_pps_test_msg(self, msg): + """Get info from message of NDRPDR PPS tests. + + :param msg: Message to be processed. + :type msg: str + :returns: Processed message or "Test Failed." if a problem occurs. 
+ :rtype: str + """ + + groups = re.search(self.REGEX_PPS_MSG_INFO, msg) + if not groups or groups.lastindex != 4: + return u"Test Failed." + + try: + return ( + f"1. {(float(groups.group(1)) / 1e6):5.2f} " + f"{float(groups.group(2)):5.2f}\n" + f"2. {(float(groups.group(3)) / 1e6):5.2f} " + f"{float(groups.group(4)):5.2f}" + ) + except (AttributeError, IndexError, ValueError, KeyError): + return u"Test Failed." + def _get_data_from_perf_test_msg(self, msg): """Get info from message of NDRPDR performance tests. :param msg: Message to be processed. :type msg: str - :returns: Processed message or original message if a problem occurs. + :returns: Processed message or "Test Failed." if a problem occurs. :rtype: str """ @@ -532,8 +572,9 @@ class ExecutionChecker(ResultVisitor): if msg.message.count(u"return STDOUT Version:") or \ msg.message.count(u"VPP Version:") or \ msg.message.count(u"VPP version:"): - self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message). - group(2)) + self._version = str( + re.search(self.REGEX_VERSION_VPP, msg.message).group(2) + ) self._data[u"metadata"][u"version"] = self._version self._msg_type = None @@ -582,10 +623,12 @@ class ExecutionChecker(ResultVisitor): self._data[u"tests"][self._test_id][u"conf-history"] = str() else: self._msg_type = None - text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} " - r"VAT command history:", u"", - msg.message, count=1).replace(u'\n', u' |br| ').\ - replace(u'"', u"'") + text = re.sub( + r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} VAT command history:", + u"", + msg.message, + count=1 + ).replace(u'\n', u' |br| ').replace(u'"', u"'") self._data[u"tests"][self._test_id][u"conf-history"] += ( f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}" @@ -604,10 +647,12 @@ class ExecutionChecker(ResultVisitor): self._data[u"tests"][self._test_id][u"conf-history"] = str() else: self._msg_type = None - text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} " - r"PAPI command history:", u"", - msg.message, count=1).replace(u'\n', u' |br| ').\ - replace(u'"', u"'") + text = re.sub( + r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} PAPI command history:", + u"", + msg.message, + count=1 + ).replace(u'\n', u' |br| ').replace(u'"', u"'") self._data[u"tests"][self._test_id][u"conf-history"] += ( f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}" ) @@ -816,10 +861,7 @@ class ExecutionChecker(ResultVisitor): }, } - # TODO: Rewrite when long and base are not needed - groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg) - if groups is None: - groups = re.search(self.REGEX_NDRPDR_LAT, msg) + groups = re.search(self.REGEX_NDRPDR_LAT, msg) if groups is None: groups = re.search(self.REGEX_NDRPDR_LAT_BASE, msg) if groups is None: @@ -1038,11 +1080,16 @@ class ExecutionChecker(ResultVisitor): name = test.name.lower() # Remove TC number from the TC long name (backward compatibility): - self._test_id = re.sub(self.REGEX_TC_NUMBER, u"", longname) + self._test_id = re.sub( + self.REGEX_TC_NUMBER, u"", longname.replace(u"snat", u"nat") + ) # Remove TC number from the TC name (not needed): - test_result[u"name"] = re.sub(self.REGEX_TC_NUMBER, "", name) + test_result[u"name"] = re.sub( + self.REGEX_TC_NUMBER, "", name.replace(u"snat", u"nat") + ) - test_result[u"parent"] = test.parent.name.lower() + test_result[u"parent"] = test.parent.name.lower().\ + replace(u"snat", u"nat") test_result[u"tags"] = tags test_result["doc"] = test.doc.\ replace(u'"', u"'").\ @@ -1055,9 +1102,18 @@ class ExecutionChecker(ResultVisitor): if test.status == u"PASS": if u"NDRPDR" in tags: - 
test_result[u"msg"] = self._get_data_from_perf_test_msg( - test.message).replace(u'\n', u' |br| ').\ - replace(u'\r', u'').replace(u'"', u"'") + if u"TCP_PPS" in tags or u"UDP_PPS" in tags: + test_result[u"msg"] = self._get_data_from_pps_test_msg( + test.message).replace(u'\n', u' |br| '). \ + replace(u'\r', u'').replace(u'"', u"'") + elif u"TCP_CPS" in tags or u"UDP_CPS" in tags: + test_result[u"msg"] = self._get_data_from_cps_test_msg( + test.message).replace(u'\n', u' |br| '). \ + replace(u'\r', u'').replace(u'"', u"'") + else: + test_result[u"msg"] = self._get_data_from_perf_test_msg( + test.message).replace(u'\n', u' |br| ').\ + replace(u'\r', u'').replace(u'"', u"'") elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags: test_result[u"msg"] = self._get_data_from_mrr_test_msg( test.message).replace(u'\n', u' |br| ').\ @@ -1102,26 +1158,19 @@ class ExecutionChecker(ResultVisitor): return if test.status == u"PASS": - if u"NDRPDR" in tags: - test_result[u"type"] = u"NDRPDR" + if u"DEVICETEST" in tags: + test_result[u"type"] = u"DEVICETEST" + elif u"NDRPDR" in tags: + if u"TCP_CPS" in tags or u"UDP_CPS" in tags: + test_result[u"type"] = u"CPS" + else: + test_result[u"type"] = u"NDRPDR" test_result[u"throughput"], test_result[u"status"] = \ self._get_ndrpdr_throughput(test.message) test_result[u"gbps"], test_result[u"status"] = \ self._get_ndrpdr_throughput_gbps(test.message) test_result[u"latency"], test_result[u"status"] = \ self._get_ndrpdr_latency(test.message) - elif u"SOAK" in tags: - test_result[u"type"] = u"SOAK" - test_result[u"throughput"], test_result[u"status"] = \ - self._get_plr_throughput(test.message) - elif u"HOSTSTACK" in tags: - test_result[u"type"] = u"HOSTSTACK" - test_result[u"result"], test_result[u"status"] = \ - self._get_hoststack_data(test.message, tags) - elif u"TCP" in tags: - test_result[u"type"] = u"TCP" - groups = re.search(self.REGEX_TCP, test.message) - test_result[u"result"] = int(groups.group(2)) elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags: if u"MRR" in tags: test_result[u"type"] = u"MRR" @@ -1133,16 +1182,30 @@ class ExecutionChecker(ResultVisitor): if groups is not None: items_str = groups.group(1) items_float = [ - float(item.strip()) for item in items_str.split(",") + float(item.strip().replace(u"'", u"")) + for item in items_str.split(",") ] # Use whole list in CSIT-1180. 
stats = jumpavg.AvgStdevStats.for_runs(items_float) + test_result[u"result"][u"samples"] = items_float test_result[u"result"][u"receive-rate"] = stats.avg test_result[u"result"][u"receive-stdev"] = stats.stdev else: groups = re.search(self.REGEX_MRR, test.message) test_result[u"result"][u"receive-rate"] = \ float(groups.group(3)) / float(groups.group(1)) + elif u"SOAK" in tags: + test_result[u"type"] = u"SOAK" + test_result[u"throughput"], test_result[u"status"] = \ + self._get_plr_throughput(test.message) + elif u"HOSTSTACK" in tags: + test_result[u"type"] = u"HOSTSTACK" + test_result[u"result"], test_result[u"status"] = \ + self._get_hoststack_data(test.message, tags) + elif u"TCP" in tags: + test_result[u"type"] = u"TCP" + groups = re.search(self.REGEX_TCP, test.message) + test_result[u"result"] = int(groups.group(2)) elif u"RECONF" in tags: test_result[u"type"] = u"RECONF" test_result[u"result"] = None @@ -1155,8 +1218,6 @@ class ExecutionChecker(ResultVisitor): } except (AttributeError, IndexError, ValueError, TypeError): test_result[u"status"] = u"FAIL" - elif u"DEVICETEST" in tags: - test_result[u"type"] = u"DEVICETEST" else: test_result[u"status"] = u"FAIL" self._data[u"tests"][self._test_id] = test_result @@ -1458,8 +1519,9 @@ class InputData: f"Error occurred while parsing output.xml: {repr(err)}" ) return None - checker = ExecutionChecker(metadata, self._cfg.mapping, - self._cfg.ignore) + checker = ExecutionChecker( + metadata, self._cfg.mapping, self._cfg.ignore + ) result.visit(checker) return checker.data @@ -1480,7 +1542,7 @@ class InputData: :type repeat: int """ - logging.info(f" Processing the job/build: {job}: {build[u'build']}") + logging.info(f"Processing the job/build: {job}: {build[u'build']}") state = u"failed" success = False @@ -1498,7 +1560,7 @@ class InputData: f"Skipped." ) if success: - logging.info(f" Processing data from build {build[u'build']}") + logging.info(f" Processing data from build {build[u'build']}") data = self._parse_tests(job, build) if data is None: logging.error( @@ -1518,7 +1580,7 @@ class InputData: # If the time-period is defined in the specification file, remove all # files which are outside the time period. is_last = False - timeperiod = self._cfg.input.get(u"time-period", None) + timeperiod = self._cfg.environment.get(u"time-period", None) if timeperiod and data: now = dt.utcnow() timeperiod = timedelta(int(timeperiod)) @@ -1533,11 +1595,9 @@ class InputData: data = None is_last = True logging.info( - f" The build {job}/{build[u'build']} is " + f" The build {job}/{build[u'build']} is " f"outdated, will be removed." 
) - logging.info(u" Done.") - return { u"data": data, u"state": state, @@ -1557,7 +1617,7 @@ class InputData: logging.info(u"Downloading and parsing input files ...") - for job, builds in self._cfg.builds.items(): + for job, builds in self._cfg.input.items(): for build in builds: result = self._download_and_parse_build(job, build, repeat) @@ -1585,10 +1645,9 @@ class InputData: if self._input_data.get(job, None) is None: self._input_data[job] = pd.Series() self._input_data[job][str(build_nr)] = build_data - self._cfg.set_input_file_name( - job, build_nr, result[u"build"][u"file-name"]) - + job, build_nr, result[u"build"][u"file-name"] + ) self._cfg.set_input_state(job, build_nr, result[u"state"]) mem_alloc = \ @@ -1597,6 +1656,16 @@ class InputData: logging.info(u"Done.") + msg = f"Successful downloads from the sources:\n" + for source in self._cfg.environment[u"data-sources"]: + if source[u"successful-downloads"]: + msg += ( + f"{source[u'url']}/{source[u'path']}/" + f"{source[u'file-name']}: " + f"{source[u'successful-downloads']}\n" + ) + logging.info(msg) + def process_local_file(self, local_file, job=u"local", build_nr=1, replace=True): """Process local XML file given as a command-line parameter. @@ -1627,7 +1696,7 @@ class InputData: u"file-name": local_file } if replace: - self._cfg.builds = dict() + self._cfg.input = dict() self._cfg.add_build(job, build) logging.info(f"Processing {job}: {build_nr:2d}: {local_file}") @@ -1712,7 +1781,7 @@ class InputData: ) if replace: - self._cfg.builds = dict() + self._cfg.input = dict() for job, files in local_builds.items(): for idx, local_file in enumerate(files): @@ -1898,16 +1967,25 @@ class InputData: if params is None: params = element.get(u"parameters", None) - if params: + if params and u"type" not in params: params.append(u"type") + cores = element.get(u"core", None) + if cores: + tests = list() + for core in cores: + for test in include: + tests.append(test.format(core=core)) + else: + tests = include + data = pd.Series() try: for job, builds in element[u"data"].items(): data[job] = pd.Series() for build in builds: data[job][str(build)] = pd.Series() - for test in include: + for test in tests: try: reg_ex = re.compile(str(test).lower()) for test_id in self.data[job][
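
The core of this patch is the pair of new regular expressions REGEX_CPS_MSG_INFO and REGEX_PPS_MSG_INFO together with the helper methods _get_data_from_cps_test_msg() and _get_data_from_pps_test_msg(), which turn an NDRPDR test message into the short summary stored in test_result[u"msg"]. The following stand-alone sketch (not part of the patch) walks through that parsing path for the CPS case. The sample message text is an assumption made for illustration only; the patch does not show the actual Robot Framework message. The PPS variant is analogous, additionally capturing the Gbps readings.

import re

# Regex copied from the patch: NDR_LOWER / PDR_LOWER lines, each followed
# by two more lines, with the first number after the label captured.
REGEX_CPS_MSG_INFO = re.compile(
    r'NDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*\n'
    r'PDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*'
)

def cps_summary(msg):
    # Mirrors _get_data_from_cps_test_msg(): report the NDR and PDR
    # values scaled to millions (divided by 1e6), or "Test Failed."
    groups = re.search(REGEX_CPS_MSG_INFO, msg)
    if not groups or groups.lastindex != 2:
        return u"Test Failed."
    return (
        f"1. {(float(groups.group(1)) / 1e6):5.2f}\n"
        f"2. {(float(groups.group(2)) / 1e6):5.2f}"
    )

# Assumed example message layout (NDR_LOWER / PDR_LOWER each followed by
# two further lines); the real test message may differ.
sample = (
    "NDR_LOWER: 1234567.8 CPS raw lower bound\n"
    "NDR_UPPER: 1300000.0 CPS raw upper bound\n"
    "NDR latency details\n"
    "PDR_LOWER: 1400000.9 CPS raw lower bound\n"
    "PDR_UPPER: 1500000.0 CPS raw upper bound\n"
    "PDR latency details"
)

print(cps_summary(sample))
# 1.  1.23
# 2.  1.40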
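
The last hunk changes how test names are selected for tables: an optional "core" list in a specification element expands a "{core}" placeholder in each included test name. A small sketch of that expansion follows; the element layout and the test-name pattern are assumptions for illustration, not taken from the patch.

# Assumed specification element with a "core" list and a name pattern.
element = {
    u"core": [u"1c", u"2c"],
    u"include": [u"ethip4-ip4base-{core}-ndrpdr"],
}

include = element[u"include"]
cores = element.get(u"core", None)
if cores:
    # Same expansion as in the patch: one test name per core value.
    tests = list()
    for core in cores:
        for test in include:
            tests.append(test.format(core=core))
else:
    tests = include

print(tests)  # ['ethip4-ip4base-1c-ndrpdr', 'ethip4-ip4base-2c-ndrpdr']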