X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=2cc2447ec0bc154b25610868401b9bc24a591092;hp=cd9c1a248d61497b86981df51631b0dc224040bd;hb=bb1a7058e8bbcbe998fdfd8dd5ed46e13fb90db7;hpb=4504d3cec6b801122d4f194284696601fbc3b492

diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index cd9c1a248d..2cc2447ec0 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2019 Cisco and/or its affiliates.
+# Copyright (c) 2021 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -216,6 +216,12 @@ class ExecutionChecker(ResultVisitor):
         r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
         r'PDR_UPPER:\s(\d+.\d+)'
     )
+    REGEX_NDRPDR_GBPS = re.compile(
+        r'NDR_LOWER:.*,\s(\d+.\d+).*\n.*\n'
+        r'NDR_UPPER:.*,\s(\d+.\d+).*\n'
+        r'PDR_LOWER:.*,\s(\d+.\d+).*\n.*\n'
+        r'PDR_UPPER:.*,\s(\d+.\d+)'
+    )
     REGEX_PERF_MSG_INFO = re.compile(
         r'NDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
         r'PDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
@@ -223,9 +229,26 @@ class ExecutionChecker(ResultVisitor):
         r'Latency at 50% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
         r'Latency at 10% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
     )
+    REGEX_CPS_MSG_INFO = re.compile(
+        r'NDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*\n'
+        r'PDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*'
+    )
+    REGEX_PPS_MSG_INFO = re.compile(
+        r'NDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
+        r'PDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*'
+    )
     REGEX_MRR_MSG_INFO = re.compile(r'.*\[(.*)\]')
 
-    # TODO: Remove when not needed
+    REGEX_VSAP_MSG_INFO = re.compile(
+        r'Transfer Rate: (\d*.\d*).*\n'
+        r'Latency: (\d*.\d*).*\n'
+        r'Completed requests: (\d*).*\n'
+        r'Failed requests: (\d*).*\n'
+        r'Total data transferred: (\d*).*\n'
+        r'Connection [cr]ps rate:\s*(\d*.\d*)'
+    )
+
+    # Needed for CPS and PPS tests
     REGEX_NDRPDR_LAT_BASE = re.compile(
         r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
         r'LATENCY.*\[\'(.*)\', \'(.*)\'\]'
@@ -238,18 +261,7 @@ class ExecutionChecker(ResultVisitor):
         r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
         r'Latency.*\[\'(.*)\', \'(.*)\'\]'
     )
-    # TODO: Remove when not needed
-    REGEX_NDRPDR_LAT_LONG = re.compile(
-        r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
-        r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
-        r'Latency.*\[\'(.*)\', \'(.*)\'\]'
-    )
+
     REGEX_VERSION_VPP = re.compile(
         r"(return STDOUT Version:\s*|"
         r"VPP Version:\s*|VPP version:\s*)(.*)"
@@ -265,8 +277,7 @@ class ExecutionChecker(ResultVisitor):
         r'tx\s(\d*),\srx\s(\d*)'
     )
     REGEX_BMRR = re.compile(
-        r'Maximum Receive Rate trial results'
-        r' in packets per second: \[(.*)\]'
+        r'.*trial results.*: \[(.*)\]'
    )
     REGEX_RECONF_LOSS = re.compile(
         r'Packets lost due to reconfig: (\d*)'
@@ -344,8 +355,6 @@ class ExecutionChecker(ResultVisitor):
             u"timestamp": self._get_timestamp,
             u"vpp-version": self._get_vpp_version,
             u"dpdk-version": self._get_dpdk_version,
-            # TODO: Remove when not needed:
-            u"teardown-vat-history": self._get_vat_history,
             u"teardown-papi-history": self._get_papi_history,
             u"test-show-runtime": self._get_show_run,
             u"testbed": self._get_testbed
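A side note on the REGEX_BMRR change above: the old pattern anchored on the exact MRR wording, while the loosened pattern accepts any "trial results" message, e.g. connection-rate variants. A minimal check, assuming invented sample messages (not taken from a real output.xml):

    import re

    OLD_BMRR = re.compile(
        r'Maximum Receive Rate trial results'
        r' in packets per second: \[(.*)\]'
    )
    NEW_BMRR = re.compile(r'.*trial results.*: \[(.*)\]')

    pps = u"Maximum Receive Rate trial results in packets per second: [1.0, 2.0]"
    cps = u"Maximum Connection Rate trial results in connections per second: [3.0, 4.0]"

    # The old pattern only matches the packets-per-second wording.
    assert OLD_BMRR.search(pps) and not OLD_BMRR.search(cps)
    # The new pattern captures the bracketed list in both cases.
    assert NEW_BMRR.search(pps).group(1) == u"1.0, 2.0"
    assert NEW_BMRR.search(cps).group(1) == u"3.0, 4.0"
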
@@ -386,12 +395,56 @@ class ExecutionChecker(ResultVisitor):
         except (AttributeError, IndexError, ValueError, KeyError):
             return u"Test Failed."
 
+    def _get_data_from_cps_test_msg(self, msg):
+        """Get info from message of NDRPDR CPS tests.
+
+        :param msg: Message to be processed.
+        :type msg: str
+        :returns: Processed message or "Test Failed." if a problem occurs.
+        :rtype: str
+        """
+
+        groups = re.search(self.REGEX_CPS_MSG_INFO, msg)
+        if not groups or groups.lastindex != 2:
+            return u"Test Failed."
+
+        try:
+            return (
+                f"1. {(float(groups.group(1)) / 1e6):5.2f}\n"
+                f"2. {(float(groups.group(2)) / 1e6):5.2f}"
+            )
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return u"Test Failed."
+
+    def _get_data_from_pps_test_msg(self, msg):
+        """Get info from message of NDRPDR PPS tests.
+
+        :param msg: Message to be processed.
+        :type msg: str
+        :returns: Processed message or "Test Failed." if a problem occurs.
+        :rtype: str
+        """
+
+        groups = re.search(self.REGEX_PPS_MSG_INFO, msg)
+        if not groups or groups.lastindex != 4:
+            return u"Test Failed."
+
+        try:
+            return (
+                f"1. {(float(groups.group(1)) / 1e6):5.2f} "
+                f"{float(groups.group(2)):5.2f}\n"
+                f"2. {(float(groups.group(3)) / 1e6):5.2f} "
+                f"{float(groups.group(4)):5.2f}"
+            )
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return u"Test Failed."
+
     def _get_data_from_perf_test_msg(self, msg):
         """Get info from message of NDRPDR performance tests.
 
         :param msg: Message to be processed.
         :type msg: str
-        :returns: Processed message or original message if a problem occurs.
+        :returns: Processed message or "Test Failed." if a problem occurs.
         :rtype: str
         """
 
@@ -524,10 +577,11 @@ class ExecutionChecker(ResultVisitor):
         """
         if msg.message.count(u"return STDOUT Version:") or \
-           msg.message.count(u"VPP Version:") or \
-           msg.message.count(u"VPP version:"):
-            self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message).
-                                group(2))
+                msg.message.count(u"VPP Version:") or \
+                msg.message.count(u"VPP version:"):
+            self._version = str(
+                re.search(self.REGEX_VERSION_VPP, msg.message).group(2)
+            )
             self._data[u"metadata"][u"version"] = self._version
             self._msg_type = None
 
@@ -561,30 +615,6 @@ class ExecutionChecker(ResultVisitor):
         self._data[u"metadata"][u"generated"] = self._timestamp
         self._msg_type = None
 
-    def _get_vat_history(self, msg):
-        """Called when extraction of VAT command history is required.
-
-        TODO: Remove when not needed.
-
-        :param msg: Message to process.
-        :type msg: Message
-        :returns: Nothing.
-        """
-        if msg.message.count(u"VAT command history:"):
-            self._conf_history_lookup_nr += 1
-            if self._conf_history_lookup_nr == 1:
-                self._data[u"tests"][self._test_id][u"conf-history"] = str()
-            else:
-                self._msg_type = None
-            text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
-                          r"VAT command history:", u"",
-                          msg.message, count=1).replace(u'\n', u' |br| ').\
-                replace(u'"', u"'")
-
-            self._data[u"tests"][self._test_id][u"conf-history"] += (
-                f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
-            )
-
     def _get_papi_history(self, msg):
         """Called when extraction of PAPI command history is required.
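The two helpers added above (_get_data_from_cps_test_msg, _get_data_from_pps_test_msg) reduce the NDRPDR message to one line per lower bound, scaled to millions (Mcps/Mpps) with a fixed 5.2f width. The same formatting in isolation, with invented rates:

    ndr_lower = 1234567.8  # hypothetical NDR_LOWER rate [cps]
    pdr_lower = 2345678.9  # hypothetical PDR_LOWER rate [cps]

    # Mirrors _get_data_from_cps_test_msg: divide by 1e6, format as 5.2f.
    print(f"1. {(ndr_lower / 1e6):5.2f}\n2. {(pdr_lower / 1e6):5.2f}")
    # Output:
    # 1.  1.23
    # 2.  2.35
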
@@ -598,12 +628,14 @@ class ExecutionChecker(ResultVisitor):
                 self._data[u"tests"][self._test_id][u"conf-history"] = str()
             else:
                 self._msg_type = None
-            text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
-                          r"PAPI command history:", u"",
-                          msg.message, count=1).replace(u'\n', u' |br| ').\
-                replace(u'"', u"'")
+            text = re.sub(
+                r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} PAPI command history:",
+                u"",
+                msg.message,
+                count=1
+            ).replace(u'"', u"'")
             self._data[u"tests"][self._test_id][u"conf-history"] += (
-                f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
+                f"**DUT{str(self._conf_history_lookup_nr)}:** {text}"
             )
 
     def _get_show_run(self, msg):
@@ -646,12 +678,14 @@ class ExecutionChecker(ResultVisitor):
         except (IndexError, KeyError):
             return
 
-        dut = u"DUT{nr}".format(
+        dut = u"dut{nr}".format(
             nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
 
         oper = {
             u"host": host,
             u"socket": sock,
+            # Needed for json converter, enable when 'threads' is gone.
+            # u"runtime": runtime,
             u"threads": OrderedDict({idx: list() for idx in range(threads_nr)})
         }
 
@@ -713,6 +747,35 @@ class ExecutionChecker(ResultVisitor):
 
         return throughput, status
 
+    def _get_ndrpdr_throughput_gbps(self, msg):
+        """Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER in Gbps from the
+        test message.
+
+        :param msg: The test message to be parsed.
+        :type msg: str
+        :returns: Parsed data as a dict and the status (PASS/FAIL).
+        :rtype: tuple(dict, str)
+        """
+
+        gbps = {
+            u"NDR": {u"LOWER": -1.0, u"UPPER": -1.0},
+            u"PDR": {u"LOWER": -1.0, u"UPPER": -1.0}
+        }
+        status = u"FAIL"
+        groups = re.search(self.REGEX_NDRPDR_GBPS, msg)
+
+        if groups is not None:
+            try:
+                gbps[u"NDR"][u"LOWER"] = float(groups.group(1))
+                gbps[u"NDR"][u"UPPER"] = float(groups.group(2))
+                gbps[u"PDR"][u"LOWER"] = float(groups.group(3))
+                gbps[u"PDR"][u"UPPER"] = float(groups.group(4))
+                status = u"PASS"
+            except (IndexError, ValueError):
+                pass
+
+        return gbps, status
+
     def _get_plr_throughput(self, msg):
         """Get PLRsearch lower bound and PLRsearch upper bound from the test
         message.
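To make the contract of the new _get_ndrpdr_throughput_gbps() concrete: it returns -1.0 defaults with FAIL when REGEX_NDRPDR_GBPS does not match, and the four parsed bounds with PASS when it does. The message below is shaped only to satisfy the regex; a real NDRPDR message differs in detail:

    import re

    REGEX_NDRPDR_GBPS = re.compile(
        r'NDR_LOWER:.*,\s(\d+.\d+).*\n.*\n'
        r'NDR_UPPER:.*,\s(\d+.\d+).*\n'
        r'PDR_LOWER:.*,\s(\d+.\d+).*\n.*\n'
        r'PDR_UPPER:.*,\s(\d+.\d+)'
    )
    msg = (
        u"NDR_LOWER: 12345678.0 pps, 5.90 Gbps\n"
        u"(detail line)\n"
        u"NDR_UPPER: 12345680.0 pps, 5.91 Gbps\n"
        u"PDR_LOWER: 12345682.0 pps, 5.92 Gbps\n"
        u"(detail line)\n"
        u"PDR_UPPER: 12345684.0 pps, 5.93 Gbps"
    )
    groups = re.search(REGEX_NDRPDR_GBPS, msg)
    # Each capture is the Gbps value after the last comma on its line.
    assert [float(groups.group(i)) for i in range(1, 5)] == [5.90, 5.91, 5.92, 5.93]
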
@@ -781,10 +844,7 @@ class ExecutionChecker(ResultVisitor):
             },
         }
 
-        # TODO: Rewrite when long and base are not needed
-        groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg)
-        if groups is None:
-            groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+        groups = re.search(self.REGEX_NDRPDR_LAT, msg)
         if groups is None:
             groups = re.search(self.REGEX_NDRPDR_LAT_BASE, msg)
         if groups is None:
@@ -840,38 +900,6 @@ class ExecutionChecker(ResultVisitor):
         except (IndexError, ValueError):
             pass
 
-        # TODO: Remove when not needed
-        latency[u"NDR10"] = {
-            u"direction1": copy.copy(latency_default),
-            u"direction2": copy.copy(latency_default)
-        }
-        latency[u"NDR50"] = {
-            u"direction1": copy.copy(latency_default),
-            u"direction2": copy.copy(latency_default)
-        }
-        latency[u"NDR90"] = {
-            u"direction1": copy.copy(latency_default),
-            u"direction2": copy.copy(latency_default)
-        }
-        try:
-            latency[u"LAT0"][u"direction1"] = process_latency(groups.group(5))
-            latency[u"LAT0"][u"direction2"] = process_latency(groups.group(6))
-            latency[u"NDR10"][u"direction1"] = process_latency(groups.group(7))
-            latency[u"NDR10"][u"direction2"] = process_latency(groups.group(8))
-            latency[u"NDR50"][u"direction1"] = process_latency(groups.group(9))
-            latency[u"NDR50"][u"direction2"] = process_latency(groups.group(10))
-            latency[u"NDR90"][u"direction1"] = process_latency(groups.group(11))
-            latency[u"NDR90"][u"direction2"] = process_latency(groups.group(12))
-            latency[u"PDR10"][u"direction1"] = process_latency(groups.group(13))
-            latency[u"PDR10"][u"direction2"] = process_latency(groups.group(14))
-            latency[u"PDR50"][u"direction1"] = process_latency(groups.group(15))
-            latency[u"PDR50"][u"direction2"] = process_latency(groups.group(16))
-            latency[u"PDR90"][u"direction1"] = process_latency(groups.group(17))
-            latency[u"PDR90"][u"direction2"] = process_latency(groups.group(18))
-            return latency, u"PASS"
-        except (IndexError, ValueError):
-            pass
-
         return latency, u"FAIL"
 
     @staticmethod
@@ -908,6 +936,39 @@ class ExecutionChecker(ResultVisitor):
 
         return result, status
 
+    def _get_vsap_data(self, msg, tags):
+        """Get data from the vsap test message.
+
+        :param msg: The test message to be parsed.
+        :param tags: Test tags.
+        :type msg: str
+        :type tags: list
+        :returns: Parsed data as a JSON dict and the status (PASS/FAIL).
+        :rtype: tuple(dict, str)
+        """
+        result = dict()
+        status = u"FAIL"
+
+        groups = re.search(self.REGEX_VSAP_MSG_INFO, msg)
+        if groups is not None:
+            try:
+                result[u"transfer-rate"] = float(groups.group(1)) * 1e3
+                result[u"latency"] = float(groups.group(2))
+                result[u"completed-requests"] = int(groups.group(3))
+                result[u"failed-requests"] = int(groups.group(4))
+                result[u"bytes-transferred"] = int(groups.group(5))
+                if u"TCP_CPS" in tags:
+                    result[u"cps"] = float(groups.group(6))
+                elif u"TCP_RPS" in tags:
+                    result[u"rps"] = float(groups.group(6))
+                else:
+                    return result, status
+                status = u"PASS"
+            except (IndexError, ValueError):
+                pass
+
+        return result, status
+
     def visit_suite(self, suite):
         """Implements traversing through the suite and its direct children.
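What REGEX_VSAP_MSG_INFO captures, and how _get_vsap_data() maps it: a sketch with a message laid out purely from the pattern itself (field values invented; any extra per-line text is tolerated by the trailing .*):

    import re

    REGEX_VSAP_MSG_INFO = re.compile(
        r'Transfer Rate: (\d*.\d*).*\n'
        r'Latency: (\d*.\d*).*\n'
        r'Completed requests: (\d*).*\n'
        r'Failed requests: (\d*).*\n'
        r'Total data transferred: (\d*).*\n'
        r'Connection [cr]ps rate:\s*(\d*.\d*)'
    )
    msg = (
        u"Transfer Rate: 128.5 MB/s\n"
        u"Latency: 0.12 ms\n"
        u"Completed requests: 1000000\n"
        u"Failed requests: 0\n"
        u"Total data transferred: 987654321 bytes\n"
        u"Connection cps rate: 4567.89"
    )
    groups = re.search(REGEX_VSAP_MSG_INFO, msg)
    # _get_vsap_data() scales the transfer rate by 1e3 and, for a
    # TCP_CPS test, stores the last capture under u"cps".
    assert float(groups.group(1)) * 1e3 == 128500.0
    assert float(groups.group(6)) == 4567.89
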
replace(u" ", u"_")] = { u"name": suite.name.lower(), - u"doc": doc_str, + u"doc": suite.doc, u"parent": parent_name, u"level": len(suite.longname.split(u".")) } @@ -1009,29 +1062,30 @@ class ExecutionChecker(ResultVisitor): test_result[u"parent"] = test.parent.name.lower() test_result[u"tags"] = tags - test_result["doc"] = test.doc.\ - replace(u'"', u"'").\ - replace(u'\n', u' ').\ - replace(u'\r', u'').\ - replace(u'[', u' |br| [').\ - replace(u' |br| [', u'[', 1) - test_result[u"type"] = u"FUNC" + test_result["doc"] = test.doc + test_result[u"type"] = u"" test_result[u"status"] = test.status + test_result[u"starttime"] = test.starttime + test_result[u"endtime"] = test.endtime if test.status == u"PASS": if u"NDRPDR" in tags: - test_result[u"msg"] = self._get_data_from_perf_test_msg( - test.message).replace(u'\n', u' |br| ').\ - replace(u'\r', u'').replace(u'"', u"'") + if u"TCP_PPS" in tags or u"UDP_PPS" in tags: + test_result[u"msg"] = self._get_data_from_pps_test_msg( + test.message) + elif u"TCP_CPS" in tags or u"UDP_CPS" in tags: + test_result[u"msg"] = self._get_data_from_cps_test_msg( + test.message) + else: + test_result[u"msg"] = self._get_data_from_perf_test_msg( + test.message) elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags: test_result[u"msg"] = self._get_data_from_mrr_test_msg( - test.message).replace(u'\n', u' |br| ').\ - replace(u'\r', u'').replace(u'"', u"'") + test.message) else: - test_result[u"msg"] = test.message.replace(u'\n', u' |br| ').\ - replace(u'\r', u'').replace(u'"', u"'") + test_result[u"msg"] = test.message else: - test_result[u"msg"] = u"Test Failed." + test_result[u"msg"] = test.message if u"PERFTEST" in tags: # Replace info about cores (e.g. -1c-) with the info about threads @@ -1066,47 +1120,65 @@ class ExecutionChecker(ResultVisitor): ) return - if test.status == u"PASS": - if u"NDRPDR" in tags: + if u"DEVICETEST" in tags: + test_result[u"type"] = u"DEVICETEST" + elif u"NDRPDR" in tags: + if u"TCP_CPS" in tags or u"UDP_CPS" in tags: + test_result[u"type"] = u"CPS" + else: test_result[u"type"] = u"NDRPDR" + if test.status == u"PASS": test_result[u"throughput"], test_result[u"status"] = \ self._get_ndrpdr_throughput(test.message) + test_result[u"gbps"], test_result[u"status"] = \ + self._get_ndrpdr_throughput_gbps(test.message) test_result[u"latency"], test_result[u"status"] = \ self._get_ndrpdr_latency(test.message) - elif u"SOAK" in tags: - test_result[u"type"] = u"SOAK" - test_result[u"throughput"], test_result[u"status"] = \ - self._get_plr_throughput(test.message) - elif u"HOSTSTACK" in tags: - test_result[u"type"] = u"HOSTSTACK" - test_result[u"result"], test_result[u"status"] = \ - self._get_hoststack_data(test.message, tags) - elif u"TCP" in tags: - test_result[u"type"] = u"TCP" - groups = re.search(self.REGEX_TCP, test.message) - test_result[u"result"] = int(groups.group(2)) - elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags: - if u"MRR" in tags: - test_result[u"type"] = u"MRR" - else: - test_result[u"type"] = u"BMRR" - + elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags: + if u"MRR" in tags: + test_result[u"type"] = u"MRR" + else: + test_result[u"type"] = u"BMRR" + if test.status == u"PASS": test_result[u"result"] = dict() groups = re.search(self.REGEX_BMRR, test.message) if groups is not None: items_str = groups.group(1) - items_float = [float(item.strip()) for item - in items_str.split(",")] + items_float = [ + float(item.strip().replace(u"'", u"")) + for item in items_str.split(",") + ] # Use whole list in 
                     stats = jumpavg.AvgStdevStats.for_runs(items_float)
+                    test_result[u"result"][u"samples"] = items_float
                     test_result[u"result"][u"receive-rate"] = stats.avg
                     test_result[u"result"][u"receive-stdev"] = stats.stdev
                 else:
                     groups = re.search(self.REGEX_MRR, test.message)
                     test_result[u"result"][u"receive-rate"] = \
                         float(groups.group(3)) / float(groups.group(1))
-            elif u"RECONF" in tags:
-                test_result[u"type"] = u"RECONF"
+        elif u"SOAK" in tags:
+            test_result[u"type"] = u"SOAK"
+            if test.status == u"PASS":
+                test_result[u"throughput"], test_result[u"status"] = \
+                    self._get_plr_throughput(test.message)
+        elif u"HOSTSTACK" in tags:
+            test_result[u"type"] = u"HOSTSTACK"
+            if test.status == u"PASS":
+                test_result[u"result"], test_result[u"status"] = \
+                    self._get_hoststack_data(test.message, tags)
+        elif u"LDP_NGINX" in tags:
+            test_result[u"type"] = u"LDP_NGINX"
+            test_result[u"result"], test_result[u"status"] = \
+                self._get_vsap_data(test.message, tags)
+        # elif u"TCP" in tags:  # This might be not used
+        #     test_result[u"type"] = u"TCP"
+        #     if test.status == u"PASS":
+        #         groups = re.search(self.REGEX_TCP, test.message)
+        #         test_result[u"result"] = int(groups.group(2))
+        elif u"RECONF" in tags:
+            test_result[u"type"] = u"RECONF"
+            if test.status == u"PASS":
                 test_result[u"result"] = None
                 try:
                     grps_loss = re.search(self.REGEX_RECONF_LOSS, test.message)
@@ -1117,12 +1189,8 @@ class ExecutionChecker(ResultVisitor):
                     }
                 except (AttributeError, IndexError, ValueError, TypeError):
                     test_result[u"status"] = u"FAIL"
-            elif u"DEVICETEST" in tags:
-                test_result[u"type"] = u"DEVICETEST"
-            else:
-                test_result[u"status"] = u"FAIL"
-                self._data[u"tests"][self._test_id] = test_result
-                return
+        else:
+            test_result[u"status"] = u"FAIL"
 
         self._data[u"tests"][self._test_id] = test_result
 
@@ -1191,12 +1259,10 @@ class ExecutionChecker(ResultVisitor):
         :returns: Nothing.
         """
         if test_kw.name.count(u"Show Runtime On All Duts") or \
-                test_kw.name.count(u"Show Runtime Counters On All Duts"):
+                test_kw.name.count(u"Show Runtime Counters On All Duts") or \
+                test_kw.name.count(u"Vpp Show Runtime On All Duts"):
             self._msg_type = u"test-show-runtime"
             self._sh_run_counter += 1
-        elif test_kw.name.count(u"Install Dpdk Test On All Duts") and \
-                not self._version:
-            self._msg_type = u"dpdk-version"
         else:
             return
         test_kw.messages.visit(self)
@@ -1233,6 +1299,9 @@ class ExecutionChecker(ResultVisitor):
         if setup_kw.name.count(u"Show Vpp Version On All Duts") \
                 and not self._version:
             self._msg_type = u"vpp-version"
+        elif setup_kw.name.count(u"Install Dpdk Framework On All Duts") and \
+                not self._version:
+            self._msg_type = u"dpdk-version"
         elif setup_kw.name.count(u"Set Global Variable") \
                 and not self._timestamp:
             self._msg_type = u"timestamp"
@@ -1270,13 +1339,7 @@ class ExecutionChecker(ResultVisitor):
         :type teardown_kw: Keyword
         :returns: Nothing.
         """
-
-        if teardown_kw.name.count(u"Show Vat History On All Duts"):
-            # TODO: Remove when not needed:
-            self._conf_history_lookup_nr = 0
-            self._msg_type = u"teardown-vat-history"
-            teardown_kw.messages.visit(self)
-        elif teardown_kw.name.count(u"Show Papi History On All Duts"):
+        if teardown_kw.name.count(u"Show Papi History On All Duts"):
             self._conf_history_lookup_nr = 0
             self._msg_type = u"teardown-papi-history"
             teardown_kw.messages.visit(self)
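Back-reference to the BMRR branch above: trial samples may now arrive as quoted strings inside the bracketed list, so each item is stripped of single quotes before float(), and the raw list is kept under u"samples". A sketch with an invented capture:

    items_str = u"'1.5', '2.5', '3.5'"  # group(1) of REGEX_BMRR, quoted variant
    items_float = [
        float(item.strip().replace(u"'", u""))
        for item in items_str.split(",")
    ]
    assert items_float == [1.5, 2.5, 3.5]
    # The previous float(item.strip()) would raise ValueError on u"'1.5'".
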
@@ -1394,16 +1457,14 @@ class InputData:
         """
         return self.data[job][build][u"tests"]
 
-    def _parse_tests(self, job, build, log):
+    def _parse_tests(self, job, build):
         """Process data from robot output.xml file and return JSON structured
         data.
 
         :param job: The name of job which build output data will be processed.
         :param build: The build which output data will be processed.
-        :param log: List of log messages.
         :type job: str
         :type build: dict
-        :type log: list of tuples (severity, msg)
         :returns: JSON data structure.
         :rtype: dict
         """
@@ -1417,13 +1478,13 @@ class InputData:
         try:
             result = ExecutionResult(data_file)
         except errors.DataError as err:
-            log.append(
-                (u"ERROR", f"Error occurred while parsing output.xml: "
-                           f"{repr(err)}")
+            logging.error(
+                f"Error occurred while parsing output.xml: {repr(err)}"
             )
             return None
-        checker = ExecutionChecker(metadata, self._cfg.mapping,
-                                   self._cfg.ignore)
+        checker = ExecutionChecker(
+            metadata, self._cfg.mapping, self._cfg.ignore
+        )
         result.visit(checker)
 
         return checker.data
@@ -1444,40 +1505,30 @@ class InputData:
         :type repeat: int
         """
 
-        logs = list()
-
-        logs.append(
-            (u"INFO", f"  Processing the job/build: {job}: {build[u'build']}")
-        )
+        logging.info(f"Processing the job/build: {job}: {build[u'build']}")
 
         state = u"failed"
         success = False
         data = None
         do_repeat = repeat
         while do_repeat:
-            success = download_and_unzip_data_file(self._cfg, job, build, pid,
-                                                   logs)
+            success = download_and_unzip_data_file(self._cfg, job, build, pid)
            if success:
                 break
             do_repeat -= 1
         if not success:
-            logs.append(
-                (u"ERROR",
-                 f"It is not possible to download the input data file from the "
-                 f"job {job}, build {build[u'build']}, or it is damaged. "
-                 f"Skipped.")
+            logging.error(
+                f"It is not possible to download the input data file from the "
+                f"job {job}, build {build[u'build']}, or it is damaged. "
+                f"Skipped."
             )
         if success:
-            logs.append(
-                (u"INFO",
-                 f"    Processing data from the build {build[u'build']} ...")
-            )
-            data = self._parse_tests(job, build, logs)
+            logging.info(f"  Processing data from build {build[u'build']}")
+            data = self._parse_tests(job, build)
             if data is None:
-                logs.append(
-                    (u"ERROR",
-                     f"Input data file from the job {job}, build "
-                     f"{build[u'build']} is damaged. Skipped.")
+                logging.error(
+                    f"Input data file from the job {job}, build "
+                    f"{build[u'build']} is damaged. Skipped."
                 )
             else:
                 state = u"processed"
 
         try:
             remove(build[u"file-name"])
         except OSError as err:
-            logs.append(
-                ("ERROR", f"Cannot remove the file {build[u'file-name']}: "
-                          f"{repr(err)}")
+            logging.error(
+                f"Cannot remove the file {build[u'file-name']}: {repr(err)}"
             )
 
         # If the time-period is defined in the specification file, remove all
         # files which are outside the time period.
-        timeperiod = self._cfg.input.get(u"time-period", None)
+        is_last = False
+        timeperiod = self._cfg.environment.get(u"time-period", None)
         if timeperiod and data:
             now = dt.utcnow()
             timeperiod = timedelta(int(timeperiod))
@@ -1505,26 +1556,18 @@ class InputData:
                     # Remove the data and the file:
                     state = u"removed"
                     data = None
-                    logs.append(
-                        (u"INFO",
-                         f"    The build {job}/{build[u'build']} is "
-                         f"outdated, will be removed.")
+                    is_last = True
+                    logging.info(
+                        f"  The build {job}/{build[u'build']} is "
+                        f"outdated, will be removed."
                     )
-        logs.append((u"INFO", u"  Done."))
-
-        for level, line in logs:
-            if level == u"INFO":
-                logging.info(line)
-            elif level == u"ERROR":
-                logging.error(line)
-            elif level == u"DEBUG":
-                logging.debug(line)
-            elif level == u"CRITICAL":
-                logging.critical(line)
-            elif level == u"WARNING":
-                logging.warning(line)
-
-        return {u"data": data, u"state": state, u"job": job, u"build": build}
+        return {
+            u"data": data,
+            u"state": state,
+            u"job": job,
+            u"build": build,
+            u"last": is_last
+        }
 
     def download_and_parse_data(self, repeat=1):
         """Download the input data files, parse input data from input files and
@@ -1537,10 +1580,12 @@ class InputData:
 
         logging.info(u"Downloading and parsing input files ...")
 
-        for job, builds in self._cfg.builds.items():
+        for job, builds in self._cfg.input.items():
             for build in builds:
 
                 result = self._download_and_parse_build(job, build, repeat)
+                if result[u"last"]:
+                    break
                 build_nr = result[u"build"][u"build"]
 
                 if result[u"data"]:
@@ -1563,10 +1608,9 @@ class InputData:
                     if self._input_data.get(job, None) is None:
                         self._input_data[job] = pd.Series()
                     self._input_data[job][str(build_nr)] = build_data
-
                     self._cfg.set_input_file_name(
-                        job, build_nr, result[u"build"][u"file-name"])
-
+                        job, build_nr, result[u"build"][u"file-name"]
+                    )
                 self._cfg.set_input_state(job, build_nr, result[u"state"])
 
                 mem_alloc = \
@@ -1575,6 +1619,16 @@ class InputData:
 
         logging.info(u"Done.")
 
+        msg = f"Successful downloads from the sources:\n"
+        for source in self._cfg.environment[u"data-sources"]:
+            if source[u"successful-downloads"]:
+                msg += (
+                    f"{source[u'url']}/{source[u'path']}/"
+                    f"{source[u'file-name']}: "
+                    f"{source[u'successful-downloads']}\n"
+                )
+        logging.info(msg)
+
     def process_local_file(self, local_file, job=u"local", build_nr=1,
                            replace=True):
         """Process local XML file given as a command-line parameter.
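The new u"last" key above lets download_and_parse_data() stop iterating a job's builds at the first one reported outdated. A reduced sketch of that control flow (names hypothetical; it assumes builds are ordered so that everything after an outdated build is outdated too):

    def parse_job_builds(builds, parse_build):
        """parse_build() returns a dict holding, among others, u"last"."""
        results = list()
        for build in builds:
            result = parse_build(build)
            if result[u"last"]:
                break  # skip the outdated build and all that follow
            results.append(result)
        return results
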
@@ -1605,11 +1659,11 @@ class InputData:
             u"file-name": local_file
         }
         if replace:
-            self._cfg.builds = dict()
+            self._cfg.input = dict()
         self._cfg.add_build(job, build)
 
         logging.info(f"Processing {job}: {build_nr:2d}: {local_file}")
-        data = self._parse_tests(job, build, list())
+        data = self._parse_tests(job, build)
         if data is None:
             raise PresentationError(
                 f"Error occurred while parsing the file {local_file}"
@@ -1690,7 +1744,7 @@ class InputData:
             )
 
         if replace:
-            self._cfg.builds = dict()
+            self._cfg.input = dict()
 
         for job, files in local_builds.items():
             for idx, local_file in enumerate(files):
@@ -1785,7 +1839,7 @@ class InputData:
             if params is None:
                 params = element.get(u"parameters", None)
             if params:
-                params.append(u"type")
+                params.extend((u"type", u"status"))
 
         data_to_filter = data if data else element[u"data"]
         data = pd.Series()
@@ -1876,16 +1930,25 @@ class InputData:
         if params is None:
             params = element.get(u"parameters", None)
-            if params:
+            if params and u"type" not in params:
                 params.append(u"type")
 
+        cores = element.get(u"core", None)
+        if cores:
+            tests = list()
+            for core in cores:
+                for test in include:
+                    tests.append(test.format(core=core))
+        else:
+            tests = include
+
         data = pd.Series()
         try:
             for job, builds in element[u"data"].items():
                 data[job] = pd.Series()
                 for build in builds:
                     data[job][str(build)] = pd.Series()
-                    for test in include:
+                    for test in tests:
                         try:
                             reg_ex = re.compile(str(test).lower())
                             for test_id in self.data[job][
@@ -1908,9 +1971,10 @@ class InputData:
                                 data[job][str(build)][
                                     test_id][param] = u"No Data"
         except KeyError as err:
-            logging.error(repr(err))
             if continue_on_error:
+                logging.debug(repr(err))
                 continue
+            logging.error(repr(err))
             return None
 
         return data
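And the new u"core" handling in filter_tests() above: {core} placeholders in the include patterns are expanded before the patterns are compiled. With an invented pattern and core list:

    include = [u"64b-{core}-avf-ethip4-ip4base-ndrpdr"]
    cores = [u"1c", u"2c", u"4c"]

    tests = list()
    for core in cores:
        for test in include:
            tests.append(test.format(core=core))

    assert tests == [
        u"64b-1c-avf-ethip4-ip4base-ndrpdr",
        u"64b-2c-avf-ethip4-ip4base-ndrpdr",
        u"64b-4c-avf-ethip4-ip4base-ndrpdr",
    ]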