X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=bae7b5eead9da385d62a53d9c0efc593b2e068eb;hp=37194217a16a6551e3c19c6d28d778c4c743d374;hb=d59cb151a877c88b4a4fac28c1e01fa5220c1fb6;hpb=f0bd1290cd8b98d11c2a4188598cfdf009252ca9

diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 37194217a1..bae7b5eead 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -19,9 +19,8 @@
 - filter the data using tags,
 """
 
-import multiprocessing
-import os
 import re
+import resource
 
 import pandas as pd
 import logging
@@ -37,7 +36,6 @@ from json import loads
 from jumpavg.AvgStdevMetadataFactory import AvgStdevMetadataFactory
 
 from input_data_files import download_and_unzip_data_file
-from utils import Worker
 
 
 # Separator used in file names
@@ -280,7 +278,7 @@ class ExecutionChecker(ResultVisitor):
                            r'[\D\d]*')
 
     REGEX_VERSION_VPP = re.compile(r"(return STDOUT Version:\s*|"
-                                   r"VPP Version:\s*)(.*)")
+                                   r"VPP Version:\s*|VPP version:\s*)(.*)")
 
     REGEX_VERSION_DPDK = re.compile(r"(return STDOUT testpmd)([\d\D\n]*)"
                                     r"(RTE Version: 'DPDK )(.*)(')")
@@ -389,14 +387,12 @@ class ExecutionChecker(ResultVisitor):
         :returns: Nothing.
         """
 
-        if msg.message.count("Arguments:"):
-            message = str(msg.message).replace(' ', '').replace('\n', '').\
-                replace("'", '"').replace('b"', '"').\
-                replace("honeycom", "honeycomb")
-            message = loads(message[11:-1])
+        if msg.message.count("Setup of TG node"):
+            reg_tg_ip = re.compile(
+                r'Setup of TG node (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) done')
             try:
-                self._testbed = message["TG"]["host"]
-            except (KeyError, ValueError):
+                self._testbed = str(re.search(reg_tg_ip, msg.message).group(1))
+            except (KeyError, ValueError, IndexError, AttributeError):
                 pass
             finally:
                 self._data["metadata"]["testbed"] = self._testbed
@@ -411,7 +407,8 @@ class ExecutionChecker(ResultVisitor):
         """
 
         if msg.message.count("return STDOUT Version:") or \
-            msg.message.count("VPP Version:"):
+            msg.message.count("VPP Version:") or \
+            msg.message.count("VPP version:"):
             self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message).
                                 group(2))
             self._data["metadata"]["version"] = self._version
@@ -499,7 +496,7 @@ class ExecutionChecker(ResultVisitor):
         :type msg: Message
         :returns: Nothing.
         """
-        if msg.message.count("return STDOUT Thread "):
+        if msg.message.count("Thread 0 vpp_main"):
             self._show_run_lookup_nr += 1
             if self._lookup_kw_nr == 1 and self._show_run_lookup_nr == 1:
                 self._data["tests"][self._test_ID]["show-run"] = str()
@@ -986,8 +983,7 @@ class ExecutionChecker(ResultVisitor):
             if setup_kw.name.count("Show Vpp Version On All Duts") \
                     and not self._version:
                 self._msg_type = "vpp-version"
-
-            elif setup_kw.name.count("Setup performance global Variables") \
+            elif setup_kw.name.count("Set Global Variable") \
                     and not self._timestamp:
                 self._msg_type = "timestamp"
             elif setup_kw.name.count("Setup Framework") and not self._testbed:
@@ -1187,13 +1183,10 @@ class InputData(object):
 
         return checker.data
 
-    def _download_and_parse_build(self, pid, data_queue, job, build, repeat):
+    def _download_and_parse_build(self, job, build, repeat, pid=10000):
         """Download and parse the input data file.
 
         :param pid: PID of the process executing this method.
-        :param data_queue: Shared memory between processes. Queue which keeps
-            the result data. This data is then read by the main process and used
-            in further processing.
         :param job: Name of the Jenkins job which generated the processed input
             file.
         :param build: Information about the Jenkins build which generated the
@@ -1201,7 +1194,6 @@ class InputData(object):
         :param repeat: Repeat the download specified number of times if not
             successful.
         :type pid: int
-        :type data_queue: multiprocessing.Manager().Queue()
         :type job: str
         :type build: dict
         :type repeat: int
@@ -1209,9 +1201,6 @@ class InputData(object):
 
         logs = list()
 
-        logging.info("  Processing the job/build: {0}: {1}".
-                     format(job, build["build"]))
-
         logs.append(("INFO", "  Processing the job/build: {0}: {1}".
                      format(job, build["build"])))
 
@@ -1231,7 +1220,7 @@ class InputData(object):
                          "'{build}', or it is damaged. Skipped.".
                          format(job=job, build=build["build"])))
         if success:
-            logs.append(("INFO", "  Processing data from the build '{0}' ...".
+            logs.append(("INFO", "    Processing data from the build '{0}' ...".
                          format(build["build"])))
             data = self._parse_tests(job, build, logs)
             if data is None:
@@ -1269,11 +1258,11 @@ class InputData(object):
             file_name = self._cfg.input["file-name"]
             full_name = join(
                 self._cfg.environment["paths"]["DIR[WORKING,DATA]"],
-                "{job}{sep}{build}{sep}{name}".
-                format(job=job,
-                       sep=SEPARATOR,
-                       build=build["build"],
-                       name=file_name))
+                "{job}{sep}{build}{sep}{name}".format(
+                    job=job,
+                    sep=SEPARATOR,
+                    build=build["build"],
+                    name=file_name))
             try:
                 remove(full_name)
                 logs.append(("INFO",
@@ -1281,19 +1270,23 @@ class InputData(object):
                              format(name=full_name)))
             except OSError as err:
                 logs.append(("ERROR",
-                            "Cannot remove the file '{0}': {1}".
-                            format(full_name, repr(err))))
-
+                             "Cannot remove the file '{0}': {1}".
+                             format(full_name, repr(err))))
         logs.append(("INFO", "  Done."))
 
-        result = {
-            "data": data,
-            "state": state,
-            "job": job,
-            "build": build,
-            "logs": logs
-        }
-        data_queue.put(result)
+        for level, line in logs:
+            if level == "INFO":
+                logging.info(line)
+            elif level == "ERROR":
+                logging.error(line)
+            elif level == "DEBUG":
+                logging.debug(line)
+            elif level == "CRITICAL":
+                logging.critical(line)
+            elif level == "WARNING":
+                logging.warning(line)
+
+        return {"data": data, "state": state, "job": job, "build": build}
 
     def download_and_parse_data(self, repeat=1):
         """Download the input data files, parse input data from input files and
@@ -1306,73 +1299,34 @@ class InputData(object):
 
         logging.info("Downloading and parsing input files ...")
 
-        work_queue = multiprocessing.JoinableQueue()
-        manager = multiprocessing.Manager()
-        data_queue = manager.Queue()
-        cpus = multiprocessing.cpu_count()
-
-        workers = list()
-        for cpu in range(cpus):
-            worker = Worker(work_queue,
-                            data_queue,
-                            self._download_and_parse_build)
-            worker.daemon = True
-            worker.start()
-            workers.append(worker)
-            os.system("taskset -p -c {0} {1} > /dev/null 2>&1".
-                      format(cpu, worker.pid))
-
         for job, builds in self._cfg.builds.items():
             for build in builds:
-                work_queue.put((job, build, repeat))
-
-        work_queue.join()
+                result = self._download_and_parse_build(job, build, repeat)
+                build_nr = result["build"]["build"]
 
-        logging.info("Done.")
+                if result["data"]:
+                    data = result["data"]
+                    build_data = pd.Series({
+                        "metadata": pd.Series(
+                            data["metadata"].values(),
+                            index=data["metadata"].keys()),
+                        "suites": pd.Series(data["suites"].values(),
+                                            index=data["suites"].keys()),
+                        "tests": pd.Series(data["tests"].values(),
+                                           index=data["tests"].keys())})
+
+                if self._input_data.get(job, None) is None:
+                    self._input_data[job] = pd.Series()
+                self._input_data[job][str(build_nr)] = build_data
+
+                self._cfg.set_input_file_name(
+                    job, build_nr, result["build"]["file-name"])
+
+                self._cfg.set_input_state(job, build_nr, result["state"])
 
-        while not data_queue.empty():
-            result = data_queue.get()
-
-            job = result["job"]
-            build_nr = result["build"]["build"]
-
-            if result["data"]:
-                data = result["data"]
-                build_data = pd.Series({
-                    "metadata": pd.Series(data["metadata"].values(),
-                                          index=data["metadata"].keys()),
-                    "suites": pd.Series(data["suites"].values(),
-                                        index=data["suites"].keys()),
-                    "tests": pd.Series(data["tests"].values(),
-                                       index=data["tests"].keys())})
-
-            if self._input_data.get(job, None) is None:
-                self._input_data[job] = pd.Series()
-            self._input_data[job][str(build_nr)] = build_data
-
-            self._cfg.set_input_file_name(job, build_nr,
-                                          result["build"]["file-name"])
-
-            self._cfg.set_input_state(job, build_nr, result["state"])
-
-            for item in result["logs"]:
-                if item[0] == "INFO":
-                    logging.info(item[1])
-                elif item[0] == "ERROR":
-                    logging.error(item[1])
-                elif item[0] == "DEBUG":
-                    logging.debug(item[1])
-                elif item[0] == "CRITICAL":
-                    logging.critical(item[1])
-                elif item[0] == "WARNING":
-                    logging.warning(item[1])
-
-        del data_queue
-
-        # Terminate all workers
-        for worker in workers:
-            worker.terminate()
-            worker.join()
+        logging.info("Memory allocation: {0:,d}MB".format(
+            resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1000))
 
         logging.info("Done.")
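
Notes on selected hunks follow; these are illustrative sketches, not part of the patch.

The widened REGEX_VERSION_VPP (hunks at -280 and -411) gains a third alternative so the lower-case "VPP version:" spelling is matched as well, presumably tracking a change in VPP's own output. A standalone check, with made-up version strings:

    import re

    # Same alternation as in the patch; the new third branch accepts
    # the lower-case "VPP version:" spelling.
    REGEX_VERSION_VPP = re.compile(r"(return STDOUT Version:\s*|"
                                   r"VPP Version:\s*|VPP version:\s*)(.*)")

    # Made-up examples of both spellings; group(2) carries the version.
    for msg in ("VPP Version: 18.10-release",
                "VPP version: 19.01-rc0~xyz"):
        print(re.search(REGEX_VERSION_VPP, msg).group(2))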
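
The reworked _get_testbed() (hunk at -389) trades JSON parsing of the "Arguments:" message for a single regular-expression search over the "Setup of TG node ... done" message. A minimal sketch of that search, using an invented log line:

    import re

    # Pattern from the hunk above; \. matches the literal separators
    # of the IPv4 address.
    REG_TG_IP = re.compile(
        r'Setup of TG node (\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) done')

    # Invented message; real ones come from Robot Framework output.
    message = "Setup of TG node 10.30.51.16 done"

    match = re.search(REG_TG_IP, message)
    # re.search() returns None when nothing matches, so .group(1) would
    # raise AttributeError; the parser catches that and keeps going.
    testbed = str(match.group(1)) if match else None
    print(testbed)  # -> 10.30.51.16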
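
The memory report added at the end of download_and_parse_data() relies on resource.getrusage(): ru_maxrss is the peak resident set size, which Linux reports in kilobytes (macOS reports bytes), so dividing by 1000 yields an approximate decimal-megabyte figure. A sketch, assuming Linux; floor division keeps the {0:,d} format valid on Python 3 too (under Python 2, the patch's plain "/" on integers already floors):

    import resource

    # Peak RSS of the current process; kilobytes on Linux.
    peak_kb = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    print("Memory allocation: {0:,d}MB".format(peak_kb // 1000))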