X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=7b36352ed9ce5f8ad780d010432c211a8d870d4d;hp=340b7dc372bb3e6b94f2ae1a4fb9cb810c3712ec;hb=53f44d52b513fae893e7774d77e3cff160ed9abe;hpb=344ea908c94faf37a10f23627b2f5656ea3e289b
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 340b7dc372..7b36352ed9 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Cisco and/or its affiliates.
+# Copyright (c) 2019 Cisco and/or its affiliates.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at:
@@ -30,12 +30,20 @@ from robot import errors
 from collections import OrderedDict
 from string import replace
 from os import remove
+from os.path import join
+from datetime import datetime as dt
+from datetime import timedelta
+from json import loads

 from jumpavg.AvgStdevMetadataFactory import AvgStdevMetadataFactory

 from input_data_files import download_and_unzip_data_file
 from utils import Worker

+# Separator used in file names
+SEPARATOR = "__"
+
+
 class ExecutionChecker(ResultVisitor):
     """Class to traverse through the test suite structure.

@@ -244,6 +252,9 @@ class ExecutionChecker(ResultVisitor):
     # TODO: Remove when definitely no NDRPDRDISC tests are used:
     REGEX_RATE = re.compile(r'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)\s(\w+)')

+    REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::\s(\d+.\d+).*\n'
+                                r'PLRsearch upper bound::\s(\d+.\d+)')
+
     REGEX_NDRPDR_RATE = re.compile(r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
                                    r'NDR_UPPER:\s(\d+.\d+).*\n'
                                    r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
@@ -268,7 +279,8 @@ class ExecutionChecker(ResultVisitor):
     REGEX_TOLERANCE = re.compile(r'^[\D\d]*LOSS_ACCEPTANCE:\s(\d*\.\d*)\s'
                                  r'[\D\d]*')

-    REGEX_VERSION_VPP = re.compile(r"(return STDOUT Version:\s*)(.*)")
+    REGEX_VERSION_VPP = re.compile(r"(return STDOUT Version:\s*|"
+                                   r"VPP Version:\s*)(.*)")

     REGEX_VERSION_DPDK = re.compile(r"(return STDOUT testpmd)([\d\D\n]*)"
                                     r"(RTE Version: 'DPDK )(.*)(')")
@@ -311,6 +323,9 @@ class ExecutionChecker(ResultVisitor):
         # Timestamp
         self._timestamp = None

+        # Testbed. The testbed is identified by TG node IP address.
+        self._testbed = None
+
         # Mapping of TCs long names
         self._mapping = mapping

@@ -351,7 +366,8 @@ class ExecutionChecker(ResultVisitor):
             "vpp-version": self._get_vpp_version,
             "dpdk-version": self._get_dpdk_version,
             "teardown-vat-history": self._get_vat_history,
-            "test-show-runtime": self._get_show_run
+            "test-show-runtime": self._get_show_run,
+            "testbed": self._get_testbed
         }

     @property
@@ -363,6 +379,28 @@ class ExecutionChecker(ResultVisitor):
         """
         return self._data

+    def _get_testbed(self, msg):
+        """Called when extraction of testbed IP is required.
+        The testbed is identified by TG node IP address.
+
+        :param msg: Message to process.
+        :type msg: Message
+        :returns: Nothing.
+        """
+
+        if msg.message.count("Arguments:"):
+            message = str(msg.message).replace(' ', '').replace('\n', '').\
+                replace("'", '"').replace('b"', '"').\
+                replace("honeycom", "honeycomb")
+            message = loads(message[11:-1])
+            try:
+                self._testbed = message["TG"]["host"]
+            except (KeyError, ValueError):
+                pass
+            finally:
+                self._data["metadata"]["testbed"] = self._testbed
+                self._msg_type = None
+
     def _get_vpp_version(self, msg):
         """Called when extraction of VPP version is required.

@@ -371,7 +409,8 @@ class ExecutionChecker(ResultVisitor):
         :returns: Nothing.
         """

-        if msg.message.count("return STDOUT Version:"):
+        if msg.message.count("return STDOUT Version:") or \
+            msg.message.count("VPP Version:"):
             self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message).
                                 group(2))
             self._data["metadata"]["version"] = self._version
@@ -531,6 +570,33 @@ class ExecutionChecker(ResultVisitor):

         return throughput, status

+    def _get_plr_throughput(self, msg):
+        """Get PLRsearch lower bound and PLRsearch upper bound from the test
+        message.
+
+        :param msg: The test message to be parsed.
+        :type msg: str
+        :returns: Parsed data as a dict and the status (PASS/FAIL).
+        :rtype: tuple(dict, str)
+        """
+
+        throughput = {
+            "LOWER": -1.0,
+            "UPPER": -1.0
+        }
+        status = "FAIL"
+        groups = re.search(self.REGEX_PLR_RATE, msg)
+
+        if groups is not None:
+            try:
+                throughput["LOWER"] = float(groups.group(1))
+                throughput["UPPER"] = float(groups.group(2))
+                status = "PASS"
+            except (IndexError, ValueError):
+                pass
+
+        return throughput, status
+
     def _get_ndrpdr_latency(self, msg):
         """Get LATENCY from the test message.

@@ -696,13 +762,14 @@ class ExecutionChecker(ResultVisitor):
             else:
                 test_result["status"] = "FAIL"
                 self._data["tests"][self._test_ID] = test_result
-                logging.error("The test '{0}' has no or more than one "
+                logging.debug("The test '{0}' has no or more than one "
                               "multi-threading tags.".format(self._test_ID))
-                logging.error("Tags: {0}".format(test_result["tags"]))
+                logging.debug("Tags: {0}".format(test_result["tags"]))
                 return

         if test.status == "PASS" and ("NDRPDRDISC" in tags or
                                       "NDRPDR" in tags or
+                                      "SOAK" in tags or
                                       "TCP" in tags or
                                       "MRR" in tags or
                                       "BMRR" in tags):
@@ -714,6 +781,8 @@ class ExecutionChecker(ResultVisitor):
                 test_result["type"] = "PDR"
             elif "NDRPDR" in tags:
                 test_result["type"] = "NDRPDR"
+            elif "SOAK" in tags:
+                test_result["type"] = "SOAK"
             elif "TCP" in tags:
                 test_result["type"] = "TCP"
             elif "MRR" in tags:
@@ -754,6 +823,10 @@ class ExecutionChecker(ResultVisitor):
                 test_result["latency"], test_result["status"] = \
                     self._get_ndrpdr_latency(test.message)

+            elif test_result["type"] in ("SOAK", ):
+                test_result["throughput"], test_result["status"] = \
+                    self._get_plr_throughput(test.message)
+
             elif test_result["type"] in ("TCP", ):
                 groups = re.search(self.REGEX_TCP, test.message)
                 test_result["result"] = int(groups.group(2))
@@ -765,8 +838,12 @@ class ExecutionChecker(ResultVisitor):
                     items_str = groups.group(1)
                     items_float = [float(item.strip()) for item
                                    in items_str.split(",")]
-                    test_result["result"]["receive-rate"] = \
-                        AvgStdevMetadataFactory.from_data(items_float)
+                    metadata = AvgStdevMetadataFactory.from_data(items_float)
+                    # Next two lines have been introduced in CSIT-1179,
+                    # to be removed in CSIT-1180.
+ metadata.size = 1 + metadata.stdev = 0.0 + test_result["result"]["receive-rate"] = metadata else: groups = re.search(self.REGEX_MRR, test.message) test_result["result"]["receive-rate"] = \ @@ -890,6 +967,8 @@ class ExecutionChecker(ResultVisitor): elif setup_kw.name.count("Setup performance global Variables") \ and not self._timestamp: self._msg_type = "timestamp" + elif setup_kw.name.count("Setup Framework") and not self._testbed: + self._msg_type = "testbed" else: return setup_kw.messages.visit(self) @@ -1139,7 +1218,45 @@ class InputData(object): remove(build["file-name"]) except OSError as err: logs.append(("ERROR", "Cannot remove the file '{0}': {1}". - format(build["file-name"], err))) + format(build["file-name"], repr(err)))) + + # If the time-period is defined in the specification file, remove all + # files which are outside the time period. + timeperiod = self._cfg.input.get("time-period", None) + if timeperiod and data: + now = dt.utcnow() + timeperiod = timedelta(int(timeperiod)) + metadata = data.get("metadata", None) + if metadata: + generated = metadata.get("generated", None) + if generated: + generated = dt.strptime(generated, "%Y%m%d %H:%M") + if (now - generated) > timeperiod: + # Remove the data and the file: + state = "removed" + data = None + logs.append( + ("INFO", + " The build {job}/{build} is outdated, will be " + "removed".format(job=job, build=build["build"]))) + file_name = self._cfg.input["file-name"] + full_name = join( + self._cfg.environment["paths"]["DIR[WORKING,DATA]"], + "{job}{sep}{build}{sep}{name}". + format(job=job, + sep=SEPARATOR, + build=build["build"], + name=file_name)) + try: + remove(full_name) + logs.append(("INFO", + " The file {name} has been removed". + format(name=full_name))) + except OSError as err: + logs.append(("ERROR", + "Cannot remove the file '{0}': {1}". + format(full_name, repr(err)))) + logs.append(("INFO", " Done.")) result = {
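
For reference, the _get_testbed() addition earlier in this diff flattens the logged "Setup Framework" arguments into JSON and reads the TG host address out of it. The snippet below is an illustrative sketch only, not part of the patch; the message text and the IP address in it are invented, and real Robot Framework messages are much longer.

# Illustrative sketch only: shows the string massaging done by _get_testbed().
# The message below is invented; a real "Setup Framework" message differs.
from json import loads

msg_message = "Arguments: [ {'TG': {'host': '10.0.0.1', 'type': 'TG'}} ]"

if msg_message.count("Arguments:"):
    message = str(msg_message).replace(' ', '').replace('\n', '').\
        replace("'", '"').replace('b"', '"').replace("honeycom", "honeycomb")
    # After the replacements: Arguments:[{"TG":{"host":"10.0.0.1","type":"TG"}}]
    message = loads(message[11:-1])   # drop 'Arguments:[' and the closing ']'
    print(message["TG"]["host"])      # prints: 10.0.0.1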
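
Similarly, a standalone sketch of how the new REGEX_PLR_RATE is applied by _get_plr_throughput() to SOAK test results. The PLRsearch message below is invented for illustration; real test messages carry more text around the two bounds.

# Illustrative sketch only: exercises REGEX_PLR_RATE the way
# _get_plr_throughput() does. The sample message is invented.
import re

REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::\s(\d+.\d+).*\n'
                            r'PLRsearch upper bound::\s(\d+.\d+)')

msg = ("PLRsearch lower bound:: 4532716.3 pps\n"
       "PLRsearch upper bound:: 6208348.9 pps")

throughput = {"LOWER": -1.0, "UPPER": -1.0}
status = "FAIL"
groups = re.search(REGEX_PLR_RATE, msg)
if groups is not None:
    try:
        throughput["LOWER"] = float(groups.group(1))
        throughput["UPPER"] = float(groups.group(2))
        status = "PASS"
    except (IndexError, ValueError):
        pass
# Expected result: LOWER=4532716.3, UPPER=6208348.9, status PASS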
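
Finally, the time-period handling added to InputData at the end of this diff compares the "generated" timestamp from the parsed metadata with a number of days configured in the specification file. A rough standalone sketch, with invented values, follows.

# Illustrative sketch only: the outdated-build check with invented values.
# In the patch, "generated" comes from data["metadata"] and the period
# from the "time-period" entry of the specification file.
from datetime import datetime as dt
from datetime import timedelta

generated = dt.strptime("20190102 13:45", "%Y%m%d %H:%M")
timeperiod = timedelta(int("90"))  # "time-period" is given in days
if (dt.utcnow() - generated) > timeperiod:
    # In the patch this is where the parsed data is dropped and the
    # downloaded file is removed from the working directory.
    print("build is outdated")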