-# Copyright (c) 2020 Cisco and/or its affiliates.
+# Copyright (c) 2022 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
r'PDR_UPPER:\s(\d+.\d+)'
)
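+ # Captures the NDR/PDR bounds expressed in Gbps, i.e. the value after the
+ # comma on each bound line (assuming the usual two-value format, e.g.
+ # "NDR_LOWER: 12345678.0 pps, 4.81 Gbps"; values illustrative only).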
+ REGEX_NDRPDR_GBPS = re.compile(
+ r'NDR_LOWER:.*,\s(\d+.\d+).*\n.*\n'
+ r'NDR_UPPER:.*,\s(\d+.\d+).*\n'
+ r'PDR_LOWER:.*,\s(\d+.\d+).*\n.*\n'
+ r'PDR_UPPER:.*,\s(\d+.\d+)'
+ )
REGEX_PERF_MSG_INFO = re.compile(
r'NDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
r'PDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
r'Latency at 50% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
r'Latency at 10% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
)
+ REGEX_CPS_MSG_INFO = re.compile(
+ r'NDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*\n'
+ r'PDR_LOWER:\s(\d+.\d+)\s.*\s.*\n.*\n.*'
+ )
+ REGEX_PPS_MSG_INFO = re.compile(
+ r'NDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*\n'
+ r'PDR_LOWER:\s(\d+.\d+)\s.*\s(\d+.\d+)\s.*\n.*\n.*'
+ )
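+ # The CPS variant captures only the connection rates (NDR and PDR lower
+ # bounds); the PPS variant captures the packet rate and the bandwidth for
+ # each bound. Both feed the short per-test summaries built further below.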
REGEX_MRR_MSG_INFO = re.compile(r'.*\[(.*)\]')
- # TODO: Remove when not needed
+ REGEX_VSAP_MSG_INFO = re.compile(
+ r'Transfer Rate: (\d*.\d*).*\n'
+ r'Latency: (\d*.\d*).*\n'
+ r'Completed requests: (\d*).*\n'
+ r'Failed requests: (\d*).*\n'
+ r'Total data transferred: (\d*).*\n'
+ r'Connection [cr]ps rate:\s*(\d*.\d*)'
+ )
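+ # Matches the ab-style summary logged by VSAP (nginx) tests; the groups are
+ # transfer rate, latency, completed/failed requests, bytes transferred and
+ # the connection cps/rps rate. Field units depend on the test output.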
+
+ # Needed for CPS and PPS tests
REGEX_NDRPDR_LAT_BASE = re.compile(
r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
r'LATENCY.*\[\'(.*)\', \'(.*)\'\]'
r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
r'Latency.*\[\'(.*)\', \'(.*)\'\]'
)
- # TODO: Remove when not needed
- REGEX_NDRPDR_LAT_LONG = re.compile(
- r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
- r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]'
- )
+
REGEX_VERSION_VPP = re.compile(
r"(return STDOUT Version:\s*|"
r"VPP Version:\s*|VPP version:\s*)(.*)"
r'tx\s(\d*),\srx\s(\d*)'
)
REGEX_BMRR = re.compile(
- r'Maximum Receive Rate trial results'
- r' in packets per second: \[(.*)\]'
+ r'.*trial results.*: \[(.*)\]'
)
REGEX_RECONF_LOSS = re.compile(
r'Packets lost due to reconfig: (\d*)'
)
REGEX_TC_TAG = re.compile(r'\d+[tT]\d+[cC]')
- REGEX_TC_NAME_OLD = re.compile(r'-\d+[tT]\d+[cC]-')
-
REGEX_TC_NAME_NEW = re.compile(r'-\d+[cC]-')
REGEX_TC_NUMBER = re.compile(r'tc\d{2}-')
REGEX_TC_PAPI_CLI = re.compile(r'.*\((\d+.\d+.\d+.\d+.) - (.*)\)')
- def __init__(self, metadata, mapping, ignore):
+ REGEX_SH_RUN_HOST = re.compile(
+ r'hostname=\"(\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3})\",hook=\"(.*)\"'
+ )
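+ # Extracts the DUT host IP and the hook (socket path) from headers such as
+ # hostname="10.0.0.1",hook="..." (values illustrative); used when parsing
+ # show-runtime and telemetry messages.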
+
+ def __init__(self, metadata, mapping, ignore, process_oper):
"""Initialisation.
:param metadata: Key-value pairs to be included in "metadata" part of
:param mapping: Mapping of the old names of test cases to the new
(actual) one.
:param ignore: List of TCs to be ignored.
+ :param process_oper: If True, operational data (show run, telemetry) is
+ processed.
:type metadata: dict
:type mapping: dict
:type ignore: list
+ :type process_oper: bool
"""
# Type of message to parse out from the test messages
# Ignore list
self._ignore = ignore
+ self._process_oper = process_oper
+
# Number of PAPI History messages found:
# 0 - no message
# 1 - PAPI History of DUT1
self._conf_history_lookup_nr = 0
self._sh_run_counter = 0
+ self._telemetry_kw_counter = 0
+ self._telemetry_msg_counter = 0
# Test ID of currently processed test - the lowercase full path to the
# test
# Dictionary defining the methods used to parse different types of
# messages
self.parse_msg = {
- u"timestamp": self._get_timestamp,
u"vpp-version": self._get_vpp_version,
u"dpdk-version": self._get_dpdk_version,
- # TODO: Remove when not needed:
- u"teardown-vat-history": self._get_vat_history,
u"teardown-papi-history": self._get_papi_history,
u"test-show-runtime": self._get_show_run,
- u"testbed": self._get_testbed
+ u"testbed": self._get_testbed,
+ u"test-telemetry": self._get_telemetry
}
@property
except (AttributeError, IndexError, ValueError, KeyError):
return u"Test Failed."
+ def _get_data_from_cps_test_msg(self, msg):
+ """Get info from message of NDRPDR CPS tests.
+
+ :param msg: Message to be processed.
+ :type msg: str
+ :returns: Processed message or "Test Failed." if a problem occurs.
+ :rtype: str
+ """
+
+ groups = re.search(self.REGEX_CPS_MSG_INFO, msg)
+ if not groups or groups.lastindex != 2:
+ return u"Test Failed."
+
+ try:
+ return (
+ f"1. {(float(groups.group(1)) / 1e6):5.2f}\n"
+ f"2. {(float(groups.group(2)) / 1e6):5.2f}"
+ )
+ except (AttributeError, IndexError, ValueError, KeyError):
+ return u"Test Failed."
+
+ def _get_data_from_pps_test_msg(self, msg):
+ """Get info from message of NDRPDR PPS tests.
+
+ :param msg: Message to be processed.
+ :type msg: str
+ :returns: Processed message or "Test Failed." if a problem occurs.
+ :rtype: str
+ """
+
+ groups = re.search(self.REGEX_PPS_MSG_INFO, msg)
+ if not groups or groups.lastindex != 4:
+ return u"Test Failed."
+
+ try:
+ return (
+ f"1. {(float(groups.group(1)) / 1e6):5.2f} "
+ f"{float(groups.group(2)):5.2f}\n"
+ f"2. {(float(groups.group(3)) / 1e6):5.2f} "
+ f"{float(groups.group(4)):5.2f}"
+ )
+ except (AttributeError, IndexError, ValueError, KeyError):
+ return u"Test Failed."
+
def _get_data_from_perf_test_msg(self, msg):
"""Get info from message of NDRPDR performance tests.
:param msg: Message to be processed.
:type msg: str
- :returns: Processed message or original message if a problem occurs.
+ :returns: Processed message or "Test Failed." if a problem occurs.
:rtype: str
"""
return u"Test Failed."
def _process_lat(in_str_1, in_str_2):
- """Extract min, avg, max values from latency string.
+ """Extract P50, P90 and P99 latencies or min, avg, max values from
+ latency string.
:param in_str_1: Latency string for one direction produced by robot
framework.
try:
hdr_lat_1 = hdrh.histogram.HdrHistogram.decode(in_list_1[3])
except hdrh.codec.HdrLengthException:
- return None
+ hdr_lat_1 = None
in_list_2[3] += u"=" * (len(in_list_2[3]) % 4)
try:
hdr_lat_2 = hdrh.histogram.HdrHistogram.decode(in_list_2[3])
except hdrh.codec.HdrLengthException:
- return None
+ hdr_lat_2 = None
if hdr_lat_1 and hdr_lat_2:
hdr_lat = (
hdr_lat_2.get_value_at_percentile(90.0),
hdr_lat_2.get_value_at_percentile(99.0)
)
-
if all(hdr_lat):
return hdr_lat
- return None
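+ # Fallback when the hdrh histograms cannot be decoded: use the integer
+ # min/avg/max values of both directions; -1, 0 and 4294967295 (2^32 - 1)
+ # are treated as invalid readings and the whole sample is dropped.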
+ hdr_lat = (
+ int(in_list_1[0]), int(in_list_1[1]), int(in_list_1[2]),
+ int(in_list_2[0]), int(in_list_2[1]), int(in_list_2[2])
+ )
+ for item in hdr_lat:
+ if item in (-1, 4294967295, 0):
+ return None
+ return hdr_lat
try:
out_msg = (
"""
if msg.message.count(u"return STDOUT Version:") or \
- msg.message.count(u"VPP Version:") or \
- msg.message.count(u"VPP version:"):
- self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message).
- group(2))
+ msg.message.count(u"VPP Version:") or \
+ msg.message.count(u"VPP version:"):
+ self._version = str(
+ re.search(self.REGEX_VERSION_VPP, msg.message).group(2)
+ )
self._data[u"metadata"][u"version"] = self._version
self._msg_type = None
finally:
self._msg_type = None
- def _get_timestamp(self, msg):
- """Called when extraction of timestamp is required.
-
- :param msg: Message to process.
- :type msg: Message
- :returns: Nothing.
- """
-
- self._timestamp = msg.timestamp[:14]
- self._data[u"metadata"][u"generated"] = self._timestamp
- self._msg_type = None
-
- def _get_vat_history(self, msg):
- """Called when extraction of VAT command history is required.
-
- TODO: Remove when not needed.
-
- :param msg: Message to process.
- :type msg: Message
- :returns: Nothing.
- """
- if msg.message.count(u"VAT command history:"):
- self._conf_history_lookup_nr += 1
- if self._conf_history_lookup_nr == 1:
- self._data[u"tests"][self._test_id][u"conf-history"] = str()
- else:
- self._msg_type = None
- text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
- r"VAT command history:", u"",
- msg.message, count=1).replace(u'\n', u' |br| ').\
- replace(u'"', u"'")
-
- self._data[u"tests"][self._test_id][u"conf-history"] += (
- f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
- )
-
def _get_papi_history(self, msg):
"""Called when extraction of PAPI command history is required.
self._data[u"tests"][self._test_id][u"conf-history"] = str()
else:
self._msg_type = None
- text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
- r"PAPI command history:", u"",
- msg.message, count=1).replace(u'\n', u' |br| ').\
- replace(u'"', u"'")
+ text = re.sub(
+ r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} PAPI command history:",
+ u"",
+ msg.message,
+ count=1
+ ).replace(u'"', u"'")
self._data[u"tests"][self._test_id][u"conf-history"] += (
- f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
+ f"**DUT{str(self._conf_history_lookup_nr)}:** {text}"
)
def _get_show_run(self, msg):
except (AttributeError, IndexError):
sock = u""
- runtime = loads(str(msg.message).replace(u' ', u'').replace(u'\n', u'').
- replace(u"'", u'"').replace(u'b"', u'"').
- replace(u'u"', u'"').split(u":", 1)[1])
+ dut = u"dut{nr}".format(
+ nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
- try:
- threads_nr = len(runtime[0][u"clocks"])
- except (IndexError, KeyError):
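+ # The raw "show runtime" reply is kept as a compact JSON string here; it is
+ # decoded and tabulated only when the operational data is printed
+ # (see print_all_oper_data).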
+ self._data[u'tests'][self._test_id][u'show-run'][dut] = \
+ copy.copy(
+ {
+ u"host": host,
+ u"socket": sock,
+ u"runtime": str(msg.message).replace(u' ', u'').
+ replace(u'\n', u'').replace(u"'", u'"').
+ replace(u'b"', u'"').replace(u'u"', u'"').
+ split(u":", 1)[1]
+ }
+ )
+
+ def _get_telemetry(self, msg):
+ """Called when extraction of VPP telemetry data is required.
+
+ :param msg: Message to process.
+ :type msg: Message
+ :returns: Nothing.
+ """
+
+ if self._telemetry_kw_counter > 1:
+ return
+ if not msg.message.count(u"# TYPE vpp_runtime_calls"):
return
- dut = u"DUT{nr}".format(
- nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
+ if u"telemetry-show-run" not in \
+ self._data[u"tests"][self._test_id].keys():
+ self._data[u"tests"][self._test_id][u"telemetry-show-run"] = dict()
- oper = {
+ self._telemetry_msg_counter += 1
+ groups = re.search(self.REGEX_SH_RUN_HOST, msg.message)
+ if not groups:
+ return
+ try:
+ host = groups.group(1)
+ except (AttributeError, IndexError):
+ host = u""
+ try:
+ sock = groups.group(2)
+ except (AttributeError, IndexError):
+ sock = u""
+ runtime = {
+ u"source_type": u"node",
+ u"source_id": host,
+ u"msg_type": u"metric",
+ u"log_level": u"INFO",
+ u"timestamp": msg.timestamp,
+ u"msg": u"show_runtime",
u"host": host,
u"socket": sock,
- u"threads": OrderedDict({idx: list() for idx in range(threads_nr)})
+ u"data": list()
}
-
- for item in runtime:
- for idx in range(threads_nr):
- if item[u"vectors"][idx] > 0:
- clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
- elif item[u"calls"][idx] > 0:
- clocks = item[u"clocks"][idx] / item[u"calls"][idx]
- elif item[u"suspends"][idx] > 0:
- clocks = item[u"clocks"][idx] / item[u"suspends"][idx]
- else:
- clocks = 0.0
-
- if item[u"calls"][idx] > 0:
- vectors_call = item[u"vectors"][idx] / item[u"calls"][idx]
- else:
- vectors_call = 0.0
-
- if int(item[u"calls"][idx]) + int(item[u"vectors"][idx]) + \
- int(item[u"suspends"][idx]):
- oper[u"threads"][idx].append([
- item[u"name"],
- item[u"calls"][idx],
- item[u"vectors"][idx],
- item[u"suspends"][idx],
- clocks,
- vectors_call
- ])
-
- self._data[u'tests'][self._test_id][u'show-run'][dut] = copy.copy(oper)
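+ # Each metric line is expected in Prometheus exposition format, e.g.
+ # vpp_runtime_calls{name="ip4-input",thread_id="0"} 123 1658000000000
+ # (labels and values illustrative). The suffix after "vpp_runtime_" becomes
+ # the metric name, the labels are parsed into a dict with the "name" label
+ # renamed to "graph_node", and the trailing fields are value and timestamp.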
+ for line in msg.message.splitlines():
+ if not line.startswith(u"vpp_runtime_"):
+ continue
+ try:
+ params, value, timestamp = line.rsplit(u" ", maxsplit=2)
+ cut = params.index(u"{")
+ name = params[:cut].split(u"_", maxsplit=2)[-1]
+ labels = eval(
+ u"dict" + params[cut:].replace('{', '(').replace('}', ')')
+ )
+ labels[u"graph_node"] = labels.pop(u"name")
+ runtime[u"data"].append(
+ {
+ u"name": name,
+ u"value": value,
+ u"timestamp": timestamp,
+ u"labels": labels
+ }
+ )
+ except (TypeError, ValueError, IndexError):
+ continue
+ self._data[u'tests'][self._test_id][u'telemetry-show-run']\
+ [f"dut{self._telemetry_msg_counter}"] = copy.copy(
+ {
+ u"host": host,
+ u"socket": sock,
+ u"runtime": runtime
+ }
+ )
def _get_ndrpdr_throughput(self, msg):
"""Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test
return throughput, status
+ def _get_ndrpdr_throughput_gbps(self, msg):
+ """Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER in Gbps from the
+ test message.
+
+ :param msg: The test message to be parsed.
+ :type msg: str
+ :returns: Parsed data as a dict and the status (PASS/FAIL).
+ :rtype: tuple(dict, str)
+ """
+
+ gbps = {
+ u"NDR": {u"LOWER": -1.0, u"UPPER": -1.0},
+ u"PDR": {u"LOWER": -1.0, u"UPPER": -1.0}
+ }
+ status = u"FAIL"
+ groups = re.search(self.REGEX_NDRPDR_GBPS, msg)
+
+ if groups is not None:
+ try:
+ gbps[u"NDR"][u"LOWER"] = float(groups.group(1))
+ gbps[u"NDR"][u"UPPER"] = float(groups.group(2))
+ gbps[u"PDR"][u"LOWER"] = float(groups.group(3))
+ gbps[u"PDR"][u"UPPER"] = float(groups.group(4))
+ status = u"PASS"
+ except (IndexError, ValueError):
+ pass
+
+ return gbps, status
+
def _get_plr_throughput(self, msg):
"""Get PLRsearch lower bound and PLRsearch upper bound from the test
message.
},
}
- # TODO: Rewrite when long and base are not needed
- groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg)
- if groups is None:
- groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+ groups = re.search(self.REGEX_NDRPDR_LAT, msg)
if groups is None:
groups = re.search(self.REGEX_NDRPDR_LAT_BASE, msg)
if groups is None:
except (IndexError, ValueError):
pass
- # TODO: Remove when not needed
- latency[u"NDR10"] = {
- u"direction1": copy.copy(latency_default),
- u"direction2": copy.copy(latency_default)
- }
- latency[u"NDR50"] = {
- u"direction1": copy.copy(latency_default),
- u"direction2": copy.copy(latency_default)
- }
- latency[u"NDR90"] = {
- u"direction1": copy.copy(latency_default),
- u"direction2": copy.copy(latency_default)
- }
- try:
- latency[u"LAT0"][u"direction1"] = process_latency(groups.group(5))
- latency[u"LAT0"][u"direction2"] = process_latency(groups.group(6))
- latency[u"NDR10"][u"direction1"] = process_latency(groups.group(7))
- latency[u"NDR10"][u"direction2"] = process_latency(groups.group(8))
- latency[u"NDR50"][u"direction1"] = process_latency(groups.group(9))
- latency[u"NDR50"][u"direction2"] = process_latency(groups.group(10))
- latency[u"NDR90"][u"direction1"] = process_latency(groups.group(11))
- latency[u"NDR90"][u"direction2"] = process_latency(groups.group(12))
- latency[u"PDR10"][u"direction1"] = process_latency(groups.group(13))
- latency[u"PDR10"][u"direction2"] = process_latency(groups.group(14))
- latency[u"PDR50"][u"direction1"] = process_latency(groups.group(15))
- latency[u"PDR50"][u"direction2"] = process_latency(groups.group(16))
- latency[u"PDR90"][u"direction1"] = process_latency(groups.group(17))
- latency[u"PDR90"][u"direction2"] = process_latency(groups.group(18))
- return latency, u"PASS"
- except (IndexError, ValueError):
- pass
-
return latency, u"FAIL"
@staticmethod
return result, status
+ def _get_vsap_data(self, msg, tags):
+ """Get data from the vsap test message.
+
+ :param msg: The test message to be parsed.
+ :param tags: Test tags.
+ :type msg: str
+ :type tags: list
+ :returns: Parsed data as a JSON dict and the status (PASS/FAIL).
+ :rtype: tuple(dict, str)
+ """
+ result = dict()
+ status = u"FAIL"
+
+ groups = re.search(self.REGEX_VSAP_MSG_INFO, msg)
+ if groups is not None:
+ try:
+ result[u"transfer-rate"] = float(groups.group(1)) * 1e3
+ result[u"latency"] = float(groups.group(2))
+ result[u"completed-requests"] = int(groups.group(3))
+ result[u"failed-requests"] = int(groups.group(4))
+ result[u"bytes-transferred"] = int(groups.group(5))
+ if u"TCP_CPS"in tags:
+ result[u"cps"] = float(groups.group(6))
+ elif u"TCP_RPS" in tags:
+ result[u"rps"] = float(groups.group(6))
+ else:
+ return result, status
+ status = u"PASS"
+ except (IndexError, ValueError):
+ pass
+
+ return result, status
+
def visit_suite(self, suite):
"""Implements traversing through the suite and its direct children.
except AttributeError:
return
- doc_str = suite.doc.\
- replace(u'"', u"'").\
- replace(u'\n', u' ').\
- replace(u'\r', u'').\
- replace(u'*[', u' |br| *[').\
- replace(u"*", u"**").\
- replace(u' |br| *[', u'*[', 1)
-
self._data[u"suites"][suite.longname.lower().
replace(u'"', u"'").
replace(u" ", u"_")] = {
u"name": suite.name.lower(),
- u"doc": doc_str,
+ u"doc": suite.doc,
u"parent": parent_name,
u"level": len(suite.longname.split(u"."))
}
"""
self._sh_run_counter = 0
+ self._telemetry_kw_counter = 0
+ self._telemetry_msg_counter = 0
longname_orig = test.longname.lower()
test_result[u"parent"] = test.parent.name.lower()
test_result[u"tags"] = tags
- test_result["doc"] = test.doc.\
- replace(u'"', u"'").\
- replace(u'\n', u' ').\
- replace(u'\r', u'').\
- replace(u'[', u' |br| [').\
- replace(u' |br| [', u'[', 1)
- test_result[u"type"] = u"FUNC"
+ test_result["doc"] = test.doc
+ test_result[u"type"] = u""
test_result[u"status"] = test.status
+ test_result[u"starttime"] = test.starttime
+ test_result[u"endtime"] = test.endtime
if test.status == u"PASS":
if u"NDRPDR" in tags:
- test_result[u"msg"] = self._get_data_from_perf_test_msg(
- test.message).replace(u'\n', u' |br| ').\
- replace(u'\r', u'').replace(u'"', u"'")
+ if u"TCP_PPS" in tags or u"UDP_PPS" in tags:
+ test_result[u"msg"] = self._get_data_from_pps_test_msg(
+ test.message)
+ elif u"TCP_CPS" in tags or u"UDP_CPS" in tags:
+ test_result[u"msg"] = self._get_data_from_cps_test_msg(
+ test.message)
+ else:
+ test_result[u"msg"] = self._get_data_from_perf_test_msg(
+ test.message)
elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
test_result[u"msg"] = self._get_data_from_mrr_test_msg(
- test.message).replace(u'\n', u' |br| ').\
- replace(u'\r', u'').replace(u'"', u"'")
+ test.message)
else:
- test_result[u"msg"] = test.message.replace(u'\n', u' |br| ').\
- replace(u'\r', u'').replace(u'"', u"'")
+ test_result[u"msg"] = test.message
else:
- test_result[u"msg"] = u"Test Failed."
+ test_result[u"msg"] = test.message
- if u"PERFTEST" in tags:
+ if u"PERFTEST" in tags and u"TREX" not in tags:
# Replace info about cores (e.g. -1c-) with the info about threads
# and cores (e.g. -1t1c-) in the long test case names and in the
# test case names if necessary.
- groups = re.search(self.REGEX_TC_NAME_OLD, self._test_id)
- if not groups:
- tag_count = 0
- tag_tc = str()
- for tag in test_result[u"tags"]:
- groups = re.search(self.REGEX_TC_TAG, tag)
- if groups:
- tag_count += 1
- tag_tc = tag
-
- if tag_count == 1:
- self._test_id = re.sub(
- self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
- self._test_id, count=1
- )
- test_result[u"name"] = re.sub(
- self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
- test_result["name"], count=1
- )
- else:
- test_result[u"status"] = u"FAIL"
- self._data[u"tests"][self._test_id] = test_result
- logging.debug(
- f"The test {self._test_id} has no or more than one "
- f"multi-threading tags.\n"
- f"Tags: {test_result[u'tags']}"
- )
- return
+ tag_count = 0
+ tag_tc = str()
+ for tag in test_result[u"tags"]:
+ groups = re.search(self.REGEX_TC_TAG, tag)
+ if groups:
+ tag_count += 1
+ tag_tc = tag
+
+ if tag_count == 1:
+ self._test_id = re.sub(
+ self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
+ self._test_id, count=1
+ )
+ test_result[u"name"] = re.sub(
+ self.REGEX_TC_NAME_NEW, f"-{tag_tc.lower()}-",
+ test_result["name"], count=1
+ )
+ else:
+ test_result[u"status"] = u"FAIL"
+ self._data[u"tests"][self._test_id] = test_result
+ logging.debug(
+ f"The test {self._test_id} has no or more than one "
+ f"multi-threading tags.\n"
+ f"Tags: {test_result[u'tags']}"
+ )
+ return
- if test.status == u"PASS":
- if u"NDRPDR" in tags:
+ if u"DEVICETEST" in tags:
+ test_result[u"type"] = u"DEVICETEST"
+ elif u"NDRPDR" in tags:
+ if u"TCP_CPS" in tags or u"UDP_CPS" in tags:
+ test_result[u"type"] = u"CPS"
+ else:
test_result[u"type"] = u"NDRPDR"
+ if test.status == u"PASS":
test_result[u"throughput"], test_result[u"status"] = \
self._get_ndrpdr_throughput(test.message)
+ test_result[u"gbps"], test_result[u"status"] = \
+ self._get_ndrpdr_throughput_gbps(test.message)
test_result[u"latency"], test_result[u"status"] = \
self._get_ndrpdr_latency(test.message)
- elif u"SOAK" in tags:
- test_result[u"type"] = u"SOAK"
- test_result[u"throughput"], test_result[u"status"] = \
- self._get_plr_throughput(test.message)
- elif u"HOSTSTACK" in tags:
- test_result[u"type"] = u"HOSTSTACK"
- test_result[u"result"], test_result[u"status"] = \
- self._get_hoststack_data(test.message, tags)
- elif u"TCP" in tags:
- test_result[u"type"] = u"TCP"
- groups = re.search(self.REGEX_TCP, test.message)
- test_result[u"result"] = int(groups.group(2))
- elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
- if u"MRR" in tags:
- test_result[u"type"] = u"MRR"
- else:
- test_result[u"type"] = u"BMRR"
-
+ elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
+ if u"MRR" in tags:
+ test_result[u"type"] = u"MRR"
+ else:
+ test_result[u"type"] = u"BMRR"
+ if test.status == u"PASS":
test_result[u"result"] = dict()
groups = re.search(self.REGEX_BMRR, test.message)
if groups is not None:
items_str = groups.group(1)
- items_float = [float(item.strip()) for item
- in items_str.split(",")]
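+ # Trial results may be reported as quoted numbers, e.g. ['1.2e6', '1.3e6']
+ # (illustrative); quotes are stripped before the conversion to float.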
+ items_float = [
+ float(item.strip().replace(u"'", u""))
+ for item in items_str.split(",")
+ ]
# Use whole list in CSIT-1180.
stats = jumpavg.AvgStdevStats.for_runs(items_float)
+ test_result[u"result"][u"samples"] = items_float
test_result[u"result"][u"receive-rate"] = stats.avg
test_result[u"result"][u"receive-stdev"] = stats.stdev
else:
groups = re.search(self.REGEX_MRR, test.message)
test_result[u"result"][u"receive-rate"] = \
float(groups.group(3)) / float(groups.group(1))
- elif u"RECONF" in tags:
- test_result[u"type"] = u"RECONF"
+ elif u"SOAK" in tags:
+ test_result[u"type"] = u"SOAK"
+ if test.status == u"PASS":
+ test_result[u"throughput"], test_result[u"status"] = \
+ self._get_plr_throughput(test.message)
+ elif u"LDP_NGINX" in tags:
+ test_result[u"type"] = u"LDP_NGINX"
+ test_result[u"result"], test_result[u"status"] = \
+ self._get_vsap_data(test.message, tags)
+ elif u"HOSTSTACK" in tags:
+ test_result[u"type"] = u"HOSTSTACK"
+ if test.status == u"PASS":
+ test_result[u"result"], test_result[u"status"] = \
+ self._get_hoststack_data(test.message, tags)
+ # elif u"TCP" in tags: # This might be not used
+ # test_result[u"type"] = u"TCP"
+ # if test.status == u"PASS":
+ # groups = re.search(self.REGEX_TCP, test.message)
+ # test_result[u"result"] = int(groups.group(2))
+ elif u"RECONF" in tags:
+ test_result[u"type"] = u"RECONF"
+ if test.status == u"PASS":
test_result[u"result"] = None
try:
grps_loss = re.search(self.REGEX_RECONF_LOSS, test.message)
}
except (AttributeError, IndexError, ValueError, TypeError):
test_result[u"status"] = u"FAIL"
- elif u"DEVICETEST" in tags:
- test_result[u"type"] = u"DEVICETEST"
- else:
- test_result[u"status"] = u"FAIL"
- self._data[u"tests"][self._test_id] = test_result
- return
+ else:
+ test_result[u"status"] = u"FAIL"
self._data[u"tests"][self._test_id] = test_result
:type test_kw: Keyword
:returns: Nothing.
"""
- if test_kw.name.count(u"Show Runtime On All Duts") or \
- test_kw.name.count(u"Show Runtime Counters On All Duts"):
+ if not self._process_oper:
+ return
+
+ if test_kw.name.count(u"Run Telemetry On All Duts"):
+ self._msg_type = u"test-telemetry"
+ self._telemetry_kw_counter += 1
+ elif test_kw.name.count(u"Show Runtime On All Duts"):
self._msg_type = u"test-show-runtime"
self._sh_run_counter += 1
- elif test_kw.name.count(u"Install Dpdk Test On All Duts") and \
- not self._version:
- self._msg_type = u"dpdk-version"
else:
return
test_kw.messages.visit(self)
if setup_kw.name.count(u"Show Vpp Version On All Duts") \
and not self._version:
self._msg_type = u"vpp-version"
- elif setup_kw.name.count(u"Set Global Variable") \
- and not self._timestamp:
- self._msg_type = u"timestamp"
+ elif setup_kw.name.count(u"Install Dpdk Framework On All Duts") and \
+ not self._version:
+ self._msg_type = u"dpdk-version"
elif setup_kw.name.count(u"Setup Framework") and not self._testbed:
self._msg_type = u"testbed"
else:
:type teardown_kw: Keyword
:returns: Nothing.
"""
-
- if teardown_kw.name.count(u"Show Vat History On All Duts"):
- # TODO: Remove when not needed:
- self._conf_history_lookup_nr = 0
- self._msg_type = u"teardown-vat-history"
- teardown_kw.messages.visit(self)
- elif teardown_kw.name.count(u"Show Papi History On All Duts"):
+ if teardown_kw.name.count(u"Show Papi History On All Duts"):
self._conf_history_lookup_nr = 0
self._msg_type = u"teardown-papi-history"
teardown_kw.messages.visit(self)
(as described in ExecutionChecker documentation)
"""
- def __init__(self, spec):
+ def __init__(self, spec, for_output):
"""Initialization.
:param spec: Specification.
+ :param for_output: Output to be generated from downloaded data.
:type spec: Specification
+ :type for_output: str
"""
# Specification:
self._cfg = spec
+ self._for_output = for_output
+
# Data store:
self._input_data = pd.Series()
f"Error occurred while parsing output.xml: {repr(err)}"
)
return None
- checker = ExecutionChecker(metadata, self._cfg.mapping,
- self._cfg.ignore)
+
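+ # Operational data (show runtime, telemetry) is processed only for
+ # coverage jobs; iterative jobs can be re-enabled per testbed if needed
+ # (see the commented-out block below).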
+ process_oper = False
+ if u"-vpp-perf-report-coverage-" in job:
+ process_oper = True
+ # elif u"-vpp-perf-report-iterative-" in job:
+ # # Exceptions for TBs where we do not have coverage data:
+ # for item in (u"-2n-icx", ):
+ # if item in job:
+ # process_oper = True
+ # break
+ checker = ExecutionChecker(
+ metadata, self._cfg.mapping, self._cfg.ignore, process_oper
+ )
result.visit(checker)
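+ # Attach overall Robot statistics (test totals, pass/fail counts, elapsed
+ # time and the suite end time) to the metadata of the parsed build.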
+ checker.data[u"metadata"][u"tests_total"] = \
+ result.statistics.total.all.total
+ checker.data[u"metadata"][u"tests_passed"] = \
+ result.statistics.total.all.passed
+ checker.data[u"metadata"][u"tests_failed"] = \
+ result.statistics.total.all.failed
+ checker.data[u"metadata"][u"elapsedtime"] = result.suite.elapsedtime
+ checker.data[u"metadata"][u"generated"] = result.suite.endtime[:14]
+
return checker.data
def _download_and_parse_build(self, job, build, repeat, pid=10000):
:type repeat: int
"""
- logging.info(f" Processing the job/build: {job}: {build[u'build']}")
+ logging.info(f"Processing the job/build: {job}: {build[u'build']}")
state = u"failed"
success = False
do_repeat -= 1
if not success:
logging.error(
- f"It is not possible to download the input data file from the "
- f"job {job}, build {build[u'build']}, or it is damaged. "
- f"Skipped."
+ f"It is not possible to download the input data file from the "
+ f"job {job}, build {build[u'build']}, or it is damaged. "
+ f"Skipped."
)
if success:
- logging.info(f" Processing data from build {build[u'build']}")
+ logging.info(f" Processing data from build {build[u'build']}")
data = self._parse_tests(job, build)
if data is None:
logging.error(
- f"Input data file from the job {job}, build "
- f"{build[u'build']} is damaged. Skipped."
+ f"Input data file from the job {job}, build "
+ f"{build[u'build']} is damaged. Skipped."
)
else:
state = u"processed"
# If the time-period is defined in the specification file, remove all
# files which are outside the time period.
is_last = False
- timeperiod = self._cfg.input.get(u"time-period", None)
+ timeperiod = self._cfg.environment.get(u"time-period", None)
if timeperiod and data:
now = dt.utcnow()
timeperiod = timedelta(int(timeperiod))
data = None
is_last = True
logging.info(
- f" The build {job}/{build[u'build']} is "
+ f" The build {job}/{build[u'build']} is "
f"outdated, will be removed."
)
- logging.info(u" Done.")
-
return {
u"data": data,
u"state": state,
logging.info(u"Downloading and parsing input files ...")
- for job, builds in self._cfg.builds.items():
+ for job, builds in self._cfg.input.items():
for build in builds:
result = self._download_and_parse_build(job, build, repeat)
if self._input_data.get(job, None) is None:
self._input_data[job] = pd.Series()
self._input_data[job][str(build_nr)] = build_data
-
self._cfg.set_input_file_name(
- job, build_nr, result[u"build"][u"file-name"])
-
+ job, build_nr, result[u"build"][u"file-name"]
+ )
self._cfg.set_input_state(job, build_nr, result[u"state"])
mem_alloc = \
logging.info(u"Done.")
+ msg = f"Successful downloads from the sources:\n"
+ for source in self._cfg.environment[u"data-sources"]:
+ if source[u"successful-downloads"]:
+ msg += (
+ f"{source[u'url']}/{source[u'path']}/"
+ f"{source[u'file-name']}: "
+ f"{source[u'successful-downloads']}\n"
+ )
+ logging.info(msg)
+
def process_local_file(self, local_file, job=u"local", build_nr=1,
replace=True):
"""Process local XML file given as a command-line parameter.
u"file-name": local_file
}
if replace:
- self._cfg.builds = dict()
+ self._cfg.input = dict()
self._cfg.add_build(job, build)
logging.info(f"Processing {job}: {build_nr:2d}: {local_file}")
- data = self._parse_tests(job, build, list())
+ data = self._parse_tests(job, build)
if data is None:
raise PresentationError(
f"Error occurred while parsing the file {local_file}"
)
if replace:
- self._cfg.builds = dict()
+ self._cfg.input = dict()
for job, files in local_builds.items():
for idx, local_file in enumerate(files):
if params is None:
params = element.get(u"parameters", None)
if params:
- params.append(u"type")
+ params.extend((u"type", u"status"))
data_to_filter = data if data else element[u"data"]
data = pd.Series()
if params is None:
params = element.get(u"parameters", None)
- if params:
+ if params and u"type" not in params:
params.append(u"type")
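+ # When the element specifies cores, each test name template is typically
+ # expected to contain a "{core}" placeholder which is expanded for every
+ # listed core before matching.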
+ cores = element.get(u"core", None)
+ if cores:
+ tests = list()
+ for core in cores:
+ for test in include:
+ tests.append(test.format(core=core))
+ else:
+ tests = include
+
data = pd.Series()
try:
for job, builds in element[u"data"].items():
data[job] = pd.Series()
for build in builds:
data[job][str(build)] = pd.Series()
- for test in include:
+ for test in tests:
try:
reg_ex = re.compile(str(test).lower())
for test_id in self.data[job][
"""Print all operational data to console.
"""
- tbl_hdr = (
- u"Name",
- u"Nr of Vectors",
- u"Nr of Packets",
- u"Suspends",
- u"Cycles per Packet",
- u"Average Vector Size"
- )
-
for job in self._input_data.values:
for build in job.values:
for test_id, test_data in build[u"tests"].items():
if test_data.get(u"show-run", None) is None:
continue
for dut_name, data in test_data[u"show-run"].items():
- if data.get(u"threads", None) is None:
+ if data.get(u"runtime", None) is None:
continue
+ runtime = loads(data[u"runtime"])
+ try:
+ threads_nr = len(runtime[0][u"clocks"])
+ except (IndexError, KeyError):
+ continue
+ threads = OrderedDict(
+ {idx: list() for idx in range(threads_nr)})
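+ # For each DUT the loop below computes, per thread, cycles per packet
+ # (clocks / vectors, falling back to calls or suspends) and the average
+ # vector size (vectors / calls), skipping graph nodes with no activity.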
+ for item in runtime:
+ for idx in range(threads_nr):
+ if item[u"vectors"][idx] > 0:
+ clocks = item[u"clocks"][idx] / \
+ item[u"vectors"][idx]
+ elif item[u"calls"][idx] > 0:
+ clocks = item[u"clocks"][idx] / \
+ item[u"calls"][idx]
+ elif item[u"suspends"][idx] > 0:
+ clocks = item[u"clocks"][idx] / \
+ item[u"suspends"][idx]
+ else:
+ clocks = 0.0
+
+ if item[u"calls"][idx] > 0:
+ vectors_call = item[u"vectors"][idx] / \
+ item[u"calls"][idx]
+ else:
+ vectors_call = 0.0
+
+ if int(item[u"calls"][idx]) + int(
+ item[u"vectors"][idx]) + \
+ int(item[u"suspends"][idx]):
+ threads[idx].append([
+ item[u"name"],
+ item[u"calls"][idx],
+ item[u"vectors"][idx],
+ item[u"suspends"][idx],
+ clocks,
+ vectors_call
+ ])
+
print(f"Host IP: {data.get(u'host', '')}, "
f"Socket: {data.get(u'socket', '')}")
- for thread_nr, thread in data[u"threads"].items():
- txt_table = prettytable.PrettyTable(tbl_hdr)
+ for thread_nr, thread in threads.items():
+ txt_table = prettytable.PrettyTable(
+ (
+ u"Name",
+ u"Nr of Vectors",
+ u"Nr of Packets",
+ u"Suspends",
+ u"Cycles per Packet",
+ u"Average Vector Size"
+ )
+ )
avg = 0.0
for row in thread:
txt_table.add_row(row)