from datetime import timedelta
from json import loads
+import hdrh.histogram
+import hdrh.codec
import prettytable
import pandas as pd
.. note:: ID is the lowercase full path to the test.
"""
- REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::?\s(\d+.\d+).*\n'
- r'PLRsearch upper bound::?\s(\d+.\d+)')
-
- REGEX_NDRPDR_RATE = re.compile(r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
- r'NDR_UPPER:\s(\d+.\d+).*\n'
- r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
- r'PDR_UPPER:\s(\d+.\d+)')
-
- REGEX_NDRPDR_LAT = re.compile(r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
- r'LATENCY.*\[\'(.*)\', \'(.*)\'\]')
-
- REGEX_TOLERANCE = re.compile(r'^[\D\d]*LOSS_ACCEPTANCE:\s(\d*\.\d*)\s'
- r'[\D\d]*')
-
- REGEX_VERSION_VPP = re.compile(r"(return STDOUT Version:\s*|"
- r"VPP Version:\s*|VPP version:\s*)(.*)")
-
- REGEX_VERSION_DPDK = re.compile(r"(DPDK version:\s*|DPDK Version:\s*)(.*)")
-
- REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s(\d*).*$')
-
- REGEX_MRR = re.compile(r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
- r'tx\s(\d*),\srx\s(\d*)')
-
- REGEX_BMRR = re.compile(r'Maximum Receive Rate trial results'
- r' in packets per second: \[(.*)\]')
-
- REGEX_RECONF_LOSS = re.compile(r'Packets lost due to reconfig: (\d*)')
- REGEX_RECONF_TIME = re.compile(r'Implied time lost: (\d*.[\de-]*)')
-
+ REGEX_PLR_RATE = re.compile(
+ r'PLRsearch lower bound::?\s(\d+.\d+).*\n'
+ r'PLRsearch upper bound::?\s(\d+.\d+)'
+ )
+ REGEX_NDRPDR_RATE = re.compile(
+ r'NDR_LOWER:\s(\d+.\d+).*\n.*\n'
+ r'NDR_UPPER:\s(\d+.\d+).*\n'
+ r'PDR_LOWER:\s(\d+.\d+).*\n.*\n'
+ r'PDR_UPPER:\s(\d+.\d+)'
+ )
+ REGEX_PERF_MSG_INFO = re.compile(
+ r'NDR_LOWER:\s(\d+.\d+)\s([a-zA-Z]*).*\s(\d+.\d+)\s([a-zA-Z]*).*\n'
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\].*\n'
+ r'NDR_UPPER:\s(\d+.\d+)\s([a-zA-Z]*).*\s(\d+.\d+)\s([a-zA-Z]*).*\n'
+ r'PDR_LOWER:\s(\d+.\d+)\s([a-zA-Z]*).*\s(\d+.\d+)\s([a-zA-Z]*).*\n'
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\].*\n'
+ r'PDR_UPPER:\s(\d+.\d+)\s([a-zA-Z]*).*\s(\d+.\d+)\s([a-zA-Z]*)'
+ )
+ # TODO: Remove when not needed
+ REGEX_NDRPDR_LAT_BASE = re.compile(
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\]'
+ )
+ REGEX_NDRPDR_LAT = re.compile(
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]'
+ )
+ # TODO: Remove when not needed
+ REGEX_NDRPDR_LAT_LONG = re.compile(
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
+ r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
+ r'Latency.*\[\'(.*)\', \'(.*)\'\]'
+ )
+ REGEX_VERSION_VPP = re.compile(
+ r"(return STDOUT Version:\s*|"
+ r"VPP Version:\s*|VPP version:\s*)(.*)"
+ )
+ REGEX_VERSION_DPDK = re.compile(
+ r"(DPDK version:\s*|DPDK Version:\s*)(.*)"
+ )
+ REGEX_TCP = re.compile(
+ r'Total\s(rps|cps|throughput):\s(\d*).*$'
+ )
+ REGEX_MRR = re.compile(
+ r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
+ r'tx\s(\d*),\srx\s(\d*)'
+ )
+ REGEX_BMRR = re.compile(
+ r'Maximum Receive Rate trial results'
+ r' in packets per second: \[(.*)\]'
+ )
+ REGEX_RECONF_LOSS = re.compile(
+ r'Packets lost due to reconfig: (\d*)'
+ )
+ REGEX_RECONF_TIME = re.compile(
+ r'Implied time lost: (\d*.[\de-]*)'
+ )
REGEX_TC_TAG = re.compile(r'\d+[tT]\d+[cC]')
REGEX_TC_NAME_OLD = re.compile(r'-\d+[tT]\d+[cC]-')
# 0 - no message
# 1 - PAPI History of DUT1
# 2 - PAPI History of DUT2
- self._lookup_kw_nr = 0
self._conf_history_lookup_nr = 0
- # Number of Show Running messages found
- # 0 - no message
- # 1 - Show run message found
- self._show_run_lookup_nr = 0
-
# Test ID of currently processed test- the lowercase full path to the
# test
self._test_id = None
"""
return self._data
+ def _get_data_from_perf_test_msg(self, msg):
+ """Get
+ - NDR_LOWER
+ - LATENCY
+ - NDR_UPPER
+ - PDR_LOWER
+ - LATENCY
+ - PDR_UPPER
+ from message of NDRPDR performance tests.
+
+ :param msg: Message to be processed.
+ :type msg: str
+ :returns: Processed message or original message if a problem occurs.
+ :rtype: str
+ """
+
+ groups = re.search(self.REGEX_PERF_MSG_INFO, msg)
+ if not groups or groups.lastindex != 20:
+ return msg
+
+ try:
+ data = {
+ u"ndr_low": float(groups.group(1)),
+ u"ndr_low_unit": groups.group(2),
+ u"ndr_low_b": float(groups.group(3)),
+ u"ndr_low_b_unit": groups.group(4),
+ u"ndr_lat_1": groups.group(5),
+ u"ndr_lat_2": groups.group(6),
+ u"ndr_up": float(groups.group(7)),
+ u"ndr_up_unit": groups.group(8),
+ u"ndr_up_b": float(groups.group(9)),
+ u"ndr_up_b_unit": groups.group(10),
+ u"pdr_low": float(groups.group(11)),
+ u"pdr_low_unit": groups.group(12),
+ u"pdr_low_b": float(groups.group(13)),
+ u"pdr_low_b_unit": groups.group(14),
+ u"pdr_lat_1": groups.group(15),
+ u"pdr_lat_2": groups.group(16),
+ u"pdr_up": float(groups.group(17)),
+ u"pdr_up_unit": groups.group(18),
+ u"pdr_up_b": float(groups.group(19)),
+ u"pdr_up_b_unit": groups.group(20)
+ }
+ except (AttributeError, IndexError, ValueError, KeyError):
+ return msg
+
+ def _process_lat(in_str_1, in_str_2):
+ """Extract min, avg, max values from latency string.
+
+ :param in_str_1: Latency string for one direction produced by robot
+ framework.
+ :param in_str_2: Latency string for second direction produced by
+ robot framework.
+ :type in_str_1: str
+ :type in_str_2: str
+ :returns: Processed latency string or original string if a problem
+ occurs.
+ :rtype: tuple(str, str)
+ """
+ in_list_1 = in_str_1.split('/', 3)
+ if len(in_list_1) < 3:
+ return u"Not Measured.", u"Not Measured."
+
+ in_list_2 = in_str_2.split('/', 3)
+ if len(in_list_2) < 3:
+ return u"Not Measured.", u"Not Measured."
+
+ hdr_lat_1 = u""
+ if len(in_list_1) == 4:
+ in_list_1[3] += u"=" * (len(in_list_1[3]) % 4)
+ try:
+ hdr_lat_1 = hdrh.histogram.HdrHistogram.decode(in_list_1[3])
+ except hdrh.codec.HdrLengthException:
+ pass
+ hdr_lat_2 = u""
+ if len(in_list_2) == 4:
+ in_list_2[3] += u"=" * (len(in_list_2[3]) % 4)
+ try:
+ hdr_lat_2 = hdrh.histogram.HdrHistogram.decode(in_list_2[3])
+ except hdrh.codec.HdrLengthException:
+ pass
+
+ hdr_lat = u"Not Measured."
+ if hdr_lat_1 and hdr_lat_2:
+ hdr_lat = (
+ f"50%/90%/99%/99.9%, "
+ f"{hdr_lat_1.get_value_at_percentile(50.0)}/"
+ f"{hdr_lat_1.get_value_at_percentile(90.0)}/"
+ f"{hdr_lat_1.get_value_at_percentile(99.0)}/"
+ f"{hdr_lat_1.get_value_at_percentile(99.9)}, "
+ f"{hdr_lat_2.get_value_at_percentile(50.0)}/"
+ f"{hdr_lat_2.get_value_at_percentile(90.0)}/"
+ f"{hdr_lat_2.get_value_at_percentile(99.0)}/"
+ f"{hdr_lat_2.get_value_at_percentile(99.9)} "
+ f"uSec."
+ )
+
+ return (
+ f"Min/Avg/Max, "
+ f"{in_list_1[0]}/{in_list_1[1]}/{in_list_1[2]}, "
+ f"{in_list_2[0]}/{in_list_2[1]}/{in_list_2[2]} uSec.",
+ hdr_lat
+ )
+
+ try:
+ pdr_lat = _process_lat(data[u'pdr_lat_1'], data[u'pdr_lat_2'])
+ ndr_lat = _process_lat(data[u'ndr_lat_1'], data[u'ndr_lat_2'])
+ return (
+ f"NDR Throughput: {(data[u'ndr_low'] / 1e6):.2f} "
+ f"M{data[u'ndr_low_unit']}, "
+ f"{data[u'ndr_low_b']:.2f} {data[u'ndr_low_b_unit']}.\n"
+ f"One-Way Latency at NDR: {ndr_lat[0]}\n"
+ f"One-Way Latency at NDR by percentiles: {ndr_lat[1]}\n"
+ f"PDR Throughput: {(data[u'pdr_low'] / 1e6):.2f} "
+ f"M{data[u'pdr_low_unit']}, "
+ f"{data[u'pdr_low_b']:.2f} {data[u'pdr_low_b_unit']}.\n"
+ f"One-Way Latency at PDR: {pdr_lat[0]}\n"
+ f"One-Way Latency at PDR by percentiles: {pdr_lat[1]}"
+ )
+ except (AttributeError, IndexError, ValueError, KeyError):
+ return msg
+
def _get_testbed(self, msg):
"""Called when extraction of testbed IP is required.
The testbed is identified by TG node IP address.
:returns: Nothing.
"""
+ if not msg.message.count(u"stats runtime"):
+ return
+
if u"show-run" not in self._data[u"tests"][self._test_id].keys():
- self._data[u"tests"][self._test_id][u"show-run"] = str()
+ self._data[u"tests"][self._test_id][u"show-run"] = dict()
- if msg.message.count(u"stats runtime") or \
- msg.message.count(u"Runtime"):
- try:
- host = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).
- group(1))
- except (AttributeError, IndexError):
- host = self._data[u"tests"][self._test_id][u"show-run"].\
- count(u"DUT:") + 1
- try:
- socket = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).
- group(2))
- socket = f"/{socket}"
- except (AttributeError, IndexError):
- socket = u""
- runtime = loads(
- str(msg.message).
- replace(u' ', u'').
- replace(u'\n', u'').
- replace(u"'", u'"').
- replace(u'b"', u'"').
- replace(u'u"', u'"').
- split(u":", 1)[1]
- )
- try:
- threads_nr = len(runtime[0][u"clocks"])
- except (IndexError, KeyError):
- return
- tbl_hdr = [
- u"Name",
- u"Calls",
- u"Vectors",
- u"Suspends",
- u"Clocks",
- u"Vectors/Calls"
- ]
- table = [[tbl_hdr, ] for _ in range(threads_nr)]
- for item in runtime:
- for idx in range(threads_nr):
- name = format(item[u"name"])
- calls = format(item[u"calls"][idx])
- vectors = format(item[u"vectors"][idx])
- suspends = format(item[u"suspends"][idx])
- if item[u"vectors"][idx] > 0:
- clocks = format(
- item[u"clocks"][idx]/item[u"vectors"][idx], u".2e")
- elif item[u"calls"][idx] > 0:
- clocks = format(
- item[u"clocks"][idx]/item[u"calls"][idx], u".2e")
- elif item[u"suspends"][idx] > 0:
- clocks = format(
- item[u"clocks"][idx]/item[u"suspends"][idx], u".2e")
- else:
- clocks = 0
- if item[u"calls"][idx] > 0:
- vectors_call = format(
- item[u"vectors"][idx]/item[u"calls"][idx], u".2f")
- else:
- vectors_call = format(0, u".2f")
- if int(calls) + int(vectors) + int(suspends):
- table[idx].append([
- name, calls, vectors, suspends, clocks, vectors_call
- ])
- text = ""
+ groups = re.search(self.REGEX_TC_PAPI_CLI, msg.message)
+ if not groups:
+ return
+ try:
+ host = groups.group(1)
+ except (AttributeError, IndexError):
+ host = u""
+ try:
+ sock = groups.group(2)
+ except (AttributeError, IndexError):
+ sock = u""
+
+ runtime = loads(str(msg.message).replace(u' ', u'').replace(u'\n', u'').
+ replace(u"'", u'"').replace(u'b"', u'"').
+ replace(u'u"', u'"').split(u":", 1)[1])
+
+ try:
+ threads_nr = len(runtime[0][u"clocks"])
+ except (IndexError, KeyError):
+ return
+
+ dut = u"DUT{nr}".format(
+ nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
+
+ oper = {
+ u"host": host,
+ u"socket": sock,
+ u"threads": OrderedDict({idx: list() for idx in range(threads_nr)})
+ }
+
+ for item in runtime:
for idx in range(threads_nr):
- text += f"Thread {idx} "
- text += u"vpp_main\n" if idx == 0 else f"vpp_wk_{idx-1}\n"
- txt_table = None
- for row in table[idx]:
- if txt_table is None:
- txt_table = prettytable.PrettyTable(row)
- else:
- if any(row[1:]):
- txt_table.add_row(row)
- txt_table.set_style(prettytable.MSWORD_FRIENDLY)
- txt_table.align[u"Name"] = u"l"
- txt_table.align[u"Calls"] = u"r"
- txt_table.align[u"Vectors"] = u"r"
- txt_table.align[u"Suspends"] = u"r"
- txt_table.align[u"Clocks"] = u"r"
- txt_table.align[u"Vectors/Calls"] = u"r"
-
- text += txt_table.get_string(sortby=u"Name") + u'\n'
- text = f"\n**DUT: {host}{socket}**\n{text}".\
- replace(u'\n', u' |br| ').\
- replace(u'\r', u'').\
- replace(u'"', u"'")
- self._data[u"tests"][self._test_id][u"show-run"] += text
+ if item[u"vectors"][idx] > 0:
+ clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
+ elif item[u"calls"][idx] > 0:
+ clocks = item[u"clocks"][idx] / item[u"calls"][idx]
+ elif item[u"suspends"][idx] > 0:
+ clocks = item[u"clocks"][idx] / item[u"suspends"][idx]
+ else:
+ clocks = 0.0
+
+ if item[u"calls"][idx] > 0:
+ vectors_call = item[u"vectors"][idx] / item[u"calls"][idx]
+ else:
+ vectors_call = 0.0
+
+ if int(item[u"calls"][idx]) + int(item[u"vectors"][idx]) + \
+ int(item[u"suspends"][idx]):
+ oper[u"threads"][idx].append([
+ item[u"name"],
+ item[u"calls"][idx],
+ item[u"vectors"][idx],
+ item[u"suspends"][idx],
+ clocks,
+ vectors_call
+ ])
+
+ self._data[u'tests'][self._test_id][u'show-run'][dut] = copy.copy(oper)
def _get_ndrpdr_throughput(self, msg):
"""Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test
u"PDR": {
u"direction1": copy.copy(latency_default),
u"direction2": copy.copy(latency_default)
- }
+ },
+ u"LAT0": {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ },
+ u"PDR10": {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ },
+ u"PDR50": {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ },
+ u"PDR90": {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ },
}
- status = u"FAIL"
- groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+
+ # TODO: Rewrite when long and base are not needed
+ groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg)
+ if groups is None:
+ groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+ if groups is None:
+ groups = re.search(self.REGEX_NDRPDR_LAT_BASE, msg)
+ if groups is None:
+ return latency, u"FAIL"
def process_latency(in_str):
"""Return object with parsed latency values.
return rval
- if groups is not None:
- try:
- latency[u"NDR"][u"direction1"] = \
- process_latency(groups.group(1))
- latency[u"NDR"][u"direction2"] = \
- process_latency(groups.group(2))
- latency[u"PDR"][u"direction1"] = \
- process_latency(groups.group(3))
- latency[u"PDR"][u"direction2"] = \
- process_latency(groups.group(4))
- status = u"PASS"
- except (IndexError, ValueError):
- pass
+ try:
+ latency[u"NDR"][u"direction1"] = process_latency(groups.group(1))
+ latency[u"NDR"][u"direction2"] = process_latency(groups.group(2))
+ latency[u"PDR"][u"direction1"] = process_latency(groups.group(3))
+ latency[u"PDR"][u"direction2"] = process_latency(groups.group(4))
+ if groups.lastindex == 4:
+ return latency, u"PASS"
+ except (IndexError, ValueError):
+ pass
- return latency, status
+ try:
+ latency[u"PDR90"][u"direction1"] = process_latency(groups.group(5))
+ latency[u"PDR90"][u"direction2"] = process_latency(groups.group(6))
+ latency[u"PDR50"][u"direction1"] = process_latency(groups.group(7))
+ latency[u"PDR50"][u"direction2"] = process_latency(groups.group(8))
+ latency[u"PDR10"][u"direction1"] = process_latency(groups.group(9))
+ latency[u"PDR10"][u"direction2"] = process_latency(groups.group(10))
+ latency[u"LAT0"][u"direction1"] = process_latency(groups.group(11))
+ latency[u"LAT0"][u"direction2"] = process_latency(groups.group(12))
+ if groups.lastindex == 12:
+ return latency, u"PASS"
+ except (IndexError, ValueError):
+ pass
+
+ # TODO: Remove when not needed
+ latency[u"NDR10"] = {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ }
+ latency[u"NDR50"] = {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ }
+ latency[u"NDR90"] = {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
+ }
+ try:
+ latency[u"LAT0"][u"direction1"] = process_latency(groups.group(5))
+ latency[u"LAT0"][u"direction2"] = process_latency(groups.group(6))
+ latency[u"NDR10"][u"direction1"] = process_latency(groups.group(7))
+ latency[u"NDR10"][u"direction2"] = process_latency(groups.group(8))
+ latency[u"NDR50"][u"direction1"] = process_latency(groups.group(9))
+ latency[u"NDR50"][u"direction2"] = process_latency(groups.group(10))
+ latency[u"NDR90"][u"direction1"] = process_latency(groups.group(11))
+ latency[u"NDR90"][u"direction2"] = process_latency(groups.group(12))
+ latency[u"PDR10"][u"direction1"] = process_latency(groups.group(13))
+ latency[u"PDR10"][u"direction2"] = process_latency(groups.group(14))
+ latency[u"PDR50"][u"direction1"] = process_latency(groups.group(15))
+ latency[u"PDR50"][u"direction2"] = process_latency(groups.group(16))
+ latency[u"PDR90"][u"direction1"] = process_latency(groups.group(17))
+ latency[u"PDR90"][u"direction2"] = process_latency(groups.group(18))
+ return latency, u"PASS"
+ except (IndexError, ValueError):
+ pass
+
+ return latency, u"FAIL"
def visit_suite(self, suite):
"""Implements traversing through the suite and its direct children.
replace(u'\r', u'').\
replace(u'[', u' |br| [').\
replace(u' |br| [', u'[', 1)
- test_result[u"msg"] = test.message.\
+ test_result[u"msg"] = self._get_data_from_perf_test_msg(test.message).\
replace(u'\n', u' |br| ').\
replace(u'\r', u'').\
replace(u'"', u"'")
if keyword.type == u"setup":
self.visit_setup_kw(keyword)
elif keyword.type == u"teardown":
- self._lookup_kw_nr = 0
self.visit_teardown_kw(keyword)
else:
- self._lookup_kw_nr = 0
self.visit_test_kw(keyword)
except AttributeError:
pass
:type test_kw: Keyword
:returns: Nothing.
"""
- if test_kw.name.count(u"Show Runtime On All Duts") or
+ if test_kw.name.count(u"Show Runtime On All Duts") or \
test_kw.name.count(u"Show Runtime Counters On All Duts"):
- self._lookup_kw_nr += 1
- self._show_run_lookup_nr = 0
self._msg_type = u"test-show-runtime"
elif test_kw.name.count(u"Install Dpdk Test") and not self._version:
self._msg_type = u"dpdk-version"
"""
try:
- if element[u"filter"] in (u"all", u"template"):
+ if data_set == "suites":
+ cond = u"True"
+ elif element[u"filter"] in (u"all", u"template"):
cond = u"True"
else:
cond = InputData._condition(element[u"filter"])
merged_data[item_id] = item_data
return merged_data
+
    def print_all_oper_data(self):
        """Print all operational data to console.

        Walks jobs -> builds -> tests in self._input_data and, for each test
        with collected "show-run" data, prints per-DUT, per-thread VPP
        runtime tables rendered with prettytable.

        :returns: Nothing.
        """

        # Table header; column order must match the 6-item rows stored under
        # data[u"threads"] (name, calls, vectors, suspends, clocks,
        # vectors/call).
        tbl_hdr = (
            u"Name",
            u"Nr of Vectors",
            u"Nr of Packets",
            u"Suspends",
            u"Cycles per Packet",
            u"Average Vector Size"
        )

        # NOTE(review): `.values` without parentheses — assumes
        # self._input_data and each job are pandas Series, where .values is
        # an attribute; on a plain dict this would be a method object and the
        # loop would misbehave. TODO confirm against the class constructor.
        for job in self._input_data.values:
            for build in job.values:
                for test_id, test_data in build[u"tests"].items():
                    print(f"{test_id}")
                    # Skip tests with no operational data collected.
                    if test_data.get(u"show-run", None) is None:
                        continue
                    for dut_name, data in test_data[u"show-run"].items():
                        if data.get(u"threads", None) is None:
                            continue
                        print(f"Host IP: {data.get(u'host', '')}, "
                              f"Socket: {data.get(u'socket', '')}")
                        for thread_nr, thread in data[u"threads"].items():
                            txt_table = prettytable.PrettyTable(tbl_hdr)
                            avg = 0.0
                            for row in thread:
                                txt_table.add_row(row)
                                # Last column is the vectors/call ratio.
                                avg += row[-1]
                            # `avg` is deliberately reused: numeric
                            # accumulator above, display suffix string below.
                            if len(thread) == 0:
                                avg = u""
                            else:
                                avg = f", Average Vector Size per Node: " \
                                    f"{(avg / len(thread)):.2f}"
                            # Thread 0 is the VPP main thread; the rest are
                            # workers.
                            th_name = u"main" if thread_nr == 0 \
                                else f"worker_{thread_nr}"
                            print(f"{dut_name}, {th_name}{avg}")
                            txt_table.float_format = u".2"
                            txt_table.align = u"r"
                            txt_table.align[u"Name"] = u"l"
                            print(f"{txt_table.get_string()}\n")