-# Copyright (c) 2020 Cisco and/or its affiliates.
+# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
r'Latency at 50% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
r'Latency at 10% PDR:.*\[\'(.*)\', \'(.*)\'\].*\n'
)
+    # Extract NDR_LOWER / PDR_LOWER rates from CPS test messages.
+    # NOTE: the decimal point must be escaped (\.) — a bare '.' matches any
+    # character, so '(\d+.\d+)' could capture across unintended characters.
+    REGEX_CPS_MSG_INFO = re.compile(
+        r'NDR_LOWER:\s(\d+\.\d+)\s.*\s.*\n.*\n.*\n'
+        r'PDR_LOWER:\s(\d+\.\d+)\s.*\s.*\n.*\n.*'
+    )
+    # Same as above for PPS tests; the second group on each line captures the
+    # second numeric value reported after the rate.
+    REGEX_PPS_MSG_INFO = re.compile(
+        r'NDR_LOWER:\s(\d+\.\d+)\s.*\s(\d+\.\d+)\s.*\n.*\n.*\n'
+        r'PDR_LOWER:\s(\d+\.\d+)\s.*\s(\d+\.\d+)\s.*\n.*\n.*'
+    )
REGEX_MRR_MSG_INFO = re.compile(r'.*\[(.*)\]')
- # TODO: Remove when not needed
+ # Needed for CPS and PPS tests
REGEX_NDRPDR_LAT_BASE = re.compile(
r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
r'LATENCY.*\[\'(.*)\', \'(.*)\'\]'
r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
r'Latency.*\[\'(.*)\', \'(.*)\'\]'
)
- # TODO: Remove when not needed
- REGEX_NDRPDR_LAT_LONG = re.compile(
- r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n.*\n'
- r'LATENCY.*\[\'(.*)\', \'(.*)\'\]\s\n.*\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]\s\n'
- r'Latency.*\[\'(.*)\', \'(.*)\'\]'
- )
+
REGEX_VERSION_VPP = re.compile(
r"(return STDOUT Version:\s*|"
r"VPP Version:\s*|VPP version:\s*)(.*)"
r'tx\s(\d*),\srx\s(\d*)'
)
REGEX_BMRR = re.compile(
- r'Maximum Receive Rate trial results'
- r' in packets per second: \[(.*)\]'
+ r'.*trial results.*: \[(.*)\]'
)
REGEX_RECONF_LOSS = re.compile(
r'Packets lost due to reconfig: (\d*)'
except (AttributeError, IndexError, ValueError, KeyError):
return u"Test Failed."
+    def _get_data_from_cps_test_msg(self, msg):
+        """Extract NDR and PDR rates from the message of an NDRPDR CPS test.
+
+        :param msg: Message to be processed.
+        :type msg: str
+        :returns: Processed message or "Test Failed." if a problem occurs.
+        :rtype: str
+        """
+
+        match = re.search(self.REGEX_CPS_MSG_INFO, msg)
+        # Exactly two captured values (NDR, PDR) are expected.
+        if match is None or match.lastindex != 2:
+            return u"Test Failed."
+
+        try:
+            # Scale both rates down by 1e6 for a compact two-line summary.
+            ndr = float(match.group(1)) / 1e6
+            pdr = float(match.group(2)) / 1e6
+            return f"1. {ndr:5.2f}\n2. {pdr:5.2f}"
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return u"Test Failed."
+
+    def _get_data_from_pps_test_msg(self, msg):
+        """Extract NDR and PDR values from the message of an NDRPDR PPS test.
+
+        :param msg: Message to be processed.
+        :type msg: str
+        :returns: Processed message or "Test Failed." if a problem occurs.
+        :rtype: str
+        """
+
+        match = re.search(self.REGEX_PPS_MSG_INFO, msg)
+        # Four captures expected: two values for NDR, two for PDR.
+        if match is None or match.lastindex != 4:
+            return u"Test Failed."
+
+        try:
+            # First value of each pair is scaled by 1e6; second is kept as-is.
+            ndr = (float(match.group(1)) / 1e6, float(match.group(2)))
+            pdr = (float(match.group(3)) / 1e6, float(match.group(4)))
+            return (
+                f"1. {ndr[0]:5.2f} {ndr[1]:5.2f}\n"
+                f"2. {pdr[0]:5.2f} {pdr[1]:5.2f}"
+            )
+        except (AttributeError, IndexError, ValueError, KeyError):
+            return u"Test Failed."
+
def _get_data_from_perf_test_msg(self, msg):
"""Get info from message of NDRPDR performance tests.
:param msg: Message to be processed.
:type msg: str
- :returns: Processed message or original message if a problem occurs.
+ :returns: Processed message or "Test Failed." if a problem occurs.
:rtype: str
"""
if msg.message.count(u"return STDOUT Version:") or \
msg.message.count(u"VPP Version:") or \
msg.message.count(u"VPP version:"):
- self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message).
- group(2))
+ self._version = str(
+ re.search(self.REGEX_VERSION_VPP, msg.message).group(2)
+ )
self._data[u"metadata"][u"version"] = self._version
self._msg_type = None
self._data[u"tests"][self._test_id][u"conf-history"] = str()
else:
self._msg_type = None
- text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
- r"VAT command history:", u"",
- msg.message, count=1).replace(u'\n', u' |br| ').\
- replace(u'"', u"'")
+ text = re.sub(
+ r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} VAT command history:",
+ u"",
+ msg.message,
+ count=1
+ ).replace(u'\n', u' |br| ').replace(u'"', u"'")
self._data[u"tests"][self._test_id][u"conf-history"] += (
f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
self._data[u"tests"][self._test_id][u"conf-history"] = str()
else:
self._msg_type = None
- text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
- r"PAPI command history:", u"",
- msg.message, count=1).replace(u'\n', u' |br| ').\
- replace(u'"', u"'")
+ text = re.sub(
+ r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} PAPI command history:",
+ u"",
+ msg.message,
+ count=1
+ ).replace(u'\n', u' |br| ').replace(u'"', u"'")
self._data[u"tests"][self._test_id][u"conf-history"] += (
f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
)
},
}
- # TODO: Rewrite when long and base are not needed
- groups = re.search(self.REGEX_NDRPDR_LAT_LONG, msg)
- if groups is None:
- groups = re.search(self.REGEX_NDRPDR_LAT, msg)
+ groups = re.search(self.REGEX_NDRPDR_LAT, msg)
if groups is None:
groups = re.search(self.REGEX_NDRPDR_LAT_BASE, msg)
if groups is None:
name = test.name.lower()
# Remove TC number from the TC long name (backward compatibility):
- self._test_id = re.sub(self.REGEX_TC_NUMBER, u"", longname)
+ self._test_id = re.sub(
+ self.REGEX_TC_NUMBER, u"", longname.replace(u"snat", u"nat")
+ )
# Remove TC number from the TC name (not needed):
- test_result[u"name"] = re.sub(self.REGEX_TC_NUMBER, "", name)
+ test_result[u"name"] = re.sub(
+ self.REGEX_TC_NUMBER, "", name.replace(u"snat", u"nat")
+ )
- test_result[u"parent"] = test.parent.name.lower()
+ test_result[u"parent"] = test.parent.name.lower().\
+ replace(u"snat", u"nat")
test_result[u"tags"] = tags
test_result["doc"] = test.doc.\
replace(u'"', u"'").\
if test.status == u"PASS":
if u"NDRPDR" in tags:
- test_result[u"msg"] = self._get_data_from_perf_test_msg(
- test.message).replace(u'\n', u' |br| ').\
- replace(u'\r', u'').replace(u'"', u"'")
+ if u"TCP_PPS" in tags or u"UDP_PPS" in tags:
+ test_result[u"msg"] = self._get_data_from_pps_test_msg(
+ test.message).replace(u'\n', u' |br| '). \
+ replace(u'\r', u'').replace(u'"', u"'")
+ elif u"TCP_CPS" in tags or u"UDP_CPS" in tags:
+ test_result[u"msg"] = self._get_data_from_cps_test_msg(
+ test.message).replace(u'\n', u' |br| '). \
+ replace(u'\r', u'').replace(u'"', u"'")
+ else:
+ test_result[u"msg"] = self._get_data_from_perf_test_msg(
+ test.message).replace(u'\n', u' |br| ').\
+ replace(u'\r', u'').replace(u'"', u"'")
elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
test_result[u"msg"] = self._get_data_from_mrr_test_msg(
test.message).replace(u'\n', u' |br| ').\
return
if test.status == u"PASS":
- if u"NDRPDR" in tags:
- test_result[u"type"] = u"NDRPDR"
+ if u"DEVICETEST" in tags:
+ test_result[u"type"] = u"DEVICETEST"
+ elif u"NDRPDR" in tags:
+ if u"TCP_CPS" in tags or u"UDP_CPS" in tags:
+ test_result[u"type"] = u"CPS"
+ else:
+ test_result[u"type"] = u"NDRPDR"
test_result[u"throughput"], test_result[u"status"] = \
self._get_ndrpdr_throughput(test.message)
test_result[u"gbps"], test_result[u"status"] = \
self._get_ndrpdr_throughput_gbps(test.message)
test_result[u"latency"], test_result[u"status"] = \
self._get_ndrpdr_latency(test.message)
- elif u"SOAK" in tags:
- test_result[u"type"] = u"SOAK"
- test_result[u"throughput"], test_result[u"status"] = \
- self._get_plr_throughput(test.message)
- elif u"HOSTSTACK" in tags:
- test_result[u"type"] = u"HOSTSTACK"
- test_result[u"result"], test_result[u"status"] = \
- self._get_hoststack_data(test.message, tags)
- elif u"TCP" in tags:
- test_result[u"type"] = u"TCP"
- groups = re.search(self.REGEX_TCP, test.message)
- test_result[u"result"] = int(groups.group(2))
elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
if u"MRR" in tags:
test_result[u"type"] = u"MRR"
if groups is not None:
items_str = groups.group(1)
items_float = [
- float(item.strip()) for item in items_str.split(",")
+ float(item.strip().replace(u"'", u""))
+ for item in items_str.split(",")
]
# Use whole list in CSIT-1180.
stats = jumpavg.AvgStdevStats.for_runs(items_float)
+ test_result[u"result"][u"samples"] = items_float
test_result[u"result"][u"receive-rate"] = stats.avg
test_result[u"result"][u"receive-stdev"] = stats.stdev
else:
groups = re.search(self.REGEX_MRR, test.message)
test_result[u"result"][u"receive-rate"] = \
float(groups.group(3)) / float(groups.group(1))
+ elif u"SOAK" in tags:
+ test_result[u"type"] = u"SOAK"
+ test_result[u"throughput"], test_result[u"status"] = \
+ self._get_plr_throughput(test.message)
+ elif u"HOSTSTACK" in tags:
+ test_result[u"type"] = u"HOSTSTACK"
+ test_result[u"result"], test_result[u"status"] = \
+ self._get_hoststack_data(test.message, tags)
+ elif u"TCP" in tags:
+ test_result[u"type"] = u"TCP"
+ groups = re.search(self.REGEX_TCP, test.message)
+ test_result[u"result"] = int(groups.group(2))
elif u"RECONF" in tags:
test_result[u"type"] = u"RECONF"
test_result[u"result"] = None
}
except (AttributeError, IndexError, ValueError, TypeError):
test_result[u"status"] = u"FAIL"
- elif u"DEVICETEST" in tags:
- test_result[u"type"] = u"DEVICETEST"
else:
test_result[u"status"] = u"FAIL"
self._data[u"tests"][self._test_id] = test_result
:returns: Nothing.
"""
if test_kw.name.count(u"Show Runtime On All Duts") or \
- test_kw.name.count(u"Show Runtime Counters On All Duts"):
+ test_kw.name.count(u"Show Runtime Counters On All Duts") or \
+ test_kw.name.count(u"Vpp Show Runtime On All Duts"):
self._msg_type = u"test-show-runtime"
self._sh_run_counter += 1
else:
f"Error occurred while parsing output.xml: {repr(err)}"
)
return None
- checker = ExecutionChecker(metadata, self._cfg.mapping,
- self._cfg.ignore)
+ checker = ExecutionChecker(
+ metadata, self._cfg.mapping, self._cfg.ignore
+ )
result.visit(checker)
return checker.data
:type repeat: int
"""
- logging.info(f" Processing the job/build: {job}: {build[u'build']}")
+ logging.info(f"Processing the job/build: {job}: {build[u'build']}")
state = u"failed"
success = False
f"Skipped."
)
if success:
- logging.info(f" Processing data from build {build[u'build']}")
+ logging.info(f" Processing data from build {build[u'build']}")
data = self._parse_tests(job, build)
if data is None:
logging.error(
# If the time-period is defined in the specification file, remove all
# files which are outside the time period.
is_last = False
- timeperiod = self._cfg.input.get(u"time-period", None)
+ timeperiod = self._cfg.environment.get(u"time-period", None)
if timeperiod and data:
now = dt.utcnow()
timeperiod = timedelta(int(timeperiod))
data = None
is_last = True
logging.info(
- f" The build {job}/{build[u'build']} is "
+ f" The build {job}/{build[u'build']} is "
f"outdated, will be removed."
)
- logging.info(u" Done.")
-
return {
u"data": data,
u"state": state,
logging.info(u"Downloading and parsing input files ...")
- for job, builds in self._cfg.builds.items():
+ for job, builds in self._cfg.input.items():
for build in builds:
result = self._download_and_parse_build(job, build, repeat)
if self._input_data.get(job, None) is None:
self._input_data[job] = pd.Series()
self._input_data[job][str(build_nr)] = build_data
-
self._cfg.set_input_file_name(
- job, build_nr, result[u"build"][u"file-name"])
-
+ job, build_nr, result[u"build"][u"file-name"]
+ )
self._cfg.set_input_state(job, build_nr, result[u"state"])
mem_alloc = \
logging.info(u"Done.")
+ msg = f"Successful downloads from the sources:\n"
+ for source in self._cfg.environment[u"data-sources"]:
+ if source[u"successful-downloads"]:
+ msg += (
+ f"{source[u'url']}/{source[u'path']}/"
+ f"{source[u'file-name']}: "
+ f"{source[u'successful-downloads']}\n"
+ )
+ logging.info(msg)
+
def process_local_file(self, local_file, job=u"local", build_nr=1,
replace=True):
"""Process local XML file given as a command-line parameter.
u"file-name": local_file
}
if replace:
- self._cfg.builds = dict()
+ self._cfg.input = dict()
self._cfg.add_build(job, build)
logging.info(f"Processing {job}: {build_nr:2d}: {local_file}")
)
if replace:
- self._cfg.builds = dict()
+ self._cfg.input = dict()
for job, files in local_builds.items():
for idx, local_file in enumerate(files):
if params is None:
params = element.get(u"parameters", None)
- if params:
+ if params and u"type" not in params:
params.append(u"type")
+ cores = element.get(u"core", None)
+ if cores:
+ tests = list()
+ for core in cores:
+ for test in include:
+ tests.append(test.format(core=core))
+ else:
+ tests = include
+
data = pd.Series()
try:
for job, builds in element[u"data"].items():
data[job] = pd.Series()
for build in builds:
data[job][str(build)] = pd.Series()
- for test in include:
+ for test in tests:
try:
reg_ex = re.compile(str(test).lower())
for test_id in self.data[job][