Code Review
/
csit.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
review
|
tree
raw
|
inline
| side by side
PAL: fix typo
[csit.git]
/
resources
/
tools
/
presentation
/
input_data_parser.py
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 9408142..67aec2f 100644 (file)
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -263,8 +263,7 @@ class ExecutionChecker(ResultVisitor):
)
REGEX_VERSION_VPP = re.compile(
)
REGEX_VERSION_VPP = re.compile(
- r"(return STDOUT Version:\s*|"
- r"VPP Version:\s*|VPP version:\s*)(.*)"
+ r"(VPP Version:\s*|VPP version:\s*)(.*)"
)
REGEX_VERSION_DPDK = re.compile(
r"(DPDK version:\s*|DPDK Version:\s*)(.*)"
)
REGEX_VERSION_DPDK = re.compile(
r"(DPDK version:\s*|DPDK Version:\s*)(.*)"
@@ -592,14 +591,14 @@ class ExecutionChecker(ResultVisitor):
:returns: Nothing.
"""
:returns: Nothing.
"""
- if msg.message.count(u"return STDOUT Version:") or \
- msg.message.count(u"VPP Version:") or \
- msg.message.count(u"VPP version:"):
+ if msg.message.count(u"VPP version:") or \
+ msg.message.count(u"VPP Version:"):
self._version = str(
re.search(self.REGEX_VERSION_VPP, msg.message).group(2)
)
self._data[u"metadata"][u"version"] = self._version
self._msg_type = None
self._version = str(
re.search(self.REGEX_VERSION_VPP, msg.message).group(2)
)
self._data[u"metadata"][u"version"] = self._version
self._msg_type = None
+ logging.info(self._version)
def _get_dpdk_version(self, msg):
"""Called when extraction of DPDK version is required.
def _get_dpdk_version(self, msg):
"""Called when extraction of DPDK version is required.
@@ -1043,7 +1042,7 @@ class ExecutionChecker(ResultVisitor):
u"level": len(suite.longname.split(u"."))
}
u"level": len(suite.longname.split(u"."))
}
- suite.body.visit(self)
+ suite.setup.visit(self)
def end_suite(self, suite):
"""Called when suite ends.
def end_suite(self, suite):
"""Called when suite ends.
@@ -1211,11 +1210,6 @@ class ExecutionChecker(ResultVisitor):
if test.status == u"PASS":
test_result[u"result"], test_result[u"status"] = \
self._get_hoststack_data(test.message, tags)
if test.status == u"PASS":
test_result[u"result"], test_result[u"status"] = \
self._get_hoststack_data(test.message, tags)
- # elif u"TCP" in tags: # This might be not used
- # test_result[u"type"] = u"TCP"
- # if test.status == u"PASS":
- # groups = re.search(self.REGEX_TCP, test.message)
- # test_result[u"result"] = int(groups.group(2))
elif u"RECONF" in tags:
test_result[u"type"] = u"RECONF"
if test.status == u"PASS":
elif u"RECONF" in tags:
test_result[u"type"] = u"RECONF"
if test.status == u"PASS":
@@ -1327,7 +1321,7 @@ class ExecutionChecker(ResultVisitor):
:type setup_kw: Keyword
:returns: Nothing.
"""
:type setup_kw: Keyword
:returns: Nothing.
"""
- for keyword in setup_kw.body:
+ for keyword in setup_kw.setup:
if self.start_setup_kw(keyword) is not False:
self.visit_setup_kw(keyword)
self.end_setup_kw(keyword)
if self.start_setup_kw(keyword) is not False:
self.visit_setup_kw(keyword)
self.end_setup_kw(keyword)
@@ -1455,7 +1449,7 @@ class InputData:
self._for_output = for_output
# Data store:
self._for_output = for_output
# Data store:
- self._input_data = pd.Series()
+ self._input_data = pd.Series(dtype="float64")
@property
def data(self):
@property
def data(self):
@@ -1543,11 +1537,11 @@ class InputData:
result.visit(checker)
checker.data[u"metadata"][u"tests_total"] = \
result.visit(checker)
checker.data[u"metadata"][u"tests_total"] = \
- result.statistics.total.all.total
+ result.statistics.total.total
checker.data[u"metadata"][u"tests_passed"] = \
checker.data[u"metadata"][u"tests_passed"] = \
- result.statistics.total.all.passed
+ result.statistics.total.passed
checker.data[u"metadata"][u"tests_failed"] = \
checker.data[u"metadata"][u"tests_failed"] = \
- result.statistics.total.all.failed
+ result.statistics.total.failed
checker.data[u"metadata"][u"elapsedtime"] = result.suite.elapsedtime
checker.data[u"metadata"][u"generated"] = result.suite.endtime[:14]
checker.data[u"metadata"][u"elapsedtime"] = result.suite.elapsedtime
checker.data[u"metadata"][u"generated"] = result.suite.endtime[:14]
@@ -1670,7 +1664,7 @@ class InputData:
})
if self._input_data.get(job, None) is None:
})
if self._input_data.get(job, None) is None:
- self._input_data[job] = pd.Series()
+ self._input_data[job] = pd.Series(dtype="float64")
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_file_name(
job, build_nr, result[u"build"][u"file-name"]
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_file_name(
job, build_nr, result[u"build"][u"file-name"]
@@ -1749,7 +1743,7 @@ class InputData:
})
if self._input_data.get(job, None) is None:
})
if self._input_data.get(job, None) is None:
- self._input_data[job] = pd.Series()
+ self._input_data[job] = pd.Series(dtype="float64")
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_state(job, build_nr, u"processed")
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_state(job, build_nr, u"processed")
@@ -1906,12 +1900,12 @@ class InputData:
params.extend((u"type", u"status"))
data_to_filter = data if data else element[u"data"]
params.extend((u"type", u"status"))
data_to_filter = data if data else element[u"data"]
- data = pd.Series()
+ data = pd.Series(dtype="float64")
try:
for job, builds in data_to_filter.items():
try:
for job, builds in data_to_filter.items():
- data[job] = pd.Series()
+ data[job] = pd.Series(dtype="float64")
for build in builds:
for build in builds:
- data[job][str(build)] = pd.Series()
+ data[job][str(build)] = pd.Series(dtype="float64")
try:
data_dict = dict(
self.data[job][str(build)][data_set].items())
try:
data_dict = dict(
self.data[job][str(build)][data_set].items())
@@ -1922,7 +1916,8 @@ class InputData:
for test_id, test_data in data_dict.items():
if eval(cond, {u"tags": test_data.get(u"tags", u"")}):
for test_id, test_data in data_dict.items():
if eval(cond, {u"tags": test_data.get(u"tags", u"")}):
- data[job][str(build)][test_id] = pd.Series()
+ data[job][str(build)][test_id] = \
+ pd.Series(dtype="float64")
if params is None:
for param, val in test_data.items():
data[job][str(build)][test_id][param] = val
if params is None:
for param, val in test_data.items():
data[job][str(build)][test_id][param] = val
@@ -2006,12 +2001,12 @@ class InputData:
else:
tests = include
else:
tests = include
- data = pd.Series()
+ data = pd.Series(dtype="float64")
try:
for job, builds in element[u"data"].items():
try:
for job, builds in element[u"data"].items():
- data[job] = pd.Series()
+ data[job] = pd.Series(dtype="float64")
for build in builds:
for build in builds:
- data[job][str(build)] = pd.Series()
+ data[job][str(build)] = pd.Series(dtype="float64")
for test in tests:
try:
reg_ex = re.compile(str(test).lower())
for test in tests:
try:
reg_ex = re.compile(str(test).lower())
@@ -2020,7 +2015,8 @@ class InputData:
if re.match(reg_ex, str(test_id).lower()):
test_data = self.data[job][
str(build)][data_set][test_id]
if re.match(reg_ex, str(test_id).lower()):
test_data = self.data[job][
str(build)][data_set][test_id]
- data[job][str(build)][test_id] = pd.Series()
+ data[job][str(build)][test_id] = \
+ pd.Series(dtype="float64")
if params is None:
for param, val in test_data.items():
data[job][str(build)][test_id]\
if params is None:
for param, val in test_data.items():
data[job][str(build)][test_id]\
@@ -2075,7 +2071,7 @@ class InputData:
logging.info(u" Merging data ...")
logging.info(u" Merging data ...")
- merged_data = pd.Series()
+ merged_data = pd.Series(dtype="float64")
for builds in data.values:
for item in builds.values:
for item_id, item_data in item.items():
for builds in data.values:
for item in builds.values:
for item_id, item_data in item.items():