X-Git-Url: https://gerrit.fd.io/r/gitweb?p=csit.git;a=blobdiff_plain;f=resources%2Ftools%2Fpresentation%2Finput_data_parser.py;h=7f1ccd368bb1b43ec79e899da82f726abbe9f4d4;hp=1e5e880340622a495e42b3f869f0cae1c9d77475;hb=b7fccd32e5139b98976a7b4f44ef7eed9248f1b5;hpb=95bdd3ad2976e923c71fbfa2c5f45dcf9cc71935

diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 1e5e880340..7f1ccd368b 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -1043,7 +1043,7 @@ class ExecutionChecker(ResultVisitor):
             u"level": len(suite.longname.split(u"."))
         }
 
-        suite.keywords.visit(self)
+        suite.setup.visit(self)
 
     def end_suite(self, suite):
         """Called when suite ends.
@@ -1061,7 +1061,7 @@ class ExecutionChecker(ResultVisitor):
         :returns: Nothing.
         """
         if self.start_test(test) is not False:
-            test.keywords.visit(self)
+            test.body.visit(self)
             self.end_test(test)
 
     def start_test(self, test):
@@ -1202,20 +1202,15 @@ class ExecutionChecker(ResultVisitor):
                 if test.status == u"PASS":
                     test_result[u"throughput"], test_result[u"status"] = \
                         self._get_plr_throughput(test.message)
+            elif u"LDP_NGINX" in tags:
+                test_result[u"type"] = u"LDP_NGINX"
+                test_result[u"result"], test_result[u"status"] = \
+                    self._get_vsap_data(test.message, tags)
             elif u"HOSTSTACK" in tags:
                 test_result[u"type"] = u"HOSTSTACK"
                 if test.status == u"PASS":
                     test_result[u"result"], test_result[u"status"] = \
                         self._get_hoststack_data(test.message, tags)
-            elif u"LDP_NGINX" in tags:
-                test_result[u"type"] = u"LDP_NGINX"
-                test_result[u"result"], test_result[u"status"] = \
-                    self._get_vsap_data(test.message, tags)
-            # elif u"TCP" in tags:  # This might be not used
-            #     test_result[u"type"] = u"TCP"
-            #     if test.status == u"PASS":
-            #         groups = re.search(self.REGEX_TCP, test.message)
-            #         test_result[u"result"] = int(groups.group(2))
             elif u"RECONF" in tags:
                 test_result[u"type"] = u"RECONF"
                 if test.status == u"PASS":
@@ -1285,7 +1280,7 @@ class ExecutionChecker(ResultVisitor):
         :type test_kw: Keyword
         :returns: Nothing.
         """
-        for keyword in test_kw.keywords:
+        for keyword in test_kw.body:
             if self.start_test_kw(keyword) is not False:
                 self.visit_test_kw(keyword)
                 self.end_test_kw(keyword)
@@ -1327,7 +1322,7 @@ class ExecutionChecker(ResultVisitor):
         :type setup_kw: Keyword
         :returns: Nothing.
         """
-        for keyword in setup_kw.keywords:
+        for keyword in setup_kw.body:
             if self.start_setup_kw(keyword) is not False:
                 self.visit_setup_kw(keyword)
                 self.end_setup_kw(keyword)
@@ -1368,7 +1363,7 @@ class ExecutionChecker(ResultVisitor):
         :type teardown_kw: Keyword
         :returns: Nothing.
         """
-        for keyword in teardown_kw.keywords:
+        for keyword in teardown_kw.body:
             if self.start_teardown_kw(keyword) is not False:
                 self.visit_teardown_kw(keyword)
                 self.end_teardown_kw(keyword)
@@ -1455,7 +1450,7 @@ class InputData:
         self._for_output = for_output
 
         # Data store:
-        self._input_data = pd.Series()
+        self._input_data = pd.Series(dtype="object")
 
     @property
     def data(self):
@@ -1531,12 +1526,12 @@ class InputData:
         process_oper = False
         if u"-vpp-perf-report-coverage-" in job:
             process_oper = True
-        elif u"-vpp-perf-report-iterative-" in job:
-            # Exceptions for TBs where we do not have coverage data:
-            for item in (u"-2n-icx", ):
-                if item in job:
-                    process_oper = True
-                    break
+        # elif u"-vpp-perf-report-iterative-" in job:
+        #     # Exceptions for TBs where we do not have coverage data:
+        #     for item in (u"-2n-icx", ):
+        #         if item in job:
+        #             process_oper = True
+        #             break
         checker = ExecutionChecker(
             metadata, self._cfg.mapping, self._cfg.ignore, process_oper
         )
@@ -1670,7 +1665,7 @@ class InputData:
                     })
                     if self._input_data.get(job, None) is None:
-                        self._input_data[job] = pd.Series()
+                        self._input_data[job] = pd.Series(dtype="object")
                     self._input_data[job][str(build_nr)] = build_data
                     self._cfg.set_input_file_name(
                         job, build_nr, result[u"build"][u"file-name"]
                     )
@@ -1749,7 +1744,7 @@ class InputData:
         })
 
         if self._input_data.get(job, None) is None:
-            self._input_data[job] = pd.Series()
+            self._input_data[job] = pd.Series(dtype="object")
        self._input_data[job][str(build_nr)] = build_data
 
         self._cfg.set_input_state(job, build_nr, u"processed")
@@ -1906,12 +1901,12 @@ class InputData:
                 params.extend((u"type", u"status"))
 
         data_to_filter = data if data else element[u"data"]
-        data = pd.Series()
+        data = pd.Series(dtype="object")
         try:
             for job, builds in data_to_filter.items():
-                data[job] = pd.Series()
+                data[job] = pd.Series(dtype="object")
                 for build in builds:
-                    data[job][str(build)] = pd.Series()
+                    data[job][str(build)] = pd.Series(dtype="object")
                     try:
                         data_dict = dict(
                             self.data[job][str(build)][data_set].items())
@@ -1922,7 +1917,8 @@ class InputData:
 
                     for test_id, test_data in data_dict.items():
                         if eval(cond, {u"tags": test_data.get(u"tags", u"")}):
-                            data[job][str(build)][test_id] = pd.Series()
+                            data[job][str(build)][test_id] = \
+                                pd.Series(dtype="object")
                             if params is None:
                                 for param, val in test_data.items():
                                     data[job][str(build)][test_id][param] = val
@@ -2006,12 +2002,12 @@ class InputData:
         else:
             tests = include
 
-        data = pd.Series()
+        data = pd.Series(dtype="object")
         try:
             for job, builds in element[u"data"].items():
-                data[job] = pd.Series()
+                data[job] = pd.Series(dtype="object")
                 for build in builds:
-                    data[job][str(build)] = pd.Series()
+                    data[job][str(build)] = pd.Series(dtype="object")
                     for test in tests:
                         try:
                             reg_ex = re.compile(str(test).lower())
@@ -2020,7 +2016,8 @@ class InputData:
                             if re.match(reg_ex, str(test_id).lower()):
                                 test_data = self.data[job][
                                     str(build)][data_set][test_id]
-                                data[job][str(build)][test_id] = pd.Series()
+                                data[job][str(build)][test_id] = \
+                                    pd.Series(dtype="object")
                                 if params is None:
                                     for param, val in test_data.items():
                                         data[job][str(build)][test_id]\
@@ -2075,7 +2072,7 @@ class InputData:
 
         logging.info(u"    Merging data ...")
 
-        merged_data = pd.Series()
+        merged_data = pd.Series(dtype="object")
         for builds in data.values:
             for item in builds.values:
                 for item_id, item_data in item.items():