1 # Copyright (c) 2021 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
"""Convert output_info.xml files into JSON structures.

The json structure is defined in https://gerrit.fd.io/r/c/csit/+/28992
"""
import gzip
import json
import logging
import os
import re

from copy import deepcopy
from os.path import join
from shutil import rmtree

from pal_utils import get_files
36 """A Class storing and manipulating data from tests.
39 def __init__(self, template=None):
42 :param template: JSON formatted template used to store data. It can
43 include default values.
47 self._template = deepcopy(template)
48 self._data = self._template if self._template else dict()
51 """Return a string with human readable data.
53 :returns: Readable description.
56 return str(self._data)
59 """Return a string executable as Python constructor call.
61 :returns: Executable constructor call.
64 return f"JSONData(template={self._template!r})"
70 :return: Data stored in the object.
75 def add_element(self, value, path_to_value):
76 """Add an element to the json structure.
78 :param value: Element value.
79 :param path_to_value: List of tuples where the first item is the element
80 on the path and the second one is its type.
81 :type value: dict, list, str, int, float, bool
82 :type path_to_value: list
83 :raises: IndexError if the path is empty.
84 :raises: TypeError if the val is of not supported type.
87 def _add_element(val, path, structure):
88 """Add an element to the given path.
90 :param val: Element value.
91 :param path: List of tuples where the first item is the element
92 on the path and the second one is its type.
93 :param structure: The structure where the element is added.
94 :type val: dict, list, str, int, float, bool
97 :raises TypeError if there is a wrong type in the path.
100 if isinstance(structure, dict):
101 if path[0][1] is dict:
102 if path[0][0] not in structure:
103 structure[path[0][0]] = dict()
104 structure[path[0][0]].update(val)
105 elif path[0][1] is list:
106 if path[0][0] not in structure:
107 structure[path[0][0]] = list()
108 if isinstance(val, list):
109 structure[path[0][0]].extend(val)
111 structure[path[0][0]].append(val)
113 structure[path[0][0]] = val
114 elif isinstance(structure, list):
115 if path[0][0] == -1 or path[0][0] >= len(structure):
116 if isinstance(val, list):
117 structure.extend(val)
119 structure.append(val)
121 structure[path[0][0]] = val
124 if isinstance(structure, dict):
125 if path[0][1] is dict:
126 if path[0][0] not in structure:
127 structure[path[0][0]] = dict()
128 elif path[0][1] is list:
129 if path[0][0] not in structure:
130 structure[path[0][0]] = list()
131 elif isinstance(structure, list):
132 if path[0][0] == -1 or path[0][0] >= len(structure):
133 if path[0][1] is list:
134 structure.append(list())
135 elif path[0][1] is dict:
136 structure.append(dict())
139 path[0][0] = len(structure) - 1
142 u"Only the last item in the path can be different type "
143 u"then list or dictionary."
145 _add_element(val, path[1:], structure[path[0][0]])
147 if not isinstance(value, (dict, list, str, int, float, bool)):
149 u"The value must be one of these types: dict, list, str, int, "
152 f"Path: {path_to_value}"
154 _add_element(deepcopy(value), path_to_value, self._data)
156 def get_element(self, path):
157 """Get the element specified by the path.
159 :param path: List of keys and indices to the requested element or
162 :returns: Element specified by the path.
165 raise NotImplementedError
167 def dump(self, file_out, indent=None):
168 """Write JSON data to a file.
170 :param file_out: Path to the output JSON file.
171 :param indent: Indentation of items in JSON string. It is directly
172 passed to json.dump method.
177 with open(file_out, u"w") as file_handler:
178 json.dump(self._data, file_handler, indent=indent)
179 except OSError as err:
180 logging.warning(f"{repr(err)} Skipping")
182 def load(self, file_in):
183 """Load JSON data from a file.
185 :param file_in: Path to the input JSON file.
187 :raises: ValueError if the data being deserialized is not a valid
189 :raises: IOError if the file is not found or corrupted.
191 with open(file_in, u"r") as file_handler:
192 self._data = json.load(file_handler)
def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
    """Export data from a test to a json structure.

    :param tid: Test ID.
    :param in_data: Test data.
    :param out: Path to output json file.
    :param template: JSON template with optional default values.
    :param metadata: Data which are not stored in XML structure.
    :type tid: str
    :type in_data: dict
    :type out: str
    :type template: dict
    :type metadata: dict
    """

    p_metadata = [(u"metadata", dict), ]
    p_test = [(u"test", dict), ]
    p_log = [(u"log", list), (-1, list)]

    data = JSONData(template=template)

    data.add_element({u"suite-id": metadata.pop(u"suite-id", u"")}, p_metadata)
    data.add_element(
        {u"suite-doc": metadata.pop(u"suite-doc", u"")}, p_metadata
    )
    data.add_element({u"testbed": metadata.pop(u"testbed", u"")}, p_metadata)
    data.add_element(
        {u"sut-version": metadata.pop(u"sut-version", u"")}, p_metadata
    )

    data.add_element({u"test-id": tid}, p_test)
    t_type = in_data.get(u"type", u"")
    t_type = u"NDRPDR" if t_type == u"CPS" else t_type  # It is NDRPDR
    data.add_element({u"test-type": t_type}, p_test)
    tags = in_data.get(u"tags", list())
    data.add_element({u"tags": tags}, p_test)
    data.add_element(
        {u"documentation": in_data.get(u"documentation", u"")}, p_test
    )
    data.add_element({u"message": in_data.get(u"msg", u"")}, p_test)
    execution = {
        u"start_time": in_data.get(u"starttime", u""),
        u"end_time": in_data.get(u"endtime", u""),
        u"status": in_data.get(u"status", u"FAILED"),
    }
    # Remaining metadata (e.g. ci/job/build supplied by the caller) is
    # folded into the execution record.
    execution.update(metadata)
    data.add_element({u"execution": execution}, p_test)

    # Reusable log item skeleton; fields are overwritten per message below.
    log_item = {
        u"source": {
            u"type": u"node",
            u"id": u""
        },
        u"msg-type": u"",
        u"log-level": u"INFO",
        u"timestamp": in_data.get(u"starttime", u""),  # replacement
        u"msg": u"",
        u"data": list()
    }

    # Process configuration history:
    in_papi = deepcopy(in_data.get(u"conf-history", None))
    if in_papi:
        regex_dut = re.compile(r'\*\*DUT(\d):\*\*')
        node_id = u"dut1"
        for line in in_papi.split(u"\n"):
            if not line:
                continue
            groups = re.search(regex_dut, line)
            if groups:
                # A "**DUTn:**" header switches the current node.
                node_id = f"dut{groups.group(1)}"
            else:
                log_item[u"source"][u"id"] = node_id
                log_item[u"msg-type"] = u"papi"
                log_item[u"msg"] = line
                data.add_element(log_item, p_log)

    # Process show runtime:
    in_sh_run = deepcopy(in_data.get(u"show-run", None))
    if in_sh_run:
        # Transform to openMetrics format
        for key, val in in_sh_run.items():
            log_item[u"source"][u"id"] = key
            log_item[u"msg-type"] = u"metric"
            log_item[u"msg"] = u"show-runtime"
            log_item[u"data"] = list()
            for item in val.get(u"runtime", list()):
                for metric, m_data in item.items():
                    if metric == u"name":
                        continue
                    for idx, m_item in enumerate(m_data):
                        log_item[u"data"].append(
                            {
                                u"name": metric,
                                u"value": m_item,
                                u"labels": {
                                    u"host": val.get(u"host", u""),
                                    u"socket": val.get(u"socket", u""),
                                    u"graph-node": item.get(u"name", u""),
                                    u"thread-id": str(idx)
                                }
                            }
                        )
            data.add_element(log_item, p_log)

    # Process results:
    results = dict()
    if t_type == u"DEVICETEST":
        pass  # Nothing to add.
    elif t_type == u"NDRPDR":
        results = {
            u"throughput": {
                u"unit":
                    u"cps" if u"TCP_CPS" in tags or u"UDP_CPS" in tags
                    else u"pps",
                u"ndr": {
                    u"value": {
                        u"lower": in_data.get(u"throughput", dict()).
                                  get(u"NDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"throughput", dict()).
                                  get(u"NDR", dict()).get(u"UPPER", u"NaN")
                    },
                    u"value_gbps": {
                        u"lower": in_data.get(u"gbps", dict()).
                                  get(u"NDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"gbps", dict()).
                                  get(u"NDR", dict()).get(u"UPPER", u"NaN")
                    }
                },
                u"pdr": {
                    u"value": {
                        u"lower": in_data.get(u"throughput", dict()).
                                  get(u"PDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"throughput", dict()).
                                  get(u"PDR", dict()).get(u"UPPER", u"NaN")
                    },
                    u"value_gbps": {
                        u"lower": in_data.get(u"gbps", dict()).
                                  get(u"PDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"gbps", dict()).
                                  get(u"PDR", dict()).get(u"UPPER", u"NaN")
                    }
                }
            },
            u"latency": {
                u"forward": {
                    u"pdr-90": in_data.get(u"latency", dict()).
                               get(u"PDR90", dict()).get(u"direction1", u"NaN"),
                    u"pdr-50": in_data.get(u"latency", dict()).
                               get(u"PDR50", dict()).get(u"direction1", u"NaN"),
                    u"pdr-10": in_data.get(u"latency", dict()).
                               get(u"PDR10", dict()).get(u"direction1", u"NaN"),
                    u"pdr-0": in_data.get(u"latency", dict()).
                              get(u"LAT0", dict()).get(u"direction1", u"NaN")
                },
                u"reverse": {
                    u"pdr-90": in_data.get(u"latency", dict()).
                               get(u"PDR90", dict()).get(u"direction2", u"NaN"),
                    u"pdr-50": in_data.get(u"latency", dict()).
                               get(u"PDR50", dict()).get(u"direction2", u"NaN"),
                    u"pdr-10": in_data.get(u"latency", dict()).
                               get(u"PDR10", dict()).get(u"direction2", u"NaN"),
                    u"pdr-0": in_data.get(u"latency", dict()).
                              get(u"LAT0", dict()).get(u"direction2", u"NaN")
                }
            }
        }
    elif t_type == "MRR":
        results = {
            u"unit": u"pps",  # Old data use only pps
            u"samples": in_data.get(u"result", dict()).get(u"samples", list()),
            u"avg": in_data.get(u"result", dict()).get(u"receive-rate", u"NaN"),
            u"stdev": in_data.get(u"result", dict()).
                      get(u"receive-stdev", u"NaN")
        }
    elif t_type == "SOAK":
        results = {
            u"critical-rate": {
                u"lower": in_data.get(u"throughput", dict()).
                          get(u"LOWER", u"NaN"),
                u"upper": in_data.get(u"throughput", dict()).
                          get(u"UPPER", u"NaN"),
            }
        }
    elif t_type == "HOSTSTACK":
        results = in_data.get(u"result", dict())
    # elif t_type == "TCP": # Not used ???
    #     results = in_data.get(u"result", u"NaN")
    elif t_type == "RECONF":
        results = {
            u"loss": in_data.get(u"result", dict()).get(u"loss", u"NaN"),
            u"time": in_data.get(u"result", dict()).get(u"time", u"NaN")
        }
    # Any other test type keeps an empty result set.
    data.add_element({u"results": results}, p_test)

    data.dump(out, indent=u" ")
def convert_xml_to_json(spec, data):
    """Convert downloaded XML files into JSON.

    Procedure:
    - create one json file for each test,
    - gzip all json files one by one,
    - delete the plain json files.

    :param spec: Specification read from the specification files.
    :param data: Input data parsed from output.xml files.
    :type spec: Specification
    :type data: InputData
    """

    logging.info(u"Converting downloaded XML files to JSON ...")

    template_name = spec.output.get(u"use-template", None)
    structure = spec.output.get(u"structure", u"tree")
    if template_name:
        with open(template_name, u"r") as file_handler:
            template = json.load(file_handler)
    else:
        template = None

    build_dir = spec.environment[u"paths"][u"DIR[BUILD,JSON]"]
    # Start from a clean output directory.
    try:
        rmtree(build_dir)
    except FileNotFoundError:
        pass  # It does not exist
    os.makedirs(build_dir, exist_ok=True)

    for job, builds in data.data.items():
        logging.info(f"  Processing job {job}")
        if structure == "tree":
            os.makedirs(join(build_dir, job), exist_ok=True)
        for build_nr, build in builds.items():
            logging.info(f"    Processing build {build_nr}")
            if structure == "tree":
                os.makedirs(join(build_dir, job, build_nr), exist_ok=True)
            for test_id, test_data in build[u"tests"].items():
                # Normalize e.g. "-2t1c-" to "-1c-" in the test ID.
                groups = re.search(re.compile(r'-(\d+[tT](\d+[cC]))-'), test_id)
                if groups:
                    test_id = test_id.replace(groups.group(1), groups.group(2))
                logging.info(f"      Processing test {test_id}")
                if structure == "tree":
                    dirs = test_id.split(u".")[:-1]
                    name = test_id.split(u".")[-1]
                    os.makedirs(
                        join(build_dir, job, build_nr, *dirs), exist_ok=True
                    )
                    file_name = \
                        f"{join(build_dir, job, build_nr, *dirs, name)}.json"
                else:
                    # Flat structure: one dotted file name per test.
                    file_name = join(
                        build_dir,
                        u'.'.join((job, build_nr, test_id, u'json'))
                    )
                suite_id = test_id.rsplit(u".", 1)[0].replace(u" ", u"_")
                _export_test_from_xml_to_json(
                    test_id, test_data, file_name, template,
                    {
                        u"ci": u"jenkins.fd.io",
                        u"job": job,
                        u"build": build_nr,
                        u"suite-id": suite_id,
                        u"suite-doc": build[u"suites"].get(suite_id, dict()).
                                      get(u"doc", u""),
                        u"testbed": build[u"metadata"].get(u"testbed", u""),
                        u"sut-version": build[u"metadata"].get(u"version", u"")
                    }
                )

    # gzip the json files:
    for file in get_files(build_dir, u"json"):
        with open(file, u"rb") as src:
            with gzip.open(f"{file}.gz", u"wb") as dst:
                dst.writelines(src)
            os.remove(file)

    logging.info(u"Done.")