1 # Copyright (c) 2020 Cisco and/or its affiliates.
2 # Licensed under the Apache License, Version 2.0 (the "License");
3 # you may not use this file except in compliance with the License.
4 # You may obtain a copy of the License at:
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS,
10 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11 # See the License for the specific language governing permissions and
12 # limitations under the License.
14 """Algorithms to generate files.
19 from os.path import join
20 from collections import OrderedDict
24 from pal_utils import get_files, get_rst_title_char
# NOTE(review): this chunk is a sampled view of the file -- the embedded
# original line numbers jump (27 -> 32), so interior lines of this template
# (the csv-table directive and its options) are missing here. Code untouched.
# reST snippet included after each table: renders the csv file as a table in
# HTML builds and as a \csvautolongtable in LaTeX builds. Format placeholders:
# {file_html} (csv path for HTML) and {file_latex} (csv path for LaTeX).
27 RST_INCLUDE_TABLE = (u"\n.. only:: html\n\n"
32                      u"    :file: {file_html}\n"
33                      u"\n.. only:: latex\n\n"
34                      u"\n  .. raw:: latex\n\n"
35                      u"      \\csvautolongtable{{{file_latex}}}\n\n")
# Matches NIC-bearing suite names such as "10ge2p1x710-...":
# group(1) = port/speed prefix ("10ge2p1"), group(2) = NIC model token
# ("x710"); used below to key chapters by NIC. Requires `re` to be imported
# earlier in the file (import not visible in this sampled view).
37 REGEX_NIC_SHORT = re.compile(r'(\d*ge\dp\d)(\D*\d*[a-z]*)-')
40 def generate_files(spec, data):
41     """Generate all files specified in the specification file.
43     :param spec: Specification read from the specification file.
44     :param data: Data to process.
45     :type spec: Specification
    # NOTE(review): sampled view -- the opening of this mapping (presumably
    # "generator = {") and the closing of the docstring above are among the
    # missing lines. Maps an "algorithm" name from the spec to the generator
    # function defined later in this module.
50         u"file_details_split": file_details_split,
51         u"file_details_split_html": file_details_split_html,
52         u"file_test_results": file_test_results,
53         u"file_test_results_html": file_test_results_html
56     logging.info(u"Generating the files ...")
57     for file_spec in spec.files:
        # Dispatch on file_spec["algorithm"]; an unknown algorithm name
        # surfaces as NameError/KeyError and is reported below (the enclosing
        # try: line is missing from this sampled view).
59             generator[file_spec[u"algorithm"]](file_spec, data)
60         except (NameError, KeyError) as err:
62                 f"Probably algorithm {file_spec[u'algorithm']} is not defined: "
65     logging.info(u"Done.")
68 def _tests_in_suite(suite_name, tests):
69     """Check if the suite includes tests.
71     :param suite_name: Name of the suite to be checked.
72     :param tests: Set of tests
74     :type tests: pandas.Series
75     :returns: True if the suite includes tests.
    # A test belongs to the suite when its "parent" field equals the suite
    # name. The "return True" / "return False" lines are missing from this
    # sampled view but are implied by the docstring above.
79     for key in tests.keys():
80         if suite_name == tests[key][u"parent"]:
85 def file_details_split(file_spec, input_data, frmt=u"rst"):
86     """Generate the file(s) with algorithms
88     specified in the specification file.
90     :param file_spec: File to generate.
91     :param input_data: Data to process.
92     :param frmt: Format can be: rst or html
93     :type file_spec: pandas.Series
94     :type input_data: InputData
    # NOTE(review): sampled view -- many interior lines are missing throughout
    # this function (unclosed docstring, missing assignment targets, missing
    # returns). Comments below describe only what the visible lines show.
98     fileset_file_name = f"{file_spec[u'output-file']}"
    # reST substitutions prepended to generated files so |br|, |prein| and
    # |preout| render as <br/> / <pre> / </pre> in HTML output.
101         u".. |br| raw:: html\n\n    <br />\n\n\n"
102         u".. |prein| raw:: html\n\n    <pre>\n\n\n"
103         u".. |preout| raw:: html\n\n    </pre>\n\n"
    # Suites shallower than this dotted-name depth are skipped below.
105     start_lvl = file_spec.get(u"data-start-level", 4)
107     logging.info(f"  Generating the file set {fileset_file_name} ...")
    # Data sets and table-directory sets must pair up 1:1 (zipped below).
109     data_sets = file_spec.get(u"data", None)
112             f"  No data sets specified for {file_spec[u'output-file']}, exit."
116     table_sets = file_spec.get(u"dir-tables", None)
119             f"  No table sets specified for {file_spec[u'output-file']}, exit."
123     if len(data_sets) != len(table_sets):
125             f"  The number of data sets and the number of table sets for "
126             f"{file_spec[u'output-file']} are not equal, exit."
    # chapters[level-1][level-2][nic] -> dict(rst_file=..., tables=[...]);
    # OrderedDict preserves discovery order when no explicit ordering is given.
130     chapters = OrderedDict()
131     for data_set, table_set in zip(data_sets, table_sets):
133         logging.info(f"  Processing the table set {table_set}...")
        # Prefer pre-rendered .rst tables, fall back to raw .csv (the
        # if/else lines around these calls are missing from this view).
137             table_lst = get_files(table_set, u".rst", full_path=True)
139             table_lst = get_files(table_set, u".csv", full_path=True)
143                 f"  No tables to include in {table_set}. Skipping."
147         logging.info(u"    Creating the test data set...")
148         tests = input_data.filter_data(
150             params=[u"name", u"parent", u"doc", u"type", u"level"],
153             continue_on_error=True
157         tests = input_data.merge_data(tests)
158         tests.sort_index(inplace=True)
160         logging.info(u"    Creating the suite data set...")
161         suites = input_data.filter_data(
164             continue_on_error=True,
169         suites = input_data.merge_data(suites)
170         suites.sort_index(inplace=True)
172         logging.info(u"    Generating files...")
        # Level-2 chapter key derives from the last two "_"-separated parts of
        # the table-set directory name, joined with "-".
175         chapter_l2 = u"-".join(table_set.split(u"_")[-2:])
176         for suite_longname, suite in suites.items():
178             suite_lvl = len(suite_longname.split(u"."))
179             if suite_lvl < start_lvl:
180                 # Not interested in this suite
183             if suite_lvl == start_lvl:
184                 # Our top-level suite
185                 chapter_l1 = suite_longname.split(u'.')[-1]
186                 if chapters.get(chapter_l1, None) is None:
187                     chapters[chapter_l1] = OrderedDict()
188                 if chapters[chapter_l1].get(chapter_l2, None) is None:
189                     chapters[chapter_l1][chapter_l2] = OrderedDict()
192             if _tests_in_suite(suite[u"name"], tests):
                # Level-3 chapter key is the NIC model extracted from the
                # suite name (group 2 of REGEX_NIC_SHORT); None if no match.
193                 groups = re.search(REGEX_NIC_SHORT, suite[u"name"])
194                 nic = groups.group(2) if groups else None
197                 if chapters[chapter_l1][chapter_l2].get(nic, None) is None:
                    # "2n1l-" prefix is stripped from generated rst file names.
198                     chapters[chapter_l1][chapter_l2][nic] = dict(
199                         rst_file=f"{join(table_set, chapter_l1)}_{nic}.rst".
200                         replace(u"2n1l-", u""),
203                 for idx, tbl_file in enumerate(table_lst):
204                     if suite[u"name"] in tbl_file:
205                         chapters[chapter_l1][chapter_l2][nic][u"tables"].append(
208                                 suite[u"doc"].replace(u'|br|', u'\n\n    -')
    # Human-readable titles for level-1 chapter keys; keys not listed fall
    # back to the raw key via titles.get(chapter_l1, chapter_l1) below.
    # (The "titles = {" opening line is missing from this sampled view.)
214         u"container_memif": u"LXC/DRC Container Memif",
215         u"crypto": u"IPsec IPv4 Routing",
216         u"hoststack": u"Hoststack Testing",
217         u"ip4": u"IPv4 Routing",
218         u"ip4_tunnels": u"IPv4 Tunnels",
219         u"ip6": u"IPv6 Routing",
220         u"ip6_tunnels": u"IPv6 Tunnels",
221         u"l2": u"L2 Ethernet Switching",
222         u"lb": u"LoadBalancer",
223         u"nfv_density": u"NFV Service Density",
224         u"srv6": u"SRv6 Routing",
225         u"vm_vhost": u"KVM VMs vhost-user",
226         u"vts": u"Virtual Topology System",
228         u"interfaces": u"Interfaces",
229         u"l2bd": u"L2 Bridge-domain",
230         u"l2patch": u"L2 Patch",
231         u"l2xc": u"L2 Cross-connect",
    # Optional explicit chapter ordering per level; when absent, fall back to
    # the insertion order of the chapters dict at each level.
234     order_chapters = file_spec.get(u"order-chapters", None)
237         order_1 = order_chapters.get(u"level-1", None)
238         order_2 = order_chapters.get(u"level-2", None)
239         order_3 = order_chapters.get(u"level-3", None)
241         order_1 = chapters.keys()
    # Emit: index.rst entries, one level-1 .rst per chapter, and one leaf .rst
    # per (level-2, nic) combination containing the included tables.
247     for chapter_l1 in order_1:
248         content_l1 = chapters.get(chapter_l1, None)
251         with open(f"{fileset_file_name}/index.rst", u"a") as file_handler:
252             file_handler.write(f"    {chapter_l1}\n")
253         l1_file_name = f"{join(fileset_file_name, chapter_l1)}.rst"
254         title = titles.get(chapter_l1, chapter_l1)
255         logging.info(f"    Generating {title} ...")
256         with open(l1_file_name, u"w") as file_handler:
259                 f"{get_rst_title_char(1) * len(title)}\n\n"
264             order_2 = chapters[chapter_l1].keys()
265         for chapter_l2 in order_2:
266             content_l2 = content_l1.get(chapter_l2, None)
270                 order_3 = chapters[chapter_l1][chapter_l2].keys()
271             for chapter_l3 in order_3:
272                 content_l3 = content_l2.get(chapter_l3, None)
275                 with open(l1_file_name, u"a") as file_handler:
                    # Toctree entry: last two path components, relative ("../").
276                     item = u"/".join(content_l3[u'rst_file'].split(u'/')[-2:])
277                     file_handler.write(f"    ../{item}\n")
278                 logging.info(f"    Writing the file {content_l3[u'rst_file']}")
279                 with open(content_l3[u'rst_file'], u"w+") as file_handler:
280                     title = f"{chapter_l2}-{chapter_l3}"
284                         f"{get_rst_title_char(2) * len(title)}\n"
286                     for table in content_l3[u'tables']:
                        # table[0] = file path, table[1] = suite doc text.
287                         title = table[0].split(u"/")[-1].split(u".")[0]
290                             f"{get_rst_title_char(3) * len(title)}\n"
292                         file_handler.write(f"\n{table[1]}\n")
                        # .rst tables are included directly; otherwise the
                        # csv is wrapped via RST_INCLUDE_TABLE (the frmt/
                        # suffix branch lines are missing from this view).
295                             f"\n.. include:: {table[0].split(u'/')[-1]}"
300                             RST_INCLUDE_TABLE.format(
302                                 file_html=table[0].split(u"/")[-1])
306 def file_details_split_html(file_spec, input_data):
307     """Generate the file(s) with algorithms
308     - file_details_split_html
309     specified in the specification file.
311     :param file_spec: File to generate.
312     :param input_data: Data to process.
313     :type file_spec: pandas.Series
314     :type input_data: InputData
    # Thin wrapper: same as file_details_split but forces HTML table format.
316     file_details_split(file_spec, input_data, frmt=u"html")
319 def file_test_results(file_spec, input_data, frmt=u"rst"):
320     """Generate the file(s) with algorithms
322     specified in the specification file.
324     :param file_spec: File to generate.
325     :param input_data: Data to process.
326     :param frmt: Format can be: rst or html
327     :type file_spec: pandas.Series
328     :type input_data: InputData
    # NOTE(review): sampled view -- interior lines are missing throughout this
    # function; comments describe only what the visible lines show.
332     base_file_name = f"{file_spec[u'output-file']}"
    # reST substitutions written at the top of each chapter file (assignment
    # target, presumably "rst_header = (", is missing from this view; the
    # name rst_header is used below at line 395).
335         u".. |br| raw:: html\n\n    <br />\n\n\n"
336         u".. |prein| raw:: html\n\n    <pre>\n\n\n"
337         u".. |preout| raw:: html\n\n    </pre>\n\n"
    # Suites shallower than this dotted-name depth are skipped below.
339     start_lvl = file_spec.get(u"data-start-level", 4)
341     logging.info(f"  Generating the file {base_file_name} ...")
    # Prefer pre-rendered .rst tables, fall back to raw .csv (branching lines
    # missing from this view).
344         table_lst = get_files(file_spec[u"dir-tables"], u".rst", full_path=True)
346         table_lst = get_files(file_spec[u"dir-tables"], u".csv", full_path=True)
351             f"  No tables to include in {file_spec[u'dir-tables']}. Skipping."
356         f"    Creating the tests data set for the "
357         f"{file_spec.get(u'type', u'')} {file_spec.get(u'title', u'')}."
360     tests = input_data.filter_data(
362         params=[u"name", u"parent", u"doc", u"type", u"level"],
363         continue_on_error=True
367     tests = input_data.merge_data(tests)
369     suites = input_data.filter_data(
371         continue_on_error=True,
376     suites = input_data.merge_data(suites)
377     suites.sort_index(inplace=True)
380     for suite_longname, suite in suites.items():
382         suite_lvl = len(suite_longname.split(u"."))
383         if suite_lvl < start_lvl:
384             # Not interested in this suite
387         if suite_lvl == start_lvl:
388             # Our top-level suite
            # One output .rst per top-level chapter plus an index.rst entry.
389             chapter = suite_longname.split(u'.')[-1]
390             file_name = f"{base_file_name}/{chapter}.rst"
391             logging.info(f"    Writing file {file_name}")
392             with open(f"{base_file_name}/index.rst", u"a") as file_handler:
393                 file_handler.write(f"    {chapter}\n")
394             with open(file_name, u"a") as file_handler:
395                 file_handler.write(rst_header)
        # Section underline depth scales with suite nesting below start_lvl.
397         title_line = get_rst_title_char(suite[u"level"] - start_lvl + 2) * \
399         with open(file_name, u"a") as file_handler:
            # Non-test suites (no -ndrpdr/-mrr/-dev in the name) get a plain
            # section heading with no tables.
400             if not (u"-ndrpdr" in suite[u"name"] or
401                     u"-mrr" in suite[u"name"] or
402                     u"-dev" in suite[u"name"]):
403                 file_handler.write(f"\n{suite[u'name']}\n{title_line}\n")
405             if _tests_in_suite(suite[u"name"], tests):
406                 for tbl_file in table_lst:
407                     if suite[u"name"] in tbl_file:
409                             f"\n{suite[u'name']}\n{title_line}\n"
412                         f"\n{suite[u'doc']}\n".replace(u'|br|', u'\n\n    -')
                        # .rst tables included directly; csv tables wrapped
                        # via RST_INCLUDE_TABLE (branch lines missing here).
416                         f"\n.. include:: {tbl_file.split(u'/')[-1]}\n"
420                         RST_INCLUDE_TABLE.format(
422                             file_html=tbl_file.split(u"/")[-1])
426     logging.info(u"  Done.")
429 def file_test_results_html(file_spec, input_data):
430     """Generate the file(s) with algorithms
431     - file_test_results_html
432     specified in the specification file.
434     :param file_spec: File to generate.
435     :param input_data: Data to process.
436     :type file_spec: pandas.Series
437     :type input_data: InputData
    # Thin wrapper: same as file_test_results but forces HTML table format.
439     file_test_results(file_spec, input_data, frmt=u"html")