9 from subprocess import Popen, PIPE
11 import ply.yacc as yacc
12 from io import TextIOWrapper
# Refuse to run on unsupported interpreters before doing any real work.
# (fragment: the continuation of this assert message is not visible here)
assert sys.version_info >= (3, 5), "Not supported Python version: {}".format(

# Module-level logger for the generator.
log = logging.getLogger("vppapigen")

# Ensure we don't leave temporary files around
sys.dont_write_bytecode = True

# Global dictionary of new types (including enums)
def global_type_add(name, obj):
    """Register *obj* in the global type table under "vl_api_<name>_t".

    Raises KeyError if a type of that name was already registered.
    """
    canonical = "vl_api_" + name + "_t"
    if canonical in global_types:
        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(name, obj))
    global_types[canonical] = obj
# All your trace are belong to us!
def exception_handler(exception_type, exception, traceback):
    """sys.excepthook replacement: print "<Type>: <message>" only.

    Deliberately suppresses the traceback for terse CLI diagnostics.
    Fixed to write to stderr instead of stdout, so an error can never be
    interleaved with generated output that is being emitted on stdout.
    """
    print("%s: %s" % (exception_type.__name__, exception), file=sys.stderr)
    def __init__(self, filename):
        # Remember the source filename so lexer diagnostics can point at it.
        self.filename = filename
        # (fragment of the reserved-word table) maps .api keywords to the
        # token names used by the grammar rules below.
        "enumflag": "ENUMFLAG",
        "typeonly": "TYPEONLY",
        "manual_print": "MANUAL_PRINT",
        "manual_endian": "MANUAL_ENDIAN",
        "dont_trace": "DONT_TRACE",
        "autoreply": "AUTOREPLY",
        "autoendian": "AUTOENDIAN",
        "counters": "COUNTERS",
        "severity": "SEVERITY",
        "description": "DESCRIPTION",

    # Full token set: literals, identifiers and numbers plus every reserved word.
    tokens = ["STRING_LITERAL", "COMMENT", "ID", "NUM"] + list(reserved.values())
    # PLY convention: the docstring of each t_* rule IS the token's regular
    # expression — those strings are load-bearing and must not be edited.
    t_ignore_LINE_COMMENT = "//.*"

    # (fragment) t_NUM rule: its def line is not visible here.  Hex literals
    # are parsed base-16, other numbers base-10; a value with '.' presumably
    # takes the float branch — TODO confirm against the missing lines.
        r"0[xX][0-9a-fA-F]+|-?\d+\.?\d*"
        base = 16 if t.value.startswith("0x") else 10
            t.value = float(t.value)
            t.value = int(t.value, base)

    # (fragment) t_ID rule: its def line is not visible here.  Reserved
    # words are remapped to their dedicated token type.
        r"[a-zA-Z_][a-zA-Z_0-9]*"
        # Check for reserved words
        t.type = VPPAPILexer.reserved.get(t.value, "ID")

    def t_STRING_LITERAL(self, t):
        r"\"([^\\\n]|(\\.))*?\" "
        # Strip the surrounding double quotes from the literal's value.
        t.value = str(t.value).replace('"', "")
    # C or C++ comment (ignore)
    def t_COMMENT(self, t):
        r"(/\*(.|\n)*?\*/)|(//.*)"
        # Keep line numbering accurate across multi-line /* ... */ comments.
        t.lexer.lineno += t.value.count("\n")
    # Error handling rule
    def t_error(self, t):
        # (fragment) builds an "Illegal character" message naming the
        # character, its code point, the file and the line; the surrounding
        # raise/skip lines are not visible here.
            "Illegal character '{}' ({})"
            "in {}: line {}".format(
                t.value[0], hex(ord(t.value[0])), self.filename, t.lexer.lineno

    # Define a rule so we can track line numbers
    def t_newline(self, t):
        # (fragment) the PLY regex docstring line is missing from this view.
        t.lexer.lineno += len(t.value)
    # Single-character tokens handed back verbatim by the lexer.
    literals = ":{}[];=.,"

    # A string containing ignored characters (spaces and tabs)
def vla_mark_length_field(block):
    # If the block ends in a variable-length Array with a named length
    # field, flag the matching field as the length field.
    # (fragment: the loop header over the block's fields is not visible.)
    if isinstance(block[-1], Array):
        lengthfield = block[-1].lengthfield
            if b.fieldname == lengthfield:
                b.is_lengthfield = True
def vla_is_last_check(name, block):
    # (fragment) Checks that any variable-length member — a VLA array, a
    # user type that is itself a VLA, or an unbounded string — is the last
    # field of the message; presumably raises otherwise and returns whether
    # the block contains a VLA.  Several lines are missing from this view.
    for i, b in enumerate(block):
        if isinstance(b, Array) and b.vla:
            if i + 1 < len(block):
                    'VLA field "{}" must be the last field in message "{}"'.format(
        elif b.fieldtype.startswith("vl_api_"):
            if global_types[b.fieldtype].vla:
                if i + 1 < len(block):
                        'VLA field "{}" must be the last '
                        'field in message "{}"'.format(b.fieldname, name)
        elif b.fieldtype == "string" and b.length == 0:
            if i + 1 < len(block):
                    'VLA field "{}" must be the last '
                    'field in message "{}"'.format(b.fieldname, name)
    def process(self, result):  # -> Dict
        # Default processing: file this object under its own type key.
        result[self.type].append(self)
class Service(Processable):
    # (fragment) Represents an RPC service statement; caller/reply/stream
    # attribute assignments are missing from this view.
    def __init__(self, caller, reply, events=None, stream_message=None, stream=False):
        self.stream_message = stream_message
        # Avoid a shared mutable default: start with a fresh list when
        # no events were given.
        self.events = [] if events is None else events
class Typedef(Processable):
    # (fragment) A named composite type; registers itself in the global
    # type table and records a CRC for ABI versioning.
    def __init__(self, name, flags, block):
        # CRC seed is the string form of the block.
        self.crc = str(block).encode()
        self.manual_print = False
        self.manual_endian = False
            if f == "manual_print":
                self.manual_print = True
            elif f == "manual_endian":
                self.manual_endian = True
        global_type_add(name, self)
        self.vla = vla_is_last_check(name, block)
        vla_mark_length_field(self.block)

    def process(self, result):
        result["types"].append(self)

    # (fragment) __repr__ header is missing from this view.
        return self.name + str(self.flags) + str(self.block)
class Using(Processable):
    # (fragment) Type alias ("typedef <type> <name>"); may wrap an Array.
    # The branch structure around the manual_* defaults is not visible.
    def __init__(self, name, flags, alias):
        self.manual_print = True
        self.manual_endian = True
        self.manual_print = False
        self.manual_endian = False
            if f == "manual_print":
                self.manual_print = True
            elif f == "manual_endian":
                self.manual_endian = True
        if isinstance(alias, Array):
            a = {"type": alias.fieldtype, "length": alias.length}
            a = {"type": alias.fieldtype}
        # self.crc = str(alias).encode()
        # but to be backwards compatible use the block ([])
        self.crc = str(self.block).encode()
        global_type_add(name, self)

    def process(self, result):  # -> Dict
        result["types"].append(self)

    # (fragment) __repr__ header is missing from this view.
        return self.name + str(self.alias)
class Union(Processable):
    # (fragment) Union of fields; registered as a global type.
    def __init__(self, name, flags, block):
        self.manual_print = False
        self.manual_endian = False
            if f == "manual_print":
                self.manual_print = True
            elif f == "manual_endian":
                self.manual_endian = True
        self.crc = str(block).encode()
        self.vla = vla_is_last_check(name, block)
        global_type_add(name, self)

    def process(self, result):
        result["types"].append(self)

    # (fragment) __repr__ header is missing from this view.
        return str(self.block)
class Define(Processable):
    # (fragment) A message definition.  Flags toggle trace/print/endian/
    # autoreply behaviour; Option statements are hoisted out of the block.
    def __init__(self, name, flags, block, comment=None):
        self.dont_trace = False
        self.manual_print = False
        self.manual_endian = False
        self.autoreply = False
        self.comment = comment
            if f == "dont_trace":
                self.dont_trace = True
            elif f == "manual_print":
                self.manual_print = True
            elif f == "manual_endian":
                self.manual_endian = True
            elif f == "autoreply":
                self.autoreply = True
            elif f == "autoendian":
            if isinstance(b, Option):
                self.options[b.option] = b.value
        # Options recorded above are removed from the field block.
        block = [x for x in block if x not in remove]
        self.vla = vla_is_last_check(name, block)
        vla_mark_length_field(self.block)
        self.crc = str(block).encode()
342 def autoreply_block(self, name, parent):
343 block = [Field("u32", "context"), Field("i32", "retval")]
344 # inherit the parent's options
345 for k, v in parent.options.items():
346 block.append(Option(k, v))
347 return Define(name + "_reply", [], block)
    def process(self, result):  # -> Dict
        # File the define under its class name; the autoreply companion is
        # appended too (the guarding condition line is not visible here).
        tname = self.__class__.__name__
        result[tname].append(self)
            result[tname].append(self.autoreply_block(self.name, self))

    # (fragment) __repr__ header is missing from this view.
        return self.name + str(self.flags) + str(self.block)
class Enum(Processable):
    # (fragment) Enum definition; assigns values where the source omitted
    # them and enforces ordering of backwards_compatible members.  Several
    # loop/condition lines are missing from this view.
    def __init__(self, name, block, enumtype="u32"):
        self.enumtype = enumtype
        self.manual_print = False
                block2.append([b["id"], count])
                if b["option"]["backwards_compatible"]:
                    block3.append([b["id"], count])
                        "Backward compatible enum must "
                        "be last {!r} {!r}".format(name, b["id"])
        self.crc = str(block3).encode()
        global_type_add(name, self)

    def process(self, result):
        result["types"].append(self)

    # (fragment) __repr__ header is missing from this view.
        return self.name + str(self.block)
class EnumFlag(Enum):
    # (fragment) Enum whose members must each have at most one bit set.
    def __init__(self, name, block, enumtype="u32"):
        super(EnumFlag, self).__init__(name, block, enumtype)
            # Reject any member value with more than a single bit set.
            if bin(b[1])[2:].count("1") > 1:
                    "%s is not a flag enum. No element in a "
                    "flag enum may have more than a "
                    "single bit set." % self.name
class Import(Processable):
    # (fragment) Import of another .api file.  Instances are deduplicated
    # through seen_imports so each file is parsed at most once per run.
    def __new__(cls, *args, **kwargs):
        if args[0] not in seen_imports:
            instance = super().__new__(cls)
            instance._initialized = False
            seen_imports[args[0]] = instance
        return seen_imports[args[0]]

    def __init__(self, filename, revision):
        # __init__ runs again for cached instances; skip re-parsing then.
        if self._initialized:
        self.filename = filename
        parser = VPPAPI(filename=filename, revision=revision)
        dirlist = dirlist_get()
            # Probe each include directory for the imported file.
            f = os.path.join(dir, filename)
            if os.path.exists(f):
                self.result = parser.parse_filename(f, None)
                self._initialized = True
class Option(Processable):
    # (fragment) An "option <name> = <value>;" statement.
    def __init__(self, option, value=None):
        self.crc = str(option).encode()

    def process(self, result):  # -> Dict
        result[self.type][self.option] = self.value

    # (fragment) __repr__ header is missing from this view.
        return str(self.option)

    def __getitem__(self, index):
        return self.option[index]
class Array(Processable):
    # (fragment) Fixed- or variable-length array field.
    def __init__(self, fieldtype, name, length, modern_vla=False):
        self.fieldtype = fieldtype
        self.fieldname = name
        self.modern_vla = modern_vla
        # A string length means "sized by another field" (a VLA).
        if type(length) is str:
            self.lengthfield = length
            self.lengthfield = None

    # (fragment) __repr__ header is missing from this view.
        return str([self.fieldtype, self.fieldname, self.length, self.lengthfield])
class Field(Processable):
    # (fragment) A scalar field inside a message/typedef block.
    def __init__(self, fieldtype, name, limit=None):
        # limit field has been expanded to an options dict.
        self.fieldtype = fieldtype
        self.is_lengthfield = False
        # Bare "string" fields must be declared as arrays instead.
        if self.fieldtype == "string":
            raise ValueError("The string type {!r} is an " "array type ".format(name))
        # Field names that collide with Python keywords would be
        # inaccessible as attributes in the generated Python bindings.
        if name in keyword.kwlist:
                "Fieldname {!r} is a python keyword and is not "
                "accessible via the python API. ".format(name)
        self.fieldname = name

    # (fragment) __repr__ header is missing from this view.
        return str([self.fieldtype, self.fieldname])
class Counter(Processable):
    # (fragment) A "counters <name> { ... }" statement.
    def __init__(self, path, counter):

    def process(self, result):  # -> Dict
        result["Counters"].append(self)


class Paths(Processable):
    # (fragment) A "paths { ... }" statement.
    def __init__(self, pathset):

    # (fragment) __repr__ header is missing from this view.
        return "%s(paths=%s)" % (self.__class__.__name__, self.paths)


class Coord:
    """Coordinates of a syntactic element. Consists of:
    - (optional) column number, for the Lexer

    __slots__ = ("file", "line", "column", "__weakref__")

    def __init__(self, file, line, column=None):

    # (fragment) __str__ body.  NOTE(review): the local name shadows the
    # builtin str — harmless here but worth renaming when editing this.
        str = "%s:%s" % (self.file, self.line)
            str += ":%s" % self.column
class ParseError(Exception):
    # (fragment) Raised on grammar/lexical errors; body not visible here.


# (fragment) the class VPPAPIParser header is missing from this view.
    # Reuse the lexer's token list for the parser.
    tokens = VPPAPILexer.tokens

    def __init__(self, filename, logger, revision=None):
        self.filename = filename
        self.revision = revision
        # Most recent block comment seen; attached to the next define.
        self.last_comment = None
    def _parse_error(self, msg, coord):
        # Hard error: abort parsing with a location-prefixed message.
        raise ParseError("%s: %s" % (coord, msg))

    def _parse_warning(self, msg, coord):
        # (fragment) a guard line is missing; the warning goes via the logger.
        self.logger.warning("%s: %s" % (coord, msg))

    def _coord(self, lineno, column=None):
        return Coord(file=self.filename, line=lineno, column=column)

    def _token_coord(self, p, token_idx):
        """Returns the coordinates for the YaccProduction object 'p' indexed
        with 'token_idx'. The coordinate includes the 'lineno' and
        'column'. Both follow the lex semantic, starting from 1.
        # Column = offset from the last newline before the token (fragment:
        # intermediate lines are missing from this view).
        last_cr = p.lexer.lexdata.rfind("\n", 0, p.lexpos(token_idx))
        column = p.lexpos(token_idx) - (last_cr)
        return self._coord(p.lineno(token_idx), column)
    # ------------------------------------------------------------------
    # Grammar rules.  PLY convention: the docstring of each p_* method IS
    # the production; those strings are load-bearing and must not be
    # edited.  Many rule bodies are only partially visible in this view.
    # ------------------------------------------------------------------
    def p_slist(self, p):

    def p_import(self, p):
        """import : IMPORT STRING_LITERAL ';'"""
        p[0] = Import(p[2], revision=self.revision)

    def p_path_elements(self, p):
        """path_elements : path_element
        | path_elements path_element"""
            if type(p[1]) is dict:

    def p_path_element(self, p):
        """path_element : STRING_LITERAL STRING_LITERAL ';'"""
        p[0] = {"path": p[1], "counter": p[2]}

    def p_paths(self, p):
        """paths : PATHS '{' path_elements '}' ';'"""

    def p_counters(self, p):
        """counters : COUNTERS ID '{' counter_elements '}' ';'"""
        p[0] = Counter(p[2], p[4])

    def p_counter_elements(self, p):
        """counter_elements : counter_element
        | counter_elements counter_element"""
            if type(p[1]) is dict:

    def p_counter_element(self, p):
        """counter_element : ID '{' counter_statements '}' ';'"""
        # Merge the element's name with its parsed statements.
        p[0] = {**{"name": p[1]}, **p[3]}

    def p_counter_statements(self, p):
        """counter_statements : counter_statement
        | counter_statements counter_statement"""
            p[0] = {**p[1], **p[2]}

    def p_counter_statement(self, p):
        """counter_statement : SEVERITY ID ';'
        | UNITS STRING_LITERAL ';'
        | DESCRIPTION STRING_LITERAL ';'
    def p_service(self, p):
        """service : SERVICE '{' service_statements '}' ';'"""

    def p_service_statements(self, p):
        """service_statements : service_statement
        | service_statements service_statement"""

    def p_service_statement(self, p):
        """service_statement : RPC ID RETURNS NULL ';'
        | RPC ID RETURNS ID ';'
        | RPC ID RETURNS STREAM ID ';'
        | RPC ID RETURNS ID EVENTS event_list ';'"""
            # Verify that caller and reply differ
                "Reply ID ({}) should not be equal to Caller ID".format(p[2]),
                self._token_coord(p, 1),
            # (fragment) branch selection lines are missing from this view.
            p[0] = Service(p[2], p[4], p[6])
            p[0] = Service(p[2], p[5], stream=True)
            p[0] = Service(p[2], p[4])

    def p_service_statement2(self, p):
        """service_statement : RPC ID RETURNS ID STREAM ID ';'"""
        p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)

    def p_event_list(self, p):
        """event_list : events
        | event_list events"""
    def p_event(self, p):

    # (fragment) the p_enum header is missing from this view.
        """enum : ENUM ID '{' enum_statements '}' ';'"""
        p[0] = Enum(p[2], p[4])

    def p_enum_type(self, p):
        """enum : ENUM ID ':' enum_size '{' enum_statements '}' ';'"""
            p[0] = Enum(p[2], p[6], enumtype=p[4])
            p[0] = Enum(p[2], p[4])

    def p_enumflag(self, p):
        """enumflag : ENUMFLAG ID '{' enum_statements '}' ';'"""
        p[0] = EnumFlag(p[2], p[4])

    def p_enumflag_type(self, p):
        """enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';'"""  # noqa : E502
            p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
            p[0] = EnumFlag(p[2], p[4])

    def p_enum_size(self, p):

    def p_enumflag_size(self, p):
        """enumflag_size : U8

    def p_define(self, p):
        """define : DEFINE ID '{' block_statements_opt '}' ';'"""
        # Attach the most recently seen comment to this define.
        p[0] = Define(p[2], [], p[4], self.last_comment)
        self.last_comment = None

    def p_define_flist(self, p):
        """define : flist DEFINE ID '{' block_statements_opt '}' ';'"""
        # "typeonly define" is legacy syntax; warn (fragment).
        if "typeonly" in p[1]:
                "legacy typedef. use typedef: {} {}[{}];".format(p[1], p[2], p[4]),
                self._token_coord(p, 1),
            p[0] = Define(p[3], p[1], p[5], self.last_comment)
            self.last_comment = None
    def p_flist(self, p):

    # (fragment) the p_flag header is missing from this view.
        """flag : MANUAL_PRINT

    def p_typedef(self, p):
        """typedef : TYPEDEF ID '{' block_statements_opt '}' ';'"""
        p[0] = Typedef(p[2], [], p[4])

    def p_typedef_flist(self, p):
        """typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';'"""
        p[0] = Typedef(p[3], p[1], p[5])

    def p_typedef_alias(self, p):
        """typedef : TYPEDEF declaration"""
        # A bare "typedef <decl>;" becomes a Using alias named after the field.
        p[0] = Using(p[2].fieldname, [], p[2])

    def p_typedef_alias_flist(self, p):
        """typedef : flist TYPEDEF declaration"""
        p[0] = Using(p[3].fieldname, p[1], p[3])
    def p_block_statements_opt(self, p):
        """block_statements_opt : block_statements"""

    def p_block_statements(self, p):
        """block_statements : block_statement
        | block_statements block_statement"""

    def p_block_statement(self, p):
        """block_statement : declaration

    def p_enum_statements(self, p):
        """enum_statements : enum_statement
        | enum_statements enum_statement"""

    def p_enum_statement(self, p):
        """enum_statement : ID '=' NUM ','
        | ID '[' field_options ']' ','
        | ID '=' NUM '[' field_options ']' ','"""
            # (fragment) branch selection lines are missing from this view.
            p[0] = {"id": p[1], "value": p[3]}
            p[0] = {"id": p[1], "option": p[3]}
            p[0] = {"id": p[1], "value": p[3], "option": p[5]}
            self._parse_error("ERROR", self._token_coord(p, 1))

    def p_field_options(self, p):
        """field_options : field_option
        | field_options field_option"""
            p[0] = {**p[1], **p[2]}

    def p_field_option(self, p):
        | ID '=' assignee ','
    def p_variable_name(self, p):
        """variable_name : ID

    def p_comment(self, p):
        """comment : COMMENT"""
        # Remember the comment so the next define can attach it.
        self.last_comment = p[1]

    def p_declaration(self, p):
        """declaration : type_specifier variable_name ';'
        | type_specifier variable_name '[' field_options ']' ';'
            # (fragment) branch selection lines are missing from this view.
            p[0] = Field(p[1], p[2], p[4])
            p[0] = Field(p[1], p[2])
            self._parse_error("ERROR", self._token_coord(p, 1))
        # Track field names for later length-field verification.
        self.fields.append(p[2])

    def p_declaration_array_vla(self, p):
        """declaration : type_specifier variable_name '[' ']' ';'"""
        # "field[]" is the modern variable-length-array syntax.
        p[0] = Array(p[1], p[2], 0, modern_vla=True)

    def p_declaration_array(self, p):
        """declaration : type_specifier variable_name '[' NUM ']' ';'
        | type_specifier variable_name '[' ID ']' ';'"""
            return self._parse_error(
                "array: %s" % p.value, self._coord(lineno=p.lineno)
        # Make this error later
        if type(p[4]) is int and p[4] == 0:
            # XXX: Line number is wrong
                "Old Style VLA: {} {}[{}];".format(p[1], p[2], p[4]),
                self._token_coord(p, 1),
        if type(p[4]) is str and p[4] not in self.fields:
            # Verify that length field exists
                "Missing length field: {} {}[{}];".format(p[1], p[2], p[4]),
                self._token_coord(p, 1),
        p[0] = Array(p[1], p[2], p[4])

    def p_option(self, p):
        """option : OPTION ID '=' assignee ';'
            p[0] = Option(p[2], p[4])
    def p_assignee(self, p):

    def p_type_specifier(self, p):
        """type_specifier : U8

    # Do a second pass later to verify that user defined types are defined
    def p_typedef_specifier(self, p):
        """type_specifier : ID"""
        # A bare ID used as a type must already be registered globally.
        if p[1] not in global_types:
                "Undefined type: {}".format(p[1]), self._token_coord(p, 1)

    def p_union(self, p):
        """union : UNION ID '{' block_statements_opt '}' ';'"""
        p[0] = Union(p[2], [], p[4])

    def p_union_flist(self, p):
        """union : flist UNION ID '{' block_statements_opt '}' ';'"""
        p[0] = Union(p[3], p[1], p[5])

    # Error rule for syntax errors
    def p_error(self, p):
        # (fragment) comments reaching the parser are tolerated; anything
        # else is reported with its location, or "At end of input" when the
        # token stream ran out.
        if p.type == "COMMENT":
            self._parse_error("before: %s" % p.value, self._coord(lineno=p.lineno))
            self._parse_error("At end of input", self.filename)

    def build(self, **kwargs):
        # Construct the LALR parser from this module's p_* rules.
        self.parser = yacc.yacc(module=self, **kwargs)
    def __init__(self, debug=False, filename="", logger=None, revision=None):
        self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
        self.parser = VPPAPIParser(filename, logger, revision=revision)
        # write_tables=False avoids writing PLY's parsetab cache to disk.
        self.parser.build(write_tables=False, debug=debug)
        self.revision = revision
        self.filename = filename

    def parse_string(self, code, debug=0, lineno=1):
        # Reset the lexer's line counter so diagnostics start at *lineno*.
        self.lexer.lineno = lineno
        return self.parser.parser.parse(code, lexer=self.lexer, debug=debug)

    def parse_fd(self, fd, debug=0):
        # (fragment) the fd.read() line is not visible in this view.
        return self.parse_string(data, debug=debug)

    def parse_filename(self, filename, debug=0):
        # When a git revision is set, read the file from git history via
        # "git show" instead of the working tree.
            git_show = "git show {}:{}".format(self.revision, filename)
            proc = Popen(git_show.split(), stdout=PIPE, encoding="utf-8")
            data, errs = proc.communicate()
            if proc.returncode != 0:
                "File not found: {}:{}".format(self.revision, filename),
            return self.parse_string(data, debug=debug)

        # Working-tree path: open the file and delegate to parse_fd().
            with open(filename, encoding="utf-8") as fd:
                return self.parse_fd(fd, None)
        except FileNotFoundError:
            print("File not found: {}".format(filename), file=sys.stderr)
    def process(self, objs):
        # (fragment) Post-parse pass: fold message CRCs, validate service
        # statements against message definitions, and synthesize implicit
        # services for *_reply / *_dump / *_details message pairs.  Many
        # lines are missing from this view.
                crc = binascii.crc32(o.crc, crc) & 0xFFFFFFFF
            except AttributeError:
                if isinstance(o2, Service):
        # Index messages and services by name for the checks below.
        msgs = {d.name: d for d in s["Define"]}
        svcs = {s.caller: s for s in s["Service"]}
        replies = {s.reply: s for s in s["Service"]}
        for service in svcs:
            if service not in msgs:
                    "Service definition refers to unknown message"
                    " definition: {}".format(service)
            if svcs[service].reply != "null" and svcs[service].reply not in msgs:
                    "Service definition refers to unknown message"
                    " definition in reply: {}".format(svcs[service].reply)
            if service in replies:
                    "Service definition refers to message"
                    " marked as reply: {}".format(service)
            for event in svcs[service].events:
                if event not in msgs:
                        "Service definition refers to unknown "
                        "event: {} in message: {}".format(event, service)
                seen_services[event] = True

        # Create services implicitly
            if d in seen_services:
            if d.endswith("_reply"):
                if d[:-6] not in msgs:
                    raise ValueError("{} missing calling message".format(d))
            if d.endswith("_dump"):
                # A *_dump message pairs with a *_details stream reply.
                if d[:-5] + "_details" in msgs:
                    s["Service"].append(Service(d, d[:-5] + "_details", stream=True))
                    raise ValueError("{} missing details message".format(d))
            if d.endswith("_details"):
                if d[:-8] + "_get" in msgs:
                    if d[:-8] + "_get" in svcs:
                        "{} should be in a stream service".format(d[:-8] + "_get")
                if d[:-8] + "_dump" in msgs:
                    raise ValueError("{} missing dump or get message".format(d))
            if d + "_reply" in msgs:
                s["Service"].append(Service(d, d + "_reply"))
                    "{} missing reply message ({}) or service definition".format(
    def process_imports(self, objs, in_import, result):  # -> List
        # Only allow the following object types from imported file
        if in_import and not isinstance(o, (Enum, Import, Typedef, Union, Using)):
        if isinstance(o, Import):
            # Recurse into the imported file's own objects.
            result = self.process_imports(o.result, True, result)

# Add message ids to each message.
# (fragment) add_msg_id: prepend the implicit _vl_msg_id field.
        o.block.insert(0, Field("u16", "_vl_msg_id"))

def dirlist_add(dirs):
        # Extend the global include-directory search list.
        dirlist = dirlist + dirs

def foldup_blocks(block, crc):
        # Look up CRC in user defined types
        if b.fieldtype.startswith("vl_api_"):
            # Recursively fold referenced user types into the running CRC.
                t = global_types[b.fieldtype]
                crc = binascii.crc32(t.crc, crc) & 0xFFFFFFFF
                crc = foldup_blocks(t.block, crc)
            except AttributeError:

# (fragment) foldup_crcs loop body: fold each message's block into its CRC.
    f.crc = foldup_blocks(f.block, binascii.crc32(f.crc) & 0xFFFFFFFF)
def write_dependencies(output_file, dependency_file, imports):
    # (fragment) Emit a Make-style dependency file listing each imported
    # .api file found on the include path.  Loop headers and the collection
    # of resolved paths are not visible in this view.
        f = os.path.abspath(os.path.join(d, i.filename))
        if os.path.exists(f):
    with open(dependency_file, "w", encoding="utf8") as f:
        print(f"{output_file}: \\", file=f)
            print(f" {i} \\", file=f)
            print(f" {r[-1]}", file=f)
# (fragment) run_vppapigen: main driver — reset global state, parse the
# input .api file, locate and load the output plugin, then emit the
# generated file.  Many lines (including the def header) are missing.
    dependency_file=None,
    # Reset module-global state so repeated invocations start clean.
    global_types.clear()
    seen_imports.clear()
    dirlist_add(includedir)
    # Route uncaught exceptions through the terse one-line handler.
    sys.excepthook = exception_handler
        filename = show_name[0]
        filename = input_file
        logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
        logging.basicConfig()

    # Generate representation
    from importlib.machinery import SourceFileLoader

    # Candidate plugin directories: next to this script, or the installed
    # share directory.
    cand.append(os.path.dirname(os.path.realpath(__file__)))
    cand.append(os.path.dirname(os.path.realpath(__file__)) + "/../share/vpp/")
        if os.path.isfile("{}vppapigen_{}.py".format(c, output_module.lower())):
        pluginpath = pluginpath + "/"
    if pluginpath == "":
        log.exception("Output plugin not found")
    module_path = "{}vppapigen_{}.py".format(pluginpath, output_module.lower())
        plugin = SourceFileLoader(output_module, module_path).load_module()
    except Exception as err:
        log.exception("Error importing output plugin: %s, %s", module_path, err)

    parser = VPPAPI(debug=debug, filename=filename, logger=log, revision=git_revision)
        # "-" style input: read the .api source from stdin.
        parsed_objects = parser.parse_fd(sys.stdin, log)
        parsed_objects = parser.parse_filename(input_file, log)
    except ParseError as e:
        print("Parse error: ", e, file=sys.stderr)

    # Build a list of objects. Hash of lists.
    # if the variable is not set in the plugin, assume it to be false.
        plugin.process_imports
    except AttributeError:
        plugin.process_imports = False

    if plugin.process_imports:
        result = parser.process_imports(parsed_objects, False, result)
        s = parser.process(result)
        s = parser.process(parsed_objects)
        imports = parser.process_imports(parsed_objects, False, result)
        s["imported"] = parser.process(imports)

    if dependency_file and isinstance(output, TextIOWrapper):
        write_dependencies(output.name, dependency_file[0], s["Import"])

    s["Define"] = add_msg_id(s["Define"])
    foldup_crcs(s["Define"])

        # Debug dump of the processed representation.
        pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
        for t in s["Define"]:
            pp.pprint([t.name, t.flags, t.block])
        for t in s["types"]:
            pp.pprint([t.name, t.block])

        result = plugin.run(outputdir, filename, s)
        if isinstance(output, str):
            with open(output, "w", encoding="UTF-8") as f:
                print(result, file=f)
            print(result, file=output)
        log.exception("Running plugin failed: %s %s", filename, result)
def run_kw_vppapigen(kwargs):
    """Adapter: call run_vppapigen() with a dict of keyword arguments."""
    return run_vppapigen(**kwargs)
# (fragment) main(): CLI entry point — version gate, argument parsing and
# dispatch to run_vppapigen().  The def header and several argument lines
# are missing from this view.
    if sys.version_info < (
        "vppapigen requires a supported version of python. "
        "Please use version 3.5 or greater. "

    cliparser = argparse.ArgumentParser(description="VPP API generator")
    cliparser.add_argument("--pluginpath", default="")
    cliparser.add_argument("--includedir", action="append")
    cliparser.add_argument("--outputdir", action="store")
    cliparser.add_argument("--input")
    cliparser.add_argument(
        type=argparse.FileType("w", encoding="UTF-8"),
    cliparser.add_argument("output_module", nargs="?", default="C")
    cliparser.add_argument("--debug", action="store_true")
    cliparser.add_argument("--show-name", nargs=1)
    cliparser.add_argument(
        "--git-revision", help="Git revision to use for opening files"
    cliparser.add_argument("-MF", nargs=1, help="Dependency file")
    args = cliparser.parse_args()

    return run_vppapigen(
        includedir=args.includedir,
        outputdir=args.outputdir,
        show_name=args.show_name,
        input_file=args.input,
        output_module=args.output_module,
        pluginpath=args.pluginpath,
        git_revision=args.git_revision,
        dependency_file=args.MF,

if __name__ == "__main__":