9 from subprocess import Popen, PIPE
11 import ply.yacc as yacc
13 assert sys.version_info >= (3, 5), "Not supported Python version: {}".format(
16 log = logging.getLogger("vppapigen")
18 # Ensure we don't leave temporary files around
19 sys.dont_write_bytecode = True
25 # Global dictionary of new types (including enums)
def global_type_add(name, obj):
    """Register a newly defined API type under its wire-format name.

    The type is stored as ``vl_api_<name>_t`` in the module-level
    ``global_types`` registry; redefinition is a hard error.
    """
    key = "vl_api_" + name + "_t"
    if key in global_types:
        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(name, obj))
    global_types[key] = obj
39 # All your trace are belong to us!
def exception_handler(exception_type, exception, traceback):
    """sys.excepthook replacement: print a terse one-line error, no traceback."""
    message = "%s: %s" % (exception_type.__name__, exception)
    print(message)
    def __init__(self, filename):
        # Keep the filename so t_error() can report where an illegal char appeared.
        self.filename = filename
61 "enumflag": "ENUMFLAG",
62 "typeonly": "TYPEONLY",
63 "manual_print": "MANUAL_PRINT",
64 "manual_endian": "MANUAL_ENDIAN",
65 "dont_trace": "DONT_TRACE",
66 "autoreply": "AUTOREPLY",
67 "autoendian": "AUTOENDIAN",
84 "counters": "COUNTERS",
87 "severity": "SEVERITY",
89 "description": "DESCRIPTION",
92 tokens = ["STRING_LITERAL", "COMMENT", "ID", "NUM"] + list(reserved.values())
94 t_ignore_LINE_COMMENT = "//.*"
107 r"0[xX][0-9a-fA-F]+|-?\d+\.?\d*"
108 base = 16 if t.value.startswith("0x") else 10
110 t.value = float(t.value)
112 t.value = int(t.value, base)
116 r"[a-zA-Z_][a-zA-Z_0-9]*"
117 # Check for reserved words
118 t.type = VPPAPILexer.reserved.get(t.value, "ID")
122 def t_STRING_LITERAL(self, t):
123 r"\"([^\\\n]|(\\.))*?\" "
124 t.value = str(t.value).replace('"', "")
127 # C or C++ comment (ignore)
128 def t_COMMENT(self, t):
129 r"(/\*(.|\n)*?\*/)|(//.*)"
130 t.lexer.lineno += t.value.count("\n")
133 # Error handling rule
134 def t_error(self, t):
136 "Illegal character '{}' ({})"
137 "in {}: line {}".format(
138 t.value[0], hex(ord(t.value[0])), self.filename, t.lexer.lineno
142 # Define a rule so we can track line numbers
143 def t_newline(self, t):
145 t.lexer.lineno += len(t.value)
147 literals = ":{}[];=.,"
149 # A string containing ignored characters (spaces and tabs)
153 def vla_mark_length_field(block):
154 if isinstance(block[-1], Array):
155 lengthfield = block[-1].lengthfield
157 if b.fieldname == lengthfield:
158 b.is_lengthfield = True
161 def vla_is_last_check(name, block):
163 for i, b in enumerate(block):
164 if isinstance(b, Array) and b.vla:
166 if i + 1 < len(block):
168 'VLA field "{}" must be the last field in message "{}"'.format(
172 elif b.fieldtype.startswith("vl_api_"):
173 if global_types[b.fieldtype].vla:
175 if i + 1 < len(block):
177 'VLA field "{}" must be the last '
178 'field in message "{}"'.format(b.fieldname, name)
180 elif b.fieldtype == "string" and b.length == 0:
182 if i + 1 < len(block):
184 'VLA field "{}" must be the last '
185 'field in message "{}"'.format(b.fieldname, name)
    def process(self, result):  # -> Dict
        # Default processing: append this object under its type-name key.
        # NOTE(review): self.type is presumably a class attribute set by each
        # subclass — confirm against the full class definitions.
        result[self.type].append(self)
197 class Service(Processable):
200 def __init__(self, caller, reply, events=None, stream_message=None, stream=False):
204 self.stream_message = stream_message
205 self.events = [] if events is None else events
208 class Typedef(Processable):
211 def __init__(self, name, flags, block):
215 self.crc = str(block).encode()
216 self.manual_print = False
217 self.manual_endian = False
219 if f == "manual_print":
220 self.manual_print = True
221 elif f == "manual_endian":
222 self.manual_endian = True
223 global_type_add(name, self)
225 self.vla = vla_is_last_check(name, block)
226 vla_mark_length_field(self.block)
    def process(self, result):
        # Typedefs are collected under the shared "types" key of the result dict.
        result["types"].append(self)
232 return self.name + str(self.flags) + str(self.block)
235 class Using(Processable):
238 def __init__(self, name, flags, alias):
242 self.manual_print = True
243 self.manual_endian = True
245 self.manual_print = False
246 self.manual_endian = False
248 if f == "manual_print":
249 self.manual_print = True
250 elif f == "manual_endian":
251 self.manual_endian = True
253 if isinstance(alias, Array):
254 a = {"type": alias.fieldtype, "length": alias.length}
256 a = {"type": alias.fieldtype}
262 # self.crc = str(alias).encode()
263 # but to be backwards compatible use the block ([])
265 self.crc = str(self.block).encode()
266 global_type_add(name, self)
    def process(self, result):  # -> Dict
        # Aliases (Using) are grouped with other user-defined types.
        result["types"].append(self)
272 return self.name + str(self.alias)
275 class Union(Processable):
278 def __init__(self, name, flags, block):
279 self.manual_print = False
280 self.manual_endian = False
284 if f == "manual_print":
285 self.manual_print = True
286 elif f == "manual_endian":
287 self.manual_endian = True
290 self.crc = str(block).encode()
291 self.vla = vla_is_last_check(name, block)
293 global_type_add(name, self)
    def process(self, result):
        # Unions are grouped with other user-defined types.
        result["types"].append(self)
299 return str(self.block)
302 class Define(Processable):
305 def __init__(self, name, flags, block, comment=None):
309 self.dont_trace = False
310 self.manual_print = False
311 self.manual_endian = False
312 self.autoreply = False
315 self.comment = comment
317 if f == "dont_trace":
318 self.dont_trace = True
319 elif f == "manual_print":
320 self.manual_print = True
321 elif f == "manual_endian":
322 self.manual_endian = True
323 elif f == "autoreply":
324 self.autoreply = True
325 elif f == "autoendian":
330 if isinstance(b, Option):
331 self.options[b.option] = b.value
334 block = [x for x in block if x not in remove]
336 self.vla = vla_is_last_check(name, block)
337 vla_mark_length_field(self.block)
339 self.crc = str(block).encode()
341 def autoreply_block(self, name, parent):
342 block = [Field("u32", "context"), Field("i32", "retval")]
343 # inherit the parent's options
344 for k, v in parent.options.items():
345 block.append(Option(k, v))
346 return Define(name + "_reply", [], block)
348 def process(self, result): # -> Dict
349 tname = self.__class__.__name__
350 result[tname].append(self)
352 result[tname].append(self.autoreply_block(self.name, self))
355 return self.name + str(self.flags) + str(self.block)
358 class Enum(Processable):
361 def __init__(self, name, block, enumtype="u32"):
363 self.enumtype = enumtype
365 self.manual_print = False
377 block2.append([b["id"], count])
379 if b["option"]["backwards_compatible"]:
383 block3.append([b["id"], count])
386 "Backward compatible enum must "
387 "be last {!r} {!r}".format(name, b["id"])
390 self.crc = str(block3).encode()
391 global_type_add(name, self)
    def process(self, result):
        # Enums are grouped with other user-defined types.
        result["types"].append(self)
397 return self.name + str(self.block)
400 class EnumFlag(Enum):
403 def __init__(self, name, block, enumtype="u32"):
404 super(EnumFlag, self).__init__(name, block, enumtype)
407 if bin(b[1])[2:].count("1") > 1:
409 "%s is not a flag enum. No element in a "
410 "flag enum may have more than a "
411 "single bit set." % self.name
415 class Import(Processable):
419 def __new__(cls, *args, **kwargs):
420 if args[0] not in seen_imports:
421 instance = super().__new__(cls)
422 instance._initialized = False
423 seen_imports[args[0]] = instance
425 return seen_imports[args[0]]
427 def __init__(self, filename, revision):
428 if self._initialized:
430 self.filename = filename
432 parser = VPPAPI(filename=filename, revision=revision)
433 dirlist = dirlist_get()
436 f = os.path.join(dir, filename)
437 if os.path.exists(f):
439 self.result = parser.parse_filename(f, None)
440 self._initialized = True
446 class Option(Processable):
449 def __init__(self, option, value=None):
452 self.crc = str(option).encode()
    def process(self, result):  # -> Dict
        # Unlike other Processables, options are stored as a mapping
        # (option name -> value) rather than appended to a list.
        result[self.type][self.option] = self.value
458 return str(self.option)
    def __getitem__(self, index):
        # Make an Option subscriptable by delegating to its option name.
        # NOTE(review): presumably used where the option is treated as a
        # string/sequence — confirm against callers.
        return self.option[index]
464 class Array(Processable):
467 def __init__(self, fieldtype, name, length, modern_vla=False):
468 self.fieldtype = fieldtype
469 self.fieldname = name
470 self.modern_vla = modern_vla
471 if type(length) is str:
472 self.lengthfield = length
477 self.lengthfield = None
481 return str([self.fieldtype, self.fieldname, self.length, self.lengthfield])
484 class Field(Processable):
487 def __init__(self, fieldtype, name, limit=None):
488 # limit field has been expanded to an options dict.
490 self.fieldtype = fieldtype
491 self.is_lengthfield = False
493 if self.fieldtype == "string":
494 raise ValueError("The string type {!r} is an " "array type ".format(name))
496 if name in keyword.kwlist:
498 "Fieldname {!r} is a python keyword and is not "
499 "accessible via the python API. ".format(name)
501 self.fieldname = name
505 return str([self.fieldtype, self.fieldname])
508 class Counter(Processable):
511 def __init__(self, path, counter):
    def process(self, result):  # -> Dict
        # Counters accumulate under their own dedicated "Counters" key.
        result["Counters"].append(self)
519 class Paths(Processable):
522 def __init__(self, pathset):
526 return "%s(paths=%s)" % (self.__class__.__name__, self.paths)
530 """Coordinates of a syntactic element. Consists of:
533 - (optional) column number, for the Lexer
536 __slots__ = ("file", "line", "column", "__weakref__")
538 def __init__(self, file, line, column=None):
544 str = "%s:%s" % (self.file, self.line)
546 str += ":%s" % self.column
550 class ParseError(Exception):
558 tokens = VPPAPILexer.tokens
560 def __init__(self, filename, logger, revision=None):
561 self.filename = filename
564 self.revision = revision
565 self.last_comment = None
567 def _parse_error(self, msg, coord):
568 raise ParseError("%s: %s" % (coord, msg))
570 def _parse_warning(self, msg, coord):
572 self.logger.warning("%s: %s" % (coord, msg))
    def _coord(self, lineno, column=None):
        # Build a Coord for the file being parsed at the given line
        # (column is optional and only known to the lexer).
        return Coord(file=self.filename, line=lineno, column=column)
577 def _token_coord(self, p, token_idx):
578 """Returns the coordinates for the YaccProduction object 'p' indexed
579 with 'token_idx'. The coordinate includes the 'lineno' and
580 'column'. Both follow the lex semantic, starting from 1.
582 last_cr = p.lexer.lexdata.rfind("\n", 0, p.lexpos(token_idx))
585 column = p.lexpos(token_idx) - (last_cr)
586 return self._coord(p.lineno(token_idx), column)
588 def p_slist(self, p):
    def p_import(self, p):
        """import : IMPORT STRING_LITERAL ';'"""
        # p[2] is the imported filename; forward the git revision being parsed.
        p[0] = Import(p[2], revision=self.revision)
614 def p_path_elements(self, p):
615 """path_elements : path_element
616 | path_elements path_element"""
620 if type(p[1]) is dict:
    def p_path_element(self, p):
        """path_element : STRING_LITERAL STRING_LITERAL ';'"""
        # Two string literals: the counter path followed by the counter name.
        p[0] = {"path": p[1], "counter": p[2]}
629 def p_paths(self, p):
630 """paths : PATHS '{' path_elements '}' ';'"""
    def p_counters(self, p):
        """counters : COUNTERS ID '{' counter_elements '}' ';'"""
        # p[2] is the counter-set name, p[4] the parsed counter elements.
        p[0] = Counter(p[2], p[4])
637 def p_counter_elements(self, p):
638 """counter_elements : counter_element
639 | counter_elements counter_element"""
643 if type(p[1]) is dict:
    def p_counter_element(self, p):
        """counter_element : ID '{' counter_statements '}' ';'"""
        # Merge the element name into the dict of parsed counter statements.
        p[0] = {**{"name": p[1]}, **p[3]}
652 def p_counter_statements(self, p):
653 """counter_statements : counter_statement
654 | counter_statements counter_statement"""
658 p[0] = {**p[1], **p[2]}
660 def p_counter_statement(self, p):
661 """counter_statement : SEVERITY ID ';'
662 | UNITS STRING_LITERAL ';'
663 | DESCRIPTION STRING_LITERAL ';'
667 def p_service(self, p):
668 """service : SERVICE '{' service_statements '}' ';'"""
671 def p_service_statements(self, p):
672 """service_statements : service_statement
673 | service_statements service_statement"""
679 def p_service_statement(self, p):
680 """service_statement : RPC ID RETURNS NULL ';'
681 | RPC ID RETURNS ID ';'
682 | RPC ID RETURNS STREAM ID ';'
683 | RPC ID RETURNS ID EVENTS event_list ';'"""
685 # Verify that caller and reply differ
687 "Reply ID ({}) should not be equal to Caller ID".format(p[2]),
688 self._token_coord(p, 1),
691 p[0] = Service(p[2], p[4], p[6])
693 p[0] = Service(p[2], p[5], stream=True)
695 p[0] = Service(p[2], p[4])
    def p_service_statement2(self, p):
        """service_statement : RPC ID RETURNS ID STREAM ID ';'"""
        # Streaming RPC with an explicit stream message type (p[6]).
        p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)
701 def p_event_list(self, p):
702 """event_list : events
703 | event_list events"""
709 def p_event(self, p):
715 """enum : ENUM ID '{' enum_statements '}' ';'"""
716 p[0] = Enum(p[2], p[4])
718 def p_enum_type(self, p):
719 """enum : ENUM ID ':' enum_size '{' enum_statements '}' ';'"""
721 p[0] = Enum(p[2], p[6], enumtype=p[4])
723 p[0] = Enum(p[2], p[4])
    def p_enumflag(self, p):
        """enumflag : ENUMFLAG ID '{' enum_statements '}' ';'"""
        # No explicit size given: EnumFlag applies its default underlying type.
        p[0] = EnumFlag(p[2], p[4])
729 def p_enumflag_type(self, p):
730 """enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';'""" # noqa : E502
732 p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
734 p[0] = EnumFlag(p[2], p[4])
736 def p_enum_size(self, p):
745 def p_enumflag_size(self, p):
746 """enumflag_size : U8
751 def p_define(self, p):
752 """define : DEFINE ID '{' block_statements_opt '}' ';'"""
754 p[0] = Define(p[2], [], p[4], self.last_comment)
755 self.last_comment = None
757 def p_define_flist(self, p):
758 """define : flist DEFINE ID '{' block_statements_opt '}' ';'"""
760 if "typeonly" in p[1]:
762 "legacy typedef. use typedef: {} {}[{}];".format(p[1], p[2], p[4]),
763 self._token_coord(p, 1),
766 p[0] = Define(p[3], p[1], p[5], self.last_comment)
767 self.last_comment = None
769 def p_flist(self, p):
778 """flag : MANUAL_PRINT
    def p_typedef(self, p):
        """typedef : TYPEDEF ID '{' block_statements_opt '}' ';'"""
        # Plain typedef with no flag list.
        p[0] = Typedef(p[2], [], p[4])
    def p_typedef_flist(self, p):
        """typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';'"""
        # Typedef preceded by a flag list (e.g. manual_print / manual_endian).
        p[0] = Typedef(p[3], p[1], p[5])
    def p_typedef_alias(self, p):
        """typedef : TYPEDEF declaration"""
        # Alias form "typedef <type> <name>;" — becomes a Using named
        # after the declared field.
        p[0] = Using(p[2].fieldname, [], p[2])
    def p_typedef_alias_flist(self, p):
        """typedef : flist TYPEDEF declaration"""
        # Alias form with a preceding flag list.
        p[0] = Using(p[3].fieldname, p[1], p[3])
804 def p_block_statements_opt(self, p):
805 """block_statements_opt : block_statements"""
808 def p_block_statements(self, p):
809 """block_statements : block_statement
810 | block_statements block_statement"""
816 def p_block_statement(self, p):
817 """block_statement : declaration
821 def p_enum_statements(self, p):
822 """enum_statements : enum_statement
823 | enum_statements enum_statement"""
829 def p_enum_statement(self, p):
830 """enum_statement : ID '=' NUM ','
832 | ID '[' field_options ']' ','
833 | ID '=' NUM '[' field_options ']' ','"""
837 p[0] = {"id": p[1], "value": p[3]}
839 p[0] = {"id": p[1], "option": p[3]}
841 p[0] = {"id": p[1], "value": p[3], "option": p[5]}
843 self._parse_error("ERROR", self._token_coord(p, 1))
845 def p_field_options(self, p):
846 """field_options : field_option
847 | field_options field_option"""
851 p[0] = {**p[1], **p[2]}
853 def p_field_option(self, p):
855 | ID '=' assignee ','
864 def p_variable_name(self, p):
865 """variable_name : ID
    def p_comment(self, p):
        """comment : COMMENT"""
        # Remember the most recent comment so a following define can attach
        # it as its documentation.
        self.last_comment = p[1]
879 def p_declaration(self, p):
880 """declaration : type_specifier variable_name ';'
881 | type_specifier variable_name '[' field_options ']' ';'
884 p[0] = Field(p[1], p[2], p[4])
886 p[0] = Field(p[1], p[2])
888 self._parse_error("ERROR", self._token_coord(p, 1))
889 self.fields.append(p[2])
    def p_declaration_array_vla(self, p):
        """declaration : type_specifier variable_name '[' ']' ';'"""
        # "type name[];" is the modern variable-length-array syntax
        # (length 0, flagged modern_vla).
        p[0] = Array(p[1], p[2], 0, modern_vla=True)
895 def p_declaration_array(self, p):
896 """declaration : type_specifier variable_name '[' NUM ']' ';'
897 | type_specifier variable_name '[' ID ']' ';'"""
900 return self._parse_error(
901 "array: %s" % p.value, self._coord(lineno=p.lineno)
904 # Make this error later
905 if type(p[4]) is int and p[4] == 0:
906 # XXX: Line number is wrong
908 "Old Style VLA: {} {}[{}];".format(p[1], p[2], p[4]),
909 self._token_coord(p, 1),
912 if type(p[4]) is str and p[4] not in self.fields:
913 # Verify that length field exists
915 "Missing length field: {} {}[{}];".format(p[1], p[2], p[4]),
916 self._token_coord(p, 1),
918 p[0] = Array(p[1], p[2], p[4])
920 def p_option(self, p):
921 """option : OPTION ID '=' assignee ';'
926 p[0] = Option(p[2], p[4])
928 def p_assignee(self, p):
935 def p_type_specifier(self, p):
936 """type_specifier : U8
949 # Do a second pass later to verify that user defined types are defined
950 def p_typedef_specifier(self, p):
951 """type_specifier : ID"""
952 if p[1] not in global_types:
954 "Undefined type: {}".format(p[1]), self._token_coord(p, 1)
    def p_union(self, p):
        """union : UNION ID '{' block_statements_opt '}' ';'"""
        # Union with no flag list.
        p[0] = Union(p[2], [], p[4])
    def p_union_flist(self, p):
        """union : flist UNION ID '{' block_statements_opt '}' ';'"""
        # Union preceded by a flag list.
        p[0] = Union(p[3], p[1], p[5])
966 # Error rule for syntax errors
967 def p_error(self, p):
969 if p.type == "COMMENT":
972 self._parse_error("before: %s" % p.value, self._coord(lineno=p.lineno))
974 self._parse_error("At end of input", self.filename)
976 def build(self, **kwargs):
977 self.parser = yacc.yacc(module=self, **kwargs)
981 def __init__(self, debug=False, filename="", logger=None, revision=None):
982 self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
983 self.parser = VPPAPIParser(filename, logger, revision=revision)
984 self.parser.build(write_tables=False, debug=debug)
986 self.revision = revision
987 self.filename = filename
    def parse_string(self, code, debug=0, lineno=1):
        # Reset the lexer's line counter so diagnostics use the caller's
        # numbering (e.g. when parsing a file from an offset).
        self.lexer.lineno = lineno
        return self.parser.parser.parse(code, lexer=self.lexer, debug=debug)
993 def parse_fd(self, fd, debug=0):
995 return self.parse_string(data, debug=debug)
997 def parse_filename(self, filename, debug=0):
999 git_show = "git show {}:{}".format(self.revision, filename)
1000 proc = Popen(git_show.split(), stdout=PIPE, encoding="utf-8")
1002 data, errs = proc.communicate()
1003 if proc.returncode != 0:
1005 "File not found: {}:{}".format(self.revision, filename),
1009 return self.parse_string(data, debug=debug)
1014 with open(filename, encoding="utf-8") as fd:
1015 return self.parse_fd(fd, None)
1016 except FileNotFoundError:
1017 print("File not found: {}".format(filename), file=sys.stderr)
1020 def process(self, objs):
1032 crc = binascii.crc32(o.crc, crc) & 0xFFFFFFFF
1033 except AttributeError:
1038 if isinstance(o2, Service):
1043 msgs = {d.name: d for d in s["Define"]}
1044 svcs = {s.caller: s for s in s["Service"]}
1045 replies = {s.reply: s for s in s["Service"]}
1050 for service in svcs:
1051 if service not in msgs:
1053 "Service definition refers to unknown message"
1054 " definition: {}".format(service)
1056 if svcs[service].reply != "null" and svcs[service].reply not in msgs:
1058 "Service definition refers to unknown message"
1059 " definition in reply: {}".format(svcs[service].reply)
1061 if service in replies:
1063 "Service definition refers to message"
1064 " marked as reply: {}".format(service)
1066 for event in svcs[service].events:
1067 if event not in msgs:
1069 "Service definition refers to unknown "
1070 "event: {} in message: {}".format(event, service)
1072 seen_services[event] = True
1074 # Create services implicitly
1076 if d in seen_services:
1078 if d.endswith("_reply"):
1081 if d[:-6] not in msgs:
1082 raise ValueError("{} missing calling message".format(d))
1084 if d.endswith("_dump"):
1087 if d[:-5] + "_details" in msgs:
1088 s["Service"].append(Service(d, d[:-5] + "_details", stream=True))
1090 raise ValueError("{} missing details message".format(d))
1093 if d.endswith("_details"):
1094 if d[:-8] + "_get" in msgs:
1095 if d[:-8] + "_get" in svcs:
1098 "{} should be in a stream service".format(d[:-8] + "_get")
1100 if d[:-8] + "_dump" in msgs:
1102 raise ValueError("{} missing dump or get message".format(d))
1106 if d + "_reply" in msgs:
1107 s["Service"].append(Service(d, d + "_reply"))
1110 "{} missing reply message ({}) or service definition".format(
1117 def process_imports(self, objs, in_import, result): # -> List
1119 # Only allow the following object types from imported file
1120 if in_import and not isinstance(o, (Enum, Import, Typedef, Union, Using)):
1122 if isinstance(o, Import):
1124 result = self.process_imports(o.result, True, result)
1130 # Add message ids to each message.
1133 o.block.insert(0, Field("u16", "_vl_msg_id"))
1140 def dirlist_add(dirs):
1143 dirlist = dirlist + dirs
1150 def foldup_blocks(block, crc):
1152 # Look up CRC in user defined types
1153 if b.fieldtype.startswith("vl_api_"):
1155 t = global_types[b.fieldtype]
1157 crc = binascii.crc32(t.crc, crc) & 0xFFFFFFFF
1158 crc = foldup_blocks(t.block, crc)
1159 except AttributeError:
1166 f.crc = foldup_blocks(f.block, binascii.crc32(f.crc) & 0xFFFFFFFF)
1182 global_types.clear()
1183 seen_imports.clear()
1185 dirlist_add(includedir)
1187 sys.excepthook = exception_handler
1191 filename = show_name[0]
1193 filename = input_file
1198 logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
1200 logging.basicConfig()
1203 # Generate representation
1205 from importlib.machinery import SourceFileLoader
1211 cand.append(os.path.dirname(os.path.realpath(__file__)))
1212 cand.append(os.path.dirname(os.path.realpath(__file__)) + "/../share/vpp/")
1215 if os.path.isfile("{}vppapigen_{}.py".format(c, output_module.lower())):
1219 pluginpath = pluginpath + "/"
1220 if pluginpath == "":
1221 log.exception("Output plugin not found")
1223 module_path = "{}vppapigen_{}.py".format(pluginpath, output_module.lower())
1226 plugin = SourceFileLoader(output_module, module_path).load_module()
1227 except Exception as err:
1228 log.exception("Error importing output plugin: %s, %s", module_path, err)
1231 parser = VPPAPI(debug=debug, filename=filename, logger=log, revision=git_revision)
1235 parsed_objects = parser.parse_fd(sys.stdin, log)
1237 parsed_objects = parser.parse_filename(input_file, log)
1238 except ParseError as e:
1239 print("Parse error: ", e, file=sys.stderr)
1242 # Build a list of objects. Hash of lists.
1245 # if the variable is not set in the plugin, assume it to be false.
1247 plugin.process_imports
1248 except AttributeError:
1249 plugin.process_imports = False
1251 if plugin.process_imports:
1252 result = parser.process_imports(parsed_objects, False, result)
1253 s = parser.process(result)
1255 s = parser.process(parsed_objects)
1256 imports = parser.process_imports(parsed_objects, False, result)
1257 s["imported"] = parser.process(imports)
1260 s["Define"] = add_msg_id(s["Define"])
1263 foldup_crcs(s["Define"])
1270 pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
1271 for t in s["Define"]:
1272 pp.pprint([t.name, t.flags, t.block])
1273 for t in s["types"]:
1274 pp.pprint([t.name, t.block])
1276 result = plugin.run(outputdir, filename, s)
1278 if isinstance(output, str):
1279 with open(output, "w", encoding="UTF-8") as f:
1280 print(result, file=f)
1282 print(result, file=output)
1284 log.exception("Running plugin failed: %s %s", filename, result)
def run_kw_vppapigen(kwargs):
    # Thin adapter for callers that hold the arguments as a single mapping.
    return run_vppapigen(**kwargs)
1297 if sys.version_info < (
1302 "vppapigen requires a supported version of python. "
1303 "Please use version 3.5 or greater. "
1309 cliparser = argparse.ArgumentParser(description="VPP API generator")
1310 cliparser.add_argument("--pluginpath", default="")
1311 cliparser.add_argument("--includedir", action="append")
1312 cliparser.add_argument("--outputdir", action="store")
1313 cliparser.add_argument("--input")
1314 cliparser.add_argument(
1317 type=argparse.FileType("w", encoding="UTF-8"),
1321 cliparser.add_argument("output_module", nargs="?", default="C")
1322 cliparser.add_argument("--debug", action="store_true")
1323 cliparser.add_argument("--show-name", nargs=1)
1324 cliparser.add_argument(
1325 "--git-revision", help="Git revision to use for opening files"
1327 args = cliparser.parse_args()
1329 return run_vppapigen(
1330 includedir=args.includedir,
1332 outputdir=args.outputdir,
1333 show_name=args.show_name,
1334 input_file=args.input,
1335 output_module=args.output_module,
1336 pluginpath=args.pluginpath,
1337 git_revision=args.git_revision,
1342 if __name__ == "__main__":