9 from subprocess import Popen, PIPE
11 import ply.yacc as yacc
13 assert sys.version_info >= (3, 5), "Not supported Python version: {}".format(
16 log = logging.getLogger("vppapigen")
# Don't write .pyc bytecode cache files, so we never leave temporary files around
19 sys.dont_write_bytecode = True
25 # Global dictionary of new types (including enums)
def global_type_add(name, obj):
    """Register *obj* in the global type dictionary under its wire name.

    API types are exposed on the wire as ``vl_api_<name>_t``; redefining
    an already-registered type is an error.
    """
    type_name = "vl_api_{}_t".format(name)
    if type_name in global_types:
        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(name, obj))
    global_types[type_name] = obj
39 # All your trace are belong to us!
def exception_handler(exception_type, exception, traceback):
    """Top-level excepthook: print a terse one-line error instead of a full traceback."""
    print("{}: {}".format(exception_type.__name__, exception))
    def __init__(self, filename):
        # Remember the source filename so lexer error messages can reference it.
        self.filename = filename
61 "enumflag": "ENUMFLAG",
62 "typeonly": "TYPEONLY",
63 "manual_print": "MANUAL_PRINT",
64 "manual_endian": "MANUAL_ENDIAN",
65 "dont_trace": "DONT_TRACE",
66 "autoreply": "AUTOREPLY",
67 "autoendian": "AUTOENDIAN",
84 "counters": "COUNTERS",
87 "severity": "SEVERITY",
89 "description": "DESCRIPTION",
92 tokens = ["STRING_LITERAL", "ID", "NUM"] + list(reserved.values())
94 t_ignore_LINE_COMMENT = "//.*"
107 r"0[xX][0-9a-fA-F]+|-?\d+\.?\d*"
108 base = 16 if t.value.startswith("0x") else 10
110 t.value = float(t.value)
112 t.value = int(t.value, base)
116 r"[a-zA-Z_][a-zA-Z_0-9]*"
117 # Check for reserved words
118 t.type = VPPAPILexer.reserved.get(t.value, "ID")
    def t_STRING_LITERAL(self, t):
        r"\"([^\\\n]|(\\.))*?\" "
        # NOTE: the raw string above is the PLY token regex, not documentation.
        # Strip the surrounding (and any embedded) double quotes from the value.
        t.value = str(t.value).replace('"', "")
    # C or C++ comment (ignore)
    def t_comment(self, t):
        r"(/\*(.|\n)*?\*/)|(//.*)"
        # Keep line numbers accurate across multi-line /* ... */ comments.
        t.lexer.lineno += t.value.count("\n")
132 # Error handling rule
133 def t_error(self, t):
135 "Illegal character '{}' ({})"
136 "in {}: line {}".format(
137 t.value[0], hex(ord(t.value[0])), self.filename, t.lexer.lineno
141 # Define a rule so we can track line numbers
142 def t_newline(self, t):
144 t.lexer.lineno += len(t.value)
146 literals = ":{}[];=.,"
148 # A string containing ignored characters (spaces and tabs)
152 def vla_mark_length_field(block):
153 if isinstance(block[-1], Array):
154 lengthfield = block[-1].lengthfield
156 if b.fieldname == lengthfield:
157 b.is_lengthfield = True
160 def vla_is_last_check(name, block):
162 for i, b in enumerate(block):
163 if isinstance(b, Array) and b.vla:
165 if i + 1 < len(block):
167 'VLA field "{}" must be the last field in message "{}"'.format(
171 elif b.fieldtype.startswith("vl_api_"):
172 if global_types[b.fieldtype].vla:
174 if i + 1 < len(block):
176 'VLA field "{}" must be the last '
177 'field in message "{}"'.format(b.fieldname, name)
179 elif b.fieldtype == "string" and b.length == 0:
181 if i + 1 < len(block):
183 'VLA field "{}" must be the last '
184 'field in message "{}"'.format(b.fieldname, name)
    def process(self, result):  # -> Dict
        # Default processing: file this object into the result bucket named by its type.
        result[self.type].append(self)
196 class Service(Processable):
199 def __init__(self, caller, reply, events=None, stream_message=None, stream=False):
203 self.stream_message = stream_message
204 self.events = [] if events is None else events
207 class Typedef(Processable):
210 def __init__(self, name, flags, block):
214 self.crc = str(block).encode()
215 self.manual_print = False
216 self.manual_endian = False
218 if f == "manual_print":
219 self.manual_print = True
220 elif f == "manual_endian":
221 self.manual_endian = True
222 global_type_add(name, self)
224 self.vla = vla_is_last_check(name, block)
225 vla_mark_length_field(self.block)
227 def process(self, result):
228 result["types"].append(self)
231 return self.name + str(self.flags) + str(self.block)
234 class Using(Processable):
237 def __init__(self, name, flags, alias):
241 self.manual_print = True
242 self.manual_endian = True
244 self.manual_print = False
245 self.manual_endian = False
247 if f == "manual_print":
248 self.manual_print = True
249 elif f == "manual_endian":
250 self.manual_endian = True
252 if isinstance(alias, Array):
253 a = {"type": alias.fieldtype, "length": alias.length}
255 a = {"type": alias.fieldtype}
261 # self.crc = str(alias).encode()
262 # but to be backwards compatible use the block ([])
264 self.crc = str(self.block).encode()
265 global_type_add(name, self)
267 def process(self, result): # -> Dict
268 result["types"].append(self)
271 return self.name + str(self.alias)
274 class Union(Processable):
277 def __init__(self, name, flags, block):
278 self.manual_print = False
279 self.manual_endian = False
283 if f == "manual_print":
284 self.manual_print = True
285 elif f == "manual_endian":
286 self.manual_endian = True
289 self.crc = str(block).encode()
290 self.vla = vla_is_last_check(name, block)
292 global_type_add(name, self)
294 def process(self, result):
295 result["types"].append(self)
298 return str(self.block)
301 class Define(Processable):
304 def __init__(self, name, flags, block):
308 self.dont_trace = False
309 self.manual_print = False
310 self.manual_endian = False
311 self.autoreply = False
315 if f == "dont_trace":
316 self.dont_trace = True
317 elif f == "manual_print":
318 self.manual_print = True
319 elif f == "manual_endian":
320 self.manual_endian = True
321 elif f == "autoreply":
322 self.autoreply = True
323 elif f == "autoendian":
328 if isinstance(b, Option):
329 self.options[b.option] = b.value
332 block = [x for x in block if x not in remove]
334 self.vla = vla_is_last_check(name, block)
335 vla_mark_length_field(self.block)
337 self.crc = str(block).encode()
339 def autoreply_block(self, name, parent):
340 block = [Field("u32", "context"), Field("i32", "retval")]
341 # inherit the parent's options
342 for k, v in parent.options.items():
343 block.append(Option(k, v))
344 return Define(name + "_reply", [], block)
346 def process(self, result): # -> Dict
347 tname = self.__class__.__name__
348 result[tname].append(self)
350 result[tname].append(self.autoreply_block(self.name, self))
353 return self.name + str(self.flags) + str(self.block)
356 class Enum(Processable):
359 def __init__(self, name, block, enumtype="u32"):
361 self.enumtype = enumtype
363 self.manual_print = False
375 block2.append([b["id"], count])
377 if b["option"]["backwards_compatible"]:
381 block3.append([b["id"], count])
384 "Backward compatible enum must "
385 "be last {!r} {!r}".format(name, b["id"])
388 self.crc = str(block3).encode()
389 global_type_add(name, self)
391 def process(self, result):
392 result["types"].append(self)
395 return self.name + str(self.block)
398 class EnumFlag(Enum):
401 def __init__(self, name, block, enumtype="u32"):
402 super(EnumFlag, self).__init__(name, block, enumtype)
405 if bin(b[1])[2:].count("1") > 1:
407 "%s is not a flag enum. No element in a "
408 "flag enum may have more than a "
409 "single bit set." % self.name
413 class Import(Processable):
417 def __new__(cls, *args, **kwargs):
418 if args[0] not in seen_imports:
419 instance = super().__new__(cls)
420 instance._initialized = False
421 seen_imports[args[0]] = instance
423 return seen_imports[args[0]]
425 def __init__(self, filename, revision):
426 if self._initialized:
428 self.filename = filename
430 parser = VPPAPI(filename=filename, revision=revision)
431 dirlist = dirlist_get()
434 f = os.path.join(dir, filename)
435 if os.path.exists(f):
437 self.result = parser.parse_filename(f, None)
438 self._initialized = True
444 class Option(Processable):
447 def __init__(self, option, value=None):
450 self.crc = str(option).encode()
    def process(self, result):  # -> Dict
        # Options are stored as key/value pairs rather than appended to a list.
        result[self.type][self.option] = self.value
456 return str(self.option)
    def __getitem__(self, index):
        # Indexing an Option delegates to its underlying option name.
        return self.option[index]
462 class Array(Processable):
465 def __init__(self, fieldtype, name, length, modern_vla=False):
466 self.fieldtype = fieldtype
467 self.fieldname = name
468 self.modern_vla = modern_vla
469 if type(length) is str:
470 self.lengthfield = length
475 self.lengthfield = None
479 return str([self.fieldtype, self.fieldname, self.length, self.lengthfield])
482 class Field(Processable):
485 def __init__(self, fieldtype, name, limit=None):
486 # limit field has been expanded to an options dict.
488 self.fieldtype = fieldtype
489 self.is_lengthfield = False
491 if self.fieldtype == "string":
492 raise ValueError("The string type {!r} is an " "array type ".format(name))
494 if name in keyword.kwlist:
496 "Fieldname {!r} is a python keyword and is not "
497 "accessible via the python API. ".format(name)
499 self.fieldname = name
503 return str([self.fieldtype, self.fieldname])
506 class Counter(Processable):
509 def __init__(self, path, counter):
    def process(self, result):  # -> Dict
        # Counters accumulate under a dedicated "Counters" result bucket.
        result["Counters"].append(self)
517 class Paths(Processable):
520 def __init__(self, pathset):
524 return "%s(paths=%s)" % (self.__class__.__name__, self.paths)
528 """Coordinates of a syntactic element. Consists of:
531 - (optional) column number, for the Lexer
534 __slots__ = ("file", "line", "column", "__weakref__")
536 def __init__(self, file, line, column=None):
542 str = "%s:%s" % (self.file, self.line)
544 str += ":%s" % self.column
548 class ParseError(Exception):
556 tokens = VPPAPILexer.tokens
558 def __init__(self, filename, logger, revision=None):
559 self.filename = filename
562 self.revision = revision
    def _parse_error(self, msg, coord):
        # Abort parsing with a location-prefixed error message.
        raise ParseError("%s: %s" % (coord, msg))
567 def _parse_warning(self, msg, coord):
569 self.logger.warning("%s: %s" % (coord, msg))
    def _coord(self, lineno, column=None):
        # Build a Coord for the file currently being parsed.
        return Coord(file=self.filename, line=lineno, column=column)
574 def _token_coord(self, p, token_idx):
575 """Returns the coordinates for the YaccProduction object 'p' indexed
576 with 'token_idx'. The coordinate includes the 'lineno' and
577 'column'. Both follow the lex semantic, starting from 1.
579 last_cr = p.lexer.lexdata.rfind("\n", 0, p.lexpos(token_idx))
582 column = p.lexpos(token_idx) - (last_cr)
583 return self._coord(p.lineno(token_idx), column)
585 def p_slist(self, p):
    def p_import(self, p):
        """import : IMPORT STRING_LITERAL ';'"""
        # Import de-duplicates per filename via its __new__; pass the git revision down.
        p[0] = Import(p[2], revision=self.revision)
610 def p_path_elements(self, p):
611 """path_elements : path_element
612 | path_elements path_element"""
616 if type(p[1]) is dict:
    def p_path_element(self, p):
        """path_element : STRING_LITERAL STRING_LITERAL ';'"""
        # A path element pairs a path string with its counter name.
        p[0] = {"path": p[1], "counter": p[2]}
625 def p_paths(self, p):
626 """paths : PATHS '{' path_elements '}' ';'"""
    def p_counters(self, p):
        """counters : COUNTERS ID '{' counter_elements '}' ';'"""
        # Wrap the named counter element list in a Counter object.
        p[0] = Counter(p[2], p[4])
633 def p_counter_elements(self, p):
634 """counter_elements : counter_element
635 | counter_elements counter_element"""
639 if type(p[1]) is dict:
644 def p_counter_element(self, p):
645 """counter_element : ID '{' counter_statements '}' ';'"""
646 p[0] = {**{"name": p[1]}, **p[3]}
648 def p_counter_statements(self, p):
649 """counter_statements : counter_statement
650 | counter_statements counter_statement"""
654 p[0] = {**p[1], **p[2]}
656 def p_counter_statement(self, p):
657 """counter_statement : SEVERITY ID ';'
658 | UNITS STRING_LITERAL ';'
659 | DESCRIPTION STRING_LITERAL ';'
663 def p_service(self, p):
664 """service : SERVICE '{' service_statements '}' ';'"""
667 def p_service_statements(self, p):
668 """service_statements : service_statement
669 | service_statements service_statement"""
675 def p_service_statement(self, p):
676 """service_statement : RPC ID RETURNS NULL ';'
677 | RPC ID RETURNS ID ';'
678 | RPC ID RETURNS STREAM ID ';'
679 | RPC ID RETURNS ID EVENTS event_list ';'"""
681 # Verify that caller and reply differ
683 "Reply ID ({}) should not be equal to Caller ID".format(p[2]),
684 self._token_coord(p, 1),
687 p[0] = Service(p[2], p[4], p[6])
689 p[0] = Service(p[2], p[5], stream=True)
691 p[0] = Service(p[2], p[4])
    def p_service_statement2(self, p):
        """service_statement : RPC ID RETURNS ID STREAM ID ';'"""
        # RPC with a reply message followed by a stream of a different message type.
        p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)
697 def p_event_list(self, p):
698 """event_list : events
699 | event_list events"""
705 def p_event(self, p):
711 """enum : ENUM ID '{' enum_statements '}' ';'"""
712 p[0] = Enum(p[2], p[4])
714 def p_enum_type(self, p):
715 """enum : ENUM ID ':' enum_size '{' enum_statements '}' ';'"""
717 p[0] = Enum(p[2], p[6], enumtype=p[4])
719 p[0] = Enum(p[2], p[4])
    def p_enumflag(self, p):
        """enumflag : ENUMFLAG ID '{' enum_statements '}' ';'"""
        # No explicit size given; EnumFlag defaults its backing type (u32).
        p[0] = EnumFlag(p[2], p[4])
725 def p_enumflag_type(self, p):
726 """enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';'""" # noqa : E502
728 p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
730 p[0] = EnumFlag(p[2], p[4])
732 def p_enum_size(self, p):
741 def p_enumflag_size(self, p):
742 """enumflag_size : U8
747 def p_define(self, p):
748 """define : DEFINE ID '{' block_statements_opt '}' ';'"""
750 p[0] = Define(p[2], [], p[4])
752 def p_define_flist(self, p):
753 """define : flist DEFINE ID '{' block_statements_opt '}' ';'"""
755 if "typeonly" in p[1]:
757 "legacy typedef. use typedef: {} {}[{}];".format(p[1], p[2], p[4]),
758 self._token_coord(p, 1),
761 p[0] = Define(p[3], p[1], p[5])
763 def p_flist(self, p):
772 """flag : MANUAL_PRINT
    def p_typedef(self, p):
        """typedef : TYPEDEF ID '{' block_statements_opt '}' ';'"""
        # Structured typedef with no flags.
        p[0] = Typedef(p[2], [], p[4])
    def p_typedef_flist(self, p):
        """typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';'"""
        # Structured typedef preceded by a flag list (e.g. manual_print).
        p[0] = Typedef(p[3], p[1], p[5])
    def p_typedef_alias(self, p):
        """typedef : TYPEDEF declaration"""
        # A typedef of a plain declaration becomes a Using (type alias).
        p[0] = Using(p[2].fieldname, [], p[2])
    def p_typedef_alias_flist(self, p):
        """typedef : flist TYPEDEF declaration"""
        # Type alias preceded by a flag list.
        p[0] = Using(p[3].fieldname, p[1], p[3])
798 def p_block_statements_opt(self, p):
799 """block_statements_opt : block_statements"""
802 def p_block_statements(self, p):
803 """block_statements : block_statement
804 | block_statements block_statement"""
810 def p_block_statement(self, p):
811 """block_statement : declaration
815 def p_enum_statements(self, p):
816 """enum_statements : enum_statement
817 | enum_statements enum_statement"""
823 def p_enum_statement(self, p):
824 """enum_statement : ID '=' NUM ','
826 | ID '[' field_options ']' ','
827 | ID '=' NUM '[' field_options ']' ','"""
831 p[0] = {"id": p[1], "value": p[3]}
833 p[0] = {"id": p[1], "option": p[3]}
835 p[0] = {"id": p[1], "value": p[3], "option": p[5]}
837 self._parse_error("ERROR", self._token_coord(p, 1))
839 def p_field_options(self, p):
840 """field_options : field_option
841 | field_options field_option"""
845 p[0] = {**p[1], **p[2]}
847 def p_field_option(self, p):
849 | ID '=' assignee ','
858 def p_variable_name(self, p):
859 """variable_name : ID
868 def p_declaration(self, p):
869 """declaration : type_specifier variable_name ';'
870 | type_specifier variable_name '[' field_options ']' ';'
873 p[0] = Field(p[1], p[2], p[4])
875 p[0] = Field(p[1], p[2])
877 self._parse_error("ERROR", self._token_coord(p, 1))
878 self.fields.append(p[2])
    def p_declaration_array_vla(self, p):
        """declaration : type_specifier variable_name '[' ']' ';'"""
        # Empty brackets denote a modern variable-length array; length 0 is a placeholder.
        p[0] = Array(p[1], p[2], 0, modern_vla=True)
884 def p_declaration_array(self, p):
885 """declaration : type_specifier variable_name '[' NUM ']' ';'
886 | type_specifier variable_name '[' ID ']' ';'"""
889 return self._parse_error(
890 "array: %s" % p.value, self._coord(lineno=p.lineno)
893 # Make this error later
894 if type(p[4]) is int and p[4] == 0:
895 # XXX: Line number is wrong
897 "Old Style VLA: {} {}[{}];".format(p[1], p[2], p[4]),
898 self._token_coord(p, 1),
901 if type(p[4]) is str and p[4] not in self.fields:
902 # Verify that length field exists
904 "Missing length field: {} {}[{}];".format(p[1], p[2], p[4]),
905 self._token_coord(p, 1),
907 p[0] = Array(p[1], p[2], p[4])
909 def p_option(self, p):
910 """option : OPTION ID '=' assignee ';'
915 p[0] = Option(p[2], p[4])
917 def p_assignee(self, p):
924 def p_type_specifier(self, p):
925 """type_specifier : U8
938 # Do a second pass later to verify that user defined types are defined
939 def p_typedef_specifier(self, p):
940 """type_specifier : ID"""
941 if p[1] not in global_types:
943 "Undefined type: {}".format(p[1]), self._token_coord(p, 1)
    def p_union(self, p):
        """union : UNION ID '{' block_statements_opt '}' ';'"""
        # Union with no flags.
        p[0] = Union(p[2], [], p[4])
    def p_union_flist(self, p):
        """union : flist UNION ID '{' block_statements_opt '}' ';'"""
        # Union preceded by a flag list.
        p[0] = Union(p[3], p[1], p[5])
955 # Error rule for syntax errors
956 def p_error(self, p):
958 self._parse_error("before: %s" % p.value, self._coord(lineno=p.lineno))
960 self._parse_error("At end of input", self.filename)
964 def __init__(self, debug=False, filename="", logger=None, revision=None):
965 self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
966 self.parser = yacc.yacc(
967 module=VPPAPIParser(filename, logger, revision=revision),
972 self.revision = revision
973 self.filename = filename
    def parse_string(self, code, debug=0, lineno=1):
        # Reset the lexer's line counter so reported coordinates start at `lineno`.
        self.lexer.lineno = lineno
        return self.parser.parse(code, lexer=self.lexer, debug=debug)
979 def parse_fd(self, fd, debug=0):
981 return self.parse_string(data, debug=debug)
983 def parse_filename(self, filename, debug=0):
985 git_show = "git show {}:{}".format(self.revision, filename)
986 proc = Popen(git_show.split(), stdout=PIPE, encoding="utf-8")
988 data, errs = proc.communicate()
989 if proc.returncode != 0:
991 "File not found: {}:{}".format(self.revision, filename),
995 return self.parse_string(data, debug=debug)
1000 with open(filename, encoding="utf-8") as fd:
1001 return self.parse_fd(fd, None)
1002 except FileNotFoundError:
1003 print("File not found: {}".format(filename), file=sys.stderr)
1006 def process(self, objs):
1018 crc = binascii.crc32(o.crc, crc) & 0xFFFFFFFF
1019 except AttributeError:
1024 if isinstance(o2, Service):
1029 msgs = {d.name: d for d in s["Define"]}
1030 svcs = {s.caller: s for s in s["Service"]}
1031 replies = {s.reply: s for s in s["Service"]}
1036 for service in svcs:
1037 if service not in msgs:
1039 "Service definition refers to unknown message"
1040 " definition: {}".format(service)
1042 if svcs[service].reply != "null" and svcs[service].reply not in msgs:
1044 "Service definition refers to unknown message"
1045 " definition in reply: {}".format(svcs[service].reply)
1047 if service in replies:
1049 "Service definition refers to message"
1050 " marked as reply: {}".format(service)
1052 for event in svcs[service].events:
1053 if event not in msgs:
1055 "Service definition refers to unknown "
1056 "event: {} in message: {}".format(event, service)
1058 seen_services[event] = True
1060 # Create services implicitly
1062 if d in seen_services:
1064 if d.endswith("_reply"):
1067 if d[:-6] not in msgs:
1068 raise ValueError("{} missing calling message".format(d))
1070 if d.endswith("_dump"):
1073 if d[:-5] + "_details" in msgs:
1074 s["Service"].append(Service(d, d[:-5] + "_details", stream=True))
1076 raise ValueError("{} missing details message".format(d))
1079 if d.endswith("_details"):
1080 if d[:-8] + "_get" in msgs:
1081 if d[:-8] + "_get" in svcs:
1084 "{} should be in a stream service".format(d[:-8] + "_get")
1086 if d[:-8] + "_dump" in msgs:
1088 raise ValueError("{} missing dump or get message".format(d))
1092 if d + "_reply" in msgs:
1093 s["Service"].append(Service(d, d + "_reply"))
1096 "{} missing reply message ({}) or service definition".format(
1103 def process_imports(self, objs, in_import, result): # -> List
1105 # Only allow the following object types from imported file
1106 if in_import and not isinstance(o, (Enum, Import, Typedef, Union, Using)):
1108 if isinstance(o, Import):
1110 result = self.process_imports(o.result, True, result)
1116 # Add message ids to each message.
1119 o.block.insert(0, Field("u16", "_vl_msg_id"))
1126 def dirlist_add(dirs):
1129 dirlist = dirlist + dirs
1136 def foldup_blocks(block, crc):
1138 # Look up CRC in user defined types
1139 if b.fieldtype.startswith("vl_api_"):
1141 t = global_types[b.fieldtype]
1143 crc = binascii.crc32(t.crc, crc) & 0xFFFFFFFF
1144 crc = foldup_blocks(t.block, crc)
1145 except AttributeError:
1152 f.crc = foldup_blocks(f.block, binascii.crc32(f.crc) & 0xFFFFFFFF)
1168 global_types.clear()
1169 seen_imports.clear()
1171 dirlist_add(includedir)
1173 sys.excepthook = exception_handler
1177 filename = show_name[0]
1179 filename = input_file
1184 logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
1186 logging.basicConfig()
1189 # Generate representation
1191 from importlib.machinery import SourceFileLoader
1197 cand.append(os.path.dirname(os.path.realpath(__file__)))
1198 cand.append(os.path.dirname(os.path.realpath(__file__)) + "/../share/vpp/")
1201 if os.path.isfile("{}vppapigen_{}.py".format(c, output_module.lower())):
1205 pluginpath = pluginpath + "/"
1206 if pluginpath == "":
1207 log.exception("Output plugin not found")
1209 module_path = "{}vppapigen_{}.py".format(pluginpath, output_module.lower())
1212 plugin = SourceFileLoader(output_module, module_path).load_module()
1213 except Exception as err:
1214 log.exception("Error importing output plugin: %s, %s", module_path, err)
1217 parser = VPPAPI(debug=debug, filename=filename, logger=log, revision=git_revision)
1221 parsed_objects = parser.parse_fd(sys.stdin, log)
1223 parsed_objects = parser.parse_filename(input_file, log)
1224 except ParseError as e:
1225 print("Parse error: ", e, file=sys.stderr)
1228 # Build a list of objects. Hash of lists.
1231 # if the variable is not set in the plugin, assume it to be false.
1233 plugin.process_imports
1234 except AttributeError:
1235 plugin.process_imports = False
1237 if plugin.process_imports:
1238 result = parser.process_imports(parsed_objects, False, result)
1239 s = parser.process(result)
1241 s = parser.process(parsed_objects)
1242 imports = parser.process_imports(parsed_objects, False, result)
1243 s["imported"] = parser.process(imports)
1246 s["Define"] = add_msg_id(s["Define"])
1249 foldup_crcs(s["Define"])
1256 pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
1257 for t in s["Define"]:
1258 pp.pprint([t.name, t.flags, t.block])
1259 for t in s["types"]:
1260 pp.pprint([t.name, t.block])
1262 result = plugin.run(outputdir, filename, s)
1264 if isinstance(output, str):
1265 with open(output, "w", encoding="UTF-8") as f:
1266 print(result, file=f)
1268 print(result, file=output)
1270 log.exception("Running plugin failed: %s %s", filename, result)
def run_kw_vppapigen(kwargs):
    # Thin adapter: call run_vppapigen with a dict of keyword arguments.
    return run_vppapigen(**kwargs)
1283 if sys.version_info < (
1288 "vppapigen requires a supported version of python. "
1289 "Please use version 3.5 or greater. "
1295 cliparser = argparse.ArgumentParser(description="VPP API generator")
1296 cliparser.add_argument("--pluginpath", default="")
1297 cliparser.add_argument("--includedir", action="append")
1298 cliparser.add_argument("--outputdir", action="store")
1299 cliparser.add_argument("--input")
1300 cliparser.add_argument(
1303 type=argparse.FileType("w", encoding="UTF-8"),
1307 cliparser.add_argument("output_module", nargs="?", default="C")
1308 cliparser.add_argument("--debug", action="store_true")
1309 cliparser.add_argument("--show-name", nargs=1)
1310 cliparser.add_argument(
1311 "--git-revision", help="Git revision to use for opening files"
1313 args = cliparser.parse_args()
1315 return run_vppapigen(
1316 includedir=args.includedir,
1318 outputdir=args.outputdir,
1319 show_name=args.show_name,
1320 input_file=args.input,
1321 output_module=args.output_module,
1322 pluginpath=args.pluginpath,
1323 git_revision=args.git_revision,
1328 if __name__ == "__main__":