9 from subprocess import Popen, PIPE
11 import ply.yacc as yacc
13 assert sys.version_info >= (3, 5), \
14 "Not supported Python version: {}".format(sys.version)
15 log = logging.getLogger('vppapigen')
17 # Ensure we don't leave temporary files around
18 sys.dont_write_bytecode = True
24 # Global dictionary of new types (including enums)
def global_type_add(name, obj):
    """Register *obj* in the global type table under 'vl_api_<name>_t'.

    Raises KeyError if a type of that name is already registered.
    """
    key = 'vl_api_' + name + '_t'
    if key in global_types:
        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(
            name, obj))
    global_types[key] = obj
# All your trace are belong to us!
def exception_handler(exception_type, exception, traceback):
    """sys.excepthook replacement: print 'Type: message' with no traceback."""
    message = "%s: %s" % (exception_type.__name__, exception)
    print(message)
48 def __init__(self, filename):
49 self.filename = filename
61 'enumflag': 'ENUMFLAG',
62 'typeonly': 'TYPEONLY',
63 'manual_print': 'MANUAL_PRINT',
64 'manual_endian': 'MANUAL_ENDIAN',
65 'dont_trace': 'DONT_TRACE',
66 'autoreply': 'AUTOREPLY',
67 'autoendian': 'AUTOENDIAN',
84 'counters': 'COUNTERS',
87 'severity': 'SEVERITY',
89 'description': 'DESCRIPTION',
92 tokens = ['STRING_LITERAL',
93 'ID', 'NUM'] + list(reserved.values())
95 t_ignore_LINE_COMMENT = '//.*'
108 r'0[xX][0-9a-fA-F]+|-?\d+\.?\d*'
109 base = 16 if t.value.startswith('0x') else 10
111 t.value = float(t.value)
113 t.value = int(t.value, base)
117 r'[a-zA-Z_][a-zA-Z_0-9]*'
118 # Check for reserved words
119 t.type = VPPAPILexer.reserved.get(t.value, 'ID')
123 def t_STRING_LITERAL(self, t):
124 r'\"([^\\\n]|(\\.))*?\"'
125 t.value = str(t.value).replace("\"", "")
128 # C or C++ comment (ignore)
129 def t_comment(self, t):
130 r'(/\*(.|\n)*?\*/)|(//.*)'
131 t.lexer.lineno += t.value.count('\n')
133 # Error handling rule
134 def t_error(self, t):
135 raise ParseError("Illegal character '{}' ({})"
136 "in {}: line {}".format(t.value[0],
137 hex(ord(t.value[0])),
141 # Define a rule so we can track line numbers
142 def t_newline(self, t):
144 t.lexer.lineno += len(t.value)
146 literals = ":{}[];=.,"
148 # A string containing ignored characters (spaces and tabs)
def vla_mark_length_field(block):
    """If *block* ends in a variable-length Array, mark its length field.

    Sets ``is_lengthfield = True`` on the member whose name matches the
    trailing Array's ``lengthfield`` attribute.
    """
    if not block:
        # An empty message/typedef body has no trailing array; avoid the
        # IndexError that block[-1] would raise.
        return
    if isinstance(block[-1], Array):
        lengthfield = block[-1].lengthfield
        for b in block:
            if b.fieldname == lengthfield:
                b.is_lengthfield = True
def vla_is_last_check(name, block):
    """Return True if *block* contains a variable-length array.

    Raises ValueError if any VLA member is not the final field of the
    message *name* — anything placed after a VLA has no fixed wire offset.
    """
    vla = False

    def _must_be_last(index, field):
        # Shared check: previously duplicated verbatim in all three branches.
        if index + 1 < len(block):
            raise ValueError(
                'VLA field "{}" must be the last field in message "{}"'
                .format(field.fieldname, name))

    for i, b in enumerate(block):
        if isinstance(b, Array) and b.vla:
            vla = True
            _must_be_last(i, b)
        elif b.fieldtype.startswith('vl_api_'):
            # A user-defined type is a VLA if the type itself ends in one.
            if global_types[b.fieldtype].vla:
                vla = True
                _must_be_last(i, b)
        elif b.fieldtype == 'string' and b.length == 0:
            # Unbounded string — also variable length on the wire.
            vla = True
            _must_be_last(i, b)
    return vla
190 def process(self, result): # -> Dict
191 result[self.type].append(self)
194 class Service(Processable):
197 def __init__(self, caller, reply, events=None, stream_message=None,
202 self.stream_message = stream_message
203 self.events = [] if events is None else events
206 class Typedef(Processable):
209 def __init__(self, name, flags, block):
213 self.crc = str(block).encode()
214 self.manual_print = False
215 self.manual_endian = False
217 if f == 'manual_print':
218 self.manual_print = True
219 elif f == 'manual_endian':
220 self.manual_endian = True
221 global_type_add(name, self)
223 self.vla = vla_is_last_check(name, block)
224 vla_mark_length_field(self.block)
226 def process(self, result):
227 result['types'].append(self)
230 return self.name + str(self.flags) + str(self.block)
233 class Using(Processable):
236 def __init__(self, name, flags, alias):
240 self.manual_print = True
241 self.manual_endian = True
243 self.manual_print = False
244 self.manual_endian = False
246 if f == 'manual_print':
247 self.manual_print = True
248 elif f == 'manual_endian':
249 self.manual_endian = True
251 if isinstance(alias, Array):
252 a = {'type': alias.fieldtype,
253 'length': alias.length}
255 a = {'type': alias.fieldtype}
261 # self.crc = str(alias).encode()
262 # but to be backwards compatible use the block ([])
264 self.crc = str(self.block).encode()
265 global_type_add(name, self)
267 def process(self, result): # -> Dict
268 result['types'].append(self)
271 return self.name + str(self.alias)
274 class Union(Processable):
277 def __init__(self, name, flags, block):
278 self.manual_print = False
279 self.manual_endian = False
283 if f == 'manual_print':
284 self.manual_print = True
285 elif f == 'manual_endian':
286 self.manual_endian = True
289 self.crc = str(block).encode()
290 self.vla = vla_is_last_check(name, block)
292 global_type_add(name, self)
294 def process(self, result):
295 result['types'].append(self)
298 return str(self.block)
301 class Define(Processable):
304 def __init__(self, name, flags, block):
308 self.dont_trace = False
309 self.manual_print = False
310 self.manual_endian = False
311 self.autoreply = False
315 if f == 'dont_trace':
316 self.dont_trace = True
317 elif f == 'manual_print':
318 self.manual_print = True
319 elif f == 'manual_endian':
320 self.manual_endian = True
321 elif f == 'autoreply':
322 self.autoreply = True
323 elif f == 'autoendian':
328 if isinstance(b, Option):
329 self.options[b.option] = b.value
332 block = [x for x in block if x not in remove]
334 self.vla = vla_is_last_check(name, block)
335 vla_mark_length_field(self.block)
337 self.crc = str(block).encode()
339 def autoreply_block(self, name, parent):
340 block = [Field('u32', 'context'),
341 Field('i32', 'retval')]
342 # inherit the parent's options
343 for k, v in parent.options.items():
344 block.append(Option(k, v))
345 return Define(name + '_reply', [], block)
347 def process(self, result): # -> Dict
348 tname = self.__class__.__name__
349 result[tname].append(self)
351 result[tname].append(self.autoreply_block(self.name, self))
354 return self.name + str(self.flags) + str(self.block)
357 class Enum(Processable):
360 def __init__(self, name, block, enumtype='u32'):
362 self.enumtype = enumtype
364 self.manual_print = False
376 block2.append([b['id'], count])
378 if b['option']['backwards_compatible']:
382 block3.append([b['id'], count])
384 raise ValueError("Backward compatible enum must "
386 .format(name, b['id']))
388 self.crc = str(block3).encode()
389 global_type_add(name, self)
391 def process(self, result):
392 result['types'].append(self)
395 return self.name + str(self.block)
398 class EnumFlag(Enum):
401 def __init__(self, name, block, enumtype='u32'):
402 super(EnumFlag, self).__init__(name, block, enumtype)
405 if bin(b[1])[2:].count("1") > 1:
406 raise TypeError("%s is not a flag enum. No element in a "
407 "flag enum may have more than a "
408 "single bit set." % self.name)
411 class Import(Processable):
415 def __new__(cls, *args, **kwargs):
416 if args[0] not in seen_imports:
417 instance = super().__new__(cls)
418 instance._initialized = False
419 seen_imports[args[0]] = instance
421 return seen_imports[args[0]]
423 def __init__(self, filename, revision):
424 if self._initialized:
426 self.filename = filename
428 parser = VPPAPI(filename=filename, revision=revision)
429 dirlist = dirlist_get()
432 f = os.path.join(dir, filename)
433 if os.path.exists(f):
435 self.result = parser.parse_filename(f, None)
436 self._initialized = True
442 class Option(Processable):
445 def __init__(self, option, value=None):
448 self.crc = str(option).encode()
450 def process(self, result): # -> Dict
451 result[self.type][self.option] = self.value
454 return str(self.option)
456 def __getitem__(self, index):
457 return self.option[index]
460 class Array(Processable):
463 def __init__(self, fieldtype, name, length, modern_vla=False):
464 self.fieldtype = fieldtype
465 self.fieldname = name
466 self.modern_vla = modern_vla
467 if type(length) is str:
468 self.lengthfield = length
473 self.lengthfield = None
477 return str([self.fieldtype, self.fieldname, self.length,
481 class Field(Processable):
484 def __init__(self, fieldtype, name, limit=None):
485 # limit field has been expanded to an options dict.
487 self.fieldtype = fieldtype
488 self.is_lengthfield = False
490 if self.fieldtype == 'string':
491 raise ValueError("The string type {!r} is an "
492 "array type ".format(name))
494 if name in keyword.kwlist:
495 raise ValueError("Fieldname {!r} is a python keyword and is not "
496 "accessible via the python API. ".format(name))
497 self.fieldname = name
501 return str([self.fieldtype, self.fieldname])
504 class Counter(Processable):
507 def __init__(self, path, counter):
511 def process(self, result): # -> Dict
512 result['Counters'].append(self)
515 class Paths(Processable):
518 def __init__(self, pathset):
522 return "%s(paths=%s)" % (
523 self.__class__.__name__, self.paths
528 """ Coordinates of a syntactic element. Consists of:
531 - (optional) column number, for the Lexer
533 __slots__ = ('file', 'line', 'column', '__weakref__')
535 def __init__(self, file, line, column=None):
541 str = "%s:%s" % (self.file, self.line)
543 str += ":%s" % self.column
547 class ParseError(Exception):
555 tokens = VPPAPILexer.tokens
557 def __init__(self, filename, logger, revision=None):
558 self.filename = filename
561 self.revision = revision
563 def _parse_error(self, msg, coord):
564 raise ParseError("%s: %s" % (coord, msg))
566 def _parse_warning(self, msg, coord):
568 self.logger.warning("%s: %s" % (coord, msg))
570 def _coord(self, lineno, column=None):
573 line=lineno, column=column)
575 def _token_coord(self, p, token_idx):
576 """ Returns the coordinates for the YaccProduction object 'p' indexed
577 with 'token_idx'. The coordinate includes the 'lineno' and
578 'column'. Both follow the lex semantic, starting from 1.
580 last_cr = p.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
583 column = (p.lexpos(token_idx) - (last_cr))
584 return self._coord(p.lineno(token_idx), column)
586 def p_slist(self, p):
607 def p_import(self, p):
608 '''import : IMPORT STRING_LITERAL ';' '''
609 p[0] = Import(p[2], revision=self.revision)
611 def p_path_elements(self, p):
612 '''path_elements : path_element
613 | path_elements path_element'''
617 if type(p[1]) is dict:
622 def p_path_element(self, p):
623 '''path_element : STRING_LITERAL STRING_LITERAL ';' '''
624 p[0] = {'path': p[1], 'counter': p[2]}
626 def p_paths(self, p):
627 '''paths : PATHS '{' path_elements '}' ';' '''
630 def p_counters(self, p):
631 '''counters : COUNTERS ID '{' counter_elements '}' ';' '''
632 p[0] = Counter(p[2], p[4])
634 def p_counter_elements(self, p):
635 '''counter_elements : counter_element
636 | counter_elements counter_element'''
640 if type(p[1]) is dict:
645 def p_counter_element(self, p):
646 '''counter_element : ID '{' counter_statements '}' ';' '''
647 p[0] = {**{'name': p[1]}, **p[3]}
649 def p_counter_statements(self, p):
650 '''counter_statements : counter_statement
651 | counter_statements counter_statement'''
655 p[0] = {**p[1], **p[2]}
657 def p_counter_statement(self, p):
658 '''counter_statement : SEVERITY ID ';'
659 | UNITS STRING_LITERAL ';'
660 | DESCRIPTION STRING_LITERAL ';'
664 def p_service(self, p):
665 '''service : SERVICE '{' service_statements '}' ';' '''
668 def p_service_statements(self, p):
669 '''service_statements : service_statement
670 | service_statements service_statement'''
676 def p_service_statement(self, p):
677 '''service_statement : RPC ID RETURNS NULL ';'
678 | RPC ID RETURNS ID ';'
679 | RPC ID RETURNS STREAM ID ';'
680 | RPC ID RETURNS ID EVENTS event_list ';' '''
682 # Verify that caller and reply differ
684 'Reply ID ({}) should not be equal to Caller ID'.format(p[2]),
685 self._token_coord(p, 1))
687 p[0] = Service(p[2], p[4], p[6])
689 p[0] = Service(p[2], p[5], stream=True)
691 p[0] = Service(p[2], p[4])
693 def p_service_statement2(self, p):
694 '''service_statement : RPC ID RETURNS ID STREAM ID ';' '''
695 p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)
697 def p_event_list(self, p):
698 '''event_list : events
699 | event_list events '''
705 def p_event(self, p):
711 '''enum : ENUM ID '{' enum_statements '}' ';' '''
712 p[0] = Enum(p[2], p[4])
714 def p_enum_type(self, p):
715 ''' enum : ENUM ID ':' enum_size '{' enum_statements '}' ';' '''
717 p[0] = Enum(p[2], p[6], enumtype=p[4])
719 p[0] = Enum(p[2], p[4])
721 def p_enumflag(self, p):
722 '''enumflag : ENUMFLAG ID '{' enum_statements '}' ';' '''
723 p[0] = EnumFlag(p[2], p[4])
725 def p_enumflag_type(self, p):
726 ''' enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';' ''' # noqa : E502
728 p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
730 p[0] = EnumFlag(p[2], p[4])
732 def p_enum_size(self, p):
741 def p_enumflag_size(self, p):
742 ''' enumflag_size : U8
747 def p_define(self, p):
748 '''define : DEFINE ID '{' block_statements_opt '}' ';' '''
750 p[0] = Define(p[2], [], p[4])
752 def p_define_flist(self, p):
753 '''define : flist DEFINE ID '{' block_statements_opt '}' ';' '''
755 if 'typeonly' in p[1]:
756 self._parse_error('legacy typedef. use typedef: {} {}[{}];'
757 .format(p[1], p[2], p[4]),
758 self._token_coord(p, 1))
760 p[0] = Define(p[3], p[1], p[5])
762 def p_flist(self, p):
771 '''flag : MANUAL_PRINT
781 def p_typedef(self, p):
782 '''typedef : TYPEDEF ID '{' block_statements_opt '}' ';' '''
783 p[0] = Typedef(p[2], [], p[4])
785 def p_typedef_flist(self, p):
786 '''typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';' '''
787 p[0] = Typedef(p[3], p[1], p[5])
789 def p_typedef_alias(self, p):
790 '''typedef : TYPEDEF declaration '''
791 p[0] = Using(p[2].fieldname, [], p[2])
793 def p_typedef_alias_flist(self, p):
794 '''typedef : flist TYPEDEF declaration '''
795 p[0] = Using(p[3].fieldname, p[1], p[3])
797 def p_block_statements_opt(self, p):
798 '''block_statements_opt : block_statements '''
801 def p_block_statements(self, p):
802 '''block_statements : block_statement
803 | block_statements block_statement'''
809 def p_block_statement(self, p):
810 '''block_statement : declaration
814 def p_enum_statements(self, p):
815 '''enum_statements : enum_statement
816 | enum_statements enum_statement'''
822 def p_enum_statement(self, p):
823 '''enum_statement : ID '=' NUM ','
825 | ID '[' field_options ']' ','
826 | ID '=' NUM '[' field_options ']' ',' '''
830 p[0] = {'id': p[1], 'value': p[3]}
832 p[0] = {'id': p[1], 'option': p[3]}
834 p[0] = {'id': p[1], 'value': p[3], 'option': p[5]}
836 self._parse_error('ERROR', self._token_coord(p, 1))
838 def p_field_options(self, p):
839 '''field_options : field_option
840 | field_options field_option'''
844 p[0] = {**p[1], **p[2]}
846 def p_field_option(self, p):
848 | ID '=' assignee ','
857 def p_variable_name(self, p):
858 '''variable_name : ID
867 def p_declaration(self, p):
868 '''declaration : type_specifier variable_name ';'
869 | type_specifier variable_name '[' field_options ']' ';'
872 p[0] = Field(p[1], p[2], p[4])
874 p[0] = Field(p[1], p[2])
876 self._parse_error('ERROR', self._token_coord(p, 1))
877 self.fields.append(p[2])
879 def p_declaration_array_vla(self, p):
880 '''declaration : type_specifier variable_name '[' ']' ';' '''
881 p[0] = Array(p[1], p[2], 0, modern_vla=True)
883 def p_declaration_array(self, p):
884 '''declaration : type_specifier variable_name '[' NUM ']' ';'
885 | type_specifier variable_name '[' ID ']' ';' '''
888 return self._parse_error(
889 'array: %s' % p.value,
890 self._coord(lineno=p.lineno))
892 # Make this error later
893 if type(p[4]) is int and p[4] == 0:
894 # XXX: Line number is wrong
895 self._parse_warning('Old Style VLA: {} {}[{}];'
896 .format(p[1], p[2], p[4]),
897 self._token_coord(p, 1))
899 if type(p[4]) is str and p[4] not in self.fields:
900 # Verify that length field exists
901 self._parse_error('Missing length field: {} {}[{}];'
902 .format(p[1], p[2], p[4]),
903 self._token_coord(p, 1))
904 p[0] = Array(p[1], p[2], p[4])
906 def p_option(self, p):
907 '''option : OPTION ID '=' assignee ';'
912 p[0] = Option(p[2], p[4])
914 def p_assignee(self, p):
921 def p_type_specifier(self, p):
922 '''type_specifier : U8
935 # Do a second pass later to verify that user defined types are defined
936 def p_typedef_specifier(self, p):
937 '''type_specifier : ID '''
938 if p[1] not in global_types:
939 self._parse_error('Undefined type: {}'.format(p[1]),
940 self._token_coord(p, 1))
943 def p_union(self, p):
944 '''union : UNION ID '{' block_statements_opt '}' ';' '''
945 p[0] = Union(p[2], [], p[4])
947 def p_union_flist(self, p):
948 '''union : flist UNION ID '{' block_statements_opt '}' ';' '''
949 p[0] = Union(p[3], p[1], p[5])
951 # Error rule for syntax errors
952 def p_error(self, p):
955 'before: %s' % p.value,
956 self._coord(lineno=p.lineno))
958 self._parse_error('At end of input', self.filename)
963 def __init__(self, debug=False, filename='', logger=None, revision=None):
964 self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
965 self.parser = yacc.yacc(module=VPPAPIParser(filename, logger,
967 write_tables=False, debug=debug)
969 self.revision = revision
970 self.filename = filename
972 def parse_string(self, code, debug=0, lineno=1):
973 self.lexer.lineno = lineno
974 return self.parser.parse(code, lexer=self.lexer, debug=debug)
976 def parse_fd(self, fd, debug=0):
978 return self.parse_string(data, debug=debug)
980 def parse_filename(self, filename, debug=0):
982 git_show = 'git show {}:{}'.format(self.revision, filename)
983 proc = Popen(git_show.split(), stdout=PIPE, encoding='utf-8')
985 data, errs = proc.communicate()
986 if proc.returncode != 0:
987 print('File not found: {}:{}'
988 .format(self.revision, filename), file=sys.stderr)
990 return self.parse_string(data, debug=debug)
995 with open(filename, encoding='utf-8') as fd:
996 return self.parse_fd(fd, None)
997 except FileNotFoundError:
998 print('File not found: {}'.format(filename), file=sys.stderr)
1001 def process(self, objs):
1013 crc = binascii.crc32(o.crc, crc) & 0xffffffff
1014 except AttributeError:
1019 if isinstance(o2, Service):
1024 msgs = {d.name: d for d in s['Define']}
1025 svcs = {s.caller: s for s in s['Service']}
1026 replies = {s.reply: s for s in s['Service']}
1031 for service in svcs:
1032 if service not in msgs:
1034 'Service definition refers to unknown message'
1035 ' definition: {}'.format(service))
1036 if svcs[service].reply != 'null' and \
1037 svcs[service].reply not in msgs:
1038 raise ValueError('Service definition refers to unknown message'
1039 ' definition in reply: {}'
1040 .format(svcs[service].reply))
1041 if service in replies:
1042 raise ValueError('Service definition refers to message'
1043 ' marked as reply: {}'.format(service))
1044 for event in svcs[service].events:
1045 if event not in msgs:
1046 raise ValueError('Service definition refers to unknown '
1047 'event: {} in message: {}'
1048 .format(event, service))
1049 seen_services[event] = True
1051 # Create services implicitly
1053 if d in seen_services:
1055 if d.endswith('_reply'):
1058 if d[:-6] not in msgs:
1059 raise ValueError('{} missing calling message'
1062 if d.endswith('_dump'):
1065 if d[:-5]+'_details' in msgs:
1066 s['Service'].append(Service(d, d[:-5]+'_details',
1069 raise ValueError('{} missing details message'
1073 if d.endswith('_details'):
1074 if d[:-8]+'_get' in msgs:
1075 if d[:-8]+'_get' in svcs:
1077 raise ValueError('{} should be in a stream service'
1078 .format(d[:-8]+'_get'))
1079 if d[:-8]+'_dump' in msgs:
1081 raise ValueError('{} missing dump or get message'
1086 if d+'_reply' in msgs:
1087 s['Service'].append(Service(d, d+'_reply'))
1090 '{} missing reply message ({}) or service definition'
1091 .format(d, d+'_reply'))
1095 def process_imports(self, objs, in_import, result): # -> List
1097 # Only allow the following object types from imported file
1098 if in_import and not isinstance(o, (Enum, Import, Typedef,
1101 if isinstance(o, Import):
1103 result = self.process_imports(o.result, True, result)
1109 # Add message ids to each message.
1112 o.block.insert(0, Field('u16', '_vl_msg_id'))
1119 def dirlist_add(dirs):
1122 dirlist = dirlist + dirs
1129 def foldup_blocks(block, crc):
1131 # Look up CRC in user defined types
1132 if b.fieldtype.startswith('vl_api_'):
1134 t = global_types[b.fieldtype]
1136 crc = binascii.crc32(t.crc, crc) & 0xffffffff
1137 crc = foldup_blocks(t.block, crc)
1138 except AttributeError:
1145 f.crc = foldup_blocks(f.block,
1146 binascii.crc32(f.crc) & 0xffffffff)
1153 if sys.version_info < (3, 5,):
1154 log.exception('vppapigen requires a supported version of python. '
1155 'Please use version 3.5 or greater. '
1156 'Using %s', sys.version)
1159 cliparser = argparse.ArgumentParser(description='VPP API generator')
1160 cliparser.add_argument('--pluginpath', default="")
1161 cliparser.add_argument('--includedir', action='append')
1162 cliparser.add_argument('--outputdir', action='store')
1163 cliparser.add_argument('--input')
1164 cliparser.add_argument('--output', nargs='?',
1165 type=argparse.FileType('w', encoding='UTF-8'),
1168 cliparser.add_argument('output_module', nargs='?', default='C')
1169 cliparser.add_argument('--debug', action='store_true')
1170 cliparser.add_argument('--show-name', nargs=1)
1171 cliparser.add_argument('--git-revision',
1172 help="Git revision to use for opening files")
1173 args = cliparser.parse_args()
1175 dirlist_add(args.includedir)
1177 sys.excepthook = exception_handler
1181 filename = args.show_name[0]
1183 filename = args.input
1188 logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
1190 logging.basicConfig()
1193 # Generate representation
1195 from importlib.machinery import SourceFileLoader
1199 if not args.pluginpath:
1201 cand.append(os.path.dirname(os.path.realpath(__file__)))
1202 cand.append(os.path.dirname(os.path.realpath(__file__)) +
1206 if os.path.isfile('{}vppapigen_{}.py'
1207 .format(c, args.output_module.lower())):
1211 pluginpath = args.pluginpath + '/'
1212 if pluginpath == '':
1213 log.exception('Output plugin not found')
1215 module_path = '{}vppapigen_{}.py'.format(pluginpath,
1216 args.output_module.lower())
1219 plugin = SourceFileLoader(args.output_module,
1220 module_path).load_module()
1221 except Exception as err:
1222 log.exception('Error importing output plugin: %s, %s',
1226 parser = VPPAPI(debug=args.debug, filename=filename, logger=log,
1227 revision=args.git_revision)
1231 parsed_objects = parser.parse_fd(sys.stdin, log)
1233 parsed_objects = parser.parse_filename(args.input, log)
1234 except ParseError as e:
1235 print('Parse error: ', e, file=sys.stderr)
1238 # Build a list of objects. Hash of lists.
1241 # if the variable is not set in the plugin, assume it to be false.
1243 plugin.process_imports
1244 except AttributeError:
1245 plugin.process_imports = False
1247 if plugin.process_imports:
1248 result = parser.process_imports(parsed_objects, False, result)
1249 s = parser.process(result)
1251 s = parser.process(parsed_objects)
1252 imports = parser.process_imports(parsed_objects, False, result)
1253 s['imported'] = parser.process(imports)
1256 s['Define'] = add_msg_id(s['Define'])
1259 foldup_crcs(s['Define'])
1265 pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
1266 for t in s['Define']:
1267 pp.pprint([t.name, t.flags, t.block])
1268 for t in s['types']:
1269 pp.pprint([t.name, t.block])
1271 result = plugin.run(args, filename, s)
1273 print(result, file=args.output)
1275 log.exception('Running plugin failed: %s %s', filename, result)
1280 if __name__ == '__main__':