-#!/usr/bin/python3
+#!/usr/bin/env python3
-import ply.lex as lex
-import ply.yacc as yacc
import sys
import argparse
import keyword
import logging
import binascii
import os
+from subprocess import Popen, PIPE
+import ply.lex as lex
+import ply.yacc as yacc
+assert sys.version_info >= (3, 5), \
+ "Not supported Python version: {}".format(sys.version)
log = logging.getLogger('vppapigen')
# Ensure we don't leave temporary files around
# Global dictionary of new types (including enums)
global_types = {}
+seen_imports = {}
+
def global_type_add(name, obj):
    '''Add new type to the dictionary of types '''
    # All user-defined types are registered under a 'vl_api_<name>_t' key.
    type_name = 'vl_api_' + name + '_t'
    # Redefinition is a hard error: a later definition would otherwise
    # silently shadow the earlier one.
    if type_name in global_types:
        raise KeyError("Attempted redefinition of {!r} with {!r}.".format(
            name, obj))
    global_types[type_name] = obj
#
# Lexer
#
-class VPPAPILexer(object):
+class VPPAPILexer:
def __init__(self, filename):
self.filename = filename
'define': 'DEFINE',
'typedef': 'TYPEDEF',
'enum': 'ENUM',
+ 'enumflag': 'ENUMFLAG',
'typeonly': 'TYPEONLY',
'manual_print': 'MANUAL_PRINT',
'manual_endian': 'MANUAL_ENDIAN',
'dont_trace': 'DONT_TRACE',
'autoreply': 'AUTOREPLY',
+ 'autoendian': 'AUTOENDIAN',
'option': 'OPTION',
'u8': 'U8',
'u16': 'U16',
'true': 'TRUE',
'false': 'FALSE',
'union': 'UNION',
+ 'counters': 'COUNTERS',
+ 'paths': 'PATHS',
+ 'units': 'UNITS',
+ 'severity': 'SEVERITY',
+ 'type': 'TYPE',
+ 'description': 'DESCRIPTION',
}
tokens = ['STRING_LITERAL',
t_ignore_LINE_COMMENT = '//.*'
    # PLY token rule: by PLY convention the raw-string docstring is the
    # regular expression this rule matches.
    def t_FALSE(self, t):
        r'false'
        t.value = False  # replace the matched text with the Python boolean
        return t
+ def t_TRUE(self, t):
+ r'false'
+ t.value = True
+ return t
+
def t_NUM(self, t):
r'0[xX][0-9a-fA-F]+|-?\d+\.?\d*'
base = 16 if t.value.startswith('0x') else 10
hex(ord(t.value[0])),
self.filename,
t.lexer.lineno))
- t.lexer.skip(1)
# Define a rule so we can track line numbers
def t_newline(self, t):
t_ignore = ' \t'
-def crc_block_combine(block, crc):
- s = str(block).encode()
- return binascii.crc32(s, crc) & 0xffffffff
def vla_mark_length_field(block):
    """Mark the field that carries a trailing VLA's length.

    If the last field in *block* is a variable-length Array that names a
    lengthfield, set ``is_lengthfield = True`` on the field of that name
    so generators can treat it specially.
    """
    # Guard: an empty block has no trailing field to inspect
    # (block[-1] would raise IndexError).
    if not block:
        return
    if isinstance(block[-1], Array):
        lengthfield = block[-1].lengthfield
        for b in block:
            if b.fieldname == lengthfield:
                b.is_lengthfield = True
+
def vla_is_last_check(name, block):
    """Return True if *block* contains a variable-length array field.

    A field is a VLA when it is a VLA Array, a user type that itself
    contains a VLA, or a zero-length (variable) string.  Raises
    ValueError if any VLA field is not the last field in the block.
    (Deduplicated: the same check/raise was previously repeated in all
    three branches.)
    """
    vla = False
    for i, b in enumerate(block):
        if isinstance(b, Array) and b.vla:
            field_is_vla = True
        elif (b.fieldtype.startswith('vl_api_') and
              global_types[b.fieldtype].vla):
            field_is_vla = True
        elif b.fieldtype == 'string' and b.length == 0:
            field_is_vla = True
        else:
            field_is_vla = False
        if field_is_vla:
            vla = True
            if i + 1 < len(block):
                raise ValueError(
                    'VLA field "{}" must be the last field in message "{}"'
                    .format(b.fieldname, name))
    return vla
+
+
class Processable:
    """Base class for all parsed API objects.

    ``type`` names the bucket in the result dictionary; the default
    ``process`` appends the object to ``result[self.type]``.  Subclasses
    override one or both.
    """
    # Overridden by every concrete subclass ('Define', 'Enum', ...).
    type = "<Invalid>"

    def process(self, result):  # -> Dict
        result[self.type].append(self)
+
class Service(Processable):
    """A parsed service statement tying a request to its reply/stream."""
    type = 'Service'

    def __init__(self, caller, reply, events=None, stream_message=None,
                 stream=False):
        # caller: request message name; reply: reply message name.
        self.caller = caller
        self.reply = reply
        # stream / stream_message describe streamed (details) replies.
        self.stream = stream
        self.stream_message = stream_message
        # events: messages the server may send asynchronously.
        self.events = [] if events is None else events
-class Typedef():
+class Typedef(Processable):
+ type = 'Typedef'
+
def __init__(self, name, flags, block):
self.name = name
self.flags = flags
self.manual_endian = True
global_type_add(name, self)
+ self.vla = vla_is_last_check(name, block)
+ vla_mark_length_field(self.block)
+
+ def process(self, result):
+ result['types'].append(self)
+
def __repr__(self):
return self.name + str(self.flags) + str(self.block)
-class Using():
- def __init__(self, name, alias):
+class Using(Processable):
+ type = 'Using'
+
+ def __init__(self, name, flags, alias):
self.name = name
+ self.vla = False
+ self.block = []
+ self.manual_print = True
+ self.manual_endian = True
+
+ self.manual_print = False
+ self.manual_endian = False
+ for f in flags:
+ if f == 'manual_print':
+ self.manual_print = True
+ elif f == 'manual_endian':
+ self.manual_endian = True
if isinstance(alias, Array):
- a = { 'type': alias.fieldtype, # noqa: E201
- 'length': alias.length } # noqa: E202
+ a = {'type': alias.fieldtype,
+ 'length': alias.length}
else:
- a = { 'type': alias.fieldtype } # noqa: E201,E202
+ a = {'type': alias.fieldtype}
self.alias = a
- self.crc = str(alias).encode()
+ self.using = alias
+
+ #
+ # Should have been:
+ # self.crc = str(alias).encode()
+ # but to be backwards compatible use the block ([])
+ #
+ self.crc = str(self.block).encode()
global_type_add(name, self)
+ def process(self, result): # -> Dict
+ result['types'].append(self)
+
def __repr__(self):
return self.name + str(self.alias)
-class Union():
- def __init__(self, name, block):
- self.type = 'Union'
+class Union(Processable):
+ type = 'Union'
+
+ def __init__(self, name, flags, block):
self.manual_print = False
self.manual_endian = False
self.name = name
+
+ for f in flags:
+ if f == 'manual_print':
+ self.manual_print = True
+ elif f == 'manual_endian':
+ self.manual_endian = True
+
self.block = block
self.crc = str(block).encode()
+ self.vla = vla_is_last_check(name, block)
+
global_type_add(name, self)
+ def process(self, result):
+ result['types'].append(self)
+
def __repr__(self):
return str(self.block)
-class Define():
+class Define(Processable):
+ type = 'Define'
+
def __init__(self, name, flags, block):
self.name = name
self.flags = flags
self.block = block
- self.crc = str(block).encode()
self.dont_trace = False
self.manual_print = False
self.manual_endian = False
self.autoreply = False
- self.singular = False
+ self.autoendian = 0
+ self.options = {}
for f in flags:
if f == 'dont_trace':
self.dont_trace = True
self.manual_endian = True
elif f == 'autoreply':
self.autoreply = True
+ elif f == 'autoendian':
+ self.autoendian = 1
+ remove = []
for b in block:
if isinstance(b, Option):
- if b[1] == 'singular' and b[2] == 'true':
- self.singular = True
- block.remove(b)
+ self.options[b.option] = b.value
+ remove.append(b)
+
+ block = [x for x in block if x not in remove]
+ self.block = block
+ self.vla = vla_is_last_check(name, block)
+ vla_mark_length_field(self.block)
+
+ self.crc = str(block).encode()
+
    def autoreply_block(self, name, parent):
        """Build the implicit <name>_reply Define for an autoreply message."""
        # Standard reply shape: context echo plus a return value.
        block = [Field('u32', 'context'),
                 Field('i32', 'retval')]
        # inherit the parent's options
        for k, v in parent.options.items():
            block.append(Option(k, v))
        return Define(name + '_reply', [], block)
+
    def process(self, result):  # -> Dict
        # Defines are bucketed under their class name ('Define'); an
        # autoreply message also contributes its generated _reply Define.
        tname = self.__class__.__name__
        result[tname].append(self)
        if self.autoreply:
            result[tname].append(self.autoreply_block(self.name, self))
def __repr__(self):
return self.name + str(self.flags) + str(self.block)
-class Enum():
+class Enum(Processable):
+ type = 'Enum'
+
def __init__(self, name, block, enumtype='u32'):
self.name = name
self.enumtype = enumtype
+ self.vla = False
+ self.manual_print = False
count = 0
- for i, b in enumerate(block):
- if type(b) is list:
- count = b[1]
+ block2 = []
+ block3 = []
+ bc_set = False
+
+ for b in block:
+ if 'value' in b:
+ count = b['value']
else:
count += 1
- block[i] = [b, count]
-
- self.block = block
- self.crc = str(block).encode()
+ block2.append([b['id'], count])
+ try:
+ if b['option']['backwards_compatible']:
+ pass
+ bc_set = True
+ except KeyError:
+ block3.append([b['id'], count])
+ if bc_set:
+ raise ValueError("Backward compatible enum must "
+ "be last {!r} {!r}"
+ .format(name, b['id']))
+ self.block = block2
+ self.crc = str(block3).encode()
global_type_add(name, self)
+ def process(self, result):
+ result['types'].append(self)
+
def __repr__(self):
return self.name + str(self.block)
-class Import():
- def __init__(self, filename):
- self.filename = filename
class EnumFlag(Enum):
    """An enum whose members are single-bit flags suitable for OR'ing."""
    type = 'EnumFlag'

    def __init__(self, name, block, enumtype='u32'):
        # Zero-argument super(), consistent with the rest of the file
        # (e.g. Import.__new__).
        super().__init__(name, block, enumtype)
        for b in self.block:
            # Every member value must have at most a single bit set.
            if bin(b[1])[2:].count("1") > 1:
                raise TypeError("%s is not a flag enum. No element in a "
                                "flag enum may have more than a "
                                "single bit set." % self.name)
+
+
+class Import(Processable):
+ type = 'Import'
+ _initialized = False
+
    def __new__(cls, *args, **kwargs):
        # Memoize by filename (args[0]) so each imported .api file is
        # parsed only once, no matter how many times it is imported.
        # NOTE(review): assumes the filename is always passed
        # positionally -- confirm against callers.
        if args[0] not in seen_imports:
            instance = super().__new__(cls)
            instance._initialized = False
            seen_imports[args[0]] = instance

        return seen_imports[args[0]]
+
+ def __init__(self, filename, revision):
+ if self._initialized:
+ return
+ self.filename = filename
# Deal with imports
- parser = VPPAPI(filename=filename)
+ parser = VPPAPI(filename=filename, revision=revision)
dirlist = dirlist_get()
f = filename
for dir in dirlist:
f = os.path.join(dir, filename)
if os.path.exists(f):
break
-
- with open(f, encoding='utf-8') as fd:
- self.result = parser.parse_file(fd, None)
+ self.result = parser.parse_filename(f, None)
+ self._initialized = True
def __repr__(self):
return self.filename
-class Option():
- def __init__(self, option):
+class Option(Processable):
+ type = 'Option'
+
+ def __init__(self, option, value=None):
self.option = option
+ self.value = value
self.crc = str(option).encode()
+ def process(self, result): # -> Dict
+ result[self.type][self.option] = self.value
+
def __repr__(self):
return str(self.option)
return self.option[index]
-class Array():
- def __init__(self, fieldtype, name, length):
- self.type = 'Array'
+class Array(Processable):
+ type = 'Array'
+
+ def __init__(self, fieldtype, name, length, modern_vla=False):
self.fieldtype = fieldtype
self.fieldname = name
+ self.modern_vla = modern_vla
if type(length) is str:
self.lengthfield = length
self.length = 0
+ self.vla = True
else:
self.length = length
self.lengthfield = None
+ self.vla = False
def __repr__(self):
return str([self.fieldtype, self.fieldname, self.length,
self.lengthfield])
-class Field():
+class Field(Processable):
+ type = 'Field'
+
def __init__(self, fieldtype, name, limit=None):
- self.type = 'Field'
+ # limit field has been expanded to an options dict.
+
self.fieldtype = fieldtype
+ self.is_lengthfield = False
+
+ if self.fieldtype == 'string':
+ raise ValueError("The string type {!r} is an "
+ "array type ".format(name))
+
if name in keyword.kwlist:
raise ValueError("Fieldname {!r} is a python keyword and is not "
"accessible via the python API. ".format(name))
return str([self.fieldtype, self.fieldname])
-class Coord(object):
class Counter(Processable):
    """A parsed counters block: a named set of counter definitions."""
    type = 'Counter'

    def __init__(self, path, counter):
        # name: the counter-set identifier; block: its element dicts.
        self.name = path
        self.block = counter

    def process(self, result):  # -> Dict
        # Counters are collected under the 'Counters' result key.
        result['Counters'].append(self)
+
+
class Paths(Processable):
    """A parsed paths block mapping stat paths to counter sets."""
    type = 'Paths'

    def __init__(self, pathset):
        self.paths = pathset

    def __repr__(self):
        return "%s(paths=%s)" % (
            self.__class__.__name__, self.paths
        )
+
+
+class Coord:
""" Coordinates of a syntactic element. Consists of:
- File name
- Line number
#
# Grammar rules
#
-class VPPAPIParser(object):
+class VPPAPIParser:
tokens = VPPAPILexer.tokens
- def __init__(self, filename, logger):
+ def __init__(self, filename, logger, revision=None):
self.filename = filename
self.logger = logger
self.fields = []
+ self.revision = revision
def _parse_error(self, msg, coord):
raise ParseError("%s: %s" % (coord, msg))
def _coord(self, lineno, column=None):
return Coord(
- file=self.filename,
- line=lineno, column=column)
+ file=self.filename,
+ line=lineno, column=column)
def _token_coord(self, p, token_idx):
""" Returns the coordinates for the YaccProduction object 'p' indexed
| option
| import
| enum
+ | enumflag
| union
- | service'''
+ | service
+ | paths
+ | counters'''
p[0] = p[1]
def p_import(self, p):
'''import : IMPORT STRING_LITERAL ';' '''
- p[0] = Import(p[2])
+ p[0] = Import(p[2], revision=self.revision)
+
+ def p_path_elements(self, p):
+ '''path_elements : path_element
+ | path_elements path_element'''
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if type(p[1]) is dict:
+ p[0] = [p[1], p[2]]
+ else:
+ p[0] = p[1] + [p[2]]
+
+ def p_path_element(self, p):
+ '''path_element : STRING_LITERAL STRING_LITERAL ';' '''
+ p[0] = {'path': p[1], 'counter': p[2]}
+
    def p_paths(self, p):
        '''paths : PATHS '{' path_elements '}' ';' '''
        # Wrap the accumulated path elements in a Paths object.
        p[0] = Paths(p[3])
+
    def p_counters(self, p):
        '''counters : COUNTERS ID '{' counter_elements '}' ';' '''
        # p[2] is the counter-set name, p[4] the list of elements.
        p[0] = Counter(p[2], p[4])
+
+ def p_counter_elements(self, p):
+ '''counter_elements : counter_element
+ | counter_elements counter_element'''
+ if len(p) == 2:
+ p[0] = [p[1]]
+ else:
+ if type(p[1]) is dict:
+ p[0] = [p[1], p[2]]
+ else:
+ p[0] = p[1] + [p[2]]
+
+ def p_counter_element(self, p):
+ '''counter_element : ID '{' counter_statements '}' ';' '''
+ p[0] = {**{'name': p[1]}, **p[3]}
+
+ def p_counter_statements(self, p):
+ '''counter_statements : counter_statement
+ | counter_statements counter_statement'''
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = {**p[1], **p[2]}
+
    def p_counter_statement(self, p):
        '''counter_statement : SEVERITY ID ';'
                             | UNITS STRING_LITERAL ';'
                             | DESCRIPTION STRING_LITERAL ';'
                             | TYPE ID ';' '''
        # Each statement becomes a single-entry dict keyed by its keyword.
        p[0] = {p[1]: p[2]}
def p_service(self, p):
'''service : SERVICE '{' service_statements '}' ';' '''
else:
p[0] = Service(p[2], p[4])
    def p_service_statement2(self, p):
        '''service_statement : RPC ID RETURNS ID STREAM ID ';' '''
        # RPC with an explicit stream message type (p[6]).
        p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)
+
def p_event_list(self, p):
'''event_list : events
| event_list events '''
else:
p[0] = Enum(p[2], p[4])
    def p_enumflag(self, p):
        '''enumflag : ENUMFLAG ID '{' enum_statements '}' ';' '''
        # enumflag without an explicit size; EnumFlag defaults to u32.
        p[0] = EnumFlag(p[2], p[4])
+
+ def p_enumflag_type(self, p):
+ ''' enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';' ''' # noqa : E502
+ if len(p) == 9:
+ p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
+ else:
+ p[0] = EnumFlag(p[2], p[4])
+
def p_enum_size(self, p):
''' enum_size : U8
| U16
- | U32 '''
+ | U32
+ | I8
+ | I16
+ | I32 '''
+ p[0] = p[1]
+
    def p_enumflag_size(self, p):
        ''' enumflag_size : U8
                          | U16
                          | U32 '''
        # enumflags are restricted to unsigned storage types.
        p[0] = p[1]
def p_define(self, p):
| MANUAL_ENDIAN
| DONT_TRACE
| TYPEONLY
+ | AUTOENDIAN
| AUTOREPLY'''
if len(p) == 1:
return
'''typedef : TYPEDEF ID '{' block_statements_opt '}' ';' '''
p[0] = Typedef(p[2], [], p[4])
    def p_typedef_flist(self, p):
        '''typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';' '''
        # typedef with a leading flag list (e.g. manual_print).
        p[0] = Typedef(p[3], p[1], p[5])
+
def p_typedef_alias(self, p):
'''typedef : TYPEDEF declaration '''
- p[0] = Using(p[2].fieldname, p[2])
+ p[0] = Using(p[2].fieldname, [], p[2])
+
    def p_typedef_alias_flist(self, p):
        '''typedef : flist TYPEDEF declaration '''
        # Alias typedef with a leading flag list.
        p[0] = Using(p[3].fieldname, p[1], p[3])
def p_block_statements_opt(self, p):
'''block_statements_opt : block_statements '''
def p_enum_statement(self, p):
'''enum_statement : ID '=' NUM ','
- | ID ',' '''
- if len(p) == 5:
- p[0] = [p[1], p[3]]
+ | ID ','
+ | ID '[' field_options ']' ','
+ | ID '=' NUM '[' field_options ']' ',' '''
+ if len(p) == 3:
+ p[0] = {'id': p[1]}
+ elif len(p) == 5:
+ p[0] = {'id': p[1], 'value': p[3]}
+ elif len(p) == 6:
+ p[0] = {'id': p[1], 'option': p[3]}
+ elif len(p) == 8:
+ p[0] = {'id': p[1], 'value': p[3], 'option': p[5]}
else:
- p[0] = p[1]
+ self._parse_error('ERROR', self._token_coord(p, 1))
def p_field_options(self, p):
'''field_options : field_option
if len(p) == 2:
p[0] = p[1]
else:
- p[0] = { **p[1], **p[2] }
+ p[0] = {**p[1], **p[2]}
def p_field_option(self, p):
- '''field_option : ID '=' assignee ','
+ '''field_option : ID
+ | ID '=' assignee ','
| ID '=' assignee
+
+ '''
+ if len(p) == 2:
+ p[0] = {p[1]: None}
+ else:
+ p[0] = {p[1]: p[3]}
+
+ def p_variable_name(self, p):
+ '''variable_name : ID
+ | TYPE
+ | SEVERITY
+ | DESCRIPTION
+ | COUNTERS
+ | PATHS
'''
- p[0] = { p[1]: p[3] }
+ p[0] = p[1]
def p_declaration(self, p):
- '''declaration : type_specifier ID ';'
- | type_specifier ID '[' field_options ']' ';' '''
+ '''declaration : type_specifier variable_name ';'
+ | type_specifier variable_name '[' field_options ']' ';'
+ '''
if len(p) == 7:
p[0] = Field(p[1], p[2], p[4])
elif len(p) == 4:
p[0] = Field(p[1], p[2])
else:
- self._parse_error('ERROR')
+ self._parse_error('ERROR', self._token_coord(p, 1))
self.fields.append(p[2])
    def p_declaration_array_vla(self, p):
        '''declaration : type_specifier variable_name '[' ']' ';' '''
        # "Modern" VLA syntax: an empty [] means variable length.
        p[0] = Array(p[1], p[2], 0, modern_vla=True)
+
def p_declaration_array(self, p):
- '''declaration : type_specifier ID '[' NUM ']' ';'
- | type_specifier ID '[' ID ']' ';' '''
+ '''declaration : type_specifier variable_name '[' NUM ']' ';'
+ | type_specifier variable_name '[' ID ']' ';' '''
+
if len(p) != 7:
return self._parse_error(
'array: %s' % p.value,
p[0] = Array(p[1], p[2], p[4])
def p_option(self, p):
- '''option : OPTION ID '=' assignee ';' '''
- p[0] = Option([p[1], p[2], p[4]])
+ '''option : OPTION ID '=' assignee ';'
+ | OPTION ID ';' '''
+ if len(p) == 4:
+ p[0] = Option(p[2])
+ else:
+ p[0] = Option(p[2], p[4])
def p_assignee(self, p):
'''assignee : NUM
def p_union(self, p):
'''union : UNION ID '{' block_statements_opt '}' ';' '''
- p[0] = Union(p[2], p[4])
+ p[0] = Union(p[2], [], p[4])
+
    def p_union_flist(self, p):
        '''union : flist UNION ID '{' block_statements_opt '}' ';' '''
        # union with a leading flag list.
        p[0] = Union(p[3], p[1], p[5])
# Error rule for syntax errors
def p_error(self, p):
self._parse_error('At end of input', self.filename)
-class VPPAPI(object):
+class VPPAPI():
- def __init__(self, debug=False, filename='', logger=None):
+ def __init__(self, debug=False, filename='', logger=None, revision=None):
self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
- self.parser = yacc.yacc(module=VPPAPIParser(filename, logger),
+ self.parser = yacc.yacc(module=VPPAPIParser(filename, logger,
+ revision=revision),
write_tables=False, debug=debug)
self.logger = logger
+ self.revision = revision
+ self.filename = filename
def parse_string(self, code, debug=0, lineno=1):
self.lexer.lineno = lineno
return self.parser.parse(code, lexer=self.lexer, debug=debug)
- def parse_file(self, fd, debug=0):
+ def parse_fd(self, fd, debug=0):
data = fd.read()
return self.parse_string(data, debug=debug)
- def autoreply_block(self, name):
- block = [Field('u32', 'context'),
- Field('i32', 'retval')]
- return Define(name + '_reply', [], block)
+ def parse_filename(self, filename, debug=0):
+ if self.revision:
+ git_show = 'git show {}:{}'.format(self.revision, filename)
+ proc = Popen(git_show.split(), stdout=PIPE, encoding='utf-8')
+ try:
+ data, errs = proc.communicate()
+ if proc.returncode != 0:
+ print('File not found: {}:{}'
+ .format(self.revision, filename), file=sys.stderr)
+ sys.exit(2)
+ return self.parse_string(data, debug=debug)
+ except Exception:
+ sys.exit(3)
+ else:
+ try:
+ with open(filename, encoding='utf-8') as fd:
+ return self.parse_fd(fd, None)
+ except FileNotFoundError:
+ print('File not found: {}'.format(filename), file=sys.stderr)
+ sys.exit(2)
def process(self, objs):
s = {}
s['Service'] = []
s['types'] = []
s['Import'] = []
- s['Alias'] = {}
+ s['Counters'] = []
+ s['Paths'] = []
crc = 0
for o in objs:
- tname = o.__class__.__name__
try:
- crc = binascii.crc32(o.crc, crc)
+ crc = binascii.crc32(o.crc, crc) & 0xffffffff
except AttributeError:
pass
- if isinstance(o, Define):
- s[tname].append(o)
- if o.autoreply:
- s[tname].append(self.autoreply_block(o.name))
- elif isinstance(o, Option):
- s[tname][o[1]] = o[2]
- elif type(o) is list:
+
+ if type(o) is list:
for o2 in o:
if isinstance(o2, Service):
- s['Service'].append(o2)
- elif (isinstance(o, Enum) or
- isinstance(o, Typedef) or
- isinstance(o, Union)):
- s['types'].append(o)
- elif isinstance(o, Using):
- s['Alias'][o.name] = o.alias
+ o2.process(s)
else:
- if tname not in s:
- raise ValueError('Unknown class type: {} {}'
- .format(tname, o))
- s[tname].append(o)
+ o.process(s)
msgs = {d.name: d for d in s['Define']}
svcs = {s.caller: s for s in s['Service']}
for d in msgs:
if d in seen_services:
continue
- if msgs[d].singular is True:
- continue
if d.endswith('_reply'):
if d[:-6] in svcs:
continue
continue
if d.endswith('_details'):
- if d[:-8]+'_dump' not in msgs:
- raise ValueError('{} missing dump message'
- .format(d))
- continue
+ if d[:-8]+'_get' in msgs:
+ if d[:-8]+'_get' in svcs:
+ continue
+ raise ValueError('{} should be in a stream service'
+ .format(d[:-8]+'_get'))
+ if d[:-8]+'_dump' in msgs:
+ continue
+ raise ValueError('{} missing dump or get message'
+ .format(d))
if d in svcs:
continue
return s
- def process_imports(self, objs, in_import, result):
- imported_objs = []
+ def process_imports(self, objs, in_import, result): # -> List
for o in objs:
# Only allow the following object types from imported file
- if in_import and not (isinstance(o, Enum) or
- isinstance(o, Union) or
- isinstance(o, Typedef) or
- isinstance(o, Import) or
- isinstance(o, Using)):
+ if in_import and not isinstance(o, (Enum, Import, Typedef,
+ Union, Using)):
continue
if isinstance(o, Import):
- self.process_imports(o.result, True, result)
+ result.append(o)
+ result = self.process_imports(o.result, True, result)
else:
result.append(o)
+ return result
# Add message ids to each message.
# Recursively
t = global_types[b.fieldtype]
try:
- crc = crc_block_combine(t.block, crc)
- return foldup_blocks(t.block, crc)
- except:
+ crc = binascii.crc32(t.crc, crc) & 0xffffffff
+ crc = foldup_blocks(t.block, crc)
+ except AttributeError:
pass
return crc
def foldup_crcs(s):
for f in s:
f.crc = foldup_blocks(f.block,
- binascii.crc32(f.crc))
+ binascii.crc32(f.crc) & 0xffffffff)
#
if sys.version_info < (3, 5,):
log.exception('vppapigen requires a supported version of python. '
'Please use version 3.5 or greater. '
- 'Using {}'.format(sys.version))
+ 'Using %s', sys.version)
return 1
cliparser = argparse.ArgumentParser(description='VPP API generator')
- cliparser.add_argument('--pluginpath', default=""),
- cliparser.add_argument('--includedir', action='append'),
- cliparser.add_argument('--input',
- type=argparse.FileType('r', encoding='UTF-8'),
- default=sys.stdin)
+ cliparser.add_argument('--pluginpath', default="")
+ cliparser.add_argument('--includedir', action='append')
+ cliparser.add_argument('--outputdir', action='store')
+ cliparser.add_argument('--input')
cliparser.add_argument('--output', nargs='?',
type=argparse.FileType('w', encoding='UTF-8'),
default=sys.stdout)
cliparser.add_argument('output_module', nargs='?', default='C')
cliparser.add_argument('--debug', action='store_true')
cliparser.add_argument('--show-name', nargs=1)
+ cliparser.add_argument('--git-revision',
+ help="Git revision to use for opening files")
args = cliparser.parse_args()
dirlist_add(args.includedir)
# Filename
if args.show_name:
filename = args.show_name[0]
- elif args.input != sys.stdin:
- filename = args.input.name
+ elif args.input:
+ filename = args.input
else:
filename = ''
else:
logging.basicConfig()
- parser = VPPAPI(debug=args.debug, filename=filename, logger=log)
- parsed_objects = parser.parse_file(args.input, log)
-
- # Build a list of objects. Hash of lists.
- result = []
- parser.process_imports(parsed_objects, False, result)
- s = parser.process(result)
-
- # Add msg_id field
- s['Define'] = add_msg_id(s['Define'])
-
- # Fold up CRCs
- foldup_crcs(s['Define'])
-
- #
- # Debug
- if args.debug:
- import pprint
- pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
- for t in s['Define']:
- pp.pprint([t.name, t.flags, t.block])
- for t in s['types']:
- pp.pprint([t.name, t.block])
-
#
# Generate representation
#
plugin = SourceFileLoader(args.output_module,
module_path).load_module()
except Exception as err:
- log.exception('Error importing output plugin: {}, {}'
- .format(module_path, err))
+ log.exception('Error importing output plugin: %s, %s',
+ module_path, err)
return 1
- result = plugin.run(filename, s)
+ parser = VPPAPI(debug=args.debug, filename=filename, logger=log,
+ revision=args.git_revision)
+
+ try:
+ if not args.input:
+ parsed_objects = parser.parse_fd(sys.stdin, log)
+ else:
+ parsed_objects = parser.parse_filename(args.input, log)
+ except ParseError as e:
+ print('Parse error: ', e, file=sys.stderr)
+ sys.exit(1)
+
+ # Build a list of objects. Hash of lists.
+ result = []
+
+ # if the variable is not set in the plugin, assume it to be false.
+ try:
+ plugin.process_imports
+ except AttributeError:
+ plugin.process_imports = False
+
+ if plugin.process_imports:
+ result = parser.process_imports(parsed_objects, False, result)
+ s = parser.process(result)
+ else:
+ s = parser.process(parsed_objects)
+ imports = parser.process_imports(parsed_objects, False, result)
+ s['imported'] = parser.process(imports)
+
+ # Add msg_id field
+ s['Define'] = add_msg_id(s['Define'])
+
+ # Fold up CRCs
+ foldup_crcs(s['Define'])
+
+ #
+ # Debug
+ if args.debug:
+ import pprint
+ pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
+ for t in s['Define']:
+ pp.pprint([t.name, t.flags, t.block])
+ for t in s['types']:
+ pp.pprint([t.name, t.block])
+
+ result = plugin.run(args, filename, s)
if result:
print(result, file=args.output)
else:
- log.exception('Running plugin failed: {} {}'
- .format(filename, result))
+ log.exception('Running plugin failed: %s %s', filename, result)
return 1
return 0