author     Ole Troan <ot@cisco.com>        2018-06-12 21:06:44 +0200
committer  Damjan Marion <dmarion@me.com>  2018-06-17 15:21:00 +0000
commit     a5ee900fb75201bbfceaf13c8bc57a13ed094988 (patch)
tree       ac866f01fca454d05ade6a6be200a429ea786f99 /src/vpp-api/python
parent     76440d9033822da4123d1c5bca1d4df12aef6280 (diff)
Python API: Add enum and union support.
This also rewrites the encoders/decoders to make them more readable and extensible.

Change-Id: I253369ac76303922bf9c11377622c8974fa92f19
Signed-off-by: Ole Troan <ot@cisco.com>
Diffstat (limited to 'src/vpp-api/python')
-rw-r--r--   src/vpp-api/python/setup.py                                              |  11
-rwxr-xr-x   src/vpp-api/python/tests/test_vpp_serializer.py                          | 114
-rw-r--r--   src/vpp-api/python/vpp_papi/__init__.py                                  |   1
-rw-r--r--   src/vpp-api/python/vpp_papi/vpp_papi.py (renamed from src/vpp-api/python/vpp_papi.py) | 393
-rw-r--r--   src/vpp-api/python/vpp_papi/vpp_serializer.py                            | 332
5 files changed, 542 insertions, 309 deletions
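
For orientation, the new vpp_serializer module introduced by this patch turns the JSON type definitions into Python objects that pack and unpack directly; a minimal sketch of that usage, mirroring tests/test_vpp_serializer.py below (the flat import path assumes the vpp_papi package directory is on sys.path, as the tests do):

    # Sketch only: declare types the way the generated .api.json definitions would.
    from vpp_serializer import VPPType, VPPEnumType, VPPUnionType
    from socket import inet_pton, AF_INET

    af = VPPEnumType('vl_api_address_family_t',
                     [["ADDRESS_IP4", 0], ["ADDRESS_IP6", 1],
                      {"enumtype": "u32"}])
    ip4 = VPPType('vl_api_ip4_address_t', [['u8', 'address', 4]])
    ip6 = VPPType('vl_api_ip6_address_t', [['u8', 'address', 16]])
    VPPUnionType('vl_api_address_union_t',
                 [["vl_api_ip4_address_t", "ip4"],
                  ["vl_api_ip6_address_t", "ip6"]])
    address = VPPType('address', [['vl_api_address_family_t', 'af'],
                                  ['vl_api_address_union_t', 'un']])

    # Pack from a plain dict, unpack back into a namedtuple.
    b = address.pack({'af': af.ADDRESS_IP4,
                      'un': {'ip4': {'address': inet_pton(AF_INET, '2.2.2.2')}}})
    nt = address.unpack(b)
    assert nt.af == af.ADDRESS_IP4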
diff --git a/src/vpp-api/python/setup.py b/src/vpp-api/python/setup.py
index abda43de606..20b9901d521 100644
--- a/src/vpp-api/python/setup.py
+++ b/src/vpp-api/python/setup.py
@@ -13,21 +13,20 @@
# limitations under the License.
try:
- from setuptools import setup
+ from setuptools import setup, find_packages
except ImportError:
- from distutils.core import setup
+ from distutils.core import setup, find_packages
setup (name = 'vpp_papi',
- version = '1.4',
+ version = '1.5',
description = 'VPP Python binding',
author = 'Ole Troan',
author_email = 'ot@cisco.com',
url = 'https://wiki.fd.io/view/VPP/Python_API',
- python_requires='>=2.7, >=3.3',
license = 'Apache-2.0',
test_suite = 'tests',
- install_requires=['cffi >= 1.6'],
- py_modules=['vpp_papi'],
+ install_requires=['cffi >= 1.6', 'enum34'],
+ packages=find_packages(),
long_description = '''VPP Python language binding.''',
zip_safe = True,
)
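
One caveat with the fallback import above: find_packages() is a setuptools-only helper and distutils.core does not provide it, so the except branch would itself raise ImportError on a system without setuptools. A more defensive variant could look like this sketch (not part of this change):

    try:
        from setuptools import setup, find_packages
    except ImportError:
        from distutils.core import setup

        def find_packages():
            # Minimal stand-in for the no-argument call used below;
            # the only package shipped here is vpp_papi.
            return ['vpp_papi']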
diff --git a/src/vpp-api/python/tests/test_vpp_serializer.py b/src/vpp-api/python/tests/test_vpp_serializer.py
new file mode 100755
index 00000000000..6b867f9e6fe
--- /dev/null
+++ b/src/vpp-api/python/tests/test_vpp_serializer.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python3
+
+import unittest
+from vpp_serializer import VPPType, VPPEnumType, VPPUnionType
+from socket import inet_pton, AF_INET, AF_INET6
+import logging
+
+logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)
+es_logger = logging.getLogger('vpp_serializer')
+es_logger.setLevel(logging.DEBUG)
+
+class VPPMessage(VPPType):
+ pass
+
+
+class TestAddType(unittest.TestCase):
+
+ def test_union(self):
+ un = VPPUnionType('test_union',
+ [['u8', 'is_bool'],
+ ['u32', 'is_int']])
+
+ b = un.pack({'is_int': 0x1234})
+ self.assertEqual(len(b), 4)
+ nt = un.unpack(b)
+ self.assertEqual(nt.is_bool, 52)
+ self.assertEqual(nt.is_int, 0x1234)
+
+ def test_address(self):
+ af = VPPEnumType('vl_api_address_family_t', [["ADDRESS_IP4", 0],
+ ["ADDRESS_IP6", 1],
+ {"enumtype": "u32"}])
+ ip4 = VPPType('vl_api_ip4_address_t', [['u8', 'address', 4]])
+ ip6 = VPPType('vl_api_ip6_address_t', [['u8', 'address', 16]])
+ VPPUnionType('vl_api_address_union_t',
+ [["vl_api_ip4_address_t", "ip4"],
+ ["vl_api_ip6_address_t", "ip6"]])
+
+ address = VPPType('address', [['vl_api_address_family_t', 'af'],
+ ['vl_api_address_union_t', 'un']])
+
+ b = ip4.pack({'address': inet_pton(AF_INET, '1.1.1.1')})
+ self.assertEqual(len(b), 4)
+ nt = ip4.unpack(b)
+ self.assertEqual(nt.address, inet_pton(AF_INET, '1.1.1.1'))
+
+ b = ip6.pack({'address': inet_pton(AF_INET6, '1::1')})
+ self.assertEqual(len(b), 16)
+
+ b = address.pack({'af': af.ADDRESS_IP4,
+ 'un':
+ {'ip4':
+ {'address': inet_pton(AF_INET, '2.2.2.2')}}})
+ self.assertEqual(len(b), 20)
+
+ nt = address.unpack(b)
+ print('NT union', nt)
+ self.assertEqual(nt.af, af.ADDRESS_IP4)
+ self.assertEqual(nt.un.ip4.address,
+ inet_pton(AF_INET, '2.2.2.2'))
+ self.assertEqual(nt.un.ip6.address,
+ inet_pton(AF_INET6, '::0202:0202'))
+
+ def test_arrays(self):
+ # Test cases
+ # 1. Fixed list
+ # 2. Fixed list of variable length sub type
+ # 3. Variable length type
+ #
+ ip4 = VPPType('ip4_address', [['u8', 'address', 4]])
+ listip4 = VPPType('list_ip4_t', [['ip4_address', 'addresses', 4]])
+ valistip4 = VPPType('list_ip4_t',
+ [['u8', 'count'],
+ ['ip4_address', 'addresses', 0, 'count']])
+
+ valistip4_legacy = VPPType('list_ip4_t',
+ [['u8', 'foo'],
+ ['ip4_address', 'addresses', 0]])
+
+ addresses = []
+ for i in range(4):
+ addresses.append({'address': inet_pton(AF_INET, '2.2.2.2')})
+ b = listip4.pack({'addresses': addresses})
+ self.assertEqual(len(b), 16)
+ nt = listip4.unpack(b)
+
+ self.assertEqual(nt.addresses[0].address,
+ inet_pton(AF_INET, '2.2.2.2'))
+
+ b = valistip4.pack({'count': len(addresses), 'addresses': addresses})
+ self.assertEqual(len(b), 17)
+
+ nt = valistip4.unpack(b)
+ print('NT', nt)
+
+ b = valistip4_legacy.pack({'foo': 1, 'addresses': addresses})
+ self.assertEqual(len(b), 17)
+ nt = valistip4_legacy.unpack(b)
+ print('NT', nt)
+
+
+ def test_message(self):
+ foo = VPPMessage('foo', [['u16', '_vl_msg_id'],
+ ['u8', 'client_index'],
+ ['u8', 'something'],
+ {"crc": "0x559b9f3c"}])
+ b = foo.pack({'_vl_msg_id': 1, 'client_index': 5,
+ 'something': 200})
+ self.assertEqual(len(b), 4)
+ nt = foo.unpack(b)
+ print('NT', nt)
+
+if __name__ == '__main__':
+ unittest.main()
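
The union test above relies on how VPPUnionType lays members out: the buffer is sized to the largest member and each member sits at size - member_size from the start, so the u8 view of the packed u32 reads back its last byte (0x34 == 52). A small sketch of that behaviour, assuming the same import path as the tests:

    from vpp_serializer import VPPUnionType

    un = VPPUnionType('test_union', [['u8', 'is_bool'], ['u32', 'is_int']])
    b = un.pack({'is_int': 0x1234})   # 4 bytes: the size of the largest member (u32)
    nt = un.unpack(b)
    assert nt.is_int == 0x1234        # full 32-bit view
    assert nt.is_bool == 0x34         # u8 view taken from the last byte of the buffer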
diff --git a/src/vpp-api/python/vpp_papi/__init__.py b/src/vpp-api/python/vpp_papi/__init__.py
new file mode 100644
index 00000000000..f9afcf17f29
--- /dev/null
+++ b/src/vpp-api/python/vpp_papi/__init__.py
@@ -0,0 +1 @@
+from .vpp_papi import *
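
The package re-export keeps the public import path stable for existing applications; a minimal usage sketch (assuming the API JSON files are installed where the library expects them):

    from vpp_papi import VPP

    vpp = VPP()                    # reads the installed *.api.json definitions
    vpp.connect('example-client')  # vpp.api.<message>(...) calls are then available as before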
diff --git a/src/vpp-api/python/vpp_papi.py b/src/vpp-api/python/vpp_papi/vpp_papi.py
index ece0e4ee52d..5ff8064d425 100644
--- a/src/vpp-api/python/vpp_papi.py
+++ b/src/vpp-api/python/vpp_papi/vpp_papi.py
@@ -27,6 +27,7 @@ import weakref
import atexit
from cffi import FFI
import cffi
+from vpp_serializer import VPPType, VPPEnumType, VPPUnionType, BaseTypes
if sys.version[0] == '2':
import Queue as queue
@@ -103,6 +104,9 @@ class FuncWrapper(object):
return self._func(**kwargs)
+class VPPMessage(VPPType):
+ pass
+
class VPP():
"""VPP interface.
@@ -115,8 +119,56 @@ class VPP():
provides a means to register a callback function to receive
these messages in a background thread.
"""
+
+ def process_json_file(self, apidef_file):
+ api = json.load(apidef_file)
+ types = {}
+ for t in api['enums']:
+ t[0] = 'vl_api_' + t[0] + '_t'
+ types[t[0]] = {'type': 'enum', 'data': t}
+ for t in api['unions']:
+ t[0] = 'vl_api_' + t[0] + '_t'
+ types[t[0]] = {'type': 'union', 'data': t}
+ for t in api['types']:
+ t[0] = 'vl_api_' + t[0] + '_t'
+ types[t[0]] = {'type': 'type', 'data': t}
+
+ i = 0
+ while True:
+ unresolved = {}
+ for k, v in types.items():
+ t = v['data']
+ if v['type'] == 'enum':
+ try:
+ VPPEnumType(t[0], t[1:])
+ except ValueError:
+ unresolved[k] = v
+ elif v['type'] == 'union':
+ try:
+ VPPUnionType(t[0], t[1:])
+ except ValueError:
+ unresolved[k] = v
+ elif v['type'] == 'type':
+ try:
+ VPPType(t[0], t[1:])
+ except ValueError:
+ unresolved[k] = v
+ if len(unresolved) == 0:
+ break
+ if i > 3:
+ raise ValueError('Unresolved type definitions {}'
+ .format(unresolved))
+ types = unresolved
+ i += 1
+
+ for m in api['messages']:
+ try:
+ self.messages[m[0]] = VPPMessage(m[0], m[1:])
+ except NotImplementedError:
+ self.logger.error('Not implemented error for {}'.format(m[0]))
+
def __init__(self, apifiles=None, testmode=False, async_thread=True,
- logger=None, loglevel=None,
+ logger=logging.getLogger('vpp_papi'), loglevel='debug',
read_timeout=0):
"""Create a VPP API object.
@@ -137,14 +189,14 @@ class VPP():
logger = logging.getLogger(__name__)
if loglevel is not None:
logger.setLevel(loglevel)
-
self.logger = logger
self.messages = {}
self.id_names = []
self.id_msgdef = []
self.connected = False
- self.header = struct.Struct('>HI')
+ self.header = VPPType('header', [['u16', 'msgid'],
+ ['u32', 'client_index']])
self.apifiles = []
self.event_callback = None
self.message_queue = queue.Queue()
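
The message header, previously a bare struct.Struct('>HI'), is now an ordinary VPPType and packs identically; for illustration (a sketch, with the serializer imported flat as in the tests):

    import struct
    from vpp_serializer import VPPType

    header = VPPType('header', [['u16', 'msgid'], ['u32', 'client_index']])
    raw = header.pack({'msgid': 17, 'client_index': 0})
    assert raw == struct.pack('>HI', 17, 0)
    hdr = header.unpack(raw)
    assert (hdr.msgid, hdr.client_index) == (17, 0)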
@@ -165,12 +217,8 @@ class VPP():
for file in apifiles:
with open(file) as apidef_file:
- api = json.load(apidef_file)
- for t in api['types']:
- self.add_type(t[0], t[1:])
+ self.process_json_file(apidef_file)
- for m in api['messages']:
- self.add_message(m[0], m[1:])
self.apifiles = apifiles
# Basic sanity check
@@ -181,7 +229,8 @@ class VPP():
atexit.register(vpp_atexit, weakref.ref(self))
# Register error handler
- vpp_api.vac_set_error_handler(vac_error_handler)
+ if not testmode:
+ vpp_api.vac_set_error_handler(vac_error_handler)
# Support legacy CFFI
# from_buffer supported from 1.8.0
@@ -334,305 +383,38 @@ class VPP():
print('Connected') if self.connected else print('Not Connected')
print('Read API definitions from', ', '.join(self.apifiles))
- def __struct(self, t, n=None, e=-1, vl=None):
- """Create a packing structure for a message."""
- base_types = {'u8': 'B',
- 'u16': 'H',
- 'u32': 'I',
- 'i32': 'i',
- 'u64': 'Q',
- 'f64': 'd', }
-
- if t in base_types:
- if not vl:
- if e > 0 and t == 'u8':
- # Fixed byte array
- s = struct.Struct('>' + str(e) + 's')
- return s.size, s
- if e > 0:
- # Fixed array of base type
- s = struct.Struct('>' + base_types[t])
- return s.size, [e, s]
- elif e == 0:
- # Old style variable array
- s = struct.Struct('>' + base_types[t])
- return s.size, [-1, s]
- else:
- # Variable length array
- if t == 'u8':
- s = struct.Struct('>s')
- return s.size, [vl, s]
- else:
- s = struct.Struct('>' + base_types[t])
- return s.size, [vl, s]
-
- s = struct.Struct('>' + base_types[t])
- return s.size, s
-
- if t in self.messages:
- size = self.messages[t]['sizes'][0]
-
- # Return a list in case of array
- if e > 0 and not vl:
- return size, [e, lambda self, encode, buf, offset, args: (
- self.__struct_type(encode, self.messages[t], buf, offset,
- args))]
- if vl:
- return size, [vl, lambda self, encode, buf, offset, args: (
- self.__struct_type(encode, self.messages[t], buf, offset,
- args))]
- elif e == 0:
- # Old style VLA
- raise NotImplementedError(1,
- 'No support for compound types ' + t)
- return size, lambda self, encode, buf, offset, args: (
- self.__struct_type(encode, self.messages[t], buf, offset, args)
- )
-
- raise ValueError(1, 'Invalid message type: ' + t)
-
- def __struct_type(self, encode, msgdef, buf, offset, kwargs):
- """Get a message packer or unpacker."""
- if encode:
- return self.__struct_type_encode(msgdef, buf, offset, kwargs)
- else:
- return self.__struct_type_decode(msgdef, buf, offset)
-
- def __struct_type_encode(self, msgdef, buf, offset, kwargs):
- off = offset
- size = 0
-
- for k in kwargs:
- if k not in msgdef['args']:
- raise ValueError(1, 'Non existing argument [' + k + ']' +
- ' used in call to: ' +
- self.id_names[kwargs['_vl_msg_id']] + '()')
-
- for k, v in vpp_iterator(msgdef['args']):
- off += size
- if k in kwargs:
- if type(v) is list:
- if callable(v[1]):
- e = kwargs[v[0]] if v[0] in kwargs else v[0]
- if e != len(kwargs[k]):
- raise (ValueError(1,
- 'Input list length mismatch: '
- '%s (%s != %s)' %
- (k, e, len(kwargs[k]))))
- size = 0
- for i in range(e):
- size += v[1](self, True, buf, off + size,
- kwargs[k][i])
- else:
- if v[0] in kwargs:
- kwargslen = kwargs[v[0]]
- if kwargslen != len(kwargs[k]):
- raise ValueError(1,
- 'Input list length mismatch:'
- ' %s (%s != %s)' %
- (k, kwargslen,
- len(kwargs[k])))
- else:
- kwargslen = len(kwargs[k])
- if v[1].size == 1:
- buf[off:off + kwargslen] = bytearray(kwargs[k])
- size = kwargslen
- else:
- size = 0
- for i in kwargs[k]:
- v[1].pack_into(buf, off + size, i)
- size += v[1].size
- else:
- if callable(v):
- size = v(self, True, buf, off, kwargs[k])
- else:
- if type(kwargs[k]) is str and v.size < len(kwargs[k]):
- raise ValueError(1,
- 'Input list length mismatch: '
- '%s (%s < %s)' %
- (k, v.size, len(kwargs[k])))
- v.pack_into(buf, off, kwargs[k])
- size = v.size
- else:
- size = v.size if not type(v) is list else 0
-
- return off + size - offset
-
- def __getitem__(self, name):
- if name in self.messages:
- return self.messages[name]
- return None
-
- def get_size(self, sizes, kwargs):
- total_size = sizes[0]
- for e in sizes[1]:
- if e in kwargs and type(kwargs[e]) is list:
- total_size += len(kwargs[e]) * sizes[1][e]
- return total_size
-
- def encode(self, msgdef, kwargs):
- # Make suitably large buffer
- size = self.get_size(msgdef['sizes'], kwargs)
- buf = bytearray(size)
- offset = 0
- size = self.__struct_type(True, msgdef, buf, offset, kwargs)
- return buf[:offset + size]
-
- def decode(self, msgdef, buf):
- return self.__struct_type(False, msgdef, buf, 0, None)[1]
-
- def __struct_type_decode(self, msgdef, buf, offset):
- res = []
- off = offset
- size = 0
- for k, v in vpp_iterator(msgdef['args']):
- off += size
- if type(v) is list:
- lst = []
- if callable(v[1]): # compound type
- size = 0
- if v[0] in msgdef['args']: # vla
- e = res[v[2]]
- else: # fixed array
- e = v[0]
- res.append(lst)
- for i in range(e):
- (s, l) = v[1](self, False, buf, off + size, None)
- lst.append(l)
- size += s
- continue
- if v[1].size == 1:
- if type(v[0]) is int:
- size = len(buf) - off
- else:
- size = res[v[2]]
- res.append(buf[off:off + size])
- else:
- e = v[0] if type(v[0]) is int else res[v[2]]
- if e == -1:
- e = (len(buf) - off) / v[1].size
- lst = []
- res.append(lst)
- size = 0
- for i in range(e):
- lst.append(v[1].unpack_from(buf, off + size)[0])
- size += v[1].size
- else:
- if callable(v):
- size = 0
- (s, l) = v(self, False, buf, off, None)
- res.append(l)
- size += s
- else:
- res.append(v.unpack_from(buf, off)[0])
- size = v.size
-
- return off + size - offset, msgdef['return_tuple']._make(res)
-
- def ret_tup(self, name):
- if name in self.messages and 'return_tuple' in self.messages[name]:
- return self.messages[name]['return_tuple']
- return None
+ @property
+ def api(self):
+ if not hasattr(self, "_api"):
+ raise Exception("Not connected, api definitions not available")
+ return self._api
- def duplicate_check_ok(self, name, msgdef):
- crc = None
- for c in msgdef:
- if type(c) is dict and 'crc' in c:
- crc = c['crc']
- break
- if crc:
- # We can get duplicates because of imports
- if crc == self.messages[name]['crc']:
- return True
- return False
-
- def add_message(self, name, msgdef, typeonly=False):
- if name in self.messages:
- if typeonly:
- if self.duplicate_check_ok(name, msgdef):
- return
- raise ValueError('Duplicate message name: ' + name)
-
- args = collections.OrderedDict()
- argtypes = collections.OrderedDict()
- fields = []
- msg = {}
- total_size = 0
- sizes = {}
- for i, f in enumerate(msgdef):
- if type(f) is dict and 'crc' in f:
- msg['crc'] = f['crc']
- continue
- field_type = f[0]
- field_name = f[1]
- if len(f) == 3 and f[2] == 0 and i != len(msgdef) - 2:
- raise ValueError('Variable Length Array must be last: ' + name)
- size, s = self.__struct(*f)
- args[field_name] = s
- if type(s) == list and type(s[0]) == int and \
- type(s[1]) == struct.Struct:
- if s[0] < 0:
- sizes[field_name] = size
- else:
- sizes[field_name] = size
- total_size += s[0] * size
- else:
- sizes[field_name] = size
- total_size += size
-
- argtypes[field_name] = field_type
- if len(f) == 4: # Find offset to # elements field
- idx = list(args.keys()).index(f[3]) - i
- args[field_name].append(idx)
- fields.append(field_name)
- msg['return_tuple'] = collections.namedtuple(name, fields,
- rename=True)
- self.messages[name] = msg
- self.messages[name]['args'] = args
- self.messages[name]['argtypes'] = argtypes
- self.messages[name]['typeonly'] = typeonly
- self.messages[name]['sizes'] = [total_size, sizes]
- return self.messages[name]
-
- def add_type(self, name, typedef):
- return self.add_message('vl_api_' + name + '_t', typedef,
- typeonly=True)
-
- def make_function(self, name, i, msgdef, multipart, async):
+ def make_function(self, msg, i, multipart, async):
if (async):
def f(**kwargs):
- return self._call_vpp_async(i, msgdef, **kwargs)
+ return self._call_vpp_async(i, msg, **kwargs)
else:
def f(**kwargs):
- return self._call_vpp(i, msgdef, multipart, **kwargs)
- args = self.messages[name]['args']
- argtypes = self.messages[name]['argtypes']
- f.__name__ = str(name)
+ return self._call_vpp(i, msg, multipart, **kwargs)
+
+ f.__name__ = str(msg.name)
f.__doc__ = ", ".join(["%s %s" %
- (argtypes[k], k) for k in args.keys()])
+ (msg.fieldtypes[j], k) for j, k in enumerate(msg.fields)])
return f
- @property
- def api(self):
- if not hasattr(self, "_api"):
- raise Exception("Not connected, api definitions not available")
- return self._api
-
def _register_functions(self, async=False):
self.id_names = [None] * (self.vpp_dictionary_maxid + 1)
self.id_msgdef = [None] * (self.vpp_dictionary_maxid + 1)
self._api = Empty()
- for name, msgdef in vpp_iterator(self.messages):
- if self.messages[name]['typeonly']:
- continue
- crc = self.messages[name]['crc']
- n = name + '_' + crc[2:]
+ for name, msg in vpp_iterator(self.messages):
+ n = name + '_' + msg.crc[2:]
i = vpp_api.vac_get_msg_index(n.encode())
if i > 0:
- self.id_msgdef[i] = msgdef
+ self.id_msgdef[i] = msg
self.id_names[i] = name
+ # TODO: Fix multipart (use services)
multipart = True if name.find('_dump') > 0 else False
- f = self.make_function(name, i, msgdef, multipart, async)
+ f = self.make_function(msg, i, multipart, async)
setattr(self._api, name, FuncWrapper(f))
else:
self.logger.debug(
@@ -669,12 +451,11 @@ class VPP():
if rv != 0:
raise IOError(2, 'Connect failed')
self.connected = True
-
self.vpp_dictionary_maxid = vpp_api.vac_msg_table_max_index()
self._register_functions(async=async)
# Initialise control ping
- crc = self.messages['control_ping']['crc']
+ crc = self.messages['control_ping'].crc
self.control_ping_index = vpp_api.vac_get_msg_index(
('control_ping' + '_' + crc[2:]).encode())
self.control_ping_msgdef = self.messages['control_ping']
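
Once connected, _register_functions exposes every known message as a generated function on the api property, with a docstring built from the message's field types and names; a usage sketch, with show_version as an illustrative message:

    # After vpp.connect(...), messages are callable attributes of vpp.api.
    rv = vpp.api.show_version()   # request/reply call, returns the reply as a namedtuple
    print(rv.version)             # reply fields are accessed as attributes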
@@ -743,18 +524,18 @@ class VPP():
self.logger.warning('vpp_api.read failed')
return
- i, ci = self.header.unpack_from(msg, 0)
+ i, ci = self.header.unpack(msg, 0)
if self.id_names[i] == 'rx_thread_exit':
return
#
# Decode message and returns a tuple.
#
- msgdef = self.id_msgdef[i]
- if not msgdef:
+ msgobj = self.id_msgdef[i]
+ if not msgobj:
raise IOError(2, 'Reply message undefined')
- r = self.decode(msgdef, msg)
+ r = msgobj.unpack(msg)
return r
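
The receive path now peeks at the header to find the message id, looks the corresponding VPPMessage up in id_msgdef, and lets it unpack the whole buffer; a self-contained sketch of that dispatch (example_reply is a made-up message for illustration):

    from vpp_serializer import VPPType

    header = VPPType('header', [['u16', 'msgid'], ['u32', 'client_index']])
    reply_def = VPPType('example_reply', [['u16', '_vl_msg_id'],
                                          ['u32', 'client_index'],
                                          ['u32', 'context'],
                                          ['i32', 'retval']])
    wire = reply_def.pack({'_vl_msg_id': 42, 'context': 7, 'retval': 0})
    hdr = header.unpack(wire)            # peek at the message id first
    assert hdr.msgid == 42               # ... then dispatch to the matching message object
    reply = reply_def.unpack(wire)
    assert reply.context == 7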
@@ -778,7 +559,12 @@ class VPP():
self.control_ping_msgdef,
context=context)
- def _call_vpp(self, i, msgdef, multipart, **kwargs):
+ def validate_args(self, msg, kwargs):
+ d = set(kwargs.keys()) - set(msg.field_by_name.keys())
+ if d:
+ raise ValueError('Invalid argument {} to {}'.format(list(d), msg.name))
+
+ def _call_vpp(self, i, msg, multipart, **kwargs):
"""Given a message, send the message and await a reply.
msgdef - the message packing definition
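
validate_args rejects any keyword argument that is not a field of the message before packing, turning silent typos into immediate errors; a sketch of the check it performs (field list abbreviated and illustrative):

    from vpp_serializer import VPPType

    msg = VPPType('sw_interface_set_flags',
                  [['u32', 'sw_if_index'], ['u8', 'admin_up_down']])
    kwargs = {'sw_if_idx': 1}                       # misspelled field name
    unknown = set(kwargs) - set(msg.field_by_name)  # what validate_args computes
    assert unknown == {'sw_if_idx'}                 # -> ValueError('Invalid argument ...')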
@@ -800,8 +586,9 @@ class VPP():
else:
context = kwargs['context']
kwargs['_vl_msg_id'] = i
- b = self.encode(msgdef, kwargs)
+ self.validate_args(msg, kwargs)
+ b = msg.pack(kwargs)
vpp_api.vac_rx_suspend()
self._write(b)
@@ -816,7 +603,6 @@ class VPP():
msg = self._read()
if not msg:
raise IOError(2, 'VPP API client: read failed')
-
r = self.decode_incoming_msg(msg)
msgname = type(r).__name__
if context not in r or r.context == 0 or context != r.context:
@@ -835,7 +621,7 @@ class VPP():
return rl
- def _call_vpp_async(self, i, msgdef, **kwargs):
+ def _call_vpp_async(self, i, msg, **kwargs):
"""Given a message, send the message and await a reply.
msgdef - the message packing definition
@@ -849,8 +635,9 @@ class VPP():
kwargs['context'] = context
else:
context = kwargs['context']
+ kwargs['client_index'] = 0
kwargs['_vl_msg_id'] = i
- b = self.encode(msgdef, kwargs)
+ b = msg.pack(kwargs)
self._write(b)
diff --git a/src/vpp-api/python/vpp_papi/vpp_serializer.py b/src/vpp-api/python/vpp_papi/vpp_serializer.py
new file mode 100644
index 00000000000..146a8f6919a
--- /dev/null
+++ b/src/vpp-api/python/vpp_papi/vpp_serializer.py
@@ -0,0 +1,332 @@
+#
+# Copyright (c) 2018 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import struct
+import collections
+from enum import IntEnum
+import logging
+
+#
+# Set log-level in application by doing e.g.:
+# logger = logging.getLogger('vpp_serializer')
+# logger.setLevel(logging.DEBUG)
+#
+logger = logging.getLogger(__name__)
+FORMAT = "[%(filename)s:%(lineno)s - %(funcName)s() ] %(message)s"
+logging.basicConfig(format=FORMAT)
+
+
+class BaseTypes():
+ def __init__(self, type, elements=0):
+ base_types = {'u8': '>B',
+ 'u16': '>H',
+ 'u32': '>I',
+ 'i32': '>i',
+ 'u64': '>Q',
+ 'f64': '>d',
+ 'header': '>HI'}
+
+ if elements > 0 and type == 'u8':
+ self.packer = struct.Struct('>%ss' % elements)
+ else:
+ self.packer = struct.Struct(base_types[type])
+ self.size = self.packer.size
+ logger.debug('Adding {} with format: {}'
+ .format(type, base_types[type]))
+
+ def pack(self, data, kwargs=None):
+ logger.debug("Data: {} Format: {}".format(data, self.packer.format))
+ return self.packer.pack(data)
+
+ def unpack(self, data, offset, result=None):
+ logger.debug("@ {} Format: {}".format(offset, self.packer.format))
+ return self.packer.unpack_from(data, offset)[0]
+
+
+types = {}
+types['u8'] = BaseTypes('u8')
+types['u16'] = BaseTypes('u16')
+types['u32'] = BaseTypes('u32')
+types['i32'] = BaseTypes('i32')
+types['u64'] = BaseTypes('u64')
+types['f64'] = BaseTypes('f64')
+
+
+class FixedList_u8():
+ def __init__(self, name, field_type, num):
+ self.name = name
+ self.num = num
+ self.packer = BaseTypes(field_type, num)
+ self.size = self.packer.size
+
+ def pack(self, list, kwargs):
+ logger.debug("Data: {}".format(list))
+
+ if len(list) > self.num:
+ raise ValueError('Fixed list length error for "{}", got: {}'
+ ' expected: {}'
+ .format(self.name, len(list), self.num))
+ return self.packer.pack(list)
+
+ def unpack(self, data, offset=0, result=None):
+ if len(data[offset:]) < self.num:
+ raise ValueError('Invalid array length for "{}" got {}'
+ ' expected {}'
+ .format(self.name, len(data), self.num))
+ return self.packer.unpack(data, offset)
+
+
+class FixedList():
+ def __init__(self, name, field_type, num):
+ self.num = num
+ self.packer = types[field_type]
+ self.size = self.packer.size * num
+
+ def pack(self, list, kwargs):
+ logger.debug("Data: {}".format(list))
+
+ if len(list) != self.num:
+ raise ValueError('Fixed list length error, got: {} expected: {}'
+ .format(len(list), self.num))
+ b = bytes()
+ for e in list:
+ b += self.packer.pack(e)
+ return b
+
+ def unpack(self, data, offset=0, result=None):
+ # Return a list of arguments
+ result = []
+ for e in range(self.num):
+ x = self.packer.unpack(data, offset)
+ result.append(x)
+ offset += self.packer.size
+ return result
+
+
+class VLAList():
+ def __init__(self, name, field_type, len_field_name, index):
+ self.index = index
+ self.packer = types[field_type]
+ self.size = self.packer.size
+ self.length_field = len_field_name
+
+ def pack(self, list, kwargs=None):
+ logger.debug("Data: {}".format(list))
+ if len(list) != kwargs[self.length_field]:
+ raise ValueError('Variable length error, got: {} expected: {}'
+ .format(len(list), kwargs[self.length_field]))
+ b = bytes()
+
+ # u8 array
+ if self.packer.size == 1:
+ p = BaseTypes('u8', len(list))
+ return p.pack(list)
+
+ for e in list:
+ b += self.packer.pack(e)
+ return b
+
+ def unpack(self, data, offset=0, result=None):
+ logger.debug("Data: {} @ {} Result: {}"
+ .format(list, offset, result[self.index]))
+ # Return a list of arguments
+
+ # u8 array
+ if self.packer.size == 1:
+ if result[self.index] == 0:
+ return b''
+ p = BaseTypes('u8', result[self.index])
+ r = p.unpack(data, offset)
+ return r
+
+ r = []
+ for e in range(result[self.index]):
+ x = self.packer.unpack(data, offset)
+ r.append(x)
+ offset += self.packer.size
+ return r
+
+
+class VLAList_legacy():
+ def __init__(self, name, field_type):
+ self.packer = types[field_type]
+ self.size = self.packer.size
+
+ def pack(self, list, kwargs=None):
+ logger.debug("Data: {}".format(list))
+ b = bytes()
+ for e in list:
+ b += self.packer.pack(e)
+ return b
+
+ def unpack(self, data, offset=0, result=None):
+ # Return a list of arguments
+ if (len(data) - offset) % self.packer.size:
+ raise ValueError('Legacy Variable Length Array length mismatch.')
+ elements = int((len(data) - offset) / self.packer.size)
+ r = []
+ logger.debug("Legacy VLA: {} elements of size {}"
+ .format(elements, self.packer.size))
+ for e in range(elements):
+ x = self.packer.unpack(data, offset)
+ r.append(x)
+ offset += self.packer.size
+ return r
+
+
+class VPPEnumType():
+ def __init__(self, name, msgdef):
+ self.size = types['u32'].size
+ e_hash = {}
+ for f in msgdef:
+ if type(f) is dict and 'enumtype' in f:
+ if f['enumtype'] != 'u32':
+ raise NotImplementedError
+ continue
+ ename, evalue = f
+ e_hash[ename] = evalue
+ self.enum = IntEnum(name, e_hash)
+ types[name] = self
+ logger.debug('Adding enum {}'.format(name))
+
+ def __getattr__(self, name):
+ return self.enum[name]
+
+ def pack(self, data, kwargs=None):
+ logger.debug("Data: {}".format(data))
+ return types['u32'].pack(data, kwargs)
+
+ def unpack(self, data, offset=0, result=None):
+ x = types['u32'].unpack(data, offset)
+ return self.enum(x)
+
+
+class VPPUnionType():
+ def __init__(self, name, msgdef):
+ self.name = name
+ self.size = 0
+ self.maxindex = 0
+ fields = []
+ self.packers = collections.OrderedDict()
+ for i, f in enumerate(msgdef):
+ if type(f) is dict and 'crc' in f:
+ self.crc = f['crc']
+ continue
+ f_type, f_name = f
+ if f_type not in types:
+ logger.debug('Unknown union type {}'.format(f_type))
+ raise ValueError('Unknown message type {}'.format(f_type))
+ fields.append(f_name)
+ size = types[f_type].size
+ self.packers[f_name] = types[f_type]
+ if size > self.size:
+ self.size = size
+ self.maxindex = i
+
+ types[name] = self
+ self.tuple = collections.namedtuple(name, fields, rename=True)
+ logger.debug('Adding union {}'.format(name))
+
+ def pack(self, data, kwargs=None):
+ logger.debug("Data: {}".format(data))
+ for k, v in data.items():
+ logger.debug("Key: {} Value: {}".format(k, v))
+ b = self.packers[k].pack(v, kwargs)
+ offset = self.size - self.packers[k].size
+ break
+ r = bytearray(self.size)
+ r[offset:] = b
+ return r
+
+ def unpack(self, data, offset=0, result=None):
+ r = []
+ for k, p in self.packers.items():
+ union_offset = self.size - p.size
+ r.append(p.unpack(data, offset + union_offset))
+ return self.tuple._make(r)
+
+
+class VPPType():
+ # Set everything up to be able to pack / unpack
+ def __init__(self, name, msgdef):
+ self.name = name
+ self.msgdef = msgdef
+ self.packers = []
+ self.fields = []
+ self.fieldtypes = []
+ self.field_by_name = {}
+ size = 0
+ for i, f in enumerate(msgdef):
+ if type(f) is dict and 'crc' in f:
+ self.crc = f['crc']
+ continue
+ f_type, f_name = f[:2]
+ self.fields.append(f_name)
+ self.field_by_name[f_name] = None
+ self.fieldtypes.append(f_type)
+ if f_type not in types:
+ logger.debug('Unknown type {}'.format(f_type))
+ raise ValueError('Unknown message type {}'.format(f_type))
+ if len(f) == 3: # list
+ list_elements = f[2]
+ if list_elements == 0:
+ p = VLAList_legacy(f_name, f_type)
+ self.packers.append(p)
+ elif f_type == 'u8':
+ p = FixedList_u8(f_name, f_type, list_elements)
+ self.packers.append(p)
+ size += p.size
+ else:
+ p = FixedList(f_name, f_type, list_elements)
+ self.packers.append(p)
+ size += p.size
+ elif len(f) == 4: # Variable length list
+ # Find index of length field
+ length_index = self.fields.index(f[3])
+ p = VLAList(f_name, f_type, f[3], length_index)
+ self.packers.append(p)
+ else:
+ self.packers.append(types[f_type])
+ size += types[f_type].size
+
+ self.size = size
+ self.tuple = collections.namedtuple(name, self.fields, rename=True)
+ types[name] = self
+ logger.debug('Adding type {}'.format(name))
+
+ def pack(self, data, kwargs=None):
+ if not kwargs:
+ kwargs = data
+ logger.debug("Data: {}".format(data))
+ b = bytes()
+ for i, a in enumerate(self.fields):
+ if a not in data:
+ logger.debug("Argument {} not given, defaulting to 0"
+ .format(a))
+ b += b'\x00' * self.packers[i].size
+ continue
+ b += self.packers[i].pack(data[a], kwargs)
+ return b
+
+ def unpack(self, data, offset=0, result=None):
+ # Return a list of arguments
+ result = []
+ for p in self.packers:
+ x = p.unpack(data, offset, result)
+ if type(x) is tuple and len(x) == 1:
+ x = x[0]
+ result.append(x)
+ offset += p.size
+ return self.tuple._make(result)
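
Finally, VPPType.pack zero-fills any field the caller leaves out, so sparse argument dicts are accepted; a closing sketch mirroring test_message from the new unit tests:

    from vpp_serializer import VPPType

    foo = VPPType('foo', [['u16', '_vl_msg_id'],
                          ['u8', 'client_index'],
                          ['u8', 'something'],
                          {"crc": "0x559b9f3c"}])
    b = foo.pack({'_vl_msg_id': 1})   # client_index and something default to zeroes
    assert len(b) == 4
    nt = foo.unpack(b)
    assert (nt.client_index, nt.something) == (0, 0)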