summaryrefslogtreecommitdiffstats
path: root/src/tools
diff options
context:
space:
mode:
authorKlement Sekera <klement.sekera@gmail.com>2022-04-26 19:02:15 +0200
committerOle Trøan <otroan@employees.org>2022-05-10 18:52:08 +0000
commitd9b0c6fbf7aa5bd9af84264105b39c82028a4a29 (patch)
tree4f786cfd8ebc2443cb11e11b74c8657204068898 /src/tools
parentf90348bcb4afd0af2611cefc43b17ef3042b511c (diff)
tests: replace pycodestyle with black
Drop pycodestyle for code style checking in favor of black. Black is much faster, stable PEP8 compliant code style checker offering also automatic formatting. It aims to be very stable and produce smallest diffs. It's used by many small and big projects. Running checkstyle with black takes a few seconds with a terse output. Thus, test-checkstyle-diff is no longer necessary. Expand scope of checkstyle to all python files in the repo, replacing test-checkstyle with checkstyle-python. Also, fixstyle-python is now available for automatic style formatting. Note: python virtualenv has been consolidated in test/Makefile, test/requirements*.txt which will eventually be moved to a central location. This is required to simplify the automated generation of docker executor images in the CI. Type: improvement Change-Id: I022a326603485f58585e879ac0f697fceefbc9c8 Signed-off-by: Klement Sekera <klement.sekera@gmail.com> Signed-off-by: Dave Wallace <dwallacelf@gmail.com>
Diffstat (limited to 'src/tools')
-rwxr-xr-xsrc/tools/vppapigen/generate_go.py154
-rwxr-xr-xsrc/tools/vppapigen/generate_json.py82
-rwxr-xr-xsrc/tools/vppapigen/test_vppapigen.py120
-rwxr-xr-xsrc/tools/vppapigen/vppapigen.py813
-rw-r--r--src/tools/vppapigen/vppapigen_c.py1850
-rw-r--r--src/tools/vppapigen/vppapigen_crc.py15
-rw-r--r--src/tools/vppapigen/vppapigen_json.py59
7 files changed, 1649 insertions, 1444 deletions
diff --git a/src/tools/vppapigen/generate_go.py b/src/tools/vppapigen/generate_go.py
index 1fb53c715e2..fa53bc3dca3 100755
--- a/src/tools/vppapigen/generate_go.py
+++ b/src/tools/vppapigen/generate_go.py
@@ -17,10 +17,12 @@ import sys
def get_go_version(go_root):
# Returns version of the installed Go
- p = subprocess.Popen(["./go", "version"],
- cwd=go_root + "/bin",
- stdout=subprocess.PIPE,
- universal_newlines=True, )
+ p = subprocess.Popen(
+ ["./go", "version"],
+ cwd=go_root + "/bin",
+ stdout=subprocess.PIPE,
+ universal_newlines=True,
+ )
output, _ = p.communicate()
output_fmt = output.replace("go version go", "", 1)
@@ -29,10 +31,12 @@ def get_go_version(go_root):
# Returns version of the installed binary API generator
def get_binapi_gen_version(go_path):
- p = subprocess.Popen(["./binapi-generator", "-version"],
- cwd=go_path + "/bin",
- stdout=subprocess.PIPE,
- universal_newlines=True, )
+ p = subprocess.Popen(
+ ["./binapi-generator", "-version"],
+ cwd=go_path + "/bin",
+ stdout=subprocess.PIPE,
+ universal_newlines=True,
+ )
output, _ = p.communicate()
output_fmt = output.replace("govpp", "", 1)
@@ -45,11 +49,12 @@ def install_golang(go_root):
go_bin = go_root + "/bin/go"
if os.path.exists(go_bin) and os.path.isfile(go_bin):
- print('Go ' + get_go_version(go_root) + ' is already installed')
+ print("Go " + get_go_version(go_root) + " is already installed")
return
- filename = requests.get(
- 'https://golang.org/VERSION?m=text').text + ".linux-amd64.tar.gz"
+ filename = (
+ requests.get("https://golang.org/VERSION?m=text").text + ".linux-amd64.tar.gz"
+ )
url = "https://dl.google.com/go/" + filename
print("Go binary not found, installing the latest version...")
@@ -61,13 +66,13 @@ def install_golang(go_root):
print("Aborting...")
exit(1)
- go_folders = ['src', 'pkg', 'bin']
+ go_folders = ["src", "pkg", "bin"]
for f in go_folders:
if not os.path.exists(os.path.join(go_root, f)):
os.makedirs(os.path.join(go_root, f))
r = requests.get(url)
- with open("/tmp/" + filename, 'wb') as f:
+ with open("/tmp/" + filename, "wb") as f:
f.write(r.content)
go_tf = tarfile.open("/tmp/" + filename)
@@ -78,32 +83,30 @@ def install_golang(go_root):
go_tf.close()
os.remove("/tmp/" + filename)
- print('Go ' + get_go_version(go_root) + ' was installed')
+ print("Go " + get_go_version(go_root) + " was installed")
# Installs latest binary API generator
def install_binapi_gen(c, go_root, go_path):
- os.environ['GO111MODULE'] = "on"
- if (os.path.exists(go_root + "/bin/go") and
- os.path.isfile(go_root + "/bin/go")):
- p = subprocess.Popen(["./go", "get",
- "git.fd.io/govpp.git/cmd/binapi-generator@" + c],
- cwd=go_root + "/bin",
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- universal_newlines=True, )
+ os.environ["GO111MODULE"] = "on"
+ if os.path.exists(go_root + "/bin/go") and os.path.isfile(go_root + "/bin/go"):
+ p = subprocess.Popen(
+ ["./go", "get", "git.fd.io/govpp.git/cmd/binapi-generator@" + c],
+ cwd=go_root + "/bin",
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ )
_, error = p.communicate()
if p.returncode != 0:
- print("binapi generator installation failed: %d %s" %
- (p.returncode, error))
+ print("binapi generator installation failed: %d %s" % (p.returncode, error))
sys.exit(1)
bg_ver = get_binapi_gen_version(go_path)
- print('Installed binary API generator ' + bg_ver)
+ print("Installed binary API generator " + bg_ver)
# Creates generated bindings using GoVPP binapigen to the target folder
-def generate_api(output_dir, vpp_dir, api_list, import_prefix,
- no_source, go_path):
+def generate_api(output_dir, vpp_dir, api_list, import_prefix, no_source, go_path):
json_dir = vpp_dir + "/build-root/install-vpp-native/vpp/share/vpp/api"
if not os.path.exists(json_dir):
@@ -115,19 +118,20 @@ def generate_api(output_dir, vpp_dir, api_list, import_prefix,
if output_dir:
cmd += ["--output-dir=" + output_dir]
if len(api_list):
- print("Following API files were requested by 'GO_API_FILES': " +
- str(api_list))
- print("Note that dependency requirements may generate "
- "additional API files")
+ print("Following API files were requested by 'GO_API_FILES': " + str(api_list))
+ print("Note that dependency requirements may generate " "additional API files")
cmd.append(api_list)
if import_prefix:
cmd.append("-import-prefix=" + import_prefix)
if no_source:
cmd.append("-no-source-path-info")
- p = subprocess.Popen(cmd, cwd=go_path + "/bin",
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- universal_newlines=True, )
+ p = subprocess.Popen(
+ cmd,
+ cwd=go_path + "/bin",
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ universal_newlines=True,
+ )
out = p.communicate()[1]
if p.returncode != 0:
@@ -150,50 +154,70 @@ def main():
vpp_dir = root.parent.parent.parent
parser = argparse.ArgumentParser()
- parser.add_argument("-govpp-commit", "--govpp-commit",
- help="GoVPP commit or branch "
- "(defaults to v0.3.5-45-g671f16c)",
- default="671f16c", # fixed GoVPP version
- type=str)
- parser.add_argument("-output-dir", "--output-dir",
- help="output target directory for generated bindings",
- type=str,
- default=os.path.join(vpp_dir, "vppbinapi"))
- parser.add_argument("-api-files", "--api-files",
- help="api files to generate (without commas)",
- nargs="+",
- type=str,
- default=[])
- parser.add_argument("-import-prefix", "--import-prefix",
- help="prefix imports in the generated go code",
- default="",
- type=str)
- parser.add_argument("-no-source-path-info", "--no-source-path-info",
- help="disable source path info in generated files",
- nargs='?',
- const=True,
- default=True)
+ parser.add_argument(
+ "-govpp-commit",
+ "--govpp-commit",
+ help="GoVPP commit or branch " "(defaults to v0.3.5-45-g671f16c)",
+ default="671f16c", # fixed GoVPP version
+ type=str,
+ )
+ parser.add_argument(
+ "-output-dir",
+ "--output-dir",
+ help="output target directory for generated bindings",
+ type=str,
+ default=os.path.join(vpp_dir, "vppbinapi"),
+ )
+ parser.add_argument(
+ "-api-files",
+ "--api-files",
+ help="api files to generate (without commas)",
+ nargs="+",
+ type=str,
+ default=[],
+ )
+ parser.add_argument(
+ "-import-prefix",
+ "--import-prefix",
+ help="prefix imports in the generated go code",
+ default="",
+ type=str,
+ )
+ parser.add_argument(
+ "-no-source-path-info",
+ "--no-source-path-info",
+ help="disable source path info in generated files",
+ nargs="?",
+ const=True,
+ default=True,
+ )
args = parser.parse_args()
# go specific environment variables
if "GOROOT" in os.environ:
- go_root = os.environ['GOROOT']
+ go_root = os.environ["GOROOT"]
else:
go_binary = shutil.which("go")
if go_binary != "":
go_binary_dir, _ = os.path.split(go_binary)
go_root = os.path.join(go_binary_dir, "..")
else:
- go_root = os.environ['HOME'] + "/.go"
+ go_root = os.environ["HOME"] + "/.go"
if "GOPATH" in os.environ:
- go_path = os.environ['GOPATH']
+ go_path = os.environ["GOPATH"]
else:
- go_path = os.environ['HOME'] + "/go"
+ go_path = os.environ["HOME"] + "/go"
install_golang(go_root)
install_binapi_gen(args.govpp_commit, go_root, go_path)
- generate_api(args.output_dir, str(vpp_dir), args.api_files,
- args.import_prefix, args.no_source_path_info, go_path)
+ generate_api(
+ args.output_dir,
+ str(vpp_dir),
+ args.api_files,
+ args.import_prefix,
+ args.no_source_path_info,
+ go_path,
+ )
if __name__ == "__main__":
diff --git a/src/tools/vppapigen/generate_json.py b/src/tools/vppapigen/generate_json.py
index 288e519edd2..e8041c5a3eb 100755
--- a/src/tools/vppapigen/generate_json.py
+++ b/src/tools/vppapigen/generate_json.py
@@ -16,30 +16,37 @@
import argparse
import pathlib
import subprocess
-BASE_DIR = subprocess.check_output('git rev-parse --show-toplevel',
- shell=True).strip().decode()
+
+BASE_DIR = (
+ subprocess.check_output("git rev-parse --show-toplevel", shell=True)
+ .strip()
+ .decode()
+)
vppapigen_bin = pathlib.Path(
- '%s/src/tools/vppapigen/vppapigen.py' % BASE_DIR).as_posix()
+ "%s/src/tools/vppapigen/vppapigen.py" % BASE_DIR
+).as_posix()
src_dir_depth = 3
output_path = pathlib.Path(
- '%s/build-root/install-vpp-native/vpp/share/vpp/api/' % BASE_DIR)
+ "%s/build-root/install-vpp-native/vpp/share/vpp/api/" % BASE_DIR
+)
output_path_debug = pathlib.Path(
- '%s/build-root/install-vpp_debug-native/vpp/share/vpp/api/' % BASE_DIR)
+ "%s/build-root/install-vpp_debug-native/vpp/share/vpp/api/" % BASE_DIR
+)
output_dir_map = {
- 'plugins': 'plugins',
- 'vlibmemory': 'core',
- 'vnet': 'core',
- 'vlib': 'core',
- 'vpp': 'core',
+ "plugins": "plugins",
+ "vlibmemory": "core",
+ "vnet": "core",
+ "vlib": "core",
+ "vpp": "core",
}
def api_search_globs(src_dir):
globs = []
for g in output_dir_map:
- globs.extend(list(src_dir.glob('%s/**/*.api' % g)))
+ globs.extend(list(src_dir.glob("%s/**/*.api" % g)))
return globs
@@ -51,28 +58,41 @@ def api_files(src_dir):
def vppapigen(vppapigen_bin, output_path, src_dir, src_file):
try:
subprocess.check_output(
- [vppapigen_bin, '--includedir', src_dir.as_posix(),
- '--input', src_file.as_posix(), 'JSON',
- '--output', '%s/%s/%s.json' % (
- output_path,
- output_dir_map[src_file.as_posix().split('/')[
- src_dir_depth + BASE_DIR.count('/') - 1]],
- src_file.name)])
+ [
+ vppapigen_bin,
+ "--includedir",
+ src_dir.as_posix(),
+ "--input",
+ src_file.as_posix(),
+ "JSON",
+ "--output",
+ "%s/%s/%s.json"
+ % (
+ output_path,
+ output_dir_map[
+ src_file.as_posix().split("/")[
+ src_dir_depth + BASE_DIR.count("/") - 1
+ ]
+ ],
+ src_file.name,
+ ),
+ ]
+ )
except KeyError:
- print('src_file: %s' % src_file)
+ print("src_file: %s" % src_file)
raise
def main():
- cliparser = argparse.ArgumentParser(
- description='VPP API JSON definition generator')
- cliparser.add_argument('--srcdir', action='store',
- default='%s/src' % BASE_DIR),
- cliparser.add_argument('--output', action='store',
- help='directory to store files'),
- cliparser.add_argument('--debug-target', action='store_true',
- default=False,
- help="'True' if -debug target"),
+ cliparser = argparse.ArgumentParser(description="VPP API JSON definition generator")
+ cliparser.add_argument("--srcdir", action="store", default="%s/src" % BASE_DIR),
+ cliparser.add_argument("--output", action="store", help="directory to store files"),
+ cliparser.add_argument(
+ "--debug-target",
+ action="store_true",
+ default=False,
+ help="'True' if -debug target",
+ ),
args = cliparser.parse_args()
src_dir = pathlib.Path(args.srcdir)
@@ -86,13 +106,13 @@ def main():
for d in output_dir_map.values():
output_dir.joinpath(d).mkdir(exist_ok=True, parents=True)
- for f in output_dir.glob('**/*.api.json'):
+ for f in output_dir.glob("**/*.api.json"):
f.unlink()
for f in api_files(src_dir):
vppapigen(vppapigen_bin, output_dir, src_dir, f)
- print('json files written to: %s/.' % output_dir)
+ print("json files written to: %s/." % output_dir)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/src/tools/vppapigen/test_vppapigen.py b/src/tools/vppapigen/test_vppapigen.py
index c454ffc8638..20f6c6da10d 100755
--- a/src/tools/vppapigen/test_vppapigen.py
+++ b/src/tools/vppapigen/test_vppapigen.py
@@ -1,8 +1,7 @@
#!/usr/bin/env python3
import unittest
-from vppapigen import VPPAPI, Option, ParseError, Union, foldup_crcs, \
- global_types
+from vppapigen import VPPAPI, Option, ParseError, Union, foldup_crcs, global_types
import vppapigen
@@ -28,17 +27,17 @@ class TestUnion(unittest.TestCase):
cls.parser = VPPAPI()
def test_union(self):
- test_string = '''
+ test_string = """
union foo_union {
u32 a;
u8 b;
};
- '''
+ """
r = self.parser.parse_string(test_string)
self.assertTrue(isinstance(r[0], Union))
def test_union_vla(self):
- test_string = '''
+ test_string = """
union foo_union_vla {
u32 a;
u8 b[a];
@@ -46,13 +45,13 @@ class TestUnion(unittest.TestCase):
autoreply define foo {
vl_api_foo_union_vla_t v;
};
- '''
+ """
r = self.parser.parse_string(test_string)
self.assertTrue(isinstance(r[0], Union))
self.assertTrue(r[0].vla)
s = self.parser.process(r)
- test_string2 = '''
+ test_string2 = """
union foo_union_vla2 {
u32 a;
u8 b[a];
@@ -61,10 +60,10 @@ class TestUnion(unittest.TestCase):
autoreply define foo2 {
vl_api_foo_union_vla2_t v;
};
- '''
+ """
self.assertRaises(ValueError, self.parser.parse_string, test_string2)
- test_string3 = '''
+ test_string3 = """
union foo_union_vla3 {
u32 a;
u8 b[a];
@@ -73,7 +72,7 @@ class TestUnion(unittest.TestCase):
vl_api_foo_union_vla3_t v;
u32 x;
};
- '''
+ """
self.assertRaises(ValueError, self.parser.parse_string, test_string3)
@@ -83,10 +82,10 @@ class TestTypedef(unittest.TestCase):
cls.parser = VPPAPI()
def test_duplicatetype(self):
- test_string = '''
+ test_string = """
typedef foo1 { u8 dummy; };
typedef foo1 { u8 dummy; };
- '''
+ """
self.assertRaises(KeyError, self.parser.parse_string, test_string)
@@ -96,42 +95,42 @@ class TestDefine(unittest.TestCase):
cls.parser = VPPAPI()
def test_unknowntype(self):
- test_string = 'define foo { foobar foo;};'
+ test_string = "define foo { foobar foo;};"
with self.assertRaises(ParseError) as ctx:
self.parser.parse_string(test_string)
- self.assertIn('Undefined type: foobar', str(ctx.exception))
+ self.assertIn("Undefined type: foobar", str(ctx.exception))
- test_string = 'define { u8 foo;};'
+ test_string = "define { u8 foo;};"
with self.assertRaises(ParseError) as ctx:
self.parser.parse_string(test_string)
def test_flags(self):
- test_string = '''
+ test_string = """
manual_print dont_trace manual_endian define foo { u8 foo; };
define foo_reply {u32 context; i32 retval; };
- '''
+ """
r = self.parser.parse_string(test_string)
self.assertIsNotNone(r)
s = self.parser.process(r)
self.assertIsNotNone(s)
- for d in s['Define']:
- if d.name == 'foo':
+ for d in s["Define"]:
+ if d.name == "foo":
self.assertTrue(d.dont_trace)
self.assertTrue(d.manual_endian)
self.assertTrue(d.manual_print)
self.assertFalse(d.autoreply)
- test_string = '''
+ test_string = """
nonexisting_flag define foo { u8 foo; };
- '''
+ """
with self.assertRaises(ParseError):
self.parser.parse_string(test_string)
def test_options(self):
- test_string = '''
+ test_string = """
define foo { option deprecated; u8 foo; };
define foo_reply {u32 context; i32 retval; };
- '''
+ """
r = self.parser.parse_string(test_string)
self.assertIsNotNone(r)
s = self.parser.process(r)
@@ -144,14 +143,14 @@ class TestService(unittest.TestCase):
cls.parser = VPPAPI()
def test_service(self):
- test_string = '''
+ test_string = """
autoreply define show_version { u8 foo;};
service { rpc show_version returns show_version_reply; };
- '''
+ """
r = self.parser.parse_string(test_string)
s = self.parser.process(r)
- self.assertEqual(s['Service'][0].caller, 'show_version')
- self.assertEqual(s['Service'][0].reply, 'show_version_reply')
+ self.assertEqual(s["Service"][0].caller, "show_version")
+ self.assertEqual(s["Service"][0].reply, "show_version_reply")
def get_crc(apistring, name):
@@ -159,52 +158,52 @@ def get_crc(apistring, name):
parser = vppapigen.VPPAPI()
r = parser.parse_string(apistring)
s = parser.process(r)
- foldup_crcs(s['Define'])
- d = [f for f in s['Define'] if f.name == name]
+ foldup_crcs(s["Define"])
+ d = [f for f in s["Define"] if f.name == name]
return d[0].crc
class TestCRC(unittest.TestCase):
def test_crc(self):
- test_string = '''
+ test_string = """
typedef list { u8 foo; };
autoreply define foo { u8 foo; vl_api_list_t l;};
- '''
- crc = get_crc(test_string, 'foo')
+ """
+ crc = get_crc(test_string, "foo")
# modify underlying type
- test_string = '''
+ test_string = """
typedef list { u8 foo2; };
autoreply define foo { u8 foo; vl_api_list_t l;};
- '''
- crc2 = get_crc(test_string, 'foo')
+ """
+ crc2 = get_crc(test_string, "foo")
self.assertNotEqual(crc, crc2)
# two user-defined types
- test_string = '''
+ test_string = """
typedef address { u8 foo2; };
typedef list { u8 foo2; vl_api_address_t add; };
autoreply define foo { u8 foo; vl_api_list_t l;};
- '''
- crc3 = get_crc(test_string, 'foo')
+ """
+ crc3 = get_crc(test_string, "foo")
- test_string = '''
+ test_string = """
typedef address { u8 foo3; };
typedef list { u8 foo2; vl_api_address_t add; };
autoreply define foo { u8 foo; vl_api_list_t l;};
- '''
- crc4 = get_crc(test_string, 'foo')
+ """
+ crc4 = get_crc(test_string, "foo")
self.assertNotEqual(crc3, crc4)
- test_string = '''
+ test_string = """
typedef address { u8 foo3; };
typedef list { u8 foo2; vl_api_address_t add; u8 foo3; };
autoreply define foo { u8 foo; vl_api_list_t l;};
- '''
- crc5 = get_crc(test_string, 'foo')
+ """
+ crc5 = get_crc(test_string, "foo")
self.assertNotEqual(crc4, crc5)
- test_string = '''
+ test_string = """
typedef ip6_address
{
u8 foo;
@@ -227,11 +226,11 @@ autoreply define sr_policy_add
u32 fib_table;
vl_api_srv6_sid_list_t sids;
};
-'''
+"""
- crc = get_crc(test_string, 'sr_policy_add')
+ crc = get_crc(test_string, "sr_policy_add")
- test_string = '''
+ test_string = """
typedef ip6_address
{
u8 foo;
@@ -253,14 +252,13 @@ autoreply define sr_policy_add
u32 fib_table;
vl_api_srv6_sid_list_t sids;
};
-'''
- crc2 = get_crc(test_string, 'sr_policy_add')
+"""
+ crc2 = get_crc(test_string, "sr_policy_add")
self.assertNotEqual(crc, crc2)
class TestEnum(unittest.TestCase):
-
@classmethod
def setUpClass(cls):
cls.parser = VPPAPI()
@@ -278,8 +276,8 @@ enum tunnel_mode : u8
r = self.parser.parse_string(test_string)
self.assertIsNotNone(r)
s = self.parser.process(r)
- for o in s['types']:
- if o.type == 'Enum':
+ for o in s["types"]:
+ if o.type == "Enum":
self.assertEqual(o.name, "tunnel_mode")
break
else:
@@ -298,8 +296,8 @@ enum virtio_flags {
r = self.parser.parse_string(test_string)
self.assertIsNotNone(r)
s = self.parser.process(r)
- for o in s['types']:
- if o.type == 'Enum':
+ for o in s["types"]:
+ if o.type == "Enum":
self.assertEqual(o.name, "virtio_flags")
break
else:
@@ -307,7 +305,6 @@ enum virtio_flags {
class TestEnumFlag(unittest.TestCase):
-
@classmethod
def setUpClass(cls):
cls.parser = VPPAPI()
@@ -326,8 +323,9 @@ enumflag tunnel_mode_ef : u8
with self.assertRaises(TypeError) as ctx:
r = self.parser.parse_string(test_string)
- self.assertTrue(str(ctx.exception).startswith(
- 'tunnel_mode_ef is not a flag enum.'))
+ self.assertTrue(
+ str(ctx.exception).startswith("tunnel_mode_ef is not a flag enum.")
+ )
def test_enumflag_as_enumflag(self):
test_string = """\
@@ -342,13 +340,13 @@ enumflag virtio_flags_ef {
r = self.parser.parse_string(test_string)
self.assertIsNotNone(r)
s = self.parser.process(r)
- for o in s['types']:
- if o.type == 'EnumFlag':
+ for o in s["types"]:
+ if o.type == "EnumFlag":
self.assertEqual(o.name, "virtio_flags_ef")
break
else:
self.fail()
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main(verbosity=2)
diff --git a/src/tools/vppapigen/vppapigen.py b/src/tools/vppapigen/vppapigen.py
index 8415c28fb7b..c7bc68180a8 100755
--- a/src/tools/vppapigen/vppapigen.py
+++ b/src/tools/vppapigen/vppapigen.py
@@ -10,9 +10,10 @@ from subprocess import Popen, PIPE
import ply.lex as lex
import ply.yacc as yacc
-assert sys.version_info >= (3, 5), \
- "Not supported Python version: {}".format(sys.version)
-log = logging.getLogger('vppapigen')
+assert sys.version_info >= (3, 5), "Not supported Python version: {}".format(
+ sys.version
+)
+log = logging.getLogger("vppapigen")
# Ensure we don't leave temporary files around
sys.dont_write_bytecode = True
@@ -28,11 +29,10 @@ seen_imports = {}
def global_type_add(name, obj):
- '''Add new type to the dictionary of types '''
- type_name = 'vl_api_' + name + '_t'
+ """Add new type to the dictionary of types"""
+ type_name = "vl_api_" + name + "_t"
if type_name in global_types:
- raise KeyError("Attempted redefinition of {!r} with {!r}.".format(
- name, obj))
+ raise KeyError("Attempted redefinition of {!r} with {!r}.".format(name, obj))
global_types[type_name] = obj
@@ -49,104 +49,104 @@ class VPPAPILexer:
self.filename = filename
reserved = {
- 'service': 'SERVICE',
- 'rpc': 'RPC',
- 'returns': 'RETURNS',
- 'null': 'NULL',
- 'stream': 'STREAM',
- 'events': 'EVENTS',
- 'define': 'DEFINE',
- 'typedef': 'TYPEDEF',
- 'enum': 'ENUM',
- 'enumflag': 'ENUMFLAG',
- 'typeonly': 'TYPEONLY',
- 'manual_print': 'MANUAL_PRINT',
- 'manual_endian': 'MANUAL_ENDIAN',
- 'dont_trace': 'DONT_TRACE',
- 'autoreply': 'AUTOREPLY',
- 'autoendian': 'AUTOENDIAN',
- 'option': 'OPTION',
- 'u8': 'U8',
- 'u16': 'U16',
- 'u32': 'U32',
- 'u64': 'U64',
- 'i8': 'I8',
- 'i16': 'I16',
- 'i32': 'I32',
- 'i64': 'I64',
- 'f64': 'F64',
- 'bool': 'BOOL',
- 'string': 'STRING',
- 'import': 'IMPORT',
- 'true': 'TRUE',
- 'false': 'FALSE',
- 'union': 'UNION',
- 'counters': 'COUNTERS',
- 'paths': 'PATHS',
- 'units': 'UNITS',
- 'severity': 'SEVERITY',
- 'type': 'TYPE',
- 'description': 'DESCRIPTION',
+ "service": "SERVICE",
+ "rpc": "RPC",
+ "returns": "RETURNS",
+ "null": "NULL",
+ "stream": "STREAM",
+ "events": "EVENTS",
+ "define": "DEFINE",
+ "typedef": "TYPEDEF",
+ "enum": "ENUM",
+ "enumflag": "ENUMFLAG",
+ "typeonly": "TYPEONLY",
+ "manual_print": "MANUAL_PRINT",
+ "manual_endian": "MANUAL_ENDIAN",
+ "dont_trace": "DONT_TRACE",
+ "autoreply": "AUTOREPLY",
+ "autoendian": "AUTOENDIAN",
+ "option": "OPTION",
+ "u8": "U8",
+ "u16": "U16",
+ "u32": "U32",
+ "u64": "U64",
+ "i8": "I8",
+ "i16": "I16",
+ "i32": "I32",
+ "i64": "I64",
+ "f64": "F64",
+ "bool": "BOOL",
+ "string": "STRING",
+ "import": "IMPORT",
+ "true": "TRUE",
+ "false": "FALSE",
+ "union": "UNION",
+ "counters": "COUNTERS",
+ "paths": "PATHS",
+ "units": "UNITS",
+ "severity": "SEVERITY",
+ "type": "TYPE",
+ "description": "DESCRIPTION",
}
- tokens = ['STRING_LITERAL',
- 'ID', 'NUM'] + list(reserved.values())
+ tokens = ["STRING_LITERAL", "ID", "NUM"] + list(reserved.values())
- t_ignore_LINE_COMMENT = '//.*'
+ t_ignore_LINE_COMMENT = "//.*"
def t_FALSE(self, t):
- r'false'
+ r"false"
t.value = False
return t
def t_TRUE(self, t):
- r'false'
+ r"false"
t.value = True
return t
def t_NUM(self, t):
- r'0[xX][0-9a-fA-F]+|-?\d+\.?\d*'
- base = 16 if t.value.startswith('0x') else 10
- if '.' in t.value:
+ r"0[xX][0-9a-fA-F]+|-?\d+\.?\d*"
+ base = 16 if t.value.startswith("0x") else 10
+ if "." in t.value:
t.value = float(t.value)
else:
t.value = int(t.value, base)
return t
def t_ID(self, t):
- r'[a-zA-Z_][a-zA-Z_0-9]*'
+ r"[a-zA-Z_][a-zA-Z_0-9]*"
# Check for reserved words
- t.type = VPPAPILexer.reserved.get(t.value, 'ID')
+ t.type = VPPAPILexer.reserved.get(t.value, "ID")
return t
# C string
def t_STRING_LITERAL(self, t):
- r'\"([^\\\n]|(\\.))*?\"'
- t.value = str(t.value).replace("\"", "")
+ r"\"([^\\\n]|(\\.))*?\" "
+ t.value = str(t.value).replace('"', "")
return t
# C or C++ comment (ignore)
def t_comment(self, t):
- r'(/\*(.|\n)*?\*/)|(//.*)'
- t.lexer.lineno += t.value.count('\n')
+ r"(/\*(.|\n)*?\*/)|(//.*)"
+ t.lexer.lineno += t.value.count("\n")
# Error handling rule
def t_error(self, t):
- raise ParseError("Illegal character '{}' ({})"
- "in {}: line {}".format(t.value[0],
- hex(ord(t.value[0])),
- self.filename,
- t.lexer.lineno))
+ raise ParseError(
+ "Illegal character '{}' ({})"
+ "in {}: line {}".format(
+ t.value[0], hex(ord(t.value[0])), self.filename, t.lexer.lineno
+ )
+ )
# Define a rule so we can track line numbers
def t_newline(self, t):
- r'\n+'
+ r"\n+"
t.lexer.lineno += len(t.value)
literals = ":{}[];=.,"
# A string containing ignored characters (spaces and tabs)
- t_ignore = ' \t'
+ t_ignore = " \t"
def vla_mark_length_field(block):
@@ -164,23 +164,25 @@ def vla_is_last_check(name, block):
vla = True
if i + 1 < len(block):
raise ValueError(
- 'VLA field "{}" must be the last field in message "{}"'
- .format(b.fieldname, name))
- elif b.fieldtype.startswith('vl_api_'):
+ 'VLA field "{}" must be the last field in message "{}"'.format(
+ b.fieldname, name
+ )
+ )
+ elif b.fieldtype.startswith("vl_api_"):
if global_types[b.fieldtype].vla:
vla = True
if i + 1 < len(block):
raise ValueError(
'VLA field "{}" must be the last '
- 'field in message "{}"'
- .format(b.fieldname, name))
- elif b.fieldtype == 'string' and b.length == 0:
+ 'field in message "{}"'.format(b.fieldname, name)
+ )
+ elif b.fieldtype == "string" and b.length == 0:
vla = True
if i + 1 < len(block):
raise ValueError(
'VLA field "{}" must be the last '
- 'field in message "{}"'
- .format(b.fieldname, name))
+ 'field in message "{}"'.format(b.fieldname, name)
+ )
return vla
@@ -192,10 +194,9 @@ class Processable:
class Service(Processable):
- type = 'Service'
+ type = "Service"
- def __init__(self, caller, reply, events=None, stream_message=None,
- stream=False):
+ def __init__(self, caller, reply, events=None, stream_message=None, stream=False):
self.caller = caller
self.reply = reply
self.stream = stream
@@ -204,7 +205,7 @@ class Service(Processable):
class Typedef(Processable):
- type = 'Typedef'
+ type = "Typedef"
def __init__(self, name, flags, block):
self.name = name
@@ -214,9 +215,9 @@ class Typedef(Processable):
self.manual_print = False
self.manual_endian = False
for f in flags:
- if f == 'manual_print':
+ if f == "manual_print":
self.manual_print = True
- elif f == 'manual_endian':
+ elif f == "manual_endian":
self.manual_endian = True
global_type_add(name, self)
@@ -224,14 +225,14 @@ class Typedef(Processable):
vla_mark_length_field(self.block)
def process(self, result):
- result['types'].append(self)
+ result["types"].append(self)
def __repr__(self):
return self.name + str(self.flags) + str(self.block)
class Using(Processable):
- type = 'Using'
+ type = "Using"
def __init__(self, name, flags, alias):
self.name = name
@@ -243,16 +244,15 @@ class Using(Processable):
self.manual_print = False
self.manual_endian = False
for f in flags:
- if f == 'manual_print':
+ if f == "manual_print":
self.manual_print = True
- elif f == 'manual_endian':
+ elif f == "manual_endian":
self.manual_endian = True
if isinstance(alias, Array):
- a = {'type': alias.fieldtype,
- 'length': alias.length}
+ a = {"type": alias.fieldtype, "length": alias.length}
else:
- a = {'type': alias.fieldtype}
+ a = {"type": alias.fieldtype}
self.alias = a
self.using = alias
@@ -265,14 +265,14 @@ class Using(Processable):
global_type_add(name, self)
def process(self, result): # -> Dict
- result['types'].append(self)
+ result["types"].append(self)
def __repr__(self):
return self.name + str(self.alias)
class Union(Processable):
- type = 'Union'
+ type = "Union"
def __init__(self, name, flags, block):
self.manual_print = False
@@ -280,9 +280,9 @@ class Union(Processable):
self.name = name
for f in flags:
- if f == 'manual_print':
+ if f == "manual_print":
self.manual_print = True
- elif f == 'manual_endian':
+ elif f == "manual_endian":
self.manual_endian = True
self.block = block
@@ -292,14 +292,14 @@ class Union(Processable):
global_type_add(name, self)
def process(self, result):
- result['types'].append(self)
+ result["types"].append(self)
def __repr__(self):
return str(self.block)
class Define(Processable):
- type = 'Define'
+ type = "Define"
def __init__(self, name, flags, block):
self.name = name
@@ -312,15 +312,15 @@ class Define(Processable):
self.autoendian = 0
self.options = {}
for f in flags:
- if f == 'dont_trace':
+ if f == "dont_trace":
self.dont_trace = True
- elif f == 'manual_print':
+ elif f == "manual_print":
self.manual_print = True
- elif f == 'manual_endian':
+ elif f == "manual_endian":
self.manual_endian = True
- elif f == 'autoreply':
+ elif f == "autoreply":
self.autoreply = True
- elif f == 'autoendian':
+ elif f == "autoendian":
self.autoendian = 1
remove = []
@@ -337,12 +337,11 @@ class Define(Processable):
self.crc = str(block).encode()
def autoreply_block(self, name, parent):
- block = [Field('u32', 'context'),
- Field('i32', 'retval')]
+ block = [Field("u32", "context"), Field("i32", "retval")]
# inherit the parent's options
for k, v in parent.options.items():
block.append(Option(k, v))
- return Define(name + '_reply', [], block)
+ return Define(name + "_reply", [], block)
def process(self, result): # -> Dict
tname = self.__class__.__name__
@@ -355,9 +354,9 @@ class Define(Processable):
class Enum(Processable):
- type = 'Enum'
+ type = "Enum"
- def __init__(self, name, block, enumtype='u32'):
+ def __init__(self, name, block, enumtype="u32"):
self.name = name
self.enumtype = enumtype
self.vla = False
@@ -369,47 +368,50 @@ class Enum(Processable):
bc_set = False
for b in block:
- if 'value' in b:
- count = b['value']
+ if "value" in b:
+ count = b["value"]
else:
count += 1
- block2.append([b['id'], count])
+ block2.append([b["id"], count])
try:
- if b['option']['backwards_compatible']:
+ if b["option"]["backwards_compatible"]:
pass
bc_set = True
except KeyError:
- block3.append([b['id'], count])
+ block3.append([b["id"], count])
if bc_set:
- raise ValueError("Backward compatible enum must "
- "be last {!r} {!r}"
- .format(name, b['id']))
+ raise ValueError(
+ "Backward compatible enum must "
+ "be last {!r} {!r}".format(name, b["id"])
+ )
self.block = block2
self.crc = str(block3).encode()
global_type_add(name, self)
def process(self, result):
- result['types'].append(self)
+ result["types"].append(self)
def __repr__(self):
return self.name + str(self.block)
class EnumFlag(Enum):
- type = 'EnumFlag'
+ type = "EnumFlag"
- def __init__(self, name, block, enumtype='u32'):
+ def __init__(self, name, block, enumtype="u32"):
super(EnumFlag, self).__init__(name, block, enumtype)
for b in self.block:
if bin(b[1])[2:].count("1") > 1:
- raise TypeError("%s is not a flag enum. No element in a "
- "flag enum may have more than a "
- "single bit set." % self.name)
+ raise TypeError(
+ "%s is not a flag enum. No element in a "
+ "flag enum may have more than a "
+ "single bit set." % self.name
+ )
class Import(Processable):
- type = 'Import'
+ type = "Import"
_initialized = False
def __new__(cls, *args, **kwargs):
@@ -440,7 +442,7 @@ class Import(Processable):
class Option(Processable):
- type = 'Option'
+ type = "Option"
def __init__(self, option, value=None):
self.option = option
@@ -458,7 +460,7 @@ class Option(Processable):
class Array(Processable):
- type = 'Array'
+ type = "Array"
def __init__(self, fieldtype, name, length, modern_vla=False):
self.fieldtype = fieldtype
@@ -474,12 +476,11 @@ class Array(Processable):
self.vla = False
def __repr__(self):
- return str([self.fieldtype, self.fieldname, self.length,
- self.lengthfield])
+ return str([self.fieldtype, self.fieldname, self.length, self.lengthfield])
class Field(Processable):
- type = 'Field'
+ type = "Field"
def __init__(self, fieldtype, name, limit=None):
# limit field has been expanded to an options dict.
@@ -487,13 +488,14 @@ class Field(Processable):
self.fieldtype = fieldtype
self.is_lengthfield = False
- if self.fieldtype == 'string':
- raise ValueError("The string type {!r} is an "
- "array type ".format(name))
+ if self.fieldtype == "string":
+ raise ValueError("The string type {!r} is an " "array type ".format(name))
if name in keyword.kwlist:
- raise ValueError("Fieldname {!r} is a python keyword and is not "
- "accessible via the python API. ".format(name))
+ raise ValueError(
+ "Fieldname {!r} is a python keyword and is not "
+ "accessible via the python API. ".format(name)
+ )
self.fieldname = name
self.limit = limit
@@ -502,35 +504,34 @@ class Field(Processable):
class Counter(Processable):
- type = 'Counter'
+ type = "Counter"
def __init__(self, path, counter):
self.name = path
self.block = counter
def process(self, result): # -> Dict
- result['Counters'].append(self)
+ result["Counters"].append(self)
class Paths(Processable):
- type = 'Paths'
+ type = "Paths"
def __init__(self, pathset):
self.paths = pathset
def __repr__(self):
- return "%s(paths=%s)" % (
- self.__class__.__name__, self.paths
- )
+ return "%s(paths=%s)" % (self.__class__.__name__, self.paths)
class Coord:
- """ Coordinates of a syntactic element. Consists of:
- - File name
- - Line number
- - (optional) column number, for the Lexer
+ """Coordinates of a syntactic element. Consists of:
+ - File name
+ - Line number
+ - (optional) column number, for the Lexer
"""
- __slots__ = ('file', 'line', 'column', '__weakref__')
+
+ __slots__ = ("file", "line", "column", "__weakref__")
def __init__(self, file, line, column=None):
self.file = file
@@ -568,49 +569,47 @@ class VPPAPIParser:
self.logger.warning("%s: %s" % (coord, msg))
def _coord(self, lineno, column=None):
- return Coord(
- file=self.filename,
- line=lineno, column=column)
+ return Coord(file=self.filename, line=lineno, column=column)
def _token_coord(self, p, token_idx):
- """ Returns the coordinates for the YaccProduction object 'p' indexed
- with 'token_idx'. The coordinate includes the 'lineno' and
- 'column'. Both follow the lex semantic, starting from 1.
+ """Returns the coordinates for the YaccProduction object 'p' indexed
+ with 'token_idx'. The coordinate includes the 'lineno' and
+ 'column'. Both follow the lex semantic, starting from 1.
"""
- last_cr = p.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
+ last_cr = p.lexer.lexdata.rfind("\n", 0, p.lexpos(token_idx))
if last_cr < 0:
last_cr = -1
- column = (p.lexpos(token_idx) - (last_cr))
+ column = p.lexpos(token_idx) - (last_cr)
return self._coord(p.lineno(token_idx), column)
def p_slist(self, p):
- '''slist : stmt
- | slist stmt'''
+ """slist : stmt
+ | slist stmt"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_stmt(self, p):
- '''stmt : define
- | typedef
- | option
- | import
- | enum
- | enumflag
- | union
- | service
- | paths
- | counters'''
+ """stmt : define
+ | typedef
+ | option
+ | import
+ | enum
+ | enumflag
+ | union
+ | service
+ | paths
+ | counters"""
p[0] = p[1]
def p_import(self, p):
- '''import : IMPORT STRING_LITERAL ';' '''
+ """import : IMPORT STRING_LITERAL ';'"""
p[0] = Import(p[2], revision=self.revision)
def p_path_elements(self, p):
- '''path_elements : path_element
- | path_elements path_element'''
+ """path_elements : path_element
+ | path_elements path_element"""
if len(p) == 2:
p[0] = p[1]
else:
@@ -620,20 +619,20 @@ class VPPAPIParser:
p[0] = p[1] + [p[2]]
def p_path_element(self, p):
- '''path_element : STRING_LITERAL STRING_LITERAL ';' '''
- p[0] = {'path': p[1], 'counter': p[2]}
+ """path_element : STRING_LITERAL STRING_LITERAL ';'"""
+ p[0] = {"path": p[1], "counter": p[2]}
def p_paths(self, p):
- '''paths : PATHS '{' path_elements '}' ';' '''
+ """paths : PATHS '{' path_elements '}' ';'"""
p[0] = Paths(p[3])
def p_counters(self, p):
- '''counters : COUNTERS ID '{' counter_elements '}' ';' '''
+ """counters : COUNTERS ID '{' counter_elements '}' ';'"""
p[0] = Counter(p[2], p[4])
def p_counter_elements(self, p):
- '''counter_elements : counter_element
- | counter_elements counter_element'''
+ """counter_elements : counter_element
+ | counter_elements counter_element"""
if len(p) == 2:
p[0] = [p[1]]
else:
@@ -643,46 +642,47 @@ class VPPAPIParser:
p[0] = p[1] + [p[2]]
def p_counter_element(self, p):
- '''counter_element : ID '{' counter_statements '}' ';' '''
- p[0] = {**{'name': p[1]}, **p[3]}
+ """counter_element : ID '{' counter_statements '}' ';'"""
+ p[0] = {**{"name": p[1]}, **p[3]}
def p_counter_statements(self, p):
- '''counter_statements : counter_statement
- | counter_statements counter_statement'''
+ """counter_statements : counter_statement
+ | counter_statements counter_statement"""
if len(p) == 2:
p[0] = p[1]
else:
p[0] = {**p[1], **p[2]}
def p_counter_statement(self, p):
- '''counter_statement : SEVERITY ID ';'
- | UNITS STRING_LITERAL ';'
- | DESCRIPTION STRING_LITERAL ';'
- | TYPE ID ';' '''
+ """counter_statement : SEVERITY ID ';'
+ | UNITS STRING_LITERAL ';'
+ | DESCRIPTION STRING_LITERAL ';'
+ | TYPE ID ';'"""
p[0] = {p[1]: p[2]}
def p_service(self, p):
- '''service : SERVICE '{' service_statements '}' ';' '''
+ """service : SERVICE '{' service_statements '}' ';'"""
p[0] = p[3]
def p_service_statements(self, p):
- '''service_statements : service_statement
- | service_statements service_statement'''
+ """service_statements : service_statement
+ | service_statements service_statement"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_service_statement(self, p):
- '''service_statement : RPC ID RETURNS NULL ';'
- | RPC ID RETURNS ID ';'
- | RPC ID RETURNS STREAM ID ';'
- | RPC ID RETURNS ID EVENTS event_list ';' '''
+ """service_statement : RPC ID RETURNS NULL ';'
+ | RPC ID RETURNS ID ';'
+ | RPC ID RETURNS STREAM ID ';'
+ | RPC ID RETURNS ID EVENTS event_list ';'"""
if p[2] == p[4]:
# Verify that caller and reply differ
self._parse_error(
- 'Reply ID ({}) should not be equal to Caller ID'.format(p[2]),
- self._token_coord(p, 1))
+ "Reply ID ({}) should not be equal to Caller ID".format(p[2]),
+ self._token_coord(p, 1),
+ )
if len(p) == 8:
p[0] = Service(p[2], p[4], p[6])
elif len(p) == 7:
@@ -691,280 +691,283 @@ class VPPAPIParser:
p[0] = Service(p[2], p[4])
def p_service_statement2(self, p):
- '''service_statement : RPC ID RETURNS ID STREAM ID ';' '''
+ """service_statement : RPC ID RETURNS ID STREAM ID ';'"""
p[0] = Service(p[2], p[4], stream_message=p[6], stream=True)
def p_event_list(self, p):
- '''event_list : events
- | event_list events '''
+ """event_list : events
+ | event_list events"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_event(self, p):
- '''events : ID
- | ID ',' '''
+ """events : ID
+ | ID ','"""
p[0] = p[1]
def p_enum(self, p):
- '''enum : ENUM ID '{' enum_statements '}' ';' '''
+ """enum : ENUM ID '{' enum_statements '}' ';'"""
p[0] = Enum(p[2], p[4])
def p_enum_type(self, p):
- ''' enum : ENUM ID ':' enum_size '{' enum_statements '}' ';' '''
+ """enum : ENUM ID ':' enum_size '{' enum_statements '}' ';'"""
if len(p) == 9:
p[0] = Enum(p[2], p[6], enumtype=p[4])
else:
p[0] = Enum(p[2], p[4])
def p_enumflag(self, p):
- '''enumflag : ENUMFLAG ID '{' enum_statements '}' ';' '''
+ """enumflag : ENUMFLAG ID '{' enum_statements '}' ';'"""
p[0] = EnumFlag(p[2], p[4])
def p_enumflag_type(self, p):
- ''' enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';' ''' # noqa : E502
+ """enumflag : ENUMFLAG ID ':' enumflag_size '{' enum_statements '}' ';'""" # noqa : E502
if len(p) == 9:
p[0] = EnumFlag(p[2], p[6], enumtype=p[4])
else:
p[0] = EnumFlag(p[2], p[4])
def p_enum_size(self, p):
- ''' enum_size : U8
- | U16
- | U32
- | I8
- | I16
- | I32 '''
+ """enum_size : U8
+ | U16
+ | U32
+ | I8
+ | I16
+ | I32"""
p[0] = p[1]
def p_enumflag_size(self, p):
- ''' enumflag_size : U8
- | U16
- | U32 '''
+ """enumflag_size : U8
+ | U16
+ | U32"""
p[0] = p[1]
def p_define(self, p):
- '''define : DEFINE ID '{' block_statements_opt '}' ';' '''
+ """define : DEFINE ID '{' block_statements_opt '}' ';'"""
self.fields = []
p[0] = Define(p[2], [], p[4])
def p_define_flist(self, p):
- '''define : flist DEFINE ID '{' block_statements_opt '}' ';' '''
+ """define : flist DEFINE ID '{' block_statements_opt '}' ';'"""
# Legacy typedef
- if 'typeonly' in p[1]:
- self._parse_error('legacy typedef. use typedef: {} {}[{}];'
- .format(p[1], p[2], p[4]),
- self._token_coord(p, 1))
+ if "typeonly" in p[1]:
+ self._parse_error(
+ "legacy typedef. use typedef: {} {}[{}];".format(p[1], p[2], p[4]),
+ self._token_coord(p, 1),
+ )
else:
p[0] = Define(p[3], p[1], p[5])
def p_flist(self, p):
- '''flist : flag
- | flist flag'''
+ """flist : flag
+ | flist flag"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_flag(self, p):
- '''flag : MANUAL_PRINT
- | MANUAL_ENDIAN
- | DONT_TRACE
- | TYPEONLY
- | AUTOENDIAN
- | AUTOREPLY'''
+ """flag : MANUAL_PRINT
+ | MANUAL_ENDIAN
+ | DONT_TRACE
+ | TYPEONLY
+ | AUTOENDIAN
+ | AUTOREPLY"""
if len(p) == 1:
return
p[0] = p[1]
def p_typedef(self, p):
- '''typedef : TYPEDEF ID '{' block_statements_opt '}' ';' '''
+ """typedef : TYPEDEF ID '{' block_statements_opt '}' ';'"""
p[0] = Typedef(p[2], [], p[4])
def p_typedef_flist(self, p):
- '''typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';' '''
+ """typedef : flist TYPEDEF ID '{' block_statements_opt '}' ';'"""
p[0] = Typedef(p[3], p[1], p[5])
def p_typedef_alias(self, p):
- '''typedef : TYPEDEF declaration '''
+ """typedef : TYPEDEF declaration"""
p[0] = Using(p[2].fieldname, [], p[2])
def p_typedef_alias_flist(self, p):
- '''typedef : flist TYPEDEF declaration '''
+ """typedef : flist TYPEDEF declaration"""
p[0] = Using(p[3].fieldname, p[1], p[3])
def p_block_statements_opt(self, p):
- '''block_statements_opt : block_statements '''
+ """block_statements_opt : block_statements"""
p[0] = p[1]
def p_block_statements(self, p):
- '''block_statements : block_statement
- | block_statements block_statement'''
+ """block_statements : block_statement
+ | block_statements block_statement"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_block_statement(self, p):
- '''block_statement : declaration
- | option '''
+ """block_statement : declaration
+ | option"""
p[0] = p[1]
def p_enum_statements(self, p):
- '''enum_statements : enum_statement
- | enum_statements enum_statement'''
+ """enum_statements : enum_statement
+ | enum_statements enum_statement"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_enum_statement(self, p):
- '''enum_statement : ID '=' NUM ','
- | ID ','
- | ID '[' field_options ']' ','
- | ID '=' NUM '[' field_options ']' ',' '''
+ """enum_statement : ID '=' NUM ','
+ | ID ','
+ | ID '[' field_options ']' ','
+ | ID '=' NUM '[' field_options ']' ','"""
if len(p) == 3:
- p[0] = {'id': p[1]}
+ p[0] = {"id": p[1]}
elif len(p) == 5:
- p[0] = {'id': p[1], 'value': p[3]}
+ p[0] = {"id": p[1], "value": p[3]}
elif len(p) == 6:
- p[0] = {'id': p[1], 'option': p[3]}
+ p[0] = {"id": p[1], "option": p[3]}
elif len(p) == 8:
- p[0] = {'id': p[1], 'value': p[3], 'option': p[5]}
+ p[0] = {"id": p[1], "value": p[3], "option": p[5]}
else:
- self._parse_error('ERROR', self._token_coord(p, 1))
+ self._parse_error("ERROR", self._token_coord(p, 1))
def p_field_options(self, p):
- '''field_options : field_option
- | field_options field_option'''
+ """field_options : field_option
+ | field_options field_option"""
if len(p) == 2:
p[0] = p[1]
else:
p[0] = {**p[1], **p[2]}
def p_field_option(self, p):
- '''field_option : ID
- | ID '=' assignee ','
- | ID '=' assignee
+ """field_option : ID
+ | ID '=' assignee ','
+ | ID '=' assignee
- '''
+ """
if len(p) == 2:
p[0] = {p[1]: None}
else:
p[0] = {p[1]: p[3]}
def p_variable_name(self, p):
- '''variable_name : ID
- | TYPE
- | SEVERITY
- | DESCRIPTION
- | COUNTERS
- | PATHS
- '''
+ """variable_name : ID
+ | TYPE
+ | SEVERITY
+ | DESCRIPTION
+ | COUNTERS
+ | PATHS
+ """
p[0] = p[1]
def p_declaration(self, p):
- '''declaration : type_specifier variable_name ';'
- | type_specifier variable_name '[' field_options ']' ';'
- '''
+ """declaration : type_specifier variable_name ';'
+ | type_specifier variable_name '[' field_options ']' ';'
+ """
if len(p) == 7:
p[0] = Field(p[1], p[2], p[4])
elif len(p) == 4:
p[0] = Field(p[1], p[2])
else:
- self._parse_error('ERROR', self._token_coord(p, 1))
+ self._parse_error("ERROR", self._token_coord(p, 1))
self.fields.append(p[2])
def p_declaration_array_vla(self, p):
- '''declaration : type_specifier variable_name '[' ']' ';' '''
+ """declaration : type_specifier variable_name '[' ']' ';'"""
p[0] = Array(p[1], p[2], 0, modern_vla=True)
def p_declaration_array(self, p):
- '''declaration : type_specifier variable_name '[' NUM ']' ';'
- | type_specifier variable_name '[' ID ']' ';' '''
+ """declaration : type_specifier variable_name '[' NUM ']' ';'
+ | type_specifier variable_name '[' ID ']' ';'"""
if len(p) != 7:
return self._parse_error(
- 'array: %s' % p.value,
- self._coord(lineno=p.lineno))
+ "array: %s" % p.value, self._coord(lineno=p.lineno)
+ )
# Make this error later
if type(p[4]) is int and p[4] == 0:
# XXX: Line number is wrong
- self._parse_warning('Old Style VLA: {} {}[{}];'
- .format(p[1], p[2], p[4]),
- self._token_coord(p, 1))
+ self._parse_warning(
+ "Old Style VLA: {} {}[{}];".format(p[1], p[2], p[4]),
+ self._token_coord(p, 1),
+ )
if type(p[4]) is str and p[4] not in self.fields:
# Verify that length field exists
- self._parse_error('Missing length field: {} {}[{}];'
- .format(p[1], p[2], p[4]),
- self._token_coord(p, 1))
+ self._parse_error(
+ "Missing length field: {} {}[{}];".format(p[1], p[2], p[4]),
+ self._token_coord(p, 1),
+ )
p[0] = Array(p[1], p[2], p[4])
def p_option(self, p):
- '''option : OPTION ID '=' assignee ';'
- | OPTION ID ';' '''
+ """option : OPTION ID '=' assignee ';'
+ | OPTION ID ';'"""
if len(p) == 4:
p[0] = Option(p[2])
else:
p[0] = Option(p[2], p[4])
def p_assignee(self, p):
- '''assignee : NUM
- | TRUE
- | FALSE
- | STRING_LITERAL '''
+ """assignee : NUM
+ | TRUE
+ | FALSE
+ | STRING_LITERAL"""
p[0] = p[1]
def p_type_specifier(self, p):
- '''type_specifier : U8
- | U16
- | U32
- | U64
- | I8
- | I16
- | I32
- | I64
- | F64
- | BOOL
- | STRING'''
+ """type_specifier : U8
+ | U16
+ | U32
+ | U64
+ | I8
+ | I16
+ | I32
+ | I64
+ | F64
+ | BOOL
+ | STRING"""
p[0] = p[1]
# Do a second pass later to verify that user defined types are defined
def p_typedef_specifier(self, p):
- '''type_specifier : ID '''
+ """type_specifier : ID"""
if p[1] not in global_types:
- self._parse_error('Undefined type: {}'.format(p[1]),
- self._token_coord(p, 1))
+ self._parse_error(
+ "Undefined type: {}".format(p[1]), self._token_coord(p, 1)
+ )
p[0] = p[1]
def p_union(self, p):
- '''union : UNION ID '{' block_statements_opt '}' ';' '''
+ """union : UNION ID '{' block_statements_opt '}' ';'"""
p[0] = Union(p[2], [], p[4])
def p_union_flist(self, p):
- '''union : flist UNION ID '{' block_statements_opt '}' ';' '''
+ """union : flist UNION ID '{' block_statements_opt '}' ';'"""
p[0] = Union(p[3], p[1], p[5])
# Error rule for syntax errors
def p_error(self, p):
if p:
- self._parse_error(
- 'before: %s' % p.value,
- self._coord(lineno=p.lineno))
+ self._parse_error("before: %s" % p.value, self._coord(lineno=p.lineno))
else:
- self._parse_error('At end of input', self.filename)
-
+ self._parse_error("At end of input", self.filename)
-class VPPAPI():
- def __init__(self, debug=False, filename='', logger=None, revision=None):
+class VPPAPI:
+ def __init__(self, debug=False, filename="", logger=None, revision=None):
self.lexer = lex.lex(module=VPPAPILexer(filename), debug=debug)
- self.parser = yacc.yacc(module=VPPAPIParser(filename, logger,
- revision=revision),
- write_tables=False, debug=debug)
+ self.parser = yacc.yacc(
+ module=VPPAPIParser(filename, logger, revision=revision),
+ write_tables=False,
+ debug=debug,
+ )
self.logger = logger
self.revision = revision
self.filename = filename
@@ -979,38 +982,40 @@ class VPPAPI():
def parse_filename(self, filename, debug=0):
if self.revision:
- git_show = 'git show {}:{}'.format(self.revision, filename)
- proc = Popen(git_show.split(), stdout=PIPE, encoding='utf-8')
+ git_show = "git show {}:{}".format(self.revision, filename)
+ proc = Popen(git_show.split(), stdout=PIPE, encoding="utf-8")
try:
data, errs = proc.communicate()
if proc.returncode != 0:
- print('File not found: {}:{}'
- .format(self.revision, filename), file=sys.stderr)
+ print(
+ "File not found: {}:{}".format(self.revision, filename),
+ file=sys.stderr,
+ )
sys.exit(2)
return self.parse_string(data, debug=debug)
except Exception:
sys.exit(3)
else:
try:
- with open(filename, encoding='utf-8') as fd:
+ with open(filename, encoding="utf-8") as fd:
return self.parse_fd(fd, None)
except FileNotFoundError:
- print('File not found: {}'.format(filename), file=sys.stderr)
+ print("File not found: {}".format(filename), file=sys.stderr)
sys.exit(2)
def process(self, objs):
s = {}
- s['Option'] = {}
- s['Define'] = []
- s['Service'] = []
- s['types'] = []
- s['Import'] = []
- s['Counters'] = []
- s['Paths'] = []
+ s["Option"] = {}
+ s["Define"] = []
+ s["Service"] = []
+ s["types"] = []
+ s["Import"] = []
+ s["Counters"] = []
+ s["Paths"] = []
crc = 0
for o in objs:
try:
- crc = binascii.crc32(o.crc, crc) & 0xffffffff
+ crc = binascii.crc32(o.crc, crc) & 0xFFFFFFFF
except AttributeError:
pass
@@ -1021,82 +1026,84 @@ class VPPAPI():
else:
o.process(s)
- msgs = {d.name: d for d in s['Define']}
- svcs = {s.caller: s for s in s['Service']}
- replies = {s.reply: s for s in s['Service']}
+ msgs = {d.name: d for d in s["Define"]}
+ svcs = {s.caller: s for s in s["Service"]}
+ replies = {s.reply: s for s in s["Service"]}
seen_services = {}
- s['file_crc'] = crc
+ s["file_crc"] = crc
for service in svcs:
if service not in msgs:
raise ValueError(
- 'Service definition refers to unknown message'
- ' definition: {}'.format(service))
- if svcs[service].reply != 'null' and \
- svcs[service].reply not in msgs:
- raise ValueError('Service definition refers to unknown message'
- ' definition in reply: {}'
- .format(svcs[service].reply))
+ "Service definition refers to unknown message"
+ " definition: {}".format(service)
+ )
+ if svcs[service].reply != "null" and svcs[service].reply not in msgs:
+ raise ValueError(
+ "Service definition refers to unknown message"
+ " definition in reply: {}".format(svcs[service].reply)
+ )
if service in replies:
- raise ValueError('Service definition refers to message'
- ' marked as reply: {}'.format(service))
+ raise ValueError(
+ "Service definition refers to message"
+ " marked as reply: {}".format(service)
+ )
for event in svcs[service].events:
if event not in msgs:
- raise ValueError('Service definition refers to unknown '
- 'event: {} in message: {}'
- .format(event, service))
+ raise ValueError(
+ "Service definition refers to unknown "
+ "event: {} in message: {}".format(event, service)
+ )
seen_services[event] = True
# Create services implicitly
for d in msgs:
if d in seen_services:
continue
- if d.endswith('_reply'):
+ if d.endswith("_reply"):
if d[:-6] in svcs:
continue
if d[:-6] not in msgs:
- raise ValueError('{} missing calling message'
- .format(d))
+ raise ValueError("{} missing calling message".format(d))
continue
- if d.endswith('_dump'):
+ if d.endswith("_dump"):
if d in svcs:
continue
- if d[:-5]+'_details' in msgs:
- s['Service'].append(Service(d, d[:-5]+'_details',
- stream=True))
+ if d[:-5] + "_details" in msgs:
+ s["Service"].append(Service(d, d[:-5] + "_details", stream=True))
else:
- raise ValueError('{} missing details message'
- .format(d))
+ raise ValueError("{} missing details message".format(d))
continue
- if d.endswith('_details'):
- if d[:-8]+'_get' in msgs:
- if d[:-8]+'_get' in svcs:
+ if d.endswith("_details"):
+ if d[:-8] + "_get" in msgs:
+ if d[:-8] + "_get" in svcs:
continue
- raise ValueError('{} should be in a stream service'
- .format(d[:-8]+'_get'))
- if d[:-8]+'_dump' in msgs:
+ raise ValueError(
+ "{} should be in a stream service".format(d[:-8] + "_get")
+ )
+ if d[:-8] + "_dump" in msgs:
continue
- raise ValueError('{} missing dump or get message'
- .format(d))
+ raise ValueError("{} missing dump or get message".format(d))
if d in svcs:
continue
- if d+'_reply' in msgs:
- s['Service'].append(Service(d, d+'_reply'))
+ if d + "_reply" in msgs:
+ s["Service"].append(Service(d, d + "_reply"))
else:
raise ValueError(
- '{} missing reply message ({}) or service definition'
- .format(d, d+'_reply'))
+ "{} missing reply message ({}) or service definition".format(
+ d, d + "_reply"
+ )
+ )
return s
def process_imports(self, objs, in_import, result): # -> List
for o in objs:
# Only allow the following object types from imported file
- if in_import and not isinstance(o, (Enum, Import, Typedef,
- Union, Using)):
+ if in_import and not isinstance(o, (Enum, Import, Typedef, Union, Using)):
continue
if isinstance(o, Import):
result.append(o)
@@ -1109,7 +1116,7 @@ class VPPAPI():
# Add message ids to each message.
def add_msg_id(s):
for o in s:
- o.block.insert(0, Field('u16', '_vl_msg_id'))
+ o.block.insert(0, Field("u16", "_vl_msg_id"))
return s
@@ -1129,11 +1136,11 @@ def dirlist_get():
def foldup_blocks(block, crc):
for b in block:
# Look up CRC in user defined types
- if b.fieldtype.startswith('vl_api_'):
+ if b.fieldtype.startswith("vl_api_"):
# Recursively
t = global_types[b.fieldtype]
try:
- crc = binascii.crc32(t.crc, crc) & 0xffffffff
+ crc = binascii.crc32(t.crc, crc) & 0xFFFFFFFF
crc = foldup_blocks(t.block, crc)
except AttributeError:
pass
@@ -1142,34 +1149,43 @@ def foldup_blocks(block, crc):
def foldup_crcs(s):
for f in s:
- f.crc = foldup_blocks(f.block,
- binascii.crc32(f.crc) & 0xffffffff)
+ f.crc = foldup_blocks(f.block, binascii.crc32(f.crc) & 0xFFFFFFFF)
#
# Main
#
def main():
- if sys.version_info < (3, 5,):
- log.exception('vppapigen requires a supported version of python. '
- 'Please use version 3.5 or greater. '
- 'Using %s', sys.version)
+ if sys.version_info < (
+ 3,
+ 5,
+ ):
+ log.exception(
+ "vppapigen requires a supported version of python. "
+ "Please use version 3.5 or greater. "
+ "Using %s",
+ sys.version,
+ )
return 1
- cliparser = argparse.ArgumentParser(description='VPP API generator')
- cliparser.add_argument('--pluginpath', default="")
- cliparser.add_argument('--includedir', action='append')
- cliparser.add_argument('--outputdir', action='store')
- cliparser.add_argument('--input')
- cliparser.add_argument('--output', nargs='?',
- type=argparse.FileType('w', encoding='UTF-8'),
- default=sys.stdout)
-
- cliparser.add_argument('output_module', nargs='?', default='C')
- cliparser.add_argument('--debug', action='store_true')
- cliparser.add_argument('--show-name', nargs=1)
- cliparser.add_argument('--git-revision',
- help="Git revision to use for opening files")
+ cliparser = argparse.ArgumentParser(description="VPP API generator")
+ cliparser.add_argument("--pluginpath", default="")
+ cliparser.add_argument("--includedir", action="append")
+ cliparser.add_argument("--outputdir", action="store")
+ cliparser.add_argument("--input")
+ cliparser.add_argument(
+ "--output",
+ nargs="?",
+ type=argparse.FileType("w", encoding="UTF-8"),
+ default=sys.stdout,
+ )
+
+ cliparser.add_argument("output_module", nargs="?", default="C")
+ cliparser.add_argument("--debug", action="store_true")
+ cliparser.add_argument("--show-name", nargs=1)
+ cliparser.add_argument(
+ "--git-revision", help="Git revision to use for opening files"
+ )
args = cliparser.parse_args()
dirlist_add(args.includedir)
@@ -1182,7 +1198,7 @@ def main():
elif args.input:
filename = args.input
else:
- filename = ''
+ filename = ""
if args.debug:
logging.basicConfig(stream=sys.stdout, level=logging.WARNING)
@@ -1195,36 +1211,34 @@ def main():
from importlib.machinery import SourceFileLoader
# Default path
- pluginpath = ''
+ pluginpath = ""
if not args.pluginpath:
cand = []
cand.append(os.path.dirname(os.path.realpath(__file__)))
- cand.append(os.path.dirname(os.path.realpath(__file__)) +
- '/../share/vpp/')
+ cand.append(os.path.dirname(os.path.realpath(__file__)) + "/../share/vpp/")
for c in cand:
- c += '/'
- if os.path.isfile('{}vppapigen_{}.py'
- .format(c, args.output_module.lower())):
+ c += "/"
+ if os.path.isfile(
+ "{}vppapigen_{}.py".format(c, args.output_module.lower())
+ ):
pluginpath = c
break
else:
- pluginpath = args.pluginpath + '/'
- if pluginpath == '':
- log.exception('Output plugin not found')
+ pluginpath = args.pluginpath + "/"
+ if pluginpath == "":
+ log.exception("Output plugin not found")
return 1
- module_path = '{}vppapigen_{}.py'.format(pluginpath,
- args.output_module.lower())
+ module_path = "{}vppapigen_{}.py".format(pluginpath, args.output_module.lower())
try:
- plugin = SourceFileLoader(args.output_module,
- module_path).load_module()
+ plugin = SourceFileLoader(args.output_module, module_path).load_module()
except Exception as err:
- log.exception('Error importing output plugin: %s, %s',
- module_path, err)
+ log.exception("Error importing output plugin: %s, %s", module_path, err)
return 1
- parser = VPPAPI(debug=args.debug, filename=filename, logger=log,
- revision=args.git_revision)
+ parser = VPPAPI(
+ debug=args.debug, filename=filename, logger=log, revision=args.git_revision
+ )
try:
if not args.input:
@@ -1232,7 +1246,7 @@ def main():
else:
parsed_objects = parser.parse_filename(args.input, log)
except ParseError as e:
- print('Parse error: ', e, file=sys.stderr)
+ print("Parse error: ", e, file=sys.stderr)
sys.exit(1)
# Build a list of objects. Hash of lists.
@@ -1250,32 +1264,33 @@ def main():
else:
s = parser.process(parsed_objects)
imports = parser.process_imports(parsed_objects, False, result)
- s['imported'] = parser.process(imports)
+ s["imported"] = parser.process(imports)
# Add msg_id field
- s['Define'] = add_msg_id(s['Define'])
+ s["Define"] = add_msg_id(s["Define"])
# Fold up CRCs
- foldup_crcs(s['Define'])
+ foldup_crcs(s["Define"])
#
# Debug
if args.debug:
import pprint
+
pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr)
- for t in s['Define']:
+ for t in s["Define"]:
pp.pprint([t.name, t.flags, t.block])
- for t in s['types']:
+ for t in s["types"]:
pp.pprint([t.name, t.block])
result = plugin.run(args, filename, s)
if result:
print(result, file=args.output)
else:
- log.exception('Running plugin failed: %s %s', filename, result)
+ log.exception("Running plugin failed: %s %s", filename, result)
return 1
return 0
-if __name__ == '__main__':
+if __name__ == "__main__":
sys.exit(main())
diff --git a/src/tools/vppapigen/vppapigen_c.py b/src/tools/vppapigen/vppapigen_c.py
index f93e89843a3..fdbb7270a8a 100644
--- a/src/tools/vppapigen/vppapigen_c.py
+++ b/src/tools/vppapigen/vppapigen_c.py
@@ -18,10 +18,10 @@
# binary API format
#
-'''
+"""
This module creates C code for core VPP, VPP plugins and client side VAT and
VAT2 tests.
-'''
+"""
import datetime
import itertools
@@ -35,98 +35,103 @@ process_imports = False
###############################################################################
-class ToJSON():
- '''Class to generate functions converting from VPP binary API to JSON.'''
+class ToJSON:
+ """Class to generate functions converting from VPP binary API to JSON."""
+
_dispatch = {}
- noprint_fields = {'_vl_msg_id': None,
- 'client_index': None,
- 'context': None}
- is_number = {'u8': None,
- 'i8': None,
- 'u16': None,
- 'i16': None,
- 'u32': None,
- 'i32': None,
- 'u64': None,
- 'i64': None,
- 'f64': None,
- }
+ noprint_fields = {"_vl_msg_id": None, "client_index": None, "context": None}
+ is_number = {
+ "u8": None,
+ "i8": None,
+ "u16": None,
+ "i16": None,
+ "u32": None,
+ "i32": None,
+ "u64": None,
+ "i64": None,
+ "f64": None,
+ }
def __init__(self, module, types, defines, imported_types, stream):
self.stream = stream
self.module = module
self.defines = defines
self.types = types
- self.types_hash = {'vl_api_'+d.name+'_t':
- d for d in types + imported_types}
+ self.types_hash = {"vl_api_" + d.name + "_t": d for d in types + imported_types}
self.defines_hash = {d.name: d for d in defines}
def header(self):
- '''Output the top boilerplate.'''
+ """Output the top boilerplate."""
write = self.stream.write
- write('#ifndef included_{}_api_tojson_h\n'.format(self.module))
- write('#define included_{}_api_tojson_h\n'.format(self.module))
- write('#include <vppinfra/cJSON.h>\n\n')
- write('#include <vppinfra/jsonformat.h>\n\n')
- if self.module == 'interface_types':
- write('#define vl_printfun\n')
- write('#include <vnet/interface_types.api.h>\n\n')
+ write("#ifndef included_{}_api_tojson_h\n".format(self.module))
+ write("#define included_{}_api_tojson_h\n".format(self.module))
+ write("#include <vppinfra/cJSON.h>\n\n")
+ write("#include <vppinfra/jsonformat.h>\n\n")
+ if self.module == "interface_types":
+ write("#define vl_printfun\n")
+ write("#include <vnet/interface_types.api.h>\n\n")
def footer(self):
- '''Output the bottom boilerplate.'''
+ """Output the bottom boilerplate."""
write = self.stream.write
- write('#endif\n')
+ write("#endif\n")
def get_base_type(self, t):
vt_type = None
try:
vt = self.types_hash[t]
- if vt.type == 'Using' and 'length' not in vt.alias:
- vt_type = vt.alias['type']
+ if vt.type == "Using" and "length" not in vt.alias:
+ vt_type = vt.alias["type"]
except KeyError:
vt = t
return vt, vt_type
def get_json_func(self, t):
- '''Given the type, returns the function to use to create a
- cJSON object'''
+ """Given the type, returns the function to use to create a
+ cJSON object"""
vt, vt_type = self.get_base_type(t)
if t in self.is_number or vt_type in self.is_number:
- return 'cJSON_AddNumberToObject', '', False
- if t == 'bool':
- return 'cJSON_AddBoolToObject', '', False
+ return "cJSON_AddNumberToObject", "", False
+ if t == "bool":
+ return "cJSON_AddBoolToObject", "", False
# Lookup type name check if it's enum
- if vt.type == 'Enum' or vt.type == 'EnumFlag':
- return '{t}_tojson'.format(t=t), '', True
- return '{t}_tojson'.format(t=t), '&', True
+ if vt.type == "Enum" or vt.type == "EnumFlag":
+ return "{t}_tojson".format(t=t), "", True
+ return "{t}_tojson".format(t=t), "&", True
def get_json_array_func(self, t):
- '''Given a type returns the function to create a cJSON object
- for arrays.'''
+ """Given a type returns the function to create a cJSON object
+ for arrays."""
if t in self.is_number:
- return 'cJSON_CreateNumber', ''
- if t == 'bool':
- return 'cJSON_CreateBool', ''
+ return "cJSON_CreateNumber", ""
+ if t == "bool":
+ return "cJSON_CreateBool", ""
vt, vt_type = self.get_base_type(t)
- if vt.type == 'Enum' or vt.type == 'EnumFlag':
- return '{t}_tojson'.format(t=t), ''
- return '{t}_tojson'.format(t=t), '&'
+ if vt.type == "Enum" or vt.type == "EnumFlag":
+ return "{t}_tojson".format(t=t), ""
+ return "{t}_tojson".format(t=t), "&"
def print_string(self, o):
- '''Create cJSON object from vl_api_string_t'''
+ """Create cJSON object from vl_api_string_t"""
write = self.stream.write
if o.modern_vla:
- write(' vl_api_string_cJSON_AddToObject(o, "{n}", &a->{n});\n'
- .format(n=o.fieldname))
+ write(
+ ' vl_api_string_cJSON_AddToObject(o, "{n}", &a->{n});\n'.format(
+ n=o.fieldname
+ )
+ )
else:
- write(' cJSON_AddStringToObject(o, "{n}", (char *)a->{n});\n'
- .format(n=o.fieldname))
+ write(
+ ' cJSON_AddStringToObject(o, "{n}", (char *)a->{n});\n'.format(
+ n=o.fieldname
+ )
+ )
def print_field(self, o):
- '''Called for every field in a typedef or define.'''
+ """Called for every field in a typedef or define."""
write = self.stream.write
if o.fieldname in self.noprint_fields:
return
@@ -134,19 +139,21 @@ class ToJSON():
f, p, newobj = self.get_json_func(o.fieldtype)
if newobj:
- write(' cJSON_AddItemToObject(o, "{n}", {f}({p}a->{n}));\n'
- .format(f=f, p=p, n=o.fieldname))
+ write(
+ ' cJSON_AddItemToObject(o, "{n}", {f}({p}a->{n}));\n'.format(
+ f=f, p=p, n=o.fieldname
+ )
+ )
else:
- write(' {f}(o, "{n}", {p}a->{n});\n'
- .format(f=f, p=p, n=o.fieldname))
+ write(' {f}(o, "{n}", {p}a->{n});\n'.format(f=f, p=p, n=o.fieldname))
- _dispatch['Field'] = print_field
+ _dispatch["Field"] = print_field
def print_array(self, o):
- '''Converts a VPP API array to cJSON array.'''
+ """Converts a VPP API array to cJSON array."""
write = self.stream.write
- forloop = '''\
+ forloop = """\
{{
int i;
cJSON *array = cJSON_AddArrayToObject(o, "{n}");
@@ -154,237 +161,263 @@ class ToJSON():
cJSON_AddItemToArray(array, {f}({p}a->{n}[i]));
}}
}}
-'''
+"""
- if o.fieldtype == 'string':
+ if o.fieldtype == "string":
self.print_string(o)
return
- lfield = 'a->' + o.lengthfield if o.lengthfield else o.length
- if o.fieldtype == 'u8':
- write(' {\n')
+ lfield = "a->" + o.lengthfield if o.lengthfield else o.length
+ if o.fieldtype == "u8":
+ write(" {\n")
# What is length field doing here?
- write(' u8 *s = format(0, "0x%U", format_hex_bytes, '
- '&a->{n}, {lfield});\n'
- .format(n=o.fieldname, lfield=lfield))
- write(' cJSON_AddStringToObject(o, "{n}", (char *)s);\n'
- .format(n=o.fieldname))
- write(' vec_free(s);\n')
- write(' }\n')
+ write(
+ ' u8 *s = format(0, "0x%U", format_hex_bytes, '
+ "&a->{n}, {lfield});\n".format(n=o.fieldname, lfield=lfield)
+ )
+ write(
+ ' cJSON_AddStringToObject(o, "{n}", (char *)s);\n'.format(
+ n=o.fieldname
+ )
+ )
+ write(" vec_free(s);\n")
+ write(" }\n")
return
f, p = self.get_json_array_func(o.fieldtype)
- write(forloop.format(lfield=lfield,
- t=o.fieldtype,
- n=o.fieldname,
- f=f,
- p=p))
+ write(forloop.format(lfield=lfield, t=o.fieldtype, n=o.fieldname, f=f, p=p))
- _dispatch['Array'] = print_array
+ _dispatch["Array"] = print_array
def print_enum(self, o):
- '''Create cJSON object (string) for VPP API enum'''
+ """Create cJSON object (string) for VPP API enum"""
write = self.stream.write
- write('static inline cJSON *vl_api_{name}_t_tojson '
- '(vl_api_{name}_t a) {{\n'.format(name=o.name))
+ write(
+ "static inline cJSON *vl_api_{name}_t_tojson "
+ "(vl_api_{name}_t a) {{\n".format(name=o.name)
+ )
write(" switch(a) {\n")
for b in o.block:
write(" case %s:\n" % b[1])
write(' return cJSON_CreateString("{}");\n'.format(b[0]))
write(' default: return cJSON_CreateString("Invalid ENUM");\n')
- write(' }\n')
- write(' return 0;\n')
- write('}\n')
+ write(" }\n")
+ write(" return 0;\n")
+ write("}\n")
- _dispatch['Enum'] = print_enum
+ _dispatch["Enum"] = print_enum
def print_enum_flag(self, o):
- '''Create cJSON object (string) for VPP API enum'''
+ """Create cJSON object (string) for VPP API enum"""
write = self.stream.write
- write('static inline cJSON *vl_api_{name}_t_tojson '
- '(vl_api_{name}_t a) {{\n'.format(name=o.name))
- write(' cJSON *array = cJSON_CreateArray();\n')
+ write(
+ "static inline cJSON *vl_api_{name}_t_tojson "
+ "(vl_api_{name}_t a) {{\n".format(name=o.name)
+ )
+ write(" cJSON *array = cJSON_CreateArray();\n")
for b in o.block:
if b[1] == 0:
- continue
- write(' if (a & {})\n'.format(b[0]))
+ continue
+ write(" if (a & {})\n".format(b[0]))
write(
- ' cJSON_AddItemToArray(array, cJSON_CreateString("{}"));\n'.format(b[0]))
- write(' return array;\n')
- write('}\n')
+ ' cJSON_AddItemToArray(array, cJSON_CreateString("{}"));\n'.format(
+ b[0]
+ )
+ )
+ write(" return array;\n")
+ write("}\n")
- _dispatch['EnumFlag'] = print_enum_flag
+ _dispatch["EnumFlag"] = print_enum_flag
def print_typedef(self, o):
- '''Create cJSON (dictionary) object from VPP API typedef'''
+ """Create cJSON (dictionary) object from VPP API typedef"""
write = self.stream.write
- write('static inline cJSON *vl_api_{name}_t_tojson '
- '(vl_api_{name}_t *a) {{\n'.format(name=o.name))
- write(' cJSON *o = cJSON_CreateObject();\n')
+ write(
+ "static inline cJSON *vl_api_{name}_t_tojson "
+ "(vl_api_{name}_t *a) {{\n".format(name=o.name)
+ )
+ write(" cJSON *o = cJSON_CreateObject();\n")
for t in o.block:
self._dispatch[t.type](self, t)
- write(' return o;\n')
- write('}\n')
+ write(" return o;\n")
+ write("}\n")
def print_define(self, o):
- '''Create cJSON (dictionary) object from VPP API define'''
+ """Create cJSON (dictionary) object from VPP API define"""
write = self.stream.write
- write('static inline cJSON *vl_api_{name}_t_tojson '
- '(vl_api_{name}_t *a) {{\n'.format(name=o.name))
- write(' cJSON *o = cJSON_CreateObject();\n')
- write(' cJSON_AddStringToObject(o, "_msgname", "{}");\n'
- .format(o.name))
- write(' cJSON_AddStringToObject(o, "_crc", "{crc:08x}");\n'
- .format(crc=o.crc))
+ write(
+ "static inline cJSON *vl_api_{name}_t_tojson "
+ "(vl_api_{name}_t *a) {{\n".format(name=o.name)
+ )
+ write(" cJSON *o = cJSON_CreateObject();\n")
+ write(' cJSON_AddStringToObject(o, "_msgname", "{}");\n'.format(o.name))
+ write(
+ ' cJSON_AddStringToObject(o, "_crc", "{crc:08x}");\n'.format(crc=o.crc)
+ )
for t in o.block:
self._dispatch[t.type](self, t)
- write(' return o;\n')
- write('}\n')
+ write(" return o;\n")
+ write("}\n")
def print_using(self, o):
- '''Create cJSON (dictionary) object from VPP API aliased type'''
+ """Create cJSON (dictionary) object from VPP API aliased type"""
if o.manual_print:
return
write = self.stream.write
- write('static inline cJSON *vl_api_{name}_t_tojson '
- '(vl_api_{name}_t *a) {{\n'.format(name=o.name))
-
- write(' u8 *s = format(0, "%U", format_vl_api_{}_t, a);\n'
- .format(o.name))
- write(' cJSON *o = cJSON_CreateString((char *)s);\n')
- write(' vec_free(s);\n')
- write(' return o;\n')
- write('}\n')
-
- _dispatch['Typedef'] = print_typedef
- _dispatch['Define'] = print_define
- _dispatch['Using'] = print_using
- _dispatch['Union'] = print_typedef
+ write(
+ "static inline cJSON *vl_api_{name}_t_tojson "
+ "(vl_api_{name}_t *a) {{\n".format(name=o.name)
+ )
+
+ write(' u8 *s = format(0, "%U", format_vl_api_{}_t, a);\n'.format(o.name))
+ write(" cJSON *o = cJSON_CreateString((char *)s);\n")
+ write(" vec_free(s);\n")
+ write(" return o;\n")
+ write("}\n")
+
+ _dispatch["Typedef"] = print_typedef
+ _dispatch["Define"] = print_define
+ _dispatch["Using"] = print_using
+ _dispatch["Union"] = print_typedef
def generate_function(self, t):
- '''Main entry point'''
+ """Main entry point"""
write = self.stream.write
if t.manual_print:
- write('/* Manual print {} */\n'.format(t.name))
+ write("/* Manual print {} */\n".format(t.name))
return
self._dispatch[t.type](self, t)
def generate_types(self):
- '''Main entry point'''
+ """Main entry point"""
for t in self.types:
self.generate_function(t)
def generate_defines(self):
- '''Main entry point'''
+ """Main entry point"""
for t in self.defines:
self.generate_function(t)
-class FromJSON():
- '''
+class FromJSON:
+ """
Parse JSON objects into VPP API binary message structures.
- '''
+ """
+
_dispatch = {}
- noprint_fields = {'_vl_msg_id': None,
- 'client_index': None,
- 'context': None}
- is_number = {'u8': None,
- 'i8': None,
- 'u16': None,
- 'i16': None,
- 'u32': None,
- 'i32': None,
- 'u64': None,
- 'i64': None,
- 'f64': None,
- }
+ noprint_fields = {"_vl_msg_id": None, "client_index": None, "context": None}
+ is_number = {
+ "u8": None,
+ "i8": None,
+ "u16": None,
+ "i16": None,
+ "u32": None,
+ "i32": None,
+ "u64": None,
+ "i64": None,
+ "f64": None,
+ }
def __init__(self, module, types, defines, imported_types, stream):
self.stream = stream
self.module = module
self.defines = defines
self.types = types
- self.types_hash = {'vl_api_'+d.name+'_t':
- d for d in types + imported_types}
+ self.types_hash = {"vl_api_" + d.name + "_t": d for d in types + imported_types}
self.defines_hash = {d.name: d for d in defines}
def header(self):
- '''Output the top boilerplate.'''
+ """Output the top boilerplate."""
write = self.stream.write
- write('#ifndef included_{}_api_fromjson_h\n'.format(self.module))
- write('#define included_{}_api_fromjson_h\n'.format(self.module))
- write('#include <vppinfra/cJSON.h>\n\n')
- write('#include <vppinfra/jsonformat.h>\n\n')
+ write("#ifndef included_{}_api_fromjson_h\n".format(self.module))
+ write("#define included_{}_api_fromjson_h\n".format(self.module))
+ write("#include <vppinfra/cJSON.h>\n\n")
+ write("#include <vppinfra/jsonformat.h>\n\n")
write('#pragma GCC diagnostic ignored "-Wunused-label"\n')
def is_base_type(self, t):
- '''Check if a type is one of the VPP API base types'''
+ """Check if a type is one of the VPP API base types"""
if t in self.is_number:
return True
- if t == 'bool':
+ if t == "bool":
return True
return False
def footer(self):
- '''Output the bottom boilerplate.'''
+ """Output the bottom boilerplate."""
write = self.stream.write
- write('#endif\n')
+ write("#endif\n")
def print_string(self, o, toplevel=False):
- '''Convert JSON string to vl_api_string_t'''
+ """Convert JSON string to vl_api_string_t"""
write = self.stream.write
msgvar = "a" if toplevel else "*mp"
msgsize = "l" if toplevel else "*len"
if o.modern_vla:
- write(' char *p = cJSON_GetStringValue(item);\n')
- write(' size_t plen = strlen(p);\n')
- write(' {msgvar} = cJSON_realloc({msgvar}, {msgsize} + plen, {msgsize});\n'
- .format(msgvar=msgvar, msgsize=msgsize))
- write(' if ({msgvar} == 0) goto error;\n'.format(msgvar=msgvar))
- write(' vl_api_c_string_to_api_string(p, (void *){msgvar} + '
- '{msgsize} - sizeof(vl_api_string_t));\n'
- .format(msgvar=msgvar, msgsize=msgsize))
- write(' {msgsize} += plen;\n'.format(msgsize=msgsize))
+ write(" char *p = cJSON_GetStringValue(item);\n")
+ write(" size_t plen = strlen(p);\n")
+ write(
+ " {msgvar} = cJSON_realloc({msgvar}, {msgsize} + plen, {msgsize});\n".format(
+ msgvar=msgvar, msgsize=msgsize
+ )
+ )
+ write(" if ({msgvar} == 0) goto error;\n".format(msgvar=msgvar))
+ write(
+ " vl_api_c_string_to_api_string(p, (void *){msgvar} + "
+ "{msgsize} - sizeof(vl_api_string_t));\n".format(
+ msgvar=msgvar, msgsize=msgsize
+ )
+ )
+ write(" {msgsize} += plen;\n".format(msgsize=msgsize))
else:
- write(' strncpy_s((char *)a->{n}, sizeof(a->{n}), '
- 'cJSON_GetStringValue(item), sizeof(a->{n}) - 1);\n'
- .format(n=o.fieldname))
+ write(
+ " strncpy_s((char *)a->{n}, sizeof(a->{n}), "
+ "cJSON_GetStringValue(item), sizeof(a->{n}) - 1);\n".format(
+ n=o.fieldname
+ )
+ )
def print_field(self, o, toplevel=False):
- '''Called for every field in a typedef or define.'''
+ """Called for every field in a typedef or define."""
write = self.stream.write
if o.fieldname in self.noprint_fields:
return
is_bt = self.is_base_type(o.fieldtype)
- t = 'vl_api_{}'.format(o.fieldtype) if is_bt else o.fieldtype
+ t = "vl_api_{}".format(o.fieldtype) if is_bt else o.fieldtype
msgvar = "(void **)&a" if toplevel else "mp"
msgsize = "&l" if toplevel else "len"
if is_bt:
- write(' vl_api_{t}_fromjson(item, &a->{n});\n'
- .format(t=o.fieldtype, n=o.fieldname))
+ write(
+ " vl_api_{t}_fromjson(item, &a->{n});\n".format(
+ t=o.fieldtype, n=o.fieldname
+ )
+ )
else:
- write(' if ({t}_fromjson({msgvar}, '
- '{msgsize}, item, &a->{n}) < 0) goto error;\n'
- .format(t=t, n=o.fieldname, msgvar=msgvar, msgsize=msgsize))
+ write(
+ " if ({t}_fromjson({msgvar}, "
+ "{msgsize}, item, &a->{n}) < 0) goto error;\n".format(
+ t=t, n=o.fieldname, msgvar=msgvar, msgsize=msgsize
+ )
+ )
- _dispatch['Field'] = print_field
+ _dispatch["Field"] = print_field
def print_array(self, o, toplevel=False):
- '''Convert JSON array to VPP API array'''
+ """Convert JSON array to VPP API array"""
write = self.stream.write
- forloop = '''\
+ forloop = """\
{{
int i;
cJSON *array = cJSON_GetObjectItem(o, "{n}");
@@ -395,8 +428,8 @@ class FromJSON():
{call}
}}
}}
-'''
- forloop_vla = '''\
+"""
+ forloop_vla = """\
{{
int i;
cJSON *array = cJSON_GetObjectItem(o, "{n}");
@@ -410,271 +443,296 @@ class FromJSON():
{call}
}}
}}
-'''
+"""
t = o.fieldtype
- if o.fieldtype == 'string':
+ if o.fieldtype == "string":
self.print_string(o, toplevel)
return
- lfield = 'a->' + o.lengthfield if o.lengthfield else o.length
+ lfield = "a->" + o.lengthfield if o.lengthfield else o.length
msgvar = "(void **)&a" if toplevel else "mp"
realloc = "a" if toplevel else "*mp"
msgsize = "l" if toplevel else "*len"
- if o.fieldtype == 'u8':
+ if o.fieldtype == "u8":
if o.lengthfield:
- write(' s = u8string_fromjson(o, "{}");\n'
- .format(o.fieldname))
- write(' if (!s) goto error;\n')
- write(' {} = vec_len(s);\n'.format(lfield))
-
- write(' {realloc} = cJSON_realloc({realloc}, {msgsize} + '
- 'vec_len(s), {msgsize});\n'.format(msgvar=msgvar, msgsize=msgsize, realloc=realloc))
- write(' memcpy((void *){realloc} + {msgsize}, s, '
- 'vec_len(s));\n'.format(realloc=realloc, msgsize=msgsize))
- write(' {msgsize} += vec_len(s);\n'.format(msgsize=msgsize))
-
- write(' vec_free(s);\n')
+ write(' s = u8string_fromjson(o, "{}");\n'.format(o.fieldname))
+ write(" if (!s) goto error;\n")
+ write(" {} = vec_len(s);\n".format(lfield))
+
+ write(
+ " {realloc} = cJSON_realloc({realloc}, {msgsize} + "
+ "vec_len(s), {msgsize});\n".format(
+ msgvar=msgvar, msgsize=msgsize, realloc=realloc
+ )
+ )
+ write(
+ " memcpy((void *){realloc} + {msgsize}, s, "
+ "vec_len(s));\n".format(realloc=realloc, msgsize=msgsize)
+ )
+ write(" {msgsize} += vec_len(s);\n".format(msgsize=msgsize))
+
+ write(" vec_free(s);\n")
else:
- write(' if (u8string_fromjson2(o, "{n}", a->{n}) < 0) goto error;\n'
- .format(n=o.fieldname))
+ write(
+ ' if (u8string_fromjson2(o, "{n}", a->{n}) < 0) goto error;\n'.format(
+ n=o.fieldname
+ )
+ )
return
is_bt = self.is_base_type(o.fieldtype)
if o.lengthfield:
if is_bt:
- call = ('vl_api_{t}_fromjson(e, &d[i]);'
- .format(t=o.fieldtype))
+ call = "vl_api_{t}_fromjson(e, &d[i]);".format(t=o.fieldtype)
else:
- call = ('if ({t}_fromjson({msgvar}, len, e, &d[i]) < 0) goto error; '
- .format(t=o.fieldtype, msgvar=msgvar))
- write(forloop_vla.format(lfield=lfield,
- t=o.fieldtype,
- n=o.fieldname,
- call=call,
- realloc=realloc,
- msgsize=msgsize))
+ call = "if ({t}_fromjson({msgvar}, len, e, &d[i]) < 0) goto error; ".format(
+ t=o.fieldtype, msgvar=msgvar
+ )
+ write(
+ forloop_vla.format(
+ lfield=lfield,
+ t=o.fieldtype,
+ n=o.fieldname,
+ call=call,
+ realloc=realloc,
+ msgsize=msgsize,
+ )
+ )
else:
if is_bt:
- call = ('vl_api_{t}_fromjson(e, &a->{n}[i]);'
- .format(t=t, n=o.fieldname))
+ call = "vl_api_{t}_fromjson(e, &a->{n}[i]);".format(t=t, n=o.fieldname)
else:
- call = ('if ({}_fromjson({}, len, e, &a->{}[i]) < 0) goto error;'
- .format(t, msgvar, o.fieldname))
- write(forloop.format(lfield=lfield,
- t=t,
- n=o.fieldname,
- call=call,
- msgvar=msgvar,
- realloc=realloc,
- msgsize=msgsize))
-
- _dispatch['Array'] = print_array
+ call = "if ({}_fromjson({}, len, e, &a->{}[i]) < 0) goto error;".format(
+ t, msgvar, o.fieldname
+ )
+ write(
+ forloop.format(
+ lfield=lfield,
+ t=t,
+ n=o.fieldname,
+ call=call,
+ msgvar=msgvar,
+ realloc=realloc,
+ msgsize=msgsize,
+ )
+ )
+
+ _dispatch["Array"] = print_array
def print_enum(self, o):
- '''Convert to JSON enum(string) to VPP API enum (int)'''
+ """Convert to JSON enum(string) to VPP API enum (int)"""
write = self.stream.write
- write('static inline int vl_api_{n}_t_fromjson'
- '(void **mp, int *len, cJSON *o, vl_api_{n}_t *a) {{\n'
- .format(n=o.name))
- write(' char *p = cJSON_GetStringValue(o);\n')
+ write(
+ "static inline int vl_api_{n}_t_fromjson"
+ "(void **mp, int *len, cJSON *o, vl_api_{n}_t *a) {{\n".format(n=o.name)
+ )
+ write(" char *p = cJSON_GetStringValue(o);\n")
for b in o.block:
- write(' if (strcmp(p, "{}") == 0) {{*a = {}; return 0;}}\n'
- .format(b[0], b[1]))
- write(' *a = 0;\n')
- write(' return -1;\n')
- write('}\n')
+ write(
+ ' if (strcmp(p, "{}") == 0) {{*a = {}; return 0;}}\n'.format(
+ b[0], b[1]
+ )
+ )
+ write(" *a = 0;\n")
+ write(" return -1;\n")
+ write("}\n")
- _dispatch['Enum'] = print_enum
+ _dispatch["Enum"] = print_enum
def print_enum_flag(self, o):
- '''Convert to JSON enum(string) to VPP API enum (int)'''
+ """Convert to JSON enum(string) to VPP API enum (int)"""
write = self.stream.write
- write('static inline int vl_api_{n}_t_fromjson '
- '(void **mp, int *len, cJSON *o, vl_api_{n}_t *a) {{\n'
- .format(n=o.name))
- write(' int i;\n')
- write(' *a = 0;\n')
- write(' for (i = 0; i < cJSON_GetArraySize(o); i++) {\n')
- write(' cJSON *e = cJSON_GetArrayItem(o, i);\n')
- write(' char *p = cJSON_GetStringValue(e);\n')
- write(' if (!p) return -1;\n')
+ write(
+ "static inline int vl_api_{n}_t_fromjson "
+ "(void **mp, int *len, cJSON *o, vl_api_{n}_t *a) {{\n".format(n=o.name)
+ )
+ write(" int i;\n")
+ write(" *a = 0;\n")
+ write(" for (i = 0; i < cJSON_GetArraySize(o); i++) {\n")
+ write(" cJSON *e = cJSON_GetArrayItem(o, i);\n")
+ write(" char *p = cJSON_GetStringValue(e);\n")
+ write(" if (!p) return -1;\n")
for b in o.block:
- write(' if (strcmp(p, "{}") == 0) *a |= {};\n'
- .format(b[0], b[1]))
- write(' }\n')
- write(' return 0;\n')
- write('}\n')
+ write(' if (strcmp(p, "{}") == 0) *a |= {};\n'.format(b[0], b[1]))
+ write(" }\n")
+ write(" return 0;\n")
+ write("}\n")
- _dispatch['EnumFlag'] = print_enum_flag
+ _dispatch["EnumFlag"] = print_enum_flag
def print_typedef(self, o):
- '''Convert from JSON object to VPP API binary representation'''
+ """Convert from JSON object to VPP API binary representation"""
write = self.stream.write
- write('static inline int vl_api_{name}_t_fromjson (void **mp, '
- 'int *len, cJSON *o, vl_api_{name}_t *a) {{\n'
- .format(name=o.name))
- write(' cJSON *item __attribute__ ((unused));\n')
- write(' u8 *s __attribute__ ((unused));\n')
+ write(
+ "static inline int vl_api_{name}_t_fromjson (void **mp, "
+ "int *len, cJSON *o, vl_api_{name}_t *a) {{\n".format(name=o.name)
+ )
+ write(" cJSON *item __attribute__ ((unused));\n")
+ write(" u8 *s __attribute__ ((unused));\n")
for t in o.block:
- if t.type == 'Field' and t.is_lengthfield:
+ if t.type == "Field" and t.is_lengthfield:
continue
- write('\n item = cJSON_GetObjectItem(o, "{}");\n'
- .format(t.fieldname))
- write(' if (!item) goto error;\n')
+ write('\n item = cJSON_GetObjectItem(o, "{}");\n'.format(t.fieldname))
+ write(" if (!item) goto error;\n")
self._dispatch[t.type](self, t)
- write('\n return 0;\n')
- write('\n error:\n')
- write(' return -1;\n')
- write('}\n')
+ write("\n return 0;\n")
+ write("\n error:\n")
+ write(" return -1;\n")
+ write("}\n")
def print_union(self, o):
- '''Convert JSON object to VPP API binary union'''
+ """Convert JSON object to VPP API binary union"""
write = self.stream.write
- write('static inline int vl_api_{name}_t_fromjson (void **mp, '
- 'int *len, cJSON *o, vl_api_{name}_t *a) {{\n'
- .format(name=o.name))
- write(' cJSON *item __attribute__ ((unused));\n')
- write(' u8 *s __attribute__ ((unused));\n')
+ write(
+ "static inline int vl_api_{name}_t_fromjson (void **mp, "
+ "int *len, cJSON *o, vl_api_{name}_t *a) {{\n".format(name=o.name)
+ )
+ write(" cJSON *item __attribute__ ((unused));\n")
+ write(" u8 *s __attribute__ ((unused));\n")
for t in o.block:
- if t.type == 'Field' and t.is_lengthfield:
+ if t.type == "Field" and t.is_lengthfield:
continue
- write(' item = cJSON_GetObjectItem(o, "{}");\n'
- .format(t.fieldname))
- write(' if (item) {\n')
+ write(' item = cJSON_GetObjectItem(o, "{}");\n'.format(t.fieldname))
+ write(" if (item) {\n")
self._dispatch[t.type](self, t)
- write(' };\n')
- write('\n return 0;\n')
- write('\n error:\n')
- write(' return -1;\n')
- write('}\n')
+ write(" };\n")
+ write("\n return 0;\n")
+ write("\n error:\n")
+ write(" return -1;\n")
+ write("}\n")
def print_define(self, o):
- '''Convert JSON object to VPP API message'''
+ """Convert JSON object to VPP API message"""
write = self.stream.write
error = 0
- write('static inline vl_api_{name}_t *vl_api_{name}_t_fromjson '
- '(cJSON *o, int *len) {{\n'.format(name=o.name))
- write(' cJSON *item __attribute__ ((unused));\n')
- write(' u8 *s __attribute__ ((unused));\n')
- write(' int l = sizeof(vl_api_{}_t);\n'.format(o.name))
- write(' vl_api_{}_t *a = cJSON_malloc(l);\n'.format(o.name))
- write('\n')
+ write(
+ "static inline vl_api_{name}_t *vl_api_{name}_t_fromjson "
+ "(cJSON *o, int *len) {{\n".format(name=o.name)
+ )
+ write(" cJSON *item __attribute__ ((unused));\n")
+ write(" u8 *s __attribute__ ((unused));\n")
+ write(" int l = sizeof(vl_api_{}_t);\n".format(o.name))
+ write(" vl_api_{}_t *a = cJSON_malloc(l);\n".format(o.name))
+ write("\n")
for t in o.block:
if t.fieldname in self.noprint_fields:
continue
- if t.type == 'Field' and t.is_lengthfield:
+ if t.type == "Field" and t.is_lengthfield:
continue
- write(' item = cJSON_GetObjectItem(o, "{}");\n'
- .format(t.fieldname))
- write(' if (!item) goto error;\n')
+ write(' item = cJSON_GetObjectItem(o, "{}");\n'.format(t.fieldname))
+ write(" if (!item) goto error;\n")
error += 1
self._dispatch[t.type](self, t, toplevel=True)
- write('\n')
+ write("\n")
- write(' *len = l;\n')
- write(' return a;\n')
+ write(" *len = l;\n")
+ write(" return a;\n")
if error:
- write('\n error:\n')
- write(' cJSON_free(a);\n')
- write(' return 0;\n')
- write('}\n')
+ write("\n error:\n")
+ write(" cJSON_free(a);\n")
+ write(" return 0;\n")
+ write("}\n")
def print_using(self, o):
- '''Convert JSON field to VPP type alias'''
+ """Convert JSON field to VPP type alias"""
write = self.stream.write
if o.manual_print:
return
t = o.using
- write('static inline int vl_api_{name}_t_fromjson (void **mp, '
- 'int *len, cJSON *o, vl_api_{name}_t *a) {{\n'
- .format(name=o.name))
- if 'length' in o.alias:
- if t.fieldtype != 'u8':
- raise ValueError("Error in processing type {} for {}"
- .format(t.fieldtype, o.name))
- write(' vl_api_u8_string_fromjson(o, (u8 *)a, {});\n'
- .format(o.alias['length']))
+ write(
+ "static inline int vl_api_{name}_t_fromjson (void **mp, "
+ "int *len, cJSON *o, vl_api_{name}_t *a) {{\n".format(name=o.name)
+ )
+ if "length" in o.alias:
+ if t.fieldtype != "u8":
+ raise ValueError(
+ "Error in processing type {} for {}".format(t.fieldtype, o.name)
+ )
+ write(
+ " vl_api_u8_string_fromjson(o, (u8 *)a, {});\n".format(
+ o.alias["length"]
+ )
+ )
else:
- write(' vl_api_{t}_fromjson(o, ({t} *)a);\n'
- .format(t=t.fieldtype))
+ write(" vl_api_{t}_fromjson(o, ({t} *)a);\n".format(t=t.fieldtype))
- write(' return 0;\n')
- write('}\n')
+ write(" return 0;\n")
+ write("}\n")
- _dispatch['Typedef'] = print_typedef
- _dispatch['Define'] = print_define
- _dispatch['Using'] = print_using
- _dispatch['Union'] = print_union
+ _dispatch["Typedef"] = print_typedef
+ _dispatch["Define"] = print_define
+ _dispatch["Using"] = print_using
+ _dispatch["Union"] = print_union
def generate_function(self, t):
- '''Main entry point'''
+ """Main entry point"""
write = self.stream.write
if t.manual_print:
- write('/* Manual print {} */\n'.format(t.name))
+ write("/* Manual print {} */\n".format(t.name))
return
self._dispatch[t.type](self, t)
def generate_types(self):
- '''Main entry point'''
+ """Main entry point"""
for t in self.types:
self.generate_function(t)
def generate_defines(self):
- '''Main entry point'''
+ """Main entry point"""
for t in self.defines:
self.generate_function(t)
def generate_tojson(s, modulename, stream):
- '''Generate all functions to convert from API to JSON'''
+ """Generate all functions to convert from API to JSON"""
write = stream.write
- write('/* Imported API files */\n')
- for i in s['Import']:
- f = i.filename.replace('plugins/', '')
- write('#include <{}_tojson.h>\n'.format(f))
+ write("/* Imported API files */\n")
+ for i in s["Import"]:
+ f = i.filename.replace("plugins/", "")
+ write("#include <{}_tojson.h>\n".format(f))
- pp = ToJSON(modulename, s['types'], s['Define'], s['imported']['types'],
- stream)
+ pp = ToJSON(modulename, s["types"], s["Define"], s["imported"]["types"], stream)
pp.header()
pp.generate_types()
pp.generate_defines()
pp.footer()
- return ''
+ return ""
def generate_fromjson(s, modulename, stream):
- '''Generate all functions to convert from JSON to API'''
+ """Generate all functions to convert from JSON to API"""
write = stream.write
- write('/* Imported API files */\n')
- for i in s['Import']:
- f = i.filename.replace('plugins/', '')
- write('#include <{}_fromjson.h>\n'.format(f))
+ write("/* Imported API files */\n")
+ for i in s["Import"]:
+ f = i.filename.replace("plugins/", "")
+ write("#include <{}_fromjson.h>\n".format(f))
- pp = FromJSON(modulename, s['types'], s['Define'], s['imported']['types'],
- stream)
+ pp = FromJSON(modulename, s["types"], s["Define"], s["imported"]["types"], stream)
pp.header()
pp.generate_types()
pp.generate_defines()
pp.footer()
- return ''
+ return ""
+
###############################################################################
DATESTRING = datetime.datetime.utcfromtimestamp(
- int(os.environ.get('SOURCE_DATE_EPOCH', time.time())))
-TOP_BOILERPLATE = '''\
+ int(os.environ.get("SOURCE_DATE_EPOCH", time.time()))
+)
+TOP_BOILERPLATE = """\
/*
* VLIB API definitions {datestring}
* Input file: {input_filename}
@@ -693,45 +751,47 @@ TOP_BOILERPLATE = '''\
#endif
#define VL_API_PACKED(x) x __attribute__ ((packed))
-'''
+"""
-BOTTOM_BOILERPLATE = '''\
+BOTTOM_BOILERPLATE = """\
/****** API CRC (whole file) *****/
#ifdef vl_api_version
vl_api_version({input_filename}, {file_crc:#08x})
#endif
-'''
+"""
def msg_ids(s):
- '''Generate macro to map API message id to handler'''
- output = '''\
+ """Generate macro to map API message id to handler"""
+ output = """\
/****** Message ID / handler enum ******/
#ifdef vl_msg_id
-'''
+"""
- for t in s['Define']:
- output += "vl_msg_id(VL_API_%s, vl_api_%s_t_handler)\n" % \
- (t.name.upper(), t.name)
+ for t in s["Define"]:
+ output += "vl_msg_id(VL_API_%s, vl_api_%s_t_handler)\n" % (
+ t.name.upper(),
+ t.name,
+ )
output += "#endif"
return output
def msg_names(s):
- '''Generate calls to name mapping macro'''
- output = '''\
+ """Generate calls to name mapping macro"""
+ output = """\
/****** Message names ******/
#ifdef vl_msg_name
-'''
+"""
- for t in s['Define']:
+ for t in s["Define"]:
dont_trace = 0 if t.dont_trace else 1
output += "vl_msg_name(vl_api_%s_t, %d)\n" % (t.name, dont_trace)
output += "#endif"
@@ -740,190 +800,215 @@ def msg_names(s):
def msg_name_crc_list(s, suffix):
- '''Generate list of names to CRC mappings'''
- output = '''\
+ """Generate list of names to CRC mappings"""
+ output = """\
/****** Message name, crc list ******/
#ifdef vl_msg_name_crc_list
-'''
+"""
output += "#define foreach_vl_msg_name_crc_%s " % suffix
- for t in s['Define']:
- output += "\\\n_(VL_API_%s, %s, %08x) " % \
- (t.name.upper(), t.name, t.crc)
+ for t in s["Define"]:
+ output += "\\\n_(VL_API_%s, %s, %08x) " % (t.name.upper(), t.name, t.crc)
output += "\n#endif"
return output
def api2c(fieldtype):
- '''Map between API type names and internal VPP type names'''
- mappingtable = {'string': 'vl_api_string_t', }
+ """Map between API type names and internal VPP type names"""
+ mappingtable = {
+ "string": "vl_api_string_t",
+ }
if fieldtype in mappingtable:
return mappingtable[fieldtype]
return fieldtype
def typedefs(filename):
- '''Include in the main files to the types file'''
- output = '''\
+ """Include in the main files to the types file"""
+ output = """\
/****** Typedefs ******/
#ifdef vl_typedefs
#include "{include}.api_types.h"
#endif
-'''.format(include=filename)
+""".format(
+ include=filename
+ )
return output
-FORMAT_STRINGS = {'u8': '%u',
- 'bool': '%u',
- 'i8': '%d',
- 'u16': '%u',
- 'i16': '%d',
- 'u32': '%u',
- 'i32': '%ld',
- 'u64': '%llu',
- 'i64': '%lld',
- 'f64': '%.2f'}
+FORMAT_STRINGS = {
+ "u8": "%u",
+ "bool": "%u",
+ "i8": "%d",
+ "u16": "%u",
+ "i16": "%d",
+ "u32": "%u",
+ "i32": "%ld",
+ "u64": "%llu",
+ "i64": "%lld",
+ "f64": "%.2f",
+}
+
+class Printfun:
+ """Functions for pretty printing VPP API messages"""
-class Printfun():
- '''Functions for pretty printing VPP API messages'''
_dispatch = {}
- noprint_fields = {'_vl_msg_id': None,
- 'client_index': None,
- 'context': None}
+ noprint_fields = {"_vl_msg_id": None, "client_index": None, "context": None}
def __init__(self, stream):
self.stream = stream
@staticmethod
def print_string(o, stream):
- '''Pretty print a vl_api_string_t'''
+ """Pretty print a vl_api_string_t"""
write = stream.write
if o.modern_vla:
- write(' if (vl_api_string_len(&a->{f}) > 0) {{\n'
- .format(f=o.fieldname))
- write(' s = format(s, "\\n%U{f}: %U", '
- 'format_white_space, indent, '
- 'vl_api_format_string, (&a->{f}));\n'.format(f=o.fieldname))
- write(' } else {\n')
- write(' s = format(s, "\\n%U{f}:", '
- 'format_white_space, indent);\n'.format(f=o.fieldname))
- write(' }\n')
+ write(" if (vl_api_string_len(&a->{f}) > 0) {{\n".format(f=o.fieldname))
+ write(
+ ' s = format(s, "\\n%U{f}: %U", '
+ "format_white_space, indent, "
+ "vl_api_format_string, (&a->{f}));\n".format(f=o.fieldname)
+ )
+ write(" } else {\n")
+ write(
+ ' s = format(s, "\\n%U{f}:", '
+ "format_white_space, indent);\n".format(f=o.fieldname)
+ )
+ write(" }\n")
else:
- write(' s = format(s, "\\n%U{f}: %s", '
- 'format_white_space, indent, a->{f});\n'
- .format(f=o.fieldname))
+ write(
+ ' s = format(s, "\\n%U{f}: %s", '
+ "format_white_space, indent, a->{f});\n".format(f=o.fieldname)
+ )
def print_field(self, o, stream):
- '''Pretty print API field'''
+ """Pretty print API field"""
write = stream.write
if o.fieldname in self.noprint_fields:
return
if o.fieldtype in FORMAT_STRINGS:
f = FORMAT_STRINGS[o.fieldtype]
- write(' s = format(s, "\\n%U{n}: {f}", '
- 'format_white_space, indent, a->{n});\n'
- .format(n=o.fieldname, f=f))
+ write(
+ ' s = format(s, "\\n%U{n}: {f}", '
+ "format_white_space, indent, a->{n});\n".format(n=o.fieldname, f=f)
+ )
else:
- write(' s = format(s, "\\n%U{n}: %U", '
- 'format_white_space, indent, '
- 'format_{t}, &a->{n}, indent);\n'
- .format(n=o.fieldname, t=o.fieldtype))
+ write(
+ ' s = format(s, "\\n%U{n}: %U", '
+ "format_white_space, indent, "
+ "format_{t}, &a->{n}, indent);\n".format(n=o.fieldname, t=o.fieldtype)
+ )
- _dispatch['Field'] = print_field
+ _dispatch["Field"] = print_field
def print_array(self, o, stream):
- '''Pretty print API array'''
+ """Pretty print API array"""
write = stream.write
- forloop = '''\
+ forloop = """\
for (i = 0; i < {lfield}; i++) {{
s = format(s, "\\n%U{n}: %U",
format_white_space, indent, format_{t}, &a->{n}[i], indent);
}}
-'''
+"""
- forloop_format = '''\
+ forloop_format = """\
for (i = 0; i < {lfield}; i++) {{
s = format(s, "\\n%U{n}: {t}",
format_white_space, indent, a->{n}[i]);
}}
-'''
+"""
- if o.fieldtype == 'string':
+ if o.fieldtype == "string":
self.print_string(o, stream)
return
- if o.fieldtype == 'u8':
+ if o.fieldtype == "u8":
if o.lengthfield:
- write(' s = format(s, "\\n%U{n}: %U", format_white_space, '
- 'indent, format_hex_bytes, a->{n}, a->{lfield});\n'
- .format(n=o.fieldname, lfield=o.lengthfield))
+ write(
+ ' s = format(s, "\\n%U{n}: %U", format_white_space, '
+ "indent, format_hex_bytes, a->{n}, a->{lfield});\n".format(
+ n=o.fieldname, lfield=o.lengthfield
+ )
+ )
else:
- write(' s = format(s, "\\n%U{n}: %U", format_white_space, '
- 'indent, format_hex_bytes, a, {lfield});\n'
- .format(n=o.fieldname, lfield=o.length))
+ write(
+ ' s = format(s, "\\n%U{n}: %U", format_white_space, '
+ "indent, format_hex_bytes, a, {lfield});\n".format(
+ n=o.fieldname, lfield=o.length
+ )
+ )
return
- lfield = 'a->' + o.lengthfield if o.lengthfield else o.length
+ lfield = "a->" + o.lengthfield if o.lengthfield else o.length
if o.fieldtype in FORMAT_STRINGS:
- write(forloop_format.format(lfield=lfield,
- t=FORMAT_STRINGS[o.fieldtype],
- n=o.fieldname))
+ write(
+ forloop_format.format(
+ lfield=lfield, t=FORMAT_STRINGS[o.fieldtype], n=o.fieldname
+ )
+ )
else:
write(forloop.format(lfield=lfield, t=o.fieldtype, n=o.fieldname))
- _dispatch['Array'] = print_array
+ _dispatch["Array"] = print_array
@staticmethod
def print_alias(k, v, stream):
- '''Pretty print type alias'''
+ """Pretty print type alias"""
write = stream.write
- if ('length' in v.alias and v.alias['length'] and
- v.alias['type'] == 'u8'):
- write(' return format(s, "%U", format_hex_bytes, a, {});\n'
- .format(v.alias['length']))
- elif v.alias['type'] in FORMAT_STRINGS:
- write(' return format(s, "{}", *a);\n'
- .format(FORMAT_STRINGS[v.alias['type']]))
+ if "length" in v.alias and v.alias["length"] and v.alias["type"] == "u8":
+ write(
+ ' return format(s, "%U", format_hex_bytes, a, {});\n'.format(
+ v.alias["length"]
+ )
+ )
+ elif v.alias["type"] in FORMAT_STRINGS:
+ write(
+ ' return format(s, "{}", *a);\n'.format(
+ FORMAT_STRINGS[v.alias["type"]]
+ )
+ )
else:
- write(' return format(s, "{} (print not implemented)");\n'
- .format(k))
+ write(' return format(s, "{} (print not implemented)");\n'.format(k))
@staticmethod
def print_enum(o, stream):
- '''Pretty print API enum'''
+ """Pretty print API enum"""
write = stream.write
write(" switch(*a) {\n")
for b in o:
write(" case %s:\n" % b[1])
write(' return format(s, "{}");\n'.format(b[0]))
- write(' }\n')
+ write(" }\n")
- _dispatch['Enum'] = print_enum
- _dispatch['EnumFlag'] = print_enum
+ _dispatch["Enum"] = print_enum
+ _dispatch["EnumFlag"] = print_enum
def print_obj(self, o, stream):
- '''Entry point'''
+ """Entry point"""
write = stream.write
if o.type in self._dispatch:
self._dispatch[o.type](self, o, stream)
else:
- write(' s = format(s, "\\n{} {} {} (print not implemented");\n'
- .format(o.type, o.fieldtype, o.fieldname))
+ write(
+ ' s = format(s, "\\n{} {} {} (print not implemented");\n'.format(
+ o.type, o.fieldtype, o.fieldname
+ )
+ )
def printfun(objs, stream, modulename):
- '''Main entry point for pretty print function generation'''
+ """Main entry point for pretty print function generation"""
write = stream.write
- h = '''\
+ h = """\
/****** Print functions *****/
#ifdef vl_printfun
#ifndef included_{module}_printfun
@@ -940,15 +1025,15 @@ def printfun(objs, stream, modulename):
#include "{module}.api_tojson.h"
#include "{module}.api_fromjson.h"
-'''
+"""
- signature = '''\
+ signature = """\
static inline void *vl_api_{name}_t_print{suffix} (vl_api_{name}_t *a, void *handle)
{{
u8 *s = 0;
u32 indent __attribute__((unused)) = 2;
int i __attribute__((unused));
-'''
+"""
h = h.format(module=modulename)
write(h)
@@ -958,171 +1043,172 @@ static inline void *vl_api_{name}_t_print{suffix} (vl_api_{name}_t *a, void *han
if t.manual_print:
write("/***** manual: vl_api_%s_t_print *****/\n\n" % t.name)
continue
- write(signature.format(name=t.name, suffix=''))
- write(' /* Message definition: vl_api_{}_t: */\n'.format(t.name))
- write(" s = format(s, \"vl_api_%s_t:\");\n" % t.name)
+ write(signature.format(name=t.name, suffix=""))
+ write(" /* Message definition: vl_api_{}_t: */\n".format(t.name))
+ write(' s = format(s, "vl_api_%s_t:");\n' % t.name)
for o in t.block:
pp.print_obj(o, stream)
- write(' vec_add1(s, 0);\n')
- write(' vl_print (handle, (char *)s);\n')
- write(' vec_free (s);\n')
- write(' return handle;\n')
- write('}\n\n')
-
- write(signature.format(name=t.name, suffix='_json'))
- write(' cJSON * o = vl_api_{}_t_tojson(a);\n'.format(t.name))
- write(' (void)s;\n')
- write(' char *out = cJSON_Print(o);\n')
- write(' vl_print(handle, out);\n')
- write(' cJSON_Delete(o);\n')
- write(' cJSON_free(out);\n')
- write(' return handle;\n')
- write('}\n\n')
+ write(" vec_add1(s, 0);\n")
+ write(" vl_print (handle, (char *)s);\n")
+ write(" vec_free (s);\n")
+ write(" return handle;\n")
+ write("}\n\n")
+
+ write(signature.format(name=t.name, suffix="_json"))
+ write(" cJSON * o = vl_api_{}_t_tojson(a);\n".format(t.name))
+ write(" (void)s;\n")
+ write(" char *out = cJSON_Print(o);\n")
+ write(" vl_print(handle, out);\n")
+ write(" cJSON_Delete(o);\n")
+ write(" cJSON_free(out);\n")
+ write(" return handle;\n")
+ write("}\n\n")
write("\n#endif")
write("\n#endif /* vl_printfun */\n")
- return ''
+ return ""
def printfun_types(objs, stream, modulename):
- '''Pretty print API types'''
+ """Pretty print API types"""
write = stream.write
pp = Printfun(stream)
- h = '''\
+ h = """\
/****** Print functions *****/
#ifdef vl_printfun
#ifndef included_{module}_printfun_types
#define included_{module}_printfun_types
-'''
+"""
h = h.format(module=modulename)
write(h)
- signature = '''\
+ signature = """\
static inline u8 *format_vl_api_{name}_t (u8 *s, va_list * args)
{{
vl_api_{name}_t *a = va_arg (*args, vl_api_{name}_t *);
u32 indent __attribute__((unused)) = va_arg (*args, u32);
int i __attribute__((unused));
indent += 2;
-'''
+"""
for t in objs:
- if t.__class__.__name__ == 'Enum' or t.__class__.__name__ == 'EnumFlag':
+ if t.__class__.__name__ == "Enum" or t.__class__.__name__ == "EnumFlag":
write(signature.format(name=t.name))
pp.print_enum(t.block, stream)
- write(' return s;\n')
- write('}\n\n')
+ write(" return s;\n")
+ write("}\n\n")
continue
if t.manual_print:
write("/***** manual: vl_api_%s_t_print *****/\n\n" % t.name)
continue
- if t.__class__.__name__ == 'Using':
+ if t.__class__.__name__ == "Using":
write(signature.format(name=t.name))
pp.print_alias(t.name, t, stream)
- write('}\n\n')
+ write("}\n\n")
continue
write(signature.format(name=t.name))
for o in t.block:
pp.print_obj(o, stream)
- write(' return s;\n')
- write('}\n\n')
+ write(" return s;\n")
+ write("}\n\n")
write("\n#endif")
write("\n#endif /* vl_printfun_types */\n")
def generate_imports(imports):
- '''Add #include matching the API import statements'''
- output = '/* Imported API files */\n'
- output += '#ifndef vl_api_version\n'
+ """Add #include matching the API import statements"""
+ output = "/* Imported API files */\n"
+ output += "#ifndef vl_api_version\n"
for i in imports:
- s = i.filename.replace('plugins/', '')
- output += '#include <{}.h>\n'.format(s)
- output += '#endif\n'
+ s = i.filename.replace("plugins/", "")
+ output += "#include <{}.h>\n".format(s)
+ output += "#endif\n"
return output
ENDIAN_STRINGS = {
- 'u16': 'clib_net_to_host_u16',
- 'u32': 'clib_net_to_host_u32',
- 'u64': 'clib_net_to_host_u64',
- 'i16': 'clib_net_to_host_i16',
- 'i32': 'clib_net_to_host_i32',
- 'i64': 'clib_net_to_host_i64',
- 'f64': 'clib_net_to_host_f64',
+ "u16": "clib_net_to_host_u16",
+ "u32": "clib_net_to_host_u32",
+ "u64": "clib_net_to_host_u64",
+ "i16": "clib_net_to_host_i16",
+ "i32": "clib_net_to_host_i32",
+ "i64": "clib_net_to_host_i64",
+ "f64": "clib_net_to_host_f64",
}
def endianfun_array(o):
- '''Generate endian functions for arrays'''
- forloop = '''\
+ """Generate endian functions for arrays"""
+ forloop = """\
for (i = 0; i < {length}; i++) {{
a->{name}[i] = {format}(a->{name}[i]);
}}
-'''
+"""
- forloop_format = '''\
+ forloop_format = """\
for (i = 0; i < {length}; i++) {{
{type}_endian(&a->{name}[i]);
}}
-'''
+"""
- output = ''
- if o.fieldtype == 'u8' or o.fieldtype == 'string' or o.fieldtype == 'bool':
- output += ' /* a->{n} = a->{n} (no-op) */\n'.format(n=o.fieldname)
+ output = ""
+ if o.fieldtype == "u8" or o.fieldtype == "string" or o.fieldtype == "bool":
+ output += " /* a->{n} = a->{n} (no-op) */\n".format(n=o.fieldname)
else:
- lfield = 'a->' + o.lengthfield if o.lengthfield else o.length
+ lfield = "a->" + o.lengthfield if o.lengthfield else o.length
if o.fieldtype in ENDIAN_STRINGS:
- output += (forloop
- .format(length=lfield,
- format=ENDIAN_STRINGS[o.fieldtype],
- name=o.fieldname))
+ output += forloop.format(
+ length=lfield, format=ENDIAN_STRINGS[o.fieldtype], name=o.fieldname
+ )
else:
- output += (forloop_format
- .format(length=lfield, type=o.fieldtype,
- name=o.fieldname))
+ output += forloop_format.format(
+ length=lfield, type=o.fieldtype, name=o.fieldname
+ )
return output
-NO_ENDIAN_CONVERSION = {'client_index': None}
+NO_ENDIAN_CONVERSION = {"client_index": None}
def endianfun_obj(o):
- '''Generate endian conversion function for type'''
- output = ''
- if o.type == 'Array':
+ """Generate endian conversion function for type"""
+ output = ""
+ if o.type == "Array":
return endianfun_array(o)
- if o.type != 'Field':
- output += (' s = format(s, "\\n{} {} {} (print not implemented");\n'
- .format(o.type, o.fieldtype, o.fieldname))
+ if o.type != "Field":
+ output += ' s = format(s, "\\n{} {} {} (print not implemented");\n'.format(
+ o.type, o.fieldtype, o.fieldname
+ )
return output
if o.fieldname in NO_ENDIAN_CONVERSION:
- output += ' /* a->{n} = a->{n} (no-op) */\n'.format(n=o.fieldname)
+ output += " /* a->{n} = a->{n} (no-op) */\n".format(n=o.fieldname)
return output
if o.fieldtype in ENDIAN_STRINGS:
- output += (' a->{name} = {format}(a->{name});\n'
- .format(name=o.fieldname,
- format=ENDIAN_STRINGS[o.fieldtype]))
- elif o.fieldtype.startswith('vl_api_'):
- output += (' {type}_endian(&a->{name});\n'
- .format(type=o.fieldtype, name=o.fieldname))
+ output += " a->{name} = {format}(a->{name});\n".format(
+ name=o.fieldname, format=ENDIAN_STRINGS[o.fieldtype]
+ )
+ elif o.fieldtype.startswith("vl_api_"):
+ output += " {type}_endian(&a->{name});\n".format(
+ type=o.fieldtype, name=o.fieldname
+ )
else:
- output += ' /* a->{n} = a->{n} (no-op) */\n'.format(n=o.fieldname)
+ output += " /* a->{n} = a->{n} (no-op) */\n".format(n=o.fieldname)
return output
def endianfun(objs, modulename):
- '''Main entry point for endian function generation'''
- output = '''\
+ """Main entry point for endian function generation"""
+ output = """\
/****** Endian swap functions *****/\n\
#ifdef vl_endianfun
@@ -1136,51 +1222,50 @@ def endianfun(objs, modulename):
#define clib_net_to_host_uword clib_net_to_host_u32
#endif
-'''
+"""
output = output.format(module=modulename)
- signature = '''\
+ signature = """\
static inline void vl_api_{name}_t_endian (vl_api_{name}_t *a)
{{
int i __attribute__((unused));
-'''
+"""
for t in objs:
- if t.__class__.__name__ == 'Enum' or t.__class__.__name__ == 'EnumFlag':
+ if t.__class__.__name__ == "Enum" or t.__class__.__name__ == "EnumFlag":
output += signature.format(name=t.name)
if t.enumtype in ENDIAN_STRINGS:
- output += (' *a = {}(*a);\n'
- .format(ENDIAN_STRINGS[t.enumtype]))
+ output += " *a = {}(*a);\n".format(ENDIAN_STRINGS[t.enumtype])
else:
- output += (' /* a->{name} = a->{name} (no-op) */\n'
- .format(name=t.name))
+ output += " /* a->{name} = a->{name} (no-op) */\n".format(
+ name=t.name
+ )
- output += '}\n\n'
+ output += "}\n\n"
continue
if t.manual_endian:
output += "/***** manual: vl_api_%s_t_endian *****/\n\n" % t.name
continue
- if t.__class__.__name__ == 'Using':
+ if t.__class__.__name__ == "Using":
output += signature.format(name=t.name)
- if ('length' in t.alias and t.alias['length'] and
- t.alias['type'] == 'u8'):
- output += (' /* a->{name} = a->{name} (no-op) */\n'
- .format(name=t.name))
- elif t.alias['type'] in FORMAT_STRINGS:
- output += (' *a = {}(*a);\n'
- .format(ENDIAN_STRINGS[t.alias['type']]))
+ if "length" in t.alias and t.alias["length"] and t.alias["type"] == "u8":
+ output += " /* a->{name} = a->{name} (no-op) */\n".format(
+ name=t.name
+ )
+ elif t.alias["type"] in FORMAT_STRINGS:
+ output += " *a = {}(*a);\n".format(ENDIAN_STRINGS[t.alias["type"]])
else:
- output += ' /* Not Implemented yet {} */'.format(t.name)
- output += '}\n\n'
+ output += " /* Not Implemented yet {} */".format(t.name)
+ output += "}\n\n"
continue
output += signature.format(name=t.name)
for o in t.block:
output += endianfun_obj(o)
- output += '}\n\n'
+ output += "}\n\n"
output += "\n#endif"
output += "\n#endif /* vl_endianfun */\n\n"
@@ -1189,32 +1274,32 @@ static inline void vl_api_{name}_t_endian (vl_api_{name}_t *a)
def calc_size_fun(objs, modulename):
- '''Main entry point for calculate size function generation'''
- output = '''\
+ """Main entry point for calculate size function generation"""
+ output = """\
/****** Calculate size functions *****/\n\
#ifdef vl_calcsizefun
#ifndef included_{module}_calcsizefun
#define included_{module}_calcsizefun
-'''
+"""
output = output.format(module=modulename)
- signature = '''\
+ signature = """\
/* calculate message size of message in network byte order */
static inline uword vl_api_{name}_t_calc_size (vl_api_{name}_t *a)
{{
-'''
+"""
for o in objs:
tname = o.__class__.__name__
output += signature.format(name=o.name)
output += f" return sizeof(*a)"
- if tname == 'Using':
- if 'length' in o.alias:
+ if tname == "Using":
+ if "length" in o.alias:
try:
- tmp = int(o.alias['length'])
+ tmp = int(o.alias["length"])
if tmp == 0:
raise (f"Unexpected length '0' for alias {o}")
except:
@@ -1224,36 +1309,44 @@ static inline uword vl_api_{name}_t_calc_size (vl_api_{name}_t *a)
print(dir(o.alias))
print(o.alias)
raise
- elif tname == 'Enum' or tname == 'EnumFlag':
+ elif tname == "Enum" or tname == "EnumFlag":
pass
else:
for b in o.block:
- if b.type == 'Option':
+ if b.type == "Option":
continue
- elif b.type == 'Field':
- if b.fieldtype.startswith('vl_api_'):
+ elif b.type == "Field":
+ if b.fieldtype.startswith("vl_api_"):
output += f" - sizeof(a->{b.fieldname})"
output += f" + {b.fieldtype}_calc_size(&a->{b.fieldname})"
- elif b.type == 'Array':
+ elif b.type == "Array":
if b.lengthfield:
- m = list(filter(lambda x: x.fieldname == b.lengthfield, o.block))
+ m = list(
+ filter(lambda x: x.fieldname == b.lengthfield, o.block)
+ )
if len(m) != 1:
- raise Exception(f"Expected 1 match for field '{b.lengthfield}', got '{m}'")
+ raise Exception(
+ f"Expected 1 match for field '{b.lengthfield}', got '{m}'"
+ )
lf = m[0]
if lf.fieldtype in ENDIAN_STRINGS:
output += f" + {ENDIAN_STRINGS[lf.fieldtype]}(a->{b.lengthfield}) * sizeof(a->{b.fieldname}[0])"
elif lf.fieldtype == "u8":
- output += f" + a->{b.lengthfield} * sizeof(a->{b.fieldname}[0])"
+ output += (
+ f" + a->{b.lengthfield} * sizeof(a->{b.fieldname}[0])"
+ )
else:
- raise Exception(f"Don't know how to endian swap {lf.fieldtype}")
+ raise Exception(
+ f"Don't know how to endian swap {lf.fieldtype}"
+ )
else:
# Fixed length strings decay to nul terminated u8
- if b.fieldtype == 'string':
+ if b.fieldtype == "string":
if b.modern_vla:
output += f" + vl_api_string_len(&a->{b.fieldname})"
output += ";\n"
- output += '}\n\n'
+ output += "}\n\n"
output += "\n#endif"
output += "\n#endif /* vl_calcsizefun */\n\n"
@@ -1261,18 +1354,22 @@ static inline uword vl_api_{name}_t_calc_size (vl_api_{name}_t *a)
def version_tuple(s, module):
- '''Generate semantic version string'''
- output = '''\
+ """Generate semantic version string"""
+ output = """\
/****** Version tuple *****/
#ifdef vl_api_version_tuple
-'''
- if 'version' in s['Option']:
- v = s['Option']['version']
- (major, minor, patch) = v.split('.')
- output += "vl_api_version_tuple(%s, %s, %s, %s)\n" % \
- (module, major, minor, patch)
+"""
+ if "version" in s["Option"]:
+ v = s["Option"]["version"]
+ (major, minor, patch) = v.split(".")
+ output += "vl_api_version_tuple(%s, %s, %s, %s)\n" % (
+ module,
+ major,
+ minor,
+ patch,
+ )
output += "\n#endif /* vl_api_version_tuple */\n\n"
@@ -1280,131 +1377,145 @@ def version_tuple(s, module):
def generate_include_enum(s, module, stream):
- '''Generate <name>.api_enum.h'''
+ """Generate <name>.api_enum.h"""
write = stream.write
- if 'Define' in s:
- write('typedef enum {\n')
- for t in s['Define']:
- write(' VL_API_{},\n'.format(t.name.upper()))
- write(' VL_MSG_{}_LAST\n'.format(module.upper()))
- write('}} vl_api_{}_enum_t;\n'.format(module))
+ if "Define" in s:
+ write("typedef enum {\n")
+ for t in s["Define"]:
+ write(" VL_API_{},\n".format(t.name.upper()))
+ write(" VL_MSG_{}_LAST\n".format(module.upper()))
+ write("}} vl_api_{}_enum_t;\n".format(module))
def generate_include_counters(s, stream):
- '''Include file for the counter data model types.'''
+ """Include file for the counter data model types."""
write = stream.write
for counters in s:
csetname = counters.name
- write('typedef enum {\n')
+ write("typedef enum {\n")
for c in counters.block:
- write(' {}_ERROR_{},\n'
- .format(csetname.upper(), c['name'].upper()))
- write(' {}_N_ERROR\n'.format(csetname.upper()))
- write('}} vl_counter_{}_enum_t;\n'.format(csetname))
+ write(" {}_ERROR_{},\n".format(csetname.upper(), c["name"].upper()))
+ write(" {}_N_ERROR\n".format(csetname.upper()))
+ write("}} vl_counter_{}_enum_t;\n".format(csetname))
- write('extern vlib_error_desc_t {}_error_counters[];\n'.format(csetname))
+ write("extern vlib_error_desc_t {}_error_counters[];\n".format(csetname))
def generate_include_types(s, module, stream):
- '''Generate separate API _types file.'''
+ """Generate separate API _types file."""
write = stream.write
- write('#ifndef included_{module}_api_types_h\n'.format(module=module))
- write('#define included_{module}_api_types_h\n'.format(module=module))
-
- if 'version' in s['Option']:
- v = s['Option']['version']
- (major, minor, patch) = v.split('.')
- write('#define VL_API_{m}_API_VERSION_MAJOR {v}\n'
- .format(m=module.upper(), v=major))
- write('#define VL_API_{m}_API_VERSION_MINOR {v}\n'
- .format(m=module.upper(), v=minor))
- write('#define VL_API_{m}_API_VERSION_PATCH {v}\n'
- .format(m=module.upper(), v=patch))
-
- if 'Import' in s:
- write('/* Imported API files */\n')
- for i in s['Import']:
- filename = i.filename.replace('plugins/', '')
- write('#include <{}_types.h>\n'.format(filename))
-
- for o in itertools.chain(s['types'], s['Define']):
+ write("#ifndef included_{module}_api_types_h\n".format(module=module))
+ write("#define included_{module}_api_types_h\n".format(module=module))
+
+ if "version" in s["Option"]:
+ v = s["Option"]["version"]
+ (major, minor, patch) = v.split(".")
+ write(
+ "#define VL_API_{m}_API_VERSION_MAJOR {v}\n".format(
+ m=module.upper(), v=major
+ )
+ )
+ write(
+ "#define VL_API_{m}_API_VERSION_MINOR {v}\n".format(
+ m=module.upper(), v=minor
+ )
+ )
+ write(
+ "#define VL_API_{m}_API_VERSION_PATCH {v}\n".format(
+ m=module.upper(), v=patch
+ )
+ )
+
+ if "Import" in s:
+ write("/* Imported API files */\n")
+ for i in s["Import"]:
+ filename = i.filename.replace("plugins/", "")
+ write("#include <{}_types.h>\n".format(filename))
+
+ for o in itertools.chain(s["types"], s["Define"]):
tname = o.__class__.__name__
- if tname == 'Using':
- if 'length' in o.alias:
- write('typedef %s vl_api_%s_t[%s];\n' %
- (o.alias['type'], o.name, o.alias['length']))
+ if tname == "Using":
+ if "length" in o.alias:
+ write(
+ "typedef %s vl_api_%s_t[%s];\n"
+ % (o.alias["type"], o.name, o.alias["length"])
+ )
else:
- write('typedef %s vl_api_%s_t;\n' % (o.alias['type'], o.name))
- elif tname == 'Enum' or tname == 'EnumFlag':
- if o.enumtype == 'u32':
+ write("typedef %s vl_api_%s_t;\n" % (o.alias["type"], o.name))
+ elif tname == "Enum" or tname == "EnumFlag":
+ if o.enumtype == "u32":
write("typedef enum {\n")
else:
write("typedef enum __attribute__((packed)) {\n")
for b in o.block:
write(" %s = %s,\n" % (b[0], b[1]))
- write('} vl_api_%s_t;\n' % o.name)
- if o.enumtype != 'u32':
- size1 = 'sizeof(vl_api_%s_t)' % o.name
- size2 = 'sizeof(%s)' % o.enumtype
- err_str = 'size of API enum %s is wrong' % o.name
- write('STATIC_ASSERT(%s == %s, "%s");\n'
- % (size1, size2, err_str))
+ write("} vl_api_%s_t;\n" % o.name)
+ if o.enumtype != "u32":
+ size1 = "sizeof(vl_api_%s_t)" % o.name
+ size2 = "sizeof(%s)" % o.enumtype
+ err_str = "size of API enum %s is wrong" % o.name
+ write('STATIC_ASSERT(%s == %s, "%s");\n' % (size1, size2, err_str))
else:
- if tname == 'Union':
- write("typedef union __attribute__ ((packed)) _vl_api_%s {\n"
- % o.name)
+ if tname == "Union":
+ write("typedef union __attribute__ ((packed)) _vl_api_%s {\n" % o.name)
else:
- write(("typedef struct __attribute__ ((packed)) _vl_api_%s {\n")
- % o.name)
+ write(
+ ("typedef struct __attribute__ ((packed)) _vl_api_%s {\n") % o.name
+ )
for b in o.block:
- if b.type == 'Option':
+ if b.type == "Option":
continue
- if b.type == 'Field':
- write(" %s %s;\n" % (api2c(b.fieldtype),
- b.fieldname))
- elif b.type == 'Array':
+ if b.type == "Field":
+ write(" %s %s;\n" % (api2c(b.fieldtype), b.fieldname))
+ elif b.type == "Array":
if b.lengthfield:
- write(" %s %s[0];\n" % (api2c(b.fieldtype),
- b.fieldname))
+ write(" %s %s[0];\n" % (api2c(b.fieldtype), b.fieldname))
else:
# Fixed length strings decay to nul terminated u8
- if b.fieldtype == 'string':
+ if b.fieldtype == "string":
if b.modern_vla:
- write(' {} {};\n'
- .format(api2c(b.fieldtype),
- b.fieldname))
+ write(
+ " {} {};\n".format(
+ api2c(b.fieldtype), b.fieldname
+ )
+ )
else:
- write(' u8 {}[{}];\n'
- .format(b.fieldname, b.length))
+ write(" u8 {}[{}];\n".format(b.fieldname, b.length))
else:
- write(" %s %s[%s];\n" %
- (api2c(b.fieldtype), b.fieldname,
- b.length))
+ write(
+ " %s %s[%s];\n"
+ % (api2c(b.fieldtype), b.fieldname, b.length)
+ )
else:
- raise ValueError("Error in processing type {} for {}"
- .format(b, o.name))
+ raise ValueError(
+ "Error in processing type {} for {}".format(b, o.name)
+ )
- write('} vl_api_%s_t;\n' % o.name)
- write(f'#define VL_API_{o.name.upper()}_IS_CONSTANT_SIZE ({0 if o.vla else 1})\n\n')
+ write("} vl_api_%s_t;\n" % o.name)
+ write(
+ f"#define VL_API_{o.name.upper()}_IS_CONSTANT_SIZE ({0 if o.vla else 1})\n\n"
+ )
- for t in s['Define']:
- write('#define VL_API_{ID}_CRC "{n}_{crc:08x}"\n'
- .format(n=t.name, ID=t.name.upper(), crc=t.crc))
+ for t in s["Define"]:
+ write(
+ '#define VL_API_{ID}_CRC "{n}_{crc:08x}"\n'.format(
+ n=t.name, ID=t.name.upper(), crc=t.crc
+ )
+ )
write("\n#endif\n")
-def generate_c_boilerplate(services, defines, counters, file_crc,
- module, stream):
- '''VPP side plugin.'''
+def generate_c_boilerplate(services, defines, counters, file_crc, module, stream):
+ """VPP side plugin."""
write = stream.write
define_hash = {d.name: d for d in defines}
- hdr = '''\
+ hdr = """\
#define vl_endianfun /* define message structures */
#include "{module}.api.h"
#undef vl_endianfun
@@ -1419,89 +1530,98 @@ def generate_c_boilerplate(services, defines, counters, file_crc,
#include "{module}.api.h"
#undef vl_printfun
-'''
+"""
write(hdr.format(module=module))
- write('static u16\n')
- write('setup_message_id_table (void) {\n')
- write(' api_main_t *am = my_api_main;\n')
- write(' vl_msg_api_msg_config_t c;\n')
- write(' u16 msg_id_base = vl_msg_api_get_msg_ids ("{}_{crc:08x}", '
- 'VL_MSG_{m}_LAST);\n'
- .format(module, crc=file_crc, m=module.upper()))
+ write("static u16\n")
+ write("setup_message_id_table (void) {\n")
+ write(" api_main_t *am = my_api_main;\n")
+ write(" vl_msg_api_msg_config_t c;\n")
+ write(
+ ' u16 msg_id_base = vl_msg_api_get_msg_ids ("{}_{crc:08x}", '
+ "VL_MSG_{m}_LAST);\n".format(module, crc=file_crc, m=module.upper())
+ )
for d in defines:
- write(' vl_msg_api_add_msg_name_crc (am, "{n}_{crc:08x}",\n'
- ' VL_API_{ID} + msg_id_base);\n'
- .format(n=d.name, ID=d.name.upper(), crc=d.crc))
+ write(
+ ' vl_msg_api_add_msg_name_crc (am, "{n}_{crc:08x}",\n'
+ " VL_API_{ID} + msg_id_base);\n".format(
+ n=d.name, ID=d.name.upper(), crc=d.crc
+ )
+ )
for s in services:
d = define_hash[s.caller]
- write(' c = (vl_msg_api_msg_config_t) '
- ' {{.id = VL_API_{ID} + msg_id_base,\n'
- ' .name = "{n}",\n'
- ' .handler = vl_api_{n}_t_handler,\n'
- ' .cleanup = vl_noop_handler,\n'
- ' .endian = vl_api_{n}_t_endian,\n'
- ' .print = vl_api_{n}_t_print,\n'
- ' .traced = 1,\n'
- ' .replay = 1,\n'
- ' .print_json = vl_api_{n}_t_print_json,\n'
- ' .tojson = vl_api_{n}_t_tojson,\n'
- ' .fromjson = vl_api_{n}_t_fromjson,\n'
- ' .calc_size = vl_api_{n}_t_calc_size,\n'
- ' .is_autoendian = {auto}}};\n'
- .format(n=s.caller, ID=s.caller.upper(),
- auto=d.autoendian))
- write(' vl_msg_api_config (&c);\n')
+ write(
+ " c = (vl_msg_api_msg_config_t) "
+ " {{.id = VL_API_{ID} + msg_id_base,\n"
+ ' .name = "{n}",\n'
+ " .handler = vl_api_{n}_t_handler,\n"
+ " .cleanup = vl_noop_handler,\n"
+ " .endian = vl_api_{n}_t_endian,\n"
+ " .print = vl_api_{n}_t_print,\n"
+ " .traced = 1,\n"
+ " .replay = 1,\n"
+ " .print_json = vl_api_{n}_t_print_json,\n"
+ " .tojson = vl_api_{n}_t_tojson,\n"
+ " .fromjson = vl_api_{n}_t_fromjson,\n"
+ " .calc_size = vl_api_{n}_t_calc_size,\n"
+ " .is_autoendian = {auto}}};\n".format(
+ n=s.caller, ID=s.caller.upper(), auto=d.autoendian
+ )
+ )
+ write(" vl_msg_api_config (&c);\n")
try:
d = define_hash[s.reply]
- write(' c = (vl_msg_api_msg_config_t) '
- '{{.id = VL_API_{ID} + msg_id_base,\n'
- ' .name = "{n}",\n'
- ' .handler = 0,\n'
- ' .cleanup = vl_noop_handler,\n'
- ' .endian = vl_api_{n}_t_endian,\n'
- ' .print = vl_api_{n}_t_print,\n'
- ' .traced = 1,\n'
- ' .replay = 1,\n'
- ' .print_json = vl_api_{n}_t_print_json,\n'
- ' .tojson = vl_api_{n}_t_tojson,\n'
- ' .fromjson = vl_api_{n}_t_fromjson,\n'
- ' .calc_size = vl_api_{n}_t_calc_size,\n'
- ' .is_autoendian = {auto}}};\n'
- .format(n=s.reply, ID=s.reply.upper(),
- auto=d.autoendian))
- write(' vl_msg_api_config (&c);\n')
+ write(
+ " c = (vl_msg_api_msg_config_t) "
+ "{{.id = VL_API_{ID} + msg_id_base,\n"
+ ' .name = "{n}",\n'
+ " .handler = 0,\n"
+ " .cleanup = vl_noop_handler,\n"
+ " .endian = vl_api_{n}_t_endian,\n"
+ " .print = vl_api_{n}_t_print,\n"
+ " .traced = 1,\n"
+ " .replay = 1,\n"
+ " .print_json = vl_api_{n}_t_print_json,\n"
+ " .tojson = vl_api_{n}_t_tojson,\n"
+ " .fromjson = vl_api_{n}_t_fromjson,\n"
+ " .calc_size = vl_api_{n}_t_calc_size,\n"
+ " .is_autoendian = {auto}}};\n".format(
+ n=s.reply, ID=s.reply.upper(), auto=d.autoendian
+ )
+ )
+ write(" vl_msg_api_config (&c);\n")
except KeyError:
pass
- write(' return msg_id_base;\n')
- write('}\n')
+ write(" return msg_id_base;\n")
+ write("}\n")
- severity = {'error': 'VL_COUNTER_SEVERITY_ERROR',
- 'info': 'VL_COUNTER_SEVERITY_INFO',
- 'warn': 'VL_COUNTER_SEVERITY_WARN'}
+ severity = {
+ "error": "VL_COUNTER_SEVERITY_ERROR",
+ "info": "VL_COUNTER_SEVERITY_INFO",
+ "warn": "VL_COUNTER_SEVERITY_WARN",
+ }
for cnt in counters:
csetname = cnt.name
- write('vlib_error_desc_t {}_error_counters[] = {{\n'.format(csetname))
+ write("vlib_error_desc_t {}_error_counters[] = {{\n".format(csetname))
for c in cnt.block:
- write(' {\n')
- write(' .name = "{}",\n'.format(c['name']))
- write(' .desc = "{}",\n'.format(c['description']))
- write(' .severity = {},\n'.format(severity[c['severity']]))
- write(' },\n')
- write('};\n')
+ write(" {\n")
+ write(' .name = "{}",\n'.format(c["name"]))
+ write(' .desc = "{}",\n'.format(c["description"]))
+ write(" .severity = {},\n".format(severity[c["severity"]]))
+ write(" },\n")
+ write("};\n")
-def generate_c_test_boilerplate(services, defines, file_crc, module, plugin,
- stream):
- '''Generate code for legacy style VAT. To be deleted.'''
+def generate_c_test_boilerplate(services, defines, file_crc, module, plugin, stream):
+ """Generate code for legacy style VAT. To be deleted."""
write = stream.write
define_hash = {d.name: d for d in defines}
- hdr = '''\
+ hdr = """\
#define vl_endianfun /* define message structures */
#include "{module}.api.h"
#undef vl_endianfun
@@ -1516,7 +1636,7 @@ def generate_c_test_boilerplate(services, defines, file_crc, module, plugin,
#include "{module}.api.h"
#undef vl_printfun
-'''
+"""
write(hdr.format(module=module))
for s in services:
@@ -1525,113 +1645,133 @@ def generate_c_test_boilerplate(services, defines, file_crc, module, plugin,
except KeyError:
continue
if d.manual_print:
- write('/*\n'
- ' * Manual definition requested for: \n'
- ' * vl_api_{n}_t_handler()\n'
- ' */\n'
- .format(n=s.reply))
+ write(
+ "/*\n"
+ " * Manual definition requested for: \n"
+ " * vl_api_{n}_t_handler()\n"
+ " */\n".format(n=s.reply)
+ )
continue
if not define_hash[s.caller].autoreply:
- write('/* Generation not supported (vl_api_{n}_t_handler()) */\n'
- .format(n=s.reply))
+ write(
+ "/* Generation not supported (vl_api_{n}_t_handler()) */\n".format(
+ n=s.reply
+ )
+ )
continue
- write('#ifndef VL_API_{n}_T_HANDLER\n'.format(n=s.reply.upper()))
- write('static void\n')
- write('vl_api_{n}_t_handler (vl_api_{n}_t * mp) {{\n'
- .format(n=s.reply))
- write(' vat_main_t * vam = {}_test_main.vat_main;\n'.format(module))
- write(' i32 retval = ntohl(mp->retval);\n')
- write(' if (vam->async_mode) {\n')
- write(' vam->async_errors += (retval < 0);\n')
- write(' } else {\n')
- write(' vam->retval = retval;\n')
- write(' vam->result_ready = 1;\n')
- write(' }\n')
- write('}\n')
- write('#endif\n')
+ write("#ifndef VL_API_{n}_T_HANDLER\n".format(n=s.reply.upper()))
+ write("static void\n")
+ write("vl_api_{n}_t_handler (vl_api_{n}_t * mp) {{\n".format(n=s.reply))
+ write(" vat_main_t * vam = {}_test_main.vat_main;\n".format(module))
+ write(" i32 retval = ntohl(mp->retval);\n")
+ write(" if (vam->async_mode) {\n")
+ write(" vam->async_errors += (retval < 0);\n")
+ write(" } else {\n")
+ write(" vam->retval = retval;\n")
+ write(" vam->result_ready = 1;\n")
+ write(" }\n")
+ write("}\n")
+ write("#endif\n")
for e in s.events:
if define_hash[e].manual_print:
continue
- write('static void\n')
- write('vl_api_{n}_t_handler (vl_api_{n}_t * mp) {{\n'.format(n=e))
+ write("static void\n")
+ write("vl_api_{n}_t_handler (vl_api_{n}_t * mp) {{\n".format(n=e))
write(' vl_print(0, "{n} event called:");\n'.format(n=e))
- write(' vl_api_{n}_t_print(mp, 0);\n'.format(n=e))
- write('}\n')
+ write(" vl_api_{n}_t_print(mp, 0);\n".format(n=e))
+ write("}\n")
- write('static void\n')
- write('setup_message_id_table (vat_main_t * vam, u16 msg_id_base) {\n')
+ write("static void\n")
+ write("setup_message_id_table (vat_main_t * vam, u16 msg_id_base) {\n")
for s in services:
- write(' vl_msg_api_set_handlers(VL_API_{ID} + msg_id_base, '
- ' "{n}",\n'
- ' vl_api_{n}_t_handler, '
- ' vl_noop_handler,\n'
- ' vl_api_{n}_t_endian, '
- ' vl_api_{n}_t_print,\n'
- ' sizeof(vl_api_{n}_t), 1,\n'
- ' vl_api_{n}_t_print_json,\n'
- ' vl_api_{n}_t_tojson,\n'
- ' vl_api_{n}_t_fromjson,\n'
- ' vl_api_{n}_t_calc_size);\n'
- .format(n=s.reply, ID=s.reply.upper()))
- write(' hash_set_mem (vam->function_by_name, "{n}", api_{n});\n'
- .format(n=s.caller))
+ write(
+ " vl_msg_api_set_handlers(VL_API_{ID} + msg_id_base, "
+ ' "{n}",\n'
+ " vl_api_{n}_t_handler, "
+ " vl_noop_handler,\n"
+ " vl_api_{n}_t_endian, "
+ " vl_api_{n}_t_print,\n"
+ " sizeof(vl_api_{n}_t), 1,\n"
+ " vl_api_{n}_t_print_json,\n"
+ " vl_api_{n}_t_tojson,\n"
+ " vl_api_{n}_t_fromjson,\n"
+ " vl_api_{n}_t_calc_size);\n".format(
+ n=s.reply, ID=s.reply.upper()
+ )
+ )
+ write(
+ ' hash_set_mem (vam->function_by_name, "{n}", api_{n});\n'.format(
+ n=s.caller
+ )
+ )
try:
- write(' hash_set_mem (vam->help_by_name, "{n}", "{help}");\n'
- .format(n=s.caller,
- help=define_hash[s.caller].options['vat_help']))
+ write(
+ ' hash_set_mem (vam->help_by_name, "{n}", "{help}");\n'.format(
+ n=s.caller, help=define_hash[s.caller].options["vat_help"]
+ )
+ )
except KeyError:
pass
# Events
for e in s.events:
- write(' vl_msg_api_set_handlers(VL_API_{ID} + msg_id_base, '
- ' "{n}",\n'
- ' vl_api_{n}_t_handler, '
- ' vl_noop_handler,\n'
- ' vl_api_{n}_t_endian, '
- ' vl_api_{n}_t_print,\n'
- ' sizeof(vl_api_{n}_t), 1,\n'
- ' vl_api_{n}_t_print_json,\n'
- ' vl_api_{n}_t_tojson,\n'
- ' vl_api_{n}_t_fromjson,\n'
- ' vl_api_{n}_t_calc_size);\n'
- .format(n=e, ID=e.upper()))
-
- write('}\n')
- write('clib_error_t * vat_plugin_register (vat_main_t *vam)\n')
- write('{\n')
- write(' {n}_test_main_t * mainp = &{n}_test_main;\n'.format(n=module))
- write(' mainp->vat_main = vam;\n')
- write(' mainp->msg_id_base = vl_client_get_first_plugin_msg_id '
- ' ("{n}_{crc:08x}");\n'
- .format(n=module, crc=file_crc))
- write(' if (mainp->msg_id_base == (u16) ~0)\n')
- write(' return clib_error_return (0, "{} plugin not loaded...");\n'
- .format(module))
- write(' setup_message_id_table (vam, mainp->msg_id_base);\n')
- write('#ifdef VL_API_LOCAL_SETUP_MESSAGE_ID_TABLE\n')
- write(' VL_API_LOCAL_SETUP_MESSAGE_ID_TABLE(vam);\n')
- write('#endif\n')
- write(' return 0;\n')
- write('}\n')
+ write(
+ " vl_msg_api_set_handlers(VL_API_{ID} + msg_id_base, "
+ ' "{n}",\n'
+ " vl_api_{n}_t_handler, "
+ " vl_noop_handler,\n"
+ " vl_api_{n}_t_endian, "
+ " vl_api_{n}_t_print,\n"
+ " sizeof(vl_api_{n}_t), 1,\n"
+ " vl_api_{n}_t_print_json,\n"
+ " vl_api_{n}_t_tojson,\n"
+ " vl_api_{n}_t_fromjson,\n"
+ " vl_api_{n}_t_calc_size);\n".format(
+ n=e, ID=e.upper()
+ )
+ )
+
+ write("}\n")
+ write("clib_error_t * vat_plugin_register (vat_main_t *vam)\n")
+ write("{\n")
+ write(" {n}_test_main_t * mainp = &{n}_test_main;\n".format(n=module))
+ write(" mainp->vat_main = vam;\n")
+ write(
+ " mainp->msg_id_base = vl_client_get_first_plugin_msg_id "
+ ' ("{n}_{crc:08x}");\n'.format(n=module, crc=file_crc)
+ )
+ write(" if (mainp->msg_id_base == (u16) ~0)\n")
+ write(
+ ' return clib_error_return (0, "{} plugin not loaded...");\n'.format(
+ module
+ )
+ )
+ write(" setup_message_id_table (vam, mainp->msg_id_base);\n")
+ write("#ifdef VL_API_LOCAL_SETUP_MESSAGE_ID_TABLE\n")
+ write(" VL_API_LOCAL_SETUP_MESSAGE_ID_TABLE(vam);\n")
+ write("#endif\n")
+ write(" return 0;\n")
+ write("}\n")
def apifunc(func):
- '''Check if a method is generated already.'''
+ """Check if a method is generated already."""
+
def _f(module, d, processed, *args):
if d.name in processed:
return None
processed[d.name] = True
return func(module, d, *args)
+
return _f
def c_test_api_service(s, dump, stream):
- '''Generate JSON code for a service.'''
+ """Generate JSON code for a service."""
write = stream.write
- req_reply_template = '''\
+ req_reply_template = """\
static cJSON *
api_{n} (cJSON *o)
{{
@@ -1664,8 +1804,8 @@ api_{n} (cJSON *o)
return vl_api_{r}_t_tojson(rmp);
}}
-'''
- dump_details_template = '''\
+"""
+ dump_details_template = """\
static cJSON *
api_{n} (cJSON *o)
{{
@@ -1719,8 +1859,8 @@ api_{n} (cJSON *o)
return reply;
}}
-'''
- gets_details_reply_template = '''\
+"""
+ gets_details_reply_template = """\
static cJSON *
api_{n} (cJSON *o)
{{
@@ -1769,32 +1909,42 @@ api_{n} (cJSON *o)
return reply;
}}
-'''
+"""
if dump:
if s.stream_message:
- write(gets_details_reply_template
- .format(n=s.caller, r=s.reply, N=s.caller.upper(),
- R=s.reply.upper(), d=s.stream_message,
- D=s.stream_message.upper()))
+ write(
+ gets_details_reply_template.format(
+ n=s.caller,
+ r=s.reply,
+ N=s.caller.upper(),
+ R=s.reply.upper(),
+ d=s.stream_message,
+ D=s.stream_message.upper(),
+ )
+ )
else:
- write(dump_details_template.format(n=s.caller, r=s.reply,
- N=s.caller.upper(),
- R=s.reply.upper()))
+ write(
+ dump_details_template.format(
+ n=s.caller, r=s.reply, N=s.caller.upper(), R=s.reply.upper()
+ )
+ )
else:
- write(req_reply_template.format(n=s.caller, r=s.reply,
- N=s.caller.upper(),
- R=s.reply.upper()))
+ write(
+ req_reply_template.format(
+ n=s.caller, r=s.reply, N=s.caller.upper(), R=s.reply.upper()
+ )
+ )
def generate_c_test2_boilerplate(services, defines, module, stream):
- '''Generate code for VAT2 plugin.'''
+ """Generate code for VAT2 plugin."""
write = stream.write
define_hash = {d.name: d for d in defines}
# replies = {}
- hdr = '''\
+ hdr = """\
#include <vlibapi/api.h>
#include <vlibmemory/api.h>
#include <vppinfra/error.h>
@@ -1829,7 +1979,7 @@ def generate_c_test2_boilerplate(services, defines, module, stream):
#include <vat2/vat2_helpers.h>
-'''
+"""
write(hdr.format(module=module))
@@ -1838,123 +1988,123 @@ def generate_c_test2_boilerplate(services, defines, module, stream):
continue
c_test_api_service(s, s.stream, stream)
- write('void vat2_register_function(char *, cJSON * (*)(cJSON *), cJSON * (*)(void *), u32);\n')
+ write(
+ "void vat2_register_function(char *, cJSON * (*)(cJSON *), cJSON * (*)(void *), u32);\n"
+ )
# write('__attribute__((constructor))')
- write('clib_error_t *\n')
- write('vat2_register_plugin (void) {\n')
+ write("clib_error_t *\n")
+ write("vat2_register_plugin (void) {\n")
for s in services:
if s.reply not in define_hash:
continue
crc = define_hash[s.caller].crc
- write(' vat2_register_function("{n}", api_{n}, (cJSON * (*)(void *))vl_api_{n}_t_tojson, 0x{crc:08x});\n'
- .format(n=s.caller, crc=crc))
- write(' return 0;\n')
- write('}\n')
+ write(
+ ' vat2_register_function("{n}", api_{n}, (cJSON * (*)(void *))vl_api_{n}_t_tojson, 0x{crc:08x});\n'.format(
+ n=s.caller, crc=crc
+ )
+ )
+ write(" return 0;\n")
+ write("}\n")
#
# Plugin entry point
#
def run(args, apifilename, s):
- '''Main plugin entry point.'''
+ """Main plugin entry point."""
stream = StringIO()
if not args.outputdir:
- sys.stderr.write('Missing --outputdir argument')
+ sys.stderr.write("Missing --outputdir argument")
return None
basename = os.path.basename(apifilename)
filename, _ = os.path.splitext(basename)
- modulename = filename.replace('.', '_')
- filename_enum = os.path.join(args.outputdir + '/' + basename + '_enum.h')
- filename_types = os.path.join(args.outputdir + '/' + basename + '_types.h')
- filename_c = os.path.join(args.outputdir + '/' + basename + '.c')
- filename_c_test = os.path.join(args.outputdir + '/' + basename + '_test.c')
- filename_c_test2 = (os.path.join(args.outputdir + '/' + basename +
- '_test2.c'))
- filename_c_tojson = (os.path.join(args.outputdir +
- '/' + basename + '_tojson.h'))
- filename_c_fromjson = (os.path.join(args.outputdir + '/' +
- basename + '_fromjson.h'))
+ modulename = filename.replace(".", "_")
+ filename_enum = os.path.join(args.outputdir + "/" + basename + "_enum.h")
+ filename_types = os.path.join(args.outputdir + "/" + basename + "_types.h")
+ filename_c = os.path.join(args.outputdir + "/" + basename + ".c")
+ filename_c_test = os.path.join(args.outputdir + "/" + basename + "_test.c")
+ filename_c_test2 = os.path.join(args.outputdir + "/" + basename + "_test2.c")
+ filename_c_tojson = os.path.join(args.outputdir + "/" + basename + "_tojson.h")
+ filename_c_fromjson = os.path.join(args.outputdir + "/" + basename + "_fromjson.h")
# Generate separate types file
st = StringIO()
generate_include_types(s, modulename, st)
- with open(filename_types, 'w') as fd:
+ with open(filename_types, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
st.close()
# Generate separate enum file
st = StringIO()
- st.write('#ifndef included_{}_api_enum_h\n'.format(modulename))
- st.write('#define included_{}_api_enum_h\n'.format(modulename))
+ st.write("#ifndef included_{}_api_enum_h\n".format(modulename))
+ st.write("#define included_{}_api_enum_h\n".format(modulename))
generate_include_enum(s, modulename, st)
- generate_include_counters(s['Counters'], st)
- st.write('#endif\n')
- with open(filename_enum, 'w') as fd:
+ generate_include_counters(s["Counters"], st)
+ st.write("#endif\n")
+ with open(filename_enum, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
st.close()
# Generate separate C file
st = StringIO()
- generate_c_boilerplate(s['Service'], s['Define'], s['Counters'],
- s['file_crc'], modulename, st)
- with open(filename_c, 'w') as fd:
+ generate_c_boilerplate(
+ s["Service"], s["Define"], s["Counters"], s["file_crc"], modulename, st
+ )
+ with open(filename_c, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
st.close()
# Generate separate C test file
st = StringIO()
- plugin = bool('plugin' in apifilename)
- generate_c_test_boilerplate(s['Service'], s['Define'],
- s['file_crc'],
- modulename, plugin, st)
- with open(filename_c_test, 'w') as fd:
+ plugin = bool("plugin" in apifilename)
+ generate_c_test_boilerplate(
+ s["Service"], s["Define"], s["file_crc"], modulename, plugin, st
+ )
+ with open(filename_c_test, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
st.close()
# Fully autogenerated VATv2 C test file
st = StringIO()
- generate_c_test2_boilerplate(s['Service'], s['Define'],
- modulename, st)
- with open(filename_c_test2, 'w') as fd:
+ generate_c_test2_boilerplate(s["Service"], s["Define"], modulename, st)
+ with open(filename_c_test2, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
- st.close() #
+ st.close() #
# Generate separate JSON file
st = StringIO()
generate_tojson(s, modulename, st)
- with open(filename_c_tojson, 'w') as fd:
+ with open(filename_c_tojson, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
st.close()
st = StringIO()
generate_fromjson(s, modulename, st)
- with open(filename_c_fromjson, 'w') as fd:
+ with open(filename_c_fromjson, "w") as fd:
st.seek(0)
shutil.copyfileobj(st, fd)
st.close()
- output = TOP_BOILERPLATE.format(datestring=DATESTRING,
- input_filename=basename)
- output += generate_imports(s['Import'])
+ output = TOP_BOILERPLATE.format(datestring=DATESTRING, input_filename=basename)
+ output += generate_imports(s["Import"])
output += msg_ids(s)
output += msg_names(s)
output += msg_name_crc_list(s, filename)
output += typedefs(modulename)
- printfun_types(s['types'], stream, modulename)
- printfun(s['Define'], stream, modulename)
+ printfun_types(s["types"], stream, modulename)
+ printfun(s["Define"], stream, modulename)
output += stream.getvalue()
stream.close()
- output += endianfun(s['types'] + s['Define'], modulename)
- output += calc_size_fun(s['types'] + s['Define'], modulename)
+ output += endianfun(s["types"] + s["Define"], modulename)
+ output += calc_size_fun(s["types"] + s["Define"], modulename)
output += version_tuple(s, basename)
- output += BOTTOM_BOILERPLATE.format(input_filename=basename,
- file_crc=s['file_crc'])
+ output += BOTTOM_BOILERPLATE.format(input_filename=basename, file_crc=s["file_crc"])
return output
diff --git a/src/tools/vppapigen/vppapigen_crc.py b/src/tools/vppapigen/vppapigen_crc.py
index 791e347292e..525f6c07efc 100644
--- a/src/tools/vppapigen/vppapigen_crc.py
+++ b/src/tools/vppapigen/vppapigen_crc.py
@@ -12,11 +12,10 @@ def run(args, input_filename, s):
major = 0
minor = 0
patch = 0
- if 'version' in s['Option']:
- v = s['Option']['version']
- (major, minor, patch) = v.split('.')
- j['_version'] = {'major': major, 'minor': minor, 'patch': patch}
- for t in s['Define']:
- j[t.name] = {'crc': f'{t.crc:#08x}', 'version': major,
- 'options': t.options}
- return json.dumps(j, indent=4, separators=(',', ': '))
+ if "version" in s["Option"]:
+ v = s["Option"]["version"]
+ (major, minor, patch) = v.split(".")
+ j["_version"] = {"major": major, "minor": minor, "patch": patch}
+ for t in s["Define"]:
+ j[t.name] = {"crc": f"{t.crc:#08x}", "version": major, "options": t.options}
+ return json.dumps(j, indent=4, separators=(",", ": "))
diff --git a/src/tools/vppapigen/vppapigen_json.py b/src/tools/vppapigen/vppapigen_json.py
index 5fa839f9854..695b8cc7aa2 100644
--- a/src/tools/vppapigen/vppapigen_json.py
+++ b/src/tools/vppapigen/vppapigen_json.py
@@ -14,7 +14,7 @@ def walk_imports(s):
def walk_counters(s, pathset):
r = []
for e in s:
- r2 = {'name': e.name, 'elements': e.block}
+ r2 = {"name": e.name, "elements": e.block}
r.append(r2)
r3 = []
@@ -31,7 +31,7 @@ def walk_enums(s):
d.append(e.name)
for b in e.block:
d.append(b)
- d.append({'enumtype': e.enumtype})
+ d.append({"enumtype": e.enumtype})
r.append(d)
return r
@@ -39,13 +39,13 @@ def walk_enums(s):
def walk_services(s):
r = {}
for e in s:
- d = {'reply': e.reply}
+ d = {"reply": e.reply}
if e.stream:
- d['stream'] = True
+ d["stream"] = True
if e.stream_message:
- d['stream_msg'] = e.stream_message
+ d["stream_msg"] = e.stream_message
if e.events:
- d['events'] = e.events
+ d["events"] = e.events
r[e.caller] = d
return r
@@ -56,28 +56,27 @@ def walk_defs(s, is_message=False):
d = []
d.append(t.name)
for b in t.block:
- if b.type == 'Option':
+ if b.type == "Option":
continue
- if b.type == 'Field':
+ if b.type == "Field":
if b.limit:
d.append([b.fieldtype, b.fieldname, b.limit])
else:
d.append([b.fieldtype, b.fieldname])
- elif b.type == 'Array':
+ elif b.type == "Array":
if b.lengthfield:
- d.append([b.fieldtype, b.fieldname,
- b.length, b.lengthfield])
+ d.append([b.fieldtype, b.fieldname, b.length, b.lengthfield])
else:
d.append([b.fieldtype, b.fieldname, b.length])
- elif b.type == 'Union':
+ elif b.type == "Union":
pass
else:
raise ValueError("Error in processing array type %s" % b)
if is_message and t.crc:
c = {}
- c['crc'] = "{0:#0{1}x}".format(t.crc, 10)
- c['options'] = t.options
+ c["crc"] = "{0:#0{1}x}".format(t.crc, 10)
+ c["options"] = t.options
d.append(c)
r.append(d)
@@ -90,19 +89,19 @@ def walk_defs(s, is_message=False):
def run(args, filename, s):
j = {}
- j['types'] = (walk_defs([o for o in s['types']
- if o.__class__.__name__ == 'Typedef']))
- j['messages'] = walk_defs(s['Define'], True)
- j['unions'] = (walk_defs([o for o in s['types']
- if o.__class__.__name__ == 'Union']))
- j['enums'] = (walk_enums([o for o in s['types']
- if o.__class__.__name__ == 'Enum']))
- j['enumflags'] = (walk_enums([o for o in s['types']
- if o.__class__.__name__ == 'EnumFlag']))
- j['services'] = walk_services(s['Service'])
- j['options'] = s['Option']
- j['aliases'] = {o.name:o.alias for o in s['types'] if o.__class__.__name__ == 'Using'}
- j['vl_api_version'] = hex(s['file_crc'])
- j['imports'] = walk_imports(i for i in s['Import'])
- j['counters'], j['paths'] = walk_counters(s['Counters'], s['Paths'])
- return json.dumps(j, indent=4, separators=(',', ': '))
+ j["types"] = walk_defs([o for o in s["types"] if o.__class__.__name__ == "Typedef"])
+ j["messages"] = walk_defs(s["Define"], True)
+ j["unions"] = walk_defs([o for o in s["types"] if o.__class__.__name__ == "Union"])
+ j["enums"] = walk_enums([o for o in s["types"] if o.__class__.__name__ == "Enum"])
+ j["enumflags"] = walk_enums(
+ [o for o in s["types"] if o.__class__.__name__ == "EnumFlag"]
+ )
+ j["services"] = walk_services(s["Service"])
+ j["options"] = s["Option"]
+ j["aliases"] = {
+ o.name: o.alias for o in s["types"] if o.__class__.__name__ == "Using"
+ }
+ j["vl_api_version"] = hex(s["file_crc"])
+ j["imports"] = walk_imports(i for i in s["Import"])
+ j["counters"], j["paths"] = walk_counters(s["Counters"], s["Paths"])
+ return json.dumps(j, indent=4, separators=(",", ": "))