Diffstat (limited to 'docs/_scripts')
-rwxr-xr-x  docs/_scripts/filter_api.py              14
-rwxr-xr-x  docs/_scripts/filter_c.py               101
-rwxr-xr-x  docs/_scripts/filter_h.py                16
-rw-r--r--  docs/_scripts/includes_renderer.py        7
-rw-r--r--  docs/_scripts/siphon/generate.py         83
-rw-r--r--  docs/_scripts/siphon/generate_clicmd.py  13
-rw-r--r--  docs/_scripts/siphon/generate_syscfg.py  14
-rw-r--r--  docs/_scripts/siphon/parsers.py          52
-rw-r--r--  docs/_scripts/siphon/process.py         109
-rw-r--r--  docs/_scripts/siphon/process_clicmd.py   30
10 files changed, 254 insertions, 185 deletions
diff --git a/docs/_scripts/filter_api.py b/docs/_scripts/filter_api.py
index 484881439b8..e22bc5e50f5 100755
--- a/docs/_scripts/filter_api.py
+++ b/docs/_scripts/filter_api.py
@@ -24,9 +24,10 @@ if len(sys.argv) < 2:
patterns = [
# Search for "define" blocks and treat them as structs
- (re.compile(r"^.*(manual_.[^\s]+\s+)?define\s+(?P<name>[^\s]+)"),
- r"typedef struct vl_api_\g<name>_t"),
-
+ (
+ re.compile(r"^.*(manual_.[^\s]+\s+)?define\s+(?P<name>[^\s]+)"),
+ r"typedef struct vl_api_\g<name>_t",
+ ),
# For every "brief" statement at the start of a comment block, add an
# xref with whatever is on the same line. This gives us an index page
# with all the API methods in one place.
@@ -36,14 +37,13 @@ patterns = [
# r'/** @xrefitem api "" "VPP API" \g<c> \g<b> \g<c>'), # capture inline comment close
# (re.compile(r"/\*\*\s*(?P<b>[\\@]brief)\s+(?P<c>.+)$"),
# r'/** @xrefitem api "" "VPP API" \g<c> \g<b> \g<c>'),
-
# Since structs don't have params, replace @param with @tparam
- ( re.compile("[\\@]param\\b"), "@tparam"),
+ (re.compile("[\\@]param\\b"), "@tparam"),
]
with open(sys.argv[1]) as fd:
for line in fd:
- str = line[:-1] # strip \n
+ str = line[:-1] # strip \n
for p in patterns:
str = p[0].sub(p[1], str)
- sys.stdout.write(str+"\n")
+ sys.stdout.write(str + "\n")
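[Editor's note] For context, this is what the first filter_api.py pattern does to an API "define" line; the define name below is illustrative, not taken from this patch.

import re

# Sketch of the "define" rewrite from filter_api.py; the input line is a
# hypothetical .api definition, chosen only to show the substitution.
pattern = re.compile(r"^.*(manual_.[^\s]+\s+)?define\s+(?P<name>[^\s]+)")
replacement = r"typedef struct vl_api_\g<name>_t"
print(pattern.sub(replacement, "define show_version"))
# -> typedef struct vl_api_show_version_t
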
diff --git a/docs/_scripts/filter_c.py b/docs/_scripts/filter_c.py
index 897f9f6d0b3..d3e7ee38762 100755
--- a/docs/_scripts/filter_c.py
+++ b/docs/_scripts/filter_c.py
@@ -23,51 +23,82 @@ if len(sys.argv) < 2:
replace_patterns = [
# Search for VLIB_CLI_COMMAND, extract its parameters and add a docblock for it
- ( re.compile("(?P<m>VLIB_CLI_COMMAND)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
- r"/** @brief (@em constructor) \g<m> (\g<name>) */ vlib_cli_command_t \g<name>"),
- ( re.compile("(?P<m>VLIB_CLI_COMMAND)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<qual>[^)]*)[)]"),
- r"/** @brief (@em constructor) \g<m> (\g<name>) */ \g<qual> vlib_cli_command_t \g<name>"),
-
+ (
+ re.compile("(?P<m>VLIB_CLI_COMMAND)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
+ r"/** @brief (@em constructor) \g<m> (\g<name>) */ vlib_cli_command_t \g<name>",
+ ),
+ (
+ re.compile(
+ "(?P<m>VLIB_CLI_COMMAND)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<qual>[^)]*)[)]"
+ ),
+ r"/** @brief (@em constructor) \g<m> (\g<name>) */ \g<qual> vlib_cli_command_t \g<name>",
+ ),
# Search for VLIB_REGISTER_NODE, extract its parameters and add a docblock for it
- ( re.compile("(?P<m>VLIB_REGISTER_NODE)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
- r"/** @brief (@em constructor) \g<m> (\g<name>) */ vlib_node_registration_t \g<name>"),
- ( re.compile("(?P<m>VLIB_REGISTER_NODE)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<qual>[^)]*)[)]"),
- r"/** @brief (@em constructor) \g<m> (\g<name>) */ \g<qual> vlib_node_registration_t \g<name>"),
-
+ (
+ re.compile("(?P<m>VLIB_REGISTER_NODE)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
+ r"/** @brief (@em constructor) \g<m> (\g<name>) */ vlib_node_registration_t \g<name>",
+ ),
+ (
+ re.compile(
+ "(?P<m>VLIB_REGISTER_NODE)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<qual>[^)]*)[)]"
+ ),
+ r"/** @brief (@em constructor) \g<m> (\g<name>) */ \g<qual> vlib_node_registration_t \g<name>",
+ ),
# Search for VLIB_INIT_FUNCTION, extract its parameter and add a docblock for it
- ( re.compile("(?P<m>VLIB_INIT_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
- r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ vlib_init_function_t * _vlib_init_function_\g<name>"),
- ( re.compile("(?P<m>VLIB_DECLARE_INIT_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
- r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ vlib_init_function_t * _vlib_init_function_\g<name>"),
-
+ (
+ re.compile("(?P<m>VLIB_INIT_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
+ r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ vlib_init_function_t * _vlib_init_function_\g<name>",
+ ),
+ (
+ re.compile("(?P<m>VLIB_DECLARE_INIT_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)[)]"),
+ r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ vlib_init_function_t * _vlib_init_function_\g<name>",
+ ),
# Search for VLIB_LOOP_ENTER_FUNCTION, extract the parameters and add a docblock for it
- ( re.compile("(?P<m>VLIB_MAIN_LOOP_ENTER_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"),
- r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ _vlib_main_loop_enter_\g<name>"),
- ( re.compile("(?P<m>VLIB_MAIN_LOOP_EXIT_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"),
- r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ _vlib_main_loop_exit_\g<name>"),
-
+ (
+ re.compile(
+ "(?P<m>VLIB_MAIN_LOOP_ENTER_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"
+ ),
+ r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ _vlib_main_loop_enter_\g<name>",
+ ),
+ (
+ re.compile(
+ "(?P<m>VLIB_MAIN_LOOP_EXIT_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"
+ ),
+ r"/** @brief (@em constructor) \g<m> (@ref \g<name>) */ _vlib_main_loop_exit_\g<name>",
+ ),
# Search for VLIB_CONFIG_FUNCTION, extract the parameters and add a docblock for it
- ( re.compile("(?P<m>VLIB_CONFIG_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<n>\"[^\"]+\")(,[^)]*)?[)]"),
- r"/** @brief (@em constructor) \g<m> (\g<name>, \g<n>) */ vlib_config_function_runtime_t _vlib_config_function_\g<name>"),
- ( re.compile("(?P<m>VLIB_EARLY_CONFIG_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<n>\"[^\"]+\")(,[^)]*)?[)]"),
- r"/** @brief (@em constructor) \g<m> (\g<name>, \g<n>) */ vlib_config_function_runtime_t _vlib_config_function_\g<name>"),
-
+ (
+ re.compile(
+ '(?P<m>VLIB_CONFIG_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<n>"[^"]+")(,[^)]*)?[)]'
+ ),
+ r"/** @brief (@em constructor) \g<m> (\g<name>, \g<n>) */ vlib_config_function_runtime_t _vlib_config_function_\g<name>",
+ ),
+ (
+ re.compile(
+ '(?P<m>VLIB_EARLY_CONFIG_FUNCTION)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<n>"[^"]+")(,[^)]*)?[)]'
+ ),
+ r"/** @brief (@em constructor) \g<m> (\g<name>, \g<n>) */ vlib_config_function_runtime_t _vlib_config_function_\g<name>",
+ ),
# Search for "format_thing" and "unformat_thing" when used as a function pointer and add parens
- ( re.compile("(?P<pre>(^|,)\s*)(?P<name>(un)?format_[a-zA-Z0-9_]+)(?P<post>\s*(,|$))"),
- r"\g<pre>\g<name>()\g<post>" ),
-
+ (
+ re.compile(
+ "(?P<pre>(^|,)\s*)(?P<name>(un)?format_[a-zA-Z0-9_]+)(?P<post>\s*(,|$))"
+ ),
+ r"\g<pre>\g<name>()\g<post>",
+ ),
# Search for CLIB_PAD_FROM_TO(...); and replace with padding
# #define CLIB_PAD_FROM_TO(from,to) u8 pad_##from[(to) - (from)]
- ( re.compile("(?P<m>CLIB_PAD_FROM_TO)\s*[(](?P<from>[^,]+),\s*(?P<to>[^)]+)[)]"),
- r"/** Padding. */ u8 pad_\g<from>[(\g<to>) - (\g<from>)]" ),
-
+ (
+ re.compile("(?P<m>CLIB_PAD_FROM_TO)\s*[(](?P<from>[^,]+),\s*(?P<to>[^)]+)[)]"),
+ r"/** Padding. */ u8 pad_\g<from>[(\g<to>) - (\g<from>)]",
+ ),
]
filename = sys.argv[1]
cwd = os.getcwd()
-if filename[0:len(cwd)] == cwd:
- filename = filename[len(cwd):]
+if filename[0 : len(cwd)] == cwd:
+ filename = filename[len(cwd) :]
if filename[0] == "/":
filename = filename[1:]
@@ -76,12 +107,12 @@ with open(filename) as fd:
for line in fd:
line_num += 1
- str = line[:-1] # filter \n
+ str = line[:-1] # filter \n
# Look for search/replace patterns
for p in replace_patterns:
str = p[0].sub(p[1], str)
- sys.stdout.write(str+"\n")
+ sys.stdout.write(str + "\n")
# All done
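[Editor's note] As a quick illustration of the two-argument VLIB_CLI_COMMAND rewrite above (the command name is hypothetical, and the pattern is written as a raw string here for brevity):

import re

# Sketch: the two-argument VLIB_CLI_COMMAND pattern from filter_c.py,
# applied to one hypothetical source line.
pat = re.compile(
    r"(?P<m>VLIB_CLI_COMMAND)\s*[(](?P<name>[a-zA-Z0-9_]+),\s*(?P<qual>[^)]*)[)]"
)
repl = (
    r"/** @brief (@em constructor) \g<m> (\g<name>) */"
    r" \g<qual> vlib_cli_command_t \g<name>"
)
print(pat.sub(repl, "VLIB_CLI_COMMAND (show_version_command, static) = {"))
# emits one line: the doc-comment followed by
# 'static vlib_cli_command_t show_version_command = {'
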
diff --git a/docs/_scripts/filter_h.py b/docs/_scripts/filter_h.py
index 0891fa708e1..31c4c514283 100755
--- a/docs/_scripts/filter_h.py
+++ b/docs/_scripts/filter_h.py
@@ -26,17 +26,19 @@ if len(sys.argv) < 2:
replace_patterns = [
# Search for CLIB_PAD_FROM_TO(...); and replace with padding
# #define CLIB_PAD_FROM_TO(from,to) u8 pad_##from[(to) - (from)]
- (re.compile(r"(?P<m>CLIB_PAD_FROM_TO)\s*[(](?P<from>[^,]+),"
- r"\s*(?P<to>[^)]+)[)]"),
- r"/** Padding. */ u8 pad_\g<from>[(\g<to>) - (\g<from>)]"),
-
+ (
+ re.compile(
+ r"(?P<m>CLIB_PAD_FROM_TO)\s*[(](?P<from>[^,]+)," r"\s*(?P<to>[^)]+)[)]"
+ ),
+ r"/** Padding. */ u8 pad_\g<from>[(\g<to>) - (\g<from>)]",
+ ),
]
filename = sys.argv[1]
cwd = os.getcwd()
-if filename[0:len(cwd)] == cwd:
- filename = filename[len(cwd):]
+if filename[0 : len(cwd)] == cwd:
+ filename = filename[len(cwd) :]
if filename[0] == "/":
filename = filename[1:]
@@ -51,6 +53,6 @@ with open(filename) as fd:
for p in replace_patterns:
str = p[0].sub(p[1], str)
- sys.stdout.write(str+"\n")
+ sys.stdout.write(str + "\n")
# All done
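[Editor's note] The CLIB_PAD_FROM_TO rewrite shared by filter_c.py and filter_h.py expands padding macros into explicit members; a minimal sketch with made-up offsets:

import re

# Sketch: CLIB_PAD_FROM_TO(...) becomes an explicit pad_* member.
pat = re.compile(r"(?P<m>CLIB_PAD_FROM_TO)\s*[(](?P<from>[^,]+),\s*(?P<to>[^)]+)[)]")
repl = r"/** Padding. */ u8 pad_\g<from>[(\g<to>) - (\g<from>)]"
print(pat.sub(repl, "CLIB_PAD_FROM_TO (20, 64);"))
# -> /** Padding. */ u8 pad_20[(64) - (20)];
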
diff --git a/docs/_scripts/includes_renderer.py b/docs/_scripts/includes_renderer.py
index 6bd501d83ff..a2d422b5c18 100644
--- a/docs/_scripts/includes_renderer.py
+++ b/docs/_scripts/includes_renderer.py
@@ -33,15 +33,14 @@ class ContentRenderer:
class PluginRenderer(ContentRenderer):
-
def _render_entry(self, output_file, entry):
description = "<no-description-found>"
# we use glob because a plugin can (ioam for now)
# define the plugin definition in
# a further subdirectory.
- path = os.path.join(self.plugin_dir(), entry.name, '**')
+ path = os.path.join(self.plugin_dir(), entry.name, "**")
for f in glob.iglob(path, recursive=True):
- if not f.endswith('.c'):
+ if not f.endswith(".c"):
continue
with open(f, "r", encoding="utf-8") as src:
for match in self.regex.finditer(src.read()):
@@ -56,7 +55,7 @@ class PluginRenderer(ContentRenderer):
with open(fname, "w") as output_file:
with os.scandir(self.plugin_dir()) as pdir:
for entry in sorted(pdir, key=lambda entry: entry.name):
- if not entry.name.startswith('.') and entry.is_dir():
+ if not entry.name.startswith(".") and entry.is_dir():
self._render_entry(output_file, entry)
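[Editor's note] For orientation, PluginRenderer's directory walk amounts to the following; the plugin directory and plugin name are assumptions for the sketch, not values read from this patch.

import glob
import os

# Sketch of the glob walk in PluginRenderer._render_entry; "src/plugins"
# and "ioam" are assumed values for illustration only.
path = os.path.join("src/plugins", "ioam", "**")
for f in glob.iglob(path, recursive=True):
    if not f.endswith(".c"):
        continue
    print(f)  # each .c file is then scanned with self.regex for a description
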
diff --git a/docs/_scripts/siphon/generate.py b/docs/_scripts/siphon/generate.py
index 2ae5a1b6f1b..1244c4658e4 100644
--- a/docs/_scripts/siphon/generate.py
+++ b/docs/_scripts/siphon/generate.py
@@ -24,8 +24,10 @@ import re
themselves on this list."""
siphon_patterns = []
+
class Generate(object):
"""Matches a siphon comment block start"""
+
siphon_block_start = re.compile("^\s*/\*\?\s*(.*)$")
"""Matches a siphon comment block stop"""
@@ -36,8 +38,10 @@ class Generate(object):
"""Matches a siphon block directive such as
'%clicmd:group_label Debug CLI%'"""
- siphon_block_directive = re.compile("(%s)\s*([a-zA-Z0-9_:]+)\s+(.*)\s*(%s)" % \
- (siphon_block_delimiter, siphon_block_delimiter))
+ siphon_block_directive = re.compile(
+ "(%s)\s*([a-zA-Z0-9_:]+)\s+(.*)\s*(%s)"
+ % (siphon_block_delimiter, siphon_block_delimiter)
+ )
"""Matches the start of an initializer block"""
siphon_initializer = re.compile("\s*=")
@@ -54,7 +58,6 @@ class Generate(object):
"""Logging handler"""
log = None
-
def __init__(self, output_directory, input_prefix):
super(Generate, self).__init__()
self.log = logging.getLogger("siphon.generate")
@@ -70,14 +73,13 @@ class Generate(object):
self.output = {}
for siphon in self.known_siphons:
self.output[siphon] = {
- "file": "%s/%s.siphon" % (output_directory, siphon),
- "global": {},
- "items": [],
- }
+ "file": "%s/%s.siphon" % (output_directory, siphon),
+ "global": {},
+ "items": [],
+ }
self.input_prefix = input_prefix
-
"""
count open and close braces in str
return (0, index) when braces were found and count becomes 0.
@@ -87,16 +89,17 @@ class Generate(object):
return (count, -1) if not all opening braces are closed, count is the
current depth
"""
+
def count_braces(self, str, count=0, found=False):
for index in range(0, len(str)):
- if str[index] == '{':
- count += 1;
+ if str[index] == "{":
+ count += 1
found = True
- elif str[index] == '}':
+ elif str[index] == "}":
if count == 0:
# means we never found an open brace
return (-1, -1)
- count -= 1;
+ count -= 1
if count == 0 and found:
return (count, index)
@@ -106,8 +109,8 @@ class Generate(object):
def parse(self, filename):
# Strip the current directory off the start of the
# filename for brevity
- if filename[0:len(self.input_prefix)] == self.input_prefix:
- filename = filename[len(self.input_prefix):]
+ if filename[0 : len(self.input_prefix)] == self.input_prefix:
+ filename = filename[len(self.input_prefix) :]
if filename[0] == "/":
filename = filename[1:]
@@ -115,8 +118,8 @@ class Generate(object):
directory = os.path.dirname(filename)
if directory[0:2] == "./":
directory = directory[2:]
- elif directory[0:len(self.input_prefix)] == self.input_prefix:
- directory = directory[len(self.input_prefix):]
+ elif directory[0 : len(self.input_prefix)] == self.input_prefix:
+ directory = directory[len(self.input_prefix) :]
if directory[0] == "/":
directory = directory[1:]
@@ -133,9 +136,10 @@ class Generate(object):
for line in fd:
line_num += 1
- str = line[:-1] # filter \n
+ str = line[:-1] # filter \n
"""See if there is a block directive and if so extract it"""
+
def process_block_directive(str, directives):
m = self.siphon_block_directive.search(str)
if m is not None:
@@ -143,7 +147,7 @@ class Generate(object):
v = m.group(3).strip()
directives[k] = v
# Return only the parts we did not match
- return str[0:m.start(1)] + str[m.end(4):]
+ return str[0 : m.start(1)] + str[m.end(4) :]
return str
@@ -200,27 +204,25 @@ class Generate(object):
# Skip to next line
continue
-
if siphon is None:
# Look for blocks we need to siphon
for p in siphon_patterns:
if p[0].match(str):
- siphon = [ p[1], str + "\n", 0 ]
+ siphon = [p[1], str + "\n", 0]
siphon_line = line_num
# see if we have an initializer
m = self.siphon_initializer.search(str)
if m is not None:
# count the braces on this line
- (count, index) = \
- self.count_braces(str[m.start():])
+ (count, index) = self.count_braces(str[m.start() :])
siphon[2] = count
# TODO - it's possible we have the
# initializer all on the first line
# we should check for it, but also
# account for the possibility that
# the open brace is on the next line
- #if count == 0:
+ # if count == 0:
# # braces balanced
# close_siphon = siphon
# siphon = None
@@ -231,12 +233,11 @@ class Generate(object):
else:
# See if we should end the siphon here - do we have
# balanced braces?
- (count, index) = self.count_braces(str,
- count=siphon[2], found=True)
+ (count, index) = self.count_braces(str, count=siphon[2], found=True)
if count == 0:
# braces balanced - add the substring and
# close the siphon
- siphon[1] += str[:index+1] + ";\n"
+ siphon[1] += str[: index + 1] + ";\n"
close_siphon = siphon
siphon = None
else:
@@ -259,15 +260,15 @@ class Generate(object):
details[key] = directives[key]
# Copy details for this block
- details['file'] = filename
- details['directory'] = directory
- details['line_start'] = siphon_line
- details['line_end'] = line_num
- details['siphon_block'] = siphon_block.strip()
+ details["file"] = filename
+ details["directory"] = directory
+ details["line_start"] = siphon_line
+ details["line_end"] = line_num
+ details["siphon_block"] = siphon_block.strip()
details["block"] = close_siphon[1]
# Store the item
- self.output[siphon_name]['items'].append(details)
+ self.output[siphon_name]["items"].append(details)
# All done
close_siphon = None
@@ -275,7 +276,7 @@ class Generate(object):
# Update globals
for key in directives.keys():
- if ':' not in key:
+ if ":" not in key:
continue
if filename.endswith("/dir.dox"):
@@ -288,19 +289,17 @@ class Generate(object):
if sn not in self.output:
self.output[sn] = {}
- if 'global' not in self.output[sn]:
- self.output[sn]['global'] = {}
- if l not in self.output[sn]['global']:
- self.output[sn]['global'][l] = {}
+ if "global" not in self.output[sn]:
+ self.output[sn]["global"] = {}
+ if l not in self.output[sn]["global"]:
+ self.output[sn]["global"][l] = {}
- self.output[sn]['global'][l][label] = directives[key]
+ self.output[sn]["global"][l][label] = directives[key]
def deliver(self):
# Write out the data
for siphon in self.output.keys():
self.log.info("Saving siphon data %s." % siphon)
s = self.output[siphon]
- with open(s['file'], "a") as fp:
- json.dump(s, fp,
- separators=(',', ': '), indent=4, sort_keys=True)
-
+ with open(s["file"], "a") as fp:
+ json.dump(s, fp, separators=(",", ": "), indent=4, sort_keys=True)
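[Editor's note] The brace counting that closes a siphon block works like this; a standalone restatement of Generate.count_braces with two illustrative inputs:

# Sketch of Generate.count_braces: returns (0, index) once the braces of an
# initializer balance, (-1, -1) on a stray close brace, and (depth, -1) when
# the block continues on a later line.
def count_braces(s, count=0, found=False):
    for index in range(len(s)):
        if s[index] == "{":
            count += 1
            found = True
        elif s[index] == "}":
            if count == 0:
                return (-1, -1)
            count -= 1
        if count == 0 and found:
            return (count, index)
    return (count, -1)

print(count_braces('= { .path = "...", };'))  # (0, 19): balanced on this line
print(count_braces("= {"))                    # (1, -1): block continues
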
diff --git a/docs/_scripts/siphon/generate_clicmd.py b/docs/_scripts/siphon/generate_clicmd.py
index 6d24aaf4926..2e2f6281a39 100644
--- a/docs/_scripts/siphon/generate_clicmd.py
+++ b/docs/_scripts/siphon/generate_clicmd.py
@@ -17,8 +17,11 @@ import re
from . import generate
# Register our regexp
-generate.siphon_patterns.append((
- re.compile("(?P<m>VLIB_CLI_COMMAND)\s*"
- "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"),
- "clicmd"
-))
+generate.siphon_patterns.append(
+ (
+ re.compile(
+ "(?P<m>VLIB_CLI_COMMAND)\s*" "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"
+ ),
+ "clicmd",
+ )
+)
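[Editor's note] Registration just pairs a start-of-block regex with a siphon name; Generate.parse later tries each registered pattern against every source line. A small sketch with a hypothetical command name:

import re

# Sketch: the clicmd pattern registered above, matched the way Generate.parse
# does (against the start of a line).
pat = re.compile(r"(?P<m>VLIB_CLI_COMMAND)\s*[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]")
m = pat.match("VLIB_CLI_COMMAND (show_version_command, static) = {")
print(m.group("name"))  # show_version_command -> starts a "clicmd" siphon block
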
diff --git a/docs/_scripts/siphon/generate_syscfg.py b/docs/_scripts/siphon/generate_syscfg.py
index 52c802e5752..105a59c8262 100644
--- a/docs/_scripts/siphon/generate_syscfg.py
+++ b/docs/_scripts/siphon/generate_syscfg.py
@@ -17,8 +17,12 @@ import re
from . import generate
# Register our regexp
-generate.siphon_patterns.append((
- re.compile("(?P<m>VLIB_CONFIG_FUNCTION)\s*"
- '[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'),
- "syscfg"
-))
+generate.siphon_patterns.append(
+ (
+ re.compile(
+ "(?P<m>VLIB_CONFIG_FUNCTION)\s*"
+ '[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'
+ ),
+ "syscfg",
+ )
+)
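[Editor's note] The syscfg pattern additionally captures the configuration name string; the sample input below is the one quoted in the parsers.py comments:

import re

# Sketch: the syscfg pattern captures both the handler and the config name.
pat = re.compile(
    r"(?P<m>VLIB_CONFIG_FUNCTION)\s*"
    r'[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'
)
m = pat.match('VLIB_CONFIG_FUNCTION (unix_config, "unix")')
print(m.group("fn"), m.group("name"))  # unix_config unix
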
diff --git a/docs/_scripts/siphon/parsers.py b/docs/_scripts/siphon/parsers.py
index 162205de4ca..1a7d1f59539 100644
--- a/docs/_scripts/siphon/parsers.py
+++ b/docs/_scripts/siphon/parsers.py
@@ -18,9 +18,10 @@ ident = pp.Word(pp.alphas + "_", pp.alphas + pp.nums + "_")
intNum = pp.Word(pp.nums)
hexNum = pp.Literal("0x") + pp.Word(pp.hexnums)
octalNum = pp.Literal("0") + pp.Word("01234567")
-integer = (hexNum | octalNum | intNum) + \
- pp.Optional(pp.Literal("ULL") | pp.Literal("LL") | pp.Literal("L"))
-floatNum = pp.Regex(r'\d+(\.\d*)?([eE]\d+)?') + pp.Optional(pp.Literal("f"))
+integer = (hexNum | octalNum | intNum) + pp.Optional(
+ pp.Literal("ULL") | pp.Literal("LL") | pp.Literal("L")
+)
+floatNum = pp.Regex(r"\d+(\.\d*)?([eE]\d+)?") + pp.Optional(pp.Literal("f"))
char = pp.Literal("'") + pp.Word(pp.printables, exact=1) + pp.Literal("'")
arrayIndex = integer | ident
@@ -36,23 +37,29 @@ semicolon = pp.Literal(";").suppress()
# initializer := { [member = ] (variable | expression | { initializer } ) }
typeName = ident
varName = ident
-typeSpec = pp.Optional("unsigned") + \
- pp.oneOf("int long short float double char u8 i8 void") + \
- pp.Optional(pp.Word("*"), default="")
-typeCast = pp.Combine( "(" + ( typeSpec | typeName ) + ")" ).suppress()
-
-string = pp.Combine(pp.OneOrMore(pp.QuotedString(quoteChar='"',
- escChar='\\', multiline=True)), adjacent=False)
+typeSpec = (
+ pp.Optional("unsigned")
+ + pp.oneOf("int long short float double char u8 i8 void")
+ + pp.Optional(pp.Word("*"), default="")
+)
+typeCast = pp.Combine("(" + (typeSpec | typeName) + ")").suppress()
+
+string = pp.Combine(
+ pp.OneOrMore(pp.QuotedString(quoteChar='"', escChar="\\", multiline=True)),
+ adjacent=False,
+)
literal = pp.Optional(typeCast) + (integer | floatNum | char | string)
-var = pp.Combine(pp.Optional(typeCast) + varName +
- pp.Optional("[" + arrayIndex + "]"))
+var = pp.Combine(pp.Optional(typeCast) + varName + pp.Optional("[" + arrayIndex + "]"))
# This could be more complete, but suffices for our uses
-expr = (literal | var)
+expr = literal | var
"""Parse and render a block of text into a Python dictionary."""
+
+
class Parser(object):
"""Compiled PyParsing BNF"""
+
_parser = None
def __init__(self):
@@ -71,6 +78,8 @@ class Parser(object):
"""Parser for function-like macros - without the closing semi-colon."""
+
+
class ParserFunctionMacro(Parser):
def BNF(self):
# VLIB_CONFIG_FUNCTION (unix_config, "unix")
@@ -91,6 +100,8 @@ class ParserFunctionMacro(Parser):
"""Parser for function-like macros with a closing semi-colon."""
+
+
class ParseFunctionMacroStmt(ParserFunctionMacro):
def BNF(self):
# VLIB_CONFIG_FUNCTION (unix_config, "unix");
@@ -106,6 +117,8 @@ Parser for our struct initializers which are composed from a
function-like macro, equals sign, and then a normal C struct initializer
block.
"""
+
+
class MacroInitializer(ParserFunctionMacro):
def BNF(self):
# VLIB_CLI_COMMAND (show_sr_tunnel_command, static) = {
@@ -115,14 +128,15 @@ class MacroInitializer(ParserFunctionMacro):
# };
cs = pp.Forward()
-
- member = pp.Combine(dot + varName + pp.Optional("[" + arrayIndex + "]"),
- adjacent=False)
- value = (expr | cs)
+ member = pp.Combine(
+ dot + varName + pp.Optional("[" + arrayIndex + "]"), adjacent=False
+ )
+ value = expr | cs
entry = pp.Group(pp.Optional(member + equals, default="") + value)
- entries = (pp.ZeroOrMore(entry + comma) + entry + pp.Optional(comma)) | \
- (pp.ZeroOrMore(entry + comma))
+ entries = (pp.ZeroOrMore(entry + comma) + entry + pp.Optional(comma)) | (
+ pp.ZeroOrMore(entry + comma)
+ )
cs << (lbrace + entries + rbrace)
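[Editor's note] The pyparsing primitives at the top of parsers.py behave as follows; a small standalone check, not part of the patch. The string form also models adjacent C string literals being concatenated.

import pyparsing as pp

# Sketch: the ident and string tokens as defined in parsers.py.
ident = pp.Word(pp.alphas + "_", pp.alphas + pp.nums + "_")
string = pp.Combine(
    pp.OneOrMore(pp.QuotedString(quoteChar='"', escChar="\\", multiline=True)),
    adjacent=False,
)
print(ident.parseString("unix_config")[0])         # unix_config
print(string.parseString('"show " "version"')[0])  # show version
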
diff --git a/docs/_scripts/siphon/process.py b/docs/_scripts/siphon/process.py
index e3a70152487..341b7cba299 100644
--- a/docs/_scripts/siphon/process.py
+++ b/docs/_scripts/siphon/process.py
@@ -88,7 +88,8 @@ class Siphon(object):
loader=loader,
trim_blocks=True,
autoescape=False,
- keep_trailing_newline=True)
+ keep_trailing_newline=True,
+ )
# Convenience, get a reference to the internal escape and
# unescape methods in html.parser. These then become
@@ -103,32 +104,38 @@ class Siphon(object):
# Output renderers
"""Returns an object to be used as the sorting key in the item index."""
+
def index_sort_key(self, group):
return group
"""Returns a string to use as the header at the top of the item index."""
+
def index_header(self):
return self.template("index_header")
"""Returns the string fragment to use for each section in the item
index."""
+
def index_section(self, group):
return self.template("index_section", group=group)
"""Returns the string fragment to use for each entry in the item index."""
+
def index_entry(self, meta, item):
return self.template("index_entry", meta=meta, item=item)
"""Returns an object, typically a string, to be used as the sorting key
for items within a section."""
+
def item_sort_key(self, item):
- return item['name']
+ return item["name"]
"""Returns a key for grouping items together."""
+
def group_key(self, directory, file, macro, name):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
- if file in _global and 'group_label' in _global[file]:
+ if file in _global and "group_label" in _global[file]:
self._group[file] = (directory, file)
return file
@@ -136,60 +143,59 @@ class Siphon(object):
return directory
"""Returns a key for identifying items within a grouping."""
+
def item_key(self, directory, file, macro, name):
return name
"""Returns a string to use as the header when rendering the item."""
+
def item_header(self, group):
return self.template("item_header", group=group)
"""Returns a string to use as the body when rendering the item."""
+
def item_format(self, meta, item):
return self.template("item_format", meta=meta, item=item)
"""Returns a string to use as the label for the page reference."""
+
def page_label(self, group):
- return "_".join((
- self.name,
- self.sanitize_label(group)
- ))
+ return "_".join((self.name, self.sanitize_label(group)))
"""Returns a title to use for a page."""
+
def page_title(self, group):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
(directory, file) = self._group[group]
- if file and file in _global and 'group_label' in _global[file]:
- return _global[file]['group_label']
+ if file and file in _global and "group_label" in _global[file]:
+ return _global[file]["group_label"]
- if directory in _global and 'group_label' in _global[directory]:
- return _global[directory]['group_label']
+ if directory in _global and "group_label" in _global[directory]:
+ return _global[directory]["group_label"]
return directory
"""Returns a string to use as the label for the section reference."""
+
def item_label(self, group, item):
- return "__".join((
- self.name,
- item
- ))
+ return "__".join((self.name, item))
"""Label sanitizer; for creating Doxygen references"""
+
def sanitize_label(self, value):
- return value.replace(" ", "_") \
- .replace("/", "_") \
- .replace(".", "_")
+ return value.replace(" ", "_").replace("/", "_").replace(".", "_")
"""Template processor"""
+
def template(self, name, **kwargs):
tpl = self._tplenv.get_template(name + self._format.extension)
- return tpl.render(
- this=self,
- **kwargs)
+ return tpl.render(this=self, **kwargs)
# Processing methods
"""Parse the input file into a more usable dictionary structure."""
+
def load_json(self, files):
self._cmds = {}
self._group = {}
@@ -198,34 +204,37 @@ class Siphon(object):
line_start = 0
for filename in files:
filename = os.path.relpath(filename)
- self.log.info("Parsing items in file \"%s\"." % filename)
+ self.log.info('Parsing items in file "%s".' % filename)
data = None
with open(filename, "r") as fd:
data = json.load(fd)
- self._cmds['_global'] = data['global']
+ self._cmds["_global"] = data["global"]
# iterate the items loaded and regroup it
for item in data["items"]:
try:
- o = self._parser.parse(item['block'])
+ o = self._parser.parse(item["block"])
except Exception:
- self.log.error("Exception parsing item: %s\n%s"
- % (json.dumps(item, separators=(',', ': '),
- indent=4),
- item['block']))
+ self.log.error(
+ "Exception parsing item: %s\n%s"
+ % (
+ json.dumps(item, separators=(",", ": "), indent=4),
+ item["block"],
+ )
+ )
raise
# Augment the item with metadata
o["meta"] = {}
for key in item:
- if key == 'block':
+ if key == "block":
continue
- o['meta'][key] = item[key]
+ o["meta"][key] = item[key]
# Load some interesting fields
- directory = item['directory']
- file = item['file']
+ directory = item["directory"]
+ file = item["file"]
macro = o["macro"]
name = o["name"]
@@ -240,6 +249,7 @@ class Siphon(object):
"""Iterate over the input data, calling render methods to generate the
output."""
+
def process(self, out=None):
if out is None:
@@ -257,11 +267,12 @@ class Siphon(object):
# Iterate the dictionary and process it
for group in sorted(self._cmds.keys(), key=group_sort_key):
- if group.startswith('_'):
+ if group.startswith("_"):
continue
- self.log.info("Processing items in group \"%s\" (%s)." %
- (group, group_sort_key(group)))
+ self.log.info(
+ 'Processing items in group "%s" (%s).' % (group, group_sort_key(group))
+ )
# Generate the section index entry (write it now)
out.write(self.index_section(group))
@@ -273,15 +284,16 @@ class Siphon(object):
return self.item_sort_key(self._cmds[group][key])
for key in sorted(self._cmds[group].keys(), key=item_sort_key):
- self.log.debug("--- Processing key \"%s\" (%s)." %
- (key, item_sort_key(key)))
+ self.log.debug(
+ '--- Processing key "%s" (%s).' % (key, item_sort_key(key))
+ )
o = self._cmds[group][key]
meta = {
- "directory": o['meta']['directory'],
- "file": o['meta']['file'],
- "macro": o['macro'],
- "name": o['name'],
+ "directory": o["meta"]["directory"],
+ "file": o["meta"]["file"],
+ "macro": o["macro"],
+ "name": o["name"],
"key": key,
"label": self.item_label(group, key),
}
@@ -304,7 +316,7 @@ class Siphon(object):
def do_cliexstart(self, matchobj):
title = matchobj.group(1)
- title = ' '.join(title.splitlines())
+ title = " ".join(title.splitlines())
content = matchobj.group(2)
content = re.sub(r"\n", r"\n ", content)
return "\n\n.. code-block:: console\n\n %s\n %s\n\n" % (title, content)
@@ -316,7 +328,7 @@ class Siphon(object):
def do_cliexcmd(self, matchobj):
content = matchobj.group(1)
- content = ' '.join(content.splitlines())
+ content = " ".join(content.splitlines())
return "\n\n.. code-block:: console\n\n %s\n\n" % content
def process_list(self, matchobj):
@@ -351,7 +363,9 @@ class Siphon(object):
s = re.sub(r"@TODO[^\n]*", "", s)
# ----------- code blocks
s = re.sub(r"@cliexcmd{(.+?)}", self.do_cliexcmd, s, flags=re.DOTALL)
- s = re.sub(r"@cliexstart{(.+?)}(.+?)@cliexend", self.do_cliexstart, s, flags=re.DOTALL)
+ s = re.sub(
+ r"@cliexstart{(.+?)}(.+?)@cliexend", self.do_cliexstart, s, flags=re.DOTALL
+ )
s = re.sub(r"@clistart(.+?)@cliend", self.do_clistart, s, flags=re.DOTALL)
# ----------- lists
s = re.sub(r"^\s*-", r"\n@@@@", s, flags=re.MULTILINE)
@@ -377,6 +391,7 @@ class Siphon(object):
s = re.sub(r"\n[ \f\v\t]*", "\n", s)
return s
+
class Format(object):
"""Output format class"""
@@ -389,6 +404,7 @@ class Format(object):
class FormatMarkdown(Format):
"""Markdown output format"""
+
name = "markdown"
extension = ".md"
@@ -399,6 +415,7 @@ formats["markdown"] = FormatMarkdown
class FormatItemlist(Format):
"""Itemlist output format"""
+
name = "itemlist"
extension = ".itemlist"
diff --git a/docs/_scripts/siphon/process_clicmd.py b/docs/_scripts/siphon/process_clicmd.py
index bf270518ad1..afc24ae6da1 100644
--- a/docs/_scripts/siphon/process_clicmd.py
+++ b/docs/_scripts/siphon/process_clicmd.py
@@ -17,6 +17,7 @@
from . import process, parsers
import os
+
class SiphonCLICMD(process.Siphon):
name = "clicmd"
@@ -32,37 +33,36 @@ class SiphonCLICMD(process.Siphon):
return self.page_label(group) + ".rst"
def index_sort_key(self, group):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
if group not in self._group:
return group
(directory, file) = self._group[group]
- if file in _global and 'group_label' in _global[file]:
- return _global[file]['group_label']
+ if file in _global and "group_label" in _global[file]:
+ return _global[file]["group_label"]
- if directory in _global and 'group_label' in _global[directory]:
- return _global[directory]['group_label']
+ if directory in _global and "group_label" in _global[directory]:
+ return _global[directory]["group_label"]
return group
def item_sort_key(self, item):
- return item['value']['path']
+ return item["value"]["path"]
def item_label(self, group, item):
- return "_".join((
- self.name,
- self.sanitize_label(self._cmds[group][item]['value']['path'])
- ))
+ return "_".join(
+ (self.name, self.sanitize_label(self._cmds[group][item]["value"]["path"]))
+ )
def page_title(self, group):
- _global = self._cmds['_global']
+ _global = self._cmds["_global"]
(directory, file) = self._group[group]
- if file and file in _global and 'group_label' in _global[file]:
- return _global[file]['group_label']
+ if file and file in _global and "group_label" in _global[file]:
+ return _global[file]["group_label"]
- if directory in _global and 'group_label' in _global[directory]:
- return _global[directory]['group_label']
+ if directory in _global and "group_label" in _global[directory]:
+ return _global[directory]["group_label"]
file_ext = os.path.basename(directory)
fname, ext = os.path.splitext(file_ext)