Diffstat (limited to 'docs/_scripts/siphon')
-rw-r--r--   docs/_scripts/siphon/__init__.py          24
-rw-r--r--   docs/_scripts/siphon/generate.py         306
-rw-r--r--   docs/_scripts/siphon/generate_clicmd.py   24
-rw-r--r--   docs/_scripts/siphon/generate_syscfg.py   24
-rw-r--r--   docs/_scripts/siphon/parsers.py          150
-rw-r--r--   docs/_scripts/siphon/process.py          407
-rw-r--r--   docs/_scripts/siphon/process_clicmd.py    73
-rw-r--r--   docs/_scripts/siphon/process_syscfg.py    31
8 files changed, 1039 insertions, 0 deletions
diff --git a/docs/_scripts/siphon/__init__.py b/docs/_scripts/siphon/__init__.py
new file mode 100644
index 00000000000..f6417314d82
--- /dev/null
+++ b/docs/_scripts/siphon/__init__.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Siphon classes
+
+from . import generate
+from . import generate_clicmd
+from . import generate_syscfg
+
+from . import parsers
+from . import process
+from . import process_clicmd
+from . import process_syscfg
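
Importing the siphon package wires the two registries together: the generate_* modules append their (regexp, siphon_name) tuples to generate.siphon_patterns, and the process_* modules register their processor classes in process.siphons. A quick interactive check (a sketch only, assuming docs/_scripts is on sys.path and that pyparsing and jinja2 are installed):

    import siphon

    # The generators register their match patterns on import...
    print([name for _, name in siphon.generate.siphon_patterns])
    # ['clicmd', 'syscfg']

    # ...and the processors register their classes.
    print(sorted(siphon.process.siphons))
    # ['clicmd', 'syscfg']
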
diff --git a/docs/_scripts/siphon/generate.py b/docs/_scripts/siphon/generate.py
new file mode 100644
index 00000000000..2ae5a1b6f1b
--- /dev/null
+++ b/docs/_scripts/siphon/generate.py
@@ -0,0 +1,306 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate .siphon source fragments for later processing
+
+import json
+import logging
+import os
+import re
+
+"""List of (regexp, siphon_name) tuples for matching the start of C
+ initializer blocks in source files. Each siphon class registers
+ themselves on this list."""
+siphon_patterns = []
+
+class Generate(object):
+ """Matches a siphon comment block start"""
+    siphon_block_start = re.compile(r"^\s*/\*\?\s*(.*)$")
+
+ """Matches a siphon comment block stop"""
+    siphon_block_stop = re.compile(r"^(.*)\s*\?\*/\s*$")
+
+ """Siphon block directive delimiter"""
+ siphon_block_delimiter = "%%"
+
+ """Matches a siphon block directive such as
+ '%clicmd:group_label Debug CLI%'"""
+ siphon_block_directive = re.compile("(%s)\s*([a-zA-Z0-9_:]+)\s+(.*)\s*(%s)" % \
+ (siphon_block_delimiter, siphon_block_delimiter))
+
+ """Matches the start of an initializer block"""
+    siphon_initializer = re.compile(r"\s*=")
+
+ """Collated output for each siphon"""
+ output = None
+
+ """Directory prefix to strip from input filenames to keep things tidy."""
+ input_prefix = None
+
+ """List of known siphons"""
+ known_siphons = None
+
+ """Logging handler"""
+ log = None
+
+
+ def __init__(self, output_directory, input_prefix):
+ super(Generate, self).__init__()
+ self.log = logging.getLogger("siphon.generate")
+
+ # Build a list of known siphons
+ self.known_siphons = []
+ for item in siphon_patterns:
+ siphon = item[1]
+ if siphon not in self.known_siphons:
+ self.known_siphons.append(siphon)
+
+ # Setup information for siphons we know about
+ self.output = {}
+ for siphon in self.known_siphons:
+ self.output[siphon] = {
+ "file": "%s/%s.siphon" % (output_directory, siphon),
+ "global": {},
+ "items": [],
+ }
+
+ self.input_prefix = input_prefix
+
+
+ """
+ count open and close braces in str
+ return (0, index) when braces were found and count becomes 0.
+ index indicates the position at which the last closing brace was
+ found.
+ return (-1, -1) if a closing brace is found before any opening one.
+ return (count, -1) if not all opening braces are closed, count is the
+ current depth
+ """
+ def count_braces(self, str, count=0, found=False):
+ for index in range(0, len(str)):
+ if str[index] == '{':
+                count += 1
+ found = True
+ elif str[index] == '}':
+ if count == 0:
+ # means we never found an open brace
+ return (-1, -1)
+                count -= 1
+
+ if count == 0 and found:
+ return (count, index)
+
+ return (count, -1)
+
+ def parse(self, filename):
+ # Strip the current directory off the start of the
+ # filename for brevity
+ if filename[0:len(self.input_prefix)] == self.input_prefix:
+ filename = filename[len(self.input_prefix):]
+ if filename[0] == "/":
+ filename = filename[1:]
+
+ # Work out the abbreviated directory name
+ directory = os.path.dirname(filename)
+ if directory[0:2] == "./":
+ directory = directory[2:]
+ elif directory[0:len(self.input_prefix)] == self.input_prefix:
+ directory = directory[len(self.input_prefix):]
+ if directory[0] == "/":
+ directory = directory[1:]
+
+ # Open the file and explore its contents...
+ self.log.info("Siphoning from %s." % filename)
+ directives = {}
+ with open(filename) as fd:
+ siphon = None
+ close_siphon = None
+ siphon_block = ""
+ in_block = False
+ line_num = 0
+ siphon_line = 0
+
+ for line in fd:
+ line_num += 1
+ str = line[:-1] # filter \n
+
+ """See if there is a block directive and if so extract it"""
+ def process_block_directive(str, directives):
+ m = self.siphon_block_directive.search(str)
+ if m is not None:
+ k = m.group(2)
+ v = m.group(3).strip()
+ directives[k] = v
+ # Return only the parts we did not match
+ return str[0:m.start(1)] + str[m.end(4):]
+
+ return str
+
+ def process_block_prefix(str):
+ if str.startswith(" * "):
+ str = str[3:]
+ elif str == " *":
+ str = ""
+ return str
+
+ if not in_block:
+ # See if the line contains the start of a siphon doc block
+ m = self.siphon_block_start.search(str)
+ if m is not None:
+ in_block = True
+ t = m.group(1)
+
+ # Now check if the block closes on the same line
+ m = self.siphon_block_stop.search(t)
+ if m is not None:
+ t = m.group(1)
+ in_block = False
+
+ # Check for directives
+ t = process_block_directive(t, directives)
+
+ # Filter for normal comment prefixes
+ t = process_block_prefix(t)
+
+ # Add what is left
+ siphon_block += t
+
+ # Skip to next line
+ continue
+
+ else:
+ # Check to see if we have an end block marker
+ m = self.siphon_block_stop.search(str)
+ if m is not None:
+ in_block = False
+ t = m.group(1)
+ else:
+ t = str
+
+ # Check for directives
+ t = process_block_directive(t, directives)
+
+ # Filter for normal comment prefixes
+ t = process_block_prefix(t)
+
+ # Add what is left
+ siphon_block += t + "\n"
+
+ # Skip to next line
+ continue
+
+
+ if siphon is None:
+ # Look for blocks we need to siphon
+ for p in siphon_patterns:
+ if p[0].match(str):
+ siphon = [ p[1], str + "\n", 0 ]
+ siphon_line = line_num
+
+ # see if we have an initializer
+ m = self.siphon_initializer.search(str)
+ if m is not None:
+ # count the braces on this line
+ (count, index) = \
+ self.count_braces(str[m.start():])
+ siphon[2] = count
+ # TODO - it's possible we have the
+ # initializer all on the first line
+ # we should check for it, but also
+ # account for the possibility that
+ # the open brace is on the next line
+ #if count == 0:
+ # # braces balanced
+ # close_siphon = siphon
+ # siphon = None
+ else:
+ # no initializer: close the siphon right now
+ close_siphon = siphon
+ siphon = None
+ else:
+ # See if we should end the siphon here - do we have
+ # balanced braces?
+ (count, index) = self.count_braces(str,
+ count=siphon[2], found=True)
+ if count == 0:
+ # braces balanced - add the substring and
+ # close the siphon
+ siphon[1] += str[:index+1] + ";\n"
+ close_siphon = siphon
+ siphon = None
+ else:
+ # add the whole string, move on
+ siphon[2] = count
+ siphon[1] += str + "\n"
+
+ if close_siphon is not None:
+ # Write the siphoned contents to the right place
+ siphon_name = close_siphon[0]
+
+ # Copy directives for the file
+ details = {}
+ for key in directives:
+ if ":" in key:
+ (sn, label) = key.split(":")
+ if sn == siphon_name:
+ details[label] = directives[key]
+ else:
+ details[key] = directives[key]
+
+ # Copy details for this block
+ details['file'] = filename
+ details['directory'] = directory
+ details['line_start'] = siphon_line
+ details['line_end'] = line_num
+ details['siphon_block'] = siphon_block.strip()
+ details["block"] = close_siphon[1]
+
+ # Store the item
+ self.output[siphon_name]['items'].append(details)
+
+ # All done
+ close_siphon = None
+ siphon_block = ""
+
+ # Update globals
+ for key in directives.keys():
+ if ':' not in key:
+ continue
+
+ if filename.endswith("/dir.dox"):
+ # very special! use the parent directory name
+ l = directory
+ else:
+ l = filename
+
+ (sn, label) = key.split(":")
+
+ if sn not in self.output:
+ self.output[sn] = {}
+ if 'global' not in self.output[sn]:
+ self.output[sn]['global'] = {}
+ if l not in self.output[sn]['global']:
+ self.output[sn]['global'][l] = {}
+
+ self.output[sn]['global'][l][label] = directives[key]
+
+ def deliver(self):
+ # Write out the data
+ for siphon in self.output.keys():
+ self.log.info("Saving siphon data %s." % siphon)
+ s = self.output[siphon]
+ with open(s['file'], "a") as fp:
+ json.dump(s, fp,
+ separators=(',', ': '), indent=4, sort_keys=True)
+
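
For context, a minimal driver sketch for Generate (the paths below are illustrative assumptions; the real invocation lives elsewhere in the docs build scripts). It strips input_prefix from filenames, siphons matching blocks from each source file, and appends the collected JSON to <output_directory>/<siphon>.siphon:

    import siphon

    g = siphon.generate.Generate(output_directory="siphons",  # must already exist
                                 input_prefix="src")
    for source in ("src/vlib/main.c", "src/vlib/unix/cli.c"):
        g.parse(source)   # collect doc blocks and registered macro initializers
    g.deliver()           # append siphons/clicmd.siphon, siphons/syscfg.siphon
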
diff --git a/docs/_scripts/siphon/generate_clicmd.py b/docs/_scripts/siphon/generate_clicmd.py
new file mode 100644
index 00000000000..6d24aaf4926
--- /dev/null
+++ b/docs/_scripts/siphon/generate_clicmd.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from . import generate
+
+# Register our regexp
+generate.siphon_patterns.append((
+ re.compile("(?P<m>VLIB_CLI_COMMAND)\s*"
+ "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"),
+ "clicmd"
+))
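
The pattern only needs to recognise the line on which a VLIB_CLI_COMMAND definition starts; Generate.parse then counts braces to capture the rest of the initializer. A standalone sanity check of the regexp (the command name is illustrative):

    import re

    pattern = re.compile(r"(?P<m>VLIB_CLI_COMMAND)\s*"
                         r"[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]")

    m = pattern.match("VLIB_CLI_COMMAND (show_version_command, static) = {")
    print(m.group("m"), m.group("name"))
    # VLIB_CLI_COMMAND show_version_command
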
diff --git a/docs/_scripts/siphon/generate_syscfg.py b/docs/_scripts/siphon/generate_syscfg.py
new file mode 100644
index 00000000000..52c802e5752
--- /dev/null
+++ b/docs/_scripts/siphon/generate_syscfg.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from . import generate
+
+# Register our regexp
+generate.siphon_patterns.append((
+ re.compile("(?P<m>VLIB_CONFIG_FUNCTION)\s*"
+ '[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'),
+ "syscfg"
+))
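
The syscfg pattern additionally captures the handler function and the quoted configuration section name, matching statements of the form shown in the parsers below. A standalone check:

    import re

    pattern = re.compile(r"(?P<m>VLIB_CONFIG_FUNCTION)\s*"
                         r'[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]')

    m = pattern.match('VLIB_CONFIG_FUNCTION (unix_config, "unix");')
    print(m.group("fn"), m.group("name"))
    # unix_config unix
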
diff --git a/docs/_scripts/siphon/parsers.py b/docs/_scripts/siphon/parsers.py
new file mode 100644
index 00000000000..162205de4ca
--- /dev/null
+++ b/docs/_scripts/siphon/parsers.py
@@ -0,0 +1,150 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import html
+import pyparsing as pp
+
+# Some useful primitives
+ident = pp.Word(pp.alphas + "_", pp.alphas + pp.nums + "_")
+intNum = pp.Word(pp.nums)
+hexNum = pp.Literal("0x") + pp.Word(pp.hexnums)
+octalNum = pp.Literal("0") + pp.Word("01234567")
+integer = (hexNum | octalNum | intNum) + \
+ pp.Optional(pp.Literal("ULL") | pp.Literal("LL") | pp.Literal("L"))
+floatNum = pp.Regex(r'\d+(\.\d*)?([eE]\d+)?') + pp.Optional(pp.Literal("f"))
+char = pp.Literal("'") + pp.Word(pp.printables, exact=1) + pp.Literal("'")
+arrayIndex = integer | ident
+
+lbracket = pp.Literal("(").suppress()
+rbracket = pp.Literal(")").suppress()
+lbrace = pp.Literal("{").suppress()
+rbrace = pp.Literal("}").suppress()
+comma = pp.Literal(",").suppress()
+equals = pp.Literal("=").suppress()
+dot = pp.Literal(".").suppress()
+semicolon = pp.Literal(";").suppress()
+
+# initializer := { [member = ] (variable | expression | { initializer } ) }
+typeName = ident
+varName = ident
+typeSpec = pp.Optional("unsigned") + \
+ pp.oneOf("int long short float double char u8 i8 void") + \
+ pp.Optional(pp.Word("*"), default="")
+typeCast = pp.Combine( "(" + ( typeSpec | typeName ) + ")" ).suppress()
+
+string = pp.Combine(pp.OneOrMore(pp.QuotedString(quoteChar='"',
+ escChar='\\', multiline=True)), adjacent=False)
+literal = pp.Optional(typeCast) + (integer | floatNum | char | string)
+var = pp.Combine(pp.Optional(typeCast) + varName +
+ pp.Optional("[" + arrayIndex + "]"))
+
+# This could be more complete, but suffices for our uses
+expr = (literal | var)
+
+"""Parse and render a block of text into a Python dictionary."""
+class Parser(object):
+ """Compiled PyParsing BNF"""
+ _parser = None
+
+ def __init__(self):
+ super(Parser, self).__init__()
+ self._parser = self.BNF()
+
+ def BNF(self):
+ raise NotImplementedError
+
+ def item(self, item):
+ raise NotImplementedError
+
+ def parse(self, input):
+ item = self._parser.parseString(input).asList()
+ return self.item(item)
+
+
+"""Parser for function-like macros - without the closing semi-colon."""
+class ParserFunctionMacro(Parser):
+ def BNF(self):
+ # VLIB_CONFIG_FUNCTION (unix_config, "unix")
+ macroName = ident
+ params = pp.Group(pp.ZeroOrMore(expr + comma) + expr)
+ macroParams = lbracket + params + rbracket
+
+ return macroName + macroParams
+
+ def item(self, item):
+ r = {
+ "macro": item[0],
+ "name": item[1][1],
+ "function": item[1][0],
+ }
+
+ return r
+
+
+"""Parser for function-like macros with a closing semi-colon."""
+class ParseFunctionMacroStmt(ParserFunctionMacro):
+ def BNF(self):
+ # VLIB_CONFIG_FUNCTION (unix_config, "unix");
+ function_macro = super(ParseFunctionMacroStmt, self).BNF()
+ mi = function_macro + semicolon
+ mi.ignore(pp.cppStyleComment)
+
+ return mi
+
+
+"""
+Parser for our struct initializers which are composed from a
+function-like macro, equals sign, and then a normal C struct initializer
+block.
+"""
+class MacroInitializer(ParserFunctionMacro):
+ def BNF(self):
+ # VLIB_CLI_COMMAND (show_sr_tunnel_command, static) = {
+ # .path = "show sr tunnel",
+ # .short_help = "show sr tunnel [name <sr-tunnel-name>]",
+ # .function = show_sr_tunnel_fn,
+ # };
+ cs = pp.Forward()
+
+
+ member = pp.Combine(dot + varName + pp.Optional("[" + arrayIndex + "]"),
+ adjacent=False)
+ value = (expr | cs)
+
+ entry = pp.Group(pp.Optional(member + equals, default="") + value)
+ entries = (pp.ZeroOrMore(entry + comma) + entry + pp.Optional(comma)) | \
+ (pp.ZeroOrMore(entry + comma))
+
+ cs << (lbrace + entries + rbrace)
+
+ macroName = ident
+ params = pp.Group(pp.ZeroOrMore(expr + comma) + expr)
+ macroParams = lbracket + params + rbracket
+
+ function_macro = super(MacroInitializer, self).BNF()
+ mi = function_macro + equals + pp.Group(cs) + semicolon
+ mi.ignore(pp.cppStyleComment)
+
+ return mi
+
+ def item(self, item):
+ r = {
+ "macro": item[0],
+ "name": item[1][0],
+ "params": item[2],
+ "value": {},
+ }
+
+ for param in item[2]:
+ r["value"][param[0]] = html.escape(param[1])
+
+ return r
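
These parsers can be exercised directly on a siphoned block. A small sketch, assuming pyparsing is installed, docs/_scripts is on sys.path, and using an illustrative CLI command block:

    from siphon import parsers

    block = '''VLIB_CLI_COMMAND (show_version_command, static) = {
      .path = "show version",
      .short_help = "show version information",
      .function = show_version_fn,
    };'''

    p = parsers.MacroInitializer()
    r = p.parse(block)
    print(r["name"])            # show_version_command
    print(r["value"]["path"])   # show version
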
diff --git a/docs/_scripts/siphon/process.py b/docs/_scripts/siphon/process.py
new file mode 100644
index 00000000000..e3a70152487
--- /dev/null
+++ b/docs/_scripts/siphon/process.py
@@ -0,0 +1,407 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generation template class
+
+import html.parser
+import json
+import logging
+import os
+import sys
+import re
+
+import jinja2
+
+# Classes register themselves in this dictionary
+"""Mapping of known processors to their classes"""
+siphons = {}
+
+"""Mapping of known output formats to their classes"""
+formats = {}
+
+
+class Siphon(object):
+ """Generate rendered output for siphoned data."""
+
+ # Set by subclasses
+ """Our siphon name"""
+ name = None
+
+ # Set by subclasses
+ """Name of an identifier used by this siphon"""
+ identifier = None
+
+ # Set by subclasses
+ """The pyparsing object to use to parse with"""
+ _parser = None
+
+ """The input data"""
+ _cmds = None
+
+ """Group key to (directory,file) mapping"""
+ _group = None
+
+ """Logging handler"""
+ log = None
+
+ """Directory to look for siphon rendering templates"""
+ template_directory = None
+
+ """Directory to output parts in"""
+ outdir = None
+
+ """Template environment, if we're using templates"""
+ _tplenv = None
+
+ def __init__(self, template_directory, format, outdir, repository_link):
+ super(Siphon, self).__init__()
+ self.log = logging.getLogger("siphon.process.%s" % self.name)
+
+ # Get our output format details
+ fmt_klass = formats[format]
+ fmt = fmt_klass()
+ self._format = fmt
+
+ # Sort out the template search path
+ def _tpldir(name):
+ return os.sep.join((template_directory, fmt.name, name))
+
+ self.template_directory = template_directory
+ searchpath = [
+ _tpldir(self.name),
+ _tpldir("default"),
+ ]
+ self.outdir = outdir
+ loader = jinja2.FileSystemLoader(searchpath=searchpath)
+ self._tplenv = jinja2.Environment(
+ loader=loader,
+ trim_blocks=True,
+ autoescape=False,
+ keep_trailing_newline=True)
+
+        # Convenience: keep references to the html module's escape and
+        # unescape helpers (and an html.parser.HTMLParser instance) so
+        # that templates can use them, if needed.
+ self._h = html.parser.HTMLParser()
+ self.escape = html.escape
+ self.unescape = html.unescape
+
+ # TODO: customize release
+ self.repository_link = repository_link
+
+ # Output renderers
+
+ """Returns an object to be used as the sorting key in the item index."""
+ def index_sort_key(self, group):
+ return group
+
+ """Returns a string to use as the header at the top of the item index."""
+ def index_header(self):
+ return self.template("index_header")
+
+ """Returns the string fragment to use for each section in the item
+ index."""
+ def index_section(self, group):
+ return self.template("index_section", group=group)
+
+ """Returns the string fragment to use for each entry in the item index."""
+ def index_entry(self, meta, item):
+ return self.template("index_entry", meta=meta, item=item)
+
+ """Returns an object, typically a string, to be used as the sorting key
+ for items within a section."""
+ def item_sort_key(self, item):
+ return item['name']
+
+ """Returns a key for grouping items together."""
+ def group_key(self, directory, file, macro, name):
+ _global = self._cmds['_global']
+
+ if file in _global and 'group_label' in _global[file]:
+ self._group[file] = (directory, file)
+ return file
+
+ self._group[directory] = (directory, None)
+ return directory
+
+ """Returns a key for identifying items within a grouping."""
+ def item_key(self, directory, file, macro, name):
+ return name
+
+ """Returns a string to use as the header when rendering the item."""
+ def item_header(self, group):
+ return self.template("item_header", group=group)
+
+ """Returns a string to use as the body when rendering the item."""
+ def item_format(self, meta, item):
+ return self.template("item_format", meta=meta, item=item)
+
+ """Returns a string to use as the label for the page reference."""
+ def page_label(self, group):
+ return "_".join((
+ self.name,
+ self.sanitize_label(group)
+ ))
+
+ """Returns a title to use for a page."""
+ def page_title(self, group):
+ _global = self._cmds['_global']
+ (directory, file) = self._group[group]
+
+ if file and file in _global and 'group_label' in _global[file]:
+ return _global[file]['group_label']
+
+ if directory in _global and 'group_label' in _global[directory]:
+ return _global[directory]['group_label']
+
+ return directory
+
+ """Returns a string to use as the label for the section reference."""
+ def item_label(self, group, item):
+ return "__".join((
+ self.name,
+ item
+ ))
+
+ """Label sanitizer; for creating Doxygen references"""
+ def sanitize_label(self, value):
+ return value.replace(" ", "_") \
+ .replace("/", "_") \
+ .replace(".", "_")
+
+ """Template processor"""
+ def template(self, name, **kwargs):
+ tpl = self._tplenv.get_template(name + self._format.extension)
+ return tpl.render(
+ this=self,
+ **kwargs)
+
+ # Processing methods
+
+ """Parse the input file into a more usable dictionary structure."""
+ def load_json(self, files):
+ self._cmds = {}
+ self._group = {}
+
+ line_num = 0
+ line_start = 0
+ for filename in files:
+ filename = os.path.relpath(filename)
+ self.log.info("Parsing items in file \"%s\"." % filename)
+ data = None
+ with open(filename, "r") as fd:
+ data = json.load(fd)
+
+ self._cmds['_global'] = data['global']
+
+ # iterate the items loaded and regroup it
+ for item in data["items"]:
+ try:
+ o = self._parser.parse(item['block'])
+ except Exception:
+ self.log.error("Exception parsing item: %s\n%s"
+ % (json.dumps(item, separators=(',', ': '),
+ indent=4),
+ item['block']))
+ raise
+
+ # Augment the item with metadata
+ o["meta"] = {}
+ for key in item:
+ if key == 'block':
+ continue
+ o['meta'][key] = item[key]
+
+ # Load some interesting fields
+ directory = item['directory']
+ file = item['file']
+ macro = o["macro"]
+ name = o["name"]
+
+ # Generate keys to group items by
+ group_key = self.group_key(directory, file, macro, name)
+ item_key = self.item_key(directory, file, macro, name)
+
+ if group_key not in self._cmds:
+ self._cmds[group_key] = {}
+
+ self._cmds[group_key][item_key] = o
+
+ """Iterate over the input data, calling render methods to generate the
+ output."""
+ def process(self, out=None):
+
+ if out is None:
+ out = sys.stdout
+
+ # Accumulated body contents
+ contents = ""
+
+ # Write the header for this siphon type
+ out.write(self.index_header())
+
+ # Sort key helper for the index
+ def group_sort_key(group):
+ return self.index_sort_key(group)
+
+ # Iterate the dictionary and process it
+ for group in sorted(self._cmds.keys(), key=group_sort_key):
+ if group.startswith('_'):
+ continue
+
+ self.log.info("Processing items in group \"%s\" (%s)." %
+ (group, group_sort_key(group)))
+
+ # Generate the section index entry (write it now)
+ out.write(self.index_section(group))
+
+ # Generate the item header (save for later)
+ contents += self.item_header(group)
+
+ def item_sort_key(key):
+ return self.item_sort_key(self._cmds[group][key])
+
+ for key in sorted(self._cmds[group].keys(), key=item_sort_key):
+ self.log.debug("--- Processing key \"%s\" (%s)." %
+ (key, item_sort_key(key)))
+
+ o = self._cmds[group][key]
+ meta = {
+ "directory": o['meta']['directory'],
+ "file": o['meta']['file'],
+ "macro": o['macro'],
+ "name": o['name'],
+ "key": key,
+ "label": self.item_label(group, key),
+ }
+
+ # Generate the index entry for the item (write it now)
+ out.write(self.index_entry(meta, o))
+
+ # Generate the item itself (save for later)
+ contents += self.item_format(meta, o)
+
+ page_name = self.separate_page_names(group)
+ if page_name != "":
+ path = os.path.join(self.outdir, page_name)
+ with open(path, "w+") as page:
+ page.write(contents)
+ contents = ""
+
+ # Deliver the accumulated body output
+ out.write(contents)
+
+ def do_cliexstart(self, matchobj):
+ title = matchobj.group(1)
+ title = ' '.join(title.splitlines())
+ content = matchobj.group(2)
+ content = re.sub(r"\n", r"\n ", content)
+ return "\n\n.. code-block:: console\n\n %s\n %s\n\n" % (title, content)
+
+ def do_clistart(self, matchobj):
+ content = matchobj.group(1)
+ content = re.sub(r"\n", r"\n ", content)
+ return "\n\n.. code-block:: console\n\n %s\n\n" % content
+
+ def do_cliexcmd(self, matchobj):
+ content = matchobj.group(1)
+ content = ' '.join(content.splitlines())
+ return "\n\n.. code-block:: console\n\n %s\n\n" % content
+
+ def process_list(self, matchobj):
+ content = matchobj.group(1)
+ content = self.reindent(content, 2)
+ return "@@@@%s\nBBBB" % content
+
+ def process_special(self, s):
+ # ----------- markers to remove
+ s = re.sub(r"@cliexpar\s*", r"", s)
+ s = re.sub(r"@parblock\s*", r"", s)
+ s = re.sub(r"@endparblock\s*", r"", s)
+ s = re.sub(r"<br>", "", s)
+ # ----------- emphasis
+ # <b><em>
+ s = re.sub(r"<b><em>\s*", "``", s)
+ s = re.sub(r"\s*</b></em>", "``", s)
+ s = re.sub(r"\s*</em></b>", "``", s)
+ # <b>
+ s = re.sub(r"<b>\s*", "**", s)
+ s = re.sub(r"\s*</b>", "**", s)
+ # <code>
+ s = re.sub(r"<code>\s*", "``", s)
+ s = re.sub(r"\s*</code>", "``", s)
+ # <em>
+ s = re.sub(r"'?<em>\s*", r"``", s)
+ s = re.sub(r"\s*</em>'?", r"``", s)
+ # @c <something>
+ s = re.sub(r"@c\s(\S+)", r"``\1``", s)
+ # ----------- todos
+ s = re.sub(r"@todo[^\n]*", "", s)
+ s = re.sub(r"@TODO[^\n]*", "", s)
+ # ----------- code blocks
+ s = re.sub(r"@cliexcmd{(.+?)}", self.do_cliexcmd, s, flags=re.DOTALL)
+ s = re.sub(r"@cliexstart{(.+?)}(.+?)@cliexend", self.do_cliexstart, s, flags=re.DOTALL)
+ s = re.sub(r"@clistart(.+?)@cliend", self.do_clistart, s, flags=re.DOTALL)
+ # ----------- lists
+ s = re.sub(r"^\s*-", r"\n@@@@", s, flags=re.MULTILINE)
+ s = re.sub(r"@@@@(.*?)\n\n+", self.process_list, s, flags=re.DOTALL)
+ s = re.sub(r"BBBB@@@@", r"-", s)
+ s = re.sub(r"@@@@", r"-", s)
+ s = re.sub(r"BBBB", r"\n\n", s)
+ # ----------- Cleanup remains
+ s = re.sub(r"@cliexend\s*", r"", s)
+ return s
+
+ def separate_page_names(self, group):
+ return ""
+
+    # This pushes the given text block <indent> spaces to the right
+ def reindent(self, s, indent):
+ ind = " " * indent
+ s = re.sub(r"\n", "\n" + ind, s)
+ return s
+
+    # This aligns the given text block to the left (no indent)
+ def noindent(self, s):
+ s = re.sub(r"\n[ \f\v\t]*", "\n", s)
+ return s
+
+class Format(object):
+ """Output format class"""
+
+ """Name of this output format"""
+ name = None
+
+ """Expected file extension of templates that build this format"""
+ extension = None
+
+
+class FormatMarkdown(Format):
+ """Markdown output format"""
+ name = "markdown"
+ extension = ".md"
+
+
+# Register 'markdown'
+formats["markdown"] = FormatMarkdown
+
+
+class FormatItemlist(Format):
+ """Itemlist output format"""
+ name = "itemlist"
+ extension = ".itemlist"
+
+
+# Register 'itemlist'
+formats["itemlist"] = FormatItemlist
diff --git a/docs/_scripts/siphon/process_clicmd.py b/docs/_scripts/siphon/process_clicmd.py
new file mode 100644
index 00000000000..bf270518ad1
--- /dev/null
+++ b/docs/_scripts/siphon/process_clicmd.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate clicmd formatted output
+
+from . import process, parsers
+import os
+
+class SiphonCLICMD(process.Siphon):
+
+ name = "clicmd"
+ identifier = "VLIB_CLI_COMMAND"
+
+ def __init__(self, *args, **kwargs):
+ super(SiphonCLICMD, self).__init__(*args, **kwargs)
+ self._parser = parsers.MacroInitializer()
+
+ # Output renderers
+
+ def separate_page_names(self, group):
+ return self.page_label(group) + ".rst"
+
+ def index_sort_key(self, group):
+ _global = self._cmds['_global']
+ if group not in self._group:
+ return group
+ (directory, file) = self._group[group]
+
+ if file in _global and 'group_label' in _global[file]:
+ return _global[file]['group_label']
+
+ if directory in _global and 'group_label' in _global[directory]:
+ return _global[directory]['group_label']
+
+ return group
+
+ def item_sort_key(self, item):
+ return item['value']['path']
+
+ def item_label(self, group, item):
+ return "_".join((
+ self.name,
+ self.sanitize_label(self._cmds[group][item]['value']['path'])
+ ))
+
+ def page_title(self, group):
+ _global = self._cmds['_global']
+ (directory, file) = self._group[group]
+
+ if file and file in _global and 'group_label' in _global[file]:
+ return _global[file]['group_label']
+
+ if directory in _global and 'group_label' in _global[directory]:
+ return _global[directory]['group_label']
+
+ file_ext = os.path.basename(directory)
+ fname, ext = os.path.splitext(file_ext)
+ return "%s cli reference" % fname.capitalize()
+
+
+# Register our processor
+process.siphons["clicmd"] = SiphonCLICMD
diff --git a/docs/_scripts/siphon/process_syscfg.py b/docs/_scripts/siphon/process_syscfg.py
new file mode 100644
index 00000000000..bccde2c153d
--- /dev/null
+++ b/docs/_scripts/siphon/process_syscfg.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate syscfg formatted output
+
+from . import process, parsers
+
+
+class SiphonSYSCFG(process.Siphon):
+
+ name = "syscfg"
+ identifier = "VLIB_CONFIG_FUNCTION"
+
+ def __init__(self, *args, **kwargs):
+ super(SiphonSYSCFG, self).__init__(*args, **kwargs)
+ self._parser = parsers.ParseFunctionMacroStmt()
+
+
+# Register our processor
+process.siphons["syscfg"] = SiphonSYSCFG