Diffstat (limited to 'doxygen/siphon')
-rw-r--r-- | doxygen/siphon/__init__.py        |  24
-rw-r--r-- | doxygen/siphon/generate.py        | 304
-rw-r--r-- | doxygen/siphon/generate_clicmd.py |  22
-rw-r--r-- | doxygen/siphon/generate_syscfg.py |  22
-rw-r--r-- | doxygen/siphon/parsers.py         | 149
-rw-r--r-- | doxygen/siphon/process.py         | 271
-rw-r--r-- | doxygen/siphon/process_clicmd.py  |  56
-rw-r--r-- | doxygen/siphon/process_syscfg.py  |  30
8 files changed, 878 insertions, 0 deletions
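These files add a two-stage documentation pipeline: the generate_* modules scan C sources for registered macro patterns and dump what they find as JSON ".siphon" files, and the process_* modules parse those fragments and render them through Jinja2 templates. As a quick illustration of what the clicmd siphon looks for (this sketch is not part of the commit; the sample C line is borrowed from the comment in parsers.py):

    # Illustrative sketch only -- exercises the pattern registered by
    # generate_clicmd.py against one sample source line.
    import re

    clicmd = re.compile("(?P<m>VLIB_CLI_COMMAND)\s*"
                        "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]")

    line = 'VLIB_CLI_COMMAND (show_sr_tunnel_command, static) = {'
    m = clicmd.match(line)
    print(m.group("name"))   # -> show_sr_tunnel_command
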
diff --git a/doxygen/siphon/__init__.py b/doxygen/siphon/__init__.py
new file mode 100644
index 00000000000..437a1df1597
--- /dev/null
+++ b/doxygen/siphon/__init__.py
@@ -0,0 +1,24 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Siphon classes
+
+import generate
+import generate_clicmd
+import generate_syscfg
+
+import parsers
+import process
+import process_clicmd
+import process_syscfg
diff --git a/doxygen/siphon/generate.py b/doxygen/siphon/generate.py
new file mode 100644
index 00000000000..d6b6faf34f7
--- /dev/null
+++ b/doxygen/siphon/generate.py
@@ -0,0 +1,304 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate .siphon source fragments for later processing
+
+import logging
+import os, sys, re, json
+
+"""List of (regexp, siphon_name) tuples for matching the start of C
+   initializer blocks in source files. Each siphon class registers
+   itself on this list."""
+siphon_patterns = []
+
+class Generate(object):
+    """Matches a siphon comment block start"""
+    siphon_block_start = re.compile("^\s*/\*\?\s*(.*)$")
+
+    """Matches a siphon comment block stop"""
+    siphon_block_stop = re.compile("^(.*)\s*\?\*/\s*$")
+
+    """Siphon block directive delimiter"""
+    siphon_block_delimiter = "%%"
+
+    """Matches a siphon block directive such as
+       '%clicmd:group_label Debug CLI%'"""
+    siphon_block_directive = re.compile("(%s)\s*([a-zA-Z0-9_:]+)\s+(.*)\s*(%s)" % \
+            (siphon_block_delimiter, siphon_block_delimiter))
+
+    """Matches the start of an initializer block"""
+    siphon_initializer = re.compile("\s*=")
+
+    """Collated output for each siphon"""
+    output = None
+
+    """Directory prefix to strip from input filenames to keep things tidy."""
+    input_prefix = None
+
+    """List of known siphons"""
+    known_siphons = None
+
+    """Logging handler"""
+    log = None
+
+
+    def __init__(self, output_directory, input_prefix):
+        super(Generate, self).__init__()
+        self.log = logging.getLogger("siphon.generate")
+
+        # Build a list of known siphons
+        self.known_siphons = []
+        for item in siphon_patterns:
+            siphon = item[1]
+            if siphon not in self.known_siphons:
+                self.known_siphons.append(siphon)
+
+        # Setup information for siphons we know about
+        self.output = {}
+        for siphon in self.known_siphons:
+            self.output[siphon] = {
+                "file": "%s/%s.siphon" % (output_directory, siphon),
+                "global": {},
+                "items": [],
+            }
+
+        self.input_prefix = input_prefix
+
+
+    """
+    count open and close braces in str
+    return (0, index) when braces were found and count becomes 0.
+    index indicates the position at which the last closing brace was
+    found.
+    return (-1, -1) if a closing brace is found before any opening one.
+    return (count, -1) if not all opening braces are closed, count is the
+    current depth
+    """
+    def count_braces(self, str, count=0, found=False):
+        for index in range(0, len(str)):
+            if str[index] == '{':
+                count += 1;
+                found = True
+            elif str[index] == '}':
+                if count == 0:
+                    # means we never found an open brace
+                    return (-1, -1)
+                count -= 1;
+
+            if count == 0 and found:
+                return (count, index)
+
+        return (count, -1)
+
+    def parse(self, filename):
+        # Strip the current directory off the start of the
+        # filename for brevity
+        if filename[0:len(self.input_prefix)] == self.input_prefix:
+            filename = filename[len(self.input_prefix):]
+            if filename[0] == "/":
+                filename = filename[1:]
+
+        # Work out the abbreviated directory name
+        directory = os.path.dirname(filename)
+        if directory[0:2] == "./":
+            directory = directory[2:]
+        elif directory[0:len(self.input_prefix)] == self.input_prefix:
+            directory = directory[len(self.input_prefix):]
+        if directory[0] == "/":
+            directory = directory[1:]
+
+        # Open the file and explore its contents...
+        self.log.info("Siphoning from %s." % filename)
+        directives = {}
+        with open(filename) as fd:
+            siphon = None
+            close_siphon = None
+            siphon_block = ""
+            in_block = False
+            line_num = 0
+            siphon_line = 0
+
+            for line in fd:
+                line_num += 1
+                str = line[:-1] # filter \n
+
+                """See if there is a block directive and if so extract it"""
+                def process_block_directive(str, directives):
+                    m = self.siphon_block_directive.search(str)
+                    if m is not None:
+                        k = m.group(2)
+                        v = m.group(3).strip()
+                        directives[k] = v
+                        # Return only the parts we did not match
+                        return str[0:m.start(1)] + str[m.end(4):]
+
+                    return str
+
+                def process_block_prefix(str):
+                    if str.startswith(" * "):
+                        str = str[3:]
+                    elif str == " *":
+                        str = ""
+                    return str
+
+                if not in_block:
+                    # See if the line contains the start of a siphon doc block
+                    m = self.siphon_block_start.search(str)
+                    if m is not None:
+                        in_block = True
+                        t = m.group(1)
+
+                        # Now check if the block closes on the same line
+                        m = self.siphon_block_stop.search(t)
+                        if m is not None:
+                            t = m.group(1)
+                            in_block = False
+
+                        # Check for directives
+                        t = process_block_directive(t, directives)
+
+                        # Filter for normal comment prefixes
+                        t = process_block_prefix(t)
+
+                        # Add what is left
+                        siphon_block += t
+
+                        # Skip to next line
+                        continue
+
+                else:
+                    # Check to see if we have an end block marker
+                    m = self.siphon_block_stop.search(str)
+                    if m is not None:
+                        in_block = False
+                        t = m.group(1)
+                    else:
+                        t = str
+
+                    # Check for directives
+                    t = process_block_directive(t, directives)
+
+                    # Filter for normal comment prefixes
+                    t = process_block_prefix(t)
+
+                    # Add what is left
+                    siphon_block += t + "\n"
+
+                    # Skip to next line
+                    continue
+
+
+                if siphon is None:
+                    # Look for blocks we need to siphon
+                    for p in siphon_patterns:
+                        if p[0].match(str):
+                            siphon = [ p[1], str + "\n", 0 ]
+                            siphon_line = line_num
+
+                            # see if we have an initializer
+                            m = self.siphon_initializer.search(str)
+                            if m is not None:
+                                # count the braces on this line
+                                (count, index) = \
+                                    self.count_braces(str[m.start():])
+                                siphon[2] = count
+                                # TODO - it's possible we have the
+                                # initializer all on the first line
+                                # we should check for it, but also
+                                # account for the possibility that
+                                # the open brace is on the next line
+                                #if count == 0:
+                                #    # braces balanced
+                                #    close_siphon = siphon
+                                #    siphon = None
+                            else:
+                                # no initializer: close the siphon right now
+                                close_siphon = siphon
+                                siphon = None
+                else:
+                    # See if we should end the siphon here - do we have
+                    # balanced braces?
+                    (count, index) = self.count_braces(str,
+                        count=siphon[2], found=True)
+                    if count == 0:
+                        # braces balanced - add the substring and
+                        # close the siphon
+                        siphon[1] += str[:index+1] + ";\n"
+                        close_siphon = siphon
+                        siphon = None
+                    else:
+                        # add the whole string, move on
+                        siphon[2] = count
+                        siphon[1] += str + "\n"
+
+                if close_siphon is not None:
+                    # Write the siphoned contents to the right place
+                    siphon_name = close_siphon[0]
+
+                    # Copy directives for the file
+                    details = {}
+                    for key in directives:
+                        if ":" in key:
+                            (sn, label) = key.split(":")
+                            if sn == siphon_name:
+                                details[label] = directives[key]
+                        else:
+                            details[key] = directives[key]
+
+                    # Copy details for this block
+                    details['file'] = filename
+                    details['directory'] = directory
+                    details['line_start'] = siphon_line
+                    details['line_end'] = line_num
+                    details['siphon_block'] = siphon_block.strip()
+                    details["block"] = close_siphon[1]
+
+                    # Store the item
+                    self.output[siphon_name]['items'].append(details)
+
+                    # All done
+                    close_siphon = None
+                    siphon_block = ""
+
+            # Update globals
+            for key in directives.keys():
+                if ':' not in key:
+                    continue
+
+                if filename.endswith("/dir.dox"):
+                    # very special! use the parent directory name
+                    l = directory
+                else:
+                    l = filename
+
+                (sn, label) = key.split(":")
+
+                if sn not in self.output:
+                    self.output[sn] = {}
+                if 'global' not in self.output[sn]:
+                    self.output[sn]['global'] = {}
+                if l not in self.output[sn]['global']:
+                    self.output[sn]['global'][l] = {}
+
+                self.output[sn]['global'][l][label] = directives[key]
+
+    def deliver(self):
+        # Write out the data
+        for siphon in self.output.keys():
+            self.log.info("Saving siphon data %s." % siphon)
+            s = self.output[siphon]
+            with open(s['file'], "a") as fp:
+                json.dump(s, fp,
+                    separators=(',', ': '), indent=4, sort_keys=True)
diff --git a/doxygen/siphon/generate_clicmd.py b/doxygen/siphon/generate_clicmd.py
new file mode 100644
index 00000000000..7b13111028d
--- /dev/null
+++ b/doxygen/siphon/generate_clicmd.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import generate, re
+
+# Register our regexp
+generate.siphon_patterns.append((
+    re.compile("(?P<m>VLIB_CLI_COMMAND)\s*"
+        "[(](?P<name>[a-zA-Z0-9_]+)(,[^)]*)?[)]"),
+    "clicmd"
+))
diff --git a/doxygen/siphon/generate_syscfg.py b/doxygen/siphon/generate_syscfg.py
new file mode 100644
index 00000000000..c77936a9dc8
--- /dev/null
+++ b/doxygen/siphon/generate_syscfg.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import generate, re
+
+# Register our regexp
+generate.siphon_patterns.append((
+    re.compile("(?P<m>VLIB_CONFIG_FUNCTION)\s*"
+        '[(](?P<fn>[a-zA-Z0-9_]+)\s*,\s*"(?P<name>[^"]*)"[)]'),
+    "syscfg"
+))
diff --git a/doxygen/siphon/parsers.py b/doxygen/siphon/parsers.py
new file mode 100644
index 00000000000..6fe8600d4b3
--- /dev/null
+++ b/doxygen/siphon/parsers.py
@@ -0,0 +1,149 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import cgi, pyparsing as pp
+
+# Some useful primitives
+ident = pp.Word(pp.alphas + "_", pp.alphas + pp.nums + "_")
+intNum = pp.Word(pp.nums)
+hexNum = pp.Literal("0x") + pp.Word(pp.hexnums)
+octalNum = pp.Literal("0") + pp.Word("01234567")
+integer = (hexNum | octalNum | intNum) + \
+    pp.Optional(pp.Literal("ULL") | pp.Literal("LL") | pp.Literal("L"))
+floatNum = pp.Regex(r'\d+(\.\d*)?([eE]\d+)?') + pp.Optional(pp.Literal("f"))
+char = pp.Literal("'") + pp.Word(pp.printables, exact=1) + pp.Literal("'")
+arrayIndex = integer | ident
+
+lbracket = pp.Literal("(").suppress()
+rbracket = pp.Literal(")").suppress()
+lbrace = pp.Literal("{").suppress()
+rbrace = pp.Literal("}").suppress()
+comma = pp.Literal(",").suppress()
+equals = pp.Literal("=").suppress()
+dot = pp.Literal(".").suppress()
+semicolon = pp.Literal(";").suppress()
+
+# initializer := { [member = ] (variable | expression | { initializer } ) }
+typeName = ident
+varName = ident
+typeSpec = pp.Optional("unsigned") + \
+    pp.oneOf("int long short float double char u8 i8 void") + \
+    pp.Optional(pp.Word("*"), default="")
+typeCast = pp.Combine( "(" + ( typeSpec | typeName ) + ")" ).suppress()
+
+string = pp.Combine(pp.OneOrMore(pp.QuotedString(quoteChar='"',
+    escChar='\\', multiline=True)), adjacent=False)
+literal = pp.Optional(typeCast) + (integer | floatNum | char | string)
+var = pp.Combine(pp.Optional(typeCast) + varName +
+    pp.Optional("[" + arrayIndex + "]"))
+
+# This could be more complete, but suffices for our uses
+expr = (literal | var)
+
+"""Parse and render a block of text into a Python dictionary."""
+class Parser(object):
+    """Compiled PyParsing BNF"""
+    _parser = None
+
+    def __init__(self):
+        super(Parser, self).__init__()
+        self._parser = self.BNF()
+
+    def BNF(self):
+        raise NotImplementedError
+
+    def item(self, item):
+        raise NotImplementedError
+
+    def parse(self, input):
+        item = self._parser.parseString(input).asList()
+        return self.item(item)
+
+
+"""Parser for function-like macros - without the closing semi-colon."""
+class ParserFunctionMacro(Parser):
+    def BNF(self):
+        # VLIB_CONFIG_FUNCTION (unix_config, "unix")
+        macroName = ident
+        params = pp.Group(pp.ZeroOrMore(expr + comma) + expr)
+        macroParams = lbracket + params + rbracket
+
+        return macroName + macroParams
+
+    def item(self, item):
+        r = {
+            "macro": item[0],
+            "name": item[1][1],
+            "function": item[1][0],
+        }
+
+        return r
+
+
+"""Parser for function-like macros with a closing semi-colon."""
+class ParseFunctionMacroStmt(ParserFunctionMacro):
+    def BNF(self):
+        # VLIB_CONFIG_FUNCTION (unix_config, "unix");
+        function_macro = super(ParseFunctionMacroStmt, self).BNF()
+        mi = function_macro + semicolon
+        mi.ignore(pp.cppStyleComment)
+
+        return mi
+
+
+"""
+Parser for our struct initializers which are composed from a
+function-like macro, equals sign, and then a normal C struct initializer
+block.
+"""
+class MacroInitializer(ParserFunctionMacro):
+    def BNF(self):
+        # VLIB_CLI_COMMAND (show_sr_tunnel_command, static) = {
+        #    .path = "show sr tunnel",
+        #    .short_help = "show sr tunnel [name <sr-tunnel-name>]",
+        #    .function = show_sr_tunnel_fn,
+        # };
+        cs = pp.Forward()
+
+
+        member = pp.Combine(dot + varName + pp.Optional("[" + arrayIndex + "]"),
+            adjacent=False)
+        value = (expr | cs)
+
+        entry = pp.Group(pp.Optional(member + equals, default="") + value)
+        entries = (pp.ZeroOrMore(entry + comma) + entry + pp.Optional(comma)) | \
+            (pp.ZeroOrMore(entry + comma))
+
+        cs << (lbrace + entries + rbrace)
+
+        macroName = ident
+        params = pp.Group(pp.ZeroOrMore(expr + comma) + expr)
+        macroParams = lbracket + params + rbracket
+
+        function_macro = super(MacroInitializer, self).BNF()
+        mi = function_macro + equals + pp.Group(cs) + semicolon
+        mi.ignore(pp.cppStyleComment)
+
+        return mi
+
+    def item(self, item):
+        r = {
+            "macro": item[0],
+            "name": item[1][0],
+            "params": item[2],
+            "value": {},
+        }
+
+        for param in item[2]:
+            r["value"][param[0]] = cgi.escape(param[1])
+
+        return r
diff --git a/doxygen/siphon/process.py b/doxygen/siphon/process.py
new file mode 100644
index 00000000000..c7f8f1a232b
--- /dev/null
+++ b/doxygen/siphon/process.py
@@ -0,0 +1,271 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generation template class
+
+import logging, os, sys, cgi, json, jinja2, HTMLParser
+
+# Classes register themselves in this dictionary
+"""Mapping of known processors to their classes"""
+siphons = {}
+
+
+"""Generate rendered output for siphoned data."""
+class Siphon(object):
+
+    # Set by subclasses
+    """Our siphon name"""
+    name = None
+
+    # Set by subclasses
+    """Name of an identifier used by this siphon"""
+    identifier = None
+
+    # Set by subclasses
+    """The pyparsing object to use to parse with"""
+    _parser = None
+
+    """The input data"""
+    _cmds = None
+
+    """Group key to (directory,file) mapping"""
+    _group = None
+
+    """Logging handler"""
+    log = None
+
+    """Directory to look for siphon rendering templates"""
+    template_directory = None
+
+    """Template environment, if we're using templates"""
+    _tplenv = None
+
+    def __init__(self, template_directory=None):
+        super(Siphon, self).__init__()
+        self.log = logging.getLogger("siphon.process.%s" % self.name)
+
+        if template_directory is not None:
+            self.template_directory = template_directory
+            searchpath = [
+                template_directory + "/" + self.name,
+                template_directory + "/" + "default",
+            ]
+            loader = jinja2.FileSystemLoader(searchpath=searchpath)
+            self._tplenv = jinja2.Environment(
+                loader=loader,
+                trim_blocks=True,
+                keep_trailing_newline=True)
+
+        # Convenience, get a reference to the internal escape and
+        # unescape methods in cgi and HTMLParser. These then become
+        # available to templates to use, if needed.
+        self._h = HTMLParser.HTMLParser()
+        self.escape = cgi.escape
+        self.unescape = self._h.unescape
+
+
+    # Output renderers
+
+    """Returns an object to be used as the sorting key in the item index."""
+    def index_sort_key(self, group):
+        return group
+
+    """Returns a string to use as the header at the top of the item index."""
+    def index_header(self):
+        return self.template("index_header")
+
+    """Returns the string fragment to use for each section in the item
+       index."""
+    def index_section(self, group):
+        return self.template("index_section", group=group)
+
+    """Returns the string fragment to use for each entry in the item index."""
+    def index_entry(self, meta, item):
+        return self.template("index_entry", meta=meta, item=item)
+
+    """Returns an object, typically a string, to be used as the sorting key
+       for items within a section."""
+    def item_sort_key(self, item):
+        return item['name']
+
+    """Returns a key for grouping items together."""
+    def group_key(self, directory, file, macro, name):
+        _global = self._cmds['_global']
+
+        if file in _global and 'group_label' in _global[file]:
+            self._group[file] = (directory, file)
+            return file
+
+        self._group[directory] = (directory, None)
+        return directory
+
+    """Returns a key for identifying items within a grouping."""
+    def item_key(self, directory, file, macro, name):
+        return name
+
+    """Returns a string to use as the header when rendering the item."""
+    def item_header(self, group):
+        return self.template("item_header", group=group)
+
+    """Returns a string to use as the body when rendering the item."""
+    def item_format(self, meta, item):
+        return self.template("item_format", meta=meta, item=item)
+
+    """Returns a string to use as the label for the page reference."""
+    def page_label(self, group):
+        return "_".join((
+            self.name,
+            self.sanitize_label(group)
+        ))
+
+    """Returns a title to use for a page."""
+    def page_title(self, group):
+        _global = self._cmds['_global']
+        (directory, file) = self._group[group]
+
+        if file and file in _global and 'group_label' in _global[file]:
+            return _global[file]['group_label']
+
+        if directory in _global and 'group_label' in _global[directory]:
+            return _global[directory]['group_label']
+
+        return directory
+
+    """Returns a string to use as the label for the section reference."""
+    def item_label(self, group, item):
+        return "__".join((
+            self.name,
+            item
+        ))
+
+    """Label sanitizer; for creating Doxygen references"""
+    def sanitize_label(self, value):
+        return value.replace(" ", "_") \
+                    .replace("/", "_") \
+                    .replace(".", "_")
+
+    """Template processor"""
+    def template(self, name, **kwargs):
+        tpl = self._tplenv.get_template(name + ".md")
+        return tpl.render(
+            this=self,
+            **kwargs)
+
+
+    # Processing methods
+
+    """Parse the input file into a more usable dictionary structure."""
+    def load_json(self, files):
+        self._cmds = {}
+        self._group = {}
+
+        line_num = 0
+        line_start = 0
+        for filename in files:
+            filename = os.path.relpath(filename)
+            self.log.info("Parsing items in file \"%s\"." % filename)
+            data = None
+            with open(filename, "r") as fd:
+                data = json.load(fd)
+
+            self._cmds['_global'] = data['global']
+
+            # iterate the items loaded and regroup it
+            for item in data["items"]:
+                try:
+                    o = self._parser.parse(item['block'])
+                except:
+                    self.log.error("Exception parsing item: %s\n%s" \
+                            % (json.dumps(item, separators=(',', ': '),
+                                indent=4),
+                                item['block']))
+                    raise
+
+                # Augment the item with metadata
+                o["meta"] = {}
+                for key in item:
+                    if key == 'block':
+                        continue
+                    o['meta'][key] = item[key]
+
+                # Load some interesting fields
+                directory = item['directory']
+                file = item['file']
+                macro = o["macro"]
+                name = o["name"]
+
+                # Generate keys to group items by
+                group_key = self.group_key(directory, file, macro, name)
+                item_key = self.item_key(directory, file, macro, name)
+
+                if group_key not in self._cmds:
+                    self._cmds[group_key] = {}
+
+                self._cmds[group_key][item_key] = o
+
+    """Iterate over the input data, calling render methods to generate the
+       output."""
+    def process(self, out=None):
+
+        if out is None:
+            out = sys.stdout
+
+        # Accumulated body contents
+        contents = ""
+
+        # Write the header for this siphon type
+        out.write(self.index_header())
+
+        # Sort key helper for the index
+        def group_sort_key(group):
+            return self.index_sort_key(group)
+
+        # Iterate the dictionary and process it
+        for group in sorted(self._cmds.keys(), key=group_sort_key):
+            if group.startswith('_'):
+                continue
+
+            self.log.info("Processing items in group \"%s\" (%s)." % \
+                    (group, group_sort_key(group)))
+
+            # Generate the section index entry (write it now)
+            out.write(self.index_section(group))
+
+            # Generate the item header (save for later)
+            contents += self.item_header(group)
+
+            def item_sort_key(key):
+                return self.item_sort_key(self._cmds[group][key])
+
+            for key in sorted(self._cmds[group].keys(), key=item_sort_key):
+                self.log.debug("--- Processing key \"%s\" (%s)." % \
+                        (key, item_sort_key(key)))
+
+                o = self._cmds[group][key]
+                meta = {
+                    "directory": o['meta']['directory'],
+                    "file": o['meta']['file'],
+                    "macro": o['macro'],
+                    "key": key,
+                    "label": self.item_label(group, key),
+                }
+
+                # Generate the index entry for the item (write it now)
+                out.write(self.index_entry(meta, o))
+
+                # Generate the item itself (save for later)
+                contents += self.item_format(meta, o)
+
+        # Deliver the accumulated body output
+        out.write(contents)
diff --git a/doxygen/siphon/process_clicmd.py b/doxygen/siphon/process_clicmd.py
new file mode 100644
index 00000000000..9b3bd35c86d
--- /dev/null
+++ b/doxygen/siphon/process_clicmd.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate clicmd formatted output
+
+import process, parsers
+
+class SiphonCLICMD(process.Siphon):
+
+    name = "clicmd"
+    identifier = "VLIB_CLI_COMMAND"
+
+    def __init__(self, *args, **kwargs):
+        super(SiphonCLICMD, self).__init__(*args, **kwargs)
+        self._parser = parsers.MacroInitializer()
+
+
+    # Output renderers
+
+    def index_sort_key(self, group):
+        _global = self._cmds['_global']
+        if group not in self._group:
+            return group
+        (directory, file) = self._group[group]
+
+        if file in _global and 'group_label' in _global[file]:
+            return _global[file]['group_label']
+
+        if directory in _global and 'group_label' in _global[directory]:
+            return _global[directory]['group_label']
+
+        return group
+
+    def item_sort_key(self, item):
+        return item['value']['path']
+
+    def item_label(self, group, item):
+        return "_".join((
+            self.name,
+            self.sanitize_label(self._cmds[group][item]['value']['path'])
+        ))
+
+
+# Register our processor
+process.siphons["clicmd"] = SiphonCLICMD
diff --git a/doxygen/siphon/process_syscfg.py b/doxygen/siphon/process_syscfg.py
new file mode 100644
index 00000000000..94be591039d
--- /dev/null
+++ b/doxygen/siphon/process_syscfg.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2016 Comcast Cable Communications Management, LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generate syscfg formatted output
+
+import process, parsers
+
+class SiphonSYSCFG(process.Siphon):
+
+    name = "syscfg"
+    identifier = "VLIB_CONFIG_FUNCTION"
+
+    def __init__(self, *args, **kwargs):
+        super(SiphonSYSCFG, self).__init__(*args, **kwargs)
+        self._parser = parsers.ParseFunctionMacroStmt()
+
+
+# Register our processor
+process.siphons["syscfg"] = SiphonSYSCFG
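
The commit contains only the library modules; the command-line drivers that call into them are not part of this change. Under that assumption, a minimal sketch of driving the generate stage (the file path and output directory below are hypothetical, and the Python 2 implicit relative imports in __init__.py are assumed to resolve) might look roughly like:

    # Hypothetical wiring sketch, not part of the commit. Importing the
    # package runs generate_clicmd/generate_syscfg, which register their
    # patterns on generate.siphon_patterns.
    import siphon

    g = siphon.generate.Generate(output_directory="siphon_docs",
                                 input_prefix="..")
    g.parse("../vlib/main.c")      # hypothetical input file
    g.deliver()                    # appends JSON to siphon_docs/clicmd.siphon etc.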
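
On the processing side, the parser classes can be exercised on their own; a rough sketch using the initializer block quoted in the MacroInitializer comments (the printed structure is approximate, and the HTML escaping comes from cgi.escape):

    # Illustrative sketch only; assumes pyparsing and Python 2 are available.
    from siphon import parsers

    block = '''VLIB_CLI_COMMAND (show_sr_tunnel_command, static) = {
        .path = "show sr tunnel",
        .short_help = "show sr tunnel [name <sr-tunnel-name>]",
        .function = show_sr_tunnel_fn,
    };'''

    item = parsers.MacroInitializer().parse(block)
    # Roughly:
    # {'macro': 'VLIB_CLI_COMMAND', 'name': 'show_sr_tunnel_command',
    #  'params': [...],
    #  'value': {'path': 'show sr tunnel',
    #            'short_help': 'show sr tunnel [name &lt;sr-tunnel-name&gt;]',
    #            'function': 'show_sr_tunnel_fn'}}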