author     Ole Troan <ot@cisco.com>         2020-10-07 18:05:37 +0200
committer  Neale Ranns <nranns@cisco.com>   2020-10-13 11:42:58 +0000
commit     148c7b768721231325a349fa82db693190513b53 (patch)
tree       94e0a9768eda3bc4923b1b3cc727dc637d877381 /src/tools
parent     e7c8396982607634b4c747870499671ffa53868e (diff)
stats: counters data model
This adds a new data model for counters, specifying each error counter's
severity and unit. A later patch will update vpp_get_stats to take advantage
of this. Only the map plugin is updated as an example.

New .api language: a new "counters" keyword defines a counter set.

    counters map {
        none {
            severity info;
            type counter64;
            units "packets";
            description "valid MAP packets";
        };
        bad_protocol {
            severity error;
            type counter64;
            units "packets";
            description "bad protocol";
        };
    };

Each counter takes four keywords: severity, which is one of error, info or
warn; type, which is one of counter64 or gauge64; units, a text field using
units from YANG; and description, a free-text string.

A new "paths" keyword maps the counter set to paths in the stats segment
KV store:

    paths {
        "/err/ip4-map" "map";
        "/err/ip6-map" "map";
        "/err/ip4-t-map" "map";
        "/err/ip6-t-map" "map";
    };

The VPP CLI is updated to include severity, so the user can see the severity
of each error counter:

    DBGvpp# show errors
       Count    Node              Reason      Severity
          13    ethernet-input    no error    error

Type: feature

Signed-off-by: Ole Troan <ot@cisco.com>
Change-Id: Ib2177543f49d4c3aef4d7fa72476cff2068f7771
Signed-off-by: Ole Troan <ot@cisco.com>
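For orientation, here is a rough sketch of the C that the map counter set above
would produce, derived from the generator templates in vppapigen_c.py in the
diff below. The exact layout and file placement are determined by
generate_include_counters() and generate_c_boilerplate(); vl_counter_t and the
VL_COUNTER_SEVERITY_* values are assumed to be defined elsewhere in VPP.

    /* generated enum include (sketch) */
    typedef enum {
        MAP_ERROR_NONE,           /* info,  "valid MAP packets" */
        MAP_ERROR_BAD_PROTOCOL,   /* error, "bad protocol" */
        MAP_N_ERROR
    } vl_counter_map_enum_t;

    extern vl_counter_t map_error_counters[];

    /* generated C boilerplate (sketch) */
    vl_counter_t map_error_counters[] = {
      {
       .name = "none",
       .desc = "valid MAP packets",
       .severity = VL_COUNTER_SEVERITY_INFO,
      },
      {
       .name = "bad_protocol",
       .desc = "bad protocol",
       .severity = VL_COUNTER_SEVERITY_ERROR,
      },
    };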
Diffstat (limited to 'src/tools')
-rwxr-xr-x  src/tools/vppapigen/vppapigen.py       105
-rw-r--r--  src/tools/vppapigen/vppapigen_c.py      53
-rw-r--r--  src/tools/vppapigen/vppapigen_json.py   16
3 files changed, 162 insertions(+), 12 deletions(-)
diff --git a/src/tools/vppapigen/vppapigen.py b/src/tools/vppapigen/vppapigen.py
index fbb0f27cc34..5219bfd9ece 100755
--- a/src/tools/vppapigen/vppapigen.py
+++ b/src/tools/vppapigen/vppapigen.py
@@ -8,7 +8,6 @@ import keyword
import logging
import binascii
import os
-import sys
from subprocess import Popen, PIPE
assert sys.version_info >= (3, 5), \
@@ -80,6 +79,12 @@ class VPPAPILexer(object):
'true': 'TRUE',
'false': 'FALSE',
'union': 'UNION',
+ 'counters': 'COUNTERS',
+ 'paths': 'PATHS',
+ 'units': 'UNITS',
+ 'severity': 'SEVERITY',
+ 'type': 'TYPE',
+ 'description': 'DESCRIPTION',
}
tokens = ['STRING_LITERAL',
@@ -191,7 +196,6 @@ class Typedef():
self.manual_print = True
elif f == 'manual_endian':
self.manual_endian = True
-
global_type_add(name, self)
self.vla = vla_is_last_check(name, block)
@@ -413,6 +417,19 @@ class Field():
return str([self.fieldtype, self.fieldname])
+class Counter():
+ def __init__(self, path, counter):
+ self.type = 'Counter'
+ self.name = path
+ self.block = counter
+
+
+class Paths():
+ def __init__(self, pathset):
+ self.type = 'Paths'
+ self.paths = pathset
+
+
class Coord(object):
""" Coordinates of a syntactic element. Consists of:
- File name
@@ -487,13 +504,68 @@ class VPPAPIParser(object):
| import
| enum
| union
- | service'''
+ | service
+ | paths
+ | counters'''
p[0] = p[1]
def p_import(self, p):
'''import : IMPORT STRING_LITERAL ';' '''
p[0] = Import(p[2], revision=self.revision)
+ def p_path_elements(self, p):
+ '''path_elements : path_element
+ | path_elements path_element'''
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if type(p[1]) is dict:
+ p[0] = [p[1], p[2]]
+ else:
+ p[0] = p[1] + [p[2]]
+
+ def p_path_element(self, p):
+ '''path_element : STRING_LITERAL STRING_LITERAL ';' '''
+ p[0] = {'path': p[1], 'counter': p[2]}
+
+ def p_paths(self, p):
+ '''paths : PATHS '{' path_elements '}' ';' '''
+ p[0] = Paths(p[3])
+
+ def p_counters(self, p):
+ '''counters : COUNTERS ID '{' counter_elements '}' ';' '''
+ p[0] = Counter(p[2], p[4])
+
+ def p_counter_elements(self, p):
+ '''counter_elements : counter_element
+ | counter_elements counter_element'''
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if type(p[1]) is dict:
+ p[0] = [p[1], p[2]]
+ else:
+ p[0] = p[1] + [p[2]]
+
+ def p_counter_element(self, p):
+ '''counter_element : ID '{' counter_statements '}' ';' '''
+ p[0] = {**{'name': p[1]}, **p[3]}
+
+ def p_counter_statements(self, p):
+ '''counter_statements : counter_statement
+ | counter_statements counter_statement'''
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = {**p[1], **p[2]}
+
+ def p_counter_statement(self, p):
+ '''counter_statement : SEVERITY ID ';'
+ | UNITS STRING_LITERAL ';'
+ | DESCRIPTION STRING_LITERAL ';'
+ | TYPE ID ';' '''
+ p[0] = {p[1]: p[2]}
+
def p_service(self, p):
'''service : SERVICE '{' service_statements '}' ';' '''
p[0] = p[3]
@@ -666,9 +738,20 @@ class VPPAPIParser(object):
else:
p[0] = {p[1]: p[3]}
+ def p_variable_name(self, p):
+ '''variable_name : ID
+ | TYPE
+ | SEVERITY
+ | DESCRIPTION
+ | COUNTERS
+ | PATHS
+ '''
+ p[0] = p[1]
+
def p_declaration(self, p):
- '''declaration : type_specifier ID ';'
- | type_specifier ID '[' field_options ']' ';' '''
+ '''declaration : type_specifier variable_name ';'
+ | type_specifier variable_name '[' field_options ']' ';'
+ '''
if len(p) == 7:
p[0] = Field(p[1], p[2], p[4])
elif len(p) == 4:
@@ -678,12 +761,12 @@ class VPPAPIParser(object):
self.fields.append(p[2])
def p_declaration_array_vla(self, p):
- '''declaration : type_specifier ID '[' ']' ';' '''
+ '''declaration : type_specifier variable_name '[' ']' ';' '''
p[0] = Array(p[1], p[2], 0, modern_vla=True)
def p_declaration_array(self, p):
- '''declaration : type_specifier ID '[' NUM ']' ';'
- | type_specifier ID '[' ID ']' ';' '''
+ '''declaration : type_specifier variable_name '[' NUM ']' ';'
+ | type_specifier variable_name '[' ID ']' ';' '''
if len(p) != 7:
return self._parse_error(
@@ -814,6 +897,8 @@ class VPPAPI(object):
s['Service'] = []
s['types'] = []
s['Import'] = []
+ s['Counters'] = []
+ s['Paths'] = []
crc = 0
for o in objs:
tname = o.__class__.__name__
@@ -836,6 +921,10 @@ class VPPAPI(object):
isinstance(o, Using) or
isinstance(o, Union)):
s['types'].append(o)
+ elif (isinstance(o, Counter)):
+ s['Counters'].append(o)
+ elif (isinstance(o, Paths)):
+ s['Paths'].append(o)
else:
if tname not in s:
raise ValueError('Unknown class type: {} {}'
diff --git a/src/tools/vppapigen/vppapigen_c.py b/src/tools/vppapigen/vppapigen_c.py
index 020a880c868..07975ce23b4 100644
--- a/src/tools/vppapigen/vppapigen_c.py
+++ b/src/tools/vppapigen/vppapigen_c.py
@@ -516,6 +516,22 @@ def generate_include_enum(s, module, stream):
write('}} vl_api_{}_enum_t;\n'.format(module))
+def generate_include_counters(s, module, stream):
+ write = stream.write
+
+ for counters in s:
+ csetname = counters.name
+ write('typedef enum {\n')
+ for c in counters.block:
+ write(' {}_ERROR_{},\n'
+ .format(csetname.upper(), c['name'].upper()))
+ write(' {}_N_ERROR\n'.format(csetname.upper()))
+ write('}} vl_counter_{}_enum_t;\n'.format(csetname))
+
+ # write('extern char *{}_error_strings[];\n'.format(csetname))
+ # write('extern char *{}_description_strings[];\n'.format(csetname))
+ write('extern vl_counter_t {}_error_counters[];\n'.format(csetname))
+
#
# Generate separate API _types file.
#
@@ -603,9 +619,10 @@ def generate_include_types(s, module, stream):
write("\n#endif\n")
-def generate_c_boilerplate(services, defines, file_crc, module, stream):
+def generate_c_boilerplate(services, defines, counters, file_crc,
+ module, stream):
write = stream.write
- define_hash = {d.name:d for d in defines}
+ define_hash = {d.name: d for d in defines}
hdr = '''\
#define vl_endianfun /* define message structures */
@@ -661,6 +678,30 @@ def generate_c_boilerplate(services, defines, file_crc, module, stream):
write(' return msg_id_base;\n')
write('}\n')
+ severity = {'error': 'VL_COUNTER_SEVERITY_ERROR',
+ 'info': 'VL_COUNTER_SEVERITY_INFO',
+ 'warn': 'VL_COUNTER_SEVERITY_WARN'}
+
+ for cnt in counters:
+ csetname = cnt.name
+ '''
+ write('char *{}_error_strings[] = {{\n'.format(csetname))
+ for c in cnt.block:
+ write(' "{}",\n'.format(c['name']))
+ write('};\n')
+ write('char *{}_description_strings[] = {{\n'.format(csetname))
+ for c in cnt.block:
+ write(' "{}",\n'.format(c['description']))
+ write('};\n')
+ '''
+ write('vl_counter_t {}_error_counters[] = {{\n'.format(csetname))
+ for c in cnt.block:
+ write(' {\n')
+ write(' .name = "{}",\n'.format(c['name']))
+ write(' .desc = "{}",\n'.format(c['description']))
+ write(' .severity = {},\n'.format(severity[c['severity']]))
+ write(' },\n')
+ write('};\n')
def generate_c_test_boilerplate(services, defines, file_crc, module, plugin, stream):
write = stream.write
@@ -788,7 +829,11 @@ def run(args, input_filename, s):
# Generate separate enum file
st = StringIO()
+ st.write('#ifndef included_{}_api_enum_h\n'.format(modulename))
+ st.write('#define included_{}_api_enum_h\n'.format(modulename))
generate_include_enum(s, modulename, st)
+ generate_include_counters(s['Counters'], modulename, st)
+ st.write('#endif\n')
with open (filename_enum, 'w') as fd:
st.seek (0)
shutil.copyfileobj (st, fd)
@@ -796,8 +841,8 @@ def run(args, input_filename, s):
# Generate separate C file
st = StringIO()
- generate_c_boilerplate(s['Service'], s['Define'], s['file_crc'],
- modulename, st)
+ generate_c_boilerplate(s['Service'], s['Define'], s['Counters'],
+ s['file_crc'], modulename, st)
with open (filename_c, 'w') as fd:
st.seek (0)
shutil.copyfileobj(st, fd)
diff --git a/src/tools/vppapigen/vppapigen_json.py b/src/tools/vppapigen/vppapigen_json.py
index 6e7aaa2e6f5..f41bfb08c58 100644
--- a/src/tools/vppapigen/vppapigen_json.py
+++ b/src/tools/vppapigen/vppapigen_json.py
@@ -1,6 +1,7 @@
# JSON generation
import json
+
def walk_imports(s):
r = []
for e in s:
@@ -8,6 +9,19 @@ def walk_imports(s):
return r
+def walk_counters(s, pathset):
+ r = []
+ for e in s:
+ r2 = {'name': e.name, 'elements': e.block}
+ r.append(r2)
+
+ r3 = []
+ for p in pathset:
+ r3.append(p.paths)
+
+ return r, r3
+
+
def walk_enums(s):
r = []
for e in s:
@@ -66,6 +80,7 @@ def walk_defs(s, is_message=False):
r.append(d)
return r
+
#
# Plugin entry point
#
@@ -84,4 +99,5 @@ def run(args, filename, s):
j['aliases'] = {o.name:o.alias for o in s['types'] if o.__class__.__name__ == 'Using'}
j['vl_api_version'] = hex(s['file_crc'])
j['imports'] = walk_imports(i for i in s['Import'])
+ j['counters'], j['paths'] = walk_counters(s['Counters'], s['Paths'])
return json.dumps(j, indent=4, separators=(',', ': '))
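For illustration only (not verbatim tool output): with the map example from the
commit message, walk_counters() would populate the module's .api.json with
roughly the following "counters" and "paths" entries; the key names and nesting
follow the parser dictionaries and walk_counters() above.

    "counters": [
        {
            "name": "map",
            "elements": [
                {
                    "name": "none",
                    "severity": "info",
                    "type": "counter64",
                    "units": "packets",
                    "description": "valid MAP packets"
                },
                {
                    "name": "bad_protocol",
                    "severity": "error",
                    "type": "counter64",
                    "units": "packets",
                    "description": "bad protocol"
                }
            ]
        }
    ],
    "paths": [
        [
            { "path": "/err/ip4-map", "counter": "map" },
            { "path": "/err/ip6-map", "counter": "map" },
            { "path": "/err/ip4-t-map", "counter": "map" },
            { "path": "/err/ip6-t-map", "counter": "map" }
        ]
    ]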