author     Tibor Frank <tifrank@cisco.com>    2021-06-25 13:56:43 +0200
committer  Tibor Frank <tifrank@cisco.com>    2021-06-28 10:23:08 +0000
commit     cf622f8aa1aa25125e8aee9a36d5b20e70c40f42 (patch)
tree       6a83a635c4d7b1f8de1c1921061edd789b17ab37 /resources
parent     0f11bc4f741be09499014a45721ae83c85a447cd (diff)
PAL: Process sh-run from telemetry
Change-Id: Ia1a3427609b284d0036dfe3b2ffbb17a9b4f8a65
Signed-off-by: Tibor Frank <tifrank@cisco.com>
(cherry picked from commit 7b4dc49521908774f9eb2f5d287078cce06d8e49)
Diffstat (limited to 'resources')
-rw-r--r--  resources/tools/presentation/convert_xml_json.py    |  66
-rw-r--r--  resources/tools/presentation/generator_tables.py    |  68
-rw-r--r--  resources/tools/presentation/input_data_parser.py   |  82
3 files changed, 145 insertions, 71 deletions
diff --git a/resources/tools/presentation/convert_xml_json.py b/resources/tools/presentation/convert_xml_json.py
index f1994df6b8..73469cd025 100644
--- a/resources/tools/presentation/convert_xml_json.py
+++ b/resources/tools/presentation/convert_xml_json.py
@@ -195,38 +195,42 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
)
# Process show runtime:
- in_sh_run = deepcopy(in_data.get(u"show-run", None))
- if in_sh_run:
- # Transform to openMetrics format
- for key, val in in_sh_run.items():
- log_item = {
- u"source_type": u"node",
- u"source_id": key,
- u"msg_type": u"metric",
- u"log_level": u"INFO",
- u"timestamp": in_data.get(u"starttime", u""),
- u"msg": u"show_runtime",
- u"data": list()
- }
- runtime = loads(val.get(u"runtime", list()))
- for item in runtime:
- for metric, m_data in item.items():
- if metric == u"name":
- continue
- for idx, m_item in enumerate(m_data):
- log_item[u"data"].append(
- {
- u"name": metric,
- u"value": m_item,
- u"labels": {
- u"host": val.get(u"host", u""),
- u"socket": val.get(u"socket", u""),
- u"graph_node": item.get(u"name", u""),
- u"thread_id": str(idx)
+ if in_data.get(u"telemetry-show-run", None):
+ for item in in_data[u"telemetry-show-run"].values():
+ data.add_to_list(u"log", item.get(u"runtime", dict()))
+ else:
+ in_sh_run = deepcopy(in_data.get(u"show-run", None))
+ if in_sh_run:
+ # Transform to openMetrics format
+ for key, val in in_sh_run.items():
+ log_item = {
+ u"source_type": u"node",
+ u"source_id": key,
+ u"msg_type": u"metric",
+ u"log_level": u"INFO",
+ u"timestamp": in_data.get(u"starttime", u""),
+ u"msg": u"show_runtime",
+ u"data": list()
+ }
+ runtime = loads(val.get(u"runtime", list()))
+ for item in runtime:
+ for metric, m_data in item.items():
+ if metric == u"name":
+ continue
+ for idx, m_item in enumerate(m_data):
+ log_item[u"data"].append(
+ {
+ u"name": metric,
+ u"value": m_item,
+ u"labels": {
+ u"host": val.get(u"host", u""),
+ u"socket": val.get(u"socket", u""),
+ u"graph_node": item.get(u"name", u""),
+ u"thread_id": str(idx)
+ }
}
- }
- )
- data.add_to_list(u"log", log_item)
+ )
+ data.add_to_list(u"log", log_item)
# Process results:
results = dict()
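Note on the convert_xml_json.py hunk above: the exporter now prefers telemetry data that is already in per-DUT metric form and appends each DUT's "runtime" record to the log as-is; only when no "telemetry-show-run" entry exists does it fall back to transforming the legacy "show-run" JSON. A minimal standalone sketch of that selection logic, assuming simplified stand-ins for the real in_data entry and for data.add_to_list(u"log", ...); the add_to_log callable below is hypothetical.

from copy import deepcopy
from json import loads


def collect_show_run_log(in_data, add_to_log):
    """Append show-runtime records to the log, preferring telemetry data.

    in_data mimics one parsed test entry; add_to_log stands in for
    data.add_to_list(u"log", ...) used by the real exporter.
    """
    telemetry = in_data.get("telemetry-show-run", None)
    if telemetry:
        # New path: per-DUT records are already in the metric format.
        for item in telemetry.values():
            add_to_log(item.get("runtime", dict()))
        return

    # Legacy path: transform the old "show-run" JSON into metric records.
    in_sh_run = deepcopy(in_data.get("show-run", None))
    if not in_sh_run:
        return
    for key, val in in_sh_run.items():
        log_item = {
            "source_type": "node",
            "source_id": key,
            "msg_type": "metric",
            "log_level": "INFO",
            "timestamp": in_data.get("starttime", ""),
            "msg": "show_runtime",
            "data": list()
        }
        for item in loads(val.get("runtime", "[]")):
            for metric, m_data in item.items():
                if metric == "name":
                    continue
                # m_data is a per-thread list of values for this metric.
                for idx, m_item in enumerate(m_data):
                    log_item["data"].append({
                        "name": metric,
                        "value": m_item,
                        "labels": {
                            "host": val.get("host", ""),
                            "socket": val.get("socket", ""),
                            "graph_node": item.get("name", ""),
                            "thread_id": str(idx)
                        }
                    })
        add_to_log(log_item)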
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
index 8218084f71..5b95189029 100644
--- a/resources/tools/presentation/generator_tables.py
+++ b/resources/tools/presentation/generator_tables.py
@@ -94,7 +94,7 @@ def table_oper_data_html(table, input_data):
)
data = input_data.filter_data(
table,
- params=[u"name", u"parent", u"show-run", u"type"],
+ params=[u"name", u"parent", u"telemetry-show-run", u"type"],
continue_on_error=True
)
if data.empty:
@@ -147,7 +147,8 @@ def table_oper_data_html(table, input_data):
)
thead.text = u"\t"
- if tst_data.get(u"show-run", u"No Data") == u"No Data":
+ if tst_data.get(u"telemetry-show-run", None) is None or \
+ isinstance(tst_data[u"telemetry-show-run"], str):
trow = ET.SubElement(
tbl, u"tr", attrib=dict(bgcolor=colors[u"header"])
)
@@ -177,7 +178,7 @@ def table_oper_data_html(table, input_data):
u"Average Vector Size"
)
- for dut_data in tst_data[u"show-run"].values():
+ for dut_data in tst_data[u"telemetry-show-run"].values():
trow = ET.SubElement(
tbl, u"tr", attrib=dict(bgcolor=colors[u"header"])
)
@@ -188,39 +189,41 @@ def table_oper_data_html(table, input_data):
tcol.text = u"No Data"
continue
- runtime = loads(dut_data[u"runtime"])
-
- try:
- threads_nr = len(runtime[0][u"clocks"])
- except (IndexError, KeyError):
- tcol.text = u"No Data"
- continue
-
- threads = OrderedDict({idx: list() for idx in range(threads_nr)})
- for item in runtime:
- for idx in range(threads_nr):
- if item[u"vectors"][idx] > 0:
- clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
- elif item[u"calls"][idx] > 0:
- clocks = item[u"clocks"][idx] / item[u"calls"][idx]
- elif item[u"suspends"][idx] > 0:
- clocks = item[u"clocks"][idx] / item[u"suspends"][idx]
+ runtime = dict()
+ for item in dut_data[u"runtime"].get(u"data", tuple()):
+ tid = int(item[u"labels"][u"thread_id"])
+ if runtime.get(tid, None) is None:
+ runtime[tid] = dict()
+ gnode = item[u"labels"][u"graph_node"]
+ if runtime[tid].get(gnode, None) is None:
+ runtime[tid][gnode] = dict()
+ try:
+ runtime[tid][gnode][item[u"name"]] = float(item[u"value"])
+ except ValueError:
+ runtime[tid][gnode][item[u"name"]] = item[u"value"]
+
+ threads = dict({idx: list() for idx in range(len(runtime))})
+ for idx, run_data in runtime.items():
+ for gnode, gdata in run_data.items():
+ if gdata[u"vectors"] > 0:
+ clocks = gdata[u"clocks"] / gdata[u"vectors"]
+ elif gdata[u"calls"] > 0:
+ clocks = gdata[u"clocks"] / gdata[u"calls"]
+ elif gdata[u"suspends"] > 0:
+ clocks = gdata[u"clocks"] / gdata[u"suspends"]
else:
clocks = 0.0
-
- if item[u"calls"][idx] > 0:
- vectors_call = item[u"vectors"][idx] / item[u"calls"][
- idx]
+ if gdata[u"calls"] > 0:
+ vectors_call = gdata[u"vectors"] / gdata[u"calls"]
else:
vectors_call = 0.0
-
- if int(item[u"calls"][idx]) + int(item[u"vectors"][idx]) + \
- int(item[u"suspends"][idx]):
+ if int(gdata[u"calls"]) + int(gdata[u"vectors"]) + \
+ int(gdata[u"suspends"]):
threads[idx].append([
- item[u"name"],
- item[u"calls"][idx],
- item[u"vectors"][idx],
- item[u"suspends"][idx],
+ gnode,
+ int(gdata[u"calls"]),
+ int(gdata[u"vectors"]),
+ int(gdata[u"suspends"]),
clocks,
vectors_call
])
@@ -393,8 +396,7 @@ def table_merged_details(table, input_data):
col_data = col_data.replace(u'\n', u' |br| ').\
replace(u'\r', u'').replace(u'"', u"'")
col_data = f" |prein| {col_data} |preout| "
- elif column[u"data"].split(u" ")[1] in \
- (u"conf-history", u"show-run"):
+ elif column[u"data"].split(u" ")[1] in (u"conf-history", ):
col_data = col_data.replace(u'\n', u' |br| ')
col_data = f" |prein| {col_data[:-5]} |preout| "
row_lst.append(f'"{col_data}"')
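Note on the generator_tables.py hunk above: instead of indexing parallel per-thread clocks/calls/vectors arrays, the table builder now pivots the flat telemetry metric list (each entry carrying thread_id and graph_node labels) into a dict keyed by thread and graph node, then computes the guarded clocks-per-call and vectors-per-call figures. A self-contained sketch of that pivot, using an invented two-metric sample rather than real PAL data.

def pivot_runtime(metrics):
    """Group flat telemetry metrics by thread_id, then by graph_node."""
    runtime = dict()
    for item in metrics:
        tid = int(item["labels"]["thread_id"])
        gnode = item["labels"]["graph_node"]
        node = runtime.setdefault(tid, dict()).setdefault(gnode, dict())
        try:
            node[item["name"]] = float(item["value"])
        except ValueError:
            node[item["name"]] = item["value"]
    return runtime


# Hypothetical sample: two metrics for one graph node on thread 0.
sample = [
    {"name": "calls", "value": "52",
     "labels": {"thread_id": "0", "graph_node": "ip4-lookup"}},
    {"name": "clocks", "value": "1.2e3",
     "labels": {"thread_id": "0", "graph_node": "ip4-lookup"}},
]
runtime = pivot_runtime(sample)
gdata = runtime[0]["ip4-lookup"]
# Clocks per call, guarded the same way as the table generator.
clocks = gdata["clocks"] / gdata["calls"] if gdata["calls"] > 0 else 0.0
print(clocks)  # 23.07...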
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 9151cf2f8e..1e2513848b 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -337,6 +337,8 @@ class ExecutionChecker(ResultVisitor):
self._conf_history_lookup_nr = 0
self._sh_run_counter = 0
+ self._telemetry_kw_counter = 0
+ self._telemetry_msg_counter = 0
# Test ID of currently processed test- the lowercase full path to the
# test
@@ -361,7 +363,8 @@ class ExecutionChecker(ResultVisitor):
u"dpdk-version": self._get_dpdk_version,
u"teardown-papi-history": self._get_papi_history,
u"test-show-runtime": self._get_show_run,
- u"testbed": self._get_testbed
+ u"testbed": self._get_testbed,
+ u"test-telemetry": self._get_telemetry
}
@property
@@ -688,6 +691,66 @@ class ExecutionChecker(ResultVisitor):
}
)
+ def _get_telemetry(self, msg):
+ """Called when extraction of VPP telemetry data is required.
+
+ :param msg: Message to process.
+ :type msg: Message
+ :returns: Nothing.
+ """
+
+ if self._telemetry_kw_counter > 1:
+ return
+ if not msg.message.count(u"vpp_runtime_calls"):
+ return
+
+ if u"telemetry-show-run" not in \
+ self._data[u"tests"][self._test_id].keys():
+ self._data[u"tests"][self._test_id][u"telemetry-show-run"] = dict()
+
+ self._telemetry_msg_counter += 1
+ dut = f"dut{self._telemetry_msg_counter}"
+ runtime = {
+ u"source_type": u"node",
+ u"source_id": dut,
+ u"msg_type": u"metric",
+ u"log_level": u"INFO",
+ u"timestamp": msg.timestamp,
+ u"msg": u"show_runtime",
+ u"host": dut, # No info, should be host IP
+ u"socket": u"", # No info
+ u"data": list()
+ }
+ for line in msg.message.splitlines():
+ if not line.startswith(u"vpp_runtime_"):
+ continue
+ try:
+ params, value = line.rsplit(u" ", maxsplit=2)[:-1]
+ cut = params.index(u"{")
+ name = params[:cut].split(u"_", maxsplit=2)[-1]
+ labels = eval(
+ u"dict" + params[cut:].replace('{', '(').replace('}', ')')
+ )
+ labels[u"graph_node"] = labels.pop(u"name")
+ runtime[u"data"].append(
+ {
+ u"name": name,
+ u"value": value,
+ u"labels": labels
+ }
+ )
+ except (TypeError, ValueError, IndexError):
+ continue
+
+ self._data[u'tests'][self._test_id][u'telemetry-show-run'][dut] = \
+ copy.copy(
+ {
+ u"host": dut,
+ u"socket": u"",
+ u"runtime": runtime
+ }
+ )
+
def _get_ndrpdr_throughput(self, msg):
"""Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test
message.
@@ -1003,6 +1066,8 @@ class ExecutionChecker(ResultVisitor):
"""
self._sh_run_counter = 0
+ self._telemetry_kw_counter = 0
+ self._telemetry_msg_counter = 0
longname_orig = test.longname.lower()
@@ -1228,10 +1293,13 @@ class ExecutionChecker(ResultVisitor):
:type test_kw: Keyword
:returns: Nothing.
"""
- if ((self._for_output != u"trending") and
- (test_kw.name.count(u"Show Runtime On All Duts") or
- test_kw.name.count(u"Show Runtime Counters On All Duts") or
- test_kw.name.count(u"Vpp Show Runtime On All Duts"))):
+ if self._for_output == u"trending":
+ return
+
+ if test_kw.name.count(u"Run Telemetry On All Duts"):
+ self._msg_type = u"test-telemetry"
+ self._telemetry_kw_counter += 1
+ elif test_kw.name.count(u"Show Runtime On All Duts"):
self._msg_type = u"test-show-runtime"
self._sh_run_counter += 1
else:
@@ -2034,8 +2102,8 @@ class InputData:
vectors_call = 0.0
if int(item[u"calls"][idx]) + int(
- item[u"vectors"][idx]) + \
- int(item[u"suspends"][idx]):
+ item[u"vectors"][idx]) + \
+ int(item[u"suspends"][idx]):
threads[idx].append([
item[u"name"],
item[u"calls"][idx],