author    Tibor Frank <tifrank@cisco.com>    2021-06-22 13:49:05 +0200
committer Tibor Frank <tifrank@cisco.com>    2021-06-22 14:30:05 +0200
commit    7c7cad880bb72b19671fec71b3f5d1350995161e (patch)
tree      3a3e4e13e22e617e1caf2b08d01be9dcdb431161 /resources/tools/presentation
parent    b3881e8cb219865423bc578c1e65324e15336847 (diff)
PAL: Do not read sh run for trending
Change-Id: I0e142c1c3bb48ddeb0bcd2bfa11c0323461c1368
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Diffstat (limited to 'resources/tools/presentation')
-rw-r--r--  resources/tools/presentation/convert_xml_json.py   |  4
-rw-r--r--  resources/tools/presentation/generator_tables.py   |  7
-rw-r--r--  resources/tools/presentation/input_data_parser.py  | 35
-rw-r--r--  resources/tools/presentation/pal.py                |  2
4 files changed, 31 insertions(+), 17 deletions(-)
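
In short, the patch stops parsing VPP "show runtime" output eagerly: input_data_parser.py now stores the payload as a JSON string, the consumers call json.loads only where the data is needed, and a new for_output argument (threaded in from pal.py) lets trending runs skip the Show Runtime keywords entirely. A minimal sketch of the store/consume contract, with illustrative values only:

    from json import loads

    # Stored form (input_data_parser.py): u"runtime" is kept as a JSON
    # string, not as a parsed list. Host, socket and counters are made up.
    show_run_entry = {
        u"host": u"10.0.0.1",
        u"socket": u"/run/vpp/api.sock",
        u"runtime": u'[{"name": "ip4-lookup", "clocks": [120.0], "vectors": [256]}]',
    }

    # Consumed form (convert_xml_json.py, generator_tables.py, InputData):
    # parse only where the data is actually needed.
    runtime = loads(show_run_entry[u"runtime"])
    for item in runtime:
        print(item[u"name"], item[u"clocks"][0])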
diff --git a/resources/tools/presentation/convert_xml_json.py b/resources/tools/presentation/convert_xml_json.py
index 61c6e84a98..f1994df6b8 100644
--- a/resources/tools/presentation/convert_xml_json.py
+++ b/resources/tools/presentation/convert_xml_json.py
@@ -28,6 +28,7 @@ import gzip
from os.path import join
from shutil import rmtree
from copy import deepcopy
+from json import loads
from pal_utils import get_files
@@ -207,7 +208,8 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
u"msg": u"show_runtime",
u"data": list()
}
- for item in val.get(u"runtime", list()):
+ runtime = loads(val.get(u"runtime", list()))
+ for item in runtime:
for metric, m_data in item.items():
if metric == u"name":
continue
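
Because the stored u"runtime" value is now a string rather than a list, iterating it directly would walk characters, hence the explicit loads() above. A minimal sketch of the metric-filtering loop that follows it, using illustrative data:

    from json import loads

    # Illustrative runtime string; the real value is the one stored under
    # u"runtime" by input_data_parser.py (see below).
    runtime = loads(u'[{"name": "ip4-lookup", "calls": [2], "vectors": [512]}]')

    data = list()
    for item in runtime:
        for metric, m_data in item.items():
            if metric == u"name":
                continue
            # One record per metric, keyed by the graph-node name.
            data.append({u"name": item[u"name"], metric: m_data})
    print(data)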
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
index d66a8fc3cf..8218084f71 100644
--- a/resources/tools/presentation/generator_tables.py
+++ b/resources/tools/presentation/generator_tables.py
@@ -24,6 +24,7 @@ from xml.etree import ElementTree as ET
from datetime import datetime as dt
from datetime import timedelta
from copy import deepcopy
+from json import loads
import plotly.graph_objects as go
import plotly.offline as ploff
@@ -187,14 +188,16 @@ def table_oper_data_html(table, input_data):
tcol.text = u"No Data"
continue
+ runtime = loads(dut_data[u"runtime"])
+
try:
- threads_nr = len(dut_data[u"runtime"][0][u"clocks"])
+ threads_nr = len(runtime[0][u"clocks"])
except (IndexError, KeyError):
tcol.text = u"No Data"
continue
threads = OrderedDict({idx: list() for idx in range(threads_nr)})
- for item in dut_data[u"runtime"]:
+ for item in runtime:
for idx in range(threads_nr):
if item[u"vectors"][idx] > 0:
clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
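
The operational-data table parses the string once and reuses the result both for the thread-count probe and for the per-thread loop. A reduced sketch of that calculation, assuming a single worker thread and made-up counters:

    from collections import OrderedDict
    from json import loads

    runtime = loads(
        u'[{"name": "ip4-lookup", "clocks": [1200.0], "vectors": [10]}]'
    )

    threads_nr = len(runtime[0][u"clocks"])   # one entry per worker thread
    threads = OrderedDict({idx: list() for idx in range(threads_nr)})
    for item in runtime:
        for idx in range(threads_nr):
            if item[u"vectors"][idx] > 0:
                # Average clock cycles per vector on this thread.
                clocks = item[u"clocks"][idx] / item[u"vectors"][idx]
                threads[idx].append((item[u"name"], clocks))
    print(threads)   # one (name, clocks-per-vector) pair per thread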
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 94f8e96ec8..9151cf2f8e 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -295,7 +295,7 @@ class ExecutionChecker(ResultVisitor):
REGEX_TC_PAPI_CLI = re.compile(r'.*\((\d+.\d+.\d+.\d+.) - (.*)\)')
- def __init__(self, metadata, mapping, ignore):
+ def __init__(self, metadata, mapping, ignore, for_output):
"""Initialisation.
:param metadata: Key-value pairs to be included in "metadata" part of
@@ -303,9 +303,11 @@ class ExecutionChecker(ResultVisitor):
:param mapping: Mapping of the old names of test cases to the new
(actual) one.
:param ignore: List of TCs to be ignored.
+ :param for_output: Output to be generated from downloaded data.
:type metadata: dict
:type mapping: dict
:type ignore: list
+ :type for_output: str
"""
# Type of message to parse out from the test messages
@@ -326,6 +328,8 @@ class ExecutionChecker(ResultVisitor):
# Ignore list
self._ignore = ignore
+ self._for_output = for_output
+
# Number of PAPI History messages found:
# 0 - no message
# 1 - PAPI History of DUT1
@@ -669,10 +673,6 @@ class ExecutionChecker(ResultVisitor):
except (AttributeError, IndexError):
sock = u""
- runtime = loads(str(msg.message).replace(u' ', u'').replace(u'\n', u'').
- replace(u"'", u'"').replace(u'b"', u'"').
- replace(u'u"', u'"').split(u":", 1)[1])
-
dut = u"dut{nr}".format(
nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1)
@@ -681,7 +681,10 @@ class ExecutionChecker(ResultVisitor):
{
u"host": host,
u"socket": sock,
- u"runtime": runtime,
+ u"runtime": str(msg.message).replace(u' ', u'').
+ replace(u'\n', u'').replace(u"'", u'"').
+ replace(u'b"', u'"').replace(u'u"', u'"').
+ split(u":", 1)[1]
}
)
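
What gets stored is the raw keyword message with whitespace stripped, quote styles unified and the leading label removed, so that the remainder parses as JSON. A hedged sketch with a made-up message (the real payload comes from the Show Runtime keyword output):

    from json import loads

    # Hypothetical Robot keyword message; its shape is assumed from the
    # replacement chain above, not taken verbatim from a test run.
    msg = u"show_runtime: [{'name': u'ip4-input', 'clocks': [95.2], 'vectors': [256]}]"

    runtime_str = (
        str(msg).replace(u' ', u'').replace(u'\n', u'')
        .replace(u"'", u'"').replace(u'b"', u'"').replace(u'u"', u'"')
        .split(u":", 1)[1]
    )

    print(runtime_str)   # [{"name":"ip4-input","clocks":[95.2],"vectors":[256]}]
    print(loads(runtime_str)[0][u"name"])   # ip4-input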
@@ -1225,9 +1228,10 @@ class ExecutionChecker(ResultVisitor):
:type test_kw: Keyword
:returns: Nothing.
"""
- if test_kw.name.count(u"Show Runtime On All Duts") or \
- test_kw.name.count(u"Show Runtime Counters On All Duts") or \
- test_kw.name.count(u"Vpp Show Runtime On All Duts"):
+ if ((self._for_output != u"trending") and
+ (test_kw.name.count(u"Show Runtime On All Duts") or
+ test_kw.name.count(u"Show Runtime Counters On All Duts") or
+ test_kw.name.count(u"Vpp Show Runtime On All Duts"))):
self._msg_type = u"test-show-runtime"
self._sh_run_counter += 1
else:
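
The gate itself is a substring match on the keyword name, short-circuited for trending output. A reduced sketch using a hypothetical helper, with the same keyword names as matched above:

    def wants_show_runtime(for_output, kw_name):
        """Hypothetical helper: True when a keyword's messages should be
        parsed as show-runtime data; trending output skips them."""
        show_run_kws = (
            u"Show Runtime On All Duts",
            u"Show Runtime Counters On All Duts",
            u"Vpp Show Runtime On All Duts",
        )
        return for_output != u"trending" and any(
            name in kw_name for name in show_run_kws
        )

    print(wants_show_runtime(u"trending", u"Show Runtime On All Duts"))     # False
    print(wants_show_runtime(u"report", u"Show Runtime On All Duts"))       # True
    print(wants_show_runtime(u"report", u"Show Packet Trace On All Duts"))  # False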
@@ -1366,16 +1370,20 @@ class InputData:
(as described in ExecutionChecker documentation)
"""
- def __init__(self, spec):
+ def __init__(self, spec, for_output):
"""Initialization.
:param spec: Specification.
+ :param for_output: Output to be generated from downloaded data.
:type spec: Specification
+ :type for_output: str
"""
# Specification:
self._cfg = spec
+ self._for_output = for_output
+
# Data store:
self._input_data = pd.Series()
@@ -1450,7 +1458,7 @@ class InputData:
)
return None
checker = ExecutionChecker(
- metadata, self._cfg.mapping, self._cfg.ignore
+ metadata, self._cfg.mapping, self._cfg.ignore, self._for_output
)
result.visit(checker)
@@ -1998,13 +2006,14 @@ class InputData:
for dut_name, data in test_data[u"show-run"].items():
if data.get(u"runtime", None) is None:
continue
+ runtime = loads(data[u"runtime"])
try:
- threads_nr = len(data[u"runtime"][0][u"clocks"])
+ threads_nr = len(runtime[0][u"clocks"])
except (IndexError, KeyError):
continue
threads = OrderedDict(
{idx: list() for idx in range(threads_nr)})
- for item in data[u"runtime"]:
+ for item in runtime:
for idx in range(threads_nr):
if item[u"vectors"][idx] > 0:
clocks = item[u"clocks"][idx] / \
diff --git a/resources/tools/presentation/pal.py b/resources/tools/presentation/pal.py
index 7e2d9a8dbd..4f9b24fedc 100644
--- a/resources/tools/presentation/pal.py
+++ b/resources/tools/presentation/pal.py
@@ -148,7 +148,7 @@ def main():
prepare_static_content(spec)
- data = InputData(spec)
+ data = InputData(spec, spec.output[u"output"])
if args.input_file:
data.process_local_file(args.input_file)
elif args.input_directory:
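
For completeness, the for_output value originates in main() and travels Specification -> InputData -> ExecutionChecker. A compressed, hypothetical sketch of that plumbing (stand-in classes, not the real ones; spec.output is assumed from the call site above to be a dict whose u"output" entry names the target, e.g. u"trending"):

    # Hypothetical stand-ins showing how for_output reaches the parser;
    # the real classes live in input_data_parser.py.
    class ExecutionChecker:
        def __init__(self, metadata, mapping, ignore, for_output):
            self._for_output = for_output


    class InputData:
        def __init__(self, spec, for_output):
            self._cfg = spec
            self._for_output = for_output

        def checker_for(self, metadata):
            # Mirrors the checker construction shown in the diff above.
            return ExecutionChecker(
                metadata, self._cfg[u"mapping"], self._cfg[u"ignore"],
                self._for_output
            )


    spec = {u"mapping": dict(), u"ignore": list(), u"output": u"trending"}
    data = InputData(spec, spec[u"output"])
    print(data.checker_for(metadata=dict())._for_output)   # trending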