author     Tibor Frank <tifrank@cisco.com>   2020-02-28 13:48:15 +0100
committer  Tibor Frank <tifrank@cisco.com>   2020-03-02 16:09:10 +0000
commit     691f24ec052cc9d48d6abe143bcae95486f94388
tree       be243d4000a3f6a95224c82a59251e08c22ded53
parent     0013ff2deb67ebdf99655b186b720d393c9cc620
Report: Detailed test results
- sub-chapters reorg

Change-Id: I0a90bcfde1e8be368e7ddaf9a41abbe52851cd62
Signed-off-by: Tibor Frank <tifrank@cisco.com>
-rw-r--r--  docs/report/detailed_test_results/vpp_mrr_results/index.rst | 22
-rw-r--r--  docs/report/detailed_test_results/vpp_performance_results/index.rst | 22
-rw-r--r--  docs/report/test_configuration/vpp_mrr_configuration/index.rst | 22
-rw-r--r--  docs/report/test_configuration/vpp_performance_configuration/index.rst | 22
-rw-r--r--  docs/report/test_operational_data/vpp_performance_operational_data/index.rst | 22
-rw-r--r--  resources/tools/presentation/generator_files.py | 213
-rw-r--r--  resources/tools/presentation/generator_plots.py | 4
-rw-r--r--  resources/tools/presentation/generator_tables.py | 62
-rw-r--r--  resources/tools/presentation/input_data_parser.py | 4
-rw-r--r--  resources/tools/presentation/specification.yaml | 554
-rw-r--r--  resources/tools/presentation/specification_parser.py | 71
11 files changed, 575 insertions, 443 deletions
diff --git a/docs/report/detailed_test_results/vpp_mrr_results/index.rst b/docs/report/detailed_test_results/vpp_mrr_results/index.rst
index 9989860809..f783a404ba 100644
--- a/docs/report/detailed_test_results/vpp_mrr_results/index.rst
+++ b/docs/report/detailed_test_results/vpp_mrr_results/index.rst
@@ -1,14 +1,18 @@
VPP MRR
=======
-.. toctree::
+.. note::
+
+ Data sources for reported test results:
+ i) `FD.io test executor vpp performance job 2n-skx`_,
+ `FD.io test executor vpp performance job 3n-skx`_,
+ `FD.io test executor vpp performance job 2n-clx`_,
+ `FD.io test executor vpp performance job 3n-hsw`_,
+ `FD.io test executor vpp performance job 3n-tsh`_,
+ `FD.io test executor vpp performance job 2n-dnv`_,
+ `FD.io test executor vpp performance job 3n-dnv`_,
+ ii) archived FD.io jobs test results
+ `output files <../../_static/archive/>`_.
- ../vpp_mrr_results_3n_hsw/index
- ../vpp_mrr_results_3n_tsh/index
- ../vpp_mrr_results_2n_dnv/index
- ../vpp_mrr_results_3n_dnv/index
- ../vpp_mrr_results_2n_clx/index
+.. toctree::
-..
- ../vpp_mrr_results_2n_skx/index
- ../vpp_mrr_results_3n_skx/index
diff --git a/docs/report/detailed_test_results/vpp_performance_results/index.rst b/docs/report/detailed_test_results/vpp_performance_results/index.rst
index 0459da69ab..b6c6c8f645 100644
--- a/docs/report/detailed_test_results/vpp_performance_results/index.rst
+++ b/docs/report/detailed_test_results/vpp_performance_results/index.rst
@@ -1,14 +1,18 @@
VPP Throughput
==============
-.. toctree::
+.. note::
+
+ Data sources for reported test results:
+ i) `FD.io test executor vpp performance job 2n-skx`_,
+ `FD.io test executor vpp performance job 3n-skx`_,
+ `FD.io test executor vpp performance job 2n-clx`_,
+ `FD.io test executor vpp performance job 3n-hsw`_,
+ `FD.io test executor vpp performance job 3n-tsh`_,
+ `FD.io test executor vpp performance job 2n-dnv`_,
+ `FD.io test executor vpp performance job 3n-dnv`_,
+ ii) archived FD.io jobs test results
+ `output files <../../_static/archive/>`_.
- ../vpp_performance_results_3n_hsw/index
- ../vpp_performance_results_3n_tsh/index
- ../vpp_performance_results_2n_dnv/index
- ../vpp_performance_results_3n_dnv/index
- ../vpp_performance_results_2n_clx/index
+.. toctree::
-..
- ../vpp_performance_results_2n_skx/index
- ../vpp_performance_results_3n_skx/index
diff --git a/docs/report/test_configuration/vpp_mrr_configuration/index.rst b/docs/report/test_configuration/vpp_mrr_configuration/index.rst
index 400e45dae5..f783a404ba 100644
--- a/docs/report/test_configuration/vpp_mrr_configuration/index.rst
+++ b/docs/report/test_configuration/vpp_mrr_configuration/index.rst
@@ -1,14 +1,18 @@
VPP MRR
=======
-.. toctree::
+.. note::
+
+ Data sources for reported test results:
+ i) `FD.io test executor vpp performance job 2n-skx`_,
+ `FD.io test executor vpp performance job 3n-skx`_,
+ `FD.io test executor vpp performance job 2n-clx`_,
+ `FD.io test executor vpp performance job 3n-hsw`_,
+ `FD.io test executor vpp performance job 3n-tsh`_,
+ `FD.io test executor vpp performance job 2n-dnv`_,
+ `FD.io test executor vpp performance job 3n-dnv`_,
+ ii) archived FD.io jobs test results
+ `output files <../../_static/archive/>`_.
- ../vpp_mrr_configuration_3n_hsw/index
- ../vpp_mrr_configuration_3n_tsh/index
- ../vpp_mrr_configuration_2n_dnv/index
- ../vpp_mrr_configuration_3n_dnv/index
- ../vpp_mrr_configuration_2n_clx/index
+.. toctree::
-..
- ../vpp_mrr_configuration_2n_skx/index
- ../vpp_mrr_configuration_3n_skx/index
diff --git a/docs/report/test_configuration/vpp_performance_configuration/index.rst b/docs/report/test_configuration/vpp_performance_configuration/index.rst
index 7b4b9ab460..b6c6c8f645 100644
--- a/docs/report/test_configuration/vpp_performance_configuration/index.rst
+++ b/docs/report/test_configuration/vpp_performance_configuration/index.rst
@@ -1,14 +1,18 @@
VPP Throughput
==============
-.. toctree::
+.. note::
+
+ Data sources for reported test results:
+ i) `FD.io test executor vpp performance job 2n-skx`_,
+ `FD.io test executor vpp performance job 3n-skx`_,
+ `FD.io test executor vpp performance job 2n-clx`_,
+ `FD.io test executor vpp performance job 3n-hsw`_,
+ `FD.io test executor vpp performance job 3n-tsh`_,
+ `FD.io test executor vpp performance job 2n-dnv`_,
+ `FD.io test executor vpp performance job 3n-dnv`_,
+ ii) archived FD.io jobs test results
+ `output files <../../_static/archive/>`_.
- ../vpp_performance_configuration_3n_hsw/index
- ../vpp_performance_configuration_3n_tsh/index
- ../vpp_performance_configuration_2n_dnv/index
- ../vpp_performance_configuration_3n_dnv/index
- ../vpp_performance_configuration_2n_clx/index
+.. toctree::
-..
- ../vpp_performance_configuration_2n_skx/index
- ../vpp_performance_configuration_3n_skx/index
diff --git a/docs/report/test_operational_data/vpp_performance_operational_data/index.rst b/docs/report/test_operational_data/vpp_performance_operational_data/index.rst
index 7eca0de3b6..b6c6c8f645 100644
--- a/docs/report/test_operational_data/vpp_performance_operational_data/index.rst
+++ b/docs/report/test_operational_data/vpp_performance_operational_data/index.rst
@@ -1,14 +1,18 @@
VPP Throughput
==============
-.. toctree::
+.. note::
+
+ Data sources for reported test results:
+ i) `FD.io test executor vpp performance job 2n-skx`_,
+ `FD.io test executor vpp performance job 3n-skx`_,
+ `FD.io test executor vpp performance job 2n-clx`_,
+ `FD.io test executor vpp performance job 3n-hsw`_,
+ `FD.io test executor vpp performance job 3n-tsh`_,
+ `FD.io test executor vpp performance job 2n-dnv`_,
+ `FD.io test executor vpp performance job 3n-dnv`_,
+ ii) archived FD.io jobs test results
+ `output files <../../_static/archive/>`_.
- ../vpp_performance_operational_data_3n_hsw/index
- ../vpp_performance_operational_data_3n_tsh/index
- ../vpp_performance_operational_data_2n_dnv/index
- ../vpp_performance_operational_data_3n_dnv/index
- ../vpp_performance_operational_data_2n_clx/index
+.. toctree::
-..
- ../vpp_performance_operational_data_2n_skx/index
- ../vpp_performance_operational_data_3n_skx/index
diff --git a/resources/tools/presentation/generator_files.py b/resources/tools/presentation/generator_files.py
index ec142eded4..0a9b09b2ab 100644
--- a/resources/tools/presentation/generator_files.py
+++ b/resources/tools/presentation/generator_files.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Cisco and/or its affiliates.
+# Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -14,6 +14,8 @@
"""Algorithms to generate files.
"""
+from os.path import isfile
+from collections import OrderedDict
import logging
@@ -41,6 +43,8 @@ def generate_files(spec, data):
"""
generator = {
+ u"file_details_split": file_details_split,
+ u"file_details_split_html": file_details_split_html,
u"file_test_results": file_test_results,
u"file_test_results_html": file_test_results_html
}
@@ -74,6 +78,213 @@ def _tests_in_suite(suite_name, tests):
return False
+def file_details_split(file_spec, input_data, frmt=u"rst"):
+ """Generate the file(s) with algorithms
+ - file_details_split
+ specified in the specification file.
+
+ :param file_spec: File to generate.
+ :param input_data: Data to process.
+ :param frmt: Format can be: rst or html
+ :type file_spec: pandas.Series
+ :type input_data: InputData
+ :type frmt: str
+ """
+
+ fileset_file_name = f"{file_spec[u'output-file']}"
+ rst_header = (
+ u"\n"
+ u".. |br| raw:: html\n\n <br />\n\n\n"
+ u".. |prein| raw:: html\n\n <pre>\n\n\n"
+ u".. |preout| raw:: html\n\n </pre>\n\n"
+ )
+ start_lvl = file_spec.get(u"data-start-level", 4)
+
+ logging.info(f" Generating the file set {fileset_file_name} ...")
+
+ data_sets = file_spec.get(u"data", None)
+ if not data_sets:
+ logging.error(
+ f" No data sets specified for {file_spec[u'output-file']}, exit."
+ )
+ return
+
+ table_sets = file_spec.get(u"dir-tables", None)
+ if not table_sets:
+ logging.error(
+ f" No table sets specified for {file_spec[u'output-file']}, exit."
+ )
+ return
+
+ if len(data_sets) != len(table_sets):
+ logging.error(
+ f" The number of data sets and the number of table sets for "
+ f"{file_spec[u'output-file']} are not equal, exit."
+ )
+ return
+
+ chapters = OrderedDict()
+ for data_set, table_set in zip(data_sets, table_sets):
+
+ logging.info(f" Processing the table set {table_set}...")
+
+ table_lst = None
+ if frmt == u"html":
+ table_lst = get_files(table_set, u".rst", full_path=True)
+ elif frmt == u"rst":
+ table_lst = get_files(table_set, u".csv", full_path=True)
+
+ if not table_lst:
+ logging.error(
+ f" No tables to include in {table_set}. Skipping."
+ )
+ return
+
+ logging.info(u" Creating the test data set...")
+ tests = input_data.filter_data(
+ element=file_spec,
+ params=[u"name", u"parent", u"doc", u"type", u"level"],
+ data=data_set,
+ data_set=u"tests",
+ continue_on_error=True
+ )
+ if tests.empty:
+ return
+ tests = input_data.merge_data(tests)
+
+ logging.info(u" Creating the suite data set...")
+ suites = input_data.filter_data(
+ element=file_spec,
+ data=data_set,
+ continue_on_error=True,
+ data_set=u"suites"
+ )
+ if suites.empty:
+ return
+ suites = input_data.merge_data(suites)
+ suites.sort_index(inplace=True)
+
+ logging.info(u" Generating files...")
+
+ file_name = u""
+ sub_chapter = u"-".join(table_set.split(u"_")[-2:])
+ for suite_longname, suite in suites.items():
+
+ suite_lvl = len(suite_longname.split(u"."))
+ if suite_lvl < start_lvl:
+ # Not interested in this suite
+ continue
+
+ if suite_lvl == start_lvl:
+ # Our top-level suite
+ chapter = suite_longname.split(u'.')[-1]
+ file_name = f"{table_set}/{chapter}.rst"
+ logging.info(f" Writing file {file_name}")
+ with open(file_name, u"a") as file_handler:
+ file_handler.write(rst_header)
+ if chapters.get(chapter, None) is None:
+ chapters[chapter] = OrderedDict()
+ chapters[chapter][sub_chapter] = file_name
+
+ title_line = get_rst_title_char(suite[u"level"] - start_lvl + 2) * \
+ len(sub_chapter)
+ with open(file_name, u"a") as file_handler:
+ if not (u"-ndrpdr" in suite[u"name"] or
+ u"-mrr" in suite[u"name"] or
+ u"-dev" in suite[u"name"]):
+ file_handler.write(f"\n{sub_chapter}\n{title_line}\n")
+
+ if _tests_in_suite(suite[u"name"], tests):
+ for tbl_file in table_lst:
+ if suite[u"name"] in tbl_file:
+ file_handler.write(
+ f"\n{suite[u'name']}\n{title_line}\n"
+ )
+ file_handler.write(
+ f"\n{suite[u'doc']}\n".
+ replace(u'|br|', u'\n\n -')
+ )
+ if frmt == u"html":
+ file_handler.write(
+ f"\n.. include:: {tbl_file.split(u'/')[-1]}"
+ f"\n"
+ )
+ elif frmt == u"rst":
+ file_handler.write(
+ RST_INCLUDE_TABLE.format(
+ file_latex=tbl_file,
+ file_html=tbl_file.split(u"/")[-1])
+ )
+ break
+ titles = {
+ # VPP Perf, MRR
+ u"container_memif": u"LXC/DRC Container Memif",
+ u"crypto": u"IPsec IPv4 Routing",
+ u"hoststack": u"Hoststack Testing",
+ u"ip4": u"IPv4 Routing",
+ u"ip4_tunnels": u"IPv4 Tunnels",
+ u"ip6": u"IPv6 Routing",
+ u"ip6_tunnels": u"IPv6 Tunnels",
+ u"l2": u"L2 Ethernet Switching",
+ u"lb": u"Link Bonding",
+ u"nfv_density": u"NFV Service Density",
+ u"srv6": u"SRv6 Routing",
+ u"vm_vhost": u"KVM VMs vhost-user",
+ u"vts": u"Virtual Topology System",
+ # VPP Device
+ u"interfaces": u"Interfaces",
+ u"l2bd": u"L2 Bridge-domain",
+ u"l2patch": u"L2 Patch",
+ u"l2xc": u"L2 Cross-connect",
+ }
+
+ order_chapters = file_spec.get(u"order-chapters", None)
+ if not order_chapters:
+ order_chapters = chapters.keys()
+
+ order_sub_chapters = file_spec.get(u"order-sub-chapters", None)
+
+ for chapter in order_chapters:
+ sub_chapters = chapters.get(chapter, None)
+ if not sub_chapters:
+ continue
+ with open(f"{fileset_file_name}/index.rst", u"a") as file_handler:
+ file_handler.write(f" {chapter}\n")
+ chapter_file_name = f"{fileset_file_name}/{chapter}.rst"
+ if not isfile(chapter_file_name):
+ with open(chapter_file_name, u"a") as file_handler:
+ title = titles.get(chapter, chapter)
+ file_handler.write(
+ f"{title}\n"
+ f"{get_rst_title_char(2) * len(title)}\n\n"
+ f".. toctree::\n\n"
+ )
+
+ if not order_sub_chapters:
+ order_sub_chapters = sub_chapters.keys()
+ for sub_chapter in order_sub_chapters:
+ testbed = sub_chapters.get(sub_chapter, None)
+ if not testbed:
+ continue
+ with open(chapter_file_name, u"a") as file_handler:
+ file_handler.write(
+ f" ../{u'/'.join(testbed.split(u'/')[-2:])}\n"
+ )
+
+
+def file_details_split_html(file_spec, input_data):
+ """Generate the file(s) with algorithms
+ - file_details_split_html
+ specified in the specification file.
+
+ :param file_spec: File to generate.
+ :param input_data: Data to process.
+ :type file_spec: pandas.Series
+ :type input_data: InputData
+ """
+ file_details_split(file_spec, input_data, frmt=u"html")
+
+
def file_test_results(file_spec, input_data, frmt=u"rst"):
"""Generate the file(s) with algorithms
- file_test_results
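
To see what the new splitting does, here is a small standalone Python illustration (values are hypothetical, chosen only to match the directory and suite naming used in this commit) of how file_details_split derives the chapter (test area) and sub-chapter (testbed) names:

    # Hypothetical inputs; the real ones come from the spec's dir-tables
    # entries and the suites data set.
    table_set = u"docs/report/detailed_test_results/vpp_performance_results_2n_clx"
    suite_longname = u"tests.vpp.perf.ip4"
    start_lvl = 4

    # Sub-chapter name, e.g. u"2n-clx", taken from the table directory name:
    sub_chapter = u"-".join(table_set.split(u"_")[-2:])

    # Chapter name, e.g. u"ip4", taken from the suite at the start level:
    suite_lvl = len(suite_longname.split(u"."))
    if suite_lvl == start_lvl:
        chapter = suite_longname.split(u".")[-1]

    print(chapter, sub_chapter)  # -> ip4 2n-clx
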
diff --git a/resources/tools/presentation/generator_plots.py b/resources/tools/presentation/generator_plots.py
index 74db877007..5c9698912f 100644
--- a/resources/tools/presentation/generator_plots.py
+++ b/resources/tools/presentation/generator_plots.py
@@ -314,8 +314,8 @@ def plot_hdrh_lat_by_percentile(plot, input_data):
filename=file_name)
# Add link to the file:
if file_links and target_links:
- with open(file_links, u"a") as fw:
- fw.write(
+ with open(file_links, u"a") as file_handler:
+ file_handler.write(
f"- `{name_link} "
f"<{target_links}/{file_name.split(u'/')[-1]}>`_\n"
)
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
index 7a7db5d59f..15a101ced3 100644
--- a/resources/tools/presentation/generator_tables.py
+++ b/resources/tools/presentation/generator_tables.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2019 Cisco and/or its affiliates.
+# Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -98,10 +98,10 @@ def table_oper_data_html(table, input_data):
data = input_data.merge_data(data)
sort_tests = table.get(u"sort", None)
- if sort_tests and sort_tests in (u"ascending", u"descending"):
+ if sort_tests:
args = dict(
inplace=True,
- ascending=True if sort_tests == u"ascending" else False
+ ascending=(sort_tests == u"ascending")
)
data.sort_index(**args)
@@ -173,7 +173,7 @@ def table_oper_data_html(table, input_data):
u"Average Vector Size"
)
- for dut_name, dut_data in tst_data[u"show-run"].items():
+ for dut_data in tst_data[u"show-run"].values():
trow = ET.SubElement(
tbl, u"tr", attrib=dict(bgcolor=colors[u"header"])
)
@@ -183,15 +183,7 @@ def table_oper_data_html(table, input_data):
if dut_data.get(u"threads", None) is None:
tcol.text = u"No Data"
continue
- # bold = ET.SubElement(tcol, u"b")
- # bold.text = dut_name
- #
- # trow = ET.SubElement(
- # tbl, u"tr", attrib=dict(bgcolor=colors[u"body"][0])
- # )
- # tcol = ET.SubElement(
- # trow, u"td", attrib=dict(align=u"left", colspan=u"6")
- # )
+
bold = ET.SubElement(tcol, u"b")
bold.text = (
f"Host IP: {dut_data.get(u'host', '')}, "
@@ -304,10 +296,10 @@ def table_merged_details(table, input_data):
data = input_data.merge_data(data)
sort_tests = table.get(u"sort", None)
- if sort_tests and sort_tests in (u"ascending", u"descending"):
+ if sort_tests:
args = dict(
inplace=True,
- ascending=True if sort_tests == u"ascending" else False
+ ascending=(sort_tests == u"ascending")
)
data.sort_index(**args)
@@ -628,8 +620,8 @@ def table_perf_comparison(table, input_data):
for tst_name, tst_data in data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
groups = re.search(REGEX_NIC, tst_data[u"parent"])
@@ -658,8 +650,8 @@ def table_perf_comparison(table, input_data):
for tst_name, tst_data in rpl_data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
name = \
@@ -687,8 +679,8 @@ def table_perf_comparison(table, input_data):
for tst_name, tst_data in data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
groups = re.search(REGEX_NIC, tst_data[u"parent"])
@@ -719,8 +711,8 @@ def table_perf_comparison(table, input_data):
for tst_name, tst_data in rpl_data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
name = \
@@ -749,8 +741,8 @@ def table_perf_comparison(table, input_data):
for tst_name, tst_data in data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
continue
@@ -903,8 +895,8 @@ def table_perf_comparison_nic(table, input_data):
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
name = f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
@@ -934,8 +926,8 @@ def table_perf_comparison_nic(table, input_data):
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
name = \
@@ -965,8 +957,8 @@ def table_perf_comparison_nic(table, input_data):
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
name = f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
@@ -996,8 +988,8 @@ def table_perf_comparison_nic(table, input_data):
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
name = \
@@ -1028,8 +1020,8 @@ def table_perf_comparison_nic(table, input_data):
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
if (u"across topologies" in table[u"title"].lower() or
- (u" 3n-" in table[u"title"].lower() and
- u" 2n-" in table[u"title"].lower())):
+ (u" 3n-" in table[u"title"].lower() and
+ u" 2n-" in table[u"title"].lower())):
tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
continue
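
As a side note on the simplified sort handling above: any truthy "sort" value other than u"ascending" now sorts descending, whereas previously values outside the two literals disabled sorting entirely. A tiny standalone example (hypothetical data, pandas assumed available):

    import pandas as pd

    data = pd.Series([1, 2, 3], index=[u"b", u"c", u"a"])
    sort_tests = u"descending"
    if sort_tests:
        args = dict(
            inplace=True,
            ascending=(sort_tests == u"ascending")
        )
        data.sort_index(**args)
    print(list(data.index))  # -> ['c', 'b', 'a']
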
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index bb802b3d83..f7869d889a 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -460,9 +460,9 @@ class ExecutionChecker(ResultVisitor):
try:
out_msg = (
f"1. {(data[u'ndr_low'] / 1e6):5.2f} "
- f"{data[u'ndr_low_b']:.2f}"
+ f"{data[u'ndr_low_b']:5.2f}"
f"\n2. {(data[u'pdr_low'] / 1e6):5.2f} "
- f"{data[u'pdr_low_b']:.2f}"
+ f"{data[u'pdr_low_b']:5.2f}"
)
latency = (
_process_lat(data[u'pdr_lat_10_1'], data[u'pdr_lat_10_2']),
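
The only functional change here is the wider format specification. A tiny standalone snippet (not project code) showing its effect: the added minimum field width of 5 pads short bandwidth values so the NDR/PDR columns of the generated message stay aligned.

    print(f"{7.4:.2f}")     # '7.40'  - old spec, width follows the value
    print(f"{7.4:5.2f}")    # ' 7.40' - new spec, padded to 5 characters
    print(f"{12.36:5.2f}")  # '12.36' - already 5 characters wide
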
diff --git a/resources/tools/presentation/specification.yaml b/resources/tools/presentation/specification.yaml
index c979915594..4bab6508b8 100644
--- a/resources/tools/presentation/specification.yaml
+++ b/resources/tools/presentation/specification.yaml
@@ -33,6 +33,7 @@
DIR[DTR,PERF,DPDK,2N,DNV]: "{DIR[DTR]}/dpdk_performance_results_2n_dnv"
DIR[DTR,PERF,DPDK,3N,DNV]: "{DIR[DTR]}/dpdk_performance_results_3n_dnv"
DIR[DTR,PERF,DPDK,3N,TSH]: "{DIR[DTR]}/dpdk_performance_results_3n_tsh"
+ DIR[DTR,PERF,VPP,ALL]: "{DIR[DTR]}/vpp_performance_results"
DIR[DTR,PERF,VPP,3N,HSW]: "{DIR[DTR]}/vpp_performance_results_3n_hsw"
DIR[DTR,PERF,VPP,3N,SKX]: "{DIR[DTR]}/vpp_performance_results_3n_skx"
DIR[DTR,PERF,VPP,2N,SKX]: "{DIR[DTR]}/vpp_performance_results_2n_skx"
@@ -40,6 +41,7 @@
DIR[DTR,PERF,VPP,3N,TSH]: "{DIR[DTR]}/vpp_performance_results_3n_tsh"
DIR[DTR,PERF,VPP,3N,DNV]: "{DIR[DTR]}/vpp_performance_results_3n_dnv"
DIR[DTR,PERF,VPP,2N,DNV]: "{DIR[DTR]}/vpp_performance_results_2n_dnv"
+ DIR[DTR,MRR,VPP,ALL]: "{DIR[DTR]}/vpp_mrr_results"
DIR[DTR,MRR,VPP,3N,HSW]: "{DIR[DTR]}/vpp_mrr_results_3n_hsw"
DIR[DTR,MRR,VPP,3N,SKX]: "{DIR[DTR]}/vpp_mrr_results_3n_skx"
DIR[DTR,MRR,VPP,2N,SKX]: "{DIR[DTR]}/vpp_mrr_results_2n_skx"
@@ -49,10 +51,10 @@
DIR[DTR,MRR,VPP,2N,DNV]: "{DIR[DTR]}/vpp_mrr_results_2n_dnv"
DIR[DTR,PERF,COT,3N,HSW]: "{DIR[DTR]}/cot_performance_results_3n_hsw"
DIR[DTR,FUNC,VPP,DEVICE,UBUNTU]: "{DIR[DTR]}/vpp_device_results_ubuntu"
- DIR[DTR,PERF,VPP,IMPRV]: "{DIR[WORKING,SRC]}/vpp_performance_tests/performance_improvements"
# Detailed test configurations
DIR[DTC]: "{DIR[WORKING,SRC]}/test_configuration"
+ DIR[DTC,PERF,VPP,ALL]: "{DIR[DTC]}/vpp_performance_configuration"
DIR[DTC,PERF,VPP,3N,HSW]: "{DIR[DTC]}/vpp_performance_configuration_3n_hsw"
DIR[DTC,PERF,VPP,3N,SKX]: "{DIR[DTC]}/vpp_performance_configuration_3n_skx"
DIR[DTC,PERF,VPP,2N,SKX]: "{DIR[DTC]}/vpp_performance_configuration_2n_skx"
@@ -60,6 +62,7 @@
DIR[DTC,PERF,VPP,3N,TSH]: "{DIR[DTC]}/vpp_performance_configuration_3n_tsh"
DIR[DTC,PERF,VPP,3N,DNV]: "{DIR[DTC]}/vpp_performance_configuration_3n_dnv"
DIR[DTC,PERF,VPP,2N,DNV]: "{DIR[DTC]}/vpp_performance_configuration_2n_dnv"
+ DIR[DTC,MRR,VPP,ALL]: "{DIR[DTC]}/vpp_mrr_configuration"
DIR[DTC,MRR,VPP,3N,HSW]: "{DIR[DTC]}/vpp_mrr_configuration_3n_hsw"
DIR[DTC,MRR,VPP,3N,SKX]: "{DIR[DTC]}/vpp_mrr_configuration_3n_skx"
DIR[DTC,MRR,VPP,2N,SKX]: "{DIR[DTC]}/vpp_mrr_configuration_2n_skx"
@@ -71,6 +74,7 @@
# Detailed tests operational data
DIR[DTO]: "{DIR[WORKING,SRC]}/test_operational_data"
+ DIR[DTO,PERF,VPP,ALL]: "{DIR[DTO]}/vpp_performance_operational_data"
DIR[DTO,PERF,VPP,3N,HSW]: "{DIR[DTO]}/vpp_performance_operational_data_3n_hsw"
DIR[DTO,PERF,VPP,3N,SKX]: "{DIR[DTO]}/vpp_performance_operational_data_3n_skx"
DIR[DTO,PERF,VPP,2N,SKX]: "{DIR[DTO]}/vpp_performance_operational_data_2n_skx"
@@ -5535,355 +5539,235 @@
### F I L E S ###
################################################################################
-# VPP Performance Results 3n-hsw
+# VPP Performance Results
- type: "file"
- title: "VPP Performance Results 3n-hsw"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,PERF,VPP,3N,HSW]}"
- dir-tables: "{DIR[DTR,PERF,VPP,3N,HSW]}"
- data: "vpp-perf-results-3n-hsw"
- filter: "all"
- data-start-level: 4
-
-## VPP Performance Results 3n-skx
-#- type: "file"
-# title: "VPP Performance Results 3n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTR,PERF,VPP,3N,SKX]}"
-# dir-tables: "{DIR[DTR,PERF,VPP,3N,SKX]}"
-# data: "vpp-perf-results-3n-skx"
-# filter: "all"
-# data-start-level: 4
-#
-## VPP Performance Results 2n-skx
-#- type: "file"
-# title: "VPP Performance Results 2n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTR,PERF,VPP,2N,SKX]}"
-# dir-tables: "{DIR[DTR,PERF,VPP,2N,SKX]}"
-# data: "vpp-perf-results-2n-skx"
-# filter: "all"
-# data-start-level: 4
-
-# VPP Performance Results 2n-clx
-- type: "file"
- title: "VPP Performance Results 2n-clx"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,PERF,VPP,2N,CLX]}"
- dir-tables: "{DIR[DTR,PERF,VPP,2N,CLX]}"
- data: "vpp-perf-results-2n-clx"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Results 3n-tsh
-- type: "file"
- title: "VPP Performance Results 3n-tsh"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,PERF,VPP,3N,TSH]}"
- dir-tables: "{DIR[DTR,PERF,VPP,3N,TSH]}"
- data: "vpp-perf-results-3n-tsh"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Results 3n-dnv
-- type: "file"
- title: "VPP Performance Results 3n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,PERF,VPP,3N,DNV]}"
- dir-tables: "{DIR[DTR,PERF,VPP,3N,DNV]}"
- data: "vpp-perf-results-3n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Results 2n-dnv
-- type: "file"
- title: "VPP Performance Results 2n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,PERF,VPP,2N,DNV]}"
- dir-tables: "{DIR[DTR,PERF,VPP,2N,DNV]}"
- data: "vpp-perf-results-2n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Configuration 3n-hsw
-- type: "file"
- title: "VPP Performance Configuration 3n-hsw"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,PERF,VPP,3N,HSW]}"
- dir-tables: "{DIR[DTC,PERF,VPP,3N,HSW]}"
- data: "vpp-perf-results-3n-hsw"
- filter: "all"
- data-start-level: 4
-
-## VPP Performance Configuration 3n-skx
-#- type: "file"
-# title: "VPP Performance Configuration 3n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTC,PERF,VPP,3N,SKX]}"
-# dir-tables: "{DIR[DTC,PERF,VPP,3N,SKX]}"
-# data: "vpp-perf-results-3n-skx"
-# filter: "all"
-# data-start-level: 4
-#
-## VPP Performance Configuration 2n-skx
-#- type: "file"
-# title: "VPP Performance Configuration 2n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTC,PERF,VPP,2N,SKX]}"
-# dir-tables: "{DIR[DTC,PERF,VPP,2N,SKX]}"
-# data: "vpp-perf-results-2n-skx"
-# filter: "all"
-# data-start-level: 4
-
-# VPP Performance Configuration 2n-clx
-- type: "file"
- title: "VPP Performance Configuration 2n-clx"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,PERF,VPP,2N,CLX]}"
- dir-tables: "{DIR[DTC,PERF,VPP,2N,CLX]}"
- data: "vpp-perf-results-2n-clx"
+ title: "VPP Performance Results"
+ algorithm: "file_details_split"
+ output-file: "{DIR[DTR,PERF,VPP,ALL]}"
+ dir-tables:
+# - "{DIR[DTR,PERF,VPP,2N,SKX]}"
+# - "{DIR[DTR,PERF,VPP,3N,SKX]}"
+ - "{DIR[DTR,PERF,VPP,2N,CLX]}"
+ - "{DIR[DTR,PERF,VPP,3N,HSW]}"
+ - "{DIR[DTR,PERF,VPP,3N,TSH]}"
+ - "{DIR[DTR,PERF,VPP,2N,DNV]}"
+ - "{DIR[DTR,PERF,VPP,3N,DNV]}"
+ data:
+# - "vpp-perf-results-2n-skx"
+# - "vpp-perf-results-3n-skx"
+ - "vpp-perf-results-2n-clx"
+ - "vpp-perf-results-3n-hsw"
+ - "vpp-perf-results-3n-tsh"
+ - "vpp-perf-results-2n-dnv"
+ - "vpp-perf-results-3n-dnv"
filter: "all"
data-start-level: 4
-
-# VPP Performance Configuration 3n-tsh
+ order-chapters:
+ - "l2"
+ - "ip4"
+ - "ip6"
+ - "srv6"
+ - "ip4_tunnels"
+ - "ip6_tunnels"
+ - "vm_vhost"
+ - "container_memif"
+ - "crypto"
+ - "vts"
+ - "lb"
+ - "nfv_density"
+ - "hoststack"
+ order-sub-chapters:
+ - "2n-skx"
+ - "3n-skx"
+ - "2n-clx"
+ - "3n-hsw"
+ - "3n-tsh"
+ - "2n-dnv"
+ - "3n-dnv"
+
+# VPP Performance Configuration
- type: "file"
- title: "VPP Performance Configuration 3n-tsh"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,PERF,VPP,3N,TSH]}"
- dir-tables: "{DIR[DTC,PERF,VPP,3N,TSH]}"
- data: "vpp-perf-results-3n-tsh"
+ title: "VPP Performance Configuration"
+ algorithm: "file_details_split"
+ output-file: "{DIR[DTC,PERF,VPP,ALL]}"
+ dir-tables:
+# - "{DIR[DTC,PERF,VPP,2N,SKX]}"
+# - "{DIR[DTC,PERF,VPP,3N,SKX]}"
+ - "{DIR[DTC,PERF,VPP,2N,CLX]}"
+ - "{DIR[DTC,PERF,VPP,3N,HSW]}"
+ - "{DIR[DTC,PERF,VPP,3N,TSH]}"
+ - "{DIR[DTC,PERF,VPP,2N,DNV]}"
+ - "{DIR[DTC,PERF,VPP,3N,DNV]}"
+ data:
+# - "vpp-perf-results-2n-skx"
+# - "vpp-perf-results-3n-skx"
+ - "vpp-perf-results-2n-clx"
+ - "vpp-perf-results-3n-hsw"
+ - "vpp-perf-results-3n-tsh"
+ - "vpp-perf-results-2n-dnv"
+ - "vpp-perf-results-3n-dnv"
filter: "all"
data-start-level: 4
-
-# VPP Performance Configuration 3n-dnv
+ order-chapters:
+ - "l2"
+ - "ip4"
+ - "ip6"
+ - "srv6"
+ - "ip4_tunnels"
+ - "ip6_tunnels"
+ - "vm_vhost"
+ - "container_memif"
+ - "crypto"
+ - "vts"
+ - "lb"
+ - "nfv_density"
+ - "hoststack"
+ order-sub-chapters:
+ - "2n-skx"
+ - "3n-skx"
+ - "2n-clx"
+ - "3n-hsw"
+ - "3n-tsh"
+ - "2n-dnv"
+ - "3n-dnv"
+
+# VPP Performance Operational
- type: "file"
- title: "VPP Performance Configuration 3n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,PERF,VPP,3N,DNV]}"
- dir-tables: "{DIR[DTC,PERF,VPP,3N,DNV]}"
- data: "vpp-perf-results-3n-dnv"
+ title: "VPP Performance Operational"
+ algorithm: "file_details_split_html"
+ output-file: "{DIR[DTO,PERF,VPP,ALL]}"
+ dir-tables:
+# - "{DIR[DTO,PERF,VPP,2N,SKX]}"
+# - "{DIR[DTO,PERF,VPP,3N,SKX]}"
+ - "{DIR[DTO,PERF,VPP,2N,CLX]}"
+ - "{DIR[DTO,PERF,VPP,3N,HSW]}"
+ - "{DIR[DTO,PERF,VPP,3N,TSH]}"
+ - "{DIR[DTO,PERF,VPP,2N,DNV]}"
+ - "{DIR[DTO,PERF,VPP,3N,DNV]}"
+ data:
+# - "vpp-perf-results-2n-skx"
+# - "vpp-perf-results-3n-skx"
+ - "vpp-perf-results-2n-clx"
+ - "vpp-perf-results-3n-hsw"
+ - "vpp-perf-results-3n-tsh"
+ - "vpp-perf-results-2n-dnv"
+ - "vpp-perf-results-3n-dnv"
filter: "all"
data-start-level: 4
-
-# VPP Performance Configuration 2n-dnv
+ order-chapters:
+ - "l2"
+ - "ip4"
+ - "ip6"
+ - "srv6"
+ - "ip4_tunnels"
+ - "ip6_tunnels"
+ - "vm_vhost"
+ - "container_memif"
+ - "crypto"
+ - "vts"
+ - "lb"
+ - "nfv_density"
+ - "hoststack"
+ order-sub-chapters:
+ - "2n-skx"
+ - "3n-skx"
+ - "2n-clx"
+ - "3n-hsw"
+ - "3n-tsh"
+ - "2n-dnv"
+ - "3n-dnv"
+
+# VPP MRR Results
- type: "file"
- title: "VPP Performance Configuration 2n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,PERF,VPP,2N,DNV]}"
- dir-tables: "{DIR[DTC,PERF,VPP,2N,DNV]}"
- data: "vpp-perf-results-2n-dnv"
+ title: "VPP MRR Results"
+ algorithm: "file_details_split"
+ output-file: "{DIR[DTR,MRR,VPP,ALL]}"
+ dir-tables:
+# - "{DIR[DTR,MRR,VPP,2N,SKX]}"
+# - "{DIR[DTR,MRR,VPP,3N,SKX]}"
+ - "{DIR[DTR,MRR,VPP,2N,CLX]}"
+ - "{DIR[DTR,MRR,VPP,3N,HSW]}"
+ - "{DIR[DTR,MRR,VPP,3N,TSH]}"
+ - "{DIR[DTR,MRR,VPP,2N,DNV]}"
+ - "{DIR[DTR,MRR,VPP,3N,DNV]}"
+ data:
+# - "vpp-mrr-results-2n-skx"
+# - "vpp-mrr-results-3n-skx"
+ - "vpp-mrr-results-2n-clx"
+ - "vpp-mrr-results-3n-hsw"
+ - "vpp-mrr-results-3n-tsh"
+ - "vpp-mrr-results-2n-dnv"
+ - "vpp-mrr-results-3n-dnv"
filter: "all"
data-start-level: 4
-
-# VPP Performance Operational Data 3n-hsw
+ order-chapters:
+ - "l2"
+ - "ip4"
+ - "ip6"
+ - "srv6"
+ - "ip4_tunnels"
+ - "ip6_tunnels"
+ - "vm_vhost"
+ - "container_memif"
+ - "crypto"
+ - "vts"
+ - "lb"
+ - "nfv_density"
+ - "hoststack"
+ order-sub-chapters:
+ - "2n-skx"
+ - "3n-skx"
+ - "2n-clx"
+ - "3n-hsw"
+ - "3n-tsh"
+ - "2n-dnv"
+ - "3n-dnv"
+
+# VPP MRR Configuration
- type: "file"
- title: "VPP Performance Operational Data 3n-hsw"
- algorithm: "file_test_results_html"
- output-file: "{DIR[DTO,PERF,VPP,3N,HSW]}"
- dir-tables: "{DIR[DTO,PERF,VPP,3N,HSW]}"
- data: "vpp-perf-results-3n-hsw"
- filter: "all"
- data-start-level: 4
-
-## VPP Performance Operational Data 3n-skx
-#- type: "file"
-# title: "VPP Performance Operational Data 3n-skx"
-# algorithm: "file_test_results_html"
-# output-file: "{DIR[DTO,PERF,VPP,3N,SKX]}"
-# dir-tables: "{DIR[DTO,PERF,VPP,3N,SKX]}"
-# data: "vpp-perf-results-3n-skx"
-# filter: "all"
-# data-start-level: 4
-#
-## VPP Performance Operational Data 2n-skx
-#- type: "file"
-# title: "VPP Performance Operational Data 2n-skx"
-# algorithm: "file_test_results_html"
-# output-file: "{DIR[DTO,PERF,VPP,2N,SKX]}"
-# dir-tables: "{DIR[DTO,PERF,VPP,2N,SKX]}"
-# data: "vpp-perf-results-2n-skx"
-# filter: "all"
-# data-start-level: 4
-
-# VPP Performance Operational Data 2n-clx
-- type: "file"
- title: "VPP Performance Operational Data 2n-clx"
- algorithm: "file_test_results_html"
- output-file: "{DIR[DTO,PERF,VPP,2N,CLX]}"
- dir-tables: "{DIR[DTO,PERF,VPP,2N,CLX]}"
- data: "vpp-perf-results-2n-clx"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Operational Data 3n-tsh
-- type: "file"
- title: "VPP Performance Operational Data 3n-tsh"
- algorithm: "file_test_results_html"
- output-file: "{DIR[DTO,PERF,VPP,3N,TSH]}"
- dir-tables: "{DIR[DTO,PERF,VPP,3N,TSH]}"
- data: "vpp-perf-results-3n-tsh"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Operational Data 3n-dnv
-- type: "file"
- title: "VPP Performance Operational Data 3n-dnv"
- algorithm: "file_test_results_html"
- output-file: "{DIR[DTO,PERF,VPP,3N,DNV]}"
- dir-tables: "{DIR[DTO,PERF,VPP,3N,DNV]}"
- data: "vpp-perf-results-3n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP Performance Operational Data 2n-dnv
-- type: "file"
- title: "VPP Performance Operational Data 2n-dnv"
- algorithm: "file_test_results_html"
- output-file: "{DIR[DTO,PERF,VPP,2N,DNV]}"
- dir-tables: "{DIR[DTO,PERF,VPP,2N,DNV]}"
- data: "vpp-perf-results-2n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Results 3n-hsw
-- type: "file"
- title: "VPP MRR Results 3n-hsw"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,MRR,VPP,3N,HSW]}"
- dir-tables: "{DIR[DTR,MRR,VPP,3N,HSW]}"
- data: "vpp-mrr-results-3n-hsw"
- filter: "all"
- data-start-level: 4
-
-## VPP MRR Results 3n-skx
-#- type: "file"
-# title: "VPP MRR Results 3n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTR,MRR,VPP,3N,SKX]}"
-# dir-tables: "{DIR[DTR,MRR,VPP,3N,SKX]}"
-# data: "vpp-mrr-results-3n-skx"
-# filter: "all"
-# data-start-level: 4
-#
-## VPP MRR Results 2n-skx
-#- type: "file"
-# title: "VPP MRR Results 2n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTR,MRR,VPP,2N,SKX]}"
-# dir-tables: "{DIR[DTR,MRR,VPP,2N,SKX]}"
-# data: "vpp-mrr-results-2n-skx"
-# filter: "all"
-# data-start-level: 4
-
-# VPP MRR Results 2n-clx
-- type: "file"
- title: "VPP MRR Results 2n-clx"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,MRR,VPP,2N,CLX]}"
- dir-tables: "{DIR[DTR,MRR,VPP,2N,CLX]}"
- data: "vpp-mrr-results-2n-clx"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Results 3n-tsh
-- type: "file"
- title: "VPP MRR Results 3n-tsh"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,MRR,VPP,3N,TSH]}"
- dir-tables: "{DIR[DTR,MRR,VPP,3N,TSH]}"
- data: "vpp-mrr-results-3n-tsh"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Results 3n-dnv
-- type: "file"
- title: "VPP MRR Results 3n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,MRR,VPP,3N,DNV]}"
- dir-tables: "{DIR[DTR,MRR,VPP,3N,DNV]}"
- data: "vpp-mrr-results-3n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Results 2n-dnv
-- type: "file"
- title: "VPP MRR Results 2n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTR,MRR,VPP,2N,DNV]}"
- dir-tables: "{DIR[DTR,MRR,VPP,2N,DNV]}"
- data: "vpp-mrr-results-2n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Configuration 3n-hsw
-- type: "file"
- title: "VPP MRR Configuration 3n-hsw"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,MRR,VPP,3N,HSW]}"
- dir-tables: "{DIR[DTC,MRR,VPP,3N,HSW]}"
- data: "vpp-mrr-results-3n-hsw"
- filter: "all"
- data-start-level: 4
-
-## VPP MRR Configuration 3n-skx
-#- type: "file"
-# title: "VPP MRR Configuration 3n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTC,MRR,VPP,3N,SKX]}"
-# dir-tables: "{DIR[DTC,MRR,VPP,3N,SKX]}"
-# data: "vpp-mrr-results-3n-skx"
-# filter: "all"
-# data-start-level: 4
-#
-## VPP MRR Configuration 2n-skx
-#- type: "file"
-# title: "VPP MRR Configuration 2n-skx"
-# algorithm: "file_test_results"
-# output-file: "{DIR[DTC,MRR,VPP,2N,SKX]}"
-# dir-tables: "{DIR[DTC,MRR,VPP,2N,SKX]}"
-# data: "vpp-mrr-results-2n-skx"
-# filter: "all"
-# data-start-level: 4
-
-# VPP MRR Configuration 2n-clx
-- type: "file"
- title: "VPP MRR Configuration 2n-clx"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,MRR,VPP,2N,CLX]}"
- dir-tables: "{DIR[DTC,MRR,VPP,2N,CLX]}"
- data: "vpp-mrr-results-2n-clx"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Configuration 3n-tsh
-- type: "file"
- title: "VPP MRR Configuration 3n-tsh"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,MRR,VPP,3N,TSH]}"
- dir-tables: "{DIR[DTC,MRR,VPP,3N,TSH]}"
- data: "vpp-mrr-results-3n-tsh"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Configuration 3n-dnv
-- type: "file"
- title: "VPP MRR Configuration 3n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,MRR,VPP,3N,DNV]}"
- dir-tables: "{DIR[DTC,MRR,VPP,3N,DNV]}"
- data: "vpp-mrr-results-3n-dnv"
- filter: "all"
- data-start-level: 4
-
-# VPP MRR Configuration 2n-dnv
-- type: "file"
- title: "VPP MRR Configuration 2n-dnv"
- algorithm: "file_test_results"
- output-file: "{DIR[DTC,MRR,VPP,2N,DNV]}"
- dir-tables: "{DIR[DTC,MRR,VPP,2N,DNV]}"
- data: "vpp-mrr-results-2n-dnv"
+ title: "VPP MRR Configuration"
+ algorithm: "file_details_split"
+ output-file: "{DIR[DTC,MRR,VPP,ALL]}"
+ dir-tables:
+# - "{DIR[DTC,MRR,VPP,2N,SKX]}"
+# - "{DIR[DTC,MRR,VPP,3N,SKX]}"
+ - "{DIR[DTC,MRR,VPP,2N,CLX]}"
+ - "{DIR[DTC,MRR,VPP,3N,HSW]}"
+ - "{DIR[DTC,MRR,VPP,3N,TSH]}"
+ - "{DIR[DTC,MRR,VPP,2N,DNV]}"
+ - "{DIR[DTC,MRR,VPP,3N,DNV]}"
+ data:
+# - "vpp-mrr-results-2n-skx"
+# - "vpp-mrr-results-3n-skx"
+ - "vpp-mrr-results-2n-clx"
+ - "vpp-mrr-results-3n-hsw"
+ - "vpp-mrr-results-3n-tsh"
+ - "vpp-mrr-results-2n-dnv"
+ - "vpp-mrr-results-3n-dnv"
filter: "all"
data-start-level: 4
+ order-chapters:
+ - "l2"
+ - "ip4"
+ - "ip6"
+ - "srv6"
+ - "ip4_tunnels"
+ - "ip6_tunnels"
+ - "vm_vhost"
+ - "container_memif"
+ - "crypto"
+ - "vts"
+ - "lb"
+ - "nfv_density"
+ - "hoststack"
+ order-sub-chapters:
+ - "2n-skx"
+ - "3n-skx"
+ - "2n-clx"
+ - "3n-hsw"
+ - "3n-tsh"
+ - "2n-dnv"
+ - "3n-dnv"
# VPP Device Results - Ubuntu
- type: "file"
diff --git a/resources/tools/presentation/specification_parser.py b/resources/tools/presentation/specification_parser.py
index 2360b78f2d..d2939bb4c1 100644
--- a/resources/tools/presentation/specification_parser.py
+++ b/resources/tools/presentation/specification_parser.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2019 Cisco and/or its affiliates.
+# Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -188,7 +188,7 @@ class Specification:
generated.
:returns: List of specifications of Continuous Performance Trending and
- Analysis to be generated.
+ Analysis to be generated.
:rtype: list
"""
return self._specification[u"cpta"]
@@ -196,10 +196,13 @@ class Specification:
def set_input_state(self, job, build_nr, state):
"""Set the state of input
- :param job:
- :param build_nr:
- :param state:
- :return:
+ :param job: Job name.
+ :param build_nr: Build number.
+ :param state: The new input state.
+ :type job: str
+ :type build_nr: int
+ :type state: str
+ :raises: PresentationError if wrong job and/or build is provided.
"""
try:
@@ -221,10 +224,13 @@ class Specification:
def set_input_file_name(self, job, build_nr, file_name):
"""Set the state of input
- :param job:
- :param build_nr:
- :param file_name:
- :return:
+ :param job: Job name.
+ :param build_nr: Build number.
+ :param file_name: The new file name.
+ :type job: str
+ :type build_nr: int
+ :type file_name: str
+ :raises: PresentationError if wrong job and/or build is provided.
"""
try:
@@ -254,7 +260,7 @@ class Specification:
- lastCompletedBuild
:type job" str
:raises PresentationError: If it is not possible to get the build
- number.
+ number.
:returns: The build number.
:rtype: int
"""
@@ -287,7 +293,7 @@ class Specification:
specification YAML file.
:param item_type: Item type: Top level items in specification YAML file,
- e.g.: environment, input, output.
+ e.g.: environment, input, output.
:type item_type: str
:returns: Index of the given item type.
:rtype: int
@@ -321,14 +327,14 @@ class Specification:
:param data: The data where the tags will be replaced by their values.
:param src_data: Data where the tags are defined. It is a dictionary where
- the key is the tag and the value is the tag value. If not given, 'data'
- is used instead.
- :type data: str or dict
+ the key is the tag and the value is the tag value. If not given,
+ 'data' is used instead.
+ :type data: str, list or dict
:type src_data: dict
:returns: Data with the tags replaced.
- :rtype: str or dict
+ :rtype: str, list or dict
:raises: PresentationError if it is not possible to replace the tag or
- the data is not the supported data type (str, dict).
+ the data is not the supported data type (str, list or dict).
"""
if src_data is None:
@@ -338,8 +344,15 @@ class Specification:
tag = self._find_tag(data)
if tag is not None:
data = data.replace(tag, src_data[tag[1:-1]])
+ return data
+
+ if isinstance(data, list):
+ new_list = list()
+ for item in data:
+ new_list.append(self._replace_tags(item, src_data))
+ return new_list
- elif isinstance(data, dict):
+ if isinstance(data, dict):
counter = 0
for key, value in data.items():
tag = self._find_tag(value)
@@ -353,10 +366,9 @@ class Specification:
)
if counter:
self._replace_tags(data, src_data)
- else:
- raise PresentationError(u"Replace tags: Not supported data type.")
+ return data
- return data
+ raise PresentationError(u"Replace tags: Not supported data type.")
def _parse_env(self):
"""Parse environment specification in the specification YAML file.
@@ -774,6 +786,19 @@ class Specification:
f"Data set {data_set} is not defined in the "
f"configuration section."
)
+ elif isinstance(element.get(u"data", None), list):
+ new_list = list()
+ for item in element[u"data"]:
+ try:
+ new_list.append(
+ self.configuration[u"data-sets"][item]
+ )
+ except KeyError:
+ raise PresentationError(
+ f"Data set {item} is not defined in the "
+ f"configuration section."
+ )
+ element[u"data"] = new_list
# Parse elements:
if element[u"type"] == u"table":
@@ -809,14 +834,14 @@ class Specification:
"""Parse specification in the specification YAML file.
:raises: PresentationError if an error occurred while parsing the
- specification file.
+ specification file.
"""
try:
self._cfg_yaml = load(self._cfg_file, Loader=FullLoader)
except YAMLError as err:
raise PresentationError(msg=u"An error occurred while parsing the "
u"specification file.",
- details=str(err))
+ details=repr(err))
self._parse_env()
self._parse_configuration()
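
Finally, a standalone, simplified sketch of the string/list/dict handling that _replace_tags now covers (assumptions: a single {tag} per string, plain built-in exceptions instead of PresentationError, and no in-place dict mutation):

    import re


    def replace_tags(data, src_data):
        """Replace a {tag} placeholder in str, list or dict data (simplified)."""
        if isinstance(data, str):
            tag = re.search(r"{[^{}]+}", data)
            if tag:
                data = data.replace(tag.group(), src_data[tag.group()[1:-1]])
            return data
        if isinstance(data, list):
            return [replace_tags(item, src_data) for item in data]
        if isinstance(data, dict):
            return {key: replace_tags(val, src_data) for key, val in data.items()}
        raise TypeError(u"Replace tags: Not supported data type.")


    print(replace_tags(
        [u"{DIR[DTR]}/vpp_performance_results", u"{DIR[DTR]}/vpp_mrr_results"],
        {u"DIR[DTR]": u"docs/report/detailed_test_results"}
    ))
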