author     Tibor Frank <tifrank@cisco.com>    2019-11-20 11:43:44 +0100
committer  Tibor Frank <tifrank@cisco.com>    2019-12-03 14:22:39 +0000
commit     cbfa26dc0f5334bcd367c161b4eaad342355bbde (patch)
tree       0e2c9cec7e956f914dcb8a1b1865ff4e3d7a47fd /resources
parent     375aeaab2c14e45ebe45c35947381dc248b32097 (diff)
Python3: PAL
- files renamed:
  - utils.py --> pal_utils.py
  - errors.py --> pal_errors.py
- functions/methods renamed:
  - plot_service_density_reconf_box_name --> plot_nf_reconf_box_name
  - plot_performance_box_name --> plot_perf_box_name
  - plot_latency_error_bars_name --> plot_lat_err_bars_name
  - plot_throughput_speedup_analysis_name --> plot_tsa_name
  - plot_service_density_heatmap --> plot_nf_heatmap
  - table_performance_comparison --> table_perf_comparison
  - table_performance_comparison_nic --> table_perf_comparison_nic
  - table_performance_trending_dashboard_html --> table_perf_trending_dash_html
- functions/methods removed:
  - plot_service_density_heatmap_compare
  - plot_throughput_speedup_analysis
  - plot_latency_error_bars
  - plot_soak_boxes
  - plot_soak_bars

Change-Id: Icddc01d3ccb451abb92b9e5d912b642d01866033
Signed-off-by: Tibor Frank <tifrank@cisco.com>
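For callers of the PAL code, the module renames reduce to a one-line import change. An illustrative sketch (symbol and module names are taken from this patch, the snippet itself is not part of it):

    # Before this change (old module names):
    #   from utils import mean, relative_change
    #   from errors import PresentationError

    # After this change:
    from pal_utils import mean, relative_change
    from pal_errors import PresentationError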
Diffstat (limited to 'resources')
-rw-r--r--  resources/tools/presentation/__init__.py                                                        |   16
-rw-r--r--  resources/tools/presentation/conf.py                                                            |  205
-rw-r--r--  resources/tools/presentation/conf_cpta/conf.py                                                  |   28
-rw-r--r--  resources/tools/presentation/doc/pal_lld.rst                                                    |    8
-rw-r--r--  resources/tools/presentation/environment.py                                                     |   61
-rw-r--r--  resources/tools/presentation/generator_alerts.py                                                |  393
-rw-r--r--  resources/tools/presentation/generator_cpta.py (renamed from generator_CPTA.py)                 |  465
-rw-r--r--  resources/tools/presentation/generator_files.py                                                 |  121
-rw-r--r--  resources/tools/presentation/generator_plots.py                                                 | 2583
-rw-r--r--  resources/tools/presentation/generator_report.py                                                |  151
-rw-r--r--  resources/tools/presentation/generator_tables.py                                                | 1782
-rw-r--r--  resources/tools/presentation/input_data_files.py                                                |  249
-rw-r--r--  resources/tools/presentation/input_data_parser.py                                               |  771
-rw-r--r--  resources/tools/presentation/pal.py                                                             |  151
-rw-r--r--  resources/tools/presentation/pal_errors.py (renamed from errors.py)                             |   27
-rw-r--r--  resources/tools/presentation/pal_utils.py (renamed from utils.py)                               |  138
-rw-r--r--  resources/tools/presentation/requirements.txt                                                   |   12
-rwxr-xr-x  resources/tools/presentation/run_cpta.sh                                                        |    2
-rwxr-xr-x  resources/tools/presentation/run_report.sh                                                      |    2
-rw-r--r--  resources/tools/presentation/specification.yaml                                                 | 4454
-rw-r--r--  resources/tools/presentation/specification_CPTA.yaml                                            |  217
-rw-r--r--  resources/tools/presentation/specification_parser.py                                            |  663
-rw-r--r--  resources/tools/presentation/static_content.py                                                  |   26
23 files changed, 4994 insertions, 7531 deletions
diff --git a/resources/tools/presentation/__init__.py b/resources/tools/presentation/__init__.py
index a3b7344358..67fa7230d6 100644
--- a/resources/tools/presentation/__init__.py
+++ b/resources/tools/presentation/__init__.py
@@ -16,3 +16,19 @@ __init__ file for directory presentation
This makes the presentation a part of the great CSIT resources package.
"""
+
+from .pal_errors import PresentationError
+from .pal_utils import archive_input_data, classify_anomalies, \
+ convert_csv_to_pretty_txt, execute_command, get_files, \
+ get_last_completed_build_number, get_last_successful_build_nr, \
+ get_rst_title_char, mean, relative_change, relative_change_stdev, stdev
+from .environment import Environment
+from .specification_parser import Specification
+from .input_data_parser import InputData
+from .generator_tables import generate_tables
+from .generator_plots import generate_plots
+from .generator_files import generate_files
+from .static_content import prepare_static_content
+from .generator_report import generate_report
+from .generator_cpta import generate_cpta
+from .generator_alerts import Alerting
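With these re-exports in place, downstream code can pull the PAL entry points straight from the package instead of reaching into individual modules. A minimal usage sketch, assuming the CSIT repository root is on sys.path so the package resolves as resources.tools.presentation:

    # Hypothetical consumer of the re-exported API; the package path is an
    # assumption about how the CSIT tree is laid out on sys.path.
    from resources.tools.presentation import Specification, InputData, generate_tables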
diff --git a/resources/tools/presentation/conf.py b/resources/tools/presentation/conf.py
index ae781bdac8..66dc46952c 100644
--- a/resources/tools/presentation/conf.py
+++ b/resources/tools/presentation/conf.py
@@ -1,25 +1,26 @@
# -*- coding: utf-8 -*-
-#
-# CSIT report documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
+"""CSIT report documentation build configuration file
+
+This file is execfile()d with the current directory set to its
+containing dir.
+
+Note that not all possible configuration values are present in this
+autogenerated file.
+
+All configuration values have a default; values that are commented out
+serve to show the default.
+
+If extensions (or modules to document with autodoc) are in another directory,
+add these directories to sys.path here. If the directory is relative to the
+documentation root, use os.path.abspath to make it absolute, like shown here.
+"""
import os
import sys
-sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath(u'.'))
# -- General configuration ------------------------------------------------
@@ -30,22 +31,22 @@ sys.path.insert(0, os.path.abspath('.'))
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = ['sphinxcontrib.programoutput',
- 'sphinx.ext.ifconfig']
+extensions = [u'sphinxcontrib.programoutput',
+ u'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = [u'_templates']
# The suffix(es) of source file names.
# You can specify multiple suffix as a list of string:
#
-source_suffix = ['.rst', '.md']
+source_suffix = [u'.rst', u'.md']
# The master toctree document.
-master_doc = 'index'
+master_doc = u'index'
# General information about the project.
-report_week = '38'
+report_week = u'38'
project = u'FD.io CSIT-1908.{week}'.format(week=report_week)
copyright = u'2019, FD.io'
author = u'FD.io CSIT'
@@ -59,7 +60,7 @@ author = u'FD.io CSIT'
# The full version, including alpha/beta/rc tags.
# release = u''
-rst_epilog = """
+rst_epilog = u"""
.. |release-1| replace:: {prev_release}
.. |srelease| replace:: {srelease}
.. |csit-release| replace:: CSIT-{csitrelease}
@@ -108,35 +109,35 @@ rst_epilog = """
.. _CSIT Testbed Setup: https://git.fd.io/csit/tree/resources/tools/testbed-setup/README.md?h={release}
.. _K8s configuration files: https://github.com/FDio/csit/tree/{release}/resources/templates/kubernetes
.. _VPP startup.conf: https://git.fd.io/vpp/tree/src/vpp/conf/startup.conf?h=stable/{srelease}&id={vpp_release_commit_id}
-""".format(release='rls1908',
+""".format(release=u'rls1908',
report_week=report_week,
- prev_release='rls1904',
- srelease='1908',
- csitrelease='1908',
- csit_prev_release='1904',
- vpprelease='19.08',
- vpp_prev_release='19.04',
- dpdkrelease='19.05',
- sdpdkrelease='1905',
- trex_version='v2.54',
- csit_ubuntu_ver='csit-ubuntu-16.04.1_2019-03-27_2.4',
- csit_centos_ver='csit-centos-7.6-1810_2019-05-20_2.5',
- vpp_release_commit_id='1c586de48cc76fc6eac50f5d87003e2a80aa43e7')
+ prev_release=u'rls1904',
+ srelease=u'1908',
+ csitrelease=u'1908',
+ csit_prev_release=u'1904',
+ vpprelease=u'19.08',
+ vpp_prev_release=u'19.04',
+ dpdkrelease=u'19.05',
+ sdpdkrelease=u'1905',
+ trex_version=u'v2.54',
+ csit_ubuntu_ver=u'csit-ubuntu-16.04.1_2019-03-27_2.4',
+ csit_centos_ver=u'csit-centos-7.6-1810_2019-05-20_2.5',
+ vpp_release_commit_id=u'1c586de48cc76fc6eac50f5d87003e2a80aa43e7')
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = 'en'
+language = u'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = [u'_build', u'Thumbs.db', u'.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = u'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
@@ -146,40 +147,41 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
-html_theme = 'sphinx_rtd_theme'
+html_theme = u'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
- 'canonical_url': '',
- 'analytics_id': '',
- 'logo_only': False,
- 'display_version': True,
- 'prev_next_buttons_location': 'bottom',
- 'style_external_links': False,
+ u'canonical_url': u'',
+ u'analytics_id': u'',
+ u'logo_only': False,
+ u'display_version': True,
+ u'prev_next_buttons_location': u'bottom',
+ u'style_external_links': False,
# Toc options
- 'collapse_navigation': True,
- 'sticky_navigation': True,
- 'navigation_depth': 3,
- 'includehidden': True,
- 'titles_only': False
+ u'collapse_navigation': True,
+ u'sticky_navigation': True,
+ u'navigation_depth': 3,
+ u'includehidden': True,
+ u'titles_only': False
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_theme_path = ['env/lib/python2.7/site-packages/sphinx_rtd_theme']
+html_theme_path = [u'env/lib/python2.7/site-packages/sphinx_rtd_theme']
# html_static_path = ['_build/_static']
-html_static_path = ['_tmp/src/_static']
+html_static_path = [u'_tmp/src/_static']
html_context = {
- 'css_files': [
- '_static/theme_overrides.css', # overrides for wide tables in RTD theme
- ],
- }
+ u'css_files': [
+# overrides for wide tables in RTD theme
+ u'_static/theme_overrides.css'
+ ]
+}
# If false, no module index is generated.
html_domain_indices = True
@@ -192,59 +194,58 @@ html_split_index = False
# -- Options for LaTeX output ---------------------------------------------
-latex_engine = 'pdflatex'
+latex_engine = u'pdflatex'
latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- 'papersize': 'a4paper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- #'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- 'preamble': r'''
- \usepackage{pdfpages}
- \usepackage{svg}
- \usepackage{charter}
- \usepackage[defaultsans]{lato}
- \usepackage{inconsolata}
- \usepackage{csvsimple}
- \usepackage{longtable}
- \usepackage{booktabs}
- ''',
-
- # Latex figure (float) alignment
- #
- 'figure_align': 'H',
-
- # Latex font setup
- #
- 'fontpkg': r'''
- \renewcommand{\familydefault}{\sfdefault}
- ''',
-
- # Latex other setup
- #
- 'extraclassoptions': 'openany',
- 'sphinxsetup': r'''
- TitleColor={RGB}{225,38,40},
- InnerLinkColor={RGB}{62,62,63},
- OuterLinkColor={RGB}{225,38,40},
- shadowsep=0pt,
- shadowsize=0pt,
- shadowrule=0pt
- '''
+ # The paper size ('letterpaper' or 'a4paper').
+ #
+ u'papersize': u'a4paper',
+
+ # The font size ('10pt', '11pt' or '12pt').
+ #
+ #'pointsize': '10pt',
+
+ # Additional stuff for the LaTeX preamble.
+ #
+ u'preamble': r'''
+ \usepackage{pdfpages}
+ \usepackage{svg}
+ \usepackage{charter}
+ \usepackage[defaultsans]{lato}
+ \usepackage{inconsolata}
+ \usepackage{csvsimple}
+ \usepackage{longtable}
+ \usepackage{booktabs}
+ ''',
+
+ # Latex figure (float) alignment
+ #
+ u'figure_align': u'H',
+
+ # Latex font setup
+ #
+ u'fontpkg': r'''
+ \renewcommand{\familydefault}{\sfdefault}
+ ''',
+
+ # Latex other setup
+ #
+ u'extraclassoptions': u'openany',
+ u'sphinxsetup': r'''
+ TitleColor={RGB}{225,38,40},
+ InnerLinkColor={RGB}{62,62,63},
+ OuterLinkColor={RGB}{225,38,40},
+ shadowsep=0pt,
+ shadowsize=0pt,
+ shadowrule=0pt
+ '''
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, 'csit.tex', u'CSIT REPORT',
- u'', 'manual'),
+ (master_doc, u'csit.tex', u'CSIT REPORT', u'', u'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -263,7 +264,7 @@ latex_show_pagerefs = True
# If true, show URL addresses after external links.
#
-latex_show_urls = 'footnote'
+latex_show_urls = u'footnote'
# Documents to append as an appendix to all manuals.
#
diff --git a/resources/tools/presentation/conf_cpta/conf.py b/resources/tools/presentation/conf_cpta/conf.py
index 4eb51d884e..c20a5e2996 100644
--- a/resources/tools/presentation/conf_cpta/conf.py
+++ b/resources/tools/presentation/conf_cpta/conf.py
@@ -18,7 +18,7 @@
import os
import sys
-sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath(u'.'))
# -- General configuration ------------------------------------------------
@@ -29,19 +29,19 @@ sys.path.insert(0, os.path.abspath('.'))
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = ['sphinxcontrib.programoutput',
- 'sphinx.ext.ifconfig']
+extensions = [u'sphinxcontrib.programoutput',
+ u'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = [u'_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
-source_suffix = ['.rst', '.md']
+source_suffix = [u'.rst', u'.md']
# The master toctree document.
-master_doc = 'index'
+master_doc = u'index'
# General information about the project.
project = u'FD.io CSIT'
@@ -62,15 +62,15 @@ author = u'FD.io CSIT'
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = 'en'
+language = u'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = [u'_build', u'Thumbs.db', u'.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = u'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
@@ -80,7 +80,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
-html_theme = 'sphinx_rtd_theme'
+html_theme = u'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -91,14 +91,14 @@ html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_theme_path = ['env/lib/python2.7/site-packages/sphinx_rtd_theme']
+html_theme_path = [u'env/lib/python2.7/site-packages/sphinx_rtd_theme']
# html_static_path = ['_build/_static']
-html_static_path = ['../_tmp/src/_static']
+html_static_path = [u'../_tmp/src/_static']
html_context = {
- 'css_files': [
- '_static/theme_overrides.css', # overrides for wide tables in RTD theme
+ u'css_files': [
+ u'_static/theme_overrides.css', # overrides for wide tables in RTD theme
],
}
diff --git a/resources/tools/presentation/doc/pal_lld.rst b/resources/tools/presentation/doc/pal_lld.rst
index d3a9782681..ec4c97b454 100644
--- a/resources/tools/presentation/doc/pal_lld.rst
+++ b/resources/tools/presentation/doc/pal_lld.rst
@@ -841,7 +841,7 @@ VPP HTTP server performance in a box chart with pre-defined data
-
type: "plot"
title: "VPP HTTP Server Performance"
- algorithm: "plot_http_server_performance_box"
+ algorithm: "plot_http_server_perf_box"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/http-server-performance-cps"
data:
@@ -1263,7 +1263,7 @@ The model specifies:
-
type: "table"
title: "Performance comparison"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file-ext: ".csv"
output-file: "{DIR[DTR,PERF,VPP,IMPRV]}/vpp_performance_comparison"
reference:
@@ -1480,7 +1480,7 @@ At first, the .csv tables are generated (only the table for 1t1c is shown):
-
type: "table"
title: "Performance trending dashboard"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c"
data: "plot-performance-trending-all"
@@ -1503,7 +1503,7 @@ Then, html tables stored inside .rst files are generated:
-
type: "table"
title: "HTML performance trending dashboard 1t1c"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c.rst"
diff --git a/resources/tools/presentation/environment.py b/resources/tools/presentation/environment.py
index 7cddb0279c..ea4d94ad70 100644
--- a/resources/tools/presentation/environment.py
+++ b/resources/tools/presentation/environment.py
@@ -21,7 +21,7 @@ import os
import shutil
import logging
-from errors import PresentationError
+from pal_errors import PresentationError
class Environment:
@@ -60,34 +60,37 @@ class Environment:
"""
if self._force:
- logging.info("Removing old build(s) ...")
- for directory in self._env["build-dirs"]:
- dir_to_remove = self._env["paths"][directory]
+ logging.info(u"Removing old build(s) ...")
+ for directory in self._env[u"build-dirs"]:
+ dir_to_remove = self._env[u"paths"][directory]
if os.path.isdir(dir_to_remove):
try:
shutil.rmtree(dir_to_remove)
- logging.info(" Removed: {}".format(dir_to_remove))
+ logging.info(f" Removed: {dir_to_remove}")
except OSError:
- raise PresentationError("Cannot remove the directory "
- "'{}'".format(dir_to_remove))
- logging.info("Done.")
+ raise PresentationError(
+ f"Cannot remove the directory {dir_to_remove}"
+ )
+ logging.info(u"Done.")
- logging.info("Making directories ...")
+ logging.info(u"Making directories ...")
- for directory in self._env["make-dirs"]:
- dir_to_make = self._env["paths"][directory]
+ for directory in self._env[u"make-dirs"]:
+ dir_to_make = self._env[u"paths"][directory]
try:
if os.path.isdir(dir_to_make):
- logging.warning("The directory '{}' exists, skipping.".
- format(dir_to_make))
+ logging.warning(
+ f"The directory {dir_to_make} exists, skipping."
+ )
else:
os.makedirs(dir_to_make)
- logging.info(" Created: {}".format(dir_to_make))
+ logging.info(f" Created: {dir_to_make}")
except OSError:
- raise PresentationError("Cannot make the directory '{}'".
- format(dir_to_make))
+ raise PresentationError(
+ f"Cannot make the directory {dir_to_make}"
+ )
- logging.info("Done.")
+ logging.info(u"Done.")
def set_environment(self):
"""Set the environment.
@@ -101,28 +104,26 @@ def clean_environment(env):
:param env: Environment specification.
:type env: dict
- :raises: PresentationError if it is not possible to remove a directory.
"""
- logging.info("Cleaning the environment ...")
+ logging.info(u"Cleaning the environment ...")
- if not env["remove-dirs"]: # None or empty
- logging.info(" No directories to remove.")
+ if not env[u"remove-dirs"]: # None or empty
+ logging.info(u" No directories to remove.")
return
- for directory in env["remove-dirs"]:
- dir_to_remove = env["paths"][directory]
- logging.info(" Removing the working directory {} ...".
- format(dir_to_remove))
+ for directory in env[u"remove-dirs"]:
+ dir_to_remove = env[u"paths"][directory]
+ logging.info(f" Removing the working directory {dir_to_remove} ...")
if os.path.isdir(dir_to_remove):
try:
shutil.rmtree(dir_to_remove)
except OSError as err:
- logging.warning("Cannot remove the directory '{}'".
- format(dir_to_remove))
+ logging.warning(
+ f"Cannot remove the directory {dir_to_remove}"
+ )
logging.debug(str(err))
else:
- logging.warning("The directory '{}' does not exist.".
- format(dir_to_remove))
+ logging.warning(f"The directory {dir_to_remove} does not exist.")
- logging.info("Done.")
+ logging.info(u"Done.")
diff --git a/resources/tools/presentation/generator_alerts.py b/resources/tools/presentation/generator_alerts.py
index 3a9b5ddfb6..10c6734aad 100644
--- a/resources/tools/presentation/generator_alerts.py
+++ b/resources/tools/presentation/generator_alerts.py
@@ -11,6 +11,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+"""Generator of alerts:
+- failed tests
+- regressions
+- progressions
+"""
+
+
import smtplib
import logging
@@ -19,8 +26,8 @@ from email.mime.multipart import MIMEMultipart
from os.path import isdir
from collections import OrderedDict
-from utils import get_last_completed_build_number
-from errors import PresentationError
+from pal_utils import get_last_completed_build_number
+from pal_errors import PresentationError
class AlertingError(PresentationError):
@@ -34,7 +41,7 @@ class AlertingError(PresentationError):
- relevant data if there are any collected (optional parameter details).
"""
- def __init__(self, msg, details='', level="CRITICAL"):
+ def __init__(self, msg, details=u'', level=u"CRITICAL"):
"""Sets the exception message and the level.
:param msg: Short description of the encountered problem.
@@ -48,13 +55,13 @@ class AlertingError(PresentationError):
:type level: str
"""
- super(AlertingError, self).__init__(
- "Alerting: {0}".format(msg), details, level)
+ super(AlertingError, self).__init__(f"Alerting: {msg}", details, level)
def __repr__(self):
return (
- "AlertingError(msg={msg!r},details={dets!r},level={level!r})".
- format(msg=self._msg, dets=self._details, level=self._level))
+ f"AlertingError(msg={self._msg!r},details={self._details!r},"
+ f"level={self._level!r})"
+ )
class Alerting:
@@ -69,57 +76,58 @@ class Alerting:
"""
# Implemented alerts:
- self._ALERTS = ("failed-tests", )
+ self._implemented_alerts = (u"failed-tests", )
self._spec = spec
try:
self._spec_alert = spec.alerting
except KeyError as err:
- raise AlertingError("Alerting is not configured, skipped.",
- repr(err),
- "WARNING")
+ raise AlertingError(u"Alerting is not configured, skipped.",
+ repr(err),
+ u"WARNING")
- self._path_failed_tests = spec.environment["paths"]["DIR[STATIC,VPP]"]
+ self._path_failed_tests = spec.environment[u"paths"][u"DIR[STATIC,VPP]"]
# Verify and validate input specification:
- self.configs = self._spec_alert.get("configurations", None)
+ self.configs = self._spec_alert.get(u"configurations", None)
if not self.configs:
- raise AlertingError("No alert configuration is specified.")
- for config_type, config_data in self.configs.iteritems():
- if config_type == "email":
- if not config_data.get("server", None):
- raise AlertingError("Parameter 'server' is missing.")
- if not config_data.get("address-to", None):
- raise AlertingError("Parameter 'address-to' (recipient) is "
- "missing.")
- if not config_data.get("address-from", None):
- raise AlertingError("Parameter 'address-from' (sender) is "
- "missing.")
- elif config_type == "jenkins":
- if not isdir(config_data.get("output-dir", "")):
- raise AlertingError("Parameter 'output-dir' is "
- "missing or it is not a directory.")
- if not config_data.get("output-file", None):
- raise AlertingError("Parameter 'output-file' is missing.")
+ raise AlertingError(u"No alert configuration is specified.")
+ for config_type, config_data in self.configs.items():
+ if config_type == u"email":
+ if not config_data.get(u"server", None):
+ raise AlertingError(u"Parameter 'server' is missing.")
+ if not config_data.get(u"address-to", None):
+ raise AlertingError(u"Parameter 'address-to' (recipient) "
+ u"is missing.")
+ if not config_data.get(u"address-from", None):
+ raise AlertingError(u"Parameter 'address-from' (sender) is "
+ u"missing.")
+ elif config_type == u"jenkins":
+ if not isdir(config_data.get(u"output-dir", u"")):
+ raise AlertingError(u"Parameter 'output-dir' is "
+ u"missing or it is not a directory.")
+ if not config_data.get(u"output-file", None):
+ raise AlertingError(u"Parameter 'output-file' is missing.")
else:
- raise AlertingError("Alert of type '{0}' is not implemented.".
- format(config_type))
+ raise AlertingError(
+ f"Alert of type {config_type} is not implemented."
+ )
- self.alerts = self._spec_alert.get("alerts", None)
+ self.alerts = self._spec_alert.get(u"alerts", None)
if not self.alerts:
- raise AlertingError("No alert is specified.")
- for alert, alert_data in self.alerts.iteritems():
- if not alert_data.get("title", None):
- raise AlertingError("Parameter 'title' is missing.")
- if not alert_data.get("type", None) in self._ALERTS:
- raise AlertingError("Parameter 'failed-tests' is missing or "
- "incorrect.")
- if not alert_data.get("way", None) in self.configs.keys():
- raise AlertingError("Parameter 'way' is missing or incorrect.")
- if not alert_data.get("include", None):
- raise AlertingError("Parameter 'include' is missing or the "
- "list is empty.")
+ raise AlertingError(u"No alert is specified.")
+ for alert_data in self.alerts.values():
+ if not alert_data.get(u"title", None):
+ raise AlertingError(u"Parameter 'title' is missing.")
+ if not alert_data.get(u"type", None) in self._implemented_alerts:
+ raise AlertingError(u"Parameter 'failed-tests' is missing or "
+ u"incorrect.")
+ if not alert_data.get(u"way", None) in self.configs.keys():
+ raise AlertingError(u"Parameter 'way' is missing or incorrect.")
+ if not alert_data.get(u"include", None):
+ raise AlertingError(u"Parameter 'include' is missing or the "
+ u"list is empty.")
def __str__(self):
"""Return string with human readable description of the alert.
@@ -127,8 +135,7 @@ class Alerting:
:returns: Readable description.
:rtype: str
"""
- return "configs={configs}, alerts={alerts}".format(
- configs=self.configs, alerts=self.alerts)
+ return f"configs={self.configs}, alerts={self.alerts}"
def __repr__(self):
"""Return string executable as Python constructor call.
@@ -136,19 +143,19 @@ class Alerting:
:returns: Executable constructor call.
:rtype: str
"""
- return "Alerting(spec={spec})".format(
- spec=self._spec)
+ return f"Alerting(spec={self._spec})"
def generate_alerts(self):
"""Generate alert(s) using specified way(s).
"""
- for alert, alert_data in self.alerts.iteritems():
- if alert_data["way"] == "jenkins":
+ for alert_data in self.alerts.values():
+ if alert_data[u"way"] == u"jenkins":
self._generate_email_body(alert_data)
else:
- raise AlertingError("Alert with way '{0}' is not implemented.".
- format(alert_data["way"]))
+ raise AlertingError(
+ f"Alert with way {alert_data[u'way']} is not implemented."
+ )
@staticmethod
def _send_email(server, addr_from, addr_to, subject, text=None, html=None):
@@ -169,29 +176,29 @@ class Alerting:
"""
if not text and not html:
- raise AlertingError("No text/data to send.")
+ raise AlertingError(u"No text/data to send.")
- msg = MIMEMultipart('alternative')
- msg['Subject'] = subject
- msg['From'] = addr_from
- msg['To'] = ", ".join(addr_to)
+ msg = MIMEMultipart(u'alternative')
+ msg[u'Subject'] = subject
+ msg[u'From'] = addr_from
+ msg[u'To'] = u", ".join(addr_to)
if text:
- msg.attach(MIMEText(text, 'plain'))
+ msg.attach(MIMEText(text, u'plain'))
if html:
- msg.attach(MIMEText(html, 'html'))
+ msg.attach(MIMEText(html, u'html'))
smtp_server = None
try:
- logging.info("Trying to send alert '{0}' ...".format(subject))
- logging.debug("SMTP Server: {0}".format(server))
- logging.debug("From: {0}".format(addr_from))
- logging.debug("To: {0}".format(", ".join(addr_to)))
- logging.debug("Message: {0}".format(msg.as_string()))
+ logging.info(f"Trying to send alert {subject} ...")
+ logging.debug(f"SMTP Server: {server}")
+ logging.debug(f"From: {addr_from}")
+ logging.debug(f"To: {u', '.join(addr_to)}")
+ logging.debug(f"Message: {msg.as_string()}")
smtp_server = smtplib.SMTP(server)
smtp_server.sendmail(addr_from, addr_to, msg.as_string())
except smtplib.SMTPException as err:
- raise AlertingError("Not possible to send the alert via email.",
+ raise AlertingError(u"Not possible to send the alert via email.",
str(err))
finally:
if smtp_server:
@@ -242,12 +249,12 @@ class Alerting:
:rtype: tuple(str, str, int, int, OrderedDict)
"""
- directory = self.configs[alert["way"]]["output-dir"]
+ directory = self.configs[alert[u"way"]][u"output-dir"]
failed_tests = OrderedDict()
- file_path = "{0}/{1}.txt".format(directory, test_set)
- version = ""
+ file_path = f"{directory}/{test_set}.txt"
+ version = u""
try:
- with open(file_path, 'r') as f_txt:
+ with open(file_path, u'r') as f_txt:
for idx, line in enumerate(f_txt):
if idx == 0:
build = line[:-1]
@@ -262,36 +269,69 @@ class Alerting:
failed = line[:-1]
continue
try:
- test = line[:-1].split('-')
- nic = test[0]
- framesize = test[1]
- cores = test[2]
- name = '-'.join(test[3:-1])
+ test = line[:-1].split(u'-')
+ name = u'-'.join(test[3:-1])
except IndexError:
continue
if failed_tests.get(name, None) is None:
failed_tests[name] = dict(nics=list(),
framesizes=list(),
cores=list())
- if nic not in failed_tests[name]["nics"]:
- failed_tests[name]["nics"].append(nic)
- if framesize not in failed_tests[name]["framesizes"]:
- failed_tests[name]["framesizes"].append(framesize)
- if cores not in failed_tests[name]["cores"]:
- failed_tests[name]["cores"].append(cores)
+ if test[0] not in failed_tests[name][u"nics"]:
+ failed_tests[name][u"nics"].append(test[0])
+ if test[1] not in failed_tests[name][u"framesizes"]:
+ failed_tests[name][u"framesizes"].append(test[1])
+ if test[2] not in failed_tests[name][u"cores"]:
+ failed_tests[name][u"cores"].append(test[2])
except IOError:
- logging.error("No such file or directory: {file}".
- format(file=file_path))
+ logging.error(f"No such file or directory: {file_path}")
return None, None, None, None, None
if sort:
sorted_failed_tests = OrderedDict()
- keys = [k for k in failed_tests.keys()]
- keys.sort()
- for key in keys:
+ for key in sorted(failed_tests.keys()):
sorted_failed_tests[key] = failed_tests[key]
return build, version, passed, failed, sorted_failed_tests
- else:
- return build, version, passed, failed, failed_tests
+
+ return build, version, passed, failed, failed_tests
+
+ def _list_gressions(self, alert, idx, header, re_pro):
+ """Create a file with regressions or progressions for the test set
+ specified by idx.
+
+ :param alert: Files are created for this alert.
+ :param idx: Index of the test set as it is specified in the
+ specification file.
+ :param header: The header of the list of [re|pro]gressions.
+ :param re_pro: 'regression' or 'progression'.
+ :type alert: dict
+ :type idx: int
+ :type header: str
+ :type re_pro: str
+ """
+
+ if re_pro not in (u"regressions", u"progressions"):
+ return
+
+ in_file = (
+ f"{self.configs[alert[u'way']][u'output-dir']}/"
+ f"cpta-{re_pro}-{alert[u'urls'][idx].split(u'/')[-1]}.txt"
+ )
+ out_file = (
+ f"{self.configs[alert[u'way']][u'output-dir']}/"
+ f"trending-{re_pro}.txt"
+ )
+
+ try:
+ with open(in_file, u'r') as txt_file:
+ file_content = txt_file.read()
+ with open(out_file, u'a+') as reg_file:
+ reg_file.write(header)
+ if file_content:
+ reg_file.write(file_content)
+ else:
+ reg_file.write(f"No {re_pro}")
+ except IOError as err:
+ logging.warning(repr(err))
def _generate_email_body(self, alert):
"""Create the file which is used in the generated alert.
@@ -300,120 +340,97 @@ class Alerting:
:type alert: dict
"""
- if alert["type"] != "failed-tests":
- raise AlertingError("Alert of type '{0}' is not implemented.".
- format(alert["type"]))
+ if alert[u"type"] != u"failed-tests":
+ raise AlertingError(
+ f"Alert of type {alert[u'type']} is not implemented."
+ )
- config = self.configs[alert["way"]]
-
- text = ""
- for idx, test_set in enumerate(alert.get("include", [])):
+ text = u""
+ for idx, test_set in enumerate(alert.get(u"include", [])):
build, version, passed, failed, failed_tests = \
self._get_compressed_failed_tests(alert, test_set)
if build is None:
ret_code, build_nr, _ = get_last_completed_build_number(
- self._spec.environment["urls"]["URL[JENKINS,CSIT]"],
- alert["urls"][idx].split('/')[-1])
+ self._spec.environment[u"urls"][u"URL[JENKINS,CSIT]"],
+ alert[u"urls"][idx].split(u'/')[-1])
if ret_code != 0:
- build_nr = ''
- text += "\n\nNo input data available for '{set}'. See CSIT " \
- "build {link}/{build} for more information.\n".\
- format(set='-'.join(test_set.split('-')[-2:]),
- link=alert["urls"][idx],
- build=build_nr)
+ build_nr = u''
+ text += (
+ f"\n\nNo input data available for "
+ f"{u'-'.join(test_set.split('-')[-2:])}. See CSIT build "
+ f"{alert[u'urls'][idx]}/{build_nr} for more information.\n"
+ )
continue
- text += ("\n\n{topo}-{arch}, "
- "{failed} tests failed, "
- "{passed} tests passed, "
- "CSIT build: {link}/{build}, "
- "VPP version: {version}\n\n".
- format(topo=test_set.split('-')[-2],
- arch=test_set.split('-')[-1],
- failed=failed,
- passed=passed,
- link=alert["urls"][idx],
- build=build,
- version=version))
- regression_hdr = ("\n\n{topo}-{arch}, "
- "CSIT build: {link}/{build}, "
- "VPP version: {version}\n\n"
- .format(topo=test_set.split('-')[-2],
- arch=test_set.split('-')[-1],
- link=alert["urls"][idx],
- build=build,
- version=version
- ))
- max_len_name = 0
- max_len_nics = 0
- max_len_framesizes = 0
- max_len_cores = 0
- for name, params in failed_tests.items():
- failed_tests[name]["nics"] = ",".join(sorted(params["nics"]))
- failed_tests[name]["framesizes"] = \
- ",".join(sorted(params["framesizes"]))
- failed_tests[name]["cores"] = ",".join(sorted(params["cores"]))
- if len(name) > max_len_name:
- max_len_name = len(name)
- if len(failed_tests[name]["nics"]) > max_len_nics:
- max_len_nics = len(failed_tests[name]["nics"])
- if len(failed_tests[name]["framesizes"]) > max_len_framesizes:
- max_len_framesizes = len(failed_tests[name]["framesizes"])
- if len(failed_tests[name]["cores"]) > max_len_cores:
- max_len_cores = len(failed_tests[name]["cores"])
+ text += (
+ f"\n\n{test_set.split('-')[-2]}-{test_set.split('-')[-1]}, "
+ f"{failed} tests failed, "
+ f"{passed} tests passed, CSIT build: "
+ f"{alert[u'urls'][idx]}/{build}, VPP version: {version}\n\n"
+ )
+
+ class MaxLens():
+ """Class to store the max lengths of strings displayed in
+ failed tests list.
+ """
+ def __init__(self, tst_name, nics, framesizes, cores):
+ """Initialisation.
+
+ :param tst_name: Name of the test.
+ :param nics: NICs used in the test.
+ :param framesizes: Frame sizes used in the tests
+ :param cores: Cores used in th test.
+ """
+ self.name = tst_name
+ self.nics = nics
+ self.frmsizes = framesizes
+ self.cores = cores
+
+ max_len = MaxLens(0, 0, 0, 0)
for name, params in failed_tests.items():
- text += "{name} {nics} {frames} {cores}\n".format(
- name=name + " " * (max_len_name - len(name)),
- nics=params["nics"] +
- " " * (max_len_nics - len(params["nics"])),
- frames=params["framesizes"] + " " *
- (max_len_framesizes - len(params["framesizes"])),
- cores=params["cores"] +
- " " * (max_len_cores - len(params["cores"])))
+ failed_tests[name][u"nics"] = u",".join(sorted(params[u"nics"]))
+ failed_tests[name][u"framesizes"] = \
+ u",".join(sorted(params[u"framesizes"]))
+ failed_tests[name][u"cores"] = \
+ u",".join(sorted(params[u"cores"]))
+ if len(name) > max_len.name:
+ max_len.name = len(name)
+ if len(failed_tests[name][u"nics"]) > max_len.nics:
+ max_len.nics = len(failed_tests[name][u"nics"])
+ if len(failed_tests[name][u"framesizes"]) > max_len.frmsizes:
+ max_len.frmsizes = len(failed_tests[name][u"framesizes"])
+ if len(failed_tests[name][u"cores"]) > max_len.cores:
+ max_len.cores = len(failed_tests[name][u"cores"])
+ for name, params in failed_tests.items():
+ text += (
+ f"{name + u' ' * (max_len.name - len(name))} "
+ f"{params[u'nics']}"
+ f"{u' ' * (max_len.nics - len(params[u'nics']))} "
+ f"{params[u'framesizes']}"
+ f"{u' ' * (max_len.frmsizes-len(params[u'framesizes']))} "
+ f"{params[u'cores']}"
+ f"{u' ' * (max_len.cores - len(params[u'cores']))}\n"
+ )
+
+ gression_hdr = (
+ f"\n\n{test_set.split(u'-')[-2]}-{test_set.split(u'-')[-1]}, "
+ f"CSIT build: {alert[u'urls'][idx]}/{build}, "
+ f"VPP version: {version}\n\n"
+ )
# Add list of regressions:
- file_name = "{0}/cpta-regressions-{1}.txt".\
- format(config["output-dir"], alert["urls"][idx].split('/')[-1])
- try:
- with open(file_name, 'r') as txt_file:
- file_content = txt_file.read()
- reg_file_name = "{dir}/trending-regressions.txt". \
- format(dir=config["output-dir"])
- with open(reg_file_name, 'a+') as reg_file:
- reg_file.write(regression_hdr)
- if file_content:
- reg_file.write(file_content)
- else:
- reg_file.write("No regressions")
- except IOError as err:
- logging.warning(repr(err))
+ self._list_gressions(alert, idx, gression_hdr, u"regressions")
# Add list of progressions:
- file_name = "{0}/cpta-progressions-{1}.txt".\
- format(config["output-dir"], alert["urls"][idx].split('/')[-1])
- try:
- with open(file_name, 'r') as txt_file:
- file_content = txt_file.read()
- pro_file_name = "{dir}/trending-progressions.txt". \
- format(dir=config["output-dir"])
- with open(pro_file_name, 'a+') as pro_file:
- pro_file.write(regression_hdr)
- if file_content:
- pro_file.write(file_content)
- else:
- pro_file.write("No progressions")
- except IOError as err:
- logging.warning(repr(err))
-
- text += "\nFor detailed information visit: {url}\n".\
- format(url=alert["url-details"])
- file_name = "{0}/{1}".format(config["output-dir"],
- config["output-file"])
- logging.info("Writing the file '{0}.txt' ...".format(file_name))
+ self._list_gressions(alert, idx, gression_hdr, u"progressions")
+
+ text += f"\nFor detailed information visit: {alert[u'url-details']}\n"
+ file_name = f"{self.configs[alert[u'way']][u'output-dir']}/" \
+ f"{self.configs[alert[u'way']][u'output-file']}"
+ logging.info(f"Writing the file {file_name}.txt ...")
try:
- with open("{0}.txt".format(file_name), 'w') as txt_file:
+ with open(f"{file_name}.txt", u'w') as txt_file:
txt_file.write(text)
except IOError:
- logging.error("Not possible to write the file '{0}.txt'.".
- format(file_name))
+ logging.error(f"Not possible to write the file {file_name}.txt.")
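Beyond the renamed imports, most of the generator_alerts.py churn above follows the same Python 2 to Python 3 pattern: dict.iteritems() becomes dict.items() and str.format() calls become f-strings. A condensed, self-contained sketch of that idiom swap (example data only, not taken verbatim from the patch):

    import logging

    alerts = {u"failed-tests": {u"way": u"jenkins"}}  # example data only

    # Python 2 style removed by this change:
    #   for name, data in alerts.iteritems():
    #       logging.warning("Alert '{0}' uses way '{1}'.".format(name, data["way"]))

    # Python 3 style introduced by this change:
    for name, data in alerts.items():
        logging.warning(f"Alert {name} uses way {data[u'way']}.")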
diff --git a/resources/tools/presentation/generator_CPTA.py b/resources/tools/presentation/generator_cpta.py
index eec401bd1f..3003557696 100644
--- a/resources/tools/presentation/generator_CPTA.py
+++ b/resources/tools/presentation/generator_cpta.py
@@ -16,28 +16,29 @@
import logging
import csv
-import prettytable
-import plotly.offline as ploff
-import plotly.graph_objs as plgo
-import plotly.exceptions as plerr
from collections import OrderedDict
from datetime import datetime
from copy import deepcopy
-from utils import archive_input_data, execute_command, classify_anomalies
+import prettytable
+import plotly.offline as ploff
+import plotly.graph_objs as plgo
+import plotly.exceptions as plerr
+
+from pal_utils import archive_input_data, execute_command, classify_anomalies
# Command to build the html format of the report
-HTML_BUILDER = 'sphinx-build -v -c conf_cpta -a ' \
- '-b html -E ' \
- '-t html ' \
- '-D version="{date}" ' \
- '{working_dir} ' \
- '{build_dir}/'
+HTML_BUILDER = u'sphinx-build -v -c conf_cpta -a ' \
+ u'-b html -E ' \
+ u'-t html ' \
+ u'-D version="{date}" ' \
+ u'{working_dir} ' \
+ u'{build_dir}/'
# .css file for the html format of the report
-THEME_OVERRIDES = """/* override table width restrictions */
+THEME_OVERRIDES = u"""/* override table width restrictions */
.wy-nav-content {
max-width: 1200px !important;
}
@@ -91,19 +92,20 @@ THEME_OVERRIDES = """/* override table width restrictions */
}
"""
-COLORS = ["SkyBlue", "Olive", "Purple", "Coral", "Indigo", "Pink",
- "Chocolate", "Brown", "Magenta", "Cyan", "Orange", "Black",
- "Violet", "Blue", "Yellow", "BurlyWood", "CadetBlue", "Crimson",
- "DarkBlue", "DarkCyan", "DarkGreen", "Green", "GoldenRod",
- "LightGreen", "LightSeaGreen", "LightSkyBlue", "Maroon",
- "MediumSeaGreen", "SeaGreen", "LightSlateGrey",
- "SkyBlue", "Olive", "Purple", "Coral", "Indigo", "Pink",
- "Chocolate", "Brown", "Magenta", "Cyan", "Orange", "Black",
- "Violet", "Blue", "Yellow", "BurlyWood", "CadetBlue", "Crimson",
- "DarkBlue", "DarkCyan", "DarkGreen", "Green", "GoldenRod",
- "LightGreen", "LightSeaGreen", "LightSkyBlue", "Maroon",
- "MediumSeaGreen", "SeaGreen", "LightSlateGrey"
- ]
+COLORS = [
+ u"SkyBlue", u"Olive", u"Purple", u"Coral", u"Indigo", u"Pink",
+ u"Chocolate", u"Brown", u"Magenta", u"Cyan", u"Orange", u"Black",
+ u"Violet", u"Blue", u"Yellow", u"BurlyWood", u"CadetBlue", u"Crimson",
+ u"DarkBlue", u"DarkCyan", u"DarkGreen", u"Green", u"GoldenRod",
+ u"LightGreen", u"LightSeaGreen", u"LightSkyBlue", u"Maroon",
+ u"MediumSeaGreen", u"SeaGreen", u"LightSlateGrey",
+ u"SkyBlue", u"Olive", u"Purple", u"Coral", u"Indigo", u"Pink",
+ u"Chocolate", u"Brown", u"Magenta", u"Cyan", u"Orange", u"Black",
+ u"Violet", u"Blue", u"Yellow", u"BurlyWood", u"CadetBlue", u"Crimson",
+ u"DarkBlue", u"DarkCyan", u"DarkGreen", u"Green", u"GoldenRod",
+ u"LightGreen", u"LightSeaGreen", u"LightSkyBlue", u"Maroon",
+ u"MediumSeaGreen", u"SeaGreen", u"LightSlateGrey"
+]
def generate_cpta(spec, data):
@@ -116,35 +118,35 @@ def generate_cpta(spec, data):
:type data: InputData
"""
- logging.info("Generating the Continuous Performance Trending and Analysis "
- "...")
+ logging.info(u"Generating the Continuous Performance Trending and Analysis "
+ u"...")
ret_code = _generate_all_charts(spec, data)
cmd = HTML_BUILDER.format(
- date=datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC'),
- working_dir=spec.environment["paths"]["DIR[WORKING,SRC]"],
- build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
+ date=datetime.utcnow().strftime(u'%Y-%m-%d %H:%M UTC'),
+ working_dir=spec.environment[u'paths'][u'DIR[WORKING,SRC]'],
+ build_dir=spec.environment[u'paths'][u'DIR[BUILD,HTML]'])
execute_command(cmd)
- with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
+ with open(spec.environment[u'paths'][u'DIR[CSS_PATCH_FILE]'], u'w') as \
css_file:
css_file.write(THEME_OVERRIDES)
- with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE2]"], "w") as \
+ with open(spec.environment[u'paths'][u'DIR[CSS_PATCH_FILE2]'], u'w') as \
css_file:
css_file.write(THEME_OVERRIDES)
- if spec.configuration.get("archive-inputs", True):
+ if spec.configuration.get(u"archive-inputs", True):
archive_input_data(spec)
- logging.info("Done.")
+ logging.info(u"Done.")
return ret_code
def _generate_trending_traces(in_data, job_name, build_info,
- show_trend_line=True, name="", color=""):
+ show_trend_line=True, name=u"", color=u""):
"""Generate the trending traces:
- samples,
- outliers, regress, progress
@@ -173,27 +175,27 @@ def _generate_trending_traces(in_data, job_name, build_info,
xaxis = list()
for idx in data_x:
date = build_info[job_name][str(idx)][0]
- hover_str = ("date: {date}<br>"
- "value: {value:,}<br>"
- "{sut}-ref: {build}<br>"
- "csit-ref: mrr-{period}-build-{build_nr}<br>"
- "testbed: {testbed}")
- if "dpdk" in job_name:
+ hover_str = (u"date: {date}<br>"
+ u"value: {value:,}<br>"
+ u"{sut}-ref: {build}<br>"
+ u"csit-ref: mrr-{period}-build-{build_nr}<br>"
+ u"testbed: {testbed}")
+ if u"dpdk" in job_name:
hover_text.append(hover_str.format(
date=date,
value=int(in_data[idx]),
- sut="dpdk",
- build=build_info[job_name][str(idx)][1].rsplit('~', 1)[0],
- period="weekly",
+ sut=u"dpdk",
+ build=build_info[job_name][str(idx)][1].rsplit(u'~', 1)[0],
+ period=u"weekly",
build_nr=idx,
testbed=build_info[job_name][str(idx)][2]))
- elif "vpp" in job_name:
+ elif u"vpp" in job_name:
hover_text.append(hover_str.format(
date=date,
value=int(in_data[idx]),
- sut="vpp",
- build=build_info[job_name][str(idx)][1].rsplit('~', 1)[0],
- period="daily",
+ sut=u"vpp",
+ build=build_info[job_name][str(idx)][1].rsplit(u'~', 1)[0],
+ period=u"daily",
build_nr=idx,
testbed=build_info[job_name][str(idx)][2]))
@@ -210,14 +212,14 @@ def _generate_trending_traces(in_data, job_name, build_info,
anomalies_colors = list()
anomalies_avgs = list()
anomaly_color = {
- "regression": 0.0,
- "normal": 0.5,
- "progression": 1.0
+ u"regression": 0.0,
+ u"normal": 0.5,
+ u"progression": 1.0
}
if anomaly_classification:
- for idx, (key, value) in enumerate(data_pd.iteritems()):
+ for idx, (key, value) in enumerate(data_pd.items()):
if anomaly_classification[idx] in \
- ("outlier", "regression", "progression"):
+ (u"outlier", u"regression", u"progression"):
anomalies[key] = value
anomalies_colors.append(
anomaly_color[anomaly_classification[idx]])
@@ -228,21 +230,21 @@ def _generate_trending_traces(in_data, job_name, build_info,
trace_samples = plgo.Scatter(
x=xaxis,
- y=[y for y in data_y], # Was: y.avg
- mode='markers',
+ y=data_y,
+ mode=u"markers",
line={
- "width": 1
+ u"width": 1
},
showlegend=True,
legendgroup=name,
- name="{name}".format(name=name),
+ name=f"{name}",
marker={
- "size": 5,
- "color": color,
- "symbol": "circle",
+ u"size": 5,
+ u"color": color,
+ u"symbol": u"circle",
},
text=hover_text,
- hoverinfo="text"
+ hoverinfo=u"text"
)
traces = [trace_samples, ]
@@ -250,57 +252,59 @@ def _generate_trending_traces(in_data, job_name, build_info,
trace_trend = plgo.Scatter(
x=xaxis,
y=avgs,
- mode='lines',
+ mode=u"lines",
line={
- "shape": "linear",
- "width": 1,
- "color": color,
+ u"shape": u"linear",
+ u"width": 1,
+ u"color": color,
},
showlegend=False,
legendgroup=name,
- name='{name}'.format(name=name),
- text=["trend: {0:,}".format(int(avg)) for avg in avgs],
- hoverinfo="text+name"
+ name=f"{name}",
+ text=[f"trend: {int(avg):,}" for avg in avgs],
+ hoverinfo=u"text+name"
)
traces.append(trace_trend)
trace_anomalies = plgo.Scatter(
- x=anomalies.keys(),
+ x=list(anomalies.keys()),
y=anomalies_avgs,
- mode='markers',
- hoverinfo="none",
+ mode=u"markers",
+ hoverinfo=u"none",
showlegend=False,
legendgroup=name,
- name="{name}-anomalies".format(name=name),
+ name=f"{name}-anomalies",
marker={
- "size": 15,
- "symbol": "circle-open",
- "color": anomalies_colors,
- "colorscale": [[0.00, "red"],
- [0.33, "red"],
- [0.33, "white"],
- [0.66, "white"],
- [0.66, "green"],
- [1.00, "green"]],
- "showscale": True,
- "line": {
- "width": 2
+ u"size": 15,
+ u"symbol": u"circle-open",
+ u"color": anomalies_colors,
+ u"colorscale": [
+ [0.00, u"red"],
+ [0.33, u"red"],
+ [0.33, u"white"],
+ [0.66, u"white"],
+ [0.66, u"green"],
+ [1.00, u"green"]
+ ],
+ u"showscale": True,
+ u"line": {
+ u"width": 2
},
- "colorbar": {
- "y": 0.5,
- "len": 0.8,
- "title": "Circles Marking Data Classification",
- "titleside": 'right',
- "titlefont": {
- "size": 14
+ u"colorbar": {
+ u"y": 0.5,
+ u"len": 0.8,
+ u"title": u"Circles Marking Data Classification",
+ u"titleside": u"right",
+ u"titlefont": {
+ u"size": 14
},
- "tickmode": 'array',
- "tickvals": [0.167, 0.500, 0.833],
- "ticktext": ["Regression", "Normal", "Progression"],
- "ticks": "",
- "ticklen": 0,
- "tickangle": -90,
- "thickness": 10
+ u"tickmode": u"array",
+ u"tickvals": [0.167, 0.500, 0.833],
+ u"ticktext": [u"Regression", u"Normal", u"Progression"],
+ u"ticks": u"",
+ u"ticklen": 0,
+ u"tickangle": -90,
+ u"thickness": 10
}
}
)
@@ -308,8 +312,8 @@ def _generate_trending_traces(in_data, job_name, build_info,
if anomaly_classification:
return traces, anomaly_classification[-1]
- else:
- return traces, None
+
+ return traces, None
def _generate_all_charts(spec, input_data):
@@ -323,29 +327,40 @@ def _generate_all_charts(spec, input_data):
def _generate_chart(graph):
"""Generates the chart.
+
+ :param graph: The graph to be generated
+ :type graph: dict
+ :returns: Dictionary with the job name, csv table with results and
+ list of tests classification results.
+ :rtype: dict
"""
logs = list()
- logs.append(("INFO", " Generating the chart '{0}' ...".
- format(graph.get("title", ""))))
+ logs.append(
+ (u"INFO", f" Generating the chart {graph.get(u'title', u'')} ...")
+ )
- job_name = graph["data"].keys()[0]
+ job_name = list(graph[u"data"].keys())[0]
csv_tbl = list()
res = dict()
# Transform the data
- logs.append(("INFO", " Creating the data set for the {0} '{1}'.".
- format(graph.get("type", ""), graph.get("title", ""))))
+ logs.append(
+ (u"INFO",
+ f" Creating the data set for the {graph.get(u'type', u'')} "
+ f"{graph.get(u'title', u'')}."
+ )
+ )
data = input_data.filter_data(graph, continue_on_error=True)
if data is None:
- logging.error("No data.")
- return
+ logging.error(u"No data.")
+ return dict()
chart_data = dict()
chart_tags = dict()
- for job, job_data in data.iteritems():
+ for job, job_data in data.items():
if job != job_name:
continue
for index, bld in job_data.items():
@@ -354,8 +369,8 @@ def _generate_all_charts(spec, input_data):
chart_data[test_name] = OrderedDict()
try:
chart_data[test_name][int(index)] = \
- test["result"]["receive-rate"]
- chart_tags[test_name] = test.get("tags", None)
+ test[u"result"][u"receive-rate"]
+ chart_tags[test_name] = test.get(u"tags", None)
except (KeyError, TypeError):
pass
@@ -363,15 +378,15 @@ def _generate_all_charts(spec, input_data):
for tst_name, tst_data in chart_data.items():
tst_lst = list()
for bld in builds_dict[job_name]:
- itm = tst_data.get(int(bld), '')
+ itm = tst_data.get(int(bld), u'')
# CSIT-1180: Itm will be list, compute stats.
tst_lst.append(str(itm))
- csv_tbl.append("{0},".format(tst_name) + ",".join(tst_lst) + '\n')
+ csv_tbl.append(f"{tst_name}," + u",".join(tst_lst) + u'\n')
# Generate traces:
traces = list()
index = 0
- groups = graph.get("groups", None)
+ groups = graph.get(u"groups", None)
visibility = list()
if groups:
@@ -380,52 +395,53 @@ def _generate_all_charts(spec, input_data):
for tag in group:
for tst_name, test_data in chart_data.items():
if not test_data:
- logs.append(("WARNING",
- "No data for the test '{0}'".
- format(tst_name)))
+ logs.append(
+ (u"WARNING", f"No data for the test {tst_name}")
+ )
continue
- if tag in chart_tags[tst_name]:
- message = "index: {index}, test: {test}".format(
- index=index, test=tst_name)
- try:
- trace, rslt = _generate_trending_traces(
- test_data,
- job_name=job_name,
- build_info=build_info,
- name='-'.join(tst_name.split('.')[-1].
- split('-')[2:-1]),
- color=COLORS[index])
- except IndexError:
- message = "Out of colors: {}".format(message)
- logs.append(("ERROR", message))
- logging.error(message)
- index += 1
- continue
- traces.extend(trace)
- visible.extend([True for _ in range(len(trace))])
- res[tst_name] = rslt
+ if tag not in chart_tags[tst_name]:
+ continue
+ message = f"index: {index}, test: {tst_name}"
+ try:
+ trace, rslt = _generate_trending_traces(
+ test_data,
+ job_name=job_name,
+ build_info=build_info,
+ name=u'-'.join(tst_name.split(u'.')[-1].
+ split(u'-')[2:-1]),
+ color=COLORS[index])
+ except IndexError:
+ logs.append(
+ (u"ERROR", f"Out of colors: {message}")
+ )
+ logging.error(f"Out of colors: {message}")
index += 1
- break
+ continue
+ traces.extend(trace)
+ visible.extend([True for _ in range(len(trace))])
+ res[tst_name] = rslt
+ index += 1
+ break
visibility.append(visible)
else:
for tst_name, test_data in chart_data.items():
if not test_data:
- logs.append(("WARNING", "No data for the test '{0}'".
- format(tst_name)))
+ logs.append(
+ (u"WARNING", f"No data for the test {tst_name}")
+ )
continue
- message = "index: {index}, test: {test}".format(
- index=index, test=tst_name)
+ message = f"index: {index}, test: {tst_name}"
try:
trace, rslt = _generate_trending_traces(
test_data,
job_name=job_name,
build_info=build_info,
- name='-'.join(tst_name.split('.')[-1].split('-')[2:-1]),
+ name=u'-'.join(
+ tst_name.split(u'.')[-1].split(u'-')[2:-1]),
color=COLORS[index])
except IndexError:
- message = "Out of colors: {}".format(message)
- logs.append(("ERROR", message))
- logging.error(message)
+ logs.append((u"ERROR", f"Out of colors: {message}"))
+ logging.error(f"Out of colors: {message}")
index += 1
continue
traces.extend(trace)
@@ -435,101 +451,99 @@ def _generate_all_charts(spec, input_data):
if traces:
# Generate the chart:
try:
- layout = deepcopy(graph["layout"])
+ layout = deepcopy(graph[u"layout"])
except KeyError as err:
- logging.error("Finished with error: No layout defined")
+ logging.error(u"Finished with error: No layout defined")
logging.error(repr(err))
- return
+ return dict()
if groups:
show = list()
for i in range(len(visibility)):
visible = list()
- for r in range(len(visibility)):
- for _ in range(len(visibility[r])):
- visible.append(i == r)
+ for vis_idx, _ in enumerate(visibility):
+ for _ in range(len(visibility[vis_idx])):
+ visible.append(i == vis_idx)
show.append(visible)
buttons = list()
buttons.append(dict(
- label="All",
- method="update",
- args=[{"visible": [True for _ in range(len(show[0]))]}, ]
+ label=u"All",
+ method=u"update",
+ args=[{u"visible": [True for _ in range(len(show[0]))]}, ]
))
for i in range(len(groups)):
try:
- label = graph["group-names"][i]
+ label = graph[u"group-names"][i]
except (IndexError, KeyError):
- label = "Group {num}".format(num=i + 1)
+ label = f"Group {i + 1}"
buttons.append(dict(
label=label,
- method="update",
- args=[{"visible": show[i]}, ]
+ method=u"update",
+ args=[{u"visible": show[i]}, ]
))
- layout['updatemenus'] = list([
+ layout[u"updatemenus"] = list([
dict(
active=0,
- type="dropdown",
- direction="down",
- xanchor="left",
- yanchor="bottom",
+ type=u"dropdown",
+ direction=u"down",
+ xanchor=u"left",
+ yanchor=u"bottom",
x=-0.12,
y=1.0,
buttons=buttons
)
])
- name_file = "{0}-{1}{2}".format(spec.cpta["output-file"],
- graph["output-file-name"],
- spec.cpta["output-file-type"])
+ name_file = (
+ f"{spec.cpta[u'output-file']}-{graph[u'output-file-name']}"
+ f"{spec.cpta[u'output-file-type']}")
- logs.append(("INFO", " Writing the file '{0}' ...".
- format(name_file)))
+ logs.append((u"INFO", f" Writing the file {name_file} ..."))
plpl = plgo.Figure(data=traces, layout=layout)
try:
ploff.plot(plpl, show_link=False, auto_open=False,
filename=name_file)
except plerr.PlotlyEmptyDataError:
- logs.append(("WARNING", "No data for the plot. Skipped."))
+ logs.append((u"WARNING", u"No data for the plot. Skipped."))
for level, line in logs:
- if level == "INFO":
+ if level == u"INFO":
logging.info(line)
- elif level == "ERROR":
+ elif level == u"ERROR":
logging.error(line)
- elif level == "DEBUG":
+ elif level == u"DEBUG":
logging.debug(line)
- elif level == "CRITICAL":
+ elif level == u"CRITICAL":
logging.critical(line)
- elif level == "WARNING":
+ elif level == u"WARNING":
logging.warning(line)
- return {"job_name": job_name, "csv_table": csv_tbl, "results": res}
+ return {u"job_name": job_name, u"csv_table": csv_tbl, u"results": res}
builds_dict = dict()
- for job in spec.input["builds"].keys():
+ for job in spec.input[u"builds"].keys():
if builds_dict.get(job, None) is None:
builds_dict[job] = list()
- for build in spec.input["builds"][job]:
- status = build["status"]
- if status != "failed" and status != "not found" and \
- status != "removed":
- builds_dict[job].append(str(build["build"]))
+ for build in spec.input[u"builds"][job]:
+ status = build[u"status"]
+ if status not in (u"failed", u"not found", u"removed"):
+ builds_dict[job].append(str(build[u"build"]))
# Create "build ID": "date" dict:
build_info = dict()
- tb_tbl = spec.environment.get("testbeds", None)
+ tb_tbl = spec.environment.get(u"testbeds", None)
for job_name, job_data in builds_dict.items():
if build_info.get(job_name, None) is None:
build_info[job_name] = OrderedDict()
for build in job_data:
- testbed = ""
- tb_ip = input_data.metadata(job_name, build).get("testbed", "")
+ testbed = u""
+ tb_ip = input_data.metadata(job_name, build).get(u"testbed", u"")
if tb_ip and tb_tbl:
- testbed = tb_tbl.get(tb_ip, "")
+ testbed = tb_tbl.get(tb_ip, u"")
build_info[job_name][build] = (
- input_data.metadata(job_name, build).get("generated", ""),
- input_data.metadata(job_name, build).get("version", ""),
+ input_data.metadata(job_name, build).get(u"generated", u""),
+ input_data.metadata(job_name, build).get(u"version", u""),
testbed
)
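
Each build ends up keyed to a (generated date, version, testbed) triple; the CSV header code in the next hunk slices those column-wise. An illustrative sketch of the resulting shape, with hypothetical job and build identifiers:

    from collections import OrderedDict

    # Hypothetical job name, build numbers and metadata, for illustration only.
    build_info = {
        u"csit-vpp-perf-mrr-daily-master": OrderedDict([
            (u"1201", (u"20191120 03:15", u"20.01-rc0~540", u"t1-tb3")),
            (u"1202", (u"20191121 03:14", u"20.01-rc0~548", u"t1-tb4")),
        ])
    }
    job = u"csit-vpp-perf-mrr-daily-master"
    build_dates = [x[0] for x in build_info[job].values()]
    versions = [x[1] for x in build_info[job].values()]
    print(build_dates, versions)
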
@@ -537,36 +551,38 @@ def _generate_all_charts(spec, input_data):
# Create the header:
csv_tables = dict()
- for job_name in builds_dict.keys():
+ for job_name in builds_dict:
if csv_tables.get(job_name, None) is None:
csv_tables[job_name] = list()
- header = "Build Number:," + ",".join(builds_dict[job_name]) + '\n'
+ header = u"Build Number:," + u",".join(builds_dict[job_name]) + u'\n'
csv_tables[job_name].append(header)
build_dates = [x[0] for x in build_info[job_name].values()]
- header = "Build Date:," + ",".join(build_dates) + '\n'
+ header = u"Build Date:," + u",".join(build_dates) + u'\n'
csv_tables[job_name].append(header)
versions = [x[1] for x in build_info[job_name].values()]
- header = "Version:," + ",".join(versions) + '\n'
+ header = u"Version:," + u",".join(versions) + u'\n'
csv_tables[job_name].append(header)
- for chart in spec.cpta["plots"]:
+ for chart in spec.cpta[u"plots"]:
result = _generate_chart(chart)
+ if not result:
+ continue
- csv_tables[result["job_name"]].extend(result["csv_table"])
+ csv_tables[result[u"job_name"]].extend(result[u"csv_table"])
- if anomaly_classifications.get(result["job_name"], None) is None:
- anomaly_classifications[result["job_name"]] = dict()
- anomaly_classifications[result["job_name"]].update(result["results"])
+ if anomaly_classifications.get(result[u"job_name"], None) is None:
+ anomaly_classifications[result[u"job_name"]] = dict()
+ anomaly_classifications[result[u"job_name"]].update(result[u"results"])
# Write the tables:
for job_name, csv_table in csv_tables.items():
- file_name = spec.cpta["output-file"] + "-" + job_name + "-trending"
- with open("{0}.csv".format(file_name), 'w') as file_handler:
+ file_name = spec.cpta[u"output-file"] + u"-" + job_name + u"-trending"
+ with open(f"{file_name}.csv", u"w") as file_handler:
file_handler.writelines(csv_table)
txt_table = None
- with open("{0}.csv".format(file_name), 'rb') as csv_file:
- csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
+ with open(f"{file_name}.csv", u"rt") as csv_file:
+ csv_content = csv.reader(csv_file, delimiter=u',', quotechar=u'"')
line_nr = 0
for row in csv_content:
if txt_table is None:
@@ -580,37 +596,38 @@ def _generate_all_charts(spec, input_data):
pass
try:
txt_table.add_row(row)
+ # PrettyTable raises Exception
except Exception as err:
- logging.warning("Error occurred while generating TXT "
- "table:\n{0}".format(err))
+ logging.warning(
+ f"Error occurred while generating TXT table:\n{err}"
+ )
line_nr += 1
- txt_table.align["Build Number:"] = "l"
- with open("{0}.txt".format(file_name), "w") as txt_file:
+ txt_table.align[u"Build Number:"] = u"l"
+ with open(f"{file_name}.txt", u"w") as txt_file:
txt_file.write(str(txt_table))
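
The loop above mirrors each per-job trending CSV into an aligned TXT table; the construction of txt_table itself falls just outside this hunk. A condensed, standalone sketch of the same CSV-to-TXT conversion, with shortened rows and assuming prettytable is installed:

    import prettytable

    rows = [
        [u"Build Number:", u"1201", u"1202"],
        [u"Build Date:", u"20191120", u"20191121"],
        [u"Version:", u"20.01-rc0~540", u"20.01-rc0~548"],
    ]
    txt_table = prettytable.PrettyTable(rows[0])
    for row in rows[1:]:
        txt_table.add_row(row)
    txt_table.align[u"Build Number:"] = u"l"
    print(txt_table)
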
# Evaluate result:
if anomaly_classifications:
- result = "PASS"
- for job_name, job_data in anomaly_classifications.iteritems():
- file_name = "{0}-regressions-{1}.txt".\
- format(spec.cpta["output-file"], job_name)
- with open(file_name, 'w') as txt_file:
- for test_name, classification in job_data.iteritems():
- if classification == "regression":
- txt_file.write(test_name + '\n')
- if classification == "regression" or \
- classification == "outlier":
- result = "FAIL"
- file_name = "{0}-progressions-{1}.txt".\
- format(spec.cpta["output-file"], job_name)
- with open(file_name, 'w') as txt_file:
- for test_name, classification in job_data.iteritems():
- if classification == "progression":
- txt_file.write(test_name + '\n')
+ result = u"PASS"
+ for job_name, job_data in anomaly_classifications.items():
+ file_name = \
+ f"{spec.cpta[u'output-file']}-regressions-{job_name}.txt"
+ with open(file_name, u'w') as txt_file:
+ for test_name, classification in job_data.items():
+ if classification == u"regression":
+ txt_file.write(test_name + u'\n')
+ if classification in (u"regression", u"outlier"):
+ result = u"FAIL"
+ file_name = \
+ f"{spec.cpta[u'output-file']}-progressions-{job_name}.txt"
+ with open(file_name, u'w') as txt_file:
+ for test_name, classification in job_data.items():
+ if classification == u"progression":
+ txt_file.write(test_name + u'\n')
else:
- result = "FAIL"
+ result = u"FAIL"
- logging.info("Partial results: {0}".format(anomaly_classifications))
- logging.info("Result: {0}".format(result))
+ logging.info(f"Partial results: {anomaly_classifications}")
+ logging.info(f"Result: {result}")
return result
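
The evaluation block above reduces per-test classifications to a single verdict: any regression or outlier fails the run, and an empty classification dict fails it as well. A self-contained sketch of that reduction, assuming the same label set:

    def evaluate(anomaly_classifications):
        """Return PASS unless a test regressed, was an outlier, or nothing was classified."""
        if not anomaly_classifications:
            return u"FAIL"
        for job_data in anomaly_classifications.values():
            for classification in job_data.values():
                if classification in (u"regression", u"outlier"):
                    return u"FAIL"
        return u"PASS"

    print(evaluate({u"job": {u"test-a": u"normal", u"test-b": u"regression"}}))  # FAIL
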
diff --git a/resources/tools/presentation/generator_files.py b/resources/tools/presentation/generator_files.py
index ee2fc1221c..00a78cc99a 100644
--- a/resources/tools/presentation/generator_files.py
+++ b/resources/tools/presentation/generator_files.py
@@ -17,17 +17,18 @@
import logging
-from utils import get_files, get_rst_title_char
+from pal_utils import get_files, get_rst_title_char
-RST_INCLUDE_TABLE = ("\n.. only:: html\n\n"
- " .. csv-table::\n"
- " :header-rows: 1\n"
- " :widths: auto\n"
- " :align: center\n"
- " :file: {file_html}\n"
- "\n.. only:: latex\n\n"
- "\n .. raw:: latex\n\n"
- " \csvautolongtable{{{file_latex}}}\n\n")
+
+RST_INCLUDE_TABLE = (u"\n.. only:: html\n\n"
+ u" .. csv-table::\n"
+ u" :header-rows: 1\n"
+ u" :widths: auto\n"
+ u" :align: center\n"
+ u" :file: {file_html}\n"
+ u"\n.. only:: latex\n\n"
+ u"\n .. raw:: latex\n\n"
+ u" \\csvautolongtable{{{file_latex}}}\n\n")
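
The substantive change in this constant is the doubled backslash: the old literal's \c is an invalid escape sequence that Python 3 flags with a DeprecationWarning, while \\c still renders the intended LaTeX directive. A quick check of the template, standard library only, with the LaTeX branch shown in isolation:

    RST_INCLUDE_TABLE_LATEX = (u"\n.. only:: latex\n\n"
                               u"\n  .. raw:: latex\n\n"
                               u"        \\csvautolongtable{{{file_latex}}}\n\n")
    print(RST_INCLUDE_TABLE_LATEX.format(file_latex=u"suite_results.csv"))
    # Renders the directive as: \csvautolongtable{suite_results.csv}
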
def generate_files(spec, data):
@@ -39,14 +40,20 @@ def generate_files(spec, data):
:type data: InputData
"""
- logging.info("Generating the files ...")
+ generator = {
+ u"file_test_results": file_test_results
+ }
+
+ logging.info(u"Generating the files ...")
for file_spec in spec.files:
try:
- eval(file_spec["algorithm"])(file_spec, data)
- except NameError as err:
- logging.error("Probably algorithm '{alg}' is not defined: {err}".
- format(alg=file_spec["algorithm"], err=repr(err)))
- logging.info("Done.")
+ generator[file_spec[u"algorithm"]](file_spec, data)
+ except (NameError, KeyError) as err:
+ logging.error(
+ f"Probably algorithm {file_spec[u'algorithm']} is not defined: "
+ f"{repr(err)}"
+ )
+ logging.info(u"Done.")
def _tests_in_suite(suite_name, tests):
@@ -61,7 +68,7 @@ def _tests_in_suite(suite_name, tests):
"""
for key in tests.keys():
- if suite_name == tests[key]["parent"]:
+ if suite_name == tests[key][u"parent"]:
return True
return False
@@ -77,59 +84,63 @@ def file_test_results(file_spec, input_data):
:type input_data: InputData
"""
- file_name = "{0}{1}".format(file_spec["output-file"],
- file_spec["output-file-ext"])
- rst_header = file_spec["file-header"]
+ file_name = f"{file_spec[u'output-file']}{file_spec[u'output-file-ext']}"
+ rst_header = file_spec[u"file-header"]
- logging.info(" Generating the file {0} ...".format(file_name))
+ logging.info(f" Generating the file {file_name} ...")
- table_lst = get_files(file_spec["dir-tables"], ".csv", full_path=True)
- if len(table_lst) == 0:
- logging.error(" No tables to include in '{0}'. Skipping.".
- format(file_spec["dir-tables"]))
- return None
+ table_lst = get_files(file_spec[u"dir-tables"], u".csv", full_path=True)
+ if not table_lst:
+ logging.error(
+ f" No tables to include in {file_spec[u'dir-tables']}. Skipping."
+ )
+ return
- logging.info(" Writing file '{0}'".format(file_name))
+ logging.info(f" Writing file {file_name}")
- logging.info(" Creating the 'tests' data set for the {0} '{1}'.".
- format(file_spec.get("type", ""), file_spec.get("title", "")))
+ logging.info(
+ f" Creating the tests data set for the "
+ f"{file_spec.get(u'type', u'')} {file_spec.get(u'title', u'')}."
+ )
tests = input_data.filter_data(file_spec)
tests = input_data.merge_data(tests)
- logging.info(" Creating the 'suites' data set for the {0} '{1}'.".
- format(file_spec.get("type", ""), file_spec.get("title", "")))
- file_spec["filter"] = "all"
- suites = input_data.filter_data(file_spec, data_set="suites")
+ logging.info(
+ f" Creating the suites data set for the "
+ f"{file_spec.get(u'type', u'')} {file_spec.get(u'title', u'')}."
+ )
+ file_spec[u"filter"] = u"all"
+ suites = input_data.filter_data(file_spec, data_set=u"suites")
suites = input_data.merge_data(suites)
suites.sort_index(inplace=True)
- with open(file_name, "w") as file_handler:
+ with open(file_name, u"w") as file_handler:
file_handler.write(rst_header)
- for suite_longname, suite in suites.iteritems():
- if len(suite_longname.split(".")) <= file_spec["data-start-level"]:
+ for suite_longname, suite in suites.items():
+ if len(suite_longname.split(u".")) <= \
+ file_spec[u"data-start-level"]:
continue
- if not ("-ndrpdr" in suite["name"] or
- "-mrr" in suite["name"] or
- "-func" in suite["name"] or
- "-device" in suite["name"]):
- file_handler.write("\n{0}\n{1}\n".format(
- suite["name"], get_rst_title_char(
- suite["level"] - file_spec["data-start-level"] - 1) *
- len(suite["name"])))
-
- if _tests_in_suite(suite["name"], tests):
- file_handler.write("\n{0}\n{1}\n".format(
- suite["name"], get_rst_title_char(
- suite["level"] - file_spec["data-start-level"] - 1) *
- len(suite["name"])))
- file_handler.write("\n{0}\n".format(
- suite["doc"].replace('|br|', '\n\n -')))
+ title_line = \
+ get_rst_title_char(
+ suite[u"level"] - file_spec[u"data-start-level"] - 1
+ ) * len(suite[u"name"])
+ if not (u"-ndrpdr" in suite[u"name"] or
+ u"-mrr" in suite[u"name"] or
+ u"-func" in suite[u"name"] or
+ u"-device" in suite[u"name"]):
+ file_handler.write(f"\n{suite[u'name']}\n{title_line}\n")
+
+ if _tests_in_suite(suite[u"name"], tests):
+ file_handler.write(f"\n{suite[u'name']}\n{title_line}\n")
+ file_handler.write(
+ f"\n{suite[u'doc']}\n".replace(u'|br|', u'\n\n -')
+ )
for tbl_file in table_lst:
- if suite["name"] in tbl_file:
+ if suite[u"name"] in tbl_file:
file_handler.write(
RST_INCLUDE_TABLE.format(
file_latex=tbl_file,
- file_html=tbl_file.split("/")[-1]))
+ file_html=tbl_file.split(u"/")[-1]))
- logging.info(" Done.")
+ logging.info(u" Done.")
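
title_line above underlines each suite heading with an RST adornment character chosen by nesting depth. A sketch with a hypothetical stand-in for pal_utils.get_rst_title_char, the real helper imported at the top of this file:

    def rst_title_char(level):
        # Hypothetical ordering; the actual characters come from pal_utils.
        chars = (u"=", u"-", u"`", u"'", u".", u"~")
        return chars[level] if 0 <= level < len(chars) else chars[-1]

    suite_name = u"2n1l-10ge2p1x710-ethip4-ip4base"
    level = 1  # suite level minus data-start-level minus 1, as computed above
    print(f"\n{suite_name}\n{rst_title_char(level) * len(suite_name)}\n")
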
diff --git a/resources/tools/presentation/generator_plots.py b/resources/tools/presentation/generator_plots.py
index 3cbd35c430..dda5196008 100644
--- a/resources/tools/presentation/generator_plots.py
+++ b/resources/tools/presentation/generator_plots.py
@@ -17,23 +17,25 @@
import re
import logging
+
+from collections import OrderedDict
+from copy import deepcopy
+
import pandas as pd
import plotly.offline as ploff
import plotly.graph_objs as plgo
from plotly.exceptions import PlotlyError
-from collections import OrderedDict
-from copy import deepcopy
-from utils import mean, stdev
+from pal_utils import mean, stdev
-COLORS = ["SkyBlue", "Olive", "Purple", "Coral", "Indigo", "Pink",
- "Chocolate", "Brown", "Magenta", "Cyan", "Orange", "Black",
- "Violet", "Blue", "Yellow", "BurlyWood", "CadetBlue", "Crimson",
- "DarkBlue", "DarkCyan", "DarkGreen", "Green", "GoldenRod",
- "LightGreen", "LightSeaGreen", "LightSkyBlue", "Maroon",
- "MediumSeaGreen", "SeaGreen", "LightSlateGrey"]
+COLORS = [u"SkyBlue", u"Olive", u"Purple", u"Coral", u"Indigo", u"Pink",
+ u"Chocolate", u"Brown", u"Magenta", u"Cyan", u"Orange", u"Black",
+ u"Violet", u"Blue", u"Yellow", u"BurlyWood", u"CadetBlue", u"Crimson",
+ u"DarkBlue", u"DarkCyan", u"DarkGreen", u"Green", u"GoldenRod",
+ u"LightGreen", u"LightSeaGreen", u"LightSkyBlue", u"Maroon",
+ u"MediumSeaGreen", u"SeaGreen", u"LightSlateGrey"]
REGEX_NIC = re.compile(r'\d*ge\dp\d\D*\d*-')
@@ -47,22 +49,32 @@ def generate_plots(spec, data):
:type data: InputData
"""
- logging.info("Generating the plots ...")
+ generator = {
+ u"plot_nf_reconf_box_name": plot_nf_reconf_box_name,
+ u"plot_perf_box_name": plot_perf_box_name,
+ u"plot_lat_err_bars_name": plot_lat_err_bars_name,
+ u"plot_tsa_name": plot_tsa_name,
+ u"plot_http_server_perf_box": plot_http_server_perf_box,
+ u"plot_nf_heatmap": plot_nf_heatmap
+ }
+
+ logging.info(u"Generating the plots ...")
for index, plot in enumerate(spec.plots):
try:
- logging.info(" Plot nr {0}: {1}".format(index + 1,
- plot.get("title", "")))
- plot["limits"] = spec.configuration["limits"]
- eval(plot["algorithm"])(plot, data)
- logging.info(" Done.")
+ logging.info(f" Plot nr {index + 1}: {plot.get(u'title', u'')}")
+ plot[u"limits"] = spec.configuration[u"limits"]
+ generator[plot[u"algorithm"]](plot, data)
+ logging.info(u" Done.")
except NameError as err:
- logging.error("Probably algorithm '{alg}' is not defined: {err}".
- format(alg=plot["algorithm"], err=repr(err)))
- logging.info("Done.")
+ logging.error(
+ f"Probably algorithm {plot[u'algorithm']} is not defined: "
+ f"{repr(err)}"
+ )
+ logging.info(u"Done.")
-def plot_service_density_reconf_box_name(plot, input_data):
- """Generate the plot(s) with algorithm: plot_service_density_reconf_box_name
+def plot_nf_reconf_box_name(plot, input_data):
+ """Generate the plot(s) with algorithm: plot_nf_reconf_box_name
specified in the specification file.
:param plot: Plot to generate.
@@ -72,13 +84,15 @@ def plot_service_density_reconf_box_name(plot, input_data):
"""
# Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
+ logging.info(
+ f" Creating the data set for the {plot.get(u'type', u'')} "
+ f"{plot.get(u'title', u'')}."
+ )
data = input_data.filter_tests_by_name(
- plot, params=["result", "parent", "tags", "type"])
+ plot, params=[u"result", u"parent", u"tags", u"type"]
+ )
if data is None:
- logging.error("No data.")
+ logging.error(u"No data.")
return
# Prepare the data for the plot
@@ -87,14 +101,14 @@ def plot_service_density_reconf_box_name(plot, input_data):
for job in data:
for build in job:
for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_vals[test["parent"]] = list()
- loss[test["parent"]] = list()
+ if y_vals.get(test[u"parent"], None) is None:
+ y_vals[test[u"parent"]] = list()
+ loss[test[u"parent"]] = list()
try:
- y_vals[test["parent"]].append(test["result"]["time"])
- loss[test["parent"]].append(test["result"]["loss"])
+ y_vals[test[u"parent"]].append(test[u"result"][u"time"])
+ loss[test[u"parent"]].append(test[u"result"][u"loss"])
except (KeyError, TypeError):
- y_vals[test["parent"]].append(None)
+ y_vals[test[u"parent"]].append(None)
# Add None to the lists with missing data
max_len = 0
@@ -103,54 +117,58 @@ def plot_service_density_reconf_box_name(plot, input_data):
if len(val) > max_len:
max_len = len(val)
nr_of_samples.append(len(val))
- for key, val in y_vals.items():
+ for val in y_vals.values():
if len(val) < max_len:
val.extend([None for _ in range(max_len - len(val))])
# Add plot traces
traces = list()
- df = pd.DataFrame(y_vals)
- df.head()
- for i, col in enumerate(df.columns):
- tst_name = re.sub(REGEX_NIC, "",
- col.lower().replace('-ndrpdr', '').
- replace('2n1l-', ''))
- tst_name = "-".join(tst_name.split("-")[3:-2])
- name = "{nr}. ({samples:02d} run{plural}, packets lost average: " \
- "{loss:.1f}) {name}".format(
- nr=(i + 1),
- samples=nr_of_samples[i],
- plural='s' if nr_of_samples[i] > 1 else '',
- name=tst_name,
- loss=mean(loss[col]))
-
- traces.append(plgo.Box(x=[str(i + 1) + '.'] * len(df[col]),
- y=[y if y else None for y in df[col]],
- name=name,
- hoverinfo="y+name"))
+ df_y = pd.DataFrame(y_vals)
+ df_y.head()
+ for i, col in enumerate(df_y.columns):
+ tst_name = re.sub(REGEX_NIC, u"",
+ col.lower().replace(u'-ndrpdr', u'').
+ replace(u'2n1l-', u''))
+
+ traces.append(plgo.Box(
+ x=[str(i + 1) + u'.'] * len(df_y[col]),
+ y=[y if y else None for y in df_y[col]],
+ name=(
+ f"{i + 1}. "
+ f"({nr_of_samples[i]:02d} "
+ f"run{u's' if nr_of_samples[i] > 1 else u''}, "
+ f"packets lost average: {mean(loss[col]):.1f}) "
+ f"{u'-'.join(tst_name.split(u'-')[3:-2])}"
+ ),
+ hoverinfo=u"y+name"
+ ))
try:
# Create plot
- layout = deepcopy(plot["layout"])
- layout["title"] = "<b>Time Lost:</b> {0}".format(layout["title"])
- layout["yaxis"]["title"] = "<b>Implied Time Lost [s]</b>"
- layout["legend"]["font"]["size"] = 14
- layout["yaxis"].pop("range")
+ layout = deepcopy(plot[u"layout"])
+ layout[u"title"] = f"<b>Time Lost:</b> {layout[u'title']}"
+ layout[u"yaxis"][u"title"] = u"<b>Implied Time Lost [s]</b>"
+ layout[u"legend"][u"font"][u"size"] = 14
+ layout[u"yaxis"].pop(u"range")
plpl = plgo.Figure(data=traces, layout=layout)
# Export Plot
- file_type = plot.get("output-file-type", ".html")
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], file_type))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"], file_type))
+ file_type = plot.get(u"output-file-type", u".html")
+ logging.info(f" Writing file {plot[u'output-file']}{file_type}.")
+ ploff.plot(
+ plpl,
+ show_link=False,
+ auto_open=False,
+ filename=f"{plot[u'output-file']}{file_type}"
+ )
except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(repr(err).replace("\n", " ")))
+ logging.error(
+ f" Finished with error: {repr(err)}".replace(u"\n", u" ")
+ )
return
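
The padding step above, where shorter lists are topped up with None, exists because pandas rejects a dict of unequal-length lists; once padded, every DataFrame column becomes one box trace. A reduced sketch of just that step:

    import pandas as pd

    y_vals = {u"test-a": [1.2, 1.3, 1.1], u"test-b": [2.4]}
    max_len = max(len(val) for val in y_vals.values())
    for val in y_vals.values():
        if len(val) < max_len:
            val.extend([None for _ in range(max_len - len(val))])
    df_y = pd.DataFrame(y_vals)  # without the padding this raises ValueError
    print(df_y)
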
-def plot_performance_box_name(plot, input_data):
- """Generate the plot(s) with algorithm: plot_performance_box_name
+def plot_perf_box_name(plot, input_data):
+ """Generate the plot(s) with algorithm: plot_perf_box_name
specified in the specification file.
:param plot: Plot to generate.
@@ -160,13 +178,14 @@ def plot_performance_box_name(plot, input_data):
"""
# Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
+ logging.info(
+ f" Creating data set for the {plot.get(u'type', u'')} "
+ f"{plot.get(u'title', u'')}."
+ )
data = input_data.filter_tests_by_name(
- plot, params=["throughput", "parent", "tags", "type"])
+ plot, params=[u"throughput", u"parent", u"tags", u"type"])
if data is None:
- logging.error("No data.")
+ logging.error(u"No data.")
return
# Prepare the data for the plot
@@ -174,25 +193,24 @@ def plot_performance_box_name(plot, input_data):
for job in data:
for build in job:
for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_vals[test["parent"]] = list()
+ if y_vals.get(test[u"parent"], None) is None:
+ y_vals[test[u"parent"]] = list()
try:
- if test["type"] in ("NDRPDR", ):
- if "-pdr" in plot_title.lower():
- y_vals[test["parent"]].\
- append(test["throughput"]["PDR"]["LOWER"])
- elif "-ndr" in plot_title.lower():
- y_vals[test["parent"]]. \
- append(test["throughput"]["NDR"]["LOWER"])
- else:
- continue
- elif test["type"] in ("SOAK", ):
- y_vals[test["parent"]].\
- append(test["throughput"]["LOWER"])
+ if (test[u"type"] in (u"NDRPDR", ) and
+ u"-pdr" in plot.get(u"title", u"").lower()):
+ y_vals[test[u"parent"]].\
+ append(test[u"throughput"][u"PDR"][u"LOWER"])
+ elif (test[u"type"] in (u"NDRPDR", ) and
+ u"-ndr" in plot.get(u"title", u"").lower()):
+ y_vals[test[u"parent"]]. \
+ append(test[u"throughput"][u"NDR"][u"LOWER"])
+ elif test[u"type"] in (u"SOAK", ):
+ y_vals[test[u"parent"]].\
+ append(test[u"throughput"][u"LOWER"])
else:
continue
except (KeyError, TypeError):
- y_vals[test["parent"]].append(None)
+ y_vals[test[u"parent"]].append(None)
# Add None to the lists with missing data
max_len = 0
@@ -201,62 +219,66 @@ def plot_performance_box_name(plot, input_data):
if len(val) > max_len:
max_len = len(val)
nr_of_samples.append(len(val))
- for key, val in y_vals.items():
+ for val in y_vals.values():
if len(val) < max_len:
val.extend([None for _ in range(max_len - len(val))])
# Add plot traces
traces = list()
- df = pd.DataFrame(y_vals)
- df.head()
+ df_y = pd.DataFrame(y_vals)
+ df_y.head()
y_max = list()
- for i, col in enumerate(df.columns):
- tst_name = re.sub(REGEX_NIC, "",
- col.lower().replace('-ndrpdr', '').
- replace('2n1l-', ''))
- name = "{nr}. ({samples:02d} run{plural}) {name}".\
- format(nr=(i + 1),
- samples=nr_of_samples[i],
- plural='s' if nr_of_samples[i] > 1 else '',
- name=tst_name)
-
- logging.debug(name)
- traces.append(plgo.Box(x=[str(i + 1) + '.'] * len(df[col]),
- y=[y / 1000000 if y else None for y in df[col]],
- name=name,
- hoverinfo="y+name"))
+ for i, col in enumerate(df_y.columns):
+ tst_name = re.sub(REGEX_NIC, u"",
+ col.lower().replace(u'-ndrpdr', u'').
+ replace(u'2n1l-', u''))
+ traces.append(
+ plgo.Box(
+ x=[str(i + 1) + u'.'] * len(df_y[col]),
+ y=[y / 1000000 if y else None for y in df_y[col]],
+ name=(
+ f"{i + 1}. "
+ f"({nr_of_samples[i]:02d} "
+ f"run{u's' if nr_of_samples[i] > 1 else u''}) "
+ f"{tst_name}"
+ ),
+ hoverinfo=u"y+name"
+ )
+ )
try:
- val_max = max(df[col])
- except ValueError as err:
+ val_max = max(df_y[col])
+ if val_max:
+ y_max.append(int(val_max / 1000000) + 2)
+ except (ValueError, TypeError) as err:
logging.error(repr(err))
continue
- if val_max:
- y_max.append(int(val_max / 1000000) + 2)
try:
# Create plot
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Throughput:</b> {0}". \
- format(layout["title"])
+ layout = deepcopy(plot[u"layout"])
+ if layout.get(u"title", None):
+ layout[u"title"] = f"<b>Throughput:</b> {layout[u'title']}"
if y_max:
- layout["yaxis"]["range"] = [0, max(y_max)]
+ layout[u"yaxis"][u"range"] = [0, max(y_max)]
plpl = plgo.Figure(data=traces, layout=layout)
# Export Plot
- file_type = plot.get("output-file-type", ".html")
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], file_type))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"], file_type))
+ logging.info(f" Writing file {plot[u'output-file']}.html.")
+ ploff.plot(
+ plpl,
+ show_link=False,
+ auto_open=False,
+ filename=f"{plot[u'output-file']}.html"
+ )
except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(repr(err).replace("\n", " ")))
+ logging.error(
+ f" Finished with error: {repr(err)}".replace(u"\n", u" ")
+ )
return
-def plot_latency_error_bars_name(plot, input_data):
- """Generate the plot(s) with algorithm: plot_latency_error_bars_name
+def plot_lat_err_bars_name(plot, input_data):
+ """Generate the plot(s) with algorithm: plot_lat_err_bars_name
specified in the specification file.
:param plot: Plot to generate.
@@ -266,13 +288,14 @@ def plot_latency_error_bars_name(plot, input_data):
"""
# Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
+ plot_title = plot.get(u"title", u"")
+ logging.info(
+ f" Creating data set for the {plot.get(u'type', u'')} {plot_title}."
+ )
data = input_data.filter_tests_by_name(
- plot, params=["latency", "parent", "tags", "type"])
+ plot, params=[u"latency", u"parent", u"tags", u"type"])
if data is None:
- logging.error("No data.")
+ logging.error(u"No data.")
return
# Prepare the data for the plot
@@ -281,12 +304,11 @@ def plot_latency_error_bars_name(plot, input_data):
for build in job:
for test in build:
try:
- logging.debug("test['latency']: {0}\n".
- format(test["latency"]))
+ logging.debug(f"test[u'latency']: {test[u'latency']}\n")
except ValueError as err:
logging.warning(repr(err))
- if y_tmp_vals.get(test["parent"], None) is None:
- y_tmp_vals[test["parent"]] = [
+ if y_tmp_vals.get(test[u"parent"], None) is None:
+ y_tmp_vals[test[u"parent"]] = [
list(), # direction1, min
list(), # direction1, avg
list(), # direction1, max
@@ -295,31 +317,30 @@ def plot_latency_error_bars_name(plot, input_data):
list() # direction2, max
]
try:
- if test["type"] in ("NDRPDR", ):
- if "-pdr" in plot_title.lower():
- ttype = "PDR"
- elif "-ndr" in plot_title.lower():
- ttype = "NDR"
- else:
- logging.warning("Invalid test type: {0}".
- format(test["type"]))
- continue
- y_tmp_vals[test["parent"]][0].append(
- test["latency"][ttype]["direction1"]["min"])
- y_tmp_vals[test["parent"]][1].append(
- test["latency"][ttype]["direction1"]["avg"])
- y_tmp_vals[test["parent"]][2].append(
- test["latency"][ttype]["direction1"]["max"])
- y_tmp_vals[test["parent"]][3].append(
- test["latency"][ttype]["direction2"]["min"])
- y_tmp_vals[test["parent"]][4].append(
- test["latency"][ttype]["direction2"]["avg"])
- y_tmp_vals[test["parent"]][5].append(
- test["latency"][ttype]["direction2"]["max"])
+ if test[u"type"] not in (u"NDRPDR", ):
+ logging.warning(f"Invalid test type: {test[u'type']}")
+ continue
+ if u"-pdr" in plot_title.lower():
+ ttype = u"PDR"
+ elif u"-ndr" in plot_title.lower():
+ ttype = u"NDR"
else:
- logging.warning("Invalid test type: {0}".
- format(test["type"]))
+ logging.warning(
+ f"Invalid test type: {test[u'type']}"
+ )
continue
+ y_tmp_vals[test[u"parent"]][0].append(
+ test[u"latency"][ttype][u"direction1"][u"min"])
+ y_tmp_vals[test[u"parent"]][1].append(
+ test[u"latency"][ttype][u"direction1"][u"avg"])
+ y_tmp_vals[test[u"parent"]][2].append(
+ test[u"latency"][ttype][u"direction1"][u"max"])
+ y_tmp_vals[test[u"parent"]][3].append(
+ test[u"latency"][ttype][u"direction2"][u"min"])
+ y_tmp_vals[test[u"parent"]][4].append(
+ test[u"latency"][ttype][u"direction2"][u"avg"])
+ y_tmp_vals[test[u"parent"]][5].append(
+ test[u"latency"][ttype][u"direction2"][u"max"])
except (KeyError, TypeError) as err:
logging.warning(repr(err))
@@ -329,8 +350,8 @@ def plot_latency_error_bars_name(plot, input_data):
y_maxs = list()
nr_of_samples = list()
for key, val in y_tmp_vals.items():
- name = re.sub(REGEX_NIC, "", key.replace('-ndrpdr', '').
- replace('2n1l-', ''))
+ name = re.sub(REGEX_NIC, u"", key.replace(u'-ndrpdr', u'').
+ replace(u'2n1l-', u''))
x_vals.append(name) # dir 1
y_vals.append(mean(val[1]) if val[1] else None)
y_mins.append(mean(val[0]) if val[0] else None)
@@ -345,22 +366,22 @@ def plot_latency_error_bars_name(plot, input_data):
traces = list()
annotations = list()
- for idx in range(len(x_vals)):
+ for idx, _ in enumerate(x_vals):
if not bool(int(idx % 2)):
- direction = "West-East"
+ direction = u"West-East"
else:
- direction = "East-West"
- hovertext = ("No. of Runs: {nr}<br>"
- "Test: {test}<br>"
- "Direction: {dir}<br>".format(test=x_vals[idx],
- dir=direction,
- nr=nr_of_samples[idx]))
+ direction = u"East-West"
+ hovertext = (
+ f"No. of Runs: {nr_of_samples[idx]}<br>"
+ f"Test: {x_vals[idx]}<br>"
+ f"Direction: {direction}<br>"
+ )
if isinstance(y_maxs[idx], float):
- hovertext += "Max: {max:.2f}uSec<br>".format(max=y_maxs[idx])
+ hovertext += f"Max: {y_maxs[idx]:.2f}uSec<br>"
if isinstance(y_vals[idx], float):
- hovertext += "Mean: {avg:.2f}uSec<br>".format(avg=y_vals[idx])
+ hovertext += f"Mean: {y_vals[idx]:.2f}uSec<br>"
if isinstance(y_mins[idx], float):
- hovertext += "Min: {min:.2f}uSec".format(min=y_mins[idx])
+ hovertext += f"Min: {y_mins[idx]:.2f}uSec"
if isinstance(y_maxs[idx], float) and isinstance(y_vals[idx], float):
array = [y_maxs[idx] - y_vals[idx], ]
@@ -376,9 +397,9 @@ def plot_latency_error_bars_name(plot, input_data):
name=x_vals[idx],
legendgroup=x_vals[idx],
showlegend=bool(int(idx % 2)),
- mode="markers",
+ mode=u"markers",
error_y=dict(
- type='data',
+ type=u"data",
symmetric=False,
array=array,
arrayminus=arrayminus,
@@ -389,48 +410,49 @@ def plot_latency_error_bars_name(plot, input_data):
color=COLORS[int(idx / 2)],
),
text=hovertext,
- hoverinfo="text",
+ hoverinfo=u"text",
))
annotations.append(dict(
x=idx,
y=0,
- xref="x",
- yref="y",
- xanchor="center",
- yanchor="top",
- text="E-W" if bool(int(idx % 2)) else "W-E",
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"center",
+ yanchor=u"top",
+ text=u"E-W" if bool(int(idx % 2)) else u"W-E",
font=dict(
size=16,
),
- align="center",
+ align=u"center",
showarrow=False
))
try:
# Create plot
- file_type = plot.get("output-file-type", ".html")
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], file_type))
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Latency:</b> {0}".\
- format(layout["title"])
- layout["annotations"] = annotations
+ file_type = plot.get(u"output-file-type", u".html")
+ logging.info(f" Writing file {plot[u'output-file']}{file_type}.")
+ layout = deepcopy(plot[u"layout"])
+ if layout.get(u"title", None):
+ layout[u"title"] = f"<b>Latency:</b> {layout[u'title']}"
+ layout[u"annotations"] = annotations
plpl = plgo.Figure(data=traces, layout=layout)
# Export Plot
- ploff.plot(plpl,
- show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"], file_type))
+ ploff.plot(
+ plpl,
+ show_link=False, auto_open=False,
+ filename=f"{plot[u'output-file']}{file_type}"
+ )
except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(str(err).replace("\n", " ")))
+ logging.error(
+ f" Finished with error: {repr(err)}".replace(u"\n", u" ")
+ )
return
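
Each latency point above is a single marker with an asymmetric error bar: the upper whisker is max minus mean, the lower one mean minus min. A minimal trace showing just that construction, assuming plotly is available:

    import plotly.graph_objs as plgo

    mean_lat, min_lat, max_lat = 42.0, 30.0, 60.0  # illustrative microseconds
    trace = plgo.Scatter(
        x=[0, ],
        y=[mean_lat, ],
        mode=u"markers",
        error_y=dict(
            type=u"data",
            symmetric=False,
            array=[max_lat - mean_lat, ],       # upper whisker
            arrayminus=[mean_lat - min_lat, ],  # lower whisker
        ),
    )
    print(trace)
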
-def plot_throughput_speedup_analysis_name(plot, input_data):
+def plot_tsa_name(plot, input_data):
"""Generate the plot(s) with algorithm:
- plot_throughput_speedup_analysis_name
+ plot_tsa_name
specified in the specification file.
:param plot: Plot to generate.
@@ -440,956 +462,51 @@ def plot_throughput_speedup_analysis_name(plot, input_data):
"""
# Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
+ plot_title = plot.get(u"title", u"")
+ logging.info(
+ f" Creating data set for the {plot.get(u'type', u'')} {plot_title}."
+ )
data = input_data.filter_tests_by_name(
- plot, params=["throughput", "parent", "tags", "type"])
+ plot, params=[u"throughput", u"parent", u"tags", u"type"])
if data is None:
- logging.error("No data.")
+ logging.error(u"No data.")
return
y_vals = OrderedDict()
for job in data:
for build in job:
for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_vals[test["parent"]] = {"1": list(),
- "2": list(),
- "4": list()}
- try:
- if test["type"] in ("NDRPDR",):
- if "-pdr" in plot_title.lower():
- ttype = "PDR"
- elif "-ndr" in plot_title.lower():
- ttype = "NDR"
- else:
- continue
- if "1C" in test["tags"]:
- y_vals[test["parent"]]["1"]. \
- append(test["throughput"][ttype]["LOWER"])
- elif "2C" in test["tags"]:
- y_vals[test["parent"]]["2"]. \
- append(test["throughput"][ttype]["LOWER"])
- elif "4C" in test["tags"]:
- y_vals[test["parent"]]["4"]. \
- append(test["throughput"][ttype]["LOWER"])
- except (KeyError, TypeError):
- pass
-
- if not y_vals:
- logging.warning("No data for the plot '{}'".
- format(plot.get("title", "")))
- return
-
- y_1c_max = dict()
- for test_name, test_vals in y_vals.items():
- for key, test_val in test_vals.items():
- if test_val:
- avg_val = sum(test_val) / len(test_val)
- y_vals[test_name][key] = (avg_val, len(test_val))
- ideal = avg_val / (int(key) * 1000000.0)
- if test_name not in y_1c_max or ideal > y_1c_max[test_name]:
- y_1c_max[test_name] = ideal
-
- vals = OrderedDict()
- y_max = list()
- nic_limit = 0
- lnk_limit = 0
- pci_limit = plot["limits"]["pci"]["pci-g3-x8"]
- for test_name, test_vals in y_vals.items():
- try:
- if test_vals["1"][1]:
- name = re.sub(REGEX_NIC, "", test_name.replace('-ndrpdr', '').
- replace('2n1l-', ''))
- vals[name] = OrderedDict()
- y_val_1 = test_vals["1"][0] / 1000000.0
- y_val_2 = test_vals["2"][0] / 1000000.0 if test_vals["2"][0] \
- else None
- y_val_4 = test_vals["4"][0] / 1000000.0 if test_vals["4"][0] \
- else None
-
- vals[name]["val"] = [y_val_1, y_val_2, y_val_4]
- vals[name]["rel"] = [1.0, None, None]
- vals[name]["ideal"] = [y_1c_max[test_name],
- y_1c_max[test_name] * 2,
- y_1c_max[test_name] * 4]
- vals[name]["diff"] = [(y_val_1 - y_1c_max[test_name]) * 100 /
- y_val_1, None, None]
- vals[name]["count"] = [test_vals["1"][1],
- test_vals["2"][1],
- test_vals["4"][1]]
-
- try:
- val_max = max(vals[name]["val"])
- except ValueError as err:
- logging.error(repr(err))
- continue
- if val_max:
- y_max.append(val_max)
-
- if y_val_2:
- vals[name]["rel"][1] = round(y_val_2 / y_val_1, 2)
- vals[name]["diff"][1] = \
- (y_val_2 - vals[name]["ideal"][1]) * 100 / y_val_2
- if y_val_4:
- vals[name]["rel"][2] = round(y_val_4 / y_val_1, 2)
- vals[name]["diff"][2] = \
- (y_val_4 - vals[name]["ideal"][2]) * 100 / y_val_4
- except IndexError as err:
- logging.warning("No data for '{0}'".format(test_name))
- logging.warning(repr(err))
-
- # Limits:
- if "x520" in test_name:
- limit = plot["limits"]["nic"]["x520"]
- elif "x710" in test_name:
- limit = plot["limits"]["nic"]["x710"]
- elif "xxv710" in test_name:
- limit = plot["limits"]["nic"]["xxv710"]
- elif "xl710" in test_name:
- limit = plot["limits"]["nic"]["xl710"]
- elif "x553" in test_name:
- limit = plot["limits"]["nic"]["x553"]
- else:
- limit = 0
- if limit > nic_limit:
- nic_limit = limit
-
- mul = 2 if "ge2p" in test_name else 1
- if "10ge" in test_name:
- limit = plot["limits"]["link"]["10ge"] * mul
- elif "25ge" in test_name:
- limit = plot["limits"]["link"]["25ge"] * mul
- elif "40ge" in test_name:
- limit = plot["limits"]["link"]["40ge"] * mul
- elif "100ge" in test_name:
- limit = plot["limits"]["link"]["100ge"] * mul
- else:
- limit = 0
- if limit > lnk_limit:
- lnk_limit = limit
-
- traces = list()
- annotations = list()
- x_vals = [1, 2, 4]
-
- # Limits:
- try:
- threshold = 1.1 * max(y_max) # 10%
- except ValueError as err:
- logging.error(err)
- return
- nic_limit /= 1000000.0
- traces.append(plgo.Scatter(
- x=x_vals,
- y=[nic_limit, ] * len(x_vals),
- name="NIC: {0:.2f}Mpps".format(nic_limit),
- showlegend=False,
- mode="lines",
- line=dict(
- dash="dot",
- color=COLORS[-1],
- width=1),
- hoverinfo="none"
- ))
- annotations.append(dict(
- x=1,
- y=nic_limit,
- xref="x",
- yref="y",
- xanchor="left",
- yanchor="bottom",
- text="NIC: {0:.2f}Mpps".format(nic_limit),
- font=dict(
- size=14,
- color=COLORS[-1],
- ),
- align="left",
- showarrow=False
- ))
- y_max.append(nic_limit)
-
- lnk_limit /= 1000000.0
- if lnk_limit < threshold:
- traces.append(plgo.Scatter(
- x=x_vals,
- y=[lnk_limit, ] * len(x_vals),
- name="Link: {0:.2f}Mpps".format(lnk_limit),
- showlegend=False,
- mode="lines",
- line=dict(
- dash="dot",
- color=COLORS[-2],
- width=1),
- hoverinfo="none"
- ))
- annotations.append(dict(
- x=1,
- y=lnk_limit,
- xref="x",
- yref="y",
- xanchor="left",
- yanchor="bottom",
- text="Link: {0:.2f}Mpps".format(lnk_limit),
- font=dict(
- size=14,
- color=COLORS[-2],
- ),
- align="left",
- showarrow=False
- ))
- y_max.append(lnk_limit)
-
- pci_limit /= 1000000.0
- if (pci_limit < threshold and
- (pci_limit < lnk_limit * 0.95 or lnk_limit > lnk_limit * 1.05)):
- traces.append(plgo.Scatter(
- x=x_vals,
- y=[pci_limit, ] * len(x_vals),
- name="PCIe: {0:.2f}Mpps".format(pci_limit),
- showlegend=False,
- mode="lines",
- line=dict(
- dash="dot",
- color=COLORS[-3],
- width=1),
- hoverinfo="none"
- ))
- annotations.append(dict(
- x=1,
- y=pci_limit,
- xref="x",
- yref="y",
- xanchor="left",
- yanchor="bottom",
- text="PCIe: {0:.2f}Mpps".format(pci_limit),
- font=dict(
- size=14,
- color=COLORS[-3],
- ),
- align="left",
- showarrow=False
- ))
- y_max.append(pci_limit)
-
- # Perfect and measured:
- cidx = 0
- for name, val in vals.iteritems():
- hovertext = list()
- try:
- for idx in range(len(val["val"])):
- htext = ""
- if isinstance(val["val"][idx], float):
- htext += "No. of Runs: {1}<br>" \
- "Mean: {0:.2f}Mpps<br>".format(val["val"][idx],
- val["count"][idx])
- if isinstance(val["diff"][idx], float):
- htext += "Diff: {0:.0f}%<br>".format(
- round(val["diff"][idx]))
- if isinstance(val["rel"][idx], float):
- htext += "Speedup: {0:.2f}".format(val["rel"][idx])
- hovertext.append(htext)
- traces.append(plgo.Scatter(x=x_vals,
- y=val["val"],
- name=name,
- legendgroup=name,
- mode="lines+markers",
- line=dict(
- color=COLORS[cidx],
- width=2),
- marker=dict(
- symbol="circle",
- size=10
- ),
- text=hovertext,
- hoverinfo="text+name"
- ))
- traces.append(plgo.Scatter(x=x_vals,
- y=val["ideal"],
- name="{0} perfect".format(name),
- legendgroup=name,
- showlegend=False,
- mode="lines",
- line=dict(
- color=COLORS[cidx],
- width=2,
- dash="dash"),
- text=["Perfect: {0:.2f}Mpps".format(y)
- for y in val["ideal"]],
- hoverinfo="text"
- ))
- cidx += 1
- except (IndexError, ValueError, KeyError) as err:
- logging.warning("No data for '{0}'".format(name))
- logging.warning(repr(err))
-
- try:
- # Create plot
- file_type = plot.get("output-file-type", ".html")
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], file_type))
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Speedup Multi-core:</b> {0}". \
- format(layout["title"])
- layout["yaxis"]["range"] = [0, int(max(y_max) * 1.1)]
- layout["annotations"].extend(annotations)
- plpl = plgo.Figure(data=traces, layout=layout)
-
- # Export Plot
- ploff.plot(plpl,
- show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"], file_type))
- except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(repr(err).replace("\n", " ")))
- return
-
-
-def plot_performance_box(plot, input_data):
- """Generate the plot(s) with algorithm: plot_performance_box
- specified in the specification file.
-
- TODO: Remove when not needed.
-
- :param plot: Plot to generate.
- :param input_data: Data to process.
- :type plot: pandas.Series
- :type input_data: InputData
- """
-
- # Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
- data = input_data.filter_data(plot)
- if data is None:
- logging.error("No data.")
- return
-
- # Prepare the data for the plot
- y_vals = dict()
- y_tags = dict()
- for job in data:
- for build in job:
- for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_vals[test["parent"]] = list()
- y_tags[test["parent"]] = test.get("tags", None)
+ if y_vals.get(test[u"parent"], None) is None:
+ y_vals[test[u"parent"]] = {
+ u"1": list(),
+ u"2": list(),
+ u"4": list()
+ }
try:
- if test["type"] in ("NDRPDR", ):
- if "-pdr" in plot_title.lower():
- y_vals[test["parent"]].\
- append(test["throughput"]["PDR"]["LOWER"])
- elif "-ndr" in plot_title.lower():
- y_vals[test["parent"]]. \
- append(test["throughput"]["NDR"]["LOWER"])
- else:
- continue
- elif test["type"] in ("SOAK", ):
- y_vals[test["parent"]].\
- append(test["throughput"]["LOWER"])
- else:
- continue
- except (KeyError, TypeError):
- y_vals[test["parent"]].append(None)
-
- # Sort the tests
- order = plot.get("sort", None)
- if order and y_tags:
- y_sorted = OrderedDict()
- y_tags_l = {s: [t.lower() for t in ts] for s, ts in y_tags.items()}
- for tag in order:
- logging.debug(tag)
- for suite, tags in y_tags_l.items():
- if "not " in tag:
- tag = tag.split(" ")[-1]
- if tag.lower() in tags:
- continue
- else:
- if tag.lower() not in tags:
+ if test[u"type"] not in (u"NDRPDR",):
continue
- try:
- y_sorted[suite] = y_vals.pop(suite)
- y_tags_l.pop(suite)
- logging.debug(suite)
- except KeyError as err:
- logging.error("Not found: {0}".format(repr(err)))
- finally:
- break
- else:
- y_sorted = y_vals
-
- # Add None to the lists with missing data
- max_len = 0
- nr_of_samples = list()
- for val in y_sorted.values():
- if len(val) > max_len:
- max_len = len(val)
- nr_of_samples.append(len(val))
- for key, val in y_sorted.items():
- if len(val) < max_len:
- val.extend([None for _ in range(max_len - len(val))])
-
- # Add plot traces
- traces = list()
- df = pd.DataFrame(y_sorted)
- df.head()
- y_max = list()
- for i, col in enumerate(df.columns):
- tst_name = re.sub(REGEX_NIC, "",
- col.lower().replace('-ndrpdr', '').
- replace('2n1l-', ''))
- name = "{nr}. ({samples:02d} run{plural}) {name}".\
- format(nr=(i + 1),
- samples=nr_of_samples[i],
- plural='s' if nr_of_samples[i] > 1 else '',
- name=tst_name)
-
- logging.debug(name)
- traces.append(plgo.Box(x=[str(i + 1) + '.'] * len(df[col]),
- y=[y / 1000000 if y else None for y in df[col]],
- name=name,
- **plot["traces"]))
- try:
- val_max = max(df[col])
- except ValueError as err:
- logging.error(repr(err))
- continue
- if val_max:
- y_max.append(int(val_max / 1000000) + 2)
-
- try:
- # Create plot
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Throughput:</b> {0}". \
- format(layout["title"])
- if y_max:
- layout["yaxis"]["range"] = [0, max(y_max)]
- plpl = plgo.Figure(data=traces, layout=layout)
-
- # Export Plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
- except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(repr(err).replace("\n", " ")))
- return
-
-
-def plot_soak_bars(plot, input_data):
- """Generate the plot(s) with algorithm: plot_soak_bars
- specified in the specification file.
-
- :param plot: Plot to generate.
- :param input_data: Data to process.
- :type plot: pandas.Series
- :type input_data: InputData
- """
-
- # Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
- data = input_data.filter_data(plot)
- if data is None:
- logging.error("No data.")
- return
-
- # Prepare the data for the plot
- y_vals = dict()
- y_tags = dict()
- for job in data:
- for build in job:
- for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_tags[test["parent"]] = test.get("tags", None)
- try:
- if test["type"] in ("SOAK", ):
- y_vals[test["parent"]] = test["throughput"]
- else:
- continue
- except (KeyError, TypeError):
- y_vals[test["parent"]] = dict()
-
- # Sort the tests
- order = plot.get("sort", None)
- if order and y_tags:
- y_sorted = OrderedDict()
- y_tags_l = {s: [t.lower() for t in ts] for s, ts in y_tags.items()}
- for tag in order:
- logging.debug(tag)
- for suite, tags in y_tags_l.items():
- if "not " in tag:
- tag = tag.split(" ")[-1]
- if tag.lower() in tags:
- continue
- else:
- if tag.lower() not in tags:
- continue
- try:
- y_sorted[suite] = y_vals.pop(suite)
- y_tags_l.pop(suite)
- logging.debug(suite)
- except KeyError as err:
- logging.error("Not found: {0}".format(repr(err)))
- finally:
- break
- else:
- y_sorted = y_vals
-
- idx = 0
- y_max = 0
- traces = list()
- for test_name, test_data in y_sorted.items():
- idx += 1
- name = "{nr}. {name}".\
- format(nr=idx, name=test_name.lower().replace('-soak', ''))
- if len(name) > 50:
- name_lst = name.split('-')
- name = ""
- split_name = True
- for segment in name_lst:
- if (len(name) + len(segment) + 1) > 50 and split_name:
- name += "<br> "
- split_name = False
- name += segment + '-'
- name = name[:-1]
-
- y_val = test_data.get("LOWER", None)
- if y_val:
- y_val /= 1000000
- if y_val > y_max:
- y_max = y_val
-
- time = "No Information"
- result = "No Information"
- hovertext = ("{name}<br>"
- "Packet Throughput: {val:.2f}Mpps<br>"
- "Final Duration: {time}<br>"
- "Result: {result}".format(name=name,
- val=y_val,
- time=time,
- result=result))
- traces.append(plgo.Bar(x=[str(idx) + '.', ],
- y=[y_val, ],
- name=name,
- text=hovertext,
- hoverinfo="text"))
- try:
- # Create plot
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Packet Throughput:</b> {0}". \
- format(layout["title"])
- if y_max:
- layout["yaxis"]["range"] = [0, y_max + 1]
- plpl = plgo.Figure(data=traces, layout=layout)
- # Export Plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
- except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(repr(err).replace("\n", " ")))
- return
-
-
-def plot_soak_boxes(plot, input_data):
- """Generate the plot(s) with algorithm: plot_soak_boxes
- specified in the specification file.
-
- :param plot: Plot to generate.
- :param input_data: Data to process.
- :type plot: pandas.Series
- :type input_data: InputData
- """
-
- # Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
- data = input_data.filter_data(plot)
- if data is None:
- logging.error("No data.")
- return
-
- # Prepare the data for the plot
- y_vals = dict()
- y_tags = dict()
- for job in data:
- for build in job:
- for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_tags[test["parent"]] = test.get("tags", None)
- try:
- if test["type"] in ("SOAK", ):
- y_vals[test["parent"]] = test["throughput"]
- else:
- continue
- except (KeyError, TypeError):
- y_vals[test["parent"]] = dict()
-
- # Sort the tests
- order = plot.get("sort", None)
- if order and y_tags:
- y_sorted = OrderedDict()
- y_tags_l = {s: [t.lower() for t in ts] for s, ts in y_tags.items()}
- for tag in order:
- logging.debug(tag)
- for suite, tags in y_tags_l.items():
- if "not " in tag:
- tag = tag.split(" ")[-1]
- if tag.lower() in tags:
- continue
- else:
- if tag.lower() not in tags:
- continue
- try:
- y_sorted[suite] = y_vals.pop(suite)
- y_tags_l.pop(suite)
- logging.debug(suite)
- except KeyError as err:
- logging.error("Not found: {0}".format(repr(err)))
- finally:
- break
- else:
- y_sorted = y_vals
-
- idx = 0
- y_max = 0
- traces = list()
- for test_name, test_data in y_sorted.items():
- idx += 1
- name = "{nr}. {name}".\
- format(nr=idx, name=test_name.lower().replace('-soak', '').
- replace('2n1l-', ''))
- if len(name) > 55:
- name_lst = name.split('-')
- name = ""
- split_name = True
- for segment in name_lst:
- if (len(name) + len(segment) + 1) > 55 and split_name:
- name += "<br> "
- split_name = False
- name += segment + '-'
- name = name[:-1]
-
- y_val = test_data.get("UPPER", None)
- if y_val:
- y_val /= 1000000
- if y_val > y_max:
- y_max = y_val
-
- y_base = test_data.get("LOWER", None)
- if y_base:
- y_base /= 1000000
-
- hovertext = ("Upper bound: {upper:.2f}<br>"
- "Lower bound: {lower:.2f}".format(upper=y_val,
- lower=y_base))
- traces.append(plgo.Bar(x=[str(idx) + '.', ],
- # +0.05 to see the value in case lower == upper
- y=[y_val - y_base + 0.05, ],
- base=y_base,
- name=name,
- text=hovertext,
- hoverinfo="text"))
- try:
- # Create plot
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Throughput:</b> {0}". \
- format(layout["title"])
- if y_max:
- layout["yaxis"]["range"] = [0, y_max + 1]
- plpl = plgo.Figure(data=traces, layout=layout)
- # Export Plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
- except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(repr(err).replace("\n", " ")))
- return
-
-
-def plot_latency_error_bars(plot, input_data):
- """Generate the plot(s) with algorithm: plot_latency_error_bars
- specified in the specification file.
-
- TODO: Remove when not needed.
-
- :param plot: Plot to generate.
- :param input_data: Data to process.
- :type plot: pandas.Series
- :type input_data: InputData
- """
-
- # Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
- data = input_data.filter_data(plot)
- if data is None:
- logging.error("No data.")
- return
- # Prepare the data for the plot
- y_tmp_vals = dict()
- y_tags = dict()
- for job in data:
- for build in job:
- for test in build:
- try:
- logging.debug("test['latency']: {0}\n".
- format(test["latency"]))
- except ValueError as err:
- logging.warning(repr(err))
- if y_tmp_vals.get(test["parent"], None) is None:
- y_tmp_vals[test["parent"]] = [
- list(), # direction1, min
- list(), # direction1, avg
- list(), # direction1, max
- list(), # direction2, min
- list(), # direction2, avg
- list() # direction2, max
- ]
- y_tags[test["parent"]] = test.get("tags", None)
- try:
- if test["type"] in ("NDRPDR", ):
- if "-pdr" in plot_title.lower():
- ttype = "PDR"
- elif "-ndr" in plot_title.lower():
- ttype = "NDR"
- else:
- logging.warning("Invalid test type: {0}".
- format(test["type"]))
- continue
- y_tmp_vals[test["parent"]][0].append(
- test["latency"][ttype]["direction1"]["min"])
- y_tmp_vals[test["parent"]][1].append(
- test["latency"][ttype]["direction1"]["avg"])
- y_tmp_vals[test["parent"]][2].append(
- test["latency"][ttype]["direction1"]["max"])
- y_tmp_vals[test["parent"]][3].append(
- test["latency"][ttype]["direction2"]["min"])
- y_tmp_vals[test["parent"]][4].append(
- test["latency"][ttype]["direction2"]["avg"])
- y_tmp_vals[test["parent"]][5].append(
- test["latency"][ttype]["direction2"]["max"])
+ if u"-pdr" in plot_title.lower():
+ ttype = u"PDR"
+ elif u"-ndr" in plot_title.lower():
+ ttype = u"NDR"
else:
- logging.warning("Invalid test type: {0}".
- format(test["type"]))
- continue
- except (KeyError, TypeError) as err:
- logging.warning(repr(err))
- logging.debug("y_tmp_vals: {0}\n".format(y_tmp_vals))
-
- # Sort the tests
- order = plot.get("sort", None)
- if order and y_tags:
- y_sorted = OrderedDict()
- y_tags_l = {s: [t.lower() for t in ts] for s, ts in y_tags.items()}
- for tag in order:
- logging.debug(tag)
- for suite, tags in y_tags_l.items():
- if "not " in tag:
- tag = tag.split(" ")[-1]
- if tag.lower() in tags:
continue
- else:
- if tag.lower() not in tags:
- continue
- try:
- y_sorted[suite] = y_tmp_vals.pop(suite)
- y_tags_l.pop(suite)
- logging.debug(suite)
- except KeyError as err:
- logging.error("Not found: {0}".format(repr(err)))
- finally:
- break
- else:
- y_sorted = y_tmp_vals
-
- logging.debug("y_sorted: {0}\n".format(y_sorted))
- x_vals = list()
- y_vals = list()
- y_mins = list()
- y_maxs = list()
- nr_of_samples = list()
- for key, val in y_sorted.items():
- name = re.sub(REGEX_NIC, "", key.replace('-ndrpdr', '').
- replace('2n1l-', ''))
- x_vals.append(name) # dir 1
- y_vals.append(mean(val[1]) if val[1] else None)
- y_mins.append(mean(val[0]) if val[0] else None)
- y_maxs.append(mean(val[2]) if val[2] else None)
- nr_of_samples.append(len(val[1]) if val[1] else 0)
- x_vals.append(name) # dir 2
- y_vals.append(mean(val[4]) if val[4] else None)
- y_mins.append(mean(val[3]) if val[3] else None)
- y_maxs.append(mean(val[5]) if val[5] else None)
- nr_of_samples.append(len(val[3]) if val[3] else 0)
- logging.debug("x_vals :{0}\n".format(x_vals))
- logging.debug("y_vals :{0}\n".format(y_vals))
- logging.debug("y_mins :{0}\n".format(y_mins))
- logging.debug("y_maxs :{0}\n".format(y_maxs))
- logging.debug("nr_of_samples :{0}\n".format(nr_of_samples))
- traces = list()
- annotations = list()
-
- for idx in range(len(x_vals)):
- if not bool(int(idx % 2)):
- direction = "West-East"
- else:
- direction = "East-West"
- hovertext = ("No. of Runs: {nr}<br>"
- "Test: {test}<br>"
- "Direction: {dir}<br>".format(test=x_vals[idx],
- dir=direction,
- nr=nr_of_samples[idx]))
- if isinstance(y_maxs[idx], float):
- hovertext += "Max: {max:.2f}uSec<br>".format(max=y_maxs[idx])
- if isinstance(y_vals[idx], float):
- hovertext += "Mean: {avg:.2f}uSec<br>".format(avg=y_vals[idx])
- if isinstance(y_mins[idx], float):
- hovertext += "Min: {min:.2f}uSec".format(min=y_mins[idx])
-
- if isinstance(y_maxs[idx], float) and isinstance(y_vals[idx], float):
- array = [y_maxs[idx] - y_vals[idx], ]
- else:
- array = [None, ]
- if isinstance(y_mins[idx], float) and isinstance(y_vals[idx], float):
- arrayminus = [y_vals[idx] - y_mins[idx], ]
- else:
- arrayminus = [None, ]
- logging.debug("y_vals[{1}] :{0}\n".format(y_vals[idx], idx))
- logging.debug("array :{0}\n".format(array))
- logging.debug("arrayminus :{0}\n".format(arrayminus))
- traces.append(plgo.Scatter(
- x=[idx, ],
- y=[y_vals[idx], ],
- name=x_vals[idx],
- legendgroup=x_vals[idx],
- showlegend=bool(int(idx % 2)),
- mode="markers",
- error_y=dict(
- type='data',
- symmetric=False,
- array=array,
- arrayminus=arrayminus,
- color=COLORS[int(idx / 2)]
- ),
- marker=dict(
- size=10,
- color=COLORS[int(idx / 2)],
- ),
- text=hovertext,
- hoverinfo="text",
- ))
- annotations.append(dict(
- x=idx,
- y=0,
- xref="x",
- yref="y",
- xanchor="center",
- yanchor="top",
- text="E-W" if bool(int(idx % 2)) else "W-E",
- font=dict(
- size=16,
- ),
- align="center",
- showarrow=False
- ))
-
- try:
- # Create plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Latency:</b> {0}".\
- format(layout["title"])
- layout["annotations"] = annotations
- plpl = plgo.Figure(data=traces, layout=layout)
-
- # Export Plot
- ploff.plot(plpl,
- show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
- except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(str(err).replace("\n", " ")))
- return
-
-
-def plot_throughput_speedup_analysis(plot, input_data):
- """Generate the plot(s) with algorithm:
- plot_throughput_speedup_analysis
- specified in the specification file.
-
- TODO: Remove when not needed.
-
- :param plot: Plot to generate.
- :param input_data: Data to process.
- :type plot: pandas.Series
- :type input_data: InputData
- """
-
- # Transform the data
- plot_title = plot.get("title", "")
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot_title))
- data = input_data.filter_data(plot)
- if data is None:
- logging.error("No data.")
- return
-
- y_vals = dict()
- y_tags = dict()
- for job in data:
- for build in job:
- for test in build:
- if y_vals.get(test["parent"], None) is None:
- y_vals[test["parent"]] = {"1": list(),
- "2": list(),
- "4": list()}
- y_tags[test["parent"]] = test.get("tags", None)
- try:
- if test["type"] in ("NDRPDR",):
- if "-pdr" in plot_title.lower():
- ttype = "PDR"
- elif "-ndr" in plot_title.lower():
- ttype = "NDR"
- else:
- continue
- if "1C" in test["tags"]:
- y_vals[test["parent"]]["1"]. \
- append(test["throughput"][ttype]["LOWER"])
- elif "2C" in test["tags"]:
- y_vals[test["parent"]]["2"]. \
- append(test["throughput"][ttype]["LOWER"])
- elif "4C" in test["tags"]:
- y_vals[test["parent"]]["4"]. \
- append(test["throughput"][ttype]["LOWER"])
+ if u"1C" in test[u"tags"]:
+ y_vals[test[u"parent"]][u"1"]. \
+ append(test[u"throughput"][ttype][u"LOWER"])
+ elif u"2C" in test[u"tags"]:
+ y_vals[test[u"parent"]][u"2"]. \
+ append(test[u"throughput"][ttype][u"LOWER"])
+ elif u"4C" in test[u"tags"]:
+ y_vals[test[u"parent"]][u"4"]. \
+ append(test[u"throughput"][ttype][u"LOWER"])
except (KeyError, TypeError):
pass
if not y_vals:
- logging.warning("No data for the plot '{}'".
- format(plot.get("title", "")))
+ logging.warning(f"No data for the plot {plot.get(u'title', u'')}")
return
y_1c_max = dict()
@@ -1397,112 +514,97 @@ def plot_throughput_speedup_analysis(plot, input_data):
for key, test_val in test_vals.items():
if test_val:
avg_val = sum(test_val) / len(test_val)
- y_vals[test_name][key] = (avg_val, len(test_val))
+ y_vals[test_name][key] = [avg_val, len(test_val)]
ideal = avg_val / (int(key) * 1000000.0)
if test_name not in y_1c_max or ideal > y_1c_max[test_name]:
y_1c_max[test_name] = ideal
- vals = dict()
+ vals = OrderedDict()
y_max = list()
nic_limit = 0
lnk_limit = 0
- pci_limit = plot["limits"]["pci"]["pci-g3-x8"]
+ pci_limit = plot[u"limits"][u"pci"][u"pci-g3-x8"]
for test_name, test_vals in y_vals.items():
try:
- if test_vals["1"][1]:
- name = re.sub(REGEX_NIC, "", test_name.replace('-ndrpdr', '').
- replace('2n1l-', ''))
- vals[name] = dict()
- y_val_1 = test_vals["1"][0] / 1000000.0
- y_val_2 = test_vals["2"][0] / 1000000.0 if test_vals["2"][0] \
+ if test_vals[u"1"][1]:
+ name = re.sub(
+ REGEX_NIC,
+ u"",
+ test_name.replace(u'-ndrpdr', u'').replace(u'2n1l-', u'')
+ )
+ vals[name] = OrderedDict()
+ y_val_1 = test_vals[u"1"][0] / 1000000.0
+ y_val_2 = test_vals[u"2"][0] / 1000000.0 if test_vals[u"2"][0] \
else None
- y_val_4 = test_vals["4"][0] / 1000000.0 if test_vals["4"][0] \
+ y_val_4 = test_vals[u"4"][0] / 1000000.0 if test_vals[u"4"][0] \
else None
- vals[name]["val"] = [y_val_1, y_val_2, y_val_4]
- vals[name]["rel"] = [1.0, None, None]
- vals[name]["ideal"] = [y_1c_max[test_name],
- y_1c_max[test_name] * 2,
- y_1c_max[test_name] * 4]
- vals[name]["diff"] = [(y_val_1 - y_1c_max[test_name]) * 100 /
- y_val_1, None, None]
- vals[name]["count"] = [test_vals["1"][1],
- test_vals["2"][1],
- test_vals["4"][1]]
+ vals[name][u"val"] = [y_val_1, y_val_2, y_val_4]
+ vals[name][u"rel"] = [1.0, None, None]
+ vals[name][u"ideal"] = [
+ y_1c_max[test_name],
+ y_1c_max[test_name] * 2,
+ y_1c_max[test_name] * 4
+ ]
+ vals[name][u"diff"] = [
+ (y_val_1 - y_1c_max[test_name]) * 100 / y_val_1, None, None
+ ]
+ vals[name][u"count"] = [
+ test_vals[u"1"][1],
+ test_vals[u"2"][1],
+ test_vals[u"4"][1]
+ ]
try:
- # val_max = max(max(vals[name]["val"], vals[name]["ideal"]))
- val_max = max(vals[name]["val"])
+ val_max = max(vals[name][u"val"])
except ValueError as err:
- logging.error(err)
+ logging.error(repr(err))
continue
if val_max:
- # y_max.append(int((val_max / 10) + 1) * 10)
y_max.append(val_max)
if y_val_2:
- vals[name]["rel"][1] = round(y_val_2 / y_val_1, 2)
- vals[name]["diff"][1] = \
- (y_val_2 - vals[name]["ideal"][1]) * 100 / y_val_2
+ vals[name][u"rel"][1] = round(y_val_2 / y_val_1, 2)
+ vals[name][u"diff"][1] = \
+ (y_val_2 - vals[name][u"ideal"][1]) * 100 / y_val_2
if y_val_4:
- vals[name]["rel"][2] = round(y_val_4 / y_val_1, 2)
- vals[name]["diff"][2] = \
- (y_val_4 - vals[name]["ideal"][2]) * 100 / y_val_4
+ vals[name][u"rel"][2] = round(y_val_4 / y_val_1, 2)
+ vals[name][u"diff"][2] = \
+ (y_val_4 - vals[name][u"ideal"][2]) * 100 / y_val_4
except IndexError as err:
- logging.warning("No data for '{0}'".format(test_name))
+ logging.warning(f"No data for {test_name}")
logging.warning(repr(err))
# Limits:
- if "x520" in test_name:
- limit = plot["limits"]["nic"]["x520"]
- elif "x710" in test_name:
- limit = plot["limits"]["nic"]["x710"]
- elif "xxv710" in test_name:
- limit = plot["limits"]["nic"]["xxv710"]
- elif "xl710" in test_name:
- limit = plot["limits"]["nic"]["xl710"]
- elif "x553" in test_name:
- limit = plot["limits"]["nic"]["x553"]
+ if u"x520" in test_name:
+ limit = plot[u"limits"][u"nic"][u"x520"]
+ elif u"x710" in test_name:
+ limit = plot[u"limits"][u"nic"][u"x710"]
+ elif u"xxv710" in test_name:
+ limit = plot[u"limits"][u"nic"][u"xxv710"]
+ elif u"xl710" in test_name:
+ limit = plot[u"limits"][u"nic"][u"xl710"]
+ elif u"x553" in test_name:
+ limit = plot[u"limits"][u"nic"][u"x553"]
else:
limit = 0
if limit > nic_limit:
nic_limit = limit
- mul = 2 if "ge2p" in test_name else 1
- if "10ge" in test_name:
- limit = plot["limits"]["link"]["10ge"] * mul
- elif "25ge" in test_name:
- limit = plot["limits"]["link"]["25ge"] * mul
- elif "40ge" in test_name:
- limit = plot["limits"]["link"]["40ge"] * mul
- elif "100ge" in test_name:
- limit = plot["limits"]["link"]["100ge"] * mul
+ mul = 2 if u"ge2p" in test_name else 1
+ if u"10ge" in test_name:
+ limit = plot[u"limits"][u"link"][u"10ge"] * mul
+ elif u"25ge" in test_name:
+ limit = plot[u"limits"][u"link"][u"25ge"] * mul
+ elif u"40ge" in test_name:
+ limit = plot[u"limits"][u"link"][u"40ge"] * mul
+ elif u"100ge" in test_name:
+ limit = plot[u"limits"][u"link"][u"100ge"] * mul
else:
limit = 0
if limit > lnk_limit:
lnk_limit = limit
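# Illustrative sketch (hypothetical values; the real Mpps limits come from the
# "limits" section of the specification file): a 25GE port carrying 64B frames
# tops out at roughly 37.2 Mpps, doubled for a two-port ("ge2p") test.
link_bps = 25e9
frame_bits = (64 + 20) * 8           # 64B frame plus 20B preamble/IPG overhead
mpps = link_bps / frame_bits / 1e6   # ~37.2; "ge2p" tests would use 2x this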
- # Sort the tests
- order = plot.get("sort", None)
- if order and y_tags:
- y_sorted = OrderedDict()
- y_tags_l = {s: [t.lower() for t in ts] for s, ts in y_tags.items()}
- for tag in order:
- for test, tags in y_tags_l.items():
- if tag.lower() in tags:
- name = re.sub(REGEX_NIC, "",
- test.replace('-ndrpdr', '').
- replace('2n1l-', ''))
- try:
- y_sorted[name] = vals.pop(name)
- y_tags_l.pop(test)
- except KeyError as err:
- logging.error("Not found: {0}".format(err))
- finally:
- break
- else:
- y_sorted = vals
-
traces = list()
annotations = list()
x_vals = [1, 2, 4]
@@ -1514,35 +616,33 @@ def plot_throughput_speedup_analysis(plot, input_data):
logging.error(err)
return
nic_limit /= 1000000.0
- # if nic_limit < threshold:
traces.append(plgo.Scatter(
x=x_vals,
y=[nic_limit, ] * len(x_vals),
- name="NIC: {0:.2f}Mpps".format(nic_limit),
+ name=f"NIC: {nic_limit:.2f}Mpps",
showlegend=False,
- mode="lines",
+ mode=u"lines",
line=dict(
- dash="dot",
+ dash=u"dot",
color=COLORS[-1],
width=1),
- hoverinfo="none"
+ hoverinfo=u"none"
))
annotations.append(dict(
x=1,
y=nic_limit,
- xref="x",
- yref="y",
- xanchor="left",
- yanchor="bottom",
- text="NIC: {0:.2f}Mpps".format(nic_limit),
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"left",
+ yanchor=u"bottom",
+ text=f"NIC: {nic_limit:.2f}Mpps",
font=dict(
size=14,
color=COLORS[-1],
),
- align="left",
+ align=u"left",
showarrow=False
))
- # y_max.append(int((nic_limit / 10) + 1) * 10)
y_max.append(nic_limit)
lnk_limit /= 1000000.0
@@ -1550,142 +650,146 @@ def plot_throughput_speedup_analysis(plot, input_data):
traces.append(plgo.Scatter(
x=x_vals,
y=[lnk_limit, ] * len(x_vals),
- name="Link: {0:.2f}Mpps".format(lnk_limit),
+ name=f"Link: {lnk_limit:.2f}Mpps",
showlegend=False,
- mode="lines",
+ mode=u"lines",
line=dict(
- dash="dot",
+ dash=u"dot",
color=COLORS[-2],
width=1),
- hoverinfo="none"
+ hoverinfo=u"none"
))
annotations.append(dict(
x=1,
y=lnk_limit,
- xref="x",
- yref="y",
- xanchor="left",
- yanchor="bottom",
- text="Link: {0:.2f}Mpps".format(lnk_limit),
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"left",
+ yanchor=u"bottom",
+ text=f"Link: {lnk_limit:.2f}Mpps",
font=dict(
size=14,
color=COLORS[-2],
),
- align="left",
+ align=u"left",
showarrow=False
))
- # y_max.append(int((lnk_limit / 10) + 1) * 10)
y_max.append(lnk_limit)
pci_limit /= 1000000.0
if (pci_limit < threshold and
- (pci_limit < lnk_limit * 0.95 or lnk_limit > lnk_limit * 1.05)):
+                (pci_limit < lnk_limit * 0.95 or pci_limit > lnk_limit * 1.05)):
traces.append(plgo.Scatter(
x=x_vals,
y=[pci_limit, ] * len(x_vals),
- name="PCIe: {0:.2f}Mpps".format(pci_limit),
+ name=f"PCIe: {pci_limit:.2f}Mpps",
showlegend=False,
- mode="lines",
+ mode=u"lines",
line=dict(
- dash="dot",
+ dash=u"dot",
color=COLORS[-3],
width=1),
- hoverinfo="none"
+ hoverinfo=u"none"
))
annotations.append(dict(
x=1,
y=pci_limit,
- xref="x",
- yref="y",
- xanchor="left",
- yanchor="bottom",
- text="PCIe: {0:.2f}Mpps".format(pci_limit),
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"left",
+ yanchor=u"bottom",
+ text=f"PCIe: {pci_limit:.2f}Mpps",
font=dict(
size=14,
color=COLORS[-3],
),
- align="left",
+ align=u"left",
showarrow=False
))
- # y_max.append(int((pci_limit / 10) + 1) * 10)
y_max.append(pci_limit)
# Perfect and measured:
cidx = 0
- for name, val in y_sorted.iteritems():
+ for name, val in vals.items():
hovertext = list()
try:
- for idx in range(len(val["val"])):
+ for idx in range(len(val[u"val"])):
htext = ""
- if isinstance(val["val"][idx], float):
- htext += "No. of Runs: {1}<br>" \
- "Mean: {0:.2f}Mpps<br>".format(val["val"][idx],
- val["count"][idx])
- if isinstance(val["diff"][idx], float):
- htext += "Diff: {0:.0f}%<br>".format(round(val["diff"][idx]))
- if isinstance(val["rel"][idx], float):
- htext += "Speedup: {0:.2f}".format(val["rel"][idx])
+ if isinstance(val[u"val"][idx], float):
+ htext += (
+ f"No. of Runs: {val[u'count'][idx]}<br>"
+ f"Mean: {val[u'val'][idx]:.2f}Mpps<br>"
+ )
+ if isinstance(val[u"diff"][idx], float):
+ htext += f"Diff: {round(val[u'diff'][idx]):.0f}%<br>"
+ if isinstance(val[u"rel"][idx], float):
+ htext += f"Speedup: {val[u'rel'][idx]:.2f}"
hovertext.append(htext)
- traces.append(plgo.Scatter(x=x_vals,
- y=val["val"],
- name=name,
- legendgroup=name,
- mode="lines+markers",
- line=dict(
- color=COLORS[cidx],
- width=2),
- marker=dict(
- symbol="circle",
- size=10
- ),
- text=hovertext,
- hoverinfo="text+name"
- ))
- traces.append(plgo.Scatter(x=x_vals,
- y=val["ideal"],
- name="{0} perfect".format(name),
- legendgroup=name,
- showlegend=False,
- mode="lines",
- line=dict(
- color=COLORS[cidx],
- width=2,
- dash="dash"),
- text=["Perfect: {0:.2f}Mpps".format(y)
- for y in val["ideal"]],
- hoverinfo="text"
- ))
+ traces.append(
+ plgo.Scatter(
+ x=x_vals,
+ y=val[u"val"],
+ name=name,
+ legendgroup=name,
+ mode=u"lines+markers",
+ line=dict(
+ color=COLORS[cidx],
+ width=2),
+ marker=dict(
+ symbol=u"circle",
+ size=10
+ ),
+ text=hovertext,
+ hoverinfo=u"text+name"
+ )
+ )
+ traces.append(
+ plgo.Scatter(
+ x=x_vals,
+ y=val[u"ideal"],
+ name=f"{name} perfect",
+ legendgroup=name,
+ showlegend=False,
+ mode=u"lines",
+ line=dict(
+ color=COLORS[cidx],
+ width=2,
+ dash=u"dash"),
+ text=[f"Perfect: {y:.2f}Mpps" for y in val[u"ideal"]],
+ hoverinfo=u"text"
+ )
+ )
cidx += 1
except (IndexError, ValueError, KeyError) as err:
- logging.warning("No data for '{0}'".format(name))
- logging.warning(repr(err))
+ logging.warning(f"No data for {name}\n{repr(err)}")
try:
# Create plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- layout = deepcopy(plot["layout"])
- if layout.get("title", None):
- layout["title"] = "<b>Speedup Multi-core:</b> {0}". \
- format(layout["title"])
- # layout["yaxis"]["range"] = [0, int((max(y_max) / 10) + 1) * 10]
- layout["yaxis"]["range"] = [0, int(max(y_max) * 1.1)]
- layout["annotations"].extend(annotations)
+ file_type = plot.get(u"output-file-type", u".html")
+ logging.info(f" Writing file {plot[u'output-file']}{file_type}.")
+ layout = deepcopy(plot[u"layout"])
+ if layout.get(u"title", None):
+ layout[u"title"] = f"<b>Speedup Multi-core:</b> {layout[u'title']}"
+ layout[u"yaxis"][u"range"] = [0, int(max(y_max) * 1.1)]
+ layout[u"annotations"].extend(annotations)
plpl = plgo.Figure(data=traces, layout=layout)
# Export Plot
- ploff.plot(plpl,
- show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
+ ploff.plot(
+ plpl,
+ show_link=False,
+ auto_open=False,
+ filename=f"{plot[u'output-file']}{file_type}"
+ )
except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(str(err).replace("\n", " ")))
+ logging.error(
+ f" Finished with error: {repr(err)}".replace(u"\n", u" ")
+ )
return
-def plot_http_server_performance_box(plot, input_data):
- """Generate the plot(s) with algorithm: plot_http_server_performance_box
+def plot_http_server_perf_box(plot, input_data):
+ """Generate the plot(s) with algorithm: plot_http_server_perf_box
specified in the specification file.
:param plot: Plot to generate.
@@ -1695,11 +799,13 @@ def plot_http_server_performance_box(plot, input_data):
"""
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {plot.get(u'type', u'')} "
+ f"{plot.get(u'title', u'')}."
+ )
data = input_data.filter_data(plot)
if data is None:
- logging.error("No data.")
+ logging.error(u"No data.")
return
# Prepare the data for the plot
@@ -1707,12 +813,12 @@ def plot_http_server_performance_box(plot, input_data):
for job in data:
for build in job:
for test in build:
- if y_vals.get(test["name"], None) is None:
- y_vals[test["name"]] = list()
+ if y_vals.get(test[u"name"], None) is None:
+ y_vals[test[u"name"]] = list()
try:
- y_vals[test["name"]].append(test["result"])
+ y_vals[test[u"name"]].append(test[u"result"])
except (KeyError, TypeError):
- y_vals[test["name"]].append(None)
+ y_vals[test[u"name"]].append(None)
# Add None to the lists with missing data
max_len = 0
@@ -1721,53 +827,59 @@ def plot_http_server_performance_box(plot, input_data):
if len(val) > max_len:
max_len = len(val)
nr_of_samples.append(len(val))
- for key, val in y_vals.items():
+ for val in y_vals.values():
if len(val) < max_len:
val.extend([None for _ in range(max_len - len(val))])
# Add plot traces
traces = list()
- df = pd.DataFrame(y_vals)
- df.head()
- for i, col in enumerate(df.columns):
- name = "{nr}. ({samples:02d} run{plural}) {name}".\
- format(nr=(i + 1),
- samples=nr_of_samples[i],
- plural='s' if nr_of_samples[i] > 1 else '',
- name=col.lower().replace('-ndrpdr', ''))
+ df_y = pd.DataFrame(y_vals)
+ df_y.head()
+ for i, col in enumerate(df_y.columns):
+ name = \
+ f"{i + 1}. " \
+ f"({nr_of_samples[i]:02d} " \
+ f"run{u's' if nr_of_samples[i] > 1 else u''}) " \
+ f"{col.lower().replace(u'-ndrpdr', u'')}"
if len(name) > 50:
- name_lst = name.split('-')
- name = ""
+ name_lst = name.split(u'-')
+ name = u""
split_name = True
for segment in name_lst:
if (len(name) + len(segment) + 1) > 50 and split_name:
- name += "<br> "
+ name += u"<br> "
split_name = False
- name += segment + '-'
+ name += segment + u'-'
name = name[:-1]
- traces.append(plgo.Box(x=[str(i + 1) + '.'] * len(df[col]),
- y=df[col],
+ traces.append(plgo.Box(x=[str(i + 1) + u'.'] * len(df_y[col]),
+ y=df_y[col],
name=name,
- **plot["traces"]))
+ **plot[u"traces"]))
try:
# Create plot
- plpl = plgo.Figure(data=traces, layout=plot["layout"])
+ plpl = plgo.Figure(data=traces, layout=plot[u"layout"])
# Export Plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
+ logging.info(
+ f" Writing file {plot[u'output-file']}"
+ f"{plot[u'output-file-type']}."
+ )
+ ploff.plot(
+ plpl,
+ show_link=False,
+ auto_open=False,
+ filename=f"{plot[u'output-file']}{plot[u'output-file-type']}"
+ )
except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(str(err).replace("\n", " ")))
+ logging.error(
+ f" Finished with error: {repr(err)}".replace(u"\n", u" ")
+ )
return
-def plot_service_density_heatmap(plot, input_data):
- """Generate the plot(s) with algorithm: plot_service_density_heatmap
+def plot_nf_heatmap(plot, input_data):
+ """Generate the plot(s) with algorithm: plot_nf_heatmap
specified in the specification file.
:param plot: Plot to generate.
@@ -1776,729 +888,320 @@ def plot_service_density_heatmap(plot, input_data):
:type input_data: InputData
"""
- REGEX_CN = re.compile(r'^(\d*)R(\d*)C$')
- REGEX_TEST_NAME = re.compile(r'^.*-(\d+ch|\d+pl)-'
+ regex_cn = re.compile(r'^(\d*)R(\d*)C$')
+ regex_test_name = re.compile(r'^.*-(\d+ch|\d+pl)-'
r'(\d+mif|\d+vh)-'
r'(\d+vm\d+t|\d+dcr\d+t).*$')
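# Example of what the two expressions are meant to match (hypothetical values):
#   regex_cn:        tag u"4R2C" -> groups (u"4", u"2"), used as chain and node
#   regex_test_name: u"...-4ch-8mif-2dcr1t-..." -> (u"4ch", u"8mif", u"2dcr1t"),
#                    later joined into the hover label u"4ch-8mif-2dcr1t"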
-
- txt_chains = list()
- txt_nodes = list()
vals = dict()
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {plot.get(u'type', u'')} "
+ f"{plot.get(u'title', u'')}."
+ )
data = input_data.filter_data(plot, continue_on_error=True)
if data is None or data.empty:
- logging.error("No data.")
+ logging.error(u"No data.")
return
for job in data:
for build in job:
for test in build:
- for tag in test['tags']:
- groups = re.search(REGEX_CN, tag)
+ for tag in test[u"tags"]:
+ groups = re.search(regex_cn, tag)
if groups:
- c = str(groups.group(1))
- n = str(groups.group(2))
+ chain = str(groups.group(1))
+ node = str(groups.group(2))
break
else:
continue
- groups = re.search(REGEX_TEST_NAME, test["name"])
+ groups = re.search(regex_test_name, test[u"name"])
if groups and len(groups.groups()) == 3:
- hover_name = "{chain}-{vhost}-{vm}".format(
- chain=str(groups.group(1)),
- vhost=str(groups.group(2)),
- vm=str(groups.group(3)))
+ hover_name = (
+ f"{str(groups.group(1))}-"
+ f"{str(groups.group(2))}-"
+ f"{str(groups.group(3))}"
+ )
else:
- hover_name = ""
- if vals.get(c, None) is None:
- vals[c] = dict()
- if vals[c].get(n, None) is None:
- vals[c][n] = dict(name=hover_name,
- vals=list(),
- nr=None,
- mean=None,
- stdev=None)
+ hover_name = u""
+ if vals.get(chain, None) is None:
+ vals[chain] = dict()
+ if vals[chain].get(node, None) is None:
+ vals[chain][node] = dict(
+ name=hover_name,
+ vals=list(),
+ nr=None,
+ mean=None,
+ stdev=None
+ )
try:
- if plot["include-tests"] == "MRR":
- result = test["result"]["receive-rate"] # .avg
- elif plot["include-tests"] == "PDR":
- result = test["throughput"]["PDR"]["LOWER"]
- elif plot["include-tests"] == "NDR":
- result = test["throughput"]["NDR"]["LOWER"]
+ if plot[u"include-tests"] == u"MRR":
+ result = test[u"result"][u"receive-rate"]
+ elif plot[u"include-tests"] == u"PDR":
+ result = test[u"throughput"][u"PDR"][u"LOWER"]
+ elif plot[u"include-tests"] == u"NDR":
+ result = test[u"throughput"][u"NDR"][u"LOWER"]
else:
result = None
except TypeError:
result = None
if result:
- vals[c][n]["vals"].append(result)
+ vals[chain][node][u"vals"].append(result)
if not vals:
- logging.error("No data.")
+ logging.error(u"No data.")
return
- for key_c in vals.keys():
+ txt_chains = list()
+ txt_nodes = list()
+ for key_c in vals:
txt_chains.append(key_c)
for key_n in vals[key_c].keys():
txt_nodes.append(key_n)
- if vals[key_c][key_n]["vals"]:
- vals[key_c][key_n]["nr"] = len(vals[key_c][key_n]["vals"])
- vals[key_c][key_n]["mean"] = \
- round(mean(vals[key_c][key_n]["vals"]) / 1000000, 1)
- vals[key_c][key_n]["stdev"] = \
- round(stdev(vals[key_c][key_n]["vals"]) / 1000000, 1)
+ if vals[key_c][key_n][u"vals"]:
+ vals[key_c][key_n][u"nr"] = len(vals[key_c][key_n][u"vals"])
+ vals[key_c][key_n][u"mean"] = \
+ round(mean(vals[key_c][key_n][u"vals"]) / 1000000, 1)
+ vals[key_c][key_n][u"stdev"] = \
+ round(stdev(vals[key_c][key_n][u"vals"]) / 1000000, 1)
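# Sketch: each heat-map cell stores its statistics in Mpps rounded to one
# decimal place, e.g. mean([12.14e6, 12.26e6]) / 1000000 -> 12.2, using the
# mean()/stdev() helpers now provided by pal_utils.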
txt_nodes = list(set(txt_nodes))
- txt_chains = sorted(txt_chains, key=lambda chain: int(chain))
- txt_nodes = sorted(txt_nodes, key=lambda node: int(node))
+ def sort_by_int(value):
+ """Makes possible to sort a list of strings which represent integers.
+
+ :param value: Integer as a string.
+ :type value: str
+ :returns: Integer representation of input parameter 'value'.
+ :rtype: int
+ """
+ return int(value)
+
+ txt_chains = sorted(txt_chains, key=sort_by_int)
+ txt_nodes = sorted(txt_nodes, key=sort_by_int)
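# Usage sketch: the key sorts numerically instead of lexicographically, e.g.
#   sorted([u"10", u"2", u"1"], key=sort_by_int) -> [u"1", u"2", u"10"]
# while a plain sort of the same strings would give [u"1", u"10", u"2"].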
chains = [i + 1 for i in range(len(txt_chains))]
nodes = [i + 1 for i in range(len(txt_nodes))]
data = [list() for _ in range(len(chains))]
- for c in chains:
- for n in nodes:
+ for chain in chains:
+ for node in nodes:
try:
- val = vals[txt_chains[c - 1]][txt_nodes[n - 1]]["mean"]
+ val = vals[txt_chains[chain - 1]][txt_nodes[node - 1]][u"mean"]
except (KeyError, IndexError):
val = None
- data[c - 1].append(val)
+ data[chain - 1].append(val)
- # Colorscales:
- my_green = [[0.0, 'rgb(235, 249, 242)'],
- [1.0, 'rgb(45, 134, 89)']]
+ # Color scales:
+ my_green = [[0.0, u"rgb(235, 249, 242)"],
+ [1.0, u"rgb(45, 134, 89)"]]
- my_blue = [[0.0, 'rgb(236, 242, 248)'],
- [1.0, 'rgb(57, 115, 172)']]
+ my_blue = [[0.0, u"rgb(236, 242, 248)"],
+ [1.0, u"rgb(57, 115, 172)"]]
- my_grey = [[0.0, 'rgb(230, 230, 230)'],
- [1.0, 'rgb(102, 102, 102)']]
+ my_grey = [[0.0, u"rgb(230, 230, 230)"],
+ [1.0, u"rgb(102, 102, 102)"]]
hovertext = list()
annotations = list()
- text = ("Test: {name}<br>"
- "Runs: {nr}<br>"
- "Thput: {val}<br>"
- "StDev: {stdev}")
+ text = (u"Test: {name}<br>"
+ u"Runs: {nr}<br>"
+ u"Thput: {val}<br>"
+ u"StDev: {stdev}")
- for c in range(len(txt_chains)):
+ for chain, _ in enumerate(txt_chains):
hover_line = list()
- for n in range(len(txt_nodes)):
- if data[c][n] is not None:
- annotations.append(dict(
- x=n+1,
- y=c+1,
- xref="x",
- yref="y",
- xanchor="center",
- yanchor="middle",
- text=str(data[c][n]),
- font=dict(
- size=14,
- ),
- align="center",
- showarrow=False
- ))
+ for node, _ in enumerate(txt_nodes):
+ if data[chain][node] is not None:
+ annotations.append(
+ dict(
+ x=node+1,
+ y=chain+1,
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"center",
+ yanchor=u"middle",
+ text=str(data[chain][node]),
+ font=dict(
+ size=14,
+ ),
+ align=u"center",
+ showarrow=False
+ )
+ )
hover_line.append(text.format(
- name=vals[txt_chains[c]][txt_nodes[n]]["name"],
- nr=vals[txt_chains[c]][txt_nodes[n]]["nr"],
- val=data[c][n],
- stdev=vals[txt_chains[c]][txt_nodes[n]]["stdev"]))
+ name=vals[txt_chains[chain]][txt_nodes[node]][u"name"],
+ nr=vals[txt_chains[chain]][txt_nodes[node]][u"nr"],
+ val=data[chain][node],
+ stdev=vals[txt_chains[chain]][txt_nodes[node]][u"stdev"]))
hovertext.append(hover_line)
traces = [
- plgo.Heatmap(x=nodes,
- y=chains,
- z=data,
- colorbar=dict(
- title=plot.get("z-axis", ""),
- titleside="right",
- titlefont=dict(
- size=16
- ),
- tickfont=dict(
- size=16,
- ),
- tickformat=".1f",
- yanchor="bottom",
- y=-0.02,
- len=0.925,
- ),
- showscale=True,
- colorscale=my_green,
- text=hovertext,
- hoverinfo="text")
+ plgo.Heatmap(
+ x=nodes,
+ y=chains,
+ z=data,
+ colorbar=dict(
+ title=plot.get(u"z-axis", u""),
+ titleside=u"right",
+ titlefont=dict(
+ size=16
+ ),
+ tickfont=dict(
+ size=16,
+ ),
+ tickformat=u".1f",
+ yanchor=u"bottom",
+ y=-0.02,
+ len=0.925,
+ ),
+ showscale=True,
+ colorscale=my_green,
+ text=hovertext,
+ hoverinfo=u"text"
+ )
]
for idx, item in enumerate(txt_nodes):
# X-axis, numbers:
- annotations.append(dict(
- x=idx+1,
- y=0.05,
- xref="x",
- yref="y",
- xanchor="center",
- yanchor="top",
- text=item,
- font=dict(
- size=16,
- ),
- align="center",
- showarrow=False
- ))
- for idx, item in enumerate(txt_chains):
- # Y-axis, numbers:
- annotations.append(dict(
- x=0.35,
- y=idx+1,
- xref="x",
- yref="y",
- xanchor="right",
- yanchor="middle",
- text=item,
- font=dict(
- size=16,
- ),
- align="center",
- showarrow=False
- ))
- # X-axis, title:
- annotations.append(dict(
- x=0.55,
- y=-0.15,
- xref="paper",
- yref="y",
- xanchor="center",
- yanchor="bottom",
- text=plot.get("x-axis", ""),
- font=dict(
- size=16,
- ),
- align="center",
- showarrow=False
- ))
- # Y-axis, title:
- annotations.append(dict(
- x=-0.1,
- y=0.5,
- xref="x",
- yref="paper",
- xanchor="center",
- yanchor="middle",
- text=plot.get("y-axis", ""),
- font=dict(
- size=16,
- ),
- align="center",
- textangle=270,
- showarrow=False
- ))
- updatemenus = list([
- dict(
- x=1.0,
- y=0.0,
- xanchor='right',
- yanchor='bottom',
- direction='up',
- buttons=list([
- dict(
- args=[{"colorscale": [my_green, ], "reversescale": False}],
- label="Green",
- method="update"
- ),
- dict(
- args=[{"colorscale": [my_blue, ], "reversescale": False}],
- label="Blue",
- method="update"
+ annotations.append(
+ dict(
+ x=idx+1,
+ y=0.05,
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"center",
+ yanchor=u"top",
+ text=item,
+ font=dict(
+ size=16,
),
- dict(
- args=[{"colorscale": [my_grey, ], "reversescale": False}],
- label="Grey",
- method="update"
- )
- ])
+ align=u"center",
+ showarrow=False
+ )
)
- ])
-
- try:
- layout = deepcopy(plot["layout"])
- except KeyError as err:
- logging.error("Finished with error: No layout defined")
- logging.error(repr(err))
- return
-
- layout["annotations"] = annotations
- layout['updatemenus'] = updatemenus
-
- try:
- # Create plot
- plpl = plgo.Figure(data=traces, layout=layout)
-
- # Export Plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
- except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(str(err).replace("\n", " ")))
- return
-
-
-def plot_service_density_heatmap_compare(plot, input_data):
- """Generate the plot(s) with algorithm: plot_service_density_heatmap_compare
- specified in the specification file.
-
- :param plot: Plot to generate.
- :param input_data: Data to process.
- :type plot: pandas.Series
- :type input_data: InputData
- """
-
- REGEX_CN = re.compile(r'^(\d*)R(\d*)C$')
- REGEX_TEST_NAME = re.compile(r'^.*-(\d+ch|\d+pl)-'
- r'(\d+mif|\d+vh)-'
- r'(\d+vm\d+t|\d+dcr\d+t).*$')
- REGEX_THREADS = re.compile(r'^(\d+)(VM|DCR)(\d+)T$')
-
- txt_chains = list()
- txt_nodes = list()
- vals = dict()
-
- # Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(plot.get("type", ""), plot.get("title", "")))
- data = input_data.filter_data(plot, continue_on_error=True)
- if data is None or data.empty:
- logging.error("No data.")
- return
-
- for job in data:
- for build in job:
- for test in build:
- for tag in test['tags']:
- groups = re.search(REGEX_CN, tag)
- if groups:
- c = str(groups.group(1))
- n = str(groups.group(2))
- break
- else:
- continue
- groups = re.search(REGEX_TEST_NAME, test["name"])
- if groups and len(groups.groups()) == 3:
- hover_name = "{chain}-{vhost}-{vm}".format(
- chain=str(groups.group(1)),
- vhost=str(groups.group(2)),
- vm=str(groups.group(3)))
- else:
- hover_name = ""
- if vals.get(c, None) is None:
- vals[c] = dict()
- if vals[c].get(n, None) is None:
- vals[c][n] = dict(name=hover_name,
- vals_r=list(),
- vals_c=list(),
- nr_r=None,
- nr_c=None,
- mean_r=None,
- mean_c=None,
- stdev_r=None,
- stdev_c=None)
- try:
- if plot["include-tests"] == "MRR":
- result = test["result"]["receive-rate"] # .avg
- elif plot["include-tests"] == "PDR":
- result = test["throughput"]["PDR"]["LOWER"]
- elif plot["include-tests"] == "NDR":
- result = test["throughput"]["NDR"]["LOWER"]
- else:
- result = None
- except TypeError:
- result = None
-
- if result:
- for tag in test['tags']:
- groups = re.search(REGEX_THREADS, tag)
- if groups and len(groups.groups()) == 3:
- if str(groups.group(3)) == \
- plot["reference"]["include"]:
- vals[c][n]["vals_r"].append(result)
- elif str(groups.group(3)) == \
- plot["compare"]["include"]:
- vals[c][n]["vals_c"].append(result)
- break
- if not vals:
- logging.error("No data.")
- return
-
- for key_c in vals.keys():
- txt_chains.append(key_c)
- for key_n in vals[key_c].keys():
- txt_nodes.append(key_n)
- if vals[key_c][key_n]["vals_r"]:
- vals[key_c][key_n]["nr_r"] = len(vals[key_c][key_n]["vals_r"])
- vals[key_c][key_n]["mean_r"] = \
- mean(vals[key_c][key_n]["vals_r"])
- vals[key_c][key_n]["stdev_r"] = \
- round(stdev(vals[key_c][key_n]["vals_r"]) / 1000000, 1)
- if vals[key_c][key_n]["vals_c"]:
- vals[key_c][key_n]["nr_c"] = len(vals[key_c][key_n]["vals_c"])
- vals[key_c][key_n]["mean_c"] = \
- mean(vals[key_c][key_n]["vals_c"])
- vals[key_c][key_n]["stdev_c"] = \
- round(stdev(vals[key_c][key_n]["vals_c"]) / 1000000, 1)
-
- txt_nodes = list(set(txt_nodes))
-
- txt_chains = sorted(txt_chains, key=lambda chain: int(chain))
- txt_nodes = sorted(txt_nodes, key=lambda node: int(node))
-
- chains = [i + 1 for i in range(len(txt_chains))]
- nodes = [i + 1 for i in range(len(txt_nodes))]
-
- data_r = [list() for _ in range(len(chains))]
- data_c = [list() for _ in range(len(chains))]
- diff = [list() for _ in range(len(chains))]
- for c in chains:
- for n in nodes:
- try:
- val_r = vals[txt_chains[c - 1]][txt_nodes[n - 1]]["mean_r"]
- except (KeyError, IndexError):
- val_r = None
- try:
- val_c = vals[txt_chains[c - 1]][txt_nodes[n - 1]]["mean_c"]
- except (KeyError, IndexError):
- val_c = None
- if val_c is not None and val_r:
- val_d = (val_c - val_r) * 100 / val_r
- else:
- val_d = None
-
- if val_r is not None:
- val_r = round(val_r / 1000000, 1)
- data_r[c - 1].append(val_r)
- if val_c is not None:
- val_c = round(val_c / 1000000, 1)
- data_c[c - 1].append(val_c)
- if val_d is not None:
- val_d = int(round(val_d, 0))
- diff[c - 1].append(val_d)
-
- # Colorscales:
- my_green = [[0.0, 'rgb(235, 249, 242)'],
- [1.0, 'rgb(45, 134, 89)']]
-
- my_blue = [[0.0, 'rgb(236, 242, 248)'],
- [1.0, 'rgb(57, 115, 172)']]
-
- my_grey = [[0.0, 'rgb(230, 230, 230)'],
- [1.0, 'rgb(102, 102, 102)']]
-
- hovertext = list()
-
- annotations = list()
- annotations_r = list()
- annotations_c = list()
- annotations_diff = list()
-
- text = ("Test: {name}"
- "<br>{title_r}: {text_r}"
- "<br>{title_c}: {text_c}{text_diff}")
- text_r = "Thput: {val_r}; StDev: {stdev_r}; Runs: {nr_r}"
- text_c = "Thput: {val_c}; StDev: {stdev_c}; Runs: {nr_c}"
- text_diff = "<br>Relative Difference {title_c} vs. {title_r}: {diff}%"
-
- for c in range(len(txt_chains)):
- hover_line = list()
- for n in range(len(txt_nodes)):
- point = dict(
- x=n + 1,
- y=c + 1,
- xref="x",
- yref="y",
- xanchor="center",
- yanchor="middle",
- text="",
+ for idx, item in enumerate(txt_chains):
+ # Y-axis, numbers:
+ annotations.append(
+ dict(
+ x=0.35,
+ y=idx+1,
+ xref=u"x",
+ yref=u"y",
+ xanchor=u"right",
+ yanchor=u"middle",
+ text=item,
font=dict(
- size=14,
+ size=16,
),
- align="center",
+ align=u"center",
showarrow=False
)
-
- point_text_r = "Not present"
- point_text_c = "Not present"
- point_text_diff = ""
- try:
- point_r = data_r[c][n]
- if point_r is not None:
- point_text_r = text_r.format(
- val_r=point_r,
- stdev_r=vals[txt_chains[c]][txt_nodes[n]]["stdev_r"],
- nr_r=vals[txt_chains[c]][txt_nodes[n]]["nr_r"])
- except KeyError:
- point_r = None
- point["text"] = "" if point_r is None else point_r
- annotations_r.append(deepcopy(point))
-
- try:
- point_c = data_c[c][n]
- if point_c is not None:
- point_text_c = text_c.format(
- val_c=point_c,
- stdev_c=vals[txt_chains[c]][txt_nodes[n]]["stdev_c"],
- nr_c=vals[txt_chains[c]][txt_nodes[n]]["nr_c"])
- except KeyError:
- point_c = None
- point["text"] = "" if point_c is None else point_c
- annotations_c.append(deepcopy(point))
-
- try:
- point_d = diff[c][n]
- if point_d is not None:
- point_text_diff = text_diff.format(
- title_r=plot["reference"]["name"],
- title_c=plot["compare"]["name"],
- diff=point_d)
- except KeyError:
- point_d = None
- point["text"] = "" if point_d is None else point_d
- annotations_diff.append(deepcopy(point))
-
- try:
- name = vals[txt_chains[c]][txt_nodes[n]]["name"]
- except KeyError:
- continue
-
- hover_line.append(text.format(
- name=name,
- title_r=plot["reference"]["name"],
- text_r=point_text_r,
- title_c=plot["compare"]["name"],
- text_c=point_text_c,
- text_diff=point_text_diff
- ))
-
- hovertext.append(hover_line)
-
- traces = [
- plgo.Heatmap(x=nodes,
- y=chains,
- z=data_r,
- visible=True,
- colorbar=dict(
- title=plot.get("z-axis", ""),
- titleside="right",
- titlefont=dict(
- size=16
- ),
- tickfont=dict(
- size=16,
- ),
- tickformat=".1f",
- yanchor="bottom",
- y=-0.02,
- len=0.925,
- ),
- showscale=True,
- colorscale=my_green,
- reversescale=False,
- text=hovertext,
- hoverinfo="text"),
- plgo.Heatmap(x=nodes,
- y=chains,
- z=data_c,
- visible=False,
- colorbar=dict(
- title=plot.get("z-axis", ""),
- titleside="right",
- titlefont=dict(
- size=16
- ),
- tickfont=dict(
- size=16,
- ),
- tickformat=".1f",
- yanchor="bottom",
- y=-0.02,
- len=0.925,
- ),
- showscale=True,
- colorscale=my_blue,
- reversescale=False,
- text=hovertext,
- hoverinfo="text"),
- plgo.Heatmap(x=nodes,
- y=chains,
- z=diff,
- name="Diff",
- visible=False,
- colorbar=dict(
- title="Relative Difference {name_c} vs. {name_r} [%]".
- format(name_c=plot["compare"]["name"],
- name_r=plot["reference"]["name"]),
- titleside="right",
- titlefont=dict(
- size=16
- ),
- tickfont=dict(
- size=16,
- ),
- tickformat=".1f",
- yanchor="bottom",
- y=-0.02,
- len=0.925,
- ),
- showscale=True,
- colorscale=my_grey,
- reversescale=False,
- text=hovertext,
- hoverinfo="text")
- ]
-
- for idx, item in enumerate(txt_nodes):
- # X-axis, numbers:
- annotations.append(dict(
- x=idx+1,
- y=0.05,
- xref="x",
- yref="y",
- xanchor="center",
- yanchor="top",
- text=item,
+ )
+ # X-axis, title:
+ annotations.append(
+ dict(
+ x=0.55,
+ y=-0.15,
+ xref=u"paper",
+ yref=u"y",
+ xanchor=u"center",
+ yanchor=u"bottom",
+ text=plot.get(u"x-axis", u""),
font=dict(
size=16,
),
- align="center",
+ align=u"center",
showarrow=False
- ))
- for idx, item in enumerate(txt_chains):
- # Y-axis, numbers:
- annotations.append(dict(
- x=0.35,
- y=idx+1,
- xref="x",
- yref="y",
- xanchor="right",
- yanchor="middle",
- text=item,
+ )
+ )
+ # Y-axis, title:
+ annotations.append(
+ dict(
+ x=-0.1,
+ y=0.5,
+ xref=u"x",
+ yref=u"paper",
+ xanchor=u"center",
+ yanchor=u"middle",
+ text=plot.get(u"y-axis", u""),
font=dict(
size=16,
),
- align="center",
+ align=u"center",
+ textangle=270,
showarrow=False
- ))
- # X-axis, title:
- annotations.append(dict(
- x=0.55,
- y=-0.15,
- xref="paper",
- yref="y",
- xanchor="center",
- yanchor="bottom",
- text=plot.get("x-axis", ""),
- font=dict(
- size=16,
- ),
- align="center",
- showarrow=False
- ))
- # Y-axis, title:
- annotations.append(dict(
- x=-0.1,
- y=0.5,
- xref="x",
- yref="paper",
- xanchor="center",
- yanchor="middle",
- text=plot.get("y-axis", ""),
- font=dict(
- size=16,
- ),
- align="center",
- textangle=270,
- showarrow=False
- ))
+ )
+ )
updatemenus = list([
dict(
- active=0,
x=1.0,
y=0.0,
- xanchor='right',
- yanchor='bottom',
- direction='up',
+ xanchor=u"right",
+ yanchor=u"bottom",
+ direction=u"up",
buttons=list([
dict(
- label=plot["reference"]["name"],
- method="update",
args=[
{
- "visible": [True, False, False]
- },
- {
- "colorscale": [my_green, ],
- "reversescale": False,
- "annotations": annotations + annotations_r,
- },
- ]
+ u"colorscale": [my_green, ],
+ u"reversescale": False
+ }
+ ],
+ label=u"Green",
+ method=u"update"
),
dict(
- label=plot["compare"]["name"],
- method="update",
args=[
{
- "visible": [False, True, False]
- },
- {
- "colorscale": [my_blue, ],
- "reversescale": False,
- "annotations": annotations + annotations_c,
- },
- ]
+ u"colorscale": [my_blue, ],
+ u"reversescale": False
+ }
+ ],
+ label=u"Blue",
+ method=u"update"
),
dict(
- label="Diff",
- method="update",
args=[
{
- "visible": [False, False, True]
- },
- {
- "colorscale": [my_grey, ],
- "reversescale": False,
- "annotations": annotations + annotations_diff,
- },
- ]
- ),
+ u"colorscale": [my_grey, ],
+ u"reversescale": False
+ }
+ ],
+ label=u"Grey",
+ method=u"update"
+ )
])
)
])
try:
- layout = deepcopy(plot["layout"])
+ layout = deepcopy(plot[u"layout"])
except KeyError as err:
- logging.error("Finished with error: No layout defined")
- logging.error(repr(err))
+ logging.error(f"Finished with error: No layout defined\n{repr(err)}")
return
- layout["annotations"] = annotations + annotations_r
- layout['updatemenus'] = updatemenus
+ layout[u"annotations"] = annotations
+ layout[u'updatemenus'] = updatemenus
try:
# Create plot
plpl = plgo.Figure(data=traces, layout=layout)
# Export Plot
- logging.info(" Writing file '{0}{1}'.".
- format(plot["output-file"], plot["output-file-type"]))
- ploff.plot(plpl, show_link=False, auto_open=False,
- filename='{0}{1}'.format(plot["output-file"],
- plot["output-file-type"]))
+ logging.info(
+ f" Writing file {plot[u'output-file']}"
+ f"{plot[u'output-file-type']}."
+ )
+ ploff.plot(
+ plpl,
+ show_link=False,
+ auto_open=False,
+ filename=f"{plot[u'output-file']}{plot[u'output-file-type']}"
+ )
except PlotlyError as err:
- logging.error(" Finished with error: {}".
- format(str(err).replace("\n", " ")))
+ logging.error(
+ f" Finished with error: {repr(err)}".replace(u"\n", u" ")
+ )
return
diff --git a/resources/tools/presentation/generator_report.py b/resources/tools/presentation/generator_report.py
index 13ca75c1b9..f2bf8a1cb2 100644
--- a/resources/tools/presentation/generator_report.py
+++ b/resources/tools/presentation/generator_report.py
@@ -19,11 +19,11 @@ import datetime
from shutil import make_archive
-from utils import get_files, execute_command, archive_input_data
+from pal_utils import get_files, execute_command, archive_input_data
# .css file for the html format of the report
-THEME_OVERRIDES = """/* override table width restrictions */
+THEME_OVERRIDES = u"""/* override table width restrictions */
@media screen and (min-width: 767px) {
.wy-table-responsive table td, .wy-table-responsive table th {
white-space: normal !important;
@@ -87,22 +87,22 @@ THEME_OVERRIDES = """/* override table width restrictions */
"""
# Command to build the html format of the report
-HTML_BUILDER = 'sphinx-build -v -c . -a ' \
- '-b html -E ' \
- '-t html ' \
- '-D release={release} ' \
- '-D version="Test Report {date}" ' \
- '{working_dir} ' \
- '{build_dir}/'
+HTML_BUILDER = u'sphinx-build -v -c . -a ' \
+ u'-b html -E ' \
+ u'-t html ' \
+ u'-D release={release} ' \
+ u'-D version="Test Report {date}" ' \
+ u'{working_dir} ' \
+ u'{build_dir}/'
# Command to build the pdf format of the report
-PDF_BUILDER = 'sphinx-build -v -c . -a ' \
- '-b latex -E ' \
- '-t latex ' \
- '-D release={release} ' \
- '-D version="Test Report {date}" ' \
- '{working_dir} ' \
- '{build_dir}'
+PDF_BUILDER = u'sphinx-build -v -c . -a ' \
+ u'-b latex -E ' \
+ u'-t latex ' \
+ u'-D release={release} ' \
+ u'-D version="Test Report {date}" ' \
+ u'{working_dir} ' \
+ u'{build_dir}'
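# Rendered example (hypothetical release, date and paths):
#   sphinx-build -v -c . -a -b latex -E -t latex -D release=rls1908 \
#   -D version="Test Report 2019-11-20 11:43 UTC" /tmp/work/src /tmp/work/latex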
def generate_report(release, spec, report_week):
@@ -116,126 +116,115 @@ def generate_report(release, spec, report_week):
:type report_week: str
"""
- logging.info("Generating the report ...")
+ logging.info(u"Generating the report ...")
report = {
- "html": generate_html_report,
- "pdf": generate_pdf_report
+ u"html": generate_html_report,
+ u"pdf": generate_pdf_report
}
- for report_format, versions in spec.output["format"].items():
- report[report_format](release, spec, versions, report_week)
+ for report_format in spec.output[u"format"]:
+ report[report_format](release, spec, report_week)
archive_input_data(spec)
- logging.info("Done.")
+ logging.info(u"Done.")
-def generate_html_report(release, spec, versions, report_version):
+def generate_html_report(release, spec, report_version):
"""Generate html format of the report.
:param release: Release string of the product.
:param spec: Specification read from the specification file.
- :param versions: List of versions to generate.
:param report_version: Version of the report.
:type release: str
:type spec: Specification
- :type versions: list
:type report_version: str
"""
- logging.info(" Generating the html report, give me a few minutes, please "
- "...")
+ _ = report_version
- working_dir = spec.environment["paths"]["DIR[WORKING,SRC]"]
+ logging.info(u" Generating the html report, give me a few minutes, please "
+ u"...")
- cmd = 'cd {working_dir} && mv -f index.html.template index.rst'.\
- format(working_dir=working_dir)
- execute_command(cmd)
+ working_dir = spec.environment[u"paths"][u"DIR[WORKING,SRC]"]
+
+ execute_command(f"cd {working_dir} && mv -f index.html.template index.rst")
cmd = HTML_BUILDER.format(
release=release,
- date=datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC'),
+ date=datetime.datetime.utcnow().strftime(u'%Y-%m-%d %H:%M UTC'),
working_dir=working_dir,
- build_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
+ build_dir=spec.environment[u"paths"][u"DIR[BUILD,HTML]"])
execute_command(cmd)
- with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE]"], "w") as \
+ with open(spec.environment[u"paths"][u"DIR[CSS_PATCH_FILE]"], u"w") as \
css_file:
css_file.write(THEME_OVERRIDES)
- with open(spec.environment["paths"]["DIR[CSS_PATCH_FILE2]"], "w") as \
+ with open(spec.environment[u"paths"][u"DIR[CSS_PATCH_FILE2]"], u"w") as \
css_file:
css_file.write(THEME_OVERRIDES)
- logging.info(" Done.")
+ logging.info(u" Done.")
-def generate_pdf_report(release, spec, versions, report_week):
+def generate_pdf_report(release, spec, report_week):
"""Generate html format of the report.
:param release: Release string of the product.
:param spec: Specification read from the specification file.
- :param versions: List of versions to generate. Not implemented yet.
:param report_week: Calendar week when the report is published.
:type release: str
:type spec: Specification
- :type versions: list
:type report_week: str
"""
- logging.info(" Generating the pdf report, give me a few minutes, please "
- "...")
+ logging.info(u" Generating the pdf report, give me a few minutes, please "
+ u"...")
- working_dir = spec.environment["paths"]["DIR[WORKING,SRC]"]
+ working_dir = spec.environment[u"paths"][u"DIR[WORKING,SRC]"]
- cmd = 'cd {working_dir} && mv -f index.pdf.template index.rst'.\
- format(working_dir=working_dir)
- execute_command(cmd)
+ execute_command(f"cd {working_dir} && mv -f index.pdf.template index.rst")
- _convert_all_svg_to_pdf(spec.environment["paths"]["DIR[WORKING,SRC]"])
+ _convert_all_svg_to_pdf(spec.environment[u"paths"][u"DIR[WORKING,SRC]"])
# Convert PyPLOT graphs in HTML format to PDF.
- convert_plots = "xvfb-run -a wkhtmltopdf {html} {pdf}"
- plots = get_files(spec.environment["paths"]["DIR[STATIC,VPP]"], "html")
- plots.extend(get_files(spec.environment["paths"]["DIR[STATIC,DPDK]"],
- "html"))
+ convert_plots = u"xvfb-run -a wkhtmltopdf {html} {pdf}"
+ plots = get_files(spec.environment[u"paths"][u"DIR[STATIC,VPP]"], u"html")
+ plots.extend(
+ get_files(spec.environment[u"paths"][u"DIR[STATIC,DPDK]"], u"html")
+ )
for plot in plots:
- file_name = "{0}.pdf".format(plot.rsplit(".", 1)[0])
- logging.info("Converting '{0}' to '{1}'".format(plot, file_name))
+ file_name = f"{plot.rsplit(u'.', 1)[0]}.pdf"
+ logging.info(f"Converting {plot} to {file_name}")
execute_command(convert_plots.format(html=plot, pdf=file_name))
# Generate the LaTeX documentation
- build_dir = spec.environment["paths"]["DIR[BUILD,LATEX]"]
+ build_dir = spec.environment[u"paths"][u"DIR[BUILD,LATEX]"]
cmd = PDF_BUILDER.format(
release=release,
- date=datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M UTC'),
+ date=datetime.datetime.utcnow().strftime(u'%Y-%m-%d %H:%M UTC'),
working_dir=working_dir,
build_dir=build_dir)
execute_command(cmd)
# Build pdf documentation
- archive_dir = spec.environment["paths"]["DIR[STATIC,ARCH]"]
+ archive_dir = spec.environment[u"paths"][u"DIR[STATIC,ARCH]"]
cmds = [
- 'cd {build_dir} && '
- 'pdflatex -shell-escape -interaction nonstopmode csit.tex || true'.
- format(build_dir=build_dir),
- 'cd {build_dir} && '
- 'pdflatex -interaction nonstopmode csit.tex || true'.
- format(build_dir=build_dir),
- 'cd {build_dir} && '
- 'cp csit.pdf ../{archive_dir}/csit_{release}.{week}.pdf &&'
- 'cp csit.pdf ../{archive_dir}/csit_{release}.pdf'.
- format(build_dir=build_dir,
- archive_dir=archive_dir,
- release=release,
- week=report_week)
+ f'cd {build_dir} && '
+ f'pdflatex -shell-escape -interaction nonstopmode csit.tex || true',
+ f'cd {build_dir} && '
+ f'pdflatex -interaction nonstopmode csit.tex || true',
+ f'cd {build_dir} && '
+ f'cp csit.pdf ../{archive_dir}/csit_{release}.{report_week}.pdf &&'
+ f'cp csit.pdf ../{archive_dir}/csit_{release}.pdf'
]
for cmd in cmds:
execute_command(cmd)
- logging.info(" Done.")
+ logging.info(u" Done.")
def archive_report(spec):
@@ -245,13 +234,15 @@ def archive_report(spec):
:type spec: Specification
"""
- logging.info(" Archiving the report ...")
+ logging.info(u" Archiving the report ...")
- make_archive("csit.report",
- "gztar",
- base_dir=spec.environment["paths"]["DIR[BUILD,HTML]"])
+ make_archive(
+ u"csit.report",
+ u"gztar",
+ base_dir=spec.environment[u"paths"][u"DIR[BUILD,HTML]"]
+ )
- logging.info(" Done.")
+ logging.info(u" Done.")
def _convert_all_svg_to_pdf(path):
@@ -261,10 +252,10 @@ def _convert_all_svg_to_pdf(path):
:type path: str
"""
- cmd = "inkscape -D -z --file={svg} --export-pdf={pdf}"
-
- svg_files = get_files(path, "svg", full_path=True)
+ svg_files = get_files(path, u"svg", full_path=True)
for svg_file in svg_files:
- pdf_file = "{0}.pdf".format(svg_file.rsplit('.', 1)[0])
- logging.info("Converting '{0}' to '{1}'".format(svg_file, pdf_file))
- execute_command(cmd.format(svg=svg_file, pdf=pdf_file))
+ pdf_file = f"{svg_file.rsplit(u'.', 1)[0]}.pdf"
+ logging.info(f"Converting {svg_file} to {pdf_file}")
+ execute_command(
+ f"inkscape -D -z --file={svg_file} --export-pdf={pdf_file}"
+ )
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
index 4a1ac0ef71..b931344b4b 100644
--- a/resources/tools/presentation/generator_tables.py
+++ b/resources/tools/presentation/generator_tables.py
@@ -19,18 +19,18 @@ import logging
import csv
import re
+from collections import OrderedDict
+from xml.etree import ElementTree as ET
+from datetime import datetime as dt
+from datetime import timedelta
+
import plotly.graph_objects as go
import plotly.offline as ploff
import pandas as pd
-from string import replace
-from collections import OrderedDict
from numpy import nan, isnan
-from xml.etree import ElementTree as ET
-from datetime import datetime as dt
-from datetime import timedelta
-from utils import mean, stdev, relative_change, classify_anomalies, \
+from pal_utils import mean, stdev, relative_change, classify_anomalies, \
convert_csv_to_pretty_txt, relative_change_stdev
@@ -46,14 +46,30 @@ def generate_tables(spec, data):
:type data: InputData
"""
- logging.info("Generating the tables ...")
+ generator = {
+ u"table_details": table_details,
+ u"table_merged_details": table_merged_details,
+ u"table_perf_comparison": table_perf_comparison,
+ u"table_perf_comparison_nic": table_perf_comparison_nic,
+ u"table_nics_comparison": table_nics_comparison,
+ u"table_soak_vs_ndr": table_soak_vs_ndr,
+ u"table_perf_trending_dash": table_perf_trending_dash,
+ u"table_perf_trending_dash_html": table_perf_trending_dash_html,
+ u"table_last_failed_tests": table_last_failed_tests,
+ u"table_failed_tests": table_failed_tests,
+ u"table_failed_tests_html": table_failed_tests_html
+ }
+
+ logging.info(u"Generating the tables ...")
for table in spec.tables:
try:
- eval(table["algorithm"])(table, data)
+ generator[table[u"algorithm"]](table, data)
except NameError as err:
- logging.error("Probably algorithm '{alg}' is not defined: {err}".
- format(alg=table["algorithm"], err=repr(err)))
- logging.info("Done.")
+ logging.error(
+ f"Probably algorithm {table[u'algorithm']} is not defined: "
+ f"{repr(err)}"
+ )
+ logging.info(u"Done.")
def table_details(table, input_data):
@@ -66,62 +82,68 @@ def table_details(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table)
# Prepare the header of the tables
header = list()
- for column in table["columns"]:
- header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))
+ for column in table[u"columns"]:
+ header.append(
+ u'"{0}"'.format(str(column[u"title"]).replace(u'"', u'""'))
+ )
# Generate the data for the table according to the model in the table
# specification
- job = table["data"].keys()[0]
- build = str(table["data"][job][0])
+ job = list(table[u"data"].keys())[0]
+ build = str(table[u"data"][job][0])
try:
suites = input_data.suites(job, build)
except KeyError:
- logging.error(" No data available. The table will not be generated.")
+ logging.error(
+ u" No data available. The table will not be generated."
+ )
return
- for suite_longname, suite in suites.iteritems():
+ for suite in suites.values:
# Generate data
- suite_name = suite["name"]
+ suite_name = suite[u"name"]
table_lst = list()
for test in data[job][build].keys():
- if data[job][build][test]["parent"] in suite_name:
- row_lst = list()
- for column in table["columns"]:
- try:
- col_data = str(data[job][build][test][column["data"].
- split(" ")[1]]).replace('"', '""')
- if column["data"].split(" ")[1] in ("conf-history",
- "show-run"):
- col_data = replace(col_data, " |br| ", "",
- maxreplace=1)
- col_data = " |prein| {0} |preout| ".\
- format(col_data[:-5])
- row_lst.append('"{0}"'.format(col_data))
- except KeyError:
- row_lst.append("No data")
- table_lst.append(row_lst)
+ if data[job][build][test][u"parent"] not in suite_name:
+ continue
+ row_lst = list()
+ for column in table[u"columns"]:
+ try:
+ col_data = str(data[job][build][test][column[
+ u"data"].split(" ")[1]]).replace(u'"', u'""')
+ if column[u"data"].split(u" ")[1] in \
+ (u"conf-history", u"show-run"):
+ col_data = col_data.replace(u" |br| ", u"", )
+ col_data = f" |prein| {col_data[:-5]} |preout| "
+ row_lst.append(f'"{col_data}"')
+ except KeyError:
+ row_lst.append(u"No data")
+ table_lst.append(row_lst)
# Write the data to file
if table_lst:
- file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
- table["output-file-ext"])
- logging.info(" Writing file: '{}'".format(file_name))
- with open(file_name, "w") as file_handler:
- file_handler.write(",".join(header) + "\n")
+ file_name = (
+ f"{table[u'output-file']}_{suite_name}"
+ f"{table[u'output-file-ext']}"
+ )
+ logging.info(f" Writing file: {file_name}")
+ with open(file_name, u"w") as file_handler:
+ file_handler.write(u",".join(header) + u"\n")
for item in table_lst:
- file_handler.write(",".join(item) + "\n")
+ file_handler.write(u",".join(item) + u"\n")
- logging.info(" Done.")
+ logging.info(u" Done.")
def table_merged_details(table, input_data):
@@ -134,107 +156,155 @@ def table_merged_details(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
-
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
data = input_data.merge_data(data)
data.sort_index(inplace=True)
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
suites = input_data.filter_data(
- table, continue_on_error=True, data_set="suites")
+ table, continue_on_error=True, data_set=u"suites")
suites = input_data.merge_data(suites)
# Prepare the header of the tables
header = list()
- for column in table["columns"]:
- header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))
+ for column in table[u"columns"]:
+ header.append(
+ u'"{0}"'.format(str(column[u"title"]).replace(u'"', u'""'))
+ )
- for _, suite in suites.iteritems():
+ for suite in suites.values:
# Generate data
- suite_name = suite["name"]
+ suite_name = suite[u"name"]
table_lst = list()
for test in data.keys():
- if data[test]["parent"] in suite_name:
- row_lst = list()
- for column in table["columns"]:
- try:
- col_data = str(data[test][column["data"].
- split(" ")[1]]).replace('"', '""')
- col_data = replace(col_data, "No Data",
- "Not Captured ")
- if column["data"].split(" ")[1] in ("conf-history",
- "show-run"):
- col_data = replace(col_data, " |br| ", "",
- maxreplace=1)
- col_data = " |prein| {0} |preout| ".\
- format(col_data[:-5])
- row_lst.append('"{0}"'.format(col_data))
- except KeyError:
- row_lst.append('"Not captured"')
- table_lst.append(row_lst)
+ if data[test][u"parent"] not in suite_name:
+ continue
+ row_lst = list()
+ for column in table[u"columns"]:
+ try:
+ col_data = str(data[test][column[
+ u"data"].split(u" ")[1]]).replace(u'"', u'""')
+ col_data = col_data.replace(
+ u"No Data", u"Not Captured "
+ )
+ if column[u"data"].split(u" ")[1] in \
+ (u"conf-history", u"show-run"):
+ col_data = col_data.replace(u" |br| ", u"", 1)
+ col_data = f" |prein| {col_data[:-5]} |preout| "
+ row_lst.append(f'"{col_data}"')
+ except KeyError:
+ row_lst.append(u'"Not captured"')
+ table_lst.append(row_lst)
# Write the data to file
if table_lst:
- file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
- table["output-file-ext"])
- logging.info(" Writing file: '{}'".format(file_name))
- with open(file_name, "w") as file_handler:
- file_handler.write(",".join(header) + "\n")
+ file_name = (
+ f"{table[u'output-file']}_{suite_name}"
+ f"{table[u'output-file-ext']}"
+ )
+ logging.info(f" Writing file: {file_name}")
+ with open(file_name, u"w") as file_handler:
+ file_handler.write(u",".join(header) + "u\n")
for item in table_lst:
- file_handler.write(",".join(item) + "\n")
+ file_handler.write(u",".join(item) + u"\n")
- logging.info(" Done.")
+ logging.info(u" Done.")
def _tpc_modify_test_name(test_name):
- test_name_mod = test_name.replace("-ndrpdrdisc", ""). \
- replace("-ndrpdr", "").replace("-pdrdisc", ""). \
- replace("-ndrdisc", "").replace("-pdr", ""). \
- replace("-ndr", ""). \
- replace("1t1c", "1c").replace("2t1c", "1c"). \
- replace("2t2c", "2c").replace("4t2c", "2c"). \
- replace("4t4c", "4c").replace("8t4c", "4c")
- test_name_mod = re.sub(REGEX_NIC, "", test_name_mod)
- return test_name_mod
+ """Modify a test name by replacing its parts.
+
+ :param test_name: Test name to be modified.
+ :type test_name: str
+ :returns: Modified test name.
+ :rtype: str
+ """
+ test_name_mod = test_name.\
+ replace(u"-ndrpdrdisc", u""). \
+ replace(u"-ndrpdr", u"").\
+ replace(u"-pdrdisc", u""). \
+ replace(u"-ndrdisc", u"").\
+ replace(u"-pdr", u""). \
+ replace(u"-ndr", u""). \
+ replace(u"1t1c", u"1c").\
+ replace(u"2t1c", u"1c"). \
+ replace(u"2t2c", u"2c").\
+ replace(u"4t2c", u"2c"). \
+ replace(u"4t4c", u"4c").\
+ replace(u"8t4c", u"4c")
+
+ return re.sub(REGEX_NIC, u"", test_name_mod)
def _tpc_modify_displayed_test_name(test_name):
- return test_name.replace("1t1c", "1c").replace("2t1c", "1c"). \
- replace("2t2c", "2c").replace("4t2c", "2c"). \
- replace("4t4c", "4c").replace("8t4c", "4c")
+ """Modify a test name which is displayed in a table by replacing its parts.
+
+ :param test_name: Test name to be modified.
+ :type test_name: str
+ :returns: Modified test name.
+ :rtype: str
+ """
+ return test_name.\
+ replace(u"1t1c", u"1c").\
+ replace(u"2t1c", u"1c"). \
+ replace(u"2t2c", u"2c").\
+ replace(u"4t2c", u"2c"). \
+ replace(u"4t4c", u"4c").\
+ replace(u"8t4c", u"4c")
def _tpc_insert_data(target, src, include_tests):
+ """Insert src data to the target structure.
+
+ :param target: Target structure where the data is placed.
+    :param src: Source data to be placed into the target structure.
+ :param include_tests: Which results will be included (MRR, NDR, PDR).
+ :type target: list
+ :type src: dict
+ :type include_tests: str
+ """
try:
- if include_tests == "MRR":
- target.append(src["result"]["receive-rate"]) # .avg)
- elif include_tests == "PDR":
- target.append(src["throughput"]["PDR"]["LOWER"])
- elif include_tests == "NDR":
- target.append(src["throughput"]["NDR"]["LOWER"])
+ if include_tests == u"MRR":
+ target.append(src[u"result"][u"receive-rate"])
+ elif include_tests == u"PDR":
+ target.append(src[u"throughput"][u"PDR"][u"LOWER"])
+ elif include_tests == u"NDR":
+ target.append(src[u"throughput"][u"NDR"][u"LOWER"])
except (KeyError, TypeError):
pass
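# Usage sketch (hypothetical result structure):
#   target = list()
#   src = {u"throughput": {u"PDR": {u"LOWER": 12.3e6}}}
#   _tpc_insert_data(target, src, include_tests=u"PDR")    # target == [12.3e6]
#   _tpc_insert_data(target, dict(), include_tests=u"PDR")  # skipped silently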
def _tpc_sort_table(table):
- # Sort the table:
- # 1. New in CSIT-XXXX
- # 2. See footnote
- # 3. Delta
+ """Sort the table this way:
+
+ 1. Put "New in CSIT-XXXX" at the first place.
+ 2. Put "See footnote" at the second place.
+ 3. Sort the rest by "Delta".
+
+ :param table: Table to sort.
+ :type table: list
+ :returns: Sorted table.
+ :rtype: list
+ """
+
+
tbl_new = list()
tbl_see = list()
tbl_delta = list()
for item in table:
if isinstance(item[-1], str):
- if "New in CSIT" in item[-1]:
+ if u"New in CSIT" in item[-1]:
tbl_new.append(item)
- elif "See footnote" in item[-1]:
+ elif u"See footnote" in item[-1]:
tbl_see.append(item)
else:
tbl_delta.append(item)
@@ -269,22 +339,22 @@ def _tpc_generate_html_table(header, data, output_file_name):
:type output_file_name: str
"""
- df = pd.DataFrame(data, columns=header)
+ df_data = pd.DataFrame(data, columns=header)
- df_sorted = [df.sort_values(
+ df_sorted = [df_data.sort_values(
by=[key, header[0]], ascending=[True, True]
if key != header[0] else [False, True]) for key in header]
- df_sorted_rev = [df.sort_values(
+ df_sorted_rev = [df_data.sort_values(
by=[key, header[0]], ascending=[False, True]
if key != header[0] else [True, True]) for key in header]
df_sorted.extend(df_sorted_rev)
- fill_color = [["#d4e4f7" if idx % 2 else "#e9f1fb"
- for idx in range(len(df))]]
+ fill_color = [[u"#d4e4f7" if idx % 2 else u"#e9f1fb"
+ for idx in range(len(df_data))]]
table_header = dict(
- values=["<b>{item}</b>".format(item=item) for item in header],
- fill_color="#7eade7",
- align=["left", "center"]
+ values=[f"<b>{item}</b>" for item in header],
+ fill_color=u"#7eade7",
+ align=[u"left", u"center"]
)
fig = go.Figure()
@@ -298,47 +368,47 @@ def _tpc_generate_html_table(header, data, output_file_name):
cells=dict(
values=columns,
fill_color=fill_color,
- align=["left", "right"]
+ align=[u"left", u"right"]
)
)
)
buttons = list()
- menu_items = ["<b>{0}</b> (ascending)".format(itm) for itm in header]
- menu_items_rev = ["<b>{0}</b> (descending)".format(itm) for itm in header]
+ menu_items = [f"<b>{itm}</b> (ascending)" for itm in header]
+ menu_items_rev = [f"<b>{itm}</b> (descending)" for itm in header]
menu_items.extend(menu_items_rev)
for idx, hdr in enumerate(menu_items):
visible = [False, ] * len(menu_items)
visible[idx] = True
buttons.append(
dict(
- label=hdr.replace(" [Mpps]", ""),
- method="update",
- args=[{"visible": visible}],
+ label=hdr.replace(u" [Mpps]", u""),
+ method=u"update",
+ args=[{u"visible": visible}],
)
)
fig.update_layout(
updatemenus=[
go.layout.Updatemenu(
- type="dropdown",
- direction="down",
+ type=u"dropdown",
+ direction=u"down",
x=0.03,
- xanchor="left",
+ xanchor=u"left",
y=1.045,
- yanchor="top",
+ yanchor=u"top",
active=len(menu_items) - 1,
buttons=list(buttons)
)
],
annotations=[
go.layout.Annotation(
- text="<b>Sort by:</b>",
+ text=u"<b>Sort by:</b>",
x=0,
- xref="paper",
+ xref=u"paper",
y=1.035,
- yref="paper",
- align="left",
+ yref=u"paper",
+ align=u"left",
showarrow=False
)
]
@@ -347,8 +417,8 @@ def _tpc_generate_html_table(header, data, output_file_name):
ploff.plot(fig, show_link=False, auto_open=False, filename=output_file_name)
-def table_performance_comparison(table, input_data):
- """Generate the table(s) with algorithm: table_performance_comparison
+def table_perf_comparison(table, input_data):
+ """Generate the table(s) with algorithm: table_perf_comparison
specified in the specification file.
:param table: Table to generate.
@@ -357,231 +427,229 @@ def table_performance_comparison(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
# Prepare the header of the tables
try:
- header = ["Test case", ]
+ header = [u"Test case", ]
- if table["include-tests"] == "MRR":
- hdr_param = "Rec Rate"
+ if table[u"include-tests"] == u"MRR":
+ hdr_param = u"Rec Rate"
else:
- hdr_param = "Thput"
+ hdr_param = u"Thput"
- history = table.get("history", None)
- if history:
- for item in history:
- header.extend(
- ["{0} {1} [Mpps]".format(item["title"], hdr_param),
- "{0} Stdev [Mpps]".format(item["title"])])
+ history = table.get(u"history", list())
+ for item in history:
+ header.extend(
+ [
+ f"{item[u'title']} {hdr_param} [Mpps]",
+ f"{item[u'title']} Stdev [Mpps]"
+ ]
+ )
header.extend(
- ["{0} {1} [Mpps]".format(table["reference"]["title"], hdr_param),
- "{0} Stdev [Mpps]".format(table["reference"]["title"]),
- "{0} {1} [Mpps]".format(table["compare"]["title"], hdr_param),
- "{0} Stdev [Mpps]".format(table["compare"]["title"]),
- "Delta [%]"])
- header_str = ",".join(header) + "\n"
+ [
+ f"{table[u'reference'][u'title']} {hdr_param} [Mpps]",
+ f"{table[u'reference'][u'title']} Stdev [Mpps]",
+ f"{table[u'compare'][u'title']} {hdr_param} [Mpps]",
+ f"{table[u'compare'][u'title']} Stdev [Mpps]",
+ u"Delta [%]"
+ ]
+ )
+ header_str = u",".join(header) + u"\n"
except (AttributeError, KeyError) as err:
- logging.error("The model is invalid, missing parameter: {0}".
- format(err))
+ logging.error(f"The model is invalid, missing parameter: {repr(err)}")
return
# Prepare data to the table:
tbl_dict = dict()
topo = ""
- for job, builds in table["reference"]["data"].items():
- topo = "2n-skx" if "2n-skx" in job else ""
+ for job, builds in table[u"reference"][u"data"].items():
+ topo = u"2n-skx" if u"2n-skx" in job else u""
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
+ for tst_name, tst_data in data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- groups = re.search(REGEX_NIC, tst_data["parent"])
- nic = groups.group(0) if groups else ""
- name = "{0}-{1}".format(nic, "-".join(tst_data["name"].
- split("-")[:-1]))
- if "across testbeds" in table["title"].lower() or \
- "across topologies" in table["title"].lower():
+ groups = re.search(REGEX_NIC, tst_data[u"parent"])
+ nic = groups.group(0) if groups else u""
+ name = \
+ f"{nic}-{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ if u"across testbeds" in table[u"title"].lower() or \
+ u"across topologies" in table[u"title"].lower():
name = _tpc_modify_displayed_test_name(name)
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
- _tpc_insert_data(target=tbl_dict[tst_name_mod]["ref-data"],
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
+ _tpc_insert_data(target=tbl_dict[tst_name_mod][u"ref-data"],
src=tst_data,
- include_tests=table["include-tests"])
+ include_tests=table[u"include-tests"])
- for job, builds in table["compare"]["data"].items():
+ for job, builds in table[u"compare"][u"data"].items():
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
+ for tst_name, tst_data in data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- groups = re.search(REGEX_NIC, tst_data["parent"])
- nic = groups.group(0) if groups else ""
- name = "{0}-{1}".format(nic, "-".join(tst_data["name"].
- split("-")[:-1]))
- if "across testbeds" in table["title"].lower() or \
- "across topologies" in table["title"].lower():
+ groups = re.search(REGEX_NIC, tst_data[u"parent"])
+ nic = groups.group(0) if groups else u""
+ name = \
+ f"{nic}-{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ if u"across testbeds" in table[u"title"].lower() or \
+ u"across topologies" in table[u"title"].lower():
name = _tpc_modify_displayed_test_name(name)
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
- _tpc_insert_data(target=tbl_dict[tst_name_mod]["cmp-data"],
- src=tst_data,
- include_tests=table["include-tests"])
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
+ _tpc_insert_data(
+ target=tbl_dict[tst_name_mod][u"cmp-data"],
+ src=tst_data,
+ include_tests=table[u"include-tests"]
+ )
- replacement = table["compare"].get("data-replacement", None)
+ replacement = table[u"compare"].get(u"data-replacement", None)
if replacement:
create_new_list = True
rpl_data = input_data.filter_data(
table, data=replacement, continue_on_error=True)
for job, builds in replacement.items():
for build in builds:
- for tst_name, tst_data in rpl_data[job][str(build)].iteritems():
+ for tst_name, tst_data in rpl_data[job][str(build)].items():
tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- name = "{0}".format("-".join(tst_data["name"].
- split("-")[:-1]))
- if "across testbeds" in table["title"].lower() or \
- "across topologies" in table["title"].lower():
+ name = \
+ f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ if u"across testbeds" in table[u"title"].lower() or \
+ u"across topologies" in table[u"title"].lower():
name = _tpc_modify_displayed_test_name(name)
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
if create_new_list:
create_new_list = False
- tbl_dict[tst_name_mod]["cmp-data"] = list()
+ tbl_dict[tst_name_mod][u"cmp-data"] = list()
- _tpc_insert_data(target=tbl_dict[tst_name_mod]["cmp-data"],
- src=tst_data,
- include_tests=table["include-tests"])
+ _tpc_insert_data(
+ target=tbl_dict[tst_name_mod][u"cmp-data"],
+ src=tst_data,
+ include_tests=table[u"include-tests"]
+ )
- if history:
- for item in history:
- for job, builds in item["data"].items():
- for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
- if tbl_dict.get(tst_name_mod, None) is None:
+ for item in history:
+ for job, builds in item[u"data"].items():
+ for build in builds:
+ for tst_name, tst_data in data[job][str(build)].items():
+ tst_name_mod = _tpc_modify_test_name(tst_name)
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
+ if tbl_dict.get(tst_name_mod, None) is None:
+ continue
+ if tbl_dict[tst_name_mod].get(u"history", None) is None:
+ tbl_dict[tst_name_mod][u"history"] = OrderedDict()
+ if tbl_dict[tst_name_mod][u"history"].\
+ get(item[u"title"], None) is None:
+ tbl_dict[tst_name_mod][u"history"][item[
+ u"title"]] = list()
+ try:
+ if table[u"include-tests"] == u"MRR":
+ res = tst_data[u"result"][u"receive-rate"]
+ elif table[u"include-tests"] == u"PDR":
+ res = tst_data[u"throughput"][u"PDR"][u"LOWER"]
+ elif table[u"include-tests"] == u"NDR":
+ res = tst_data[u"throughput"][u"NDR"][u"LOWER"]
+ else:
continue
- if tbl_dict[tst_name_mod].get("history", None) is None:
- tbl_dict[tst_name_mod]["history"] = OrderedDict()
- if tbl_dict[tst_name_mod]["history"].\
- get(item["title"], None) is None:
- tbl_dict[tst_name_mod]["history"][item["title"]] = \
- list()
- try:
- # TODO: Re-work when NDRPDRDISC tests are not used
- if table["include-tests"] == "MRR":
- tbl_dict[tst_name_mod]["history"][item[
- "title"]].append(tst_data["result"][
- "receive-rate"].avg)
- elif table["include-tests"] == "PDR":
- if tst_data["type"] == "PDR":
- tbl_dict[tst_name_mod]["history"][
- item["title"]].\
- append(tst_data["throughput"]["value"])
- elif tst_data["type"] == "NDRPDR":
- tbl_dict[tst_name_mod]["history"][item[
- "title"]].append(tst_data["throughput"][
- "PDR"]["LOWER"])
- elif table["include-tests"] == "NDR":
- if tst_data["type"] == "NDR":
- tbl_dict[tst_name_mod]["history"][
- item["title"]].\
- append(tst_data["throughput"]["value"])
- elif tst_data["type"] == "NDRPDR":
- tbl_dict[tst_name_mod]["history"][item[
- "title"]].append(tst_data["throughput"][
- "NDR"]["LOWER"])
- else:
- continue
- except (TypeError, KeyError):
- pass
+ tbl_dict[tst_name_mod][u"history"][item[u"title"]].\
+ append(res)
+ except (TypeError, KeyError):
+ pass
tbl_lst = list()
footnote = False
- for tst_name in tbl_dict.keys():
- item = [tbl_dict[tst_name]["name"], ]
+ for tst_name in tbl_dict:
+ item = [tbl_dict[tst_name][u"name"], ]
if history:
- if tbl_dict[tst_name].get("history", None) is not None:
- for hist_data in tbl_dict[tst_name]["history"].values():
+ if tbl_dict[tst_name].get(u"history", None) is not None:
+ for hist_data in tbl_dict[tst_name][u"history"].values():
if hist_data:
item.append(round(mean(hist_data) / 1000000, 2))
item.append(round(stdev(hist_data) / 1000000, 2))
else:
- item.extend(["Not tested", "Not tested"])
+ item.extend([u"Not tested", u"Not tested"])
else:
- item.extend(["Not tested", "Not tested"])
- data_t = tbl_dict[tst_name]["ref-data"]
+ item.extend([u"Not tested", u"Not tested"])
+ data_t = tbl_dict[tst_name][u"ref-data"]
if data_t:
item.append(round(mean(data_t) / 1000000, 2))
item.append(round(stdev(data_t) / 1000000, 2))
else:
- item.extend(["Not tested", "Not tested"])
- data_t = tbl_dict[tst_name]["cmp-data"]
+ item.extend([u"Not tested", u"Not tested"])
+ data_t = tbl_dict[tst_name][u"cmp-data"]
if data_t:
item.append(round(mean(data_t) / 1000000, 2))
item.append(round(stdev(data_t) / 1000000, 2))
else:
- item.extend(["Not tested", "Not tested"])
- if item[-2] == "Not tested":
+ item.extend([u"Not tested", u"Not tested"])
+ if item[-2] == u"Not tested":
pass
- elif item[-4] == "Not tested":
- item.append("New in CSIT-1908")
- elif topo == "2n-skx" and "dot1q" in tbl_dict[tst_name]["name"]:
- item.append("See footnote [1]")
+ elif item[-4] == u"Not tested":
+ item.append(u"New in CSIT-1908")
+ elif topo == u"2n-skx" and u"dot1q" in tbl_dict[tst_name][u"name"]:
+ item.append(u"See footnote [1]")
footnote = True
elif item[-4] != 0:
item.append(int(relative_change(float(item[-4]), float(item[-2]))))
- if (len(item) == len(header)) and (item[-3] != "Not tested"):
+ if (len(item) == len(header)) and (item[-3] != u"Not tested"):
tbl_lst.append(item)
tbl_lst = _tpc_sort_table(tbl_lst)
# Generate csv tables:
- csv_file = "{0}.csv".format(table["output-file"])
- with open(csv_file, "w") as file_handler:
+ csv_file = f"{table[u'output-file']}.csv"
+ with open(csv_file, u"w") as file_handler:
file_handler.write(header_str)
for test in tbl_lst:
- file_handler.write(",".join([str(item) for item in test]) + "\n")
+ file_handler.write(u",".join([str(item) for item in test]) + u"\n")
- txt_file_name = "{0}.txt".format(table["output-file"])
+ txt_file_name = f"{table[u'output-file']}.txt"
convert_csv_to_pretty_txt(csv_file, txt_file_name)
if footnote:
- with open(txt_file_name, 'a') as txt_file:
+ with open(txt_file_name, u'a') as txt_file:
txt_file.writelines([
- "\nFootnotes:\n",
- "[1] CSIT-1908 changed test methodology of dot1q tests in "
- "2-node testbeds, dot1q encapsulation is now used on both "
- "links of SUT.\n",
- " Previously dot1q was used only on a single link with the "
- "other link carrying untagged Ethernet frames. This changes "
- "results\n",
- " in slightly lower throughput in CSIT-1908 for these "
- "tests. See release notes."
+ u"\nFootnotes:\n",
+ u"[1] CSIT-1908 changed test methodology of dot1q tests in "
+ u"2-node testbeds, dot1q encapsulation is now used on both "
+ u"links of SUT.\n",
+ u" Previously dot1q was used only on a single link with the "
+ u"other link carrying untagged Ethernet frames. This changes "
+ u"results\n",
+ u" in slightly lower throughput in CSIT-1908 for these "
+ u"tests. See release notes."
])
# Generate html table:
- _tpc_generate_html_table(header, tbl_lst,
- "{0}.html".format(table["output-file"]))
+ _tpc_generate_html_table(header, tbl_lst, f"{table[u'output-file']}.html")
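Each row above carries mean and stdev in Mpps followed by Delta [%]; a self-contained sketch of that arithmetic, with stdlib mean/stdev standing in for the pal_utils helpers, hypothetical samples, and relative_change assumed to be (cmp - ref) / ref * 100:

from statistics import mean, stdev

ref_samples = [14.2e6, 14.4e6, 14.3e6]     # hypothetical pps samples
cmp_samples = [15.0e6, 15.2e6, 15.1e6]

row = ["x710-64b-2t1c-l2bdbasemaclrn"]     # hypothetical test name
for samples in (ref_samples, cmp_samples):
    row.append(round(mean(samples) / 1000000, 2))   # mean [Mpps]
    row.append(round(stdev(samples) / 1000000, 2))  # stdev [Mpps]
ref_mean, cmp_mean = row[1], row[3]
row.append(int((cmp_mean - ref_mean) / ref_mean * 100))  # Delta [%]
print(row)  # ['x710-64b-2t1c-l2bdbasemaclrn', 14.3, 0.1, 15.1, 0.1, 5]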
-def table_performance_comparison_nic(table, input_data):
- """Generate the table(s) with algorithm: table_performance_comparison
+def table_perf_comparison_nic(table, input_data):
+ """Generate the table(s) with algorithm: table_perf_comparison
specified in the specification file.
:param table: Table to generate.
@@ -590,231 +658,229 @@ def table_performance_comparison_nic(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
# Prepare the header of the tables
try:
- header = ["Test case", ]
+ header = [u"Test case", ]
- if table["include-tests"] == "MRR":
- hdr_param = "Rec Rate"
+ if table[u"include-tests"] == u"MRR":
+ hdr_param = u"Rec Rate"
else:
- hdr_param = "Thput"
+ hdr_param = u"Thput"
- history = table.get("history", None)
- if history:
- for item in history:
- header.extend(
- ["{0} {1} [Mpps]".format(item["title"], hdr_param),
- "{0} Stdev [Mpps]".format(item["title"])])
+ history = table.get(u"history", list())
+ for item in history:
+ header.extend(
+ [
+ f"{item[u'title']} {hdr_param} [Mpps]",
+ f"{item[u'title']} Stdev [Mpps]"
+ ]
+ )
header.extend(
- ["{0} {1} [Mpps]".format(table["reference"]["title"], hdr_param),
- "{0} Stdev [Mpps]".format(table["reference"]["title"]),
- "{0} {1} [Mpps]".format(table["compare"]["title"], hdr_param),
- "{0} Stdev [Mpps]".format(table["compare"]["title"]),
- "Delta [%]"])
- header_str = ",".join(header) + "\n"
+ [
+ f"{table[u'reference'][u'title']} {hdr_param} [Mpps]",
+ f"{table[u'reference'][u'title']} Stdev [Mpps]",
+ f"{table[u'compare'][u'title']} {hdr_param} [Mpps]",
+ f"{table[u'compare'][u'title']} Stdev [Mpps]",
+ u"Delta [%]"
+ ]
+ )
+ header_str = u",".join(header) + u"\n"
except (AttributeError, KeyError) as err:
- logging.error("The model is invalid, missing parameter: {0}".
- format(err))
+ logging.error(f"The model is invalid, missing parameter: {repr(err)}")
return
# Prepare data to the table:
tbl_dict = dict()
- topo = ""
- for job, builds in table["reference"]["data"].items():
- topo = "2n-skx" if "2n-skx" in job else ""
+ topo = u""
+ for job, builds in table[u"reference"][u"data"].items():
+ topo = u"2n-skx" if u"2n-skx" in job else u""
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- if table["reference"]["nic"] not in tst_data["tags"]:
+ for tst_name, tst_data in data[job][str(build)].items():
+ if table[u"reference"][u"nic"] not in tst_data[u"tags"]:
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- name = "{0}".format("-".join(tst_data["name"].
- split("-")[:-1]))
- if "across testbeds" in table["title"].lower() or \
- "across topologies" in table["title"].lower():
+ name = f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ if u"across testbeds" in table[u"title"].lower() or \
+ u"across topologies" in table[u"title"].lower():
name = _tpc_modify_displayed_test_name(name)
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
- _tpc_insert_data(target=tbl_dict[tst_name_mod]["ref-data"],
- src=tst_data,
- include_tests=table["include-tests"])
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
+ _tpc_insert_data(
+ target=tbl_dict[tst_name_mod][u"ref-data"],
+ src=tst_data,
+ include_tests=table[u"include-tests"]
+ )
- for job, builds in table["compare"]["data"].items():
+ for job, builds in table[u"compare"][u"data"].items():
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- if table["compare"]["nic"] not in tst_data["tags"]:
+ for tst_name, tst_data in data[job][str(build)].items():
+ if table[u"compare"][u"nic"] not in tst_data[u"tags"]:
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- name = "{0}".format("-".join(tst_data["name"].
- split("-")[:-1]))
- if "across testbeds" in table["title"].lower() or \
- "across topologies" in table["title"].lower():
+ name = f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ if u"across testbeds" in table[u"title"].lower() or \
+ u"across topologies" in table[u"title"].lower():
name = _tpc_modify_displayed_test_name(name)
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
- _tpc_insert_data(target=tbl_dict[tst_name_mod]["cmp-data"],
- src=tst_data,
- include_tests=table["include-tests"])
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
+ _tpc_insert_data(
+ target=tbl_dict[tst_name_mod][u"cmp-data"],
+ src=tst_data,
+ include_tests=table[u"include-tests"]
+ )
- replacement = table["compare"].get("data-replacement", None)
+ replacement = table[u"compare"].get(u"data-replacement", None)
if replacement:
create_new_list = True
rpl_data = input_data.filter_data(
table, data=replacement, continue_on_error=True)
for job, builds in replacement.items():
for build in builds:
- for tst_name, tst_data in rpl_data[job][str(build)].iteritems():
- if table["compare"]["nic"] not in tst_data["tags"]:
+ for tst_name, tst_data in rpl_data[job][str(build)].items():
+ if table[u"compare"][u"nic"] not in tst_data[u"tags"]:
continue
tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- name = "{0}".format("-".join(tst_data["name"].
- split("-")[:-1]))
- if "across testbeds" in table["title"].lower() or \
- "across topologies" in table["title"].lower():
+ name = \
+ f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ if u"across testbeds" in table[u"title"].lower() or \
+ u"across topologies" in table[u"title"].lower():
name = _tpc_modify_displayed_test_name(name)
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
if create_new_list:
create_new_list = False
- tbl_dict[tst_name_mod]["cmp-data"] = list()
+ tbl_dict[tst_name_mod][u"cmp-data"] = list()
- _tpc_insert_data(target=tbl_dict[tst_name_mod]["cmp-data"],
- src=tst_data,
- include_tests=table["include-tests"])
+ _tpc_insert_data(
+ target=tbl_dict[tst_name_mod][u"cmp-data"],
+ src=tst_data,
+ include_tests=table[u"include-tests"]
+ )
- if history:
- for item in history:
- for job, builds in item["data"].items():
- for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- if item["nic"] not in tst_data["tags"]:
- continue
- tst_name_mod = _tpc_modify_test_name(tst_name)
- if "across topologies" in table["title"].lower():
- tst_name_mod = tst_name_mod.replace("2n1l-", "")
- if tbl_dict.get(tst_name_mod, None) is None:
+ for item in history:
+ for job, builds in item[u"data"].items():
+ for build in builds:
+ for tst_name, tst_data in data[job][str(build)].items():
+ if item[u"nic"] not in tst_data[u"tags"]:
+ continue
+ tst_name_mod = _tpc_modify_test_name(tst_name)
+ if u"across topologies" in table[u"title"].lower():
+ tst_name_mod = tst_name_mod.replace(u"2n1l-", u"")
+ if tbl_dict.get(tst_name_mod, None) is None:
+ continue
+ if tbl_dict[tst_name_mod].get(u"history", None) is None:
+ tbl_dict[tst_name_mod][u"history"] = OrderedDict()
+ if tbl_dict[tst_name_mod][u"history"].\
+ get(item[u"title"], None) is None:
+ tbl_dict[tst_name_mod][u"history"][item[
+ u"title"]] = list()
+ try:
+ if table[u"include-tests"] == u"MRR":
+ res = tst_data[u"result"][u"receive-rate"]
+ elif table[u"include-tests"] == u"PDR":
+ res = tst_data[u"throughput"][u"PDR"][u"LOWER"]
+ elif table[u"include-tests"] == u"NDR":
+ res = tst_data[u"throughput"][u"NDR"][u"LOWER"]
+ else:
continue
- if tbl_dict[tst_name_mod].get("history", None) is None:
- tbl_dict[tst_name_mod]["history"] = OrderedDict()
- if tbl_dict[tst_name_mod]["history"].\
- get(item["title"], None) is None:
- tbl_dict[tst_name_mod]["history"][item["title"]] = \
- list()
- try:
- # TODO: Re-work when NDRPDRDISC tests are not used
- if table["include-tests"] == "MRR":
- tbl_dict[tst_name_mod]["history"][item[
- "title"]].append(tst_data["result"][
- "receive-rate"].avg)
- elif table["include-tests"] == "PDR":
- if tst_data["type"] == "PDR":
- tbl_dict[tst_name_mod]["history"][
- item["title"]].\
- append(tst_data["throughput"]["value"])
- elif tst_data["type"] == "NDRPDR":
- tbl_dict[tst_name_mod]["history"][item[
- "title"]].append(tst_data["throughput"][
- "PDR"]["LOWER"])
- elif table["include-tests"] == "NDR":
- if tst_data["type"] == "NDR":
- tbl_dict[tst_name_mod]["history"][
- item["title"]].\
- append(tst_data["throughput"]["value"])
- elif tst_data["type"] == "NDRPDR":
- tbl_dict[tst_name_mod]["history"][item[
- "title"]].append(tst_data["throughput"][
- "NDR"]["LOWER"])
- else:
- continue
- except (TypeError, KeyError):
- pass
+ tbl_dict[tst_name_mod][u"history"][item[u"title"]].\
+ append(res)
+ except (TypeError, KeyError):
+ pass
tbl_lst = list()
footnote = False
- for tst_name in tbl_dict.keys():
- item = [tbl_dict[tst_name]["name"], ]
+ for tst_name in tbl_dict:
+ item = [tbl_dict[tst_name][u"name"], ]
if history:
- if tbl_dict[tst_name].get("history", None) is not None:
- for hist_data in tbl_dict[tst_name]["history"].values():
+ if tbl_dict[tst_name].get(u"history", None) is not None:
+ for hist_data in tbl_dict[tst_name][u"history"].values():
if hist_data:
item.append(round(mean(hist_data) / 1000000, 2))
item.append(round(stdev(hist_data) / 1000000, 2))
else:
- item.extend(["Not tested", "Not tested"])
+ item.extend([u"Not tested", u"Not tested"])
else:
- item.extend(["Not tested", "Not tested"])
- data_t = tbl_dict[tst_name]["ref-data"]
+ item.extend([u"Not tested", u"Not tested"])
+ data_t = tbl_dict[tst_name][u"ref-data"]
if data_t:
item.append(round(mean(data_t) / 1000000, 2))
item.append(round(stdev(data_t) / 1000000, 2))
else:
- item.extend(["Not tested", "Not tested"])
- data_t = tbl_dict[tst_name]["cmp-data"]
+ item.extend([u"Not tested", u"Not tested"])
+ data_t = tbl_dict[tst_name][u"cmp-data"]
if data_t:
item.append(round(mean(data_t) / 1000000, 2))
item.append(round(stdev(data_t) / 1000000, 2))
else:
- item.extend(["Not tested", "Not tested"])
- if item[-2] == "Not tested":
+ item.extend([u"Not tested", u"Not tested"])
+ if item[-2] == u"Not tested":
pass
- elif item[-4] == "Not tested":
- item.append("New in CSIT-1908")
- elif topo == "2n-skx" and "dot1q" in tbl_dict[tst_name]["name"]:
- item.append("See footnote [1]")
+ elif item[-4] == u"Not tested":
+ item.append(u"New in CSIT-1908")
+ elif topo == u"2n-skx" and u"dot1q" in tbl_dict[tst_name][u"name"]:
+ item.append(u"See footnote [1]")
footnote = True
elif item[-4] != 0:
item.append(int(relative_change(float(item[-4]), float(item[-2]))))
- if (len(item) == len(header)) and (item[-3] != "Not tested"):
+ if (len(item) == len(header)) and (item[-3] != u"Not tested"):
tbl_lst.append(item)
tbl_lst = _tpc_sort_table(tbl_lst)
# Generate csv tables:
- csv_file = "{0}.csv".format(table["output-file"])
- with open(csv_file, "w") as file_handler:
+ csv_file = f"{table[u'output-file']}.csv"
+ with open(csv_file, u"w") as file_handler:
file_handler.write(header_str)
for test in tbl_lst:
- file_handler.write(",".join([str(item) for item in test]) + "\n")
+ file_handler.write(u",".join([str(item) for item in test]) + u"\n")
- txt_file_name = "{0}.txt".format(table["output-file"])
+ txt_file_name = f"{table[u'output-file']}.txt"
convert_csv_to_pretty_txt(csv_file, txt_file_name)
if footnote:
- with open(txt_file_name, 'a') as txt_file:
+ with open(txt_file_name, u'a') as txt_file:
txt_file.writelines([
- "\nFootnotes:\n",
- "[1] CSIT-1908 changed test methodology of dot1q tests in "
- "2-node testbeds, dot1q encapsulation is now used on both "
- "links of SUT.\n",
- " Previously dot1q was used only on a single link with the "
- "other link carrying untagged Ethernet frames. This changes "
- "results\n",
- " in slightly lower throughput in CSIT-1908 for these "
- "tests. See release notes."
+ u"\nFootnotes:\n",
+ u"[1] CSIT-1908 changed test methodology of dot1q tests in "
+ u"2-node testbeds, dot1q encapsulation is now used on both "
+ u"links of SUT.\n",
+ u" Previously dot1q was used only on a single link with the "
+ u"other link carrying untagged Ethernet frames. This changes "
+ u"results\n",
+ u" in slightly lower throughput in CSIT-1908 for these "
+ u"tests. See release notes."
])
# Generate html table:
- _tpc_generate_html_table(header, tbl_lst,
- "{0}.html".format(table["output-file"]))
+ _tpc_generate_html_table(header, tbl_lst, f"{table[u'output-file']}.html")
def table_nics_comparison(table, input_data):
@@ -827,83 +893,82 @@ def table_nics_comparison(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
# Prepare the header of the tables
try:
- header = ["Test case", ]
+ header = [u"Test case", ]
- if table["include-tests"] == "MRR":
- hdr_param = "Rec Rate"
+ if table[u"include-tests"] == u"MRR":
+ hdr_param = u"Rec Rate"
else:
- hdr_param = "Thput"
+ hdr_param = u"Thput"
header.extend(
- ["{0} {1} [Mpps]".format(table["reference"]["title"], hdr_param),
- "{0} Stdev [Mpps]".format(table["reference"]["title"]),
- "{0} {1} [Mpps]".format(table["compare"]["title"], hdr_param),
- "{0} Stdev [Mpps]".format(table["compare"]["title"]),
- "Delta [%]"])
- header_str = ",".join(header) + "\n"
+ [
+ f"{table[u'reference'][u'title']} {hdr_param} [Mpps]",
+ f"{table[u'reference'][u'title']} Stdev [Mpps]",
+ f"{table[u'compare'][u'title']} {hdr_param} [Mpps]",
+ f"{table[u'compare'][u'title']} Stdev [Mpps]",
+ u"Delta [%]"
+ ]
+ )
+
except (AttributeError, KeyError) as err:
- logging.error("The model is invalid, missing parameter: {0}".
- format(err))
+ logging.error(f"The model is invalid, missing parameter: {repr(err)}")
return
# Prepare data to the table:
tbl_dict = dict()
- for job, builds in table["data"].items():
+ for job, builds in table[u"data"].items():
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- tst_name_mod = tst_name.replace("-ndrpdrdisc", "").\
- replace("-ndrpdr", "").replace("-pdrdisc", "").\
- replace("-ndrdisc", "").replace("-pdr", "").\
- replace("-ndr", "").\
- replace("1t1c", "1c").replace("2t1c", "1c").\
- replace("2t2c", "2c").replace("4t2c", "2c").\
- replace("4t4c", "4c").replace("8t4c", "4c")
- tst_name_mod = re.sub(REGEX_NIC, "", tst_name_mod)
+ for tst_name, tst_data in data[job][str(build)].items():
+ tst_name_mod = _tpc_modify_test_name(tst_name)
if tbl_dict.get(tst_name_mod, None) is None:
- name = "-".join(tst_data["name"].split("-")[:-1])
- tbl_dict[tst_name_mod] = {"name": name,
- "ref-data": list(),
- "cmp-data": list()}
+ name = u"-".join(tst_data[u"name"].split(u"-")[:-1])
+ tbl_dict[tst_name_mod] = {
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
+ }
try:
- if table["include-tests"] == "MRR":
- result = tst_data["result"]["receive-rate"] # .avg
- elif table["include-tests"] == "PDR":
- result = tst_data["throughput"]["PDR"]["LOWER"]
- elif table["include-tests"] == "NDR":
- result = tst_data["throughput"]["NDR"]["LOWER"]
+ result = None
+ if table[u"include-tests"] == u"MRR":
+ result = tst_data[u"result"][u"receive-rate"]
+ elif table[u"include-tests"] == u"PDR":
+ result = tst_data[u"throughput"][u"PDR"][u"LOWER"]
+ elif table[u"include-tests"] == u"NDR":
+ result = tst_data[u"throughput"][u"NDR"][u"LOWER"]
else:
- result = None
+ continue
- if result:
- if table["reference"]["nic"] in tst_data["tags"]:
- tbl_dict[tst_name_mod]["ref-data"].append(result)
- elif table["compare"]["nic"] in tst_data["tags"]:
- tbl_dict[tst_name_mod]["cmp-data"].append(result)
+ if result and \
+ table[u"reference"][u"nic"] in tst_data[u"tags"]:
+ tbl_dict[tst_name_mod][u"ref-data"].append(result)
+ elif result and \
+ table[u"compare"][u"nic"] in tst_data[u"tags"]:
+ tbl_dict[tst_name_mod][u"cmp-data"].append(result)
except (TypeError, KeyError) as err:
- logging.debug("No data for {0}".format(tst_name))
- logging.debug(repr(err))
+ logging.debug(f"No data for {tst_name}\n{repr(err)}")
# No data in output.xml for this test
tbl_lst = list()
- for tst_name in tbl_dict.keys():
- item = [tbl_dict[tst_name]["name"], ]
- data_t = tbl_dict[tst_name]["ref-data"]
+ for tst_name in tbl_dict:
+ item = [tbl_dict[tst_name][u"name"], ]
+ data_t = tbl_dict[tst_name][u"ref-data"]
if data_t:
item.append(round(mean(data_t) / 1000000, 2))
item.append(round(stdev(data_t) / 1000000, 2))
else:
item.extend([None, None])
- data_t = tbl_dict[tst_name]["cmp-data"]
+ data_t = tbl_dict[tst_name][u"cmp-data"]
if data_t:
item.append(round(mean(data_t) / 1000000, 2))
item.append(round(stdev(data_t) / 1000000, 2))
@@ -918,17 +983,16 @@ def table_nics_comparison(table, input_data):
tbl_lst.sort(key=lambda rel: rel[-1], reverse=True)
# Generate csv tables:
- csv_file = "{0}.csv".format(table["output-file"])
- with open(csv_file, "w") as file_handler:
- file_handler.write(header_str)
+ with open(f"{table[u'output-file']}.csv", u"w") as file_handler:
+ file_handler.write(u",".join(header) + u"\n")
for test in tbl_lst:
- file_handler.write(",".join([str(item) for item in test]) + "\n")
+ file_handler.write(u",".join([str(item) for item in test]) + u"\n")
- convert_csv_to_pretty_txt(csv_file, "{0}.txt".format(table["output-file"]))
+ convert_csv_to_pretty_txt(f"{table[u'output-file']}.csv",
+ f"{table[u'output-file']}.txt")
# Generate html table:
- _tpc_generate_html_table(header, tbl_lst,
- "{0}.html".format(table["output-file"]))
+ _tpc_generate_html_table(header, tbl_lst, f"{table[u'output-file']}.html")
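The same three-way branch on include-tests appears in every table builder in this file; a compact sketch of the selection it performs (the helper name and the sample dictionary are hypothetical, the key layout follows the hunks above):

def _select_result(tst_data, include_tests):
    if include_tests == "MRR":
        return tst_data["result"]["receive-rate"]
    if include_tests in ("NDR", "PDR"):
        return tst_data["throughput"][include_tests]["LOWER"]
    return None

sample = {"throughput": {"NDR": {"LOWER": 11.8e6}, "PDR": {"LOWER": 12.4e6}}}
print(_select_result(sample, "PDR"))  # 12400000.0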
def table_soak_vs_ndr(table, input_data):
@@ -941,80 +1005,87 @@ def table_soak_vs_ndr(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
# Prepare the header of the table
try:
header = [
- "Test case",
- "{0} Thput [Mpps]".format(table["reference"]["title"]),
- "{0} Stdev [Mpps]".format(table["reference"]["title"]),
- "{0} Thput [Mpps]".format(table["compare"]["title"]),
- "{0} Stdev [Mpps]".format(table["compare"]["title"]),
- "Delta [%]", "Stdev of delta [%]"]
- header_str = ",".join(header) + "\n"
+ u"Test case",
+ f"{table[u'reference'][u'title']} Thput [Mpps]",
+ f"{table[u'reference'][u'title']} Stdev [Mpps]",
+ f"{table[u'compare'][u'title']} Thput [Mpps]",
+ f"{table[u'compare'][u'title']} Stdev [Mpps]",
+ u"Delta [%]", u"Stdev of delta [%]"
+ ]
+ header_str = u",".join(header) + u"\n"
except (AttributeError, KeyError) as err:
- logging.error("The model is invalid, missing parameter: {0}".
- format(err))
+ logging.error(f"The model is invalid, missing parameter: {repr(err)}")
return
# Create a list of available SOAK test results:
tbl_dict = dict()
- for job, builds in table["compare"]["data"].items():
+ for job, builds in table[u"compare"][u"data"].items():
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- if tst_data["type"] == "SOAK":
- tst_name_mod = tst_name.replace("-soak", "")
+ for tst_name, tst_data in data[job][str(build)].items():
+ if tst_data[u"type"] == u"SOAK":
+ tst_name_mod = tst_name.replace(u"-soak", u"")
if tbl_dict.get(tst_name_mod, None) is None:
- groups = re.search(REGEX_NIC, tst_data["parent"])
- nic = groups.group(0) if groups else ""
- name = "{0}-{1}".format(nic, "-".join(tst_data["name"].
- split("-")[:-1]))
+ groups = re.search(REGEX_NIC, tst_data[u"parent"])
+ nic = groups.group(0) if groups else u""
+ name = (
+ f"{nic}-"
+ f"{u'-'.join(tst_data[u'name'].split(u'-')[:-1])}"
+ )
tbl_dict[tst_name_mod] = {
- "name": name,
- "ref-data": list(),
- "cmp-data": list()
+ u"name": name,
+ u"ref-data": list(),
+ u"cmp-data": list()
}
try:
- tbl_dict[tst_name_mod]["cmp-data"].append(
- tst_data["throughput"]["LOWER"])
+ tbl_dict[tst_name_mod][u"cmp-data"].append(
+ tst_data[u"throughput"][u"LOWER"])
except (KeyError, TypeError):
pass
tests_lst = tbl_dict.keys()
# Add corresponding NDR test results:
- for job, builds in table["reference"]["data"].items():
+ for job, builds in table[u"reference"][u"data"].items():
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- tst_name_mod = tst_name.replace("-ndrpdr", "").\
- replace("-mrr", "")
- if tst_name_mod in tests_lst:
- try:
- if tst_data["type"] in ("NDRPDR", "MRR", "BMRR"):
- if table["include-tests"] == "MRR":
- result = tst_data["result"]["receive-rate"]
- elif table["include-tests"] == "PDR":
- result = tst_data["throughput"]["PDR"]["LOWER"]
- elif table["include-tests"] == "NDR":
- result = tst_data["throughput"]["NDR"]["LOWER"]
- else:
- result = None
- if result is not None:
- tbl_dict[tst_name_mod]["ref-data"].append(
- result)
- except (KeyError, TypeError):
+ for tst_name, tst_data in data[job][str(build)].items():
+ tst_name_mod = tst_name.replace(u"-ndrpdr", u"").\
+ replace(u"-mrr", u"")
+ if tst_name_mod not in tests_lst:
+ continue
+ try:
+ if tst_data[u"type"] not in (u"NDRPDR", u"MRR", u"BMRR"):
continue
+ if table[u"include-tests"] == u"MRR":
+ result = tst_data[u"result"][u"receive-rate"]
+ elif table[u"include-tests"] == u"PDR":
+ result = \
+ tst_data[u"throughput"][u"PDR"][u"LOWER"]
+ elif table[u"include-tests"] == u"NDR":
+ result = \
+ tst_data[u"throughput"][u"NDR"][u"LOWER"]
+ else:
+ result = None
+ if result is not None:
+ tbl_dict[tst_name_mod][u"ref-data"].append(
+ result)
+ except (KeyError, TypeError):
+ continue
tbl_lst = list()
- for tst_name in tbl_dict.keys():
- item = [tbl_dict[tst_name]["name"], ]
- data_r = tbl_dict[tst_name]["ref-data"]
+ for tst_name in tbl_dict:
+ item = [tbl_dict[tst_name][u"name"], ]
+ data_r = tbl_dict[tst_name][u"ref-data"]
if data_r:
data_r_mean = mean(data_r)
item.append(round(data_r_mean / 1000000, 2))
@@ -1024,7 +1095,7 @@ def table_soak_vs_ndr(table, input_data):
data_r_mean = None
data_r_stdev = None
item.extend([None, None])
- data_c = tbl_dict[tst_name]["cmp-data"]
+ data_c = tbl_dict[tst_name][u"cmp-data"]
if data_c:
data_c_mean = mean(data_c)
item.append(round(data_c_mean / 1000000, 2))
@@ -1045,22 +1116,21 @@ def table_soak_vs_ndr(table, input_data):
tbl_lst.sort(key=lambda rel: rel[-1], reverse=True)
# Generate csv tables:
- csv_file = "{0}.csv".format(table["output-file"])
- with open(csv_file, "w") as file_handler:
+ csv_file = f"{table[u'output-file']}.csv"
+ with open(csv_file, u"w") as file_handler:
file_handler.write(header_str)
for test in tbl_lst:
- file_handler.write(",".join([str(item) for item in test]) + "\n")
+ file_handler.write(u",".join([str(item) for item in test]) + u"\n")
- convert_csv_to_pretty_txt(csv_file, "{0}.txt".format(table["output-file"]))
+ convert_csv_to_pretty_txt(csv_file, f"{table[u'output-file']}.txt")
# Generate html table:
- _tpc_generate_html_table(header, tbl_lst,
- "{0}.html".format(table["output-file"]))
+ _tpc_generate_html_table(header, tbl_lst, f"{table[u'output-file']}.html")
-def table_performance_trending_dashboard(table, input_data):
+def table_perf_trending_dash(table, input_data):
"""Generate the table(s) with algorithm:
- table_performance_trending_dashboard
+ table_perf_trending_dash
specified in the specification file.
:param table: Table to generate.
@@ -1069,55 +1139,58 @@ def table_performance_trending_dashboard(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
# Prepare the header of the tables
- header = ["Test Case",
- "Trend [Mpps]",
- "Short-Term Change [%]",
- "Long-Term Change [%]",
- "Regressions [#]",
- "Progressions [#]"
- ]
- header_str = ",".join(header) + "\n"
+ header = [
+ u"Test Case",
+ u"Trend [Mpps]",
+ u"Short-Term Change [%]",
+ u"Long-Term Change [%]",
+ u"Regressions [#]",
+ u"Progressions [#]"
+ ]
+ header_str = u",".join(header) + u"\n"
# Prepare data to the table:
tbl_dict = dict()
- for job, builds in table["data"].items():
+ for job, builds in table[u"data"].items():
for build in builds:
- for tst_name, tst_data in data[job][str(build)].iteritems():
- if tst_name.lower() in table.get("ignore-list", list()):
+ for tst_name, tst_data in data[job][str(build)].items():
+ if tst_name.lower() in table.get(u"ignore-list", list()):
continue
if tbl_dict.get(tst_name, None) is None:
- groups = re.search(REGEX_NIC, tst_data["parent"])
+ groups = re.search(REGEX_NIC, tst_data[u"parent"])
if not groups:
continue
nic = groups.group(0)
tbl_dict[tst_name] = {
- "name": "{0}-{1}".format(nic, tst_data["name"]),
- "data": OrderedDict()}
+ u"name": f"{nic}-{tst_data[u'name']}",
+ u"data": OrderedDict()
+ }
try:
- tbl_dict[tst_name]["data"][str(build)] = \
- tst_data["result"]["receive-rate"]
+ tbl_dict[tst_name][u"data"][str(build)] = \
+ tst_data[u"result"][u"receive-rate"]
except (TypeError, KeyError):
pass # No data in output.xml for this test
tbl_lst = list()
- for tst_name in tbl_dict.keys():
- data_t = tbl_dict[tst_name]["data"]
+ for tst_name in tbl_dict:
+ data_t = tbl_dict[tst_name][u"data"]
if len(data_t) < 2:
continue
classification_lst, avgs = classify_anomalies(data_t)
- win_size = min(len(data_t), table["window"])
- long_win_size = min(len(data_t), table["long-trend-window"])
+ win_size = min(len(data_t), table[u"window"])
+ long_win_size = min(len(data_t), table[u"long-trend-window"])
try:
max_long_avg = max(
@@ -1147,34 +1220,33 @@ def table_performance_trending_dashboard(table, input_data):
isnan(rel_change_long):
continue
tbl_lst.append(
- [tbl_dict[tst_name]["name"],
+ [tbl_dict[tst_name][u"name"],
round(last_avg / 1000000, 2),
rel_change_last,
rel_change_long,
- classification_lst[-win_size:].count("regression"),
- classification_lst[-win_size:].count("progression")])
+ classification_lst[-win_size:].count(u"regression"),
+ classification_lst[-win_size:].count(u"progression")])
tbl_lst.sort(key=lambda rel: rel[0])
tbl_sorted = list()
- for nrr in range(table["window"], -1, -1):
+ for nrr in range(table[u"window"], -1, -1):
tbl_reg = [item for item in tbl_lst if item[4] == nrr]
- for nrp in range(table["window"], -1, -1):
+ for nrp in range(table[u"window"], -1, -1):
tbl_out = [item for item in tbl_reg if item[5] == nrp]
tbl_out.sort(key=lambda rel: rel[2])
tbl_sorted.extend(tbl_out)
- file_name = "{0}{1}".format(table["output-file"], table["output-file-ext"])
+ file_name = f"{table[u'output-file']}{table[u'output-file-ext']}"
- logging.info(" Writing file: '{0}'".format(file_name))
- with open(file_name, "w") as file_handler:
+ logging.info(f" Writing file: {file_name}")
+ with open(file_name, u"w") as file_handler:
file_handler.write(header_str)
for test in tbl_sorted:
- file_handler.write(",".join([str(item) for item in test]) + '\n')
+ file_handler.write(u",".join([str(item) for item in test]) + u'\n')
- txt_file_name = "{0}.txt".format(table["output-file"])
- logging.info(" Writing file: '{0}'".format(txt_file_name))
- convert_csv_to_pretty_txt(file_name, txt_file_name)
+ logging.info(f" Writing file: {table[u'output-file']}.txt")
+ convert_csv_to_pretty_txt(file_name, f"{table[u'output-file']}.txt")
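Only the tail of the anomaly classification inside the configured window feeds the Regressions/Progressions columns; a minimal sketch with a hypothetical classification list (normally produced by classify_anomalies):

classification_lst = ["normal", "regression", "normal", "progression",
                      "normal", "regression"]
win_size = 4                           # min(len(data_t), table["window"])
recent = classification_lst[-win_size:]
print(recent.count("regression"), recent.count("progression"))  # 1 1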
def _generate_url(base, testbed, test_name):
@@ -1192,145 +1264,144 @@ def _generate_url(base, testbed, test_name):
"""
url = base
- file_name = ""
- anchor = ".html#"
- feature = ""
-
- if "lbdpdk" in test_name or "lbvpp" in test_name:
- file_name = "link_bonding"
-
- elif "114b" in test_name and "vhost" in test_name:
- file_name = "vts"
-
- elif "testpmd" in test_name or "l3fwd" in test_name:
- file_name = "dpdk"
-
- elif "memif" in test_name:
- file_name = "container_memif"
- feature = "-base"
-
- elif "srv6" in test_name:
- file_name = "srv6"
-
- elif "vhost" in test_name:
- if "l2xcbase" in test_name or "l2bdbasemaclrn" in test_name:
- file_name = "vm_vhost_l2"
- if "114b" in test_name:
- feature = ""
- elif "l2xcbase" in test_name and "x520" in test_name:
- feature = "-base-l2xc"
- elif "l2bdbasemaclrn" in test_name and "x520" in test_name:
- feature = "-base-l2bd"
+ file_name = u""
+ anchor = u".html#"
+ feature = u""
+
+ if u"lbdpdk" in test_name or u"lbvpp" in test_name:
+ file_name = u"link_bonding"
+
+ elif u"114b" in test_name and u"vhost" in test_name:
+ file_name = u"vts"
+
+ elif u"testpmd" in test_name or u"l3fwd" in test_name:
+ file_name = u"dpdk"
+
+ elif u"memif" in test_name:
+ file_name = u"container_memif"
+ feature = u"-base"
+
+ elif u"srv6" in test_name:
+ file_name = u"srv6"
+
+ elif u"vhost" in test_name:
+ if u"l2xcbase" in test_name or u"l2bdbasemaclrn" in test_name:
+ file_name = u"vm_vhost_l2"
+ if u"114b" in test_name:
+ feature = u""
+ elif u"l2xcbase" in test_name and u"x520" in test_name:
+ feature = u"-base-l2xc"
+ elif u"l2bdbasemaclrn" in test_name and u"x520" in test_name:
+ feature = u"-base-l2bd"
else:
- feature = "-base"
- elif "ip4base" in test_name:
- file_name = "vm_vhost_ip4"
- feature = "-base"
-
- elif "ipsecbasetnlsw" in test_name:
- file_name = "ipsecsw"
- feature = "-base-scale"
-
- elif "ipsec" in test_name:
- file_name = "ipsec"
- feature = "-base-scale"
- if "hw-" in test_name:
- file_name = "ipsechw"
- elif "sw-" in test_name:
- file_name = "ipsecsw"
- if "-int-" in test_name:
- feature = "-base-scale-int"
- elif "tnl" in test_name:
- feature = "-base-scale-tnl"
-
- elif "ethip4lispip" in test_name or "ethip4vxlan" in test_name:
- file_name = "ip4_tunnels"
- feature = "-base"
-
- elif "ip4base" in test_name or "ip4scale" in test_name:
- file_name = "ip4"
- if "xl710" in test_name:
- feature = "-base-scale-features"
- elif "iacl" in test_name:
- feature = "-features-iacl"
- elif "oacl" in test_name:
- feature = "-features-oacl"
- elif "snat" in test_name or "cop" in test_name:
- feature = "-features"
+ feature = u"-base"
+ elif u"ip4base" in test_name:
+ file_name = u"vm_vhost_ip4"
+ feature = u"-base"
+
+ elif u"ipsecbasetnlsw" in test_name:
+ file_name = u"ipsecsw"
+ feature = u"-base-scale"
+
+ elif u"ipsec" in test_name:
+ file_name = u"ipsec"
+ feature = u"-base-scale"
+ if u"hw-" in test_name:
+ file_name = u"ipsechw"
+ elif u"sw-" in test_name:
+ file_name = u"ipsecsw"
+ if u"-int-" in test_name:
+ feature = u"-base-scale-int"
+ elif u"tnl" in test_name:
+ feature = u"-base-scale-tnl"
+
+ elif u"ethip4lispip" in test_name or u"ethip4vxlan" in test_name:
+ file_name = u"ip4_tunnels"
+ feature = u"-base"
+
+ elif u"ip4base" in test_name or u"ip4scale" in test_name:
+ file_name = u"ip4"
+ if u"xl710" in test_name:
+ feature = u"-base-scale-features"
+ elif u"iacl" in test_name:
+ feature = u"-features-iacl"
+ elif u"oacl" in test_name:
+ feature = u"-features-oacl"
+ elif u"snat" in test_name or u"cop" in test_name:
+ feature = u"-features"
else:
- feature = "-base-scale"
-
- elif "ip6base" in test_name or "ip6scale" in test_name:
- file_name = "ip6"
- feature = "-base-scale"
-
- elif "l2xcbase" in test_name or "l2xcscale" in test_name \
- or "l2bdbasemaclrn" in test_name or "l2bdscale" in test_name \
- or "l2dbbasemaclrn" in test_name or "l2dbscale" in test_name:
- file_name = "l2"
- if "macip" in test_name:
- feature = "-features-macip"
- elif "iacl" in test_name:
- feature = "-features-iacl"
- elif "oacl" in test_name:
- feature = "-features-oacl"
+ feature = u"-base-scale"
+
+ elif u"ip6base" in test_name or u"ip6scale" in test_name:
+ file_name = u"ip6"
+ feature = u"-base-scale"
+
+ elif u"l2xcbase" in test_name or u"l2xcscale" in test_name \
+ or u"l2bdbasemaclrn" in test_name or u"l2bdscale" in test_name:
+ file_name = u"l2"
+ if u"macip" in test_name:
+ feature = u"-features-macip"
+ elif u"iacl" in test_name:
+ feature = u"-features-iacl"
+ elif u"oacl" in test_name:
+ feature = u"-features-oacl"
else:
- feature = "-base-scale"
-
- if "x520" in test_name:
- nic = "x520-"
- elif "x710" in test_name:
- nic = "x710-"
- elif "xl710" in test_name:
- nic = "xl710-"
- elif "xxv710" in test_name:
- nic = "xxv710-"
- elif "vic1227" in test_name:
- nic = "vic1227-"
- elif "vic1385" in test_name:
- nic = "vic1385-"
- elif "x553" in test_name:
- nic = "x553-"
+ feature = u"-base-scale"
+
+ if u"x520" in test_name:
+ nic = u"x520-"
+ elif u"x710" in test_name:
+ nic = u"x710-"
+ elif u"xl710" in test_name:
+ nic = u"xl710-"
+ elif u"xxv710" in test_name:
+ nic = u"xxv710-"
+ elif u"vic1227" in test_name:
+ nic = u"vic1227-"
+ elif u"vic1385" in test_name:
+ nic = u"vic1385-"
+ elif u"x553" in test_name:
+ nic = u"x553-"
else:
- nic = ""
+ nic = u""
anchor += nic
- if "64b" in test_name:
- framesize = "64b"
- elif "78b" in test_name:
- framesize = "78b"
- elif "imix" in test_name:
- framesize = "imix"
- elif "9000b" in test_name:
- framesize = "9000b"
- elif "1518b" in test_name:
- framesize = "1518b"
- elif "114b" in test_name:
- framesize = "114b"
+ if u"64b" in test_name:
+ framesize = u"64b"
+ elif u"78b" in test_name:
+ framesize = u"78b"
+ elif u"imix" in test_name:
+ framesize = u"imix"
+ elif u"9000b" in test_name:
+ framesize = u"9000b"
+ elif u"1518b" in test_name:
+ framesize = u"1518b"
+ elif u"114b" in test_name:
+ framesize = u"114b"
else:
- framesize = ""
- anchor += framesize + '-'
-
- if "1t1c" in test_name:
- anchor += "1t1c"
- elif "2t2c" in test_name:
- anchor += "2t2c"
- elif "4t4c" in test_name:
- anchor += "4t4c"
- elif "2t1c" in test_name:
- anchor += "2t1c"
- elif "4t2c" in test_name:
- anchor += "4t2c"
- elif "8t4c" in test_name:
- anchor += "8t4c"
-
- return url + file_name + '-' + testbed + '-' + nic + framesize + \
- feature.replace("-int", "").replace("-tnl", "") + anchor + feature
-
-
-def table_performance_trending_dashboard_html(table, input_data):
+ framesize = u""
+ anchor += framesize + u"-"
+
+ if u"1t1c" in test_name:
+ anchor += u"1t1c"
+ elif u"2t2c" in test_name:
+ anchor += u"2t2c"
+ elif u"4t4c" in test_name:
+ anchor += u"4t4c"
+ elif u"2t1c" in test_name:
+ anchor += u"2t1c"
+ elif u"4t2c" in test_name:
+ anchor += u"4t2c"
+ elif u"8t4c" in test_name:
+ anchor += u"8t4c"
+
+ return url + file_name + u"-" + testbed + u"-" + nic + framesize + \
+ feature.replace("-int", u"").replace("-tnl", u"") + anchor + feature
+
+
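A worked trace of the return expression above for a hypothetical test name and testbed, written out as a self-contained snippet so the pieces are visible:

base, testbed = "../trending/", "3n-hsw"
# branches taken for "x710-64b-2t1c-eth-l2bdbasemaclrn-ndrpdr":
file_name, feature = "l2", "-base-scale"
nic, framesize = "x710-", "64b"
anchor = ".html#" + nic + framesize + "-" + "2t1c"
url = base + file_name + "-" + testbed + "-" + nic + framesize \
    + feature.replace("-int", "").replace("-tnl", "") + anchor + feature
print(url)
# ../trending/l2-3n-hsw-x710-64b-base-scale.html#x710-64b-2t1c-base-scale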
+def table_perf_trending_dash_html(table, input_data):
"""Generate the table(s) with algorithm:
- table_performance_trending_dashboard_html specified in the specification
+ table_perf_trending_dash_html specified in the specification
file.
:param table: Table to generate.
@@ -1339,70 +1410,97 @@ def table_performance_trending_dashboard_html(table, input_data):
:type input_data: InputData
"""
- testbed = table.get("testbed", None)
- if testbed is None:
- logging.error("The testbed is not defined for the table '{0}'.".
- format(table.get("title", "")))
+ _ = input_data
+
+ if not table.get(u"testbed", None):
+ logging.error(
+ f"The testbed is not defined for the table "
+ f"{table.get(u'title', u'')}."
+ )
return
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
try:
- with open(table["input-file"], 'rb') as csv_file:
- csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
- csv_lst = [item for item in csv_content]
+ with open(table[u"input-file"], u'rt') as csv_file:
+ csv_lst = list(csv.reader(csv_file, delimiter=u',', quotechar=u'"'))
except KeyError:
- logging.warning("The input file is not defined.")
+ logging.warning(u"The input file is not defined.")
return
except csv.Error as err:
- logging.warning("Not possible to process the file '{0}'.\n{1}".
- format(table["input-file"], err))
+ logging.warning(
+ f"Not possible to process the file {table[u'input-file']}.\n"
+ f"{repr(err)}"
+ )
return
# Table:
- dashboard = ET.Element("table", attrib=dict(width="100%", border='0'))
+ dashboard = ET.Element(u"table", attrib=dict(width=u"100%", border=u'0'))
# Table header:
- tr = ET.SubElement(dashboard, "tr", attrib=dict(bgcolor="#7eade7"))
+ trow = ET.SubElement(dashboard, u"tr", attrib=dict(bgcolor=u"#7eade7"))
for idx, item in enumerate(csv_lst[0]):
- alignment = "left" if idx == 0 else "center"
- th = ET.SubElement(tr, "th", attrib=dict(align=alignment))
- th.text = item
+ alignment = u"left" if idx == 0 else u"center"
+ thead = ET.SubElement(trow, u"th", attrib=dict(align=alignment))
+ thead.text = item
# Rows:
- colors = {"regression": ("#ffcccc", "#ff9999"),
- "progression": ("#c6ecc6", "#9fdf9f"),
- "normal": ("#e9f1fb", "#d4e4f7")}
+ colors = {
+ u"regression": (
+ u"#ffcccc",
+ u"#ff9999"
+ ),
+ u"progression": (
+ u"#c6ecc6",
+ u"#9fdf9f"
+ ),
+ u"normal": (
+ u"#e9f1fb",
+ u"#d4e4f7"
+ )
+ }
for r_idx, row in enumerate(csv_lst[1:]):
if int(row[4]):
- color = "regression"
+ color = u"regression"
elif int(row[5]):
- color = "progression"
+ color = u"progression"
else:
- color = "normal"
- background = colors[color][r_idx % 2]
- tr = ET.SubElement(dashboard, "tr", attrib=dict(bgcolor=background))
+ color = u"normal"
+ trow = ET.SubElement(
+ dashboard, u"tr", attrib=dict(bgcolor=colors[color][r_idx % 2])
+ )
# Columns:
for c_idx, item in enumerate(row):
- alignment = "left" if c_idx == 0 else "center"
- td = ET.SubElement(tr, "td", attrib=dict(align=alignment))
+ tdata = ET.SubElement(
+ trow,
+ u"td",
+ attrib=dict(align=u"left" if c_idx == 0 else u"center")
+ )
# Name:
if c_idx == 0:
- url = _generate_url("../trending/", testbed, item)
- ref = ET.SubElement(td, "a", attrib=dict(href=url))
+ ref = ET.SubElement(
+ tdata,
+ u"a",
+ attrib=dict(
+ href=_generate_url(
+ u"../trending/",
+ table.get(u"testbed", None),
+ item
+ )
+ )
+ )
ref.text = item
else:
- td.text = item
+ tdata.text = item
try:
- with open(table["output-file"], 'w') as html_file:
- logging.info(" Writing file: '{0}'".format(table["output-file"]))
- html_file.write(".. raw:: html\n\n\t")
- html_file.write(ET.tostring(dashboard))
- html_file.write("\n\t<p><br><br></p>\n")
+ with open(table[u"output-file"], u'w') as html_file:
+ logging.info(f" Writing file: {table[u'output-file']}")
+ html_file.write(u".. raw:: html\n\n\t")
+ html_file.write(str(ET.tostring(dashboard, encoding=u"unicode")))
+ html_file.write(u"\n\t<p><br><br></p>\n")
except KeyError:
- logging.warning("The output file is not defined.")
+ logging.warning(u"The output file is not defined.")
return
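A side note on the ElementTree serialization above: in Python 3, tostring() returns bytes by default, while encoding="unicode" yields str, which is what the .rst writer expects. A minimal sketch (element names mirror the table built above, the cell text is made up):

import xml.etree.ElementTree as ET

dashboard = ET.Element("table", attrib=dict(width="100%", border="0"))
trow = ET.SubElement(dashboard, "tr", attrib=dict(bgcolor="#7eade7"))
thead = ET.SubElement(trow, "th", attrib=dict(align="left"))
thead.text = "Test Case"

print(type(ET.tostring(dashboard)))                # <class 'bytes'>
print(ET.tostring(dashboard, encoding="unicode"))  # one-row HTML fragment as str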
@@ -1416,53 +1514,56 @@ def table_last_failed_tests(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
+
data = input_data.filter_data(table, continue_on_error=True)
if data is None or data.empty:
- logging.warn(" No data for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.warning(
+ f" No data for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
return
tbl_list = list()
- for job, builds in table["data"].items():
+ for job, builds in table[u"data"].items():
for build in builds:
build = str(build)
try:
- version = input_data.metadata(job, build).get("version", "")
+ version = input_data.metadata(job, build).get(u"version", u"")
except KeyError:
- logging.error("Data for {job}: {build} is not present.".
- format(job=job, build=build))
+ logging.error(f"Data for {job}: {build} is not present.")
return
tbl_list.append(build)
tbl_list.append(version)
failed_tests = list()
passed = 0
failed = 0
- for tst_name, tst_data in data[job][build].iteritems():
- if tst_data["status"] != "FAIL":
+ for tst_data in data[job][build].values:
+ if tst_data[u"status"] != u"FAIL":
passed += 1
continue
failed += 1
- groups = re.search(REGEX_NIC, tst_data["parent"])
+ groups = re.search(REGEX_NIC, tst_data[u"parent"])
if not groups:
continue
nic = groups.group(0)
- failed_tests.append("{0}-{1}".format(nic, tst_data["name"]))
+ failed_tests.append(f"{nic}-{tst_data[u'name']}")
tbl_list.append(str(passed))
tbl_list.append(str(failed))
tbl_list.extend(failed_tests)
- file_name = "{0}{1}".format(table["output-file"], table["output-file-ext"])
- logging.info(" Writing file: '{0}'".format(file_name))
- with open(file_name, "w") as file_handler:
+ file_name = f"{table[u'output-file']}{table[u'output-file-ext']}"
+ logging.info(f" Writing file: {file_name}")
+ with open(file_name, u"w") as file_handler:
for test in tbl_list:
- file_handler.write(test + '\n')
+ file_handler.write(test + u'\n')
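The file written above is a flat per-build summary, one value per line; a sketch with hypothetical values and output path:

build, version = "1067", "20.01-rc0~400"
passed, failed = 220, 2
failed_tests = ["x553-64b-1t1c-ethip4-ip4base-mrr",
                "x553-64b-1t1c-eth-l2bdbasemaclrn-mrr"]

tbl_list = [build, version, str(passed), str(failed)] + failed_tests
with open("last_failed_tests.txt", "w") as file_handler:
    for line in tbl_list:
        file_handler.write(line + "\n")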
def table_failed_tests(table, input_data):
@@ -1475,95 +1576,103 @@ def table_failed_tests(table, input_data):
:type input_data: InputData
"""
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
# Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
+ logging.info(
+ f" Creating the data set for the {table.get(u'type', u'')} "
+ f"{table.get(u'title', u'')}."
+ )
data = input_data.filter_data(table, continue_on_error=True)
# Prepare the header of the tables
- header = ["Test Case",
- "Failures [#]",
- "Last Failure [Time]",
- "Last Failure [VPP-Build-Id]",
- "Last Failure [CSIT-Job-Build-Id]"]
+ header = [
+ u"Test Case",
+ u"Failures [#]",
+ u"Last Failure [Time]",
+ u"Last Failure [VPP-Build-Id]",
+ u"Last Failure [CSIT-Job-Build-Id]"
+ ]
# Generate the data for the table according to the model in the table
# specification
now = dt.utcnow()
- timeperiod = timedelta(int(table.get("window", 7)))
+ timeperiod = timedelta(int(table.get(u"window", 7)))
tbl_dict = dict()
- for job, builds in table["data"].items():
+ for job, builds in table[u"data"].items():
for build in builds:
build = str(build)
- for tst_name, tst_data in data[job][build].iteritems():
- if tst_name.lower() in table.get("ignore-list", list()):
+ for tst_name, tst_data in data[job][build].items():
+ if tst_name.lower() in table.get(u"ignore-list", list()):
continue
if tbl_dict.get(tst_name, None) is None:
- groups = re.search(REGEX_NIC, tst_data["parent"])
+ groups = re.search(REGEX_NIC, tst_data[u"parent"])
if not groups:
continue
nic = groups.group(0)
tbl_dict[tst_name] = {
- "name": "{0}-{1}".format(nic, tst_data["name"]),
- "data": OrderedDict()}
+ u"name": f"{nic}-{tst_data[u'name']}",
+ u"data": OrderedDict()
+ }
try:
generated = input_data.metadata(job, build).\
- get("generated", "")
+ get(u"generated", u"")
if not generated:
continue
- then = dt.strptime(generated, "%Y%m%d %H:%M")
+ then = dt.strptime(generated, u"%Y%m%d %H:%M")
if (now - then) <= timeperiod:
- tbl_dict[tst_name]["data"][build] = (
- tst_data["status"],
+ tbl_dict[tst_name][u"data"][build] = (
+ tst_data[u"status"],
generated,
- input_data.metadata(job, build).get("version", ""),
- build)
+ input_data.metadata(job, build).get(u"version",
+ u""),
+ build
+ )
except (TypeError, KeyError) as err:
- logging.warning("tst_name: {} - err: {}".
- format(tst_name, repr(err)))
+ logging.warning(f"tst_name: {tst_name} - err: {repr(err)}")
max_fails = 0
tbl_lst = list()
for tst_data in tbl_dict.values():
fails_nr = 0
- fails_last_date = ""
- fails_last_vpp = ""
- fails_last_csit = ""
- for val in tst_data["data"].values():
- if val[0] == "FAIL":
+ fails_last_date = u""
+ fails_last_vpp = u""
+ fails_last_csit = u""
+ for val in tst_data[u"data"].values():
+ if val[0] == u"FAIL":
fails_nr += 1
fails_last_date = val[1]
fails_last_vpp = val[2]
fails_last_csit = val[3]
if fails_nr:
max_fails = fails_nr if fails_nr > max_fails else max_fails
- tbl_lst.append([tst_data["name"],
- fails_nr,
- fails_last_date,
- fails_last_vpp,
- "mrr-daily-build-{0}".format(fails_last_csit)])
+ tbl_lst.append(
+ [
+ tst_data[u"name"],
+ fails_nr,
+ fails_last_date,
+ fails_last_vpp,
+ f"mrr-daily-build-{fails_last_csit}"
+ ]
+ )
tbl_lst.sort(key=lambda rel: rel[2], reverse=True)
tbl_sorted = list()
for nrf in range(max_fails, -1, -1):
tbl_fails = [item for item in tbl_lst if item[1] == nrf]
tbl_sorted.extend(tbl_fails)
- file_name = "{0}{1}".format(table["output-file"], table["output-file-ext"])
- logging.info(" Writing file: '{0}'".format(file_name))
- with open(file_name, "w") as file_handler:
- file_handler.write(",".join(header) + "\n")
+ file_name = f"{table[u'output-file']}{table[u'output-file-ext']}"
+ logging.info(f" Writing file: {file_name}")
+ with open(file_name, u"w") as file_handler:
+ file_handler.write(u",".join(header) + u"\n")
for test in tbl_sorted:
- file_handler.write(",".join([str(item) for item in test]) + '\n')
+ file_handler.write(u",".join([str(item) for item in test]) + u'\n')
- txt_file_name = "{0}.txt".format(table["output-file"])
- logging.info(" Writing file: '{0}'".format(txt_file_name))
- convert_csv_to_pretty_txt(file_name, txt_file_name)
+ logging.info(f" Writing file: {table[u'output-file']}.txt")
+ convert_csv_to_pretty_txt(file_name, f"{table[u'output-file']}.txt")
def table_failed_tests_html(table, input_data):
@@ -1576,60 +1685,77 @@ def table_failed_tests_html(table, input_data):
:type input_data: InputData
"""
- testbed = table.get("testbed", None)
- if testbed is None:
- logging.error("The testbed is not defined for the table '{0}'.".
- format(table.get("title", "")))
+ _ = input_data
+
+ if not table.get(u"testbed", None):
+ logging.error(
+ f"The testbed is not defined for the table "
+ f"{table.get(u'title', u'')}."
+ )
return
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
+ logging.info(f" Generating the table {table.get(u'title', u'')} ...")
try:
- with open(table["input-file"], 'rb') as csv_file:
- csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
- csv_lst = [item for item in csv_content]
+ with open(table[u"input-file"], u'rt') as csv_file:
+ csv_lst = list(csv.reader(csv_file, delimiter=u',', quotechar=u'"'))
except KeyError:
- logging.warning("The input file is not defined.")
+ logging.warning(u"The input file is not defined.")
return
except csv.Error as err:
- logging.warning("Not possible to process the file '{0}'.\n{1}".
- format(table["input-file"], err))
+ logging.warning(
+ f"Not possible to process the file {table[u'input-file']}.\n"
+ f"{repr(err)}"
+ )
return
# Table:
- failed_tests = ET.Element("table", attrib=dict(width="100%", border='0'))
+ failed_tests = ET.Element(u"table", attrib=dict(width=u"100%", border=u'0'))
# Table header:
- tr = ET.SubElement(failed_tests, "tr", attrib=dict(bgcolor="#7eade7"))
+ trow = ET.SubElement(failed_tests, u"tr", attrib=dict(bgcolor=u"#7eade7"))
for idx, item in enumerate(csv_lst[0]):
- alignment = "left" if idx == 0 else "center"
- th = ET.SubElement(tr, "th", attrib=dict(align=alignment))
- th.text = item
+ alignment = u"left" if idx == 0 else u"center"
+ thead = ET.SubElement(trow, u"th", attrib=dict(align=alignment))
+ thead.text = item
# Rows:
- colors = ("#e9f1fb", "#d4e4f7")
+ colors = (u"#e9f1fb", u"#d4e4f7")
for r_idx, row in enumerate(csv_lst[1:]):
background = colors[r_idx % 2]
- tr = ET.SubElement(failed_tests, "tr", attrib=dict(bgcolor=background))
+ trow = ET.SubElement(
+ failed_tests, u"tr", attrib=dict(bgcolor=background)
+ )
# Columns:
for c_idx, item in enumerate(row):
- alignment = "left" if c_idx == 0 else "center"
- td = ET.SubElement(tr, "td", attrib=dict(align=alignment))
+ tdata = ET.SubElement(
+ trow,
+ u"td",
+ attrib=dict(align=u"left" if c_idx == 0 else u"center")
+ )
# Name:
if c_idx == 0:
- url = _generate_url("../trending/", testbed, item)
- ref = ET.SubElement(td, "a", attrib=dict(href=url))
+ ref = ET.SubElement(
+ tdata,
+ u"a",
+ attrib=dict(
+ href=_generate_url(
+ u"../trending/",
+ table.get(u"testbed", None),
+ item
+ )
+ )
+ )
ref.text = item
else:
- td.text = item
+ tdata.text = item
try:
- with open(table["output-file"], 'w') as html_file:
- logging.info(" Writing file: '{0}'".format(table["output-file"]))
- html_file.write(".. raw:: html\n\n\t")
- html_file.write(ET.tostring(failed_tests))
- html_file.write("\n\t<p><br><br></p>\n")
+ with open(table[u"output-file"], u'w') as html_file:
+ logging.info(f" Writing file: {table[u'output-file']}")
+ html_file.write(u".. raw:: html\n\n\t")
+ html_file.write(str(ET.tostring(failed_tests, encoding=u"unicode")))
+ html_file.write(u"\n\t<p><br><br></p>\n")
except KeyError:
- logging.warning("The output file is not defined.")
+ logging.warning(u"The output file is not defined.")
return
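
The table_failed_tests_html() rewrite above keeps the original output pattern: build the table with xml.etree.ElementTree and embed the serialized markup in an rST file through the "raw" directive. A minimal sketch of that pattern follows; the file name and row data are illustrative only, not taken from the patch.

    import xml.etree.ElementTree as ET

    rows = [
        [u"Test Case", u"Failures [#]"],  # header row
        [u"64b-ip4base", u"3"]            # data row (made up)
    ]
    table = ET.Element(u"table", attrib=dict(width=u"100%", border=u"0"))
    for r_idx, row in enumerate(rows):
        trow = ET.SubElement(table, u"tr")
        for item in row:
            # Header cells in the first row, data cells afterwards.
            cell = ET.SubElement(trow, u"th" if r_idx == 0 else u"td")
            cell.text = item
    with open(u"failed_tests.rst", u"w") as rst_file:
        rst_file.write(u".. raw:: html\n\n\t")
        # encoding=u"unicode" makes tostring() return str instead of bytes
        # under Python 3, which is why the patch adds it.
        rst_file.write(str(ET.tostring(table, encoding=u"unicode")))
        rst_file.write(u"\n\t<p><br><br></p>\n")
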
diff --git a/resources/tools/presentation/input_data_files.py b/resources/tools/presentation/input_data_files.py
index 0a723cedc7..b1bb8cd20f 100644
--- a/resources/tools/presentation/input_data_files.py
+++ b/resources/tools/presentation/input_data_files.py
@@ -16,26 +16,27 @@ Download all data.
"""
import re
-import requests
import logging
from os import rename, mkdir
from os.path import join
+from http.client import responses
from zipfile import ZipFile, is_zipfile, BadZipfile
-from httplib import responses
-from requests.adapters import HTTPAdapter
-from requests.packages.urllib3.util.retry import Retry
-from requests import codes, RequestException, Timeout, TooManyRedirects, \
- HTTPError, ConnectionError
-from errors import PresentationError
+import requests
+
+from requests.adapters import HTTPAdapter, Retry
+from requests.exceptions import RequestException
+from requests import codes
+
+from pal_errors import PresentationError
# Chunk size used for file download
CHUNK_SIZE = 512
# Separator used in file names
-SEPARATOR = "__"
+SEPARATOR = u"__"
REGEX_RELEASE = re.compile(r'(\D*)(\d{4}|master)(\D*)')
@@ -81,79 +82,70 @@ def _download_file(url, file_name, log, arch=False):
)
adapter = HTTPAdapter(max_retries=retry)
session = requests.Session()
- session.mount('http://', adapter)
- session.mount('https://', adapter)
+ session.mount(u"http://", adapter)
+ session.mount(u"https://", adapter)
return session
success = False
session = None
try:
- log.append(("INFO", " Connecting to '{0}' ...".format(url)))
+ log.append((u"INFO", f" Connecting to {url} ..."))
session = requests_retry_session()
response = session.get(url, stream=True)
code = response.status_code
- log.append(("INFO", " {0}: {1}".format(code, responses[code])))
+ log.append((u"INFO", f" {code}: {responses[code]}"))
- if code != codes["OK"]:
+ if code != codes[u"OK"]:
if session:
session.close()
- url = url.replace("_info", "")
- log.append(("INFO", " Connecting to '{0}' ...".format(url)))
+ url = url.replace(u"_info", u"")
+ log.append((u"INFO", f" Connecting to {url} ..."))
session = requests_retry_session()
response = session.get(url, stream=True)
code = response.status_code
- log.append(("INFO", " {0}: {1}".format(code, responses[code])))
- if code != codes["OK"]:
+ log.append((u"INFO", f" {code}: {responses[code]}"))
+ if code != codes[u"OK"]:
return False, file_name
- file_name = file_name.replace("_info", "")
+ file_name = file_name.replace(u"_info", u"")
- dst_file_name = file_name.replace(".gz", "")
- log.append(("INFO", " Downloading the file '{0}' to '{1}' ...".
- format(url, dst_file_name)))
- with open(dst_file_name, "wb") as file_handle:
+ dst_file_name = file_name.replace(u".gz", u"")
+ log.append(
+ (u"INFO", f" Downloading the file {url} to {dst_file_name} ...")
+ )
+ with open(dst_file_name, u"wb") as file_handle:
for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
if chunk:
file_handle.write(chunk)
- if arch and ".gz" in file_name:
+ if arch and u".gz" in file_name:
if session:
session.close()
- log.append(("INFO", " Downloading the file '{0}' to '{1}' ...".
- format(url, file_name)))
+ log.append(
+ (u"INFO", f" Downloading the file {url} to {file_name} ...")
+ )
session = requests_retry_session()
response = session.get(url, stream=True)
- if response.status_code == codes["OK"]:
- with open(file_name, "wb") as file_handle:
+ if response.status_code == codes[u"OK"]:
+ with open(file_name, u"wb") as file_handle:
file_handle.write(response.raw.read())
else:
- log.append(("ERROR", "Not possible to download the file '{0}' "
- "to '{1}' ...".format(url, file_name)))
+ log.append(
+ (u"ERROR", f"Not possible to download the file {url} to "
+ f"{file_name} ...")
+ )
success = True
- except ConnectionError as err:
- log.append(("ERROR", "Not possible to connect to '{0}'.".format(url)))
- log.append(("DEBUG", repr(err)))
- except HTTPError as err:
- log.append(("ERROR", "Invalid HTTP response from '{0}'.".format(url)))
- log.append(("DEBUG", repr(err)))
- except TooManyRedirects as err:
- log.append(("ERROR", "Request exceeded the configured number "
- "of maximum re-directions."))
- log.append(("DEBUG", repr(err)))
- except Timeout as err:
- log.append(("ERROR", "Request timed out."))
- log.append(("DEBUG", repr(err)))
except RequestException as err:
- log.append(("ERROR", "Unexpected HTTP request exception."))
- log.append(("DEBUG", repr(err)))
+ log.append(
+ (u"ERROR", f"HTTP Request exception:\n{repr(err)}")
+ )
except (IOError, ValueError, KeyError) as err:
- log.append(("ERROR", "Download failed."))
- log.append(("DEBUG", repr(err)))
+ log.append((u"ERROR", f"Download failed.\n{repr(err)}"))
finally:
if session:
session.close()
- log.append(("INFO", " Download finished."))
+ log.append((u"INFO", u" Download finished."))
return success, file_name
@@ -170,39 +162,41 @@ def _unzip_file(spec, build, pid, log):
:rtype: bool
"""
- file_name = build["file-name"]
- if ".zip" in file_name:
- data_file = spec.input["zip-extract"]
+ file_name = build[u"file-name"]
+ if u".zip" in file_name:
+ data_file = spec.input[u"zip-extract"]
else:
- data_file = spec.input["extract"]
+ data_file = spec.input[u"extract"]
- directory = spec.environment["paths"]["DIR[WORKING,DATA]"]
+ directory = spec.environment[u"paths"][u"DIR[WORKING,DATA]"]
tmp_dir = join(directory, str(pid))
try:
mkdir(tmp_dir)
except OSError:
pass
- new_name = "{0}{1}{2}".format(file_name.rsplit('.')[-2],
- SEPARATOR,
- data_file.split("/")[-1])
+ new_name = \
+ f"{file_name.rsplit(u'.')[-2]}{SEPARATOR}{data_file.split(u'/')[-1]}"
- log.append(("INFO", " Unzipping: '{0}' from '{1}'.".
- format(data_file, file_name)))
+ log.append((u"INFO", f" Unzipping: {data_file} from {file_name}."))
try:
- with ZipFile(file_name, 'r') as zip_file:
+ with ZipFile(file_name, u'r') as zip_file:
zip_file.extract(data_file, tmp_dir)
- log.append(("INFO", " Renaming the file '{0}' to '{1}'".
- format(join(tmp_dir, data_file), new_name)))
+ log.append(
+ (u"INFO", f" Renaming the file {join(tmp_dir, data_file)} to "
+ f"{new_name}")
+ )
rename(join(tmp_dir, data_file), new_name)
- build["file-name"] = new_name
+ build[u"file-name"] = new_name
return True
except (BadZipfile, RuntimeError) as err:
- log.append(("ERROR", "Failed to unzip the file '{0}': {1}.".
- format(file_name, str(err))))
+ log.append(
+ (u"ERROR", f"Failed to unzip the file {file_name}: {repr(err)}.")
+ )
return False
except OSError as err:
- log.append(("ERROR", "Failed to rename the file '{0}': {1}.".
- format(data_file, str(err))))
+ log.append(
+ (u"ERROR", f"Failed to rename the file {data_file}: {repr(err)}.")
+ )
return False
@@ -225,67 +219,69 @@ def download_and_unzip_data_file(spec, job, build, pid, log):
# Try to download .gz from logs.fd.io
- file_name = spec.input["file-name"]
- url = "{0}/{1}".format(
- spec.environment["urls"]["URL[NEXUS,LOG]"],
- spec.input["download-path"].format(
- job=job, build=build["build"], filename=file_name))
- new_name = join(spec.environment["paths"]["DIR[WORKING,DATA]"],
- "{job}{sep}{build}{sep}{name}".format(
- job=job, sep=SEPARATOR, build=build["build"],
- name=file_name))
+ file_name = spec.input[u"file-name"]
+ url = u"{0}/{1}".format(
+ spec.environment[u'urls'][u'URL[NEXUS,LOG]'],
+ spec.input[u'download-path'].format(
+ job=job, build=build[u'build'], filename=file_name
+ )
+ )
+ new_name = join(
+ spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+ f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+ )
- logging.info("Trying to download {0}".format(url))
+ logging.info(f"Trying to download {url}")
- arch = True if spec.configuration.get("archive-inputs", True) else False
+ arch = bool(spec.configuration.get(u"archive-inputs", True))
success, downloaded_name = _download_file(url, new_name, log, arch=arch)
if not success:
# Try to download .gz from docs.fd.io
- file_name = spec.input["file-name"]
- url = "{0}/{1}".format(
- spec.environment["urls"]["URL[NEXUS,DOC]"],
- spec.input["download-path"].format(
- job=job, build=build["build"], filename=file_name))
- new_name = join(spec.environment["paths"]["DIR[WORKING,DATA]"],
- "{job}{sep}{build}{sep}{name}".format(
- job=job, sep=SEPARATOR, build=build["build"],
- name=file_name))
+ file_name = spec.input[u"file-name"]
+ url = u"{0}/{1}".format(
+ spec.environment[u"urls"][u"URL[NEXUS,DOC]"],
+ spec.input[u"download-path"].format(
+ job=job, build=build[u"build"], filename=file_name
+ )
+ )
+ new_name = join(
+ spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+ f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+ )
- logging.info("Downloading {0}".format(url))
+ logging.info(f"Downloading {url}")
- if spec.configuration.get("archive-inputs", True):
- arch = True
success, downloaded_name = _download_file(url, new_name, log, arch=arch)
if not success:
# Try to download .zip from docs.fd.io
- file_name = spec.input["zip-file-name"]
- new_name = join(spec.environment["paths"]["DIR[WORKING,DATA]"],
- "{job}{sep}{build}{sep}{name}".format(
- job=job, sep=SEPARATOR, build=build["build"],
- name=file_name))
+ file_name = spec.input[u"zip-file-name"]
+ new_name = join(
+ spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+ f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+ )
release = re.search(REGEX_RELEASE, job).group(2)
- for rls in (release, "master"):
- nexus_file_name = "{job}{sep}{build}{sep}{name}". \
- format(job=job, sep=SEPARATOR, build=build["build"],
- name=file_name)
+ for rls in (release, u"master"):
+ nexus_file_name = \
+ f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
try:
- rls = "rls{0}".format(int(rls))
+ rls = f"rls{int(rls)}"
except ValueError:
- # It is 'master'
+ # It is master
pass
- url = "{url}/{release}/{dir}/{file}". \
- format(url=spec.environment["urls"]["URL[NEXUS,DOC]"],
- release=rls,
- dir=spec.environment["urls"]["DIR[NEXUS,DOC]"],
- file=nexus_file_name)
+ url = (
+ f"{spec.environment[u'urls'][u'URL[NEXUS,DOC]']}/"
+ f"{rls}/"
+ f"{spec.environment[u'urls'][u'DIR[NEXUS,DOC]']}/"
+ f"{nexus_file_name}"
+ )
- logging.info("Downloading {0}".format(url))
+ logging.info(f"Downloading {url}")
success, downloaded_name = _download_file(url, new_name, log)
if success:
@@ -295,41 +291,40 @@ def download_and_unzip_data_file(spec, job, build, pid, log):
# Try to download .zip from jenkins.fd.io
- file_name = spec.input["zip-file-name"]
- download_path = spec.input["zip-download-path"]
- if job.startswith("csit-"):
- url = spec.environment["urls"]["URL[JENKINS,CSIT]"]
- elif job.startswith("hc2vpp-"):
- url = spec.environment["urls"]["URL[JENKINS,HC]"]
+ file_name = spec.input[u"zip-file-name"]
+ download_path = spec.input[u"zip-download-path"]
+ if job.startswith(u"csit-"):
+ url = spec.environment[u"urls"][u"URL[JENKINS,CSIT]"]
+ elif job.startswith(u"hc2vpp-"):
+ url = spec.environment[u"urls"][u"URL[JENKINS,HC]"]
else:
- raise PresentationError(
- "No url defined for the job '{}'.".format(job))
+ raise PresentationError(f"No url defined for the job {job}.")
full_name = download_path.format(
- job=job, build=build["build"], filename=file_name)
- url = "{0}/{1}".format(url, full_name)
- new_name = join(spec.environment["paths"]["DIR[WORKING,DATA]"],
- "{job}{sep}{build}{sep}{name}".
- format(job=job, sep=SEPARATOR, build=build["build"],
- name=file_name))
+ job=job, build=build[u"build"], filename=file_name
+ )
+ url = u"{0}/{1}".format(url, full_name)
+ new_name = join(
+ spec.environment[u"paths"][u"DIR[WORKING,DATA]"],
+ f"{job}{SEPARATOR}{build[u'build']}{SEPARATOR}{file_name}"
+ )
- logging.info("Downloading {0}".format(url))
+ logging.info(f"Downloading {url}")
success, downloaded_name = _download_file(url, new_name, log)
- if success and downloaded_name.endswith(".zip"):
+ if success and downloaded_name.endswith(u".zip"):
if not is_zipfile(downloaded_name):
- log.append(("ERROR",
- "Zip file '{0}' is corrupted.".format(new_name)))
+ log.append((u"ERROR", f"Zip file {new_name} is corrupted."))
success = False
if success:
- build["file-name"] = downloaded_name
+ build[u"file-name"] = downloaded_name
- if file_name.endswith(".gz"):
- build["file-name"] = downloaded_name[:-3]
+ if file_name.endswith(u".gz"):
+ build[u"file-name"] = downloaded_name[:-3]
- if downloaded_name.endswith(".zip"):
+ if downloaded_name.endswith(u".zip"):
success = _unzip_file(spec, build, pid, log)
return success
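
The reworked _download_file() above combines requests' Retry/HTTPAdapter mounting with a streamed, chunked write of the response body. A minimal, self-contained sketch of that combination; the retry settings, URL handling and file name here are assumed for illustration, not copied from the specification.

    import requests
    from requests.adapters import HTTPAdapter, Retry

    CHUNK_SIZE = 512

    def fetch(url, file_name):
        """Download url to file_name with retries, return True on success."""
        session = requests.Session()
        retry = Retry(
            total=3, backoff_factor=1, status_forcelist=(500, 502, 504)
        )
        adapter = HTTPAdapter(max_retries=retry)
        session.mount(u"http://", adapter)
        session.mount(u"https://", adapter)
        try:
            response = session.get(url, stream=True)
            if response.status_code != requests.codes[u"OK"]:
                return False
            with open(file_name, u"wb") as file_handle:
                for chunk in response.iter_content(chunk_size=CHUNK_SIZE):
                    if chunk:
                        file_handle.write(chunk)
            return True
        except requests.exceptions.RequestException:
            return False
        finally:
            session.close()
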
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index e48b271489..1177defe35 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -19,28 +19,29 @@
- filter the data using tags,
"""
-import copy
import re
+import copy
import resource
-import pandas as pd
import logging
-import prettytable
-from robot.api import ExecutionResult, ResultVisitor
-from robot import errors
from collections import OrderedDict
-from string import replace
from os import remove
from datetime import datetime as dt
from datetime import timedelta
from json import loads
+import prettytable
+import pandas as pd
+
+from robot.api import ExecutionResult, ResultVisitor
+from robot import errors
+
from resources.libraries.python import jumpavg
from input_data_files import download_and_unzip_data_file
# Separator used in file names
-SEPARATOR = "__"
+SEPARATOR = u"__"
class ExecutionChecker(ResultVisitor):
@@ -200,9 +201,6 @@ class ExecutionChecker(ResultVisitor):
.. note:: ID is the lowercase full path to the test.
"""
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- REGEX_RATE = re.compile(r'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)\s(\w+)')
-
REGEX_PLR_RATE = re.compile(r'PLRsearch lower bound::?\s(\d+.\d+).*\n'
r'PLRsearch upper bound::?\s(\d+.\d+)')
@@ -274,10 +272,10 @@ class ExecutionChecker(ResultVisitor):
# Ignore list
self._ignore = ignore
- # Number of VAT History messages found:
+ # Number of PAPI History messages found:
# 0 - no message
- # 1 - VAT History of DUT1
- # 2 - VAT History of DUT2
+ # 1 - PAPI History of DUT1
+ # 2 - PAPI History of DUT2
self._lookup_kw_nr = 0
self._conf_history_lookup_nr = 0
@@ -288,29 +286,30 @@ class ExecutionChecker(ResultVisitor):
# Test ID of currently processed test- the lowercase full path to the
# test
- self._test_ID = None
+ self._test_id = None
# The main data structure
self._data = {
- "metadata": OrderedDict(),
- "suites": OrderedDict(),
- "tests": OrderedDict()
+ u"metadata": OrderedDict(),
+ u"suites": OrderedDict(),
+ u"tests": OrderedDict()
}
# Save the provided metadata
for key, val in metadata.items():
- self._data["metadata"][key] = val
+ self._data[u"metadata"][key] = val
# Dictionary defining the methods used to parse different types of
# messages
self.parse_msg = {
- "timestamp": self._get_timestamp,
- "vpp-version": self._get_vpp_version,
- "dpdk-version": self._get_dpdk_version,
- "teardown-vat-history": self._get_vat_history,
- "teardown-papi-history": self._get_papi_history,
- "test-show-runtime": self._get_show_run,
- "testbed": self._get_testbed
+ u"timestamp": self._get_timestamp,
+ u"vpp-version": self._get_vpp_version,
+ u"dpdk-version": self._get_dpdk_version,
+ # TODO: Remove when not needed:
+ u"teardown-vat-history": self._get_vat_history,
+ u"teardown-papi-history": self._get_papi_history,
+ u"test-show-runtime": self._get_show_run,
+ u"testbed": self._get_testbed
}
@property
@@ -331,15 +330,16 @@ class ExecutionChecker(ResultVisitor):
:returns: Nothing.
"""
- if msg.message.count("Setup of TG node"):
+ if msg.message.count(u"Setup of TG node") or \
+ msg.message.count(u"Setup of node TG host"):
reg_tg_ip = re.compile(
- r'Setup of TG node (\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}) done')
+ r'.*TG .* (\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3}).*')
try:
self._testbed = str(re.search(reg_tg_ip, msg.message).group(1))
except (KeyError, ValueError, IndexError, AttributeError):
pass
finally:
- self._data["metadata"]["testbed"] = self._testbed
+ self._data[u"metadata"][u"testbed"] = self._testbed
self._msg_type = None
def _get_vpp_version(self, msg):
@@ -350,12 +350,12 @@ class ExecutionChecker(ResultVisitor):
:returns: Nothing.
"""
- if msg.message.count("return STDOUT Version:") or \
- msg.message.count("VPP Version:") or \
- msg.message.count("VPP version:"):
+ if msg.message.count(u"return STDOUT Version:") or \
+ msg.message.count(u"VPP Version:") or \
+ msg.message.count(u"VPP version:"):
self._version = str(re.search(self.REGEX_VERSION_VPP, msg.message).
group(2))
- self._data["metadata"]["version"] = self._version
+ self._data[u"metadata"][u"version"] = self._version
self._msg_type = None
def _get_dpdk_version(self, msg):
@@ -366,11 +366,11 @@ class ExecutionChecker(ResultVisitor):
:returns: Nothing.
"""
- if msg.message.count("DPDK Version:"):
+ if msg.message.count(u"DPDK Version:"):
try:
self._version = str(re.search(
- self.REGEX_VERSION_DPDK, msg.message). group(2))
- self._data["metadata"]["version"] = self._version
+ self.REGEX_VERSION_DPDK, msg.message).group(2))
+ self._data[u"metadata"][u"version"] = self._version
except IndexError:
pass
finally:
@@ -385,30 +385,32 @@ class ExecutionChecker(ResultVisitor):
"""
self._timestamp = msg.timestamp[:14]
- self._data["metadata"]["generated"] = self._timestamp
+ self._data[u"metadata"][u"generated"] = self._timestamp
self._msg_type = None
def _get_vat_history(self, msg):
"""Called when extraction of VAT command history is required.
+ TODO: Remove when not needed.
+
:param msg: Message to process.
:type msg: Message
:returns: Nothing.
"""
- if msg.message.count("VAT command history:"):
+ if msg.message.count(u"VAT command history:"):
self._conf_history_lookup_nr += 1
if self._conf_history_lookup_nr == 1:
- self._data["tests"][self._test_ID]["conf-history"] = str()
+ self._data[u"tests"][self._test_id][u"conf-history"] = str()
else:
self._msg_type = None
- text = re.sub("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
- "VAT command history:", "", msg.message, count=1). \
- replace("\n\n", "\n").replace('\n', ' |br| ').\
- replace('\r', '').replace('"', "'")
+ text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
+ r"VAT command history:", u"", msg.message, count=1). \
+ replace(u"\n\n", u"\n").replace(u'\n', u' |br| ').\
+ replace(u'\r', u'').replace(u'"', u"'")
- self._data["tests"][self._test_ID]["conf-history"] += " |br| "
- self._data["tests"][self._test_ID]["conf-history"] += \
- "**DUT" + str(self._conf_history_lookup_nr) + ":** " + text
+ self._data[u"tests"][self._test_id][u"conf-history"] += (
+ f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
+ )
def _get_papi_history(self, msg):
"""Called when extraction of PAPI command history is required.
@@ -417,20 +419,21 @@ class ExecutionChecker(ResultVisitor):
:type msg: Message
:returns: Nothing.
"""
- if msg.message.count("PAPI command history:"):
+ if msg.message.count(u"PAPI command history:"):
self._conf_history_lookup_nr += 1
if self._conf_history_lookup_nr == 1:
- self._data["tests"][self._test_ID]["conf-history"] = str()
+ self._data[u"tests"][self._test_id][u"conf-history"] = str()
else:
self._msg_type = None
- text = re.sub("\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
- "PAPI command history:", "", msg.message, count=1). \
- replace("\n\n", "\n").replace('\n', ' |br| ').\
- replace('\r', '').replace('"', "'")
+ text = re.sub(r"\d{1,3}.\d{1,3}.\d{1,3}.\d{1,3} "
+ r"PAPI command history:", u"",
+ msg.message, count=1). \
+ replace(u"\n\n", u"\n").replace(u'\n', u' |br| ').\
+ replace(u'\r', u'').replace(u'"', u"'")
- self._data["tests"][self._test_ID]["conf-history"] += " |br| "
- self._data["tests"][self._test_ID]["conf-history"] += \
- "**DUT" + str(self._conf_history_lookup_nr) + ":** " + text
+ self._data[u"tests"][self._test_id][u"conf-history"] += (
+ f" |br| **DUT{str(self._conf_history_lookup_nr)}:** {text}"
+ )
def _get_show_run(self, msg):
"""Called when extraction of VPP operational data (output of CLI command
@@ -440,56 +443,66 @@ class ExecutionChecker(ResultVisitor):
:type msg: Message
:returns: Nothing.
"""
- if not "show-run" in self._data["tests"][self._test_ID].keys():
- self._data["tests"][self._test_ID]["show-run"] = str()
+ if u"show-run" not in self._data[u"tests"][self._test_id].keys():
+ self._data[u"tests"][self._test_id][u"show-run"] = str()
- if msg.message.count("stats runtime"):
- host = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).\
+ if msg.message.count(u"stats runtime"):
+ host = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).
group(1))
- socket = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).\
+ socket = str(re.search(self.REGEX_TC_PAPI_CLI, msg.message).
group(2))
- message = str(msg.message).replace(' ', '').replace('\n', '').\
- replace("'", '"').replace('b"', '"').replace('u"', '"').\
- split(":",1)[1]
- runtime = loads(message)
+ runtime = loads(
+ str(msg.message).
+ replace(u' ', u'').
+ replace(u'\n', u'').
+ replace(u"'", u'"').
+ replace(u'b"', u'"').
+ replace(u'u"', u'"').
+ split(u":", 1)[1]
+ )
try:
- threads_nr = len(runtime[0]["clocks"])
+ threads_nr = len(runtime[0][u"clocks"])
except (IndexError, KeyError):
return
- tbl_hdr = ["Name", "Calls", "Vectors", "Suspends", "Clocks",
- "Vectors/Calls"]
+ tbl_hdr = [
+ u"Name",
+ u"Calls",
+ u"Vectors",
+ u"Suspends",
+ u"Clocks",
+ u"Vectors/Calls"
+ ]
table = [[tbl_hdr, ] for _ in range(threads_nr)]
for item in runtime:
for idx in range(threads_nr):
- name = format(item["name"])
- calls = format(item["calls"][idx])
- vectors = format(item["vectors"][idx])
- suspends = format(item["suspends"][idx])
- if item["vectors"][idx] > 0:
+ name = format(item[u"name"])
+ calls = format(item[u"calls"][idx])
+ vectors = format(item[u"vectors"][idx])
+ suspends = format(item[u"suspends"][idx])
+ if item[u"vectors"][idx] > 0:
clocks = format(
- item["clocks"][idx]/item["vectors"][idx], ".2e")
- elif item["calls"][idx] > 0:
+ item[u"clocks"][idx]/item[u"vectors"][idx], u".2e")
+ elif item[u"calls"][idx] > 0:
clocks = format(
- item["clocks"][idx]/item["calls"][idx], ".2e")
- elif item["suspends"][idx] > 0:
+ item[u"clocks"][idx]/item[u"calls"][idx], u".2e")
+ elif item[u"suspends"][idx] > 0:
clocks = format(
- item["clocks"][idx]/item["suspends"][idx], ".2e")
+ item[u"clocks"][idx]/item[u"suspends"][idx], u".2e")
else:
clocks = 0
- if item["calls"][idx] > 0:
+ if item[u"calls"][idx] > 0:
vectors_call = format(
- item["vectors"][idx]/item["calls"][idx], ".2f")
+ item[u"vectors"][idx]/item[u"calls"][idx], u".2f")
else:
- vectors_call = format(0, ".2f")
+ vectors_call = format(0, u".2f")
if int(calls) + int(vectors) + int(suspends):
table[idx].append([
name, calls, vectors, suspends, clocks, vectors_call
])
text = ""
for idx in range(threads_nr):
- text += "Thread {idx} ".format(idx=idx)
- text += "vpp_main\n" if idx == 0 else \
- "vpp_wk_{idx}\n".format(idx=idx-1)
+ text += f"Thread {idx} "
+ text += u"vpp_main\n" if idx == 0 else f"vpp_wk_{idx-1}\n"
txt_table = None
for row in table[idx]:
if txt_table is None:
@@ -498,19 +511,19 @@ class ExecutionChecker(ResultVisitor):
if any(row[1:]):
txt_table.add_row(row)
txt_table.set_style(prettytable.MSWORD_FRIENDLY)
- txt_table.align["Name"] = "l"
- txt_table.align["Calls"] = "r"
- txt_table.align["Vectors"] = "r"
- txt_table.align["Suspends"] = "r"
- txt_table.align["Clocks"] = "r"
- txt_table.align["Vectors/Calls"] = "r"
-
- text += txt_table.get_string(sortby="Name") + '\n'
- text = (" \n **DUT: {host}/{socket}** \n {text}".
- format(host=host, socket=socket, text=text))
- text = text.replace('\n', ' |br| ').replace('\r', '').\
- replace('"', "'")
- self._data["tests"][self._test_ID]["show-run"] += text
+ txt_table.align[u"Name"] = u"l"
+ txt_table.align[u"Calls"] = u"r"
+ txt_table.align[u"Vectors"] = u"r"
+ txt_table.align[u"Suspends"] = u"r"
+ txt_table.align[u"Clocks"] = u"r"
+ txt_table.align[u"Vectors/Calls"] = u"r"
+
+ text += txt_table.get_string(sortby=u"Name") + u'\n'
+ text = f" \n **DUT: {host}/{socket}** \n {text}".\
+ replace(u'\n', u' |br| ').\
+ replace(u'\r', u'').\
+ replace(u'"', u"'")
+ self._data[u"tests"][self._test_id][u"show-run"] += text
def _get_ndrpdr_throughput(self, msg):
"""Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test
@@ -523,19 +536,19 @@ class ExecutionChecker(ResultVisitor):
"""
throughput = {
- "NDR": {"LOWER": -1.0, "UPPER": -1.0},
- "PDR": {"LOWER": -1.0, "UPPER": -1.0}
+ u"NDR": {u"LOWER": -1.0, u"UPPER": -1.0},
+ u"PDR": {u"LOWER": -1.0, u"UPPER": -1.0}
}
- status = "FAIL"
+ status = u"FAIL"
groups = re.search(self.REGEX_NDRPDR_RATE, msg)
if groups is not None:
try:
- throughput["NDR"]["LOWER"] = float(groups.group(1))
- throughput["NDR"]["UPPER"] = float(groups.group(2))
- throughput["PDR"]["LOWER"] = float(groups.group(3))
- throughput["PDR"]["UPPER"] = float(groups.group(4))
- status = "PASS"
+ throughput[u"NDR"][u"LOWER"] = float(groups.group(1))
+ throughput[u"NDR"][u"UPPER"] = float(groups.group(2))
+ throughput[u"PDR"][u"LOWER"] = float(groups.group(3))
+ throughput[u"PDR"][u"UPPER"] = float(groups.group(4))
+ status = u"PASS"
except (IndexError, ValueError):
pass
@@ -552,17 +565,17 @@ class ExecutionChecker(ResultVisitor):
"""
throughput = {
- "LOWER": -1.0,
- "UPPER": -1.0
+ u"LOWER": -1.0,
+ u"UPPER": -1.0
}
- status = "FAIL"
+ status = u"FAIL"
groups = re.search(self.REGEX_PLR_RATE, msg)
if groups is not None:
try:
- throughput["LOWER"] = float(groups.group(1))
- throughput["UPPER"] = float(groups.group(2))
- status = "PASS"
+ throughput[u"LOWER"] = float(groups.group(1))
+ throughput[u"UPPER"] = float(groups.group(2))
+ status = u"PASS"
except (IndexError, ValueError):
pass
@@ -576,18 +589,23 @@ class ExecutionChecker(ResultVisitor):
:returns: Parsed data as a dict and the status (PASS/FAIL).
:rtype: tuple(dict, str)
"""
- latency_default = {"min": -1.0, "avg": -1.0, "max": -1.0, "hdrh": ""}
+ latency_default = {
+ u"min": -1.0,
+ u"avg": -1.0,
+ u"max": -1.0,
+ u"hdrh": u""
+ }
latency = {
- "NDR": {
- "direction1": copy.copy(latency_default),
- "direction2": copy.copy(latency_default)
+ u"NDR": {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
},
- "PDR": {
- "direction1": copy.copy(latency_default),
- "direction2": copy.copy(latency_default)
+ u"PDR": {
+ u"direction1": copy.copy(latency_default),
+ u"direction2": copy.copy(latency_default)
}
}
- status = "FAIL"
+ status = u"FAIL"
groups = re.search(self.REGEX_NDRPDR_LAT, msg)
def process_latency(in_str):
@@ -605,24 +623,28 @@ class ExecutionChecker(ResultVisitor):
in_list = in_str.split('/')
rval = {
- "min": float(in_list[0]),
- "avg": float(in_list[1]),
- "max": float(in_list[2]),
- "hdrh": ""
+ u"min": float(in_list[0]),
+ u"avg": float(in_list[1]),
+ u"max": float(in_list[2]),
+ u"hdrh": u""
}
if len(in_list) == 4:
- rval["hdrh"] = str(in_list[3])
+ rval[u"hdrh"] = str(in_list[3])
return rval
if groups is not None:
try:
- latency["NDR"]["direction1"] = process_latency(groups.group(1))
- latency["NDR"]["direction2"] = process_latency(groups.group(2))
- latency["PDR"]["direction1"] = process_latency(groups.group(3))
- latency["PDR"]["direction2"] = process_latency(groups.group(4))
- status = "PASS"
+ latency[u"NDR"][u"direction1"] = \
+ process_latency(groups.group(1))
+ latency[u"NDR"][u"direction2"] = \
+ process_latency(groups.group(2))
+ latency[u"PDR"][u"direction1"] = \
+ process_latency(groups.group(3))
+ latency[u"PDR"][u"direction2"] = \
+ process_latency(groups.group(4))
+ status = u"PASS"
except (IndexError, ValueError):
pass
@@ -653,17 +675,22 @@ class ExecutionChecker(ResultVisitor):
except AttributeError:
return
- doc_str = suite.doc.replace('"', "'").replace('\n', ' ').\
- replace('\r', '').replace('*[', ' |br| *[').replace("*", "**")
- doc_str = replace(doc_str, ' |br| *[', '*[', maxreplace=1)
-
- self._data["suites"][suite.longname.lower().replace('"', "'").
- replace(" ", "_")] = {
- "name": suite.name.lower(),
- "doc": doc_str,
- "parent": parent_name,
- "level": len(suite.longname.split("."))
- }
+ doc_str = suite.doc.\
+ replace(u'"', u"'").\
+ replace(u'\n', u' ').\
+ replace(u'\r', u'').\
+ replace(u'*[', u' |br| *[').\
+ replace(u"*", u"**").\
+ replace(u' |br| *[', u'*[', 1)
+
+ self._data[u"suites"][suite.longname.lower().
+ replace(u'"', u"'").
+ replace(u" ", u"_")] = {
+ u"name": suite.name.lower(),
+ u"doc": doc_str,
+ u"parent": parent_name,
+ u"level": len(suite.longname.split(u"."))
+ }
suite.keywords.visit(self)
@@ -674,7 +701,6 @@ class ExecutionChecker(ResultVisitor):
:type suite: Suite
:returns: Nothing.
"""
- pass
def visit_test(self, test):
"""Implements traversing through the test.
@@ -707,128 +733,90 @@ class ExecutionChecker(ResultVisitor):
# Change the TC long name and name if defined in the mapping table
longname = self._mapping.get(longname_orig, None)
if longname is not None:
- name = longname.split('.')[-1]
- logging.debug("{0}\n{1}\n{2}\n{3}".format(
- self._data["metadata"], longname_orig, longname, name))
+ name = longname.split(u'.')[-1]
+ logging.debug(
+ f"{self._data[u'metadata']}\n{longname_orig}\n{longname}\n"
+ f"{name}"
+ )
else:
longname = longname_orig
name = test.name.lower()
# Remove TC number from the TC long name (backward compatibility):
- self._test_ID = re.sub(self.REGEX_TC_NUMBER, "", longname)
+ self._test_id = re.sub(self.REGEX_TC_NUMBER, u"", longname)
# Remove TC number from the TC name (not needed):
- test_result["name"] = re.sub(self.REGEX_TC_NUMBER, "", name)
-
- test_result["parent"] = test.parent.name.lower()
- test_result["tags"] = tags
- doc_str = test.doc.replace('"', "'").replace('\n', ' '). \
- replace('\r', '').replace('[', ' |br| [')
- test_result["doc"] = replace(doc_str, ' |br| [', '[', maxreplace=1)
- test_result["msg"] = test.message.replace('\n', ' |br| '). \
- replace('\r', '').replace('"', "'")
- test_result["type"] = "FUNC"
- test_result["status"] = test.status
-
- if "PERFTEST" in tags:
+        test_result[u"name"] = re.sub(self.REGEX_TC_NUMBER, u"", name)
+
+ test_result[u"parent"] = test.parent.name.lower()
+ test_result[u"tags"] = tags
+ test_result["doc"] = test.doc.\
+ replace(u'"', u"'").\
+ replace(u'\n', u' ').\
+ replace(u'\r', u'').\
+ replace(u'[', u' |br| [').\
+ replace(u' |br| [', u'[', 1)
+ test_result[u"msg"] = test.message.\
+ replace(u'\n', u' |br| ').\
+ replace(u'\r', u'').\
+ replace(u'"', u"'")
+ test_result[u"type"] = u"FUNC"
+ test_result[u"status"] = test.status
+
+ if u"PERFTEST" in tags:
# Replace info about cores (e.g. -1c-) with the info about threads
# and cores (e.g. -1t1c-) in the long test case names and in the
# test case names if necessary.
- groups = re.search(self.REGEX_TC_NAME_OLD, self._test_ID)
+ groups = re.search(self.REGEX_TC_NAME_OLD, self._test_id)
if not groups:
tag_count = 0
tag_tc = str()
- for tag in test_result["tags"]:
+ for tag in test_result[u"tags"]:
groups = re.search(self.REGEX_TC_TAG, tag)
if groups:
tag_count += 1
tag_tc = tag
if tag_count == 1:
- self._test_ID = re.sub(self.REGEX_TC_NAME_NEW,
- "-{0}-".format(tag_tc.lower()),
- self._test_ID,
+ self._test_id = re.sub(self.REGEX_TC_NAME_NEW,
+ f"-{tag_tc.lower()}-",
+ self._test_id,
count=1)
- test_result["name"] = re.sub(self.REGEX_TC_NAME_NEW,
- "-{0}-".format(tag_tc.lower()),
- test_result["name"],
- count=1)
+ test_result[u"name"] = re.sub(self.REGEX_TC_NAME_NEW,
+ f"-{tag_tc.lower()}-",
+ test_result["name"],
+ count=1)
else:
- test_result["status"] = "FAIL"
- self._data["tests"][self._test_ID] = test_result
- logging.debug("The test '{0}' has no or more than one "
- "multi-threading tags.".format(self._test_ID))
- logging.debug("Tags: {0}".format(test_result["tags"]))
+ test_result[u"status"] = u"FAIL"
+ self._data[u"tests"][self._test_id] = test_result
+ logging.debug(
+ f"The test {self._test_id} has no or more than one "
+ f"multi-threading tags.\n"
+ f"Tags: {test_result[u'tags']}"
+ )
return
- if test.status == "PASS" and ("NDRPDRDISC" in tags or
- "NDRPDR" in tags or
- "SOAK" in tags or
- "TCP" in tags or
- "MRR" in tags or
- "BMRR" in tags or
- "RECONF" in tags):
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- if "NDRDISC" in tags:
- test_result["type"] = "NDR"
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- elif "PDRDISC" in tags:
- test_result["type"] = "PDR"
- elif "NDRPDR" in tags:
- test_result["type"] = "NDRPDR"
- elif "SOAK" in tags:
- test_result["type"] = "SOAK"
- elif "TCP" in tags:
- test_result["type"] = "TCP"
- elif "MRR" in tags:
- test_result["type"] = "MRR"
- elif "FRMOBL" in tags or "BMRR" in tags:
- test_result["type"] = "BMRR"
- elif "RECONF" in tags:
- test_result["type"] = "RECONF"
- else:
- test_result["status"] = "FAIL"
- self._data["tests"][self._test_ID] = test_result
- return
-
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- if test_result["type"] in ("NDR", "PDR"):
- try:
- rate_value = str(re.search(
- self.REGEX_RATE, test.message).group(1))
- except AttributeError:
- rate_value = "-1"
- try:
- rate_unit = str(re.search(
- self.REGEX_RATE, test.message).group(2))
- except AttributeError:
- rate_unit = "-1"
-
- test_result["throughput"] = dict()
- test_result["throughput"]["value"] = \
- int(rate_value.split('.')[0])
- test_result["throughput"]["unit"] = rate_unit
- test_result["latency"] = \
- self._get_latency(test.message, test_result["type"])
- if test_result["type"] == "PDR":
- test_result["lossTolerance"] = str(re.search(
- self.REGEX_TOLERANCE, test.message).group(1))
-
- elif test_result["type"] in ("NDRPDR", ):
- test_result["throughput"], test_result["status"] = \
+ if test.status == u"PASS":
+ if u"NDRPDR" in tags:
+ test_result[u"type"] = u"NDRPDR"
+ test_result[u"throughput"], test_result[u"status"] = \
self._get_ndrpdr_throughput(test.message)
- test_result["latency"], test_result["status"] = \
+ test_result[u"latency"], test_result[u"status"] = \
self._get_ndrpdr_latency(test.message)
-
- elif test_result["type"] in ("SOAK", ):
- test_result["throughput"], test_result["status"] = \
+ elif u"SOAK" in tags:
+ test_result[u"type"] = u"SOAK"
+ test_result[u"throughput"], test_result[u"status"] = \
self._get_plr_throughput(test.message)
-
- elif test_result["type"] in ("TCP", ):
+ elif u"TCP" in tags:
+ test_result[u"type"] = u"TCP"
groups = re.search(self.REGEX_TCP, test.message)
- test_result["result"] = int(groups.group(2))
+ test_result[u"result"] = int(groups.group(2))
+ elif u"MRR" in tags or u"FRMOBL" in tags or u"BMRR" in tags:
+ if u"MRR" in tags:
+ test_result[u"type"] = u"MRR"
+ else:
+ test_result[u"type"] = u"BMRR"
- elif test_result["type"] in ("MRR", "BMRR"):
- test_result["result"] = dict()
+ test_result[u"result"] = dict()
groups = re.search(self.REGEX_BMRR, test.message)
if groups is not None:
items_str = groups.group(1)
@@ -836,25 +824,29 @@ class ExecutionChecker(ResultVisitor):
in items_str.split(",")]
# Use whole list in CSIT-1180.
stats = jumpavg.AvgStdevStats.for_runs(items_float)
- test_result["result"]["receive-rate"] = stats.avg
+ test_result[u"result"][u"receive-rate"] = stats.avg
else:
groups = re.search(self.REGEX_MRR, test.message)
- test_result["result"]["receive-rate"] = \
+ test_result[u"result"][u"receive-rate"] = \
float(groups.group(3)) / float(groups.group(1))
-
- elif test_result["type"] == "RECONF":
- test_result["result"] = None
+ elif u"RECONF" in tags:
+ test_result[u"type"] = u"RECONF"
+ test_result[u"result"] = None
try:
grps_loss = re.search(self.REGEX_RECONF_LOSS, test.message)
grps_time = re.search(self.REGEX_RECONF_TIME, test.message)
- test_result["result"] = {
- "loss": int(grps_loss.group(1)),
- "time": float(grps_time.group(1))
+ test_result[u"result"] = {
+ u"loss": int(grps_loss.group(1)),
+ u"time": float(grps_time.group(1))
}
except (AttributeError, IndexError, ValueError, TypeError):
- test_result["status"] = "FAIL"
+ test_result[u"status"] = u"FAIL"
+ else:
+ test_result[u"status"] = u"FAIL"
+ self._data[u"tests"][self._test_id] = test_result
+ return
- self._data["tests"][self._test_ID] = test_result
+ self._data[u"tests"][self._test_id] = test_result
def end_test(self, test):
"""Called when test ends.
@@ -863,7 +855,6 @@ class ExecutionChecker(ResultVisitor):
:type test: Test
:returns: Nothing.
"""
- pass
def visit_keyword(self, keyword):
"""Implements traversing through the keyword and its child keywords.
@@ -883,9 +874,9 @@ class ExecutionChecker(ResultVisitor):
:returns: Nothing.
"""
try:
- if keyword.type == "setup":
+ if keyword.type == u"setup":
self.visit_setup_kw(keyword)
- elif keyword.type == "teardown":
+ elif keyword.type == u"teardown":
self._lookup_kw_nr = 0
self.visit_teardown_kw(keyword)
else:
@@ -901,7 +892,6 @@ class ExecutionChecker(ResultVisitor):
:type keyword: Keyword
:returns: Nothing.
"""
- pass
def visit_test_kw(self, test_kw):
"""Implements traversing through the test keyword and its child
@@ -924,12 +914,12 @@ class ExecutionChecker(ResultVisitor):
:type test_kw: Keyword
:returns: Nothing.
"""
- if test_kw.name.count("Show Runtime Counters On All Duts"):
+ if test_kw.name.count(u"Show Runtime Counters On All Duts"):
self._lookup_kw_nr += 1
self._show_run_lookup_nr = 0
- self._msg_type = "test-show-runtime"
- elif test_kw.name.count("Install Dpdk Test") and not self._version:
- self._msg_type = "dpdk-version"
+ self._msg_type = u"test-show-runtime"
+ elif test_kw.name.count(u"Install Dpdk Test") and not self._version:
+ self._msg_type = u"dpdk-version"
else:
return
test_kw.messages.visit(self)
@@ -941,7 +931,6 @@ class ExecutionChecker(ResultVisitor):
:type test_kw: Keyword
:returns: Nothing.
"""
- pass
def visit_setup_kw(self, setup_kw):
"""Implements traversing through the teardown keyword and its child
@@ -964,14 +953,14 @@ class ExecutionChecker(ResultVisitor):
:type setup_kw: Keyword
:returns: Nothing.
"""
- if setup_kw.name.count("Show Vpp Version On All Duts") \
+ if setup_kw.name.count(u"Show Vpp Version On All Duts") \
and not self._version:
- self._msg_type = "vpp-version"
- elif setup_kw.name.count("Set Global Variable") \
+ self._msg_type = u"vpp-version"
+ elif setup_kw.name.count(u"Set Global Variable") \
and not self._timestamp:
- self._msg_type = "timestamp"
- elif setup_kw.name.count("Setup Framework") and not self._testbed:
- self._msg_type = "testbed"
+ self._msg_type = u"timestamp"
+ elif setup_kw.name.count(u"Setup Framework") and not self._testbed:
+ self._msg_type = u"testbed"
else:
return
setup_kw.messages.visit(self)
@@ -983,7 +972,6 @@ class ExecutionChecker(ResultVisitor):
:type setup_kw: Keyword
:returns: Nothing.
"""
- pass
def visit_teardown_kw(self, teardown_kw):
"""Implements traversing through the teardown keyword and its child
@@ -999,21 +987,21 @@ class ExecutionChecker(ResultVisitor):
self.end_teardown_kw(keyword)
def start_teardown_kw(self, teardown_kw):
- """Called when teardown keyword starts. Default implementation does
- nothing.
+ """Called when teardown keyword starts
:param teardown_kw: Keyword to process.
:type teardown_kw: Keyword
:returns: Nothing.
"""
- if teardown_kw.name.count("Show Vat History On All Duts"):
+ if teardown_kw.name.count(u"Show Vat History On All Duts"):
+ # TODO: Remove when not needed:
self._conf_history_lookup_nr = 0
- self._msg_type = "teardown-vat-history"
+ self._msg_type = u"teardown-vat-history"
teardown_kw.messages.visit(self)
- elif teardown_kw.name.count("Show Papi History On All Duts"):
+ elif teardown_kw.name.count(u"Show Papi History On All Duts"):
self._conf_history_lookup_nr = 0
- self._msg_type = "teardown-papi-history"
+ self._msg_type = u"teardown-papi-history"
teardown_kw.messages.visit(self)
def end_teardown_kw(self, teardown_kw):
@@ -1023,7 +1011,6 @@ class ExecutionChecker(ResultVisitor):
:type teardown_kw: Keyword
:returns: Nothing.
"""
- pass
def visit_message(self, msg):
"""Implements visiting the message.
@@ -1054,7 +1041,6 @@ class ExecutionChecker(ResultVisitor):
:type msg: Message
:returns: Nothing.
"""
- pass
class InputData:
@@ -1107,7 +1093,7 @@ class InputData:
:rtype: pandas.Series
"""
- return self.data[job][build]["metadata"]
+ return self.data[job][build][u"metadata"]
def suites(self, job, build):
"""Getter - suites
@@ -1120,7 +1106,7 @@ class InputData:
:rtype: pandas.Series
"""
- return self.data[job][str(build)]["suites"]
+ return self.data[job][str(build)][u"suites"]
def tests(self, job, build):
"""Getter - tests
@@ -1133,7 +1119,7 @@ class InputData:
:rtype: pandas.Series
"""
- return self.data[job][build]["tests"]
+ return self.data[job][build][u"tests"]
def _parse_tests(self, job, build, log):
"""Process data from robot output.xml file and return JSON structured
@@ -1150,16 +1136,18 @@ class InputData:
"""
metadata = {
- "job": job,
- "build": build
+ u"job": job,
+ u"build": build
}
- with open(build["file-name"], 'r') as data_file:
+ with open(build[u"file-name"], u'r') as data_file:
try:
result = ExecutionResult(data_file)
except errors.DataError as err:
- log.append(("ERROR", "Error occurred while parsing output.xml: "
- "{0}".format(err)))
+ log.append(
+ (u"ERROR", f"Error occurred while parsing output.xml: "
+ f"{repr(err)}")
+ )
return None
checker = ExecutionChecker(metadata, self._cfg.mapping,
self._cfg.ignore)
@@ -1185,10 +1173,11 @@ class InputData:
logs = list()
- logs.append(("INFO", " Processing the job/build: {0}: {1}".
- format(job, build["build"])))
+ logs.append(
+ (u"INFO", f" Processing the job/build: {job}: {build[u'build']}")
+ )
- state = "failed"
+ state = u"failed"
success = False
data = None
do_repeat = repeat
@@ -1199,61 +1188,70 @@ class InputData:
break
do_repeat -= 1
if not success:
- logs.append(("ERROR", "It is not possible to download the input "
- "data file from the job '{job}', build "
- "'{build}', or it is damaged. Skipped.".
- format(job=job, build=build["build"])))
+ logs.append(
+ (u"ERROR",
+ f"It is not possible to download the input data file from the "
+ f"job {job}, build {build[u'build']}, or it is damaged. "
+ f"Skipped.")
+ )
if success:
- logs.append(("INFO", " Processing data from the build '{0}' ...".
- format(build["build"])))
+ logs.append(
+ (u"INFO",
+ f" Processing data from the build {build[u'build']} ...")
+ )
data = self._parse_tests(job, build, logs)
if data is None:
- logs.append(("ERROR", "Input data file from the job '{job}', "
- "build '{build}' is damaged. Skipped.".
- format(job=job, build=build["build"])))
+ logs.append(
+ (u"ERROR",
+ f"Input data file from the job {job}, build "
+ f"{build[u'build']} is damaged. Skipped.")
+ )
else:
- state = "processed"
+ state = u"processed"
try:
- remove(build["file-name"])
+ remove(build[u"file-name"])
except OSError as err:
- logs.append(("ERROR", "Cannot remove the file '{0}': {1}".
- format(build["file-name"], repr(err))))
+ logs.append(
+ ("ERROR", f"Cannot remove the file {build[u'file-name']}: "
+ f"{repr(err)}")
+ )
# If the time-period is defined in the specification file, remove all
# files which are outside the time period.
- timeperiod = self._cfg.input.get("time-period", None)
+ timeperiod = self._cfg.input.get(u"time-period", None)
if timeperiod and data:
now = dt.utcnow()
timeperiod = timedelta(int(timeperiod))
- metadata = data.get("metadata", None)
+ metadata = data.get(u"metadata", None)
if metadata:
- generated = metadata.get("generated", None)
+ generated = metadata.get(u"generated", None)
if generated:
- generated = dt.strptime(generated, "%Y%m%d %H:%M")
+ generated = dt.strptime(generated, u"%Y%m%d %H:%M")
if (now - generated) > timeperiod:
# Remove the data and the file:
- state = "removed"
+ state = u"removed"
data = None
logs.append(
- ("INFO",
- " The build {job}/{build} is outdated, will be "
- "removed".format(job=job, build=build["build"])))
- logs.append(("INFO", " Done."))
+ (u"INFO",
+ f" The build {job}/{build[u'build']} is "
+ f"outdated, will be removed.")
+ )
+ logs.append((u"INFO", u" Done."))
for level, line in logs:
- if level == "INFO":
+ if level == u"INFO":
logging.info(line)
- elif level == "ERROR":
+ elif level == u"ERROR":
logging.error(line)
- elif level == "DEBUG":
+ elif level == u"DEBUG":
logging.debug(line)
- elif level == "CRITICAL":
+ elif level == u"CRITICAL":
logging.critical(line)
- elif level == "WARNING":
+ elif level == u"WARNING":
logging.warning(line)
- return {"data": data, "state": state, "job": job, "build": build}
+ return {u"data": data, u"state": state, u"job": job, u"build": build}
def download_and_parse_data(self, repeat=1):
"""Download the input data files, parse input data from input files and
@@ -1264,41 +1262,48 @@ class InputData:
:type repeat: int
"""
- logging.info("Downloading and parsing input files ...")
+ logging.info(u"Downloading and parsing input files ...")
for job, builds in self._cfg.builds.items():
for build in builds:
result = self._download_and_parse_build(job, build, repeat)
- build_nr = result["build"]["build"]
+ build_nr = result[u"build"][u"build"]
- if result["data"]:
- data = result["data"]
+ if result[u"data"]:
+ data = result[u"data"]
build_data = pd.Series({
- "metadata": pd.Series(
- data["metadata"].values(),
- index=data["metadata"].keys()),
- "suites": pd.Series(data["suites"].values(),
- index=data["suites"].keys()),
- "tests": pd.Series(data["tests"].values(),
- index=data["tests"].keys())})
+ u"metadata": pd.Series(
+ list(data[u"metadata"].values()),
+ index=list(data[u"metadata"].keys())
+ ),
+ u"suites": pd.Series(
+ list(data[u"suites"].values()),
+ index=list(data[u"suites"].keys())
+ ),
+ u"tests": pd.Series(
+ list(data[u"tests"].values()),
+ index=list(data[u"tests"].keys())
+ )
+ })
if self._input_data.get(job, None) is None:
self._input_data[job] = pd.Series()
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_file_name(
- job, build_nr, result["build"]["file-name"])
+ job, build_nr, result[u"build"][u"file-name"])
- self._cfg.set_input_state(job, build_nr, result["state"])
+ self._cfg.set_input_state(job, build_nr, result[u"state"])
- logging.info("Memory allocation: {0:,d}MB".format(
- resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1000))
+ mem_alloc = \
+ resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1000
+ logging.info(f"Memory allocation: {mem_alloc:.0f}MB")
- logging.info("Done.")
+ logging.info(u"Done.")
@staticmethod
- def _end_of_tag(tag_filter, start=0, closer="'"):
+ def _end_of_tag(tag_filter, start=0, closer=u"'"):
"""Return the index of character in the string which is the end of tag.
:param tag_filter: The string where the end of tag is being searched.
@@ -1333,9 +1338,9 @@ class InputData:
if index is None:
return tag_filter
index += 1
- tag_filter = tag_filter[:index] + " in tags" + tag_filter[index:]
+ tag_filter = tag_filter[:index] + u" in tags" + tag_filter[index:]
- def filter_data(self, element, params=None, data=None, data_set="tests",
+ def filter_data(self, element, params=None, data=None, data_set=u"tests",
continue_on_error=False):
"""Filter required data from the given jobs and builds.
@@ -1375,21 +1380,21 @@ class InputData:
"""
try:
- if element["filter"] in ("all", "template"):
- cond = "True"
+ if element[u"filter"] in (u"all", u"template"):
+ cond = u"True"
else:
- cond = InputData._condition(element["filter"])
- logging.debug(" Filter: {0}".format(cond))
+ cond = InputData._condition(element[u"filter"])
+ logging.debug(f" Filter: {cond}")
except KeyError:
- logging.error(" No filter defined.")
+ logging.error(u" No filter defined.")
return None
if params is None:
- params = element.get("parameters", None)
+ params = element.get(u"parameters", None)
if params:
- params.append("type")
+ params.append(u"type")
- data_to_filter = data if data else element["data"]
+ data_to_filter = data if data else element[u"data"]
data = pd.Series()
try:
for job, builds in data_to_filter.items():
@@ -1397,41 +1402,46 @@ class InputData:
for build in builds:
data[job][str(build)] = pd.Series()
try:
- data_iter = self.data[job][str(build)][data_set].\
- iteritems()
+ data_dict = dict(
+ self.data[job][str(build)][data_set].items())
except KeyError:
if continue_on_error:
continue
- else:
- return None
- for test_ID, test_data in data_iter:
- if eval(cond, {"tags": test_data.get("tags", "")}):
- data[job][str(build)][test_ID] = pd.Series()
+ return None
+
+ for test_id, test_data in data_dict.items():
+ if eval(cond, {u"tags": test_data.get(u"tags", u"")}):
+ data[job][str(build)][test_id] = pd.Series()
if params is None:
for param, val in test_data.items():
- data[job][str(build)][test_ID][param] = val
+ data[job][str(build)][test_id][param] = val
else:
for param in params:
try:
- data[job][str(build)][test_ID][param] =\
+ data[job][str(build)][test_id][param] =\
test_data[param]
except KeyError:
- data[job][str(build)][test_ID][param] =\
- "No Data"
+ data[job][str(build)][test_id][param] =\
+ u"No Data"
return data
except (KeyError, IndexError, ValueError) as err:
- logging.error(" Missing mandatory parameter in the element "
- "specification: {0}".format(err))
+ logging.error(
+ f"Missing mandatory parameter in the element specification: "
+ f"{repr(err)}"
+ )
return None
- except AttributeError:
+ except AttributeError as err:
+ logging.error(repr(err))
return None
- except SyntaxError:
- logging.error(" The filter '{0}' is not correct. Check if all "
- "tags are enclosed by apostrophes.".format(cond))
+ except SyntaxError as err:
+ logging.error(
+ f"The filter {cond} is not correct. Check if all tags are "
+ f"enclosed by apostrophes.\n{repr(err)}"
+ )
return None
- def filter_tests_by_name(self, element, params=None, data_set="tests",
+ def filter_tests_by_name(self, element, params=None, data_set=u"tests",
continue_on_error=False):
"""Filter required data from the given jobs and builds.
@@ -1467,60 +1477,61 @@ class InputData:
:rtype pandas.Series
"""
- include = element.get("include", None)
+ include = element.get(u"include", None)
if not include:
- logging.warning("No tests to include, skipping the element.")
+ logging.warning(u"No tests to include, skipping the element.")
return None
if params is None:
- params = element.get("parameters", None)
+ params = element.get(u"parameters", None)
if params:
- params.append("type")
+ params.append(u"type")
data = pd.Series()
try:
- for job, builds in element["data"].items():
+ for job, builds in element[u"data"].items():
data[job] = pd.Series()
for build in builds:
data[job][str(build)] = pd.Series()
for test in include:
try:
reg_ex = re.compile(str(test).lower())
- for test_ID in self.data[job][str(build)]\
- [data_set].keys():
- if re.match(reg_ex, str(test_ID).lower()):
- test_data = self.data[job][str(build)]\
- [data_set][test_ID]
- data[job][str(build)][test_ID] = pd.Series()
+ for test_id in self.data[job][
+ str(build)][data_set].keys():
+ if re.match(reg_ex, str(test_id).lower()):
+ test_data = self.data[job][
+ str(build)][data_set][test_id]
+ data[job][str(build)][test_id] = pd.Series()
if params is None:
for param, val in test_data.items():
- data[job][str(build)][test_ID]\
+ data[job][str(build)][test_id]\
[param] = val
else:
for param in params:
try:
- data[job][str(build)][test_ID]\
- [param] = test_data[param]
+ data[job][str(build)][
+ test_id][param] = \
+ test_data[param]
except KeyError:
- data[job][str(build)][test_ID]\
- [param] = "No Data"
+ data[job][str(build)][
+ test_id][param] = u"No Data"
except KeyError as err:
- logging.error("{err!r}".format(err=err))
+ logging.error(repr(err))
if continue_on_error:
continue
- else:
- return None
+ return None
return data
except (KeyError, IndexError, ValueError) as err:
- logging.error("Missing mandatory parameter in the element "
- "specification: {err!r}".format(err=err))
+ logging.error(
+ f"Missing mandatory parameter in the element "
+ f"specification: {repr(err)}"
+ )
return None
except AttributeError as err:
- logging.error("{err!r}".format(err=err))
+ logging.error(repr(err))
return None
-
@staticmethod
def merge_data(data):
"""Merge data from more jobs and builds to a simple data structure.
@@ -1542,12 +1553,12 @@ class InputData:
:rtype: pandas.Series
"""
- logging.info(" Merging data ...")
+ logging.info(u" Merging data ...")
merged_data = pd.Series()
- for _, builds in data.iteritems():
- for _, item in builds.iteritems():
- for ID, item_data in item.iteritems():
- merged_data[ID] = item_data
+ for builds in data.values:
+ for item in builds.values:
+ for item_id, item_data in item.items():
+ merged_data[item_id] = item_data
return merged_data
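
For orientation, filter_data() turns the specification filter string into a Python expression and eval()-uates it against each test's tags, which is what the u"tags" lookup in the hunks above does. A minimal standalone sketch of that idea follows; to_condition() is a simplified, hypothetical helper, not the real _condition()/_end_of_tag() code:

    # Hypothetical, simplified version of the tag-filter transformation.
    def to_condition(tag_filter):
        """Append u" in tags" after every quoted tag in the filter string."""
        result, rest = u"", tag_filter
        while u"'" in rest:
            head, _, rest = rest.partition(u"'")
            tag, _, rest = rest.partition(u"'")
            result += f"{head}'{tag}' in tags"
        return result + rest

    cond = to_condition(u"'NDRPDR' and '64B' and not 'VHOST'")
    tags = [u"NDRPDR", u"64B", u"IP4FWD"]
    print(eval(cond, {u"tags": tags}))  # True: required tags present, 'VHOST' absent
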
diff --git a/resources/tools/presentation/pal.py b/resources/tools/presentation/pal.py
index 94742b07da..01eb8f64ff 100644
--- a/resources/tools/presentation/pal.py
+++ b/resources/tools/presentation/pal.py
@@ -18,7 +18,7 @@ import sys
import argparse
import logging
-from errors import PresentationError
+from pal_errors import PresentationError
from environment import Environment, clean_environment
from specification_parser import Specification
from input_data_parser import InputData
@@ -27,7 +27,7 @@ from generator_plots import generate_plots
from generator_files import generate_files
from static_content import prepare_static_content
from generator_report import generate_report
-from generator_CPTA import generate_cpta
+from generator_cpta import generate_cpta
from generator_alerts import Alerting, AlertingError
@@ -41,26 +41,26 @@ def parse_args():
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.
RawDescriptionHelpFormatter)
- parser.add_argument("-s", "--specification",
+ parser.add_argument(u"-s", u"--specification",
required=True,
- type=argparse.FileType('r'),
- help="Specification YAML file.")
- parser.add_argument("-r", "--release",
- default="master",
+ type=argparse.FileType(u'r'),
+ help=u"Specification YAML file.")
+ parser.add_argument(u"-r", u"--release",
+ default=u"master",
type=str,
- help="Release string of the product.")
- parser.add_argument("-w", "--week",
- default="1",
+ help=u"Release string of the product.")
+ parser.add_argument(u"-w", u"--week",
+ default=u"1",
type=str,
- help="Calendar week when the report is published.")
- parser.add_argument("-l", "--logging",
- choices=["DEBUG", "INFO", "WARNING",
- "ERROR", "CRITICAL"],
- default="ERROR",
- help="Logging level.")
- parser.add_argument("-f", "--force",
- action='store_true',
- help="Force removing the old build(s) if present.")
+ help=u"Calendar week when the report is published.")
+ parser.add_argument(u"-l", u"--logging",
+ choices=[u"DEBUG", u"INFO", u"WARNING",
+ u"ERROR", u"CRITICAL"],
+ default=u"ERROR",
+ help=u"Logging level.")
+ parser.add_argument(u"-f", u"--force",
+ action=u"store_true",
+ help=u"Force removing the old build(s) if present.")
return parser.parse_args()
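
One subtlety worth noting: because u"--specification" uses type=argparse.FileType(u'r'), args.specification arrives as an already opened, readable file object rather than a path string. A quick standalone check of that argparse behaviour (the file name is just a placeholder and is assumed to exist in the working directory):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(u"-s", u"--specification", required=True,
                        type=argparse.FileType(u"r"))
    args = parser.parse_args([u"-s", u"specification.yaml"])
    print(args.specification.name)   # the path as given on the command line
    print(type(args.specification))  # <class '_io.TextIOWrapper'>
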
@@ -68,75 +68,70 @@ def parse_args():
def main():
"""Main function."""
- log_levels = {"NOTSET": logging.NOTSET,
- "DEBUG": logging.DEBUG,
- "INFO": logging.INFO,
- "WARNING": logging.WARNING,
- "ERROR": logging.ERROR,
- "CRITICAL": logging.CRITICAL}
+ log_levels = {u"NOTSET": logging.NOTSET,
+ u"DEBUG": logging.DEBUG,
+ u"INFO": logging.INFO,
+ u"WARNING": logging.WARNING,
+ u"ERROR": logging.ERROR,
+ u"CRITICAL": logging.CRITICAL}
args = parse_args()
- logging.basicConfig(format='%(asctime)s: %(levelname)s: %(message)s',
- datefmt='%Y/%m/%d %H:%M:%S',
+ logging.basicConfig(format=u"%(asctime)s: %(levelname)s: %(message)s",
+ datefmt=u"%Y/%m/%d %H:%M:%S",
level=log_levels[args.logging])
- logging.info("Application started.")
+ logging.info(u"Application started.")
try:
spec = Specification(args.specification)
spec.read_specification()
except PresentationError:
- logging.critical("Finished with error.")
+ logging.critical(u"Finished with error.")
return 1
- if spec.output["output"] not in ("report", "CPTA"):
- logging.critical("The output '{0}' is not supported.".
- format(spec.output["output"]))
+ if spec.output[u"output"] not in (u"report", u"CPTA"):
+ logging.critical(
+ f"The output {spec.output[u'output']} is not supported."
+ )
return 1
- # ret_code = 1
- # try:
- env = Environment(spec.environment, args.force)
- env.set_environment()
-
- prepare_static_content(spec)
-
- data = InputData(spec)
- data.download_and_parse_data(repeat=1)
-
- generate_tables(spec, data)
- generate_plots(spec, data)
- generate_files(spec, data)
-
- if spec.output["output"] == "report":
- generate_report(args.release, spec, args.week)
- logging.info("Successfully finished.")
- elif spec.output["output"] == "CPTA":
- sys.stdout.write(generate_cpta(spec, data))
- try:
- alert = Alerting(spec)
- alert.generate_alerts()
- except AlertingError as err:
- logging.warning(repr(err))
- logging.info("Successfully finished.")
- ret_code = 0
-
- # except AlertingError as err:
- # logging.critical("Finished with an alerting error.")
- # logging.critical(repr(err))
- # except PresentationError as err:
- # logging.critical("Finished with an PAL error.")
- # logging.critical(repr(err))
- # except (KeyError, ValueError) as err:
- # logging.critical("Finished with an error.")
- # logging.critical(repr(err))
- # except Exception as err:
- # logging.critical("Finished with an unexpected error.")
- # logging.critical(repr(err))
- # finally:
- # if spec is not None:
- # clean_environment(spec.environment)
- # return ret_code
-
-
-if __name__ == '__main__':
+ ret_code = 1
+ try:
+ env = Environment(spec.environment, args.force)
+ env.set_environment()
+
+ prepare_static_content(spec)
+
+ data = InputData(spec)
+ data.download_and_parse_data(repeat=1)
+
+ generate_tables(spec, data)
+ generate_plots(spec, data)
+ generate_files(spec, data)
+
+ if spec.output[u"output"] == u"report":
+ generate_report(args.release, spec, args.week)
+ elif spec.output[u"output"] == u"CPTA":
+ sys.stdout.write(generate_cpta(spec, data))
+ try:
+ alert = Alerting(spec)
+ alert.generate_alerts()
+ except AlertingError as err:
+ logging.warning(repr(err))
+
+ logging.info(u"Successfully finished.")
+ ret_code = 0
+
+ except AlertingError as err:
+ logging.critical(f"Finished with an alerting error.\n{repr(err)}")
+ except PresentationError as err:
+        logging.critical(f"Finished with a PAL error.\n{repr(err)}")
+ except (KeyError, ValueError) as err:
+ logging.critical(f"Finished with an error.\n{repr(err)}")
+ finally:
+ if spec is not None:
+ clean_environment(spec.environment)
+ return ret_code
+
+
+if __name__ == u"__main__":
sys.exit(main())
diff --git a/resources/tools/presentation/errors.py b/resources/tools/presentation/pal_errors.py
index 64084f6a40..649c86cde8 100644
--- a/resources/tools/presentation/errors.py
+++ b/resources/tools/presentation/pal_errors.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2017 Cisco and/or its affiliates.
+# Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -29,21 +29,21 @@ class PresentationError(Exception):
- relevant data if there are any collected (optional parameter details).
"""
- log_exception = {"DEBUG": logging.debug,
- "INFO": logging.info,
- "WARNING": logging.warning,
- "ERROR": logging.error,
- "CRITICAL": logging.critical}
+ log_exception = {u"DEBUG": logging.debug,
+ u"INFO": logging.info,
+ u"WARNING": logging.warning,
+ u"ERROR": logging.error,
+ u"CRITICAL": logging.critical}
- def __init__(self, msg, details='', level="CRITICAL"):
+ def __init__(self, msg, details=u'', level=u"CRITICAL"):
"""Sets the exception message and the level.
:param msg: Short description of the encountered problem.
:param details: Relevant messages if there are any collected, e.g.,
- from caught exception (optional parameter details), or relevant data if
- there are any collected (optional parameter details).
+            from a caught exception, or relevant data if there are any
+            collected (optional parameter details).
:param level: Level of the error, possible choices are: "DEBUG", "INFO",
- "WARNING", "ERROR" and "CRITICAL".
+ "WARNING", "ERROR" and "CRITICAL".
:type msg: str
:type details: str
:type level: str
@@ -59,13 +59,14 @@ class PresentationError(Exception):
if self._details:
self.log_exception[self._level](self._details)
except KeyError:
- print("Wrong log level.")
+ print(u"Wrong log level.")
sys.exit(1)
def __repr__(self):
return (
- "PresentationError(msg={msg!r},details={dets!r},level={level!r})".
- format(msg=self._msg, dets=self._details, level=self._level))
+ f"PresentationError(msg={self._msg!r},details={self._details!r},"
+ f"level={self._level!r})"
+ )
def __str__(self):
return str(self._msg)
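
To make the constructor contract and the log_exception dispatch above concrete, here is a small usage sketch (it assumes pal_errors is importable; the message texts are made up):

    import logging
    from pal_errors import PresentationError

    logging.basicConfig(level=logging.DEBUG)

    try:
        raise PresentationError(
            u"Unable to parse the specification.",
            details=u"mapping values are not allowed here",
            level=u"WARNING"  # routed to logging.warning instead of critical
        )
    except PresentationError as err:
        print(repr(err))  # PresentationError(msg=..., details=..., level='WARNING')
        print(str(err))   # only the short message
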
diff --git a/resources/tools/presentation/utils.py b/resources/tools/presentation/pal_utils.py
index 3bd5a71e00..5949432340 100644
--- a/resources/tools/presentation/utils.py
+++ b/resources/tools/presentation/pal_utils.py
@@ -14,22 +14,24 @@
"""General purpose utilities.
"""
-import multiprocessing
import subprocess
import math
-import numpy as np
import logging
import csv
-import prettytable
from os import walk, makedirs, environ
from os.path import join, isdir
from shutil import move, Error
from datetime import datetime
+
+import numpy as np
+import prettytable
+
from pandas import Series
from resources.libraries.python import jumpavg
-from errors import PresentationError
+
+from pal_errors import PresentationError
def mean(items):
@@ -132,11 +134,10 @@ def get_rst_title_char(level):
:returns: Character used for the given title level in rst files.
:rtype: str
"""
- chars = ('=', '-', '`', "'", '.', '~', '*', '+', '^')
+ chars = (u'=', u'-', u'`', u"'", u'.', u'~', u'*', u'+', u'^')
if level < len(chars):
return chars[level]
- else:
- return chars[-1]
+ return chars[-1]
def execute_command(cmd):
@@ -159,16 +160,16 @@ def execute_command(cmd):
stdout, stderr = proc.communicate()
if stdout:
- logging.info(stdout)
+ logging.info(stdout.decode())
if stderr:
- logging.info(stderr)
+ logging.info(stderr.decode())
if proc.returncode != 0:
- logging.error(" Command execution failed.")
- return proc.returncode, stdout, stderr
+ logging.error(u" Command execution failed.")
+ return proc.returncode, stdout.decode(), stderr.decode()
-def get_last_successful_build_number(jenkins_url, job_name):
+def get_last_successful_build_nr(jenkins_url, job_name):
"""Get the number of the last successful build of the given job.
:param jenkins_url: Jenkins URL.
@@ -178,11 +179,9 @@ def get_last_successful_build_number(jenkins_url, job_name):
:returns: The build number as a string.
:rtype: str
"""
-
- url = "{}/{}/lastSuccessfulBuild/buildNumber".format(jenkins_url, job_name)
- cmd = "wget -qO- {url}".format(url=url)
-
- return execute_command(cmd)
+ return execute_command(
+ f"wget -qO- {jenkins_url}/{job_name}/lastSuccessfulBuild/buildNumber"
+ )
def get_last_completed_build_number(jenkins_url, job_name):
@@ -195,11 +194,9 @@ def get_last_completed_build_number(jenkins_url, job_name):
:returns: The build number as a string.
:rtype: str
"""
-
- url = "{}/{}/lastCompletedBuild/buildNumber".format(jenkins_url, job_name)
- cmd = "wget -qO- {url}".format(url=url)
-
- return execute_command(cmd)
+ return execute_command(
+ f"wget -qO- {jenkins_url}/{job_name}/lastCompletedBuild/buildNumber"
+ )
def get_build_timestamp(jenkins_url, job_name, build_nr):
@@ -214,14 +211,9 @@ def get_build_timestamp(jenkins_url, job_name, build_nr):
:returns: The timestamp.
:rtype: datetime.datetime
"""
-
- url = "{jenkins_url}/{job_name}/{build_nr}".format(jenkins_url=jenkins_url,
- job_name=job_name,
- build_nr=build_nr)
- cmd = "wget -qO- {url}".format(url=url)
-
- timestamp = execute_command(cmd)
-
+ timestamp = execute_command(
+ f"wget -qO- {jenkins_url}/{job_name}/{build_nr}"
+ )
return datetime.fromtimestamp(timestamp/1000)
@@ -233,29 +225,31 @@ def archive_input_data(spec):
:raises PresentationError: If it is not possible to archive the input data.
"""
- logging.info(" Archiving the input data files ...")
+ logging.info(u" Archiving the input data files ...")
- extension = spec.input["arch-file-format"]
+ extension = spec.input[u"arch-file-format"]
data_files = list()
for ext in extension:
data_files.extend(get_files(
- spec.environment["paths"]["DIR[WORKING,DATA]"], extension=ext))
- dst = spec.environment["paths"]["DIR[STATIC,ARCH]"]
- logging.info(" Destination: {0}".format(dst))
+ spec.environment[u"paths"][u"DIR[WORKING,DATA]"], extension=ext))
+ dst = spec.environment[u"paths"][u"DIR[STATIC,ARCH]"]
+ logging.info(f" Destination: {dst}")
try:
if not isdir(dst):
makedirs(dst)
for data_file in data_files:
- logging.info(" Moving the file: {0} ...".format(data_file))
+ logging.info(f" Moving the file: {data_file} ...")
move(data_file, dst)
except (Error, OSError) as err:
- raise PresentationError("Not possible to archive the input data.",
- str(err))
+ raise PresentationError(
+ u"Not possible to archive the input data.",
+ repr(err)
+ )
- logging.info(" Done.")
+ logging.info(u" Done.")
def classify_anomalies(data):
@@ -273,7 +267,7 @@ def classify_anomalies(data):
# Nan means something went wrong.
# Use 0.0 to cause that being reported as a severe regression.
bare_data = [0.0 if np.isnan(sample) else sample
- for sample in data.itervalues()]
+ for sample in data.values()]
# TODO: Make BitCountingGroupList a subclass of list again?
group_list = jumpavg.classify(bare_data).group_list
group_list.reverse() # Just to use .pop() for FIFO.
@@ -282,9 +276,9 @@ def classify_anomalies(data):
active_group = None
values_left = 0
avg = 0.0
- for sample in data.itervalues():
+ for sample in data.values():
if np.isnan(sample):
- classification.append("outlier")
+ classification.append(u"outlier")
avgs.append(sample)
continue
if values_left < 1 or active_group is None:
@@ -297,70 +291,30 @@ def classify_anomalies(data):
avgs.append(avg)
values_left -= 1
continue
- classification.append("normal")
+ classification.append(u"normal")
avgs.append(avg)
values_left -= 1
return classification, avgs
-def convert_csv_to_pretty_txt(csv_file, txt_file):
+def convert_csv_to_pretty_txt(csv_file_name, txt_file_name):
"""Convert the given csv table to pretty text table.
- :param csv_file: The path to the input csv file.
- :param txt_file: The path to the output pretty text file.
- :type csv_file: str
- :type txt_file: str
+ :param csv_file_name: The path to the input csv file.
+ :param txt_file_name: The path to the output pretty text file.
+ :type csv_file_name: str
+ :type txt_file_name: str
"""
txt_table = None
- with open(csv_file, 'rb') as csv_file:
- csv_content = csv.reader(csv_file, delimiter=',', quotechar='"')
+ with open(csv_file_name, u"rt") as csv_file:
+ csv_content = csv.reader(csv_file, delimiter=u',', quotechar=u'"')
for row in csv_content:
if txt_table is None:
txt_table = prettytable.PrettyTable(row)
else:
txt_table.add_row(row)
- txt_table.align["Test case"] = "l"
+ txt_table.align[u"Test case"] = u"l"
if txt_table:
- with open(txt_file, "w") as txt_file:
+ with open(txt_file_name, u"w") as txt_file:
txt_file.write(str(txt_table))
-
-
-class Worker(multiprocessing.Process):
- """Worker class used to process tasks in separate parallel processes.
- """
-
- def __init__(self, work_queue, data_queue, func):
- """Initialization.
-
- :param work_queue: Queue with items to process.
- :param data_queue: Shared memory between processes. Queue which keeps
- the result data. This data is then read by the main process and used
- in further processing.
- :param func: Function which is executed by the worker.
- :type work_queue: multiprocessing.JoinableQueue
- :type data_queue: multiprocessing.Manager().Queue()
- :type func: Callable object
- """
- super(Worker, self).__init__()
- self._work_queue = work_queue
- self._data_queue = data_queue
- self._func = func
-
- def run(self):
- """Method representing the process's activity.
- """
-
- while True:
- try:
- self.process(self._work_queue.get())
- finally:
- self._work_queue.task_done()
-
- def process(self, item_to_process):
- """Method executed by the runner.
-
- :param item_to_process: Data to be processed by the function.
- :type item_to_process: tuple
- """
- self._func(self.pid, self._data_queue, *item_to_process)
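
A quick usage sketch of the renamed convert_csv_to_pretty_txt() with its new parameter names; the file names are placeholders only:

    import csv
    from pal_utils import convert_csv_to_pretty_txt

    # Write a throwaway CSV, then render it as an ASCII table.
    with open(u"example.csv", u"wt") as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow([u"Test case", u"Throughput [Mpps]"])
        writer.writerow([u"64b-ip4base-ndrpdr", u"12.3"])

    convert_csv_to_pretty_txt(u"example.csv", u"example.txt")
    # example.txt now holds a PrettyTable rendering with the
    # "Test case" column left-aligned.
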
diff --git a/resources/tools/presentation/requirements.txt b/resources/tools/presentation/requirements.txt
index 1676983658..537bbf2fda 100644
--- a/resources/tools/presentation/requirements.txt
+++ b/resources/tools/presentation/requirements.txt
@@ -1,9 +1,9 @@
-Sphinx==1.7.6
+Sphinx==2.2.1
sphinx-rtd-theme==0.4.0
-robotframework==2.9.2
-sphinxcontrib-programoutput
-PyYAML==5.1.1
-numpy==1.16.4
-pandas==0.24.2
+sphinxcontrib-programoutput==0.15
+robotframework==3.1.2
+PyYAML==5.1
+numpy==1.17.3
+pandas==0.25.3
plotly==4.1.1
PTable==0.9.2
diff --git a/resources/tools/presentation/run_cpta.sh b/resources/tools/presentation/run_cpta.sh
index a60c1cc7a9..1aa5703162 100755
--- a/resources/tools/presentation/run_cpta.sh
+++ b/resources/tools/presentation/run_cpta.sh
@@ -22,7 +22,7 @@ source ${DIR[WORKING]}/env/bin/activate
# Install python dependencies:
pip3 install -r requirements.txt
-export PYTHONPATH=`pwd`
+export PYTHONPATH=`pwd`:`pwd`/../../../:`pwd`/../../libraries/python
STATUS=$(python pal.py \
--specification specification_CPTA.yaml \
diff --git a/resources/tools/presentation/run_report.sh b/resources/tools/presentation/run_report.sh
index 56c671a8bb..9f6290e4b2 100755
--- a/resources/tools/presentation/run_report.sh
+++ b/resources/tools/presentation/run_report.sh
@@ -33,7 +33,7 @@ source ${DIR[WORKING]}/env/bin/activate
# Install python dependencies:
pip3 install -r requirements.txt
-export PYTHONPATH=`pwd`
+export PYTHONPATH=`pwd`:`pwd`/../../../:`pwd`/../../libraries/python
python pal.py \
--specification specification.yaml \
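
The widened PYTHONPATH in both run scripts is what lets pal_utils.py do "from resources.libraries.python import jumpavg". Roughly the same effect expressed in Python, assuming a standard CSIT checkout with the scripts run from resources/tools/presentation:

    import os
    import sys

    presentation_dir = os.getcwd()
    repo_root = os.path.abspath(
        os.path.join(presentation_dir, u"..", u"..", u"..")
    )
    sys.path.extend([
        presentation_dir,
        repo_root,
        os.path.join(repo_root, u"resources", u"libraries", u"python"),
    ])

    from resources.libraries.python import jumpavg  # resolved via repo_root
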
diff --git a/resources/tools/presentation/specification.yaml b/resources/tools/presentation/specification.yaml
index f4ad61686d..f24e6f355d 100644
--- a/resources/tools/presentation/specification.yaml
+++ b/resources/tools/presentation/specification.yaml
@@ -141,52 +141,52 @@
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
plot-vpp-throughput-lat-tsa-3n-hsw-vhost:
csit-vpp-perf-verify-1908-3n-hsw:
- 48 # NDRPDR sel vhost
- 49 # NDRPDR sel vhost
- 52 # NDRPDR sel vhost
- - 53 # NDRPDR sel vhost
- - 54 # NDRPDR sel vhost
- - 55 # NDRPDR sel vhost
- - 56 # NDRPDR sel vhost
- - 57 # NDRPDR sel vhost
- - 58 # NDRPDR sel vhost
- - 59 # NDRPDR sel vhost
+# - 53 # NDRPDR sel vhost
+# - 54 # NDRPDR sel vhost
+# - 55 # NDRPDR sel vhost
+# - 56 # NDRPDR sel vhost
+# - 57 # NDRPDR sel vhost
+# - 58 # NDRPDR sel vhost
+# - 59 # NDRPDR sel vhost
plot-vpp-http-server-performance:
csit-vpp-perf-verify-1908-2n-skx:
- 29 # TCP
- 30 # TCP
- 58 # TCP
- - 59 # TCP
- - 60 # TCP
- - 61 # TCP
- - 62 # TCP
- - 63 # TCP
- - 64 # TCP
- - 65 # TCP
+# - 59 # TCP
+# - 60 # TCP
+# - 61 # TCP
+# - 62 # TCP
+# - 63 # TCP
+# - 64 # TCP
+# - 65 # TCP
plot-dpdk-throughput-latency-3n-hsw:
csit-dpdk-perf-verify-1908-3n-hsw:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
# 3n-skx
@@ -195,39 +195,39 @@
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
plot-vpp-throughput-lat-tsa-3n-skx-vhost:
csit-vpp-perf-verify-1908-3n-skx:
- 52 # NDRPDR sel vhost
- 53 # NDRPDR sel vhost
- 55 # NDRPDR sel vhost
- - 61 # NDRPDR sel vhost
- - 62 # NDRPDR sel vhost
- - 63 # NDRPDR sel vhost
- - 64 # NDRPDR sel vhost
- - 65 # NDRPDR sel vhost
- - 66 # NDRPDR sel vhost
- - 67 # NDRPDR sel vhost
+# - 61 # NDRPDR sel vhost
+# - 62 # NDRPDR sel vhost
+# - 63 # NDRPDR sel vhost
+# - 64 # NDRPDR sel vhost
+# - 65 # NDRPDR sel vhost
+# - 66 # NDRPDR sel vhost
+# - 67 # NDRPDR sel vhost
plot-dpdk-throughput-latency-3n-skx:
csit-dpdk-perf-verify-1908-3n-skx:
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
# 2n-skx
@@ -236,26 +236,26 @@
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
plot-vpp-throughput-lat-tsa-2n-skx-vhost:
csit-vpp-perf-verify-1908-2n-skx:
- 88 # NDRPDR sel vhost
- 89 # NDRPDR sel vhost
- 91 # NDRPDR sel vhost
- - 92 # NDRPDR sel vhost
- - 93 # NDRPDR sel vhost
- - 94 # NDRPDR sel vhost
- - 95 # NDRPDR sel vhost
- - 96 # NDRPDR sel vhost
- - 97 # NDRPDR sel vhost
- - 98 # NDRPDR sel vhost
+# - 92 # NDRPDR sel vhost
+# - 93 # NDRPDR sel vhost
+# - 94 # NDRPDR sel vhost
+# - 95 # NDRPDR sel vhost
+# - 96 # NDRPDR sel vhost
+# - 97 # NDRPDR sel vhost
+# - 98 # NDRPDR sel vhost
plot-vpp-soak-2n-skx:
csit-vpp-perf-verify-1908-2n-skx:
@@ -267,13 +267,13 @@
- 22 # NFV
- 31 # NFV
- 32 # NFV
- - 37 # NFV
- - 38 # NFV
- - 39 # NFV
- - 53 # NFV
- - 67 # NFV
- - 68 # NFV
- - 70 # NFV
+# - 37 # NFV
+# - 38 # NFV
+# - 39 # NFV
+# - 53 # NFV
+# - 67 # NFV
+# - 68 # NFV
+# - 70 # NFV
plot-vpp-nfv-2n-skx-mrr:
csit-vpp-perf-verify-1908-2n-skx:
@@ -284,26 +284,26 @@
- 113 # NFV reconf sel
- 114 # NFV reconf sel
- 115 # NFV reconf sel
- - 116 # NFV reconf sel
- - 118 # NFV reconf sel
- - 119 # NFV reconf sel
- - 120 # NFV reconf sel
- - 121 # NFV reconf sel
- - 122 # NFV reconf sel
- - 123 # NFV reconf sel
+# - 116 # NFV reconf sel
+# - 118 # NFV reconf sel
+# - 119 # NFV reconf sel
+# - 120 # NFV reconf sel
+# - 121 # NFV reconf sel
+# - 122 # NFV reconf sel
+# - 123 # NFV reconf sel
plot-dpdk-throughput-latency-2n-skx:
csit-dpdk-perf-verify-1908-2n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
# 3n-tsh
@@ -312,13 +312,13 @@
- 7 # NDRPDR sel
- 8 # NDRPDR sel
- 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
# 3n-dnv
@@ -327,13 +327,13 @@
- 6 # NDRPDR sel
- 7 # NDRPDR sel
- 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
# 2n-dnv
@@ -342,13 +342,13 @@
- 1 # NDRPDR sel
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- - 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
+# - 4 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
###########################################################################
# Simple tables (only one data set):
@@ -360,26 +360,26 @@
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
vpp-nic-comparison-2n-skx:
csit-vpp-perf-verify-1908-2n-skx:
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
vpp-nic-comparison-3n-skx-mrr:
csit-vpp-perf-verify-1908-3n-skx:
@@ -396,45 +396,45 @@
- 24 # NDRPDR full
- 25 # NDRPDR full
- 26 # NDRPDR full
- - 27 # NDRPDR full
- - 28 # NDRPDR full
- - 29 # NDRPDR full
- - 32 # NDRPDR full
- - 33 # NDRPDR full
- - 36 # NDRPDR full
- - 38 # NDRPDR full
- - 50 # NDRPDR full vhost
+# - 27 # NDRPDR full
+# - 28 # NDRPDR full
+# - 29 # NDRPDR full
+# - 32 # NDRPDR full
+# - 33 # NDRPDR full
+# - 36 # NDRPDR full
+# - 38 # NDRPDR full
+# - 50 # NDRPDR full vhost
vpp-perf-results-3n-skx:
csit-vpp-perf-verify-1908-3n-skx:
- 27 # NDRPDR full
- 28 # NDRPDR full
- 30 # NDRPDR full
- - 31 # NDRPDR full
- - 32 # NDRPDR full
- - 33 # NDRPDR full
- - 34 # NDRPDR full
- - 35 # NDRPDR full
- - 37 # NDRPDR full
- - 39 # NDRPDR full
- - 40 # NDRPDR full
- - 41 # NDRPDR full
- - 54 # NDRPDR full
+# - 31 # NDRPDR full
+# - 32 # NDRPDR full
+# - 33 # NDRPDR full
+# - 34 # NDRPDR full
+# - 35 # NDRPDR full
+# - 37 # NDRPDR full
+# - 39 # NDRPDR full
+# - 40 # NDRPDR full
+# - 41 # NDRPDR full
+# - 54 # NDRPDR full
vpp-perf-results-2n-skx:
csit-vpp-perf-verify-1908-2n-skx:
- 48 # NDRPDR full
- 49 # NDRPDR full
- 50 # NDRPDR full
- - 51 # NDRPDR full
- - 54 # NDRPDR full
- - 55 # NDRPDR full
- - 90 # NDRPDR full vhost
- - 110 # NFV full
- - 111 # NFV full
- - 126 # NFV full
- - 127 # NFV full
- - 112 # NFV reconf full
+# - 51 # NDRPDR full
+# - 54 # NDRPDR full
+# - 55 # NDRPDR full
+# - 90 # NDRPDR full vhost
+# - 110 # NFV full
+# - 111 # NFV full
+# - 126 # NFV full
+# - 127 # NFV full
+# - 112 # NFV reconf full
vpp-perf-results-3n-tsh:
csit-vpp-perf-verify-1908-3n-tsh:
@@ -506,47 +506,47 @@
- 10 # NDRPDR sel
- 19 # NDRPDR sel
- 31 # NDRPDR sel
- - 36 # NDRPDR sel
- - 40 # NDRPDR sel
- - 42 # NDRPDR sel
- - 44 # NDRPDR sel
- - 46 # NDRPDR sel
- - 47 # NDRPDR sel
- - 48 # NDRPDR sel
- - 61 # NDRPDR LB
- - 62 # NDRPDR LB
- - 63 # NDRPDR LB
- - 64 # NDRPDR LB
- - 65 # NDRPDR LB
- - 66 # NDRPDR LB
- - 67 # NDRPDR LB
- - 69 # NDRPDR LB
- - 70 # NDRPDR LB
- - 71 # NDRPDR LB
+# - 36 # NDRPDR sel
+# - 40 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 46 # NDRPDR sel
+# - 47 # NDRPDR sel
+# - 48 # NDRPDR sel
+# - 61 # NDRPDR LB
+# - 62 # NDRPDR LB
+# - 63 # NDRPDR LB
+# - 64 # NDRPDR LB
+# - 65 # NDRPDR LB
+# - 66 # NDRPDR LB
+# - 67 # NDRPDR LB
+# - 69 # NDRPDR LB
+# - 70 # NDRPDR LB
+# - 71 # NDRPDR LB
vpp-performance-changes-3n-hsw-ref:
csit-vpp-perf-verify-1904-3n-hsw:
- 11 # NDRPDR sel
- 12 # NDRPDR sel
- 20 # NDRPDR sel
- - 21 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
- - 30 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
+# - 30 # NDRPDR sel
vpp-performance-changes-3n-hsw-cmp:
csit-vpp-perf-verify-1908-3n-hsw:
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
vpp-performance-changes-3n-hsw:
- "vpp-performance-changes-3n-hsw-h1"
- "vpp-performance-changes-3n-hsw-ref"
@@ -557,47 +557,47 @@
- 18 # NDRPDR sel
- 19 # NDRPDR sel
- 20 # NDRPDR sel
- - 28 # NDRPDR sel
- - 31 # NDRPDR sel
- - 33 # NDRPDR sel
- - 36 # NDRPDR sel
- - 38 # NDRPDR sel
- - 47 # NDRPDR sel
- - 48 # NDRPDR sel
- - 54 # NDRPDR LB
- - 57 # NDRPDR LB
- - 58 # NDRPDR LB
- - 59 # NDRPDR LB
- - 60 # NDRPDR LB
- - 63 # NDRPDR LB
- - 64 # NDRPDR LB
- - 65 # NDRPDR LB
- - 66 # NDRPDR LB
- - 67 # NDRPDR LB
+# - 28 # NDRPDR sel
+# - 31 # NDRPDR sel
+# - 33 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 38 # NDRPDR sel
+# - 47 # NDRPDR sel
+# - 48 # NDRPDR sel
+# - 54 # NDRPDR LB
+# - 57 # NDRPDR LB
+# - 58 # NDRPDR LB
+# - 59 # NDRPDR LB
+# - 60 # NDRPDR LB
+# - 63 # NDRPDR LB
+# - 64 # NDRPDR LB
+# - 65 # NDRPDR LB
+# - 66 # NDRPDR LB
+# - 67 # NDRPDR LB
vpp-performance-changes-3n-skx-ref:
csit-vpp-perf-verify-1904-3n-skx:
- 8 # NDRPDR sel
- 9 # NDRPDR sel
- 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 14 # NDRPDR sel
- - 16 # NDRPDR sel
- - 21 # NDRPDR sel
- - 23 # NDRPDR sel
- - 25 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 25 # NDRPDR sel
vpp-performance-changes-3n-skx-cmp:
csit-vpp-perf-verify-1908-3n-skx:
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
vpp-performance-changes-3n-skx:
- "vpp-performance-changes-3n-skx-h1"
- "vpp-performance-changes-3n-skx-ref"
@@ -608,37 +608,37 @@
- 5 # NDRPDR sel
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- - 21 # NDRPDR sel
- - 23 # NDRPDR sel
- - 31 # NDRPDR sel
- - 34 # NDRPDR sel
- - 40 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 31 # NDRPDR sel
+# - 34 # NDRPDR sel
+# - 40 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
vpp-performance-changes-2n-skx-ref:
csit-vpp-perf-verify-1904-2n-skx:
- 12 # NDRPDR sel
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 21 # NDRPDR sel
- - 27 # NDRPDR sel
- - 30 # NDRPDR sel
- - 31 # NDRPDR sel
- - 32 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 27 # NDRPDR sel
+# - 30 # NDRPDR sel
+# - 31 # NDRPDR sel
+# - 32 # NDRPDR sel
vpp-performance-changes-2n-skx-cmp:
csit-vpp-perf-verify-1908-2n-skx:
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
vpp-performance-changes-2n-skx:
- "vpp-performance-changes-2n-skx-h1"
- "vpp-performance-changes-2n-skx-ref"
@@ -649,13 +649,13 @@
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
vpp-soak-vs-ndr-2n-skx-cmp:
csit-vpp-perf-verify-1908-2n-skx:
- 83 # SOAK
@@ -665,13 +665,13 @@
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
- 83 # SOAK
- 86 # SOAK
@@ -722,37 +722,37 @@
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
dpdk-performance-changes-3n-hsw-ref:
csit-dpdk-perf-verify-1904-3n-hsw:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
dpdk-performance-changes-3n-hsw-cmp:
csit-dpdk-perf-verify-1908-3n-hsw:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
dpdk-performance-changes-3n-hsw:
- "dpdk-performance-changes-3n-hsw-h1"
- "dpdk-performance-changes-3n-hsw-ref"
@@ -763,37 +763,37 @@
- 3 # NDRPDR sel
- 5 # NDRPDR sel
- 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
dpdk-performance-changes-3n-skx-ref:
csit-dpdk-perf-verify-1904-3n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
dpdk-performance-changes-3n-skx-cmp:
csit-dpdk-perf-verify-1908-3n-skx:
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
dpdk-performance-changes-3n-skx:
- "dpdk-performance-changes-3n-skx-h1"
- "dpdk-performance-changes-3n-skx-ref"
@@ -804,37 +804,37 @@
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
dpdk-performance-changes-2n-skx-ref:
csit-dpdk-perf-verify-1904-2n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
dpdk-performance-changes-2n-skx-cmp:
csit-dpdk-perf-verify-1908-2n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
dpdk-performance-changes-2n-skx:
- "dpdk-performance-changes-2n-skx-h1"
- "dpdk-performance-changes-2n-skx-ref"
@@ -847,25 +847,25 @@
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
vpp-compare-testbeds-cmp:
csit-vpp-perf-verify-1908-3n-skx:
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
vpp-compare-testbeds:
- "vpp-compare-testbeds-ref"
- "vpp-compare-testbeds-cmp"
@@ -885,25 +885,25 @@
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
dpdk-compare-testbeds-cmp:
csit-dpdk-perf-verify-1908-3n-skx:
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
dpdk-compare-testbeds:
- "dpdk-compare-testbeds-ref"
- "dpdk-compare-testbeds-cmp"
@@ -915,25 +915,25 @@
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
vpp-compare-topologies-cmp:
csit-vpp-perf-verify-1908-2n-skx:
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
vpp-compare-topologies:
- "vpp-compare-topologies-ref"
- "vpp-compare-topologies-cmp"
@@ -953,25 +953,25 @@
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
dpdk-compare-topologies-cmp:
csit-dpdk-perf-verify-1908-2n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
dpdk-compare-topologies:
- "dpdk-compare-topologies-ref"
- "dpdk-compare-topologies-cmp"
@@ -1412,67 +1412,67 @@
- 10 # NDRPDR sel
- 19 # NDRPDR sel
- 31 # NDRPDR sel
- - 36 # NDRPDR sel
- - 40 # NDRPDR sel
- - 42 # NDRPDR sel
- - 44 # NDRPDR sel
- - 46 # NDRPDR sel
- - 47 # NDRPDR sel
- - 48 # NDRPDR sel
- - 61 # NDRPDR LB
- - 62 # NDRPDR LB
- - 63 # NDRPDR LB
- - 64 # NDRPDR LB
- - 65 # NDRPDR LB
- - 66 # NDRPDR LB
- - 67 # NDRPDR LB
- - 69 # NDRPDR LB
- - 70 # NDRPDR LB
- - 71 # NDRPDR LB
+# - 36 # NDRPDR sel
+# - 40 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 46 # NDRPDR sel
+# - 47 # NDRPDR sel
+# - 48 # NDRPDR sel
+# - 61 # NDRPDR LB
+# - 62 # NDRPDR LB
+# - 63 # NDRPDR LB
+# - 64 # NDRPDR LB
+# - 65 # NDRPDR LB
+# - 66 # NDRPDR LB
+# - 67 # NDRPDR LB
+# - 69 # NDRPDR LB
+# - 70 # NDRPDR LB
+# - 71 # NDRPDR LB
csit-vpp-perf-verify-1904-3n-hsw:
- 9 # MRR sel
- 11 # NDRPDR sel
- 12 # NDRPDR sel
- 20 # NDRPDR sel
- - 21 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
- - 30 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
+# - 30 # NDRPDR sel
csit-vpp-perf-verify-1908-3n-hsw:
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
- 48 # NDRPDR sel vhost
- 49 # NDRPDR sel vhost
- 52 # NDRPDR sel vhost
- - 53 # NDRPDR sel vhost
- - 54 # NDRPDR sel vhost
- - 55 # NDRPDR sel vhost
- - 56 # NDRPDR sel vhost
- - 57 # NDRPDR sel vhost
- - 58 # NDRPDR sel vhost
- - 59 # NDRPDR sel vhost
+# - 53 # NDRPDR sel vhost
+# - 54 # NDRPDR sel vhost
+# - 55 # NDRPDR sel vhost
+# - 56 # NDRPDR sel vhost
+# - 57 # NDRPDR sel vhost
+# - 58 # NDRPDR sel vhost
+# - 59 # NDRPDR sel vhost
- 24 # NDRPDR full
- 25 # NDRPDR full
- 26 # NDRPDR full
- - 27 # NDRPDR full
- - 28 # NDRPDR full
- - 29 # NDRPDR full
- - 32 # NDRPDR full
- - 33 # NDRPDR full
- - 36 # NDRPDR full
- - 38 # NDRPDR full
- - 50 # NDRPDR full vhost
+# - 27 # NDRPDR full
+# - 28 # NDRPDR full
+# - 29 # NDRPDR full
+# - 32 # NDRPDR full
+# - 33 # NDRPDR full
+# - 36 # NDRPDR full
+# - 38 # NDRPDR full
+# - 50 # NDRPDR full vhost
- 39 # MRR sel
csit-vpp-perf-verify-1901-3n-skx:
@@ -1480,69 +1480,69 @@
- 18 # NDRPDR sel
- 19 # NDRPDR sel
- 20 # NDRPDR sel
- - 28 # NDRPDR sel
- - 31 # NDRPDR sel
- - 33 # NDRPDR sel
- - 36 # NDRPDR sel
- - 38 # NDRPDR sel
- - 47 # NDRPDR sel
- - 48 # NDRPDR sel
- - 54 # NDRPDR LB
- - 57 # NDRPDR LB
- - 58 # NDRPDR LB
- - 59 # NDRPDR LB
- - 60 # NDRPDR LB
- - 63 # NDRPDR LB
- - 64 # NDRPDR LB
- - 65 # NDRPDR LB
- - 66 # NDRPDR LB
- - 67 # NDRPDR LB
+# - 28 # NDRPDR sel
+# - 31 # NDRPDR sel
+# - 33 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 38 # NDRPDR sel
+# - 47 # NDRPDR sel
+# - 48 # NDRPDR sel
+# - 54 # NDRPDR LB
+# - 57 # NDRPDR LB
+# - 58 # NDRPDR LB
+# - 59 # NDRPDR LB
+# - 60 # NDRPDR LB
+# - 63 # NDRPDR LB
+# - 64 # NDRPDR LB
+# - 65 # NDRPDR LB
+# - 66 # NDRPDR LB
+# - 67 # NDRPDR LB
csit-vpp-perf-verify-1904-3n-skx:
- 7 # MRR sel
- 8 # NDRPDR sel
- 9 # NDRPDR sel
- 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 14 # NDRPDR sel
- - 16 # NDRPDR sel
- - 21 # NDRPDR sel
- - 23 # NDRPDR sel
- - 25 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 25 # NDRPDR sel
csit-vpp-perf-verify-1908-3n-skx:
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
- - 24 # NDRPDR sel
- - 25 # NDRPDR sel
- - 26 # NDRPDR sel
- - 29 # NDRPDR sel
- - 52 # NDRPDR sel vhost
- - 53 # NDRPDR sel vhost
- - 55 # NDRPDR sel vhost
- - 61 # NDRPDR sel vhost
- - 62 # NDRPDR sel vhost
- - 63 # NDRPDR sel vhost
- - 64 # NDRPDR sel vhost
- - 65 # NDRPDR sel vhost
- - 66 # NDRPDR sel vhost
- - 67 # NDRPDR sel vhost
+# - 20 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 24 # NDRPDR sel
+# - 25 # NDRPDR sel
+# - 26 # NDRPDR sel
+# - 29 # NDRPDR sel
+# - 52 # NDRPDR sel vhost
+# - 53 # NDRPDR sel vhost
+# - 55 # NDRPDR sel vhost
+# - 61 # NDRPDR sel vhost
+# - 62 # NDRPDR sel vhost
+# - 63 # NDRPDR sel vhost
+# - 64 # NDRPDR sel vhost
+# - 65 # NDRPDR sel vhost
+# - 66 # NDRPDR sel vhost
+# - 67 # NDRPDR sel vhost
- 27 # NDRPDR full
- 28 # NDRPDR full
- 30 # NDRPDR full
- - 31 # NDRPDR full
- - 32 # NDRPDR full
- - 33 # NDRPDR full
- - 34 # NDRPDR full
- - 35 # NDRPDR full
- - 37 # NDRPDR full
- - 39 # NDRPDR full
- - 40 # NDRPDR full
- - 41 # NDRPDR full
- - 54 # NDRPDR full
+# - 31 # NDRPDR full
+# - 32 # NDRPDR full
+# - 33 # NDRPDR full
+# - 34 # NDRPDR full
+# - 35 # NDRPDR full
+# - 37 # NDRPDR full
+# - 39 # NDRPDR full
+# - 40 # NDRPDR full
+# - 41 # NDRPDR full
+# - 54 # NDRPDR full
- 36 # MRR sel
csit-vpp-perf-verify-1901-2n-skx:
@@ -1550,90 +1550,90 @@
- 5 # NDRPDR sel
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- - 21 # NDRPDR sel
- - 23 # NDRPDR sel
- - 31 # NDRPDR sel
- - 34 # NDRPDR sel
- - 40 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 23 # NDRPDR sel
+# - 31 # NDRPDR sel
+# - 34 # NDRPDR sel
+# - 40 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
csit-vpp-perf-verify-1904-2n-skx:
- 11 # MRR sel
- 12 # NDRPDR sel
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 21 # NDRPDR sel
- - 27 # NDRPDR sel
- - 30 # NDRPDR sel
- - 31 # NDRPDR sel
- - 32 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 27 # NDRPDR sel
+# - 30 # NDRPDR sel
+# - 31 # NDRPDR sel
+# - 32 # NDRPDR sel
csit-vpp-perf-verify-1908-2n-skx:
- 26 # NDRPDR sel
- 27 # NDRPDR sel
- 34 # NDRPDR sel
- - 35 # NDRPDR sel
- - 36 # NDRPDR sel
- - 42 # NDRPDR sel
- - 43 # NDRPDR sel
- - 44 # NDRPDR sel
- - 45 # NDRPDR sel
- - 46 # NDRPDR sel
+# - 35 # NDRPDR sel
+# - 36 # NDRPDR sel
+# - 42 # NDRPDR sel
+# - 43 # NDRPDR sel
+# - 44 # NDRPDR sel
+# - 45 # NDRPDR sel
+# - 46 # NDRPDR sel
- 88 # NDRPDR sel vhost
- 89 # NDRPDR sel vhost
- 91 # NDRPDR sel vhost
- - 92 # NDRPDR sel vhost
- - 93 # NDRPDR sel vhost
- - 94 # NDRPDR sel vhost
- - 95 # NDRPDR sel vhost
- - 96 # NDRPDR sel vhost
- - 97 # NDRPDR sel vhost
- - 98 # NDRPDR sel vhost
+# - 92 # NDRPDR sel vhost
+# - 93 # NDRPDR sel vhost
+# - 94 # NDRPDR sel vhost
+# - 95 # NDRPDR sel vhost
+# - 96 # NDRPDR sel vhost
+# - 97 # NDRPDR sel vhost
+# - 98 # NDRPDR sel vhost
- 48 # NDRPDR full
- 49 # NDRPDR full
- 50 # NDRPDR full
- - 51 # NDRPDR full
- - 54 # NDRPDR full
- - 55 # NDRPDR full
- - 90 # NDRPDR full vhost
+# - 51 # NDRPDR full
+# - 54 # NDRPDR full
+# - 55 # NDRPDR full
+# - 90 # NDRPDR full vhost
- 69 # MRR sel
- 29 # TCP
- 30 # TCP
- 58 # TCP
- - 59 # TCP
- - 60 # TCP
- - 61 # TCP
- - 62 # TCP
- - 63 # TCP
- - 64 # TCP
- - 65 # TCP
+# - 59 # TCP
+# - 60 # TCP
+# - 61 # TCP
+# - 62 # TCP
+# - 63 # TCP
+# - 64 # TCP
+# - 65 # TCP
- 22 # NFV
- 31 # NFV
- 32 # NFV
- - 37 # NFV
- - 38 # NFV
- - 39 # NFV
- - 53 # NFV
- - 67 # NFV
- - 68 # NFV
- - 70 # NFV
- - 110 # NFV full
- - 111 # NFV full
- - 126 # NFV full
- - 127 # NFV full
+# - 37 # NFV
+# - 38 # NFV
+# - 39 # NFV
+# - 53 # NFV
+# - 67 # NFV
+# - 68 # NFV
+# - 70 # NFV
+# - 110 # NFV full
+# - 111 # NFV full
+# - 126 # NFV full
+# - 127 # NFV full
- 71 # NFV MRR
- - 112 # NFV reconf full
+# - 112 # NFV reconf full
- 113 # NFV reconf sel
- 114 # NFV reconf sel
- 115 # NFV reconf sel
- - 116 # NFV reconf sel
- - 118 # NFV reconf sel
- - 119 # NFV reconf sel
- - 120 # NFV reconf sel
- - 121 # NFV reconf sel
- - 122 # NFV reconf sel
- - 123 # NFV reconf sel
+# - 116 # NFV reconf sel
+# - 118 # NFV reconf sel
+# - 119 # NFV reconf sel
+# - 120 # NFV reconf sel
+# - 121 # NFV reconf sel
+# - 122 # NFV reconf sel
+# - 123 # NFV reconf sel
- 83 # SOAK
- 86 # SOAK
@@ -1641,142 +1641,142 @@
- 7 # NDRPDR sel
- 8 # NDRPDR sel
- 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
- 17 # MRR sel
csit-vpp-perf-verify-1908-3n-dnv:
- 6 # NDRPDR sel
- 7 # NDRPDR sel
- 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
- 17 # MRR sel
csit-vpp-perf-verify-1908-2n-dnv:
- 1 # NDRPDR sel
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- - 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
+# - 4 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
- 11 # MRR sel
csit-dpdk-perf-verify-1901-3n-hsw:
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
- - 22 # NDRPDR sel
- - 23 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
+# - 22 # NDRPDR sel
+# - 23 # NDRPDR sel
csit-dpdk-perf-verify-1904-3n-hsw:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
csit-dpdk-perf-verify-1908-3n-hsw:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
csit-dpdk-perf-verify-1901-3n-skx:
- 3 # NDRPDR sel
- 5 # NDRPDR sel
- 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
csit-dpdk-perf-verify-1904-3n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
- - 13 # NDRPDR sel
- - 14 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
+# - 13 # NDRPDR sel
+# - 14 # NDRPDR sel
csit-dpdk-perf-verify-1908-3n-skx:
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- 5 # NDRPDR sel
- - 6 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 6 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
csit-dpdk-perf-verify-1901-2n-skx:
- 12 # NDRPDR sel
- 13 # NDRPDR sel
- 14 # NDRPDR sel
- - 15 # NDRPDR sel
- - 16 # NDRPDR sel
- - 17 # NDRPDR sel
- - 18 # NDRPDR sel
- - 19 # NDRPDR sel
- - 20 # NDRPDR sel
- - 21 # NDRPDR sel
+# - 15 # NDRPDR sel
+# - 16 # NDRPDR sel
+# - 17 # NDRPDR sel
+# - 18 # NDRPDR sel
+# - 19 # NDRPDR sel
+# - 20 # NDRPDR sel
+# - 21 # NDRPDR sel
csit-dpdk-perf-verify-1904-2n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
csit-dpdk-perf-verify-1908-2n-skx:
- 2 # NDRPDR sel
- 3 # NDRPDR sel
- 4 # NDRPDR sel
- - 5 # NDRPDR sel
- - 7 # NDRPDR sel
- - 8 # NDRPDR sel
- - 9 # NDRPDR sel
- - 10 # NDRPDR sel
- - 11 # NDRPDR sel
- - 12 # NDRPDR sel
+# - 5 # NDRPDR sel
+# - 7 # NDRPDR sel
+# - 8 # NDRPDR sel
+# - 9 # NDRPDR sel
+# - 10 # NDRPDR sel
+# - 11 # NDRPDR sel
+# - 12 # NDRPDR sel
csit-vpp-functional-1908-ubuntu1604-virl:
- 64
@@ -2104,7 +2104,7 @@
# VPP Performance Changes 3n-hsw 1t1c pdr
- type: "table"
title: "VPP Performance Changes 3n-hsw 1t1c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-1t1c-pdr"
history:
- title: "rls1901"
@@ -2127,7 +2127,7 @@
# VPP Performance Changes 3n-hsw 2t2c pdr
- type: "table"
title: "VPP Performance Changes 3n-hsw 2t2c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-2t2c-pdr"
history:
- title: "rls1901"
@@ -2150,7 +2150,7 @@
# VPP Performance Changes 3n-hsw 1t1c ndr
- type: "table"
title: "VPP Performance Changes 3n-hsw 1t1c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-1t1c-ndr"
history:
- title: "rls1901"
@@ -2173,7 +2173,7 @@
# VPP Performance Changes 3n-hsw 2t2c ndr
- type: "table"
title: "VPP Performance Changes 3n-hsw 2t2c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-2t2c-ndr"
history:
- title: "rls1901"
@@ -2196,7 +2196,7 @@
# VPP Performance Changes 3n-skx 2t1c pdr
- type: "table"
title: "VPP Performance Changes 3n-skx 2t1c pdr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-2t1c-pdr"
history:
- title: "rls1901 x710"
@@ -2223,7 +2223,7 @@
# VPP Performance Changes 3n-skx 4t2c pdr
- type: "table"
title: "VPP Performance Changes 3n-skx 4t2c pdr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-4t2c-pdr"
history:
- title: "rls1901 x710"
@@ -2250,7 +2250,7 @@
# VPP Performance Changes 3n-skx 2t1c ndr
- type: "table"
title: "VPP Performance Changes 3n-skx 2t1c ndr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-2t1c-ndr"
history:
- title: "rls1901 x710"
@@ -2277,7 +2277,7 @@
# VPP Performance Changes 3n-skx 4t2c ndr
- type: "table"
title: "VPP Performance Changes 3n-skx 4t2c ndr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-4t2c-ndr"
history:
- title: "rls1901 x710"
@@ -2304,7 +2304,7 @@
# VPP Performance Changes 2n-skx 2t1c pdr
- type: "table"
title: "VPP Performance Changes 2n-skx 2t1c pdr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-2t1c-pdr"
history:
- title: "rls1901 x710"
@@ -2331,7 +2331,7 @@
# VPP Performance Changes 2n-skx 4t2c pdr
- type: "table"
title: "VPP Performance Changes 2n-skx 4t2c pdr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-4t2c-pdr"
history:
- title: "rls1901 x710"
@@ -2358,7 +2358,7 @@
# VPP Performance Changes 2n-skx 2t1c ndr
- type: "table"
title: "VPP Performance Changes 2n-skx 2t1c ndr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-2t1c-ndr"
history:
- title: "rls1901 x710"
@@ -2385,7 +2385,7 @@
# VPP Performance Changes 2n-skx 4t2c ndr
- type: "table"
title: "VPP Performance Changes 2n-skx 4t2c ndr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-4t2c-ndr"
history:
- title: "rls1901 x710"
@@ -2412,7 +2412,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 1c ndr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 1c ndr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-1c-ndr"
reference:
title: "3n-hsw xl710"
@@ -2434,7 +2434,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 2c ndr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 2c ndr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-2c-ndr"
reference:
title: "3n-hsw xl710"
@@ -2456,7 +2456,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 1c pdr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 1c pdr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-1c-pdr"
reference:
title: "3n-hsw xl710"
@@ -2478,7 +2478,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 2c pdr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 2c pdr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-2c-pdr"
reference:
title: "3n-hsw xl710"
@@ -2500,7 +2500,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 1c ndr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 1c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-1c-ndr"
reference:
title: "3-Node Skx"
@@ -2519,7 +2519,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 2c ndr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 2c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-2c-ndr"
reference:
title: "3-Node Skx"
@@ -2538,7 +2538,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 1c pdr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 1c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-1c-pdr"
reference:
title: "3-Node Skx"
@@ -2557,7 +2557,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 2c pdr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 2c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-2c-pdr"
reference:
title: "3-Node Skx"
@@ -2576,7 +2576,7 @@
# VPP Performance Changes 3n-hsw 1t1c MRR
- type: "table"
title: "VPP Performance Changes 3n-hsw 1t1c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-1t1c-mrr"
history:
- title: "rls1901"
@@ -2598,7 +2598,7 @@
# VPP Performance Changes 3n-hsw 2t2c MRR
- type: "table"
title: "VPP Performance Changes 3n-hsw 2t2c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-2t2c-mrr"
history:
- title: "rls1901"
@@ -2620,7 +2620,7 @@
# VPP Performance Changes 3n-hsw 4t4c MRR
- type: "table"
title: "VPP Performance Changes 3n-hsw 4t4c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-hsw-4t4c-mrr"
history:
- title: "rls1901"
@@ -2642,7 +2642,7 @@
# VPP Performance Changes 3n-skx 2t1c MRR
- type: "table"
title: "VPP Performance Changes 3n-skx 2t1c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-2t1c-mrr"
history:
- title: "rls1901"
@@ -2664,7 +2664,7 @@
# VPP Performance Changes 3n-skx 4t2c MRR
- type: "table"
title: "VPP Performance Changes 3n-skx 4t2c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-4t2c-mrr"
history:
- title: "rls1901"
@@ -2686,7 +2686,7 @@
# VPP Performance Changes 3n-skx 8t4c MRR
- type: "table"
title: "VPP Performance Changes 3n-skx 8t4c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-3n-skx-8t4c-mrr"
history:
- title: "rls1901"
@@ -2708,7 +2708,7 @@
# VPP Performance Changes 2n-skx 2t1c MRR
- type: "table"
title: "VPP Performance Changes 2n-skx 2t1c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-2t1c-mrr"
history:
- title: "rls1901"
@@ -2730,7 +2730,7 @@
# VPP Performance Changes 2n-skx 4t2c MRR
- type: "table"
title: "VPP Performance Changes 2n-skx 4t2c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-4t2c-mrr"
history:
- title: "rls1901"
@@ -2752,7 +2752,7 @@
# VPP Performance Changes 2n-skx 8t4c MRR
- type: "table"
title: "VPP Performance Changes 2n-skx 8t4c MRR"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-changes-2n-skx-8t4c-mrr"
history:
- title: "rls1901"
@@ -2774,7 +2774,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 1c mrr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 1c mrr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-1c-mrr"
reference:
title: "3n-hsw xl710"
@@ -2796,7 +2796,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 2c mrr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 2c mrr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-2c-mrr"
reference:
title: "3n-hsw xl710"
@@ -2818,7 +2818,7 @@
# VPP Comparison Across Testbeds 3n-hsw to 3n-skx 4c mrr
- type: "table"
title: "VPP Comparison Across Testbeds 3n-hsw to 3n-skx 4c mrr"
- algorithm: "table_performance_comparison_nic"
+ algorithm: "table_perf_comparison_nic"
output-file: "{DIR[STATIC,VPP]}/performance-compare-testbeds-3n-hsw-3n-skx-4c-mrr"
reference:
title: "3n-hsw xl710"
@@ -2840,7 +2840,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 1c mrr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 1c mrr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-1c-mrr"
reference:
title: "3-Node Skx"
@@ -2859,7 +2859,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 2c mrr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 2c mrr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-2c-mrr"
reference:
title: "3-Node Skx"
@@ -2878,7 +2878,7 @@
# VPP Comparison Across Topologies 3n-skx to 2n-skx 4c mrr
- type: "table"
title: "VPP Comparison Across Topologies 3n-skx to 2n-skx 4c mrr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,VPP]}/performance-compare-topologies-3n-skx-2n-skx-4c-mrr"
reference:
title: "3-Node Skx"
@@ -3613,7 +3613,7 @@
# DPDK Performance Changes 3n-hsw 1t1c pdr
- type: "table"
title: "DPDK Performance Changes 3n-hsw 1t1c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-hsw-1t1c-pdr"
history:
- title: "rls1901"
@@ -3635,7 +3635,7 @@
# DPDK Performance Changes 3n-hsw 2t2c pdr
- type: "table"
title: "DPDK Performance Changes 3n-hsw 2t2c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-hsw-2t2c-pdr"
history:
- title: "rls1901"
@@ -3657,7 +3657,7 @@
# DPDK Performance Changes 3n-hsw 1t1c ndr
- type: "table"
title: "DPDK Performance Changes 3n-hsw 1t1c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-hsw-1t1c-ndr"
history:
- title: "rls1901"
@@ -3679,7 +3679,7 @@
# DPDK Performance Changes 3n-hsw 2t2c ndr
- type: "table"
title: "DPDK Performance Changes 3n-hsw 2t2c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-hsw-2t2c-ndr"
history:
- title: "rls1901"
@@ -3701,7 +3701,7 @@
# DPDK Performance Changes 3n-skx 2t1c pdr
- type: "table"
title: "DPDK Performance Changes 3n-skx 2t1c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-skx-2t1c-pdr"
history:
- title: "rls1901"
@@ -3723,7 +3723,7 @@
# DPDK Performance Changes 3n-skx 4t2c pdr
- type: "table"
title: "DPDK Performance Changes 3n-skx 4t2c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-skx-4t2c-pdr"
history:
- title: "rls1901"
@@ -3745,7 +3745,7 @@
# DPDK Performance Changes 3n-skx 2t1c ndr
- type: "table"
title: "DPDK Performance Changes 3n-skx 2t1c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-skx-2t1c-ndr"
history:
- title: "rls1901"
@@ -3767,7 +3767,7 @@
# DPDK Performance Changes 3n-skx 4t2c ndr
- type: "table"
title: "DPDK Performance Changes 3n-skx 4t2c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-3n-skx-4t2c-ndr"
history:
- title: "rls1901"
@@ -3789,7 +3789,7 @@
# DPDK Comparison Across Testbeds 3n-hsw to 3n-skx ndr
- type: "table"
title: "DPDK Comparison Across Testbeds 3n-hsw to 3n-skx ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-compare-testbeds-3n-hsw-3n-skx-ndr"
reference:
title: "3-Node Hsw"
@@ -3808,7 +3808,7 @@
# DPDK Comparison Across Testbeds 3n-hsw to 3n-skx pdr
- type: "table"
title: "DPDK Comparison Across Testbeds 3n-hsw to 3n-skx pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-compare-testbeds-3n-hsw-3n-skx-pdr"
reference:
title: "3-Node Hsw"
@@ -3827,7 +3827,7 @@
# DPDK Comparison Across Topologies 3n-skx to 2n-skx ndr
- type: "table"
title: "DPDK Comparison Across Topologies 3n-skx to 2n-skx ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-compare-topologies-3n-skx-2n-skx-ndr"
reference:
title: "3-Node Skx"
@@ -3846,7 +3846,7 @@
# DPDK Comparison Across Topologies 3n-skx to 2n-skx pdr
- type: "table"
title: "DPDK Comparison Across Topologies 3n-skx to 2n-skx pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-compare-topologies-3n-skx-2n-skx-pdr"
reference:
title: "3-Node Skx"
@@ -3865,7 +3865,7 @@
# DPDK Performance Changes 2n-skx 2t1c pdr
- type: "table"
title: "DPDK Performance Changes 2n-skx 2t1c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-2n-skx-2t1c-pdr"
history:
- title: "rls1901"
@@ -3887,7 +3887,7 @@
# DPDK Performance Changes 2n-skx 4t2c pdr
- type: "table"
title: "DPDK Performance Changes 2n-skx 4t2c pdr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-2n-skx-4t2c-pdr"
history:
- title: "rls1901"
@@ -3909,7 +3909,7 @@
# DPDK Performance Changes 2n-skx 2t1c ndr
- type: "table"
title: "DPDK Performance Changes 2n-skx 2t1c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-2n-skx-2t1c-ndr"
history:
- title: "rls1901"
@@ -3931,7 +3931,7 @@
# DPDK Performance Changes 2n-skx 4t2c ndr
- type: "table"
title: "DPDK Performance Changes 2n-skx 4t2c ndr"
- algorithm: "table_performance_comparison"
+ algorithm: "table_perf_comparison"
output-file: "{DIR[STATIC,DPDK]}/performance-changes-2n-skx-4t2c-ndr"
history:
- title: "rls1901"
@@ -4683,7 +4683,7 @@
# Plots VPP HTTP Server Performance
- type: "plot"
title: "VPP HTTP Server Performance"
- algorithm: "plot_http_server_performance_box"
+ algorithm: "plot_http_server_perf_box"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/http-server-performance-cps"
data:
@@ -4705,7 +4705,7 @@
- type: "plot"
title: "VPP HTTP Server Performance"
- algorithm: "plot_http_server_performance_box"
+ algorithm: "plot_http_server_perf_box"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/http-server-performance-rps"
data:
@@ -4728,26 +4728,17 @@
# Soak Test (PLRsearch), boxes
- type: "plot"
title: "VPP Critical rate: 30 Minutes Soak Test (PLRsearch)"
- algorithm: "plot_performance_box"
+ algorithm: "plot_perf_box_name"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/soak-test-1"
data: "plot-vpp-soak-2n-skx"
- filter: "('L2BDMACLRN' or 'L2PATCH' or 'L2XCBASE') and not 'VHOST' and not 'MEMIF'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- sort:
- - "DRV_AVF"
- - "DRV_AVF"
- - "DRV_AVF"
- - "L2BDMACLRN"
- - "L2XCBASE"
- - "L2PATCH"
- traces:
- hoverinfo: "y+name"
- boxpoints: "all"
- whiskerwidth: 0
+ include:
+ - "Tests.Vpp.Perf.L2.2N1L-25Ge2P1Xxv710-Avf-Eth-L2Patch-Soak.64B-2t1c-avf-eth-l2patch-soak"
+ - "Tests.Vpp.Perf.L2.2N1L-25Ge2P1Xxv710-Avf-Eth-L2Xcbase-Soak.64B-2t1c-avf-eth-l2xcbase-soak"
+ - "Tests.Vpp.Perf.L2.2N1L-25Ge2P1Xxv710-Avf-Eth-L2Bdbasemaclrn-Soak.64B-2t1c-avf-eth-l2bdbasemaclrn-soak"
+ - "Tests.Vpp.Perf.L2.2N1L-25Ge2P1Xxv710-Eth-L2Patch-Soak.64B-2t1c-eth-l2patch-soak"
+ - "Tests.Vpp.Perf.L2.2N1L-25Ge2P1Xxv710-Eth-L2Xcbase-Soak.64B-2t1c-eth-l2xcbase-soak"
+ - "Tests.Vpp.Perf.L2.2N1L-25Ge2P1Xxv710-Eth-L2Bdbasemaclrn-Soak.64B-2t1c-eth-l2bdbasemaclrn-soak"
layout:
title: "30 Minutes Soak Test (PLRsearch)"
layout: "plot-soak-throughput"
@@ -4755,26 +4746,17 @@
# Soak Test (PLRsearch), boxes
- type: "plot"
title: "VPP Critical rate: 30 Minutes Soak Test (PLRsearch)"
- algorithm: "plot_performance_box"
+ algorithm: "plot_perf_box_name"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/soak-test-2"
data: "plot-vpp-soak-2n-skx"
- filter: "'L2BDMACLRN' and ('VHOST' or 'MEMIF') or 'IP4BASE' or 'IP6BASE'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- sort:
- - "VHOST"
- - "VHOST"
- - "MEMIF"
- - "IP4FWD"
- - "IP4FWD"
- - "IP6FWD"
- traces:
- hoverinfo: "y+name"
- boxpoints: "all"
- whiskerwidth: 0
+ include:
+ - "Tests.Vpp.Perf.Vm Vhost.2N1L-25Ge2P1Xxv710-Eth-L2Bdbasemaclrn-Eth-2Vhostvr1024-1Vm-Vppl2Xc-Soak.64B-2t1c-eth-l2bdbasemaclrn-eth-2vhostvr1024-1vm-vppl2xc-soak"
+ - "Tests.Vpp.Perf.Vm Vhost.2N1L-25Ge2P1Xxv710-Eth-L2Bdbasemaclrn-Eth-2Vhostvr1024-1Vm-Soak.64B-2t1c-eth-l2bdbasemaclrn-eth-2vhostvr1024-1vm-soak"
+ - "Tests.Vpp.Perf.Container Memif.2N1L-25Ge2P1Xxv710-Eth-L2Bdbasemaclrn-Eth-2Memif-1Dcr-Soak.64B-2t1c-eth-l2bdbasemaclrn-eth-2memif-1dcr-soak"
+ - "Tests.Vpp.Perf.Ip4.2N1L-25Ge2P1Xxv710-Avf-Ethip4-Ip4Base-Soak.64B-2t1c-avf-ethip4-ip4base-soak"
+ - "Tests.Vpp.Perf.Ip4.2N1L-25Ge2P1Xxv710-Ethip4-Ip4Base-Soak.64B-2t1c-ethip4-ip4base-soak"
+ - "Tests.Vpp.Perf.Ip6.2N1L-25Ge2P1Xxv710-Ethip6-Ip6Base-Soak.78B-2t1c-ethip6-ip6base-soak"
layout:
title: "30 Minutes Soak Test (PLRsearch)"
layout: "plot-soak-throughput"
@@ -4783,7 +4765,7 @@
- type: "plot"
title: "NFV Implied time lost: 2n-skx-xxv710-imix-2t1c-dot1qip4vxlan-l2bd-{Y}ch-1ach-{2XY}vh-{XY}vm-reconf"
- algorithm: "plot_service_density_reconf_box_name"
+ algorithm: "plot_nf_reconf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-imix-2t1c-dot1qip4vxlan-l2bd-reconf"
data: "plot-vpp-nfv-reconf-2n-skx"
include:
@@ -4799,7 +4781,7 @@
- type: "plot"
title: "NFV Implied time lost: 2n-skx-xxv710-imix-4t2c-dot1qip4vxlan-l2bd-{Y}ch-1ach-{2XY}vh-{XY}vm-reconf"
- algorithm: "plot_service_density_reconf_box_name"
+ algorithm: "plot_nf_reconf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-imix-4t2c-dot1qip4vxlan-l2bd-reconf"
data: "plot-vpp-nfv-reconf-2n-skx"
include:
@@ -4815,7 +4797,7 @@
- type: "plot"
title: "NFV Implied time lost: 2n-skx-xxv710-imix-8t4c-dot1qip4vxlan-l2bd-{Y}ch-1ach-{2XY}vh-{XY}vm-reconf"
- algorithm: "plot_service_density_reconf_box_name"
+ algorithm: "plot_nf_reconf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-imix-8t4c-dot1qip4vxlan-l2bd-reconf"
data: "plot-vpp-nfv-reconf-2n-skx"
include:
@@ -4832,7 +4814,7 @@
# Plots - NF Density - VSC - MRR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-vsc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -4864,7 +4846,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-vsc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -4896,7 +4878,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-vsc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -4929,7 +4911,7 @@
# Plots - NF Density - VSC - MRR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-vsc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -4961,7 +4943,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-vsc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -4993,7 +4975,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-vsc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5026,7 +5008,7 @@
# Plots - NF Density - VSC - NDR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-vsc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5058,7 +5040,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-vsc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5090,7 +5072,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-vsc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5123,7 +5105,7 @@
# Plots - NF Density - VSC - NDR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-vsc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5155,7 +5137,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-vsc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5187,7 +5169,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-vsc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5220,7 +5202,7 @@
# Plots - NF Density - VSC - PDR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-vsc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5252,7 +5234,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-vsc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5284,7 +5266,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-vsc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5317,7 +5299,7 @@
# Plots - NF Density - VSC - PDR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-vsc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5349,7 +5331,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-vsc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5381,7 +5363,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}vh-{XY}vm-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-vsc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5415,7 +5397,7 @@
# Plots - NF Density - CSC - MRR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-csc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5447,7 +5429,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-csc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5479,7 +5461,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-csc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5512,7 +5494,7 @@
# Plots - NF Density - CSC - MRR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-csc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5544,7 +5526,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-csc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5576,7 +5558,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-csc-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -5609,7 +5591,7 @@
# Plots - NF Density - CSC - NDR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-csc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5641,7 +5623,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-csc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5673,7 +5655,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-csc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5706,7 +5688,7 @@
# Plots - NF Density - CSC - NDR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-csc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5738,7 +5720,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-csc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5770,7 +5752,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-csc-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5803,7 +5785,7 @@
# Plots - NF Density - CSC - PDR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-csc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5835,7 +5817,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-csc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5867,7 +5849,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-csc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5900,7 +5882,7 @@
# Plots - NF Density - CSC - PDR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-csc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5932,7 +5914,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-csc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5964,7 +5946,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}ch-{2XY}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-csc-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -5998,7 +5980,7 @@
# Plots - NF Density - CSP - MRR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-csp-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -6030,7 +6012,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-csp-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -6062,7 +6044,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-csp-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -6095,7 +6077,7 @@
# Plots - NF Density - CSP - MRR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-csp-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -6127,7 +6109,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-csp-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -6159,7 +6141,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-mrr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-csp-mrr"
data: "plot-vpp-nfv-2n-skx-mrr"
@@ -6192,7 +6174,7 @@
# Plots - NF Density - CSP - NDR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-csp-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6224,7 +6206,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-csp-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6256,7 +6238,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-csp-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6289,7 +6271,7 @@
# Plots - NF Density - CSP - NDR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-csp-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6321,7 +6303,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-csp-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6353,7 +6335,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-ndr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-csp-ndr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6386,7 +6368,7 @@
# Plots - NF Density - CSP - PDR 64b
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-2t1c-base-csp-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6418,7 +6400,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-4t2c-base-csp-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6450,7 +6432,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-64b-8t4c-base-csp-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6483,7 +6465,7 @@
# Plots - NF Density - CSP - PDR IMIX
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-2t1c-base-csp-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6515,7 +6497,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-4t2c-base-csp-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6547,7 +6529,7 @@
- type: "plot"
title: "Packet Throughput: eth-l2bd-{Y}pl-{2Y}mif-{XY}dcr-vppip4-pdr"
- algorithm: "plot_service_density_heatmap"
+ algorithm: "plot_nf_heatmap"
output-file-type: ".html"
output-file: "{DIR[STATIC,VPP]}/l2bd-2n-skx-xxv710-imix-8t4c-base-csp-pdr"
data: "plot-vpp-nfv-2n-skx-ndrpdr"
@@ -6583,7 +6565,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6597,7 +6579,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-memif-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-memif-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6611,7 +6593,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6626,7 +6608,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-memif-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-memif-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6641,7 +6623,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-memif-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-memif-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -6656,7 +6638,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-memif-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-memif-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -6671,7 +6653,7 @@
- type: "plot"
title: "Throughput: 2n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6686,7 +6668,7 @@
- type: "plot"
title: "Throughput: 2n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6701,7 +6683,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6715,7 +6697,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6729,7 +6711,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6744,7 +6726,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6759,7 +6741,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6775,7 +6757,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -6791,7 +6773,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-ip4routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-ip4routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -6804,7 +6786,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-ip4routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-ip4routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -6817,7 +6799,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6832,7 +6814,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6847,7 +6829,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6861,7 +6843,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6875,7 +6857,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6890,7 +6872,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6905,7 +6887,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6921,7 +6903,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -6937,7 +6919,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-ip4routing-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-ip4routing-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -6952,7 +6934,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-ip4routing-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-ip4routing-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -6967,7 +6949,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-features-ip4routing-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-features-ip4routing-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -6983,7 +6965,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-features-ip4routing-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-features-ip4routing-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -6999,7 +6981,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -7014,7 +6996,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -7029,7 +7011,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -7045,7 +7027,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -7061,7 +7043,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7076,7 +7058,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-ip4routing-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7091,7 +7073,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7107,7 +7089,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-features-ip4routing-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7123,7 +7105,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-ip4tunnel-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-ip4tunnel-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7135,7 +7117,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-ip4tunnel-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-ip4tunnel-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7147,7 +7129,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-ip4tunnel-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4tunnel-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7161,7 +7143,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-ip4tunnel-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4tunnel-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7175,7 +7157,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-ip4tunnel-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-ip4tunnel-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7189,7 +7171,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-ip4tunnel-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-ip4tunnel-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7203,7 +7185,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-ip4tunnel-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-ip4tunnel-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7217,7 +7199,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-ip4tunnel-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-ip4tunnel-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7231,7 +7213,7 @@
- type: "plot"
title: "Throughput: 2n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7244,7 +7226,7 @@
- type: "plot"
title: "Throughput: 2n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7257,7 +7239,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7272,7 +7254,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7287,7 +7269,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-78b-1t1c-ip6routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-1t1c-ip6routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7300,7 +7282,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-78b-1t1c-ip6routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-1t1c-ip6routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7313,7 +7295,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7326,7 +7308,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7339,7 +7321,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7354,7 +7336,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7369,7 +7351,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-78b-1t1c-ip6routing-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-1t1c-ip6routing-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7384,7 +7366,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-78b-1t1c-ip6routing-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-1t1c-ip6routing-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7399,7 +7381,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -7414,7 +7396,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -7429,7 +7411,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7444,7 +7426,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-1t1c-ip6routing-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7459,7 +7441,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-sw-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-sw-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7475,7 +7457,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-sw-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-sw-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7491,7 +7473,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-hw-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-hw-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7505,7 +7487,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-hw-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-hw-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7519,7 +7501,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-imix-2t1c-ipsec-ip4routing-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-2t1c-ipsec-ip4routing-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7535,7 +7517,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-imix-2t1c-ipsec-ip4routing-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-2t1c-ipsec-ip4routing-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7551,7 +7533,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7567,7 +7549,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7583,7 +7565,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7599,7 +7581,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -7615,7 +7597,7 @@
- type: "plot"
title: "Throughput: 2n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7631,7 +7613,7 @@
- type: "plot"
title: "Throughput: 2n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7647,7 +7629,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7661,7 +7643,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-l2switching-base-avf-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-avf-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7675,7 +7657,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7690,7 +7672,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-l2switching-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7705,7 +7687,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7719,7 +7701,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -7733,7 +7715,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-l2switching-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-l2switching-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7749,7 +7731,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-l2switching-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-l2switching-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -7765,7 +7747,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7780,7 +7762,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7795,7 +7777,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7808,7 +7790,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-l2switching-base-avf-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-avf-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7821,7 +7803,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7835,7 +7817,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-l2switching-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7849,7 +7831,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7865,7 +7847,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7881,7 +7863,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-features-l2switching-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-features-l2switching-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7897,7 +7879,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-features-l2switching-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-features-l2switching-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -7913,7 +7895,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-l2switching-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-l2switching-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7927,7 +7909,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-l2switching-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-l2switching-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7941,7 +7923,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-l2switching-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-l2switching-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7957,7 +7939,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-l2switching-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-l2switching-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7973,7 +7955,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-features-l2switching-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-features-l2switching-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -7989,7 +7971,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-features-l2switching-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-features-l2switching-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8005,7 +7987,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -8019,7 +8001,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -8033,7 +8015,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -8049,7 +8031,7 @@
- type: "plot"
title: "Throughput: 2n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -8065,7 +8047,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8079,7 +8061,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-l2switching-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8093,7 +8075,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8109,7 +8091,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-l2switching-base-scale-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8125,7 +8107,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-features-l2switching-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-features-l2switching-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8141,7 +8123,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-64b-1t1c-features-l2switching-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-1t1c-features-l2switching-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8157,7 +8139,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-78b-1t1c-srv6-ip6routing-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-1t1c-srv6-ip6routing-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8173,7 +8155,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-78b-1t1c-srv6-ip6routing-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-1t1c-srv6-ip6routing-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8189,7 +8171,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-78b-2t1c-srv6-ip6routing-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-2t1c-srv6-ip6routing-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8205,7 +8187,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-78b-2t1c-srv6-ip6routing-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-2t1c-srv6-ip6routing-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8221,7 +8203,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-78b-1t1c-srv6-ip6routing-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-1t1c-srv6-ip6routing-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8237,7 +8219,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-78b-1t1c-srv6-ip6routing-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-1t1c-srv6-ip6routing-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8253,7 +8235,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-78b-1t1c-srv6-ip6routing-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-1t1c-srv6-ip6routing-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8266,7 +8248,7 @@
- type: "plot"
title: "Throughput: 3n-dnv-x553-78b-1t1c-srv6-ip6routing-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-1t1c-srv6-ip6routing-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -8279,7 +8261,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -8293,7 +8275,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-vhost-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-vhost-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -8307,7 +8289,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -8321,7 +8303,7 @@
- type: "plot"
title: "Throughput: 2n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-pdr"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -8335,7 +8317,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-vhost-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-vhost-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -8349,7 +8331,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-vhost-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-vhost-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -8363,7 +8345,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-vhost-base-i40e-vpp-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -8377,7 +8359,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-64b-1t1c-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-vhost-base-i40e-vpp-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -8391,7 +8373,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8405,7 +8387,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8419,7 +8401,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8433,7 +8415,7 @@
- type: "plot"
title: "Throughput: 3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8447,7 +8429,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8462,7 +8444,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-vhost-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-vhost-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8477,7 +8459,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8492,7 +8474,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8507,7 +8489,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8523,7 +8505,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8539,7 +8521,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8555,7 +8537,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -8571,7 +8553,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-vhost-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-vhost-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8586,7 +8568,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-vhost-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-vhost-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8601,7 +8583,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-link-bonding-vhost-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-link-bonding-vhost-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8617,7 +8599,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-64b-1t1c-link-bonding-vhost-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-link-bonding-vhost-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8633,7 +8615,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-114b-1t1c-vts-l2switching-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-1t1c-vts-l2switching-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8646,7 +8628,7 @@
- type: "plot"
title: "Throughput: 3n-hsw-xl710-114b-1t1c-vts-l2switching-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-1t1c-vts-l2switching-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8659,7 +8641,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-114b-2t1c-vts-l2switching-base-i40e-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-2t1c-vts-l2switching-base-i40e-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8672,7 +8654,7 @@
- type: "plot"
title: "Throughput: 3n-skx-xxv710-114b-2t1c-vts-l2switching-base-i40e-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-2t1c-vts-l2switching-base-i40e-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8685,7 +8667,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-114b-1t1c-vts-l2switching-base-ixgbe-ndr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-1t1c-vts-l2switching-base-ixgbe-ndr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8698,7 +8680,7 @@
- type: "plot"
title: "Throughput: 3n-tsh-x520-114b-1t1c-vts-l2switching-base-ixgbe-pdr"
- algorithm: "plot_performance_box_name"
+ algorithm: "plot_perf_box_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-1t1c-vts-l2switching-base-ixgbe-pdr"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -8715,7 +8697,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8729,7 +8711,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-memif-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8744,7 +8726,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8759,7 +8741,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8773,7 +8755,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8788,7 +8770,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8804,7 +8786,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-1t1c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8817,7 +8799,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-ip4routing-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8832,7 +8814,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8846,7 +8828,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8861,7 +8843,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-features-ip4routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8877,7 +8859,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-1t1c-ip4tunnel-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-ip4tunnel-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8889,7 +8871,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-ip4tunnel-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-ip4tunnel-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8903,7 +8885,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8916,7 +8898,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -8931,7 +8913,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-78b-1t1c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-1t1c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -8944,7 +8926,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8957,7 +8939,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-2t1c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -8972,7 +8954,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-sw-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-sw-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -8988,7 +8970,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-hw-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-1t1c-ipsec-ip4routing-base-scale-hw-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -9002,7 +8984,7 @@
#- type: "plot"
# title: "Latency: 3n-skx-xxv710-imix-2t1c-ipsec-ip4routing-base-scale-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-2t1c-ipsec-ip4routing-base-scale-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-skx"
# include:
@@ -9018,7 +9000,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9034,7 +9016,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9048,7 +9030,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9063,7 +9045,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9077,7 +9059,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-1t1c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -9093,7 +9075,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-l2switching-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9108,7 +9090,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9121,7 +9103,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9135,7 +9117,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9151,7 +9133,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-features-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-features-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9167,7 +9149,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-78b-1t1c-srv6-ip6routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-1t1c-srv6-ip6routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -9183,7 +9165,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-78b-2t1c-srv6-ip6routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-2t1c-srv6-ip6routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9199,7 +9181,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -9213,7 +9195,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -9227,7 +9209,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-1t1c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -9241,7 +9223,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-1t1c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-1t1c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -9255,7 +9237,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -9269,7 +9251,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -9283,7 +9265,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -9298,7 +9280,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -9313,7 +9295,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -9329,7 +9311,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-2t1c-link-bonding-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -9345,7 +9327,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-114b-1t1c-vts-l2switching-base-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-1t1c-vts-l2switching-base-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -9358,7 +9340,7 @@
#- type: "plot"
# title: "Latency: 3n-skx-xxv710-114b-2t1c-vts-l2switching-base-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-2t1c-vts-l2switching-base-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-skx"
# include:
@@ -9371,7 +9353,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-memif-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-memif-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9386,7 +9368,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-ip4routing-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-ip4routing-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9401,7 +9383,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-features-ip4routing-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-features-ip4routing-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9417,7 +9399,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-ip4tunnel-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-ip4tunnel-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9431,7 +9413,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-78b-1t1c-ip6routing-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-1t1c-ip6routing-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9446,7 +9428,7 @@
#- type: "plot"
# title: "Latency: 3n-tsh-x520-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-1t1c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-tsh"
# include:
@@ -9462,7 +9444,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-l2switching-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-l2switching-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9476,7 +9458,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-l2switching-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-l2switching-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9492,7 +9474,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-features-l2switching-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-features-l2switching-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9508,7 +9490,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-78b-1t1c-srv6-ip6routing-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-1t1c-srv6-ip6routing-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9524,7 +9506,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-vhost-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-vhost-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9539,7 +9521,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-1t1c-link-bonding-vhost-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-1t1c-link-bonding-vhost-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -9555,7 +9537,7 @@
#- type: "plot"
# title: "Latency: 3n-tsh-x520-114b-1t1c-vts-l2switching-base-ixgbe-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-1t1c-vts-l2switching-base-ixgbe-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-tsh"
# include:
@@ -9572,7 +9554,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-memif-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-memif-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9586,7 +9568,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-memif-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-memif-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9601,7 +9583,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-64b-4t2c-ip4routing-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-4t2c-ip4routing-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9616,7 +9598,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-ip4routing-base-scale-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-ip4routing-base-scale-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9630,7 +9612,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9645,7 +9627,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-features-ip4routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-features-ip4routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9661,7 +9643,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-2t2c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-2t2c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -9674,7 +9656,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-4t2c-ip4routing-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-4t2c-ip4routing-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9689,7 +9671,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-ip4routing-base-scale-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-ip4routing-base-scale-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9703,7 +9685,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9718,7 +9700,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-features-ip4routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-features-ip4routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9734,7 +9716,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-2t2c-ip4tunnel-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-2t2c-ip4tunnel-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -9746,7 +9728,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-ip4tunnel-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-ip4tunnel-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9760,7 +9742,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9773,7 +9755,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9788,7 +9770,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-78b-2t2c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-2t2c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -9801,7 +9783,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9814,7 +9796,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-4t2c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9829,7 +9811,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-imix-2t2c-ipsec-ip4routing-base-scale-sw-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-2t2c-ipsec-ip4routing-base-scale-sw-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -9845,7 +9827,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-imix-2t2c-ipsec-ip4routing-base-scale-hw-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-2t2c-ipsec-ip4routing-base-scale-hw-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -9859,7 +9841,7 @@
#- type: "plot"
# title: "Latency: 3n-skx-xxv710-imix-4t2c-ipsec-ip4routing-base-scale-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-4t2c-ipsec-ip4routing-base-scale-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-skx"
# include:
@@ -9875,7 +9857,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-64b-4t2c-l2switching-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-4t2c-l2switching-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9891,7 +9873,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-l2switching-base-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-l2switching-base-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9905,7 +9887,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9920,7 +9902,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -9934,7 +9916,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-2t2c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-2t2c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -9950,7 +9932,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-4t2c-l2switching-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-4t2c-l2switching-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9965,7 +9947,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-l2switching-base-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-l2switching-base-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9978,7 +9960,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -9992,7 +9974,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10008,7 +9990,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-features-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-features-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10024,7 +10006,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-78b-2t2c-srv6-ip6routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-2t2c-srv6-ip6routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -10040,7 +10022,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-78b-4t2c-srv6-ip6routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-4t2c-srv6-ip6routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10056,7 +10038,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -10070,7 +10052,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-4t2c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-4t2c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -10084,7 +10066,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-2t2c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-2t2c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -10098,7 +10080,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-2t2c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-2t2c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -10112,7 +10094,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-4t2c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-4t2c-link-bonding-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10126,7 +10108,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-4t2c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-4t2c-link-bonding-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10140,7 +10122,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10155,7 +10137,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10170,7 +10152,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-link-bonding-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10186,7 +10168,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-4t2c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-4t2c-link-bonding-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10202,7 +10184,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-114b-2t2c-vts-l2switching-base-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-2t2c-vts-l2switching-base-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -10215,7 +10197,7 @@
#- type: "plot"
# title: "Latency: 3n-skx-xxv710-114b-4t2c-vts-l2switching-base-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-4t2c-vts-l2switching-base-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-skx"
# include:
@@ -10228,7 +10210,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-memif-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-memif-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10243,7 +10225,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-ip4routing-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-ip4routing-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10258,7 +10240,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-features-ip4routing-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-features-ip4routing-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10274,7 +10256,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-ip4tunnel-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-ip4tunnel-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10288,7 +10270,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-78b-2t2c-ip6routing-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-2t2c-ip6routing-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10303,7 +10285,7 @@
#- type: "plot"
# title: "Latency: 3n-tsh-x520-imix-2t2c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-2t2c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-tsh"
# include:
@@ -10319,7 +10301,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-l2switching-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-l2switching-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10333,7 +10315,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-l2switching-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-l2switching-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10349,7 +10331,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-features-l2switching-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-features-l2switching-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10365,7 +10347,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-78b-2t2c-srv6-ip6routing-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-2t2c-srv6-ip6routing-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10381,7 +10363,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-vhost-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-vhost-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10396,7 +10378,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-2t2c-link-bonding-vhost-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-2t2c-link-bonding-vhost-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -10412,7 +10394,7 @@
#- type: "plot"
# title: "Latency: 3n-tsh-x520-114b-2t2c-vts-l2switching-base-ixgbe-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-2t2c-vts-l2switching-base-ixgbe-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-tsh"
# include:
@@ -10429,7 +10411,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-memif-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-memif-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10443,7 +10425,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-memif-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-memif-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10458,7 +10440,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-64b-8t4c-ip4routing-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-8t4c-ip4routing-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10473,7 +10455,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-ip4routing-base-scale-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-ip4routing-base-scale-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10487,7 +10469,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10502,7 +10484,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-features-ip4routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-features-ip4routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10518,7 +10500,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-4t4c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-4t4c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -10531,7 +10513,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-8t4c-ip4routing-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-8t4c-ip4routing-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10546,7 +10528,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-ip4routing-base-scale-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-ip4routing-base-scale-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10560,7 +10542,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-ip4routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-ip4routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10575,7 +10557,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-features-ip4routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-features-ip4routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10591,7 +10573,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-4t4c-ip4tunnel-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-4t4c-ip4tunnel-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -10603,7 +10585,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-ip4tunnel-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-ip4tunnel-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10617,7 +10599,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10630,7 +10612,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10645,7 +10627,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-78b-4t4c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-4t4c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -10658,7 +10640,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10671,7 +10653,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-8t4c-ip6routing-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10686,7 +10668,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-imix-4t4c-ipsec-ip4routing-base-scale-sw-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-4t4c-ipsec-ip4routing-base-scale-sw-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -10702,7 +10684,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-imix-4t4c-ipsec-ip4routing-base-scale-hw-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-4t4c-ipsec-ip4routing-base-scale-hw-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -10716,7 +10698,7 @@
#- type: "plot"
# title: "Latency: 3n-skx-xxv710-imix-8t4c-ipsec-ip4routing-base-scale-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-8t4c-ipsec-ip4routing-base-scale-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-skx"
# include:
@@ -10732,7 +10714,7 @@
- type: "plot"
title: "Latency: 2n-skx-x710-64b-8t4c-l2switching-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-8t4c-l2switching-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10748,7 +10730,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-l2switching-base-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-l2switching-base-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10762,7 +10744,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10777,7 +10759,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -10791,7 +10773,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-4t4c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-4t4c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -10807,7 +10789,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-8t4c-l2switching-base-scale-avf-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-8t4c-l2switching-base-scale-avf-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10822,7 +10804,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-l2switching-base-avf-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-l2switching-base-avf-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10835,7 +10817,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10849,7 +10831,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-l2switching-base-scale-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-l2switching-base-scale-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10865,7 +10847,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-features-l2switching-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-features-l2switching-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10881,7 +10863,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-78b-4t4c-srv6-ip6routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-4t4c-srv6-ip6routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -10897,7 +10879,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-78b-8t4c-srv6-ip6routing-base-i40e-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-8t4c-srv6-ip6routing-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -10913,7 +10895,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -10927,7 +10909,7 @@
- type: "plot"
title: "Latency: 2n-skx-xxv710-64b-8t4c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-8t4c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -10941,7 +10923,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-4t4c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-4t4c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -10955,7 +10937,7 @@
- type: "plot"
title: "Latency: 3n-hsw-xl710-64b-4t4c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-4t4c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -10969,7 +10951,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-8t4c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-8t4c-link-bonding-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10983,7 +10965,7 @@
- type: "plot"
title: "Latency: 3n-skx-x710-64b-8t4c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-8t4c-link-bonding-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -10997,7 +10979,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -11012,7 +10994,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -11027,7 +11009,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-link-bonding-vhost-base-i40e-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -11043,7 +11025,7 @@
- type: "plot"
title: "Latency: 3n-skx-xxv710-64b-8t4c-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-8t4c-link-bonding-vhost-base-i40e-vpp-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -11059,7 +11041,7 @@
#- type: "plot"
# title: "Latency: 3n-hsw-xl710-114b-4t4c-vts-l2switching-base-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-4t4c-vts-l2switching-base-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-hsw"
# include:
@@ -11072,7 +11054,7 @@
#- type: "plot"
# title: "Latency: 3n-skx-xxv710-114b-8t4c-vts-l2switching-base-i40e-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-8t4c-vts-l2switching-base-i40e-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-skx"
# include:
@@ -11085,7 +11067,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-memif-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-memif-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11100,7 +11082,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-ip4routing-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-ip4routing-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11115,7 +11097,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-features-ip4routing-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-features-ip4routing-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11131,7 +11113,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-ip4tunnel-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-ip4tunnel-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11145,7 +11127,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-78b-4t4c-ip6routing-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-4t4c-ip6routing-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11160,7 +11142,7 @@
#- type: "plot"
# title: "Latency: 3n-tsh-x520-imix-4t4c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-4t4c-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-tsh"
# include:
@@ -11176,7 +11158,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-l2switching-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-l2switching-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11190,7 +11172,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-l2switching-base-scale-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-l2switching-base-scale-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11206,7 +11188,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-features-l2switching-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-features-l2switching-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11222,7 +11204,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-78b-4t4c-srv6-ip6routing-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-4t4c-srv6-ip6routing-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11238,7 +11220,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-vhost-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-vhost-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11253,7 +11235,7 @@
- type: "plot"
title: "Latency: 3n-tsh-x520-64b-4t4c-link-bonding-vhost-base-ixgbe-ndr-lat"
- algorithm: "plot_latency_error_bars_name"
+ algorithm: "plot_lat_err_bars_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-4t4c-link-bonding-vhost-base-ixgbe-ndr-lat"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -11269,7 +11251,7 @@
#- type: "plot"
# title: "Latency: 3n-tsh-x520-114b-4t4c-vts-l2switching-base-ixgbe-ndr-lat"
-# algorithm: "plot_latency_error_bars_name"
+# algorithm: "plot_lat_err_bars_name"
# output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-4t4c-vts-l2switching-base-ixgbe-ndr-lat"
# data: "plot-vpp-throughput-lat-tsa-3n-tsh"
# include:
@@ -11286,7 +11268,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-memif-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-memif-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11300,7 +11282,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-memif-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-memif-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11314,7 +11296,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-memif-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-memif-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11329,7 +11311,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-memif-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-memif-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11344,7 +11326,7 @@
- type: "plot"
title: "Speedup: 2n-skx-x710-64b-ip4routing-base-scale-avf-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-ip4routing-base-scale-avf-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11359,7 +11341,7 @@
- type: "plot"
title: "Speedup: 2n-skx-x710-64b-ip4routing-base-scale-avf-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-ip4routing-base-scale-avf-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11374,7 +11356,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-ip4routing-base-scale-avf-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-ip4routing-base-scale-avf-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11388,7 +11370,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-ip4routing-base-scale-avf-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-ip4routing-base-scale-avf-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11402,7 +11384,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-ip4routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-ip4routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11417,7 +11399,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-ip4routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-ip4routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11432,7 +11414,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-features-ip4routing-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-features-ip4routing-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11448,7 +11430,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-features-ip4routing-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-features-ip4routing-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11464,7 +11446,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-ip4routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-ip4routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11477,7 +11459,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-ip4routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-ip4routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11490,7 +11472,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-ip4routing-base-scale-avf-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-ip4routing-base-scale-avf-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11505,7 +11487,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-ip4routing-base-scale-avf-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-ip4routing-base-scale-avf-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11520,7 +11502,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-ip4routing-base-scale-avf-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-ip4routing-base-scale-avf-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11534,7 +11516,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-ip4routing-base-scale-avf-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-ip4routing-base-scale-avf-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11548,7 +11530,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-ip4routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-ip4routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11563,7 +11545,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-ip4routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-ip4routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11578,7 +11560,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-features-ip4routing-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-features-ip4routing-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11594,7 +11576,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-features-ip4routing-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-features-ip4routing-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11610,7 +11592,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-ip4tunnel-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-ip4tunnel-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11622,7 +11604,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-ip4tunnel-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-ip4tunnel-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11634,7 +11616,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-ip4tunnel-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-ip4tunnel-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11648,7 +11630,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-ip4tunnel-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-ip4tunnel-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11662,7 +11644,7 @@
- type: "plot"
title: "Speedup: 2n-skx-x710-78b-ip6routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-ip6routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11675,7 +11657,7 @@
- type: "plot"
title: "Speedup: 2n-skx-x710-78b-ip6routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-78b-ip6routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11688,7 +11670,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-78b-ip6routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-ip6routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11703,7 +11685,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-78b-ip6routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-78b-ip6routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11718,7 +11700,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-78b-ip6routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-ip6routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11731,7 +11713,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-78b-ip6routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-ip6routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11744,7 +11726,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-78b-ip6routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-ip6routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11757,7 +11739,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-78b-ip6routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-78b-ip6routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11770,7 +11752,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-78b-ip6routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-ip6routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11785,7 +11767,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-78b-ip6routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-ip6routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11800,7 +11782,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-sw-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-sw-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11816,7 +11798,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-sw-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-sw-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11832,7 +11814,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-hw-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-hw-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11846,7 +11828,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-hw-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-imix-ipsec-ip4routing-base-scale-hw-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -11860,7 +11842,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-imix-ipsec-ip4routing-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-ipsec-ip4routing-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11876,7 +11858,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-imix-ipsec-ip4routing-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-imix-ipsec-ip4routing-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -11892,7 +11874,7 @@
- type: "plot"
title: "Speedup: 2n-skx-x710-64b-l2switching-base-scale-avf-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-l2switching-base-scale-avf-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11908,7 +11890,7 @@
- type: "plot"
title: "Speedup: 2n-skx-x710-64b-l2switching-base-scale-avf-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-x710-64b-l2switching-base-scale-avf-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11924,7 +11906,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-l2switching-base-avf-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-l2switching-base-avf-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11938,7 +11920,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-l2switching-base-avf-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-l2switching-base-avf-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11952,7 +11934,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-l2switching-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-l2switching-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11967,7 +11949,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-l2switching-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-l2switching-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11982,7 +11964,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-l2switching-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-l2switching-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -11996,7 +11978,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-l2switching-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-l2switching-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx"
include:
@@ -12010,7 +11992,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-l2switching-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-l2switching-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -12026,7 +12008,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-l2switching-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-l2switching-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -12042,7 +12024,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-l2switching-base-scale-avf-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-l2switching-base-scale-avf-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12057,7 +12039,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-l2switching-base-scale-avf-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-l2switching-base-scale-avf-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12072,7 +12054,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-l2switching-base-avf-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-l2switching-base-avf-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12085,7 +12067,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-l2switching-base-avf-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-l2switching-base-avf-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12098,7 +12080,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-l2switching-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-l2switching-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12112,7 +12094,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-l2switching-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-l2switching-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12126,7 +12108,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-l2switching-base-scale-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-l2switching-base-scale-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12142,7 +12124,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-l2switching-base-scale-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-l2switching-base-scale-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12158,7 +12140,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-features-l2switching-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-features-l2switching-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12174,7 +12156,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-features-l2switching-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-features-l2switching-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12190,7 +12172,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-78b-srv6-ip6routing-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-srv6-ip6routing-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -12206,7 +12188,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-78b-srv6-ip6routing-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-78b-srv6-ip6routing-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -12222,7 +12204,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-78b-srv6-ip6routing-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-srv6-ip6routing-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12238,7 +12220,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-78b-srv6-ip6routing-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-78b-srv6-ip6routing-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12254,7 +12236,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-vhost-base-i40e-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-vhost-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -12268,7 +12250,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-vhost-base-i40e-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-vhost-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -12282,7 +12264,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-vhost-base-i40e-vpp-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -12296,7 +12278,7 @@
- type: "plot"
title: "Speedup: 2n-skx-xxv710-64b-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-skx-xxv710-64b-vhost-base-i40e-vpp-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-skx-vhost"
include:
@@ -12310,7 +12292,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-vhost-base-i40e-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-vhost-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -12324,7 +12306,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-vhost-base-i40e-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-vhost-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -12338,7 +12320,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-vhost-base-i40e-vpp-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -12352,7 +12334,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-64b-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-64b-vhost-base-i40e-vpp-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw-vhost"
include:
@@ -12366,7 +12348,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-link-bonding-vhost-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12380,7 +12362,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-link-bonding-vhost-base-i40e-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-link-bonding-vhost-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12394,7 +12376,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-link-bonding-vhost-base-i40e-vpp-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12408,7 +12390,7 @@
- type: "plot"
title: "Speedup: 3n-skx-x710-64b-link-bonding-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-x710-64b-link-bonding-vhost-base-i40e-vpp-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12422,7 +12404,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-vhost-base-i40e-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-vhost-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12437,7 +12419,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-vhost-base-i40e-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-vhost-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12452,7 +12434,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-vhost-base-i40e-vpp-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12467,7 +12449,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-vhost-base-i40e-vpp-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12482,7 +12464,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12498,7 +12480,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12514,7 +12496,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-vpp-ndr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-vpp-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12530,7 +12512,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-vpp-pdr"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-64b-link-bonding-vhost-base-i40e-vpp-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx-vhost"
include:
@@ -12546,7 +12528,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-114b-vts-l2switching-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-vts-l2switching-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -12559,7 +12541,7 @@
- type: "plot"
title: "Speedup: 3n-hsw-xl710-114b-vts-l2switching-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-hsw-xl710-114b-vts-l2switching-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-hsw"
include:
@@ -12572,7 +12554,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-114b-vts-l2switching-base-i40e-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-vts-l2switching-base-i40e-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12585,7 +12567,7 @@
- type: "plot"
title: "Speedup: 3n-skx-xxv710-114b-vts-l2switching-base-i40e-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-skx-xxv710-114b-vts-l2switching-base-i40e-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-skx"
include:
@@ -12598,7 +12580,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-memif-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-memif-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12613,7 +12595,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-memif-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-memif-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12628,7 +12610,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-ip4routing-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-ip4routing-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12643,7 +12625,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-ip4routing-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-ip4routing-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12658,7 +12640,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-features-ip4routing-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-features-ip4routing-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12674,7 +12656,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-features-ip4routing-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-features-ip4routing-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12690,7 +12672,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-ip4tunnel-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-ip4tunnel-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12704,7 +12686,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-ip4tunnel-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-ip4tunnel-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12718,7 +12700,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-78b-ip6routing-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-ip6routing-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12733,7 +12715,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-78b-ip6routing-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-ip6routing-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12748,7 +12730,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-imix-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12764,7 +12746,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-imix-ipsec-ip4routing-base-scale-sw-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-imix-ipsec-ip4routing-base-scale-sw-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12780,7 +12762,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-l2switching-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-l2switching-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12794,7 +12776,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-l2switching-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-l2switching-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12808,7 +12790,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-l2switching-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-l2switching-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12824,7 +12806,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-l2switching-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-l2switching-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12840,7 +12822,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-features-l2switching-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-features-l2switching-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12856,7 +12838,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-features-l2switching-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-features-l2switching-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12872,7 +12854,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-78b-srv6-ip6routing-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-srv6-ip6routing-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12888,7 +12870,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-78b-srv6-ip6routing-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-78b-srv6-ip6routing-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12904,7 +12886,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-vhost-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-vhost-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12919,7 +12901,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-vhost-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-vhost-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12934,7 +12916,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-link-bonding-vhost-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-link-bonding-vhost-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12950,7 +12932,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-64b-link-bonding-vhost-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-64b-link-bonding-vhost-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12966,7 +12948,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-114b-vts-l2switching-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-vts-l2switching-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12979,7 +12961,7 @@
- type: "plot"
title: "Speedup: 3n-tsh-x520-114b-vts-l2switching-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-tsh-x520-114b-vts-l2switching-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-tsh"
include:
@@ -12992,7 +12974,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-ip4routing-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-ip4routing-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13007,7 +12989,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-ip4routing-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-ip4routing-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13022,7 +13004,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-features-ip4routing-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-features-ip4routing-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13038,7 +13020,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-features-ip4routing-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-features-ip4routing-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13054,7 +13036,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-78b-ip6routing-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-78b-ip6routing-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13069,7 +13051,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-78b-ip6routing-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-78b-ip6routing-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13084,7 +13066,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-l2switching-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-l2switching-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13098,7 +13080,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-l2switching-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-l2switching-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13112,7 +13094,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-l2switching-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-l2switching-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13128,7 +13110,7 @@
- type: "plot"
title: "Speedup: 2n-dnv-x553-64b-l2switching-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/2n-dnv-x553-64b-l2switching-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-2n-dnv"
include:
@@ -13144,7 +13126,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-ip4routing-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-ip4routing-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13159,7 +13141,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-ip4routing-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-ip4routing-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13174,7 +13156,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-features-ip4routing-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-features-ip4routing-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13190,7 +13172,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-features-ip4routing-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-features-ip4routing-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13206,7 +13188,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-ip4tunnel-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-ip4tunnel-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13220,7 +13202,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-ip4tunnel-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-ip4tunnel-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13234,7 +13216,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-78b-ip6routing-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-ip6routing-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13249,7 +13231,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-78b-ip6routing-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-ip6routing-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13264,7 +13246,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-imix-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-imix-ipsec-ip4routing-base-scale-sw-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13280,7 +13262,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-imix-ipsec-ip4routing-base-scale-sw-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-imix-ipsec-ip4routing-base-scale-sw-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13296,7 +13278,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-l2switching-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-l2switching-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13310,7 +13292,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-l2switching-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-l2switching-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13324,7 +13306,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-l2switching-base-scale-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-l2switching-base-scale-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13340,7 +13322,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-l2switching-base-scale-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-l2switching-base-scale-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13356,7 +13338,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-features-l2switching-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-features-l2switching-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13372,7 +13354,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-64b-features-l2switching-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-64b-features-l2switching-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13388,7 +13370,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-78b-srv6-ip6routing-base-ixgbe-ndr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-srv6-ip6routing-base-ixgbe-ndr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
@@ -13401,7 +13383,7 @@
- type: "plot"
title: "Speedup: 3n-dnv-x553-78b-srv6-ip6routing-base-ixgbe-pdr-tsa"
- algorithm: "plot_throughput_speedup_analysis_name"
+ algorithm: "plot_tsa_name"
output-file: "{DIR[STATIC,VPP]}/3n-dnv-x553-78b-srv6-ip6routing-base-ixgbe-pdr-tsa"
data: "plot-vpp-throughput-lat-tsa-3n-dnv"
include:
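For orientation only: the "algorithm" strings in these YAML entries are presumably resolved by name on the Python side of PAL, which is why the specification rename to "plot_tsa_name" / "plot_perf_box_name" has to land together with the function renames listed in the commit message, and why the old "filter"/"parameters"/"traces" keys give way to explicit "include" test lists below. The following is a minimal illustrative sketch of that kind of name-based dispatch, not the actual generator_plots.py code; the placeholder functions, the ALGORITHMS table, and the example data are assumptions.

#!/usr/bin/env python3
"""Illustrative sketch: dispatch a plot spec entry by its 'algorithm' name."""


def plot_perf_box_name(plot, data):
    # Placeholder standing in for the renamed box-plot generator (assumption).
    print(f"box plot -> {plot['output-file']} ({len(data)} tests)")


def plot_tsa_name(plot, data):
    # Placeholder standing in for the renamed speedup-analysis generator (assumption).
    print(f"speedup plot -> {plot['output-file']} ({len(data)} tests)")


# Hypothetical lookup table: YAML "algorithm" string -> Python callable.
ALGORITHMS = {
    "plot_perf_box_name": plot_perf_box_name,
    "plot_tsa_name": plot_tsa_name,
}


def generate_plot(plot_spec, data):
    """Resolve and run the generator named by one 'plot' element of the spec."""
    try:
        algorithm = ALGORITHMS[plot_spec["algorithm"]]
    except KeyError:
        raise ValueError(f"Unknown plot algorithm: {plot_spec['algorithm']!r}")
    algorithm(plot_spec, data)


if __name__ == "__main__":
    # Example spec shaped like the new-style entries in this diff; the "include"
    # list names full test IDs instead of the old tag "filter" expression.
    spec = {
        "type": "plot",
        "title": "DPDK Throughput: 2n-skx-xxv710-64b-2t1c-base-ndr",
        "algorithm": "plot_perf_box_name",
        "output-file": "2n-skx-xxv710-64b-2t1c-base-ndr",
        "include": [
            "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr."
            "64b-2t1c-eth-l2xcbase-testpmd-ndrpdr",
        ],
    }
    generate_plot(spec, data=spec["include"])

Under this sketch, an "algorithm" value left at its old name would simply fail the lookup, which is the practical reason every occurrence in the specification is rewritten in this hunk and the ones that follow.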
@@ -13414,1940 +13396,536 @@
################################################################################
-# Packet Throughput - DPDK l3fwd 3n-hsw-x520
-- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x520-64b-1t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x520-64b-1t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-3n-hsw-x520-64b-1t1c-base-ndr"
- layout: "plot-throughput"
+# Packet Throughput - DPDK
-- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x520-64b-2t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x520-64b-2t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-3n-hsw-x520-64b-2t2c-base-ndr"
- layout: "plot-throughput"
+# 2n-skx-xxv710
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x520-64b-1t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x520-64b-1t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-xxv710-64b-2t1c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-2t1c-base-ndr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x520-64b-1t1c-base-pdr"
+ title: "2n-skx-xxv710-64b-2t1c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x520-64b-2t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x520-64b-2t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-xxv710-64b-4t2c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-4t2c-base-ndr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x520-64b-2t2c-base-pdr"
+ title: "2n-skx-xxv710-64b-4t2c-base-ndr"
layout: "plot-throughput"
-# Packet Throughput - DPDK l3fwd 3n-hsw-x710
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x710-64b-1t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x710-64b-1t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-xxv710-64b-2t1c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-2t1c-base-pdr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x710-64b-1t1c-base-ndr"
+ title: "2n-skx-xxv710-64b-2t1c-base-pdr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x710-64b-2t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x710-64b-2t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-xxv710-64b-4t2c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-4t2c-base-pdr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x710-64b-2t2c-base-ndr"
+ title: "2n-skx-xxv710-64b-4t2c-base-pdr"
layout: "plot-throughput"
-- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x710-64b-1t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x710-64b-1t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-3n-hsw-x710-64b-1t1c-base-pdr"
- layout: "plot-throughput"
+# 2n-skx-x710
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-x710-64b-2t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x710-64b-2t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-x710-64b-2t1c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-2t1c-base-ndr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x710-64b-2t2c-base-pdr"
+ title: "2n-skx-x710-64b-2t1c-base-ndr"
layout: "plot-throughput"
-# Packet Throughput - DPDK l3fwd 3n-hsw-xl710
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-xl710-64b-1t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-xl710-64b-1t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-x710-64b-4t2c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-4t2c-base-ndr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-xl710-64b-1t1c-base-ndr"
+ title: "2n-skx-x710-64b-4t2c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-xl710-64b-2t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-xl710-64b-2t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-x710-64b-2t1c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-2t1c-base-pdr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-xl710-64b-2t2c-base-ndr"
+ title: "2n-skx-x710-64b-2t1c-base-pdr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-xl710-64b-1t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-xl710-64b-1t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 2n-skx-x710-64b-4t2c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-4t2c-base-pdr"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-xl710-64b-1t1c-base-pdr"
+ title: "2n-skx-x710-64b-4t2c-base-pdr"
layout: "plot-throughput"
-- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-hsw-xl710-64b-2t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-xl710-64b-2t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-3n-hsw-xl710-64b-2t2c-base-pdr"
- layout: "plot-throughput"
+# 3n-skx-xxv710
-# Packet Throughput - DPDK l3fwd 3n-skx-x710
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-x710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-x710-64b-2t1c-base-ndr"
+ title: "DPDK Throughput: 3n-skx-xxv710-64b-2t1c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-2t1c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-x710-64b-2t1c-base-ndr"
+ title: "3n-skx-xxv710-64b-2t1c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-x710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-x710-64b-4t2c-base-ndr"
+ title: "DPDK Throughput: 3n-skx-xxv710-64b-4t2c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-4t2c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-x710-64b-4t2c-base-ndr"
+ title: "3n-skx-xxv710-64b-4t2c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-x710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-x710-64b-2t1c-base-pdr"
+ title: "DPDK Throughput: 3n-skx-xxv710-64b-2t1c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-2t1c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-x710-64b-2t1c-base-pdr"
+ title: "3n-skx-xxv710-64b-2t1c-base-pdr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-x710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-x710-64b-4t2c-base-pdr"
+ title: "DPDK Throughput: 3n-skx-xxv710-64b-4t2c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-4t2c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-x710-64b-4t2c-base-pdr"
+ title: "3n-skx-xxv710-64b-4t2c-base-pdr"
layout: "plot-throughput"
-# Packet Throughput - DPDK l3fwd 3n-skx-xxv710
-- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-xxv710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-xxv710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-3n-skx-xxv710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
+# 3n-skx-x710
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-xxv710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-xxv710-64b-4t2c-base-ndr"
+ title: "DPDK Throughput: 3n-skx-x710-64b-2t1c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-2t1c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-xxv710-64b-4t2c-base-ndr"
+ title: "3n-skx-x710-64b-2t1c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-xxv710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-xxv710-64b-2t1c-base-pdr"
+ title: "DPDK Throughput: 3n-skx-x710-64b-4t2c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-4t2c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-xxv710-64b-2t1c-base-pdr"
+ title: "3n-skx-x710-64b-4t2c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: l3fwd-3n-skx-xxv710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-xxv710-64b-4t2c-base-pdr"
+ title: "DPDK Throughput: 3n-skx-x710-64b-2t1c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-2t1c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-3n-skx-xxv710-64b-4t2c-base-pdr"
- layout: "plot-throughput"
-
-# Packet Throughput - DPDK l3fwd 2n-skx-x710
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-x710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-x710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-x710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-x710-64b-4t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-x710-64b-4t2c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-x710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-x710-64b-2t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-x710-64b-2t1c-base-pdr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-x710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-x710-64b-4t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-x710-64b-4t2c-base-pdr"
- layout: "plot-throughput"
-
-# Packet Throughput - DPDK l3fwd 2n-skx-xxv710
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-xxv710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-xxv710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-xxv710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-xxv710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-xxv710-64b-4t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-xxv710-64b-4t2c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-xxv710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-xxv710-64b-2t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "l3fwd-2n-skx-xxv710-64b-2t1c-base-pdr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: l3fwd-2n-skx-xxv710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-xxv710-64b-4t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-2n-skx-xxv710-64b-4t2c-base-pdr"
+ title: "3n-skx-x710-64b-2t1c-base-pdr"
layout: "plot-throughput"
-################################################################################
-
-# Packet Throughput - DPDK testpmd 3n-hsw-x520
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x520-64b-1t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x520-64b-1t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ title: "DPDK Throughput: 3n-skx-x710-64b-4t2c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-4t2c-base-pdr"
+ data: "plot-dpdk-throughput-latency-3n-skx"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x520-64b-1t1c-base-ndr"
+ title: "3n-skx-x710-64b-4t2c-base-pdr"
layout: "plot-throughput"
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x520-64b-2t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x520-64b-2t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-hsw-x520-64b-2t2c-base-ndr"
- layout: "plot-throughput"
+# 3n-hsw-xl710
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x520-64b-1t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x520-64b-1t1c-base-pdr"
+ title: "DPDK Throughput: 3n-hsw-xl710-64b-1t1c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-1t1c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-hsw-x520-64b-1t1c-base-pdr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x520-64b-2t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x520-64b-2t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-1t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-1t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x520-64b-2t2c-base-pdr"
+ title: "3n-hsw-xl710-64b-1t1c-base-ndr"
layout: "plot-throughput"
-# Packet Throughput - DPDK testpmd 3n-hsw-x710
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x710-64b-1t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x710-64b-1t1c-base-ndr"
+ title: "DPDK Throughput: 3n-hsw-xl710-64b-2t2c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-2t2c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-2t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x710-64b-1t1c-base-ndr"
+ title: "3n-hsw-xl710-64b-2t2c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x710-64b-2t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x710-64b-2t2c-base-ndr"
+ title: "DPDK Throughput: 3n-hsw-xl710-64b-1t1c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-1t1c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-1t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-1t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x710-64b-2t2c-base-ndr"
+ title: "3n-hsw-xl710-64b-1t1c-base-pdr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x710-64b-1t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x710-64b-1t1c-base-pdr"
+ title: "DPDK Throughput: 3n-hsw-xl710-64b-2t2c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-2t2c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-2t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x710-64b-1t1c-base-pdr"
+ title: "3n-hsw-xl710-64b-2t2c-base-pdr"
layout: "plot-throughput"
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-x710-64b-2t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x710-64b-2t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-hsw-x710-64b-2t2c-base-pdr"
- layout: "plot-throughput"
+# 3n-hsw-x710
-# Packet Throughput - DPDK testpmd 3n-hsw-xl710
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-xl710-64b-1t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-xl710-64b-1t1c-base-ndr"
+ title: "DPDK Throughput: 3n-hsw-x710-64b-1t1c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-1t1c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-1t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-1t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-xl710-64b-1t1c-base-ndr"
+ title: "3n-hsw-x710-64b-1t1c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-xl710-64b-2t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-xl710-64b-2t2c-base-ndr"
+ title: "DPDK Throughput: 3n-hsw-x710-64b-2t2c-base-ndr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-2t2c-base-ndr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-xl710-64b-2t2c-base-ndr"
+ title: "3n-hsw-x710-64b-2t2c-base-ndr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-xl710-64b-1t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-xl710-64b-1t1c-base-pdr"
+ title: "DPDK Throughput: 3n-hsw-x710-64b-1t1c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-1t1c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-1t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-1t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-xl710-64b-1t1c-base-pdr"
+ title: "3n-hsw-x710-64b-1t1c-base-pdr"
layout: "plot-throughput"
- type: "plot"
- title: "DPDK Throughput: testpmd-3n-hsw-xl710-64b-2t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-xl710-64b-2t2c-base-pdr"
+ title: "DPDK Throughput: 3n-hsw-x710-64b-2t2c-base-pdr"
+ algorithm: "plot_perf_box_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-2t2c-base-pdr"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-hsw-xl710-64b-2t2c-base-pdr"
- layout: "plot-throughput"
-
-# Packet Throughput - DPDK testpmd 3n-skx-x710
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-x710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-x710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-x710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-x710-64b-4t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-x710-64b-4t2c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-x710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-x710-64b-2t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-x710-64b-2t1c-base-pdr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-x710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-x710-64b-4t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-x710-64b-4t2c-base-pdr"
- layout: "plot-throughput"
-
-# Packet Throughput - DPDK testpmd 3n-skx-xxv710
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-xxv710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-xxv710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-xxv710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-xxv710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-xxv710-64b-4t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-xxv710-64b-4t2c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-xxv710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-xxv710-64b-2t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-xxv710-64b-2t1c-base-pdr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-3n-skx-xxv710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-xxv710-64b-4t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-3n-skx-xxv710-64b-4t2c-base-pdr"
- layout: "plot-throughput"
-
-# Packet Throughput - DPDK testpmd 2n-skx-x710
-- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-x710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-x710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-2n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-x710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-x710-64b-4t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-2n-skx-x710-64b-4t2c-base-ndr"
- layout: "plot-throughput"
-
-- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-x710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-x710-64b-2t1c-base-pdr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-2n-skx-x710-64b-2t1c-base-pdr"
+ title: "3n-hsw-x710-64b-2t2c-base-pdr"
layout: "plot-throughput"
-- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-x710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-x710-64b-4t2c-base-pdr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-2n-skx-x710-64b-4t2c-base-pdr"
- layout: "plot-throughput"
+################################################################################
-# Packet Throughput - DPDK testpmd 2n-skx-xxv710
-- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-xxv710-64b-2t1c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-xxv710-64b-2t1c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-2n-skx-xxv710-64b-2t1c-base-ndr"
- layout: "plot-throughput"
+# Packet Latency - DPDK
-- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-xxv710-64b-4t2c-base-ndr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-xxv710-64b-4t2c-base-ndr"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-2n-skx-xxv710-64b-4t2c-base-ndr"
- layout: "plot-throughput"
+# 2n-skx-xxv710
- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-xxv710-64b-2t1c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-xxv710-64b-2t1c-base-pdr"
+ title: "Latency: 2n-skx-xxv710-64b-2t1c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-2t1c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-2n-skx-xxv710-64b-2t1c-base-pdr"
- layout: "plot-throughput"
+ title: "2n-skx-xxv710-64b-2t1c-base-ndr"
+ layout: "plot-latency"
- type: "plot"
- title: "DPDK Throughput: testpmd-2n-skx-xxv710-64b-4t2c-base-pdr"
- algorithm: "plot_performance_box"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-xxv710-64b-4t2c-base-pdr"
+ title: "Latency: 2n-skx-xxv710-64b-4t2c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-4t2c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "throughput"
- - "parent"
- - "tags"
- traces:
- hoverinfo: "x+y"
- boxpoints: "outliers"
- whiskerwidth: 0
- layout:
- title: "testpmd-2n-skx-xxv710-64b-4t2c-base-pdr"
- layout: "plot-throughput"
-
-################################################################################
-
-# Packet Latency - DPDK l3fwd 3n-hsw-x520
-- type: "plot"
- title: "DPDK Latency: l3fwd-3n-hsw-x520-64b-1t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x520-64b-1t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x520-64b-1t1c-base-ndr"
+ title: "2n-skx-xxv710-64b-4t2c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-hsw-x520-64b-2t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x520-64b-2t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 2n-skx-xxv710-64b-8t4c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-xxv710-64b-8t4c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-8t4c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-8t4c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x520-64b-2t2c-base-ndr"
+ title: "2n-skx-xxv710-64b-8t4c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK l3fwd 3n-hsw-x710
-- type: "plot"
- title: "DPDK Latency: l3fwd-3n-hsw-x710-64b-1t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x710-64b-1t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "l3fwd-3n-hsw-x710-64b-1t1c-base-ndr"
- layout: "plot-latency"
+# 2n-skx-x710
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-hsw-x710-64b-2t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-x710-64b-2t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 2n-skx-x710-64b-2t1c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-2t1c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-x710-64b-2t2c-base-ndr"
+ title: "2n-skx-x710-64b-2t1c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK l3fwd 3n-hsw-xl710
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-hsw-xl710-64b-1t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-xl710-64b-1t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 2n-skx-x710-64b-4t2c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-4t2c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-xl710-64b-1t1c-base-ndr"
+ title: "2n-skx-x710-64b-4t2c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-hsw-xl710-64b-2t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-hsw-xl710-64b-2t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 2n-skx-x710-64b-8t4c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/2n-skx-x710-64b-8t4c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-2n-skx"
+ include:
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-8t4c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.2n1l-10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-8t4c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-hsw-xl710-64b-2t2c-base-ndr"
+ title: "2n-skx-x710-64b-8t4c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK l3fwd 3n-skx-x710
-- type: "plot"
- title: "DPDK Latency: l3fwd-3n-skx-x710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-x710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "l3fwd-3n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-latency"
+# 3n-skx-xxv710
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-skx-x710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-x710-64b-4t2c-base-ndr-lat"
+ title: "Latency: 3n-skx-xxv710-64b-2t1c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-2t1c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-x710-64b-4t2c-base-ndr"
+ title: "3n-skx-xxv710-64b-2t1c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK l3fwd 3n-skx-xxv710
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-skx-xxv710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-xxv710-64b-2t1c-base-ndr-lat"
+ title: "Latency: 3n-skx-xxv710-64b-4t2c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-4t2c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-xxv710-64b-2t1c-base-ndr"
+ title: "3n-skx-xxv710-64b-4t2c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: l3fwd-3n-skx-xxv710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-3n-skx-xxv710-64b-4t2c-base-ndr-lat"
+ title: "Latency: 3n-skx-xxv710-64b-8t4c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-xxv710-64b-8t4c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.25ge2p1xxv710-eth-l2xcbase-testpmd-ndrpdr.64b-8t4c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.25ge2p1xxv710-ethip4-ip4base-l3fwd-ndrpdr.64b-8t4c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-3n-skx-xxv710-64b-4t2c-base-ndr"
+ title: "3n-skx-xxv710-64b-8t4c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK l3fwd 2n-skx-x710
-- type: "plot"
- title: "DPDK Latency: l3fwd-2n-skx-x710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-x710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "l3fwd-2n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-latency"
+# 3n-skx-x710
- type: "plot"
- title: "DPDK Latency: l3fwd-2n-skx-x710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-x710-64b-4t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 3n-skx-x710-64b-2t1c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-2t1c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-3n-skx"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-2n-skx-x710-64b-4t2c-base-ndr"
+ title: "3n-skx-x710-64b-2t1c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK l3fwd 2n-skx-xxv710
- type: "plot"
- title: "DPDK Latency: l3fwd-2n-skx-xxv710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-xxv710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 3n-skx-x710-64b-4t2c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-4t2c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-3n-skx"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-2n-skx-xxv710-64b-2t1c-base-ndr"
+ title: "3n-skx-x710-64b-4t2c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: l3fwd-2n-skx-xxv710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/l3fwd-2n-skx-xxv710-64b-4t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- 'IP4FWD'"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 3n-skx-x710-64b-8t4c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-skx-x710-64b-8t4c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-3n-skx"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-8t4c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-8t4c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "l3fwd-2n-skx-xxv710-64b-4t2c-base-ndr"
+ title: "3n-skx-x710-64b-8t4c-base-ndr"
layout: "plot-latency"
-################################################################################
+# 3n-hsw-xl710
-# Packet Latency - DPDK testpmd 3n-hsw-x520
- type: "plot"
- title: "DPDK Latency: testpmd-3n-hsw-x520-64b-1t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x520-64b-1t1c-base-ndr-lat"
+ title: "Latency: 3n-hsw-xl710-64b-1t1c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-1t1c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-1t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-1t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x520-64b-1t1c-base-ndr"
+ title: "3n-hsw-xl710-64b-1t1c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: testpmd-3n-hsw-x520-64b-2t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x520-64b-2t2c-base-ndr-lat"
+ title: "Latency: 3n-hsw-xl710-64b-2t2c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-2t2c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X520-DA2' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-2t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x520-64b-2t2c-base-ndr"
+ title: "3n-hsw-xl710-64b-2t2c-base-ndr"
layout: "plot-latency"
-# Packet Latency - DPDK testpmd 3n-hsw-x710
- type: "plot"
- title: "DPDK Latency: testpmd-3n-hsw-x710-64b-1t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x710-64b-1t1c-base-ndr-lat"
+ title: "Latency: 3n-hsw-xl710-64b-4t4c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-xl710-64b-4t4c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.40ge2p1xl710-eth-l2xcbase-testpmd-ndrpdr.64b-4t4c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.40ge2p1xl710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t4c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-x710-64b-1t1c-base-ndr"
+ title: "3n-hsw-xl710-64b-4t4c-base-ndr"
layout: "plot-latency"
-- type: "plot"
- title: "DPDK Latency: testpmd-3n-hsw-x710-64b-2t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-x710-64b-2t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-3n-hsw-x710-64b-2t2c-base-ndr"
- layout: "plot-latency"
+# 3n-hsw-x710
-# Packet Latency - DPDK testpmd 3n-hsw-xl710
- type: "plot"
- title: "DPDK Latency: testpmd-3n-hsw-xl710-64b-1t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-xl710-64b-1t1c-base-ndr-lat"
+ title: "Latency: 3n-hsw-x710-64b-1t1c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-1t1c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '1T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-1t1c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-1t1c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-3n-hsw-xl710-64b-1t1c-base-ndr"
+ title: "3n-hsw-x710-64b-1t1c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: testpmd-3n-hsw-xl710-64b-2t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-hsw-xl710-64b-2t2c-base-ndr-lat"
+ title: "Latency: 3n-hsw-x710-64b-2t2c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-2t2c-base-ndr-lat"
data: "plot-dpdk-throughput-latency-3n-hsw"
- filter: "'NIC_Intel-XL710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-3n-hsw-xl710-64b-2t2c-base-ndr"
- layout: "plot-latency"
-
-# Packet Latency - DPDK testpmd 3n-skx-x710
-- type: "plot"
- title: "DPDK Latency: testpmd-3n-skx-x710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-x710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-3n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-latency"
-
-- type: "plot"
- title: "DPDK Latency: testpmd-3n-skx-x710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-x710-64b-4t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-3n-skx-x710-64b-4t2c-base-ndr"
- layout: "plot-latency"
-
-# Packet Latency - DPDK testpmd 3n-skx-xxv710
-- type: "plot"
- title: "DPDK Latency: testpmd-3n-skx-xxv710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-xxv710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-3n-skx-xxv710-64b-2t1c-base-ndr"
- layout: "plot-latency"
-
-- type: "plot"
- title: "DPDK Latency: testpmd-3n-skx-xxv710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-3n-skx-xxv710-64b-4t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-3n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-3n-skx-xxv710-64b-4t2c-base-ndr"
- layout: "plot-latency"
-
-# Packet Latency - DPDK testpmd 2n-skx-x710
-- type: "plot"
- title: "DPDK Latency: testpmd-2n-skx-x710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-x710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-2n-skx-x710-64b-2t1c-base-ndr"
- layout: "plot-latency"
-
-- type: "plot"
- title: "DPDK Latency: testpmd-2n-skx-x710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-x710-64b-4t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-X710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
- layout:
- title: "testpmd-2n-skx-x710-64b-4t2c-base-ndr"
- layout: "plot-latency"
-
-# Packet Latency - DPDK testpmd 2n-skx-xxv710
-- type: "plot"
- title: "DPDK Latency: testpmd-2n-skx-xxv710-64b-2t1c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-xxv710-64b-2t1c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '2T1C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-2t2c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-2t2c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-2n-skx-xxv710-64b-2t1c-base-ndr"
+ title: "3n-hsw-x710-64b-2t2c-base-ndr"
layout: "plot-latency"
- type: "plot"
- title: "DPDK Latency: testpmd-2n-skx-xxv710-64b-4t2c-base-ndr-lat"
- algorithm: "plot_latency_error_bars"
- output-file-type: ".html"
- output-file: "{DIR[STATIC,DPDK]}/testpmd-2n-skx-xxv710-64b-4t2c-base-ndr-lat"
- data: "plot-dpdk-throughput-latency-2n-skx"
- filter: "'NIC_Intel-XXV710' and
- '64B' and
- 'BASE' and
- 'NDRPDR' and
- '4T2C' and
- ('L2BDMACSTAT' or 'L2BDMACLRN' or 'L2XCFWD' or 'L2PATCH')"
- parameters:
- - "latency"
- - "parent"
- - "tags"
+ title: "Latency: 3n-hsw-x710-64b-4t4c-base-ndr-lat"
+ algorithm: "plot_lat_err_bars_name"
+ output-file: "{DIR[STATIC,DPDK]}/3n-hsw-x710-64b-4t4c-base-ndr-lat"
+ data: "plot-dpdk-throughput-latency-3n-hsw"
+ include:
+ - "tests.dpdk.perf.10ge2p1x710-eth-l2xcbase-testpmd-ndrpdr.64b-4t4c-eth-l2xcbase-testpmd-ndrpdr"
+ - "tests.dpdk.perf.10ge2p1x710-ethip4-ip4base-l3fwd-ndrpdr.64b-4t4c-ethip4-ip4base-l3fwd-ndrpdr"
layout:
- title: "testpmd-2n-skx-xxv710-64b-4t2c-base-ndr"
+ title: "3n-hsw-x710-64b-4t4c-base-ndr"
layout: "plot-latency"
diff --git a/resources/tools/presentation/specification_CPTA.yaml b/resources/tools/presentation/specification_CPTA.yaml
index cd3b62a1e3..9363428773 100644
--- a/resources/tools/presentation/specification_CPTA.yaml
+++ b/resources/tools/presentation/specification_CPTA.yaml
@@ -191,7 +191,7 @@
# 3n-hsw
plot-performance-trending-all-3n-hsw:
csit-vpp-perf-mrr-daily-master:
- start: 836
+ start: 851
end: "lastCompletedBuild"
skip:
- 852
@@ -218,17 +218,16 @@
- 940
- 941
- 953
+ - 987
csit-dpdk-perf-mrr-weekly-master:
- start: 83
+ start: 91
end: "lastCompletedBuild"
skip:
- - 87
- - 89
- - 90
+ - 101
plot-performance-trending-vpp-3n-hsw:
csit-vpp-perf-mrr-daily-master:
- start: 836
+ start: 851
end: "lastCompletedBuild"
skip:
- 852
@@ -255,23 +254,21 @@
- 940
- 941
- 953
+ - 987
plot-performance-trending-dpdk-3n-hsw:
csit-dpdk-perf-mrr-weekly-master:
- start: 83
+ start: 91
end: "lastCompletedBuild"
skip:
- - 87
- - 89
- - 90
+ - 101
# 3n-skx
plot-performance-trending-all-3n-skx:
csit-vpp-perf-mrr-daily-master-3n-skx:
- start: 587
+ start: 598
end: "lastCompletedBuild"
skip:
- - 588
- 600
- 601
- 603
@@ -294,21 +291,20 @@
- 687
- 694
- 695
+ - 696
+ - 697
+ - 705
csit-dpdk-perf-mrr-weekly-master-3n-skx:
- start: 48
+ start: 57
end: "lastCompletedBuild"
skip:
- - 52
- - 54
- - 55
- - 56
+ - 66
plot-performance-trending-vpp-3n-skx:
csit-vpp-perf-mrr-daily-master-3n-skx:
- start: 587
+ start: 598
end: "lastCompletedBuild"
skip:
- - 588
- 600
- 601
- 603
@@ -331,24 +327,23 @@
- 687
- 694
- 695
+ - 696
+ - 697
+ - 705
plot-performance-trending-dpdk-3n-skx:
csit-dpdk-perf-mrr-weekly-master-3n-skx:
- start: 48
+ start: 57
end: "lastCompletedBuild"
skip:
- - 52
- - 54
- - 55
- - 56
+ - 66
# 2n-skx
plot-performance-trending-all-2n-skx:
csit-vpp-perf-mrr-daily-master-2n-skx:
- start: 589
+ start: 606
end: "lastCompletedBuild"
skip:
- - 605
- 608
- 609
- 612
@@ -375,20 +370,17 @@
- 701
- 702
csit-dpdk-perf-mrr-weekly-master-2n-skx:
- start: 48
+ start: 57
end: "lastCompletedBuild"
skip:
- - 52
- - 54
- - 55
- - 56
+ - 64
+ - 66
plot-performance-trending-vpp-2n-skx:
csit-vpp-perf-mrr-daily-master-2n-skx:
- start: 589
+ start: 606
end: "lastCompletedBuild"
skip:
- - 605
- 608
- 609
- 612
@@ -417,56 +409,40 @@
plot-performance-trending-dpdk-2n-skx:
csit-dpdk-perf-mrr-weekly-master-2n-skx:
- start: 48
+ start: 57
end: "lastCompletedBuild"
skip:
- - 52
- - 54
- - 55
- - 56
+ - 64
+ - 66
plot-performance-trending-vpp-nfv-2n-skx:
csit-vpp-perf-mrr-weekly-master-2n-skx:
- start: 26
+ start: 30
end: "lastCompletedBuild"
skip:
- - 29
- 34
- 37
# 3n-tsh
plot-performance-trending-all-3n-tsh:
csit-vpp-perf-mrr-daily-master-3n-tsh:
- start: 75
+ start: 95
end: "lastCompletedBuild"
skip:
- - 80
- - 84
- - 89
- - 92
- - 93
- - 94
- 95
plot-performance-trending-vpp-3n-tsh:
csit-vpp-perf-mrr-daily-master-3n-tsh:
- start: 75
+ start: 95
end: "lastCompletedBuild"
skip:
- - 80
- - 84
- - 89
- - 92
- - 93
- - 94
- 95
plot-performance-trending-vpp-3n-dnv:
csit-vpp-perf-mrr-daily-master-3n-dnv:
- start: 69
+ start: 79
end: "lastCompletedBuild"
skip:
- - 78
- 80
- 83
- 84
@@ -511,10 +487,12 @@
- 151
- 175
- 176
+ - 227
+ - 228
plot-performance-trending-vpp-2n-dnv:
csit-vpp-perf-mrr-daily-master-2n-dnv:
- start: 67
+ start: 78
end: "lastCompletedBuild"
skip:
- 79
@@ -560,6 +538,7 @@
- 145
- 149
- 150
+ - 228
plot-layouts:
@@ -658,7 +637,7 @@
# 3n-hsw
csit-vpp-perf-mrr-daily-master:
- start: 836
+ start: 851
end: "lastCompletedBuild"
skip:
- 852
@@ -685,20 +664,18 @@
- 940
- 941
- 953
+ - 987
csit-dpdk-perf-mrr-weekly-master:
- start: 83
+ start: 91
end: "lastCompletedBuild"
skip:
- - 87
- - 89
- - 90
+ - 101
# 3n-skx
csit-vpp-perf-mrr-daily-master-3n-skx:
- start: 587
+ start: 598
end: "lastCompletedBuild"
skip:
- - 588
- 600
- 601
- 603
@@ -721,21 +698,20 @@
- 687
- 694
- 695
+ - 696
+ - 697
+ - 705
csit-dpdk-perf-mrr-weekly-master-3n-skx:
- start: 48
+ start: 57
end: "lastCompletedBuild"
skip:
- - 52
- - 54
- - 55
- - 56
+ - 66
# 2n-skx
csit-vpp-perf-mrr-daily-master-2n-skx:
- start: 589
+ start: 606
end: "lastCompletedBuild"
skip:
- - 605
- 608
- 609
- 612
@@ -762,40 +738,30 @@
- 701
- 702
csit-vpp-perf-mrr-weekly-master-2n-skx:
- start: 26
+ start: 30
end: "lastCompletedBuild"
skip:
- - 29
- 34
- 37
csit-dpdk-perf-mrr-weekly-master-2n-skx:
- start: 48
+ start: 57
end: "lastCompletedBuild"
skip:
- - 52
- - 54
- - 55
- - 56
+ - 64
+ - 66
# 3n-tsh
csit-vpp-perf-mrr-daily-master-3n-tsh:
- start: 75
+ start: 95
end: "lastCompletedBuild"
skip:
- - 80
- - 84
- - 89
- - 92
- - 93
- - 94
- 95
# 3n-dnv
csit-vpp-perf-mrr-daily-master-3n-dnv:
- start: 69
+ start: 79
end: "lastCompletedBuild"
skip:
- - 78
- 80
- 83
- 84
@@ -840,10 +806,12 @@
- 151
- 175
- 176
+ - 227
+ - 228
# 2n-dnv
csit-vpp-perf-mrr-daily-master-2n-dnv:
- start: 67
+ start: 78
end: "lastCompletedBuild"
skip:
- 79
@@ -889,6 +857,7 @@
- 145
- 149
- 150
+ - 228
-
type: "output"
@@ -987,7 +956,7 @@
-
type: "table"
title: "Performance trending dashboard 1t1c-3n-hsw"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-hsw"
data: "plot-performance-trending-all-3n-hsw"
@@ -1006,7 +975,7 @@
-
type: "table"
title: "Performance trending dashboard 2t2c-3n-hsw"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-hsw"
data: "plot-performance-trending-all-3n-hsw"
@@ -1025,7 +994,7 @@
-
type: "table"
title: "Performance trending dashboard 4t4c-3n-hsw"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-hsw"
data: "plot-performance-trending-all-3n-hsw"
@@ -1044,7 +1013,7 @@
-
type: "table"
title: "HTML performance trending dashboard 1t1c-3n-hsw"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-hsw.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-hsw.rst"
testbed: "3n-hsw"
@@ -1052,7 +1021,7 @@
-
type: "table"
title: "HTML performance trending dashboard 2t2c-3n-hsw"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-hsw.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-hsw.rst"
testbed: "3n-hsw"
@@ -1060,7 +1029,7 @@
-
type: "table"
title: "HTML performance trending dashboard 4t4c-3n-hsw"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-hsw.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-hsw.rst"
testbed: "3n-hsw"
@@ -1096,7 +1065,7 @@
-
type: "table"
title: "Performance trending dashboard 2t1c-3n-skx"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t1c-3n-skx"
data: "plot-performance-trending-all-3n-skx"
@@ -1115,7 +1084,7 @@
-
type: "table"
title: "Performance trending dashboard 4t2c-3n-skx"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t2c-3n-skx"
data: "plot-performance-trending-all-3n-skx"
@@ -1134,7 +1103,7 @@
-
type: "table"
title: "Performance trending dashboard 8t4c-3n-skx"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-8t4c-3n-skx"
data: "plot-performance-trending-all-3n-skx"
@@ -1153,7 +1122,7 @@
-
type: "table"
title: "HTML performance trending dashboard 2t1c-3n-skx"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t1c-3n-skx.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t1c-3n-skx.rst"
testbed: "3n-skx"
@@ -1161,7 +1130,7 @@
-
type: "table"
title: "HTML performance trending dashboard 4t2c-3n-skx"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t2c-3n-skx.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t2c-3n-skx.rst"
testbed: "3n-skx"
@@ -1169,7 +1138,7 @@
-
type: "table"
title: "HTML performance trending dashboard 8t4c-3n-skx"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-8t4c-3n-skx.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-8t4c-3n-skx.rst"
testbed: "3n-skx"
@@ -1205,7 +1174,7 @@
-
type: "table"
title: "Performance trending dashboard 2t1c-2n-skx"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t1c-2n-skx"
data: "plot-performance-trending-all-2n-skx"
@@ -1224,7 +1193,7 @@
-
type: "table"
title: "Performance trending dashboard 4t2c-2n-skx"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t2c-2n-skx"
data: "plot-performance-trending-all-2n-skx"
@@ -1243,7 +1212,7 @@
-
type: "table"
title: "Performance trending dashboard 8t4c-2n-skx"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-8t4c-2n-skx"
data: "plot-performance-trending-all-2n-skx"
@@ -1262,7 +1231,7 @@
-
type: "table"
title: "HTML performance trending dashboard 2t1c-2n-skx"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t1c-2n-skx.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t1c-2n-skx.rst"
testbed: "2n-skx"
@@ -1270,7 +1239,7 @@
-
type: "table"
title: "HTML performance trending dashboard 4t2c-2n-skx"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t2c-2n-skx.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t2c-2n-skx.rst"
testbed: "2n-skx"
@@ -1278,7 +1247,7 @@
-
type: "table"
title: "HTML performance trending dashboard 8t4c-2n-skx"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-8t4c-2n-skx.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-8t4c-2n-skx.rst"
testbed: "2n-skx"
@@ -1314,7 +1283,7 @@
-
type: "table"
title: "Performance trending dashboard 1t1c-3n-tsh"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-tsh"
data: "plot-performance-trending-all-3n-tsh"
@@ -1329,7 +1298,7 @@
-
type: "table"
title: "Performance trending dashboard 2t2c-3n-tsh"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-tsh"
data: "plot-performance-trending-all-3n-tsh"
@@ -1344,7 +1313,7 @@
-
type: "table"
title: "Performance trending dashboard 4t4c-3n-tsh"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-tsh"
data: "plot-performance-trending-all-3n-tsh"
@@ -1359,7 +1328,7 @@
-
type: "table"
title: "HTML performance trending dashboard 1t1c-3n-tsh"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-tsh.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-tsh.rst"
testbed: "3n-tsh"
@@ -1367,7 +1336,7 @@
-
type: "table"
title: "HTML performance trending dashboard 2t2c-3n-tsh"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-tsh.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-tsh.rst"
testbed: "3n-tsh"
@@ -1375,7 +1344,7 @@
-
type: "table"
title: "HTML performance trending dashboard 4t4c-3n-tsh"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-tsh.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-tsh.rst"
testbed: "3n-tsh"
@@ -1407,7 +1376,7 @@
-
type: "table"
title: "Performance trending dashboard 1t1c-3n-dnv"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-dnv"
data: "plot-performance-trending-vpp-3n-dnv"
@@ -1422,7 +1391,7 @@
-
type: "table"
title: "Performance trending dashboard 2t2c-3n-dnv"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-dnv"
data: "plot-performance-trending-vpp-3n-dnv"
@@ -1437,7 +1406,7 @@
-
type: "table"
title: "Performance trending dashboard 4t4c-3n-dnv"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-dnv"
data: "plot-performance-trending-vpp-3n-dnv"
@@ -1452,7 +1421,7 @@
-
type: "table"
title: "HTML performance trending dashboard 1t1c-3n-dnv"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-dnv.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-3n-dnv.rst"
testbed: "3n-dnv"
@@ -1460,7 +1429,7 @@
-
type: "table"
title: "HTML performance trending dashboard 2t2c-3n-dnv"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-dnv.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-3n-dnv.rst"
testbed: "3n-dnv"
@@ -1468,7 +1437,7 @@
-
type: "table"
title: "HTML performance trending dashboard 4t4c-3n-dnv"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-dnv.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-3n-dnv.rst"
testbed: "3n-dnv"
@@ -1500,7 +1469,7 @@
-
type: "table"
title: "Performance trending dashboard 1t1c-2n-dnv"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-2n-dnv"
data: "plot-performance-trending-vpp-2n-dnv"
@@ -1515,7 +1484,7 @@
-
type: "table"
title: "Performance trending dashboard 2t2c-2n-dnv"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-2n-dnv"
data: "plot-performance-trending-vpp-2n-dnv"
@@ -1530,7 +1499,7 @@
-
type: "table"
title: "Performance trending dashboard 4t4c-2n-dnv"
- algorithm: "table_performance_trending_dashboard"
+ algorithm: "table_perf_trending_dash"
output-file-ext: ".csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-2n-dnv"
data: "plot-performance-trending-vpp-2n-dnv"
@@ -1545,7 +1514,7 @@
-
type: "table"
title: "HTML performance trending dashboard 1t1c-2n-dnv"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-2n-dnv.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-1t1c-2n-dnv.rst"
testbed: "2n-dnv"
@@ -1553,7 +1522,7 @@
-
type: "table"
title: "HTML performance trending dashboard 2t2c-2n-dnv"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-2n-dnv.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-2t2c-2n-dnv.rst"
testbed: "2n-dnv"
@@ -1561,7 +1530,7 @@
-
type: "table"
title: "HTML performance trending dashboard 4t4c-2n-dnv"
- algorithm: "table_performance_trending_dashboard_html"
+ algorithm: "table_perf_trending_dash_html"
input-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-2n-dnv.csv"
output-file: "{DIR[STATIC,VPP]}/performance-trending-dashboard-4t4c-2n-dnv.rst"
testbed: "2n-dnv"
diff --git a/resources/tools/presentation/specification_parser.py b/resources/tools/presentation/specification_parser.py
index f99c7515fb..ef2fafccae 100644
--- a/resources/tools/presentation/specification_parser.py
+++ b/resources/tools/presentation/specification_parser.py
@@ -18,12 +18,14 @@ Parsing of the specification YAML file.
import logging
-from yaml import load, YAMLError
from pprint import pformat
-from errors import PresentationError
-from utils import (
- get_last_successful_build_number, get_last_completed_build_number)
+from yaml import load, FullLoader, YAMLError
+
+from pal_errors import PresentationError
+from pal_utils import (
+ get_last_successful_build_nr, get_last_completed_build_number
+)
class Specification:
@@ -35,8 +37,8 @@ class Specification:
# Tags are used in specification YAML file and replaced while the file is
# parsed.
- TAG_OPENER = "{"
- TAG_CLOSER = "}"
+ TAG_OPENER = u"{"
+ TAG_CLOSER = u"}"
def __init__(self, cfg_file):
"""Initialization.
@@ -47,15 +49,17 @@ class Specification:
self._cfg_file = cfg_file
self._cfg_yaml = None
- self._specification = {"environment": dict(),
- "configuration": dict(),
- "static": dict(),
- "input": dict(),
- "output": dict(),
- "tables": list(),
- "plots": list(),
- "files": list(),
- "cpta": dict()}
+ self._specification = {
+ u"environment": dict(),
+ u"configuration": dict(),
+ u"static": dict(),
+ u"input": dict(),
+ u"output": dict(),
+ u"tables": list(),
+ u"plots": list(),
+ u"files": list(),
+ u"cpta": dict()
+ }
@property
def specification(self):
@@ -73,7 +77,7 @@ class Specification:
:returns: Environment specification.
:rtype: dict
"""
- return self._specification["environment"]
+ return self._specification[u"environment"]
@property
def configuration(self):
@@ -82,7 +86,7 @@ class Specification:
:returns: Configuration of PAL.
:rtype: dict
"""
- return self._specification["configuration"]
+ return self._specification[u"configuration"]
@property
def static(self):
@@ -91,7 +95,7 @@ class Specification:
:returns: Static content specification.
:rtype: dict
"""
- return self._specification["static"]
+ return self._specification[u"static"]
@property
def mapping(self):
@@ -101,7 +105,7 @@ class Specification:
one.
:rtype: dict
"""
- return self._specification["configuration"]["mapping"]
+ return self._specification[u"configuration"][u"mapping"]
@property
def ignore(self):
@@ -110,7 +114,7 @@ class Specification:
:returns: List of ignored test cases.
:rtype: list
"""
- return self._specification["configuration"]["ignore"]
+ return self._specification[u"configuration"][u"ignore"]
@property
def alerting(self):
@@ -119,7 +123,7 @@ class Specification:
:returns: Specification of alerts.
:rtype: dict
"""
- return self._specification["configuration"]["alerting"]
+ return self._specification[u"configuration"][u"alerting"]
@property
def input(self):
@@ -129,7 +133,7 @@ class Specification:
:returns: Inputs.
:rtype: dict
"""
- return self._specification["input"]
+ return self._specification[u"input"]
@property
def builds(self):
@@ -138,7 +142,7 @@ class Specification:
:returns: Builds defined in the specification.
:rtype: dict
"""
- return self.input["builds"]
+ return self.input[u"builds"]
@property
def output(self):
@@ -149,7 +153,7 @@ class Specification:
:returns: Outputs to be generated.
:rtype: dict
"""
- return self._specification["output"]
+ return self._specification[u"output"]
@property
def tables(self):
@@ -158,7 +162,7 @@ class Specification:
:returns: List of specifications of tables to be generated.
:rtype: list
"""
- return self._specification["tables"]
+ return self._specification[u"tables"]
@property
def plots(self):
@@ -167,7 +171,7 @@ class Specification:
:returns: List of specifications of plots to be generated.
:rtype: list
"""
- return self._specification["plots"]
+ return self._specification[u"plots"]
@property
def files(self):
@@ -176,7 +180,7 @@ class Specification:
:returns: List of specifications of files to be generated.
:rtype: list
"""
- return self._specification["files"]
+ return self._specification[u"files"]
@property
def cpta(self):
@@ -187,7 +191,7 @@ class Specification:
Analysis to be generated.
:rtype: list
"""
- return self._specification["cpta"]
+ return self._specification[u"cpta"]
def set_input_state(self, job, build_nr, state):
"""Set the state of input
@@ -199,17 +203,20 @@ class Specification:
"""
try:
- for build in self._specification["input"]["builds"][job]:
- if build["build"] == build_nr:
- build["status"] = state
+ for build in self._specification[u"input"][u"builds"][job]:
+ if build[u"build"] == build_nr:
+ build[u"status"] = state
break
else:
- raise PresentationError("Build '{}' is not defined for job '{}'"
- " in specification file.".
- format(build_nr, job))
+ raise PresentationError(
+ f"Build {build_nr} is not defined for job {job} in "
+ f"specification file."
+ )
except KeyError:
- raise PresentationError("Job '{}' and build '{}' is not defined in "
- "specification file.".format(job, build_nr))
+ raise PresentationError(
+ f"Job {job} and build {build_nr} is not defined in "
+ f"specification file."
+ )
def set_input_file_name(self, job, build_nr, file_name):
"""Set the state of input
@@ -221,17 +228,20 @@ class Specification:
"""
try:
- for build in self._specification["input"]["builds"][job]:
- if build["build"] == build_nr:
- build["file-name"] = file_name
+ for build in self._specification[u"input"][u"builds"][job]:
+ if build[u"build"] == build_nr:
+ build[u"file-name"] = file_name
break
else:
- raise PresentationError("Build '{}' is not defined for job '{}'"
- " in specification file.".
- format(build_nr, job))
+ raise PresentationError(
+ f"Build {build_nr} is not defined for job {job} in "
+ f"specification file."
+ )
except KeyError:
- raise PresentationError("Job '{}' and build '{}' is not defined in "
- "specification file.".format(job, build_nr))
+ raise PresentationError(
+ f"Job {job} and build {build_nr} is not defined in "
+ f"specification file."
+ )
def _get_build_number(self, job, build_type):
"""Get the number of the job defined by its name:
@@ -250,26 +260,27 @@ class Specification:
"""
# defined as a range <start, end>
- if build_type == "lastSuccessfulBuild":
+ if build_type == u"lastSuccessfulBuild":
# defined as a range <start, lastSuccessfulBuild>
- ret_code, build_nr, _ = get_last_successful_build_number(
- self.environment["urls"]["URL[JENKINS,CSIT]"], job)
- elif build_type == "lastCompletedBuild":
+ ret_code, build_nr, _ = get_last_successful_build_nr(
+ self.environment[u"urls"][u"URL[JENKINS,CSIT]"], job)
+ elif build_type == u"lastCompletedBuild":
# defined as a range <start, lastCompletedBuild>
ret_code, build_nr, _ = get_last_completed_build_number(
- self.environment["urls"]["URL[JENKINS,CSIT]"], job)
+ self.environment[u"urls"][u"URL[JENKINS,CSIT]"], job)
else:
- raise PresentationError("Not supported build type: '{0}'".
- format(build_type))
+ raise PresentationError(f"Not supported build type: {build_type}")
if ret_code != 0:
- raise PresentationError("Not possible to get the number of the "
- "build number.")
+ raise PresentationError(u"Not possible to get the number of the "
+ u"build number.")
try:
build_nr = int(build_nr)
return build_nr
except ValueError as err:
- raise PresentationError("Not possible to get the number of the "
- "build number.\nReason: {0}".format(err))
+ raise PresentationError(
+ f"Not possible to get the number of the build number. Reason:\n"
+ f"{repr(err)}"
+ )
def _get_type_index(self, item_type):
"""Get index of item type (environment, input, output, ...) in
@@ -284,7 +295,7 @@ class Specification:
index = 0
for item in self._cfg_yaml:
- if item["type"] == item_type:
+ if item[u"type"] == item_type:
return index
index += 1
return None
@@ -337,12 +348,13 @@ class Specification:
data[key] = value.replace(tag, src_data[tag[1:-1]])
counter += 1
except KeyError:
- raise PresentationError("Not possible to replace the "
- "tag '{}'".format(tag))
+ raise PresentationError(
+ f"Not possible to replace the tag {tag}"
+ )
if counter:
self._replace_tags(data, src_data)
else:
- raise PresentationError("Replace tags: Not supported data type.")
+ raise PresentationError(u"Replace tags: Not supported data type.")
return data
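# Illustrative sketch (not from the PAL code): _replace_tags above substitutes the
# {NAME} placeholders used throughout the specification, delimited by TAG_OPENER
# u"{" and TAG_CLOSER u"}", e.g. "{DIR[STATIC,DPDK]}". A simplified stand-in for
# the plain-string case, with a hypothetical src_data dict keyed by the tag name
# without the braces:
import re

def replace_tags_in_str(text, src_data):
    """Replace every {NAME} placeholder found in text from src_data."""
    return re.sub(
        r"\{([^{}]+)\}",
        lambda match: str(src_data[match.group(1)]),
        text
    )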
@@ -350,158 +362,170 @@ class Specification:
"""Parse environment specification in the specification YAML file.
"""
- logging.info("Parsing specification file: environment ...")
+ logging.info(u"Parsing specification file: environment ...")
- idx = self._get_type_index("environment")
+ idx = self._get_type_index(u"environment")
if idx is None:
- return None
+ return
try:
- self._specification["environment"]["configuration"] = \
- self._cfg_yaml[idx]["configuration"]
+ self._specification[u"environment"][u"configuration"] = \
+ self._cfg_yaml[idx][u"configuration"]
except KeyError:
- self._specification["environment"]["configuration"] = None
+ self._specification[u"environment"][u"configuration"] = None
try:
- self._specification["environment"]["paths"] = \
- self._replace_tags(self._cfg_yaml[idx]["paths"])
+ self._specification[u"environment"][u"paths"] = \
+ self._replace_tags(self._cfg_yaml[idx][u"paths"])
except KeyError:
- self._specification["environment"]["paths"] = None
+ self._specification[u"environment"][u"paths"] = None
try:
- self._specification["environment"]["urls"] = \
- self._cfg_yaml[idx]["urls"]
+ self._specification[u"environment"][u"urls"] = \
+ self._cfg_yaml[idx][u"urls"]
except KeyError:
- self._specification["environment"]["urls"] = None
+ self._specification[u"environment"][u"urls"] = None
try:
- self._specification["environment"]["make-dirs"] = \
- self._cfg_yaml[idx]["make-dirs"]
+ self._specification[u"environment"][u"make-dirs"] = \
+ self._cfg_yaml[idx][u"make-dirs"]
except KeyError:
- self._specification["environment"]["make-dirs"] = None
+ self._specification[u"environment"][u"make-dirs"] = None
try:
- self._specification["environment"]["remove-dirs"] = \
- self._cfg_yaml[idx]["remove-dirs"]
+ self._specification[u"environment"][u"remove-dirs"] = \
+ self._cfg_yaml[idx][u"remove-dirs"]
except KeyError:
- self._specification["environment"]["remove-dirs"] = None
+ self._specification[u"environment"][u"remove-dirs"] = None
try:
- self._specification["environment"]["build-dirs"] = \
- self._cfg_yaml[idx]["build-dirs"]
+ self._specification[u"environment"][u"build-dirs"] = \
+ self._cfg_yaml[idx][u"build-dirs"]
except KeyError:
- self._specification["environment"]["build-dirs"] = None
+ self._specification[u"environment"][u"build-dirs"] = None
try:
- self._specification["environment"]["testbeds"] = \
- self._cfg_yaml[idx]["testbeds"]
+ self._specification[u"environment"][u"testbeds"] = \
+ self._cfg_yaml[idx][u"testbeds"]
except KeyError:
- self._specification["environment"]["testbeds"] = None
+ self._specification[u"environment"][u"testbeds"] = None
+
+ logging.info(u"Done.")
+
+ def _load_mapping_table(self):
+ """Load a mapping table if it is specified. If not, use empty list.
+ """
+
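+        # The mapping file is optional. Its keys and values (presumably old
+        # test names and their renamed counterparts) are stored lower-cased.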
+ mapping_file_name = self._specification[u"configuration"].\
+ get(u"mapping-file", None)
+ if mapping_file_name:
+ try:
+ with open(mapping_file_name, u'r') as mfile:
+ mapping = load(mfile, Loader=FullLoader)
+ # Make sure everything is lowercase
+ self._specification[u"configuration"][u"mapping"] = \
+ {key.lower(): val.lower() for key, val in
+ mapping.items()}
+ logging.debug(f"Loaded mapping table:\n{mapping}")
+ except (YAMLError, IOError) as err:
+ raise PresentationError(
+ msg=f"An error occurred while parsing the mapping file "
+ f"{mapping_file_name}",
+ details=repr(err)
+ )
+ else:
+ self._specification[u"configuration"][u"mapping"] = dict()
- logging.info("Done.")
+ def _load_ignore_list(self):
+ """Load an ignore list if it is specified. If not, use empty list.
+ """
+
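+        # The ignore list is optional as well; its entries (presumably test
+        # names to be skipped) are stored lower-cased.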
+ ignore_list_name = self._specification[u"configuration"].\
+ get(u"ignore-list", None)
+ if ignore_list_name:
+ try:
+ with open(ignore_list_name, u'r') as ifile:
+ ignore = load(ifile, Loader=FullLoader)
+ # Make sure everything is lowercase
+ self._specification[u"configuration"][u"ignore"] = \
+ [item.lower() for item in ignore]
+ logging.debug(f"Loaded ignore list:\n{ignore}")
+ except (YAMLError, IOError) as err:
+ raise PresentationError(
+ msg=f"An error occurred while parsing the ignore list file "
+ f"{ignore_list_name}.",
+ details=repr(err)
+ )
+ else:
+ self._specification[u"configuration"][u"ignore"] = list()
def _parse_configuration(self):
"""Parse configuration of PAL in the specification YAML file.
"""
- logging.info("Parsing specification file: configuration ...")
+ logging.info(u"Parsing specification file: configuration ...")
idx = self._get_type_index("configuration")
if idx is None:
- logging.warning("No configuration information in the specification "
- "file.")
- return None
+ logging.warning(
+ u"No configuration information in the specification file."
+ )
+ return
try:
- self._specification["configuration"] = self._cfg_yaml[idx]
-
+ self._specification[u"configuration"] = self._cfg_yaml[idx]
except KeyError:
- raise PresentationError("No configuration defined.")
+ raise PresentationError(u"No configuration defined.")
# Data sets: Replace ranges by lists
- for set_name, data_set in self.configuration["data-sets"].items():
+ for set_name, data_set in self.configuration[u"data-sets"].items():
if not isinstance(data_set, dict):
continue
for job, builds in data_set.items():
- if builds:
- if isinstance(builds, dict):
- build_end = builds.get("end", None)
+ if not builds:
+ continue
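+                # A dict build specification ({start, end, skip}) is expanded
+                # into an explicit list of build numbers; a symbolic "end"
+                # (e.g. lastCompletedBuild) is resolved via
+                # _get_build_number().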
+ if isinstance(builds, dict):
+ build_end = builds.get(u"end", None)
+ try:
+ build_end = int(build_end)
+ except ValueError:
+ # defined as a range <start, build_type>
+ build_end = self._get_build_number(job, build_end)
+ builds = [x for x in range(builds[u"start"],
+ build_end + 1)
+ if x not in builds.get(u"skip", list())]
+ self.configuration[u"data-sets"][set_name][job] = builds
+ elif isinstance(builds, list):
+ for idx, item in enumerate(builds):
try:
- build_end = int(build_end)
+ builds[idx] = int(item)
except ValueError:
- # defined as a range <start, build_type>
- build_end = self._get_build_number(job, build_end)
- builds = [x for x in range(builds["start"], build_end+1)
- if x not in builds.get("skip", list())]
- self.configuration["data-sets"][set_name][job] = builds
- elif isinstance(builds, list):
- for idx, item in enumerate(builds):
- try:
- builds[idx] = int(item)
- except ValueError:
- # defined as a range <build_type>
- builds[idx] = self._get_build_number(job, item)
+ # defined as a range <build_type>
+ builds[idx] = self._get_build_number(job, item)
# Data sets: add sub-sets to sets (only one level):
- for set_name, data_set in self.configuration["data-sets"].items():
+ for set_name, data_set in self.configuration[u"data-sets"].items():
if isinstance(data_set, list):
new_set = dict()
for item in data_set:
try:
- for key, val in self.configuration["data-sets"][item].\
+ for key, val in self.configuration[u"data-sets"][item].\
items():
new_set[key] = val
except KeyError:
raise PresentationError(
- "Data set {0} is not defined in "
- "the configuration section.".format(item))
- self.configuration["data-sets"][set_name] = new_set
+ f"Data set {item} is not defined in "
+ f"the configuration section."
+ )
+ self.configuration[u"data-sets"][set_name] = new_set
# Mapping table:
- mapping = None
- mapping_file_name = self._specification["configuration"].\
- get("mapping-file", None)
- if mapping_file_name:
- logging.debug("Mapping file: '{0}'".format(mapping_file_name))
- try:
- with open(mapping_file_name, 'r') as mfile:
- mapping = load(mfile)
- logging.debug("Loaded mapping table:\n{0}".format(mapping))
- except (YAMLError, IOError) as err:
- raise PresentationError(
- msg="An error occurred while parsing the mapping file "
- "'{0}'.".format(mapping_file_name),
- details=repr(err))
- # Make sure everything is lowercase
- if mapping:
- self._specification["configuration"]["mapping"] = \
- {key.lower(): val.lower() for key, val in mapping.iteritems()}
- else:
- self._specification["configuration"]["mapping"] = dict()
+ self._load_mapping_table()
# Ignore list:
- ignore = None
- ignore_list_name = self._specification["configuration"].\
- get("ignore-list", None)
- if ignore_list_name:
- logging.debug("Ignore list file: '{0}'".format(ignore_list_name))
- try:
- with open(ignore_list_name, 'r') as ifile:
- ignore = load(ifile)
- logging.debug("Loaded ignore list:\n{0}".format(ignore))
- except (YAMLError, IOError) as err:
- raise PresentationError(
- msg="An error occurred while parsing the ignore list file "
- "'{0}'.".format(ignore_list_name),
- details=repr(err))
- # Make sure everything is lowercase
- if ignore:
- self._specification["configuration"]["ignore"] = \
- [item.lower() for item in ignore]
- else:
- self._specification["configuration"]["ignore"] = list()
+ self._load_ignore_list()
- logging.info("Done.")
+ logging.info(u"Done.")
def _parse_input(self):
"""Parse input specification in the specification YAML file.
@@ -509,41 +533,43 @@ class Specification:
:raises: PresentationError if there are no data to process.
"""
- logging.info("Parsing specification file: input ...")
+ logging.info(u"Parsing specification file: input ...")
- idx = self._get_type_index("input")
+ idx = self._get_type_index(u"input")
if idx is None:
- raise PresentationError("No data to process.")
+ raise PresentationError(u"No data to process.")
try:
- for key, value in self._cfg_yaml[idx]["general"].items():
- self._specification["input"][key] = value
- self._specification["input"]["builds"] = dict()
+ for key, value in self._cfg_yaml[idx][u"general"].items():
+ self._specification[u"input"][key] = value
+ self._specification[u"input"][u"builds"] = dict()
- for job, builds in self._cfg_yaml[idx]["builds"].items():
+ for job, builds in self._cfg_yaml[idx][u"builds"].items():
if builds:
if isinstance(builds, dict):
- build_end = builds.get("end", None)
+ build_end = builds.get(u"end", None)
try:
build_end = int(build_end)
except ValueError:
# defined as a range <start, build_type>
build_end = self._get_build_number(job, build_end)
- builds = [x for x in range(builds["start"], build_end+1)
- if x not in builds.get("skip", list())]
- self._specification["input"]["builds"][job] = list()
+ builds = [x for x in range(builds[u"start"],
+ build_end + 1)
+ if x not in builds.get(u"skip", list())]
+ self._specification[u"input"][u"builds"][job] = list()
for build in builds:
- self._specification["input"]["builds"][job]. \
- append({"build": build, "status": None})
+ self._specification[u"input"][u"builds"][job]. \
+ append({u"build": build, u"status": None})
else:
- logging.warning("No build is defined for the job '{}'. "
- "Trying to continue without it.".
- format(job))
+ logging.warning(
+ f"No build is defined for the job {job}. Trying to "
+ f"continue without it."
+ )
except KeyError:
- raise PresentationError("No data to process.")
+ raise PresentationError(u"No data to process.")
- logging.info("Done.")
+ logging.info(u"Done.")
def _parse_output(self):
"""Parse output specification in the specification YAML file.
@@ -551,176 +577,226 @@ class Specification:
:raises: PresentationError if there is no output defined.
"""
- logging.info("Parsing specification file: output ...")
+ logging.info(u"Parsing specification file: output ...")
- idx = self._get_type_index("output")
+ idx = self._get_type_index(u"output")
if idx is None:
- raise PresentationError("No output defined.")
+ raise PresentationError(u"No output defined.")
try:
- self._specification["output"] = self._cfg_yaml[idx]
+ self._specification[u"output"] = self._cfg_yaml[idx]
except (KeyError, IndexError):
- raise PresentationError("No output defined.")
+ raise PresentationError(u"No output defined.")
- logging.info("Done.")
+ logging.info(u"Done.")
def _parse_static(self):
"""Parse specification of the static content in the specification YAML
file.
"""
- logging.info("Parsing specification file: static content ...")
+ logging.info(u"Parsing specification file: static content ...")
- idx = self._get_type_index("static")
+ idx = self._get_type_index(u"static")
if idx is None:
- logging.warning("No static content specified.")
+ logging.warning(u"No static content specified.")
for key, value in self._cfg_yaml[idx].items():
if isinstance(value, str):
try:
self._cfg_yaml[idx][key] = self._replace_tags(
- value, self._specification["environment"]["paths"])
+ value, self._specification[u"environment"][u"paths"])
except KeyError:
pass
- self._specification["static"] = self._cfg_yaml[idx]
+ self._specification[u"static"] = self._cfg_yaml[idx]
+
+ logging.info(u"Done.")
+
+ def _parse_elements_tables(self, table):
+ """Parse tables from the specification YAML file.
+
+ :param table: Table to be parsed from the specification file.
+ :type table: dict
+ :raises PresentationError: If wrong data set is used.
+ """
+
+ try:
+ table[u"template"] = self._replace_tags(
+ table[u"template"],
+ self._specification[u"environment"][u"paths"])
+ except KeyError:
+ pass
+
+ # Add data sets
+ try:
+ for item in (u"reference", u"compare"):
+ if table.get(item, None):
+ data_set = table[item].get(u"data", None)
+ if isinstance(data_set, str):
+ table[item][u"data"] = \
+ self.configuration[u"data-sets"][data_set]
+ data_set = table[item].get(u"data-replacement", None)
+ if isinstance(data_set, str):
+ table[item][u"data-replacement"] = \
+ self.configuration[u"data-sets"][data_set]
+
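+            # Comparison tables may also reference historical data sets
+            # ("history"); these are expanded the same way.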
+ if table.get(u"history", None):
+ for i in range(len(table[u"history"])):
+ data_set = table[u"history"][i].get(u"data", None)
+ if isinstance(data_set, str):
+ table[u"history"][i][u"data"] = \
+ self.configuration[u"data-sets"][data_set]
+ data_set = table[u"history"][i].get(
+ u"data-replacement", None)
+ if isinstance(data_set, str):
+ table[u"history"][i][u"data-replacement"] = \
+ self.configuration[u"data-sets"][data_set]
+ except KeyError:
+ raise PresentationError(
+ f"Wrong data set used in {table.get(u'title', u'')}."
+ )
+
+ self._specification[u"tables"].append(table)
+
+ def _parse_elements_plots(self, plot):
+ """Parse plots from the specification YAML file.
+
+ :param plot: Plot to be parsed from the specification file.
+ :type plot: dict
+ :raises PresentationError: If plot layout is not defined.
+ """
+
+ # Add layout to the plots:
+ layout = plot[u"layout"].get(u"layout", None)
+ if layout is not None:
+ plot[u"layout"].pop(u"layout")
+ try:
+ for key, val in (self.configuration[u"plot-layouts"]
+ [layout].items()):
+ plot[u"layout"][key] = val
+ except KeyError:
+ raise PresentationError(
+ f"Layout {layout} is not defined in the "
+ f"configuration section."
+ )
+ self._specification[u"plots"].append(plot)
+
+ def _parse_elements_files(self, file):
+ """Parse files from the specification YAML file.
+
+ :param file: File to be parsed from the specification file.
+ :type file: dict
+ """
- logging.info("Done.")
+ try:
+ file[u"dir-tables"] = self._replace_tags(
+ file[u"dir-tables"],
+ self._specification[u"environment"][u"paths"])
+ except KeyError:
+ pass
+ self._specification[u"files"].append(file)
+
+ def _parse_elements_cpta(self, cpta):
+ """Parse cpta from the specification YAML file.
+
+ :param cpta: cpta to be parsed from the specification file.
+ :type cpta: dict
+ :raises PresentationError: If wrong data set is used or if plot layout
+ is not defined.
+ """
+
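+        # Each CPTA plot may reference a named layout and a named data set
+        # defined in the configuration section; both are expanded in place.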
+ for plot in cpta[u"plots"]:
+ # Add layout to the plots:
+ layout = plot.get(u"layout", None)
+ if layout is not None:
+ try:
+ plot[u"layout"] = \
+ self.configuration[u"plot-layouts"][layout]
+ except KeyError:
+ raise PresentationError(
+ f"Layout {layout} is not defined in the "
+ f"configuration section."
+ )
+ # Add data sets:
+ if isinstance(plot.get(u"data", None), str):
+ data_set = plot[u"data"]
+ try:
+ plot[u"data"] = \
+ self.configuration[u"data-sets"][data_set]
+ except KeyError:
+ raise PresentationError(
+ f"Data set {data_set} is not defined in "
+ f"the configuration section."
+ )
+ self._specification[u"cpta"] = cpta
def _parse_elements(self):
- """Parse elements (tables, plots) specification in the specification
+ """Parse elements (tables, plots, ..) specification in the specification
YAML file.
"""
- logging.info("Parsing specification file: elements ...")
+ logging.info(u"Parsing specification file: elements ...")
count = 1
for element in self._cfg_yaml:
+
+ # Replace tags:
try:
- element["output-file"] = self._replace_tags(
- element["output-file"],
- self._specification["environment"]["paths"])
+ element[u"output-file"] = self._replace_tags(
+ element[u"output-file"],
+ self._specification[u"environment"][u"paths"])
except KeyError:
pass
try:
- element["input-file"] = self._replace_tags(
- element["input-file"],
- self._specification["environment"]["paths"])
+ element[u"input-file"] = self._replace_tags(
+ element[u"input-file"],
+ self._specification[u"environment"][u"paths"])
except KeyError:
pass
- # add data sets to the elements:
- if isinstance(element.get("data", None), str):
- data_set = element["data"]
+ # Add data sets to the elements:
+ if isinstance(element.get(u"data", None), str):
+ data_set = element[u"data"]
try:
- element["data"] = self.configuration["data-sets"][data_set]
+ element[u"data"] = \
+ self.configuration[u"data-sets"][data_set]
except KeyError:
- raise PresentationError("Data set {0} is not defined in "
- "the configuration section.".
- format(data_set))
+ raise PresentationError(
+ f"Data set {data_set} is not defined in the "
+ f"configuration section."
+ )
- if element["type"] == "table":
- logging.info(" {:3d} Processing a table ...".format(count))
- try:
- element["template"] = self._replace_tags(
- element["template"],
- self._specification["environment"]["paths"])
- except KeyError:
- pass
-
- # add data sets
- try:
- for item in ("reference", "compare"):
- if element.get(item, None):
- data_set = element[item].get("data", None)
- if isinstance(data_set, str):
- element[item]["data"] = \
- self.configuration["data-sets"][data_set]
- data_set = element[item].get("data-replacement",
- None)
- if isinstance(data_set, str):
- element[item]["data-replacement"] = \
- self.configuration["data-sets"][data_set]
-
- if element.get("history", None):
- for i in range(len(element["history"])):
- data_set = element["history"][i].get("data", None)
- if isinstance(data_set, str):
- element["history"][i]["data"] = \
- self.configuration["data-sets"][data_set]
- data_set = element["history"][i].get(
- "data-replacement", None)
- if isinstance(data_set, str):
- element["history"][i]["data-replacement"] = \
- self.configuration["data-sets"][data_set]
+ # Parse elements:
+ if element[u"type"] == u"table":
- except KeyError:
- raise PresentationError("Wrong data set used in {0}.".
- format(element.get("title", "")))
-
- self._specification["tables"].append(element)
+ logging.info(f" {count:3d} Processing a table ...")
+ self._parse_elements_tables(element)
count += 1
- elif element["type"] == "plot":
- logging.info(" {:3d} Processing a plot ...".format(count))
+ elif element[u"type"] == u"plot":
- # Add layout to the plots:
- layout = element["layout"].get("layout", None)
- if layout is not None:
- element["layout"].pop("layout")
- try:
- for key, val in (self.configuration["plot-layouts"]
- [layout].items()):
- element["layout"][key] = val
- except KeyError:
- raise PresentationError("Layout {0} is not defined in "
- "the configuration section.".
- format(layout))
- self._specification["plots"].append(element)
+ logging.info(f" {count:3d} Processing a plot ...")
+ self._parse_elements_plots(element)
count += 1
- elif element["type"] == "file":
- logging.info(" {:3d} Processing a file ...".format(count))
- try:
- element["dir-tables"] = self._replace_tags(
- element["dir-tables"],
- self._specification["environment"]["paths"])
- except KeyError:
- pass
- self._specification["files"].append(element)
+ elif element[u"type"] == u"file":
+
+ logging.info(f" {count:3d} Processing a file ...")
+ self._parse_elements_files(element)
count += 1
- elif element["type"] == "cpta":
- logging.info(" {:3d} Processing Continuous Performance "
- "Trending and Analysis ...".format(count))
+ elif element[u"type"] == u"cpta":
- for plot in element["plots"]:
- # Add layout to the plots:
- layout = plot.get("layout", None)
- if layout is not None:
- try:
- plot["layout"] = \
- self.configuration["plot-layouts"][layout]
- except KeyError:
- raise PresentationError(
- "Layout {0} is not defined in the "
- "configuration section.".format(layout))
- # Add data sets:
- if isinstance(plot.get("data", None), str):
- data_set = plot["data"]
- try:
- plot["data"] = \
- self.configuration["data-sets"][data_set]
- except KeyError:
- raise PresentationError(
- "Data set {0} is not defined in "
- "the configuration section.".
- format(data_set))
- self._specification["cpta"] = element
+ logging.info(
+ f" {count:3d} Processing Continuous Performance Trending "
+ f"and Analysis ..."
+ )
+ self._parse_elements_cpta(element)
count += 1
- logging.info("Done.")
+ logging.info(u"Done.")
def read_specification(self):
"""Parse specification in the specification YAML file.
@@ -729,10 +805,10 @@ class Specification:
specification file.
"""
try:
- self._cfg_yaml = load(self._cfg_file)
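+            # FullLoader is explicit here because calling yaml.load() without
+            # a Loader argument is deprecated since PyYAML 5.1.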
+ self._cfg_yaml = load(self._cfg_file, Loader=FullLoader)
except YAMLError as err:
- raise PresentationError(msg="An error occurred while parsing the "
- "specification file.",
+ raise PresentationError(msg=u"An error occurred while parsing the "
+ u"specification file.",
details=str(err))
self._parse_env()
@@ -742,5 +818,4 @@ class Specification:
self._parse_static()
self._parse_elements()
- logging.debug("Specification: \n{}".
- format(pformat(self._specification)))
+ logging.debug(f"Specification: \n{pformat(self._specification)}")
diff --git a/resources/tools/presentation/static_content.py b/resources/tools/presentation/static_content.py
index a02330c15f..0fb81d1cbe 100644
--- a/resources/tools/presentation/static_content.py
+++ b/resources/tools/presentation/static_content.py
@@ -22,7 +22,7 @@ from os import makedirs
from os.path import isdir
from shutil import rmtree, copytree, Error
-from errors import PresentationError
+from pal_errors import PresentationError
def prepare_static_content(spec):
@@ -31,19 +31,19 @@ def prepare_static_content(spec):
:param spec: Specification read from the specification file.
:type spec: Specification
:raises PresentationError: If it is not possible to process the static
- content.
+ content.
"""
- src = spec.static.get("src-path", None)
- dst = spec.static.get("dst-path", None)
+ src = spec.static.get(u"src-path", None)
+ dst = spec.static.get(u"dst-path", None)
if src is None or dst is None:
- logging.warning("No static content specified, skipping")
+ logging.warning(u"No static content specified, skipping")
return
# Copy all the static content to the build directory:
- logging.info("Copying the static content ...")
- logging.info(" Source: {0}".format(src))
- logging.info(" Destination: {0}".format(dst))
+ logging.info(u"Copying the static content ...")
+ logging.info(f" Source: {src}")
+ logging.info(f" Destination: {dst}")
try:
if isdir(dst):
@@ -51,10 +51,12 @@ def prepare_static_content(spec):
copytree(src, dst)
- makedirs(spec.environment["paths"]["DIR[WORKING,SRC,STATIC]"])
+ makedirs(spec.environment[u"paths"][u"DIR[WORKING,SRC,STATIC]"])
except (Error, OSError) as err:
- raise PresentationError("Not possible to process the static content.",
- str(err))
+ raise PresentationError(
+ u"Not possible to process the static content.",
+ repr(err)
+ )
- logging.info("Done.")
+ logging.info(u"Done.")