Diffstat (limited to 'resources/tools')
-rwxr-xr-x  resources/tools/doc_gen/gen_rst.py                      88
-rw-r--r--  resources/tools/doc_gen/src/conf.py                     56
-rw-r--r--  resources/tools/integrated/check_crc.py                 53
-rwxr-xr-x  resources/tools/papi/vpp_papi_provider.py              116
-rw-r--r--  resources/tools/presentation/environment.py              2
-rw-r--r--  resources/tools/presentation/generator_alerts.py         2
-rw-r--r--  resources/tools/presentation/input_data_parser.py        2
-rw-r--r--  resources/tools/presentation/specification_parser.py     2
-rw-r--r--  resources/tools/scripts/compare_perpatch.py             61
-rwxr-xr-x  resources/tools/scripts/topo_reservation.py             59
-rwxr-xr-x  resources/tools/topology/update_topology.py             90
-rwxr-xr-x  resources/tools/trex/trex_server_info.py                 8
-rwxr-xr-x  resources/tools/trex/trex_stateless_profile.py         188
-rwxr-xr-x  resources/tools/trex/trex_stateless_stop.py             45
-rw-r--r--  resources/tools/wrk/wrk.py                             207
-rw-r--r--  resources/tools/wrk/wrk_errors.py                        4
-rw-r--r--  resources/tools/wrk/wrk_traffic_profile_parser.py      150
17 files changed, 583 insertions(+), 550 deletions(-)
diff --git a/resources/tools/doc_gen/gen_rst.py b/resources/tools/doc_gen/gen_rst.py
index c6d82817ee..28223e22c7 100755
--- a/resources/tools/doc_gen/gen_rst.py
+++ b/resources/tools/doc_gen/gen_rst.py
@@ -16,52 +16,52 @@ from os import walk, listdir
from os.path import isfile, isdir, join, getsize
# Temporary working directory. It is created and deleted by run_doc.sh
-WORKING_DIR = "tmp"
+WORKING_DIR = u"tmp"
# Directory with resources to be documented.
-RESOURCES_DIR = "resources"
+RESOURCES_DIR = u"resources"
# Directory with libraries (python, robot) to be documented.
-LIB_DIR = "libraries"
+LIB_DIR = u"libraries"
# Directory with tests (func, perf) to be documented.
-TESTS_DIR = "tests"
+TESTS_DIR = u"tests"
-PY_EXT = ".py"
-RF_EXT = ".robot"
+PY_EXT = u".py"
+RF_EXT = u".robot"
-PATH_PY_LIBS = join(WORKING_DIR, RESOURCES_DIR, LIB_DIR, "python")
-PATH_RF_LIBS = join(WORKING_DIR, RESOURCES_DIR, LIB_DIR, "robot")
+PATH_PY_LIBS = join(WORKING_DIR, RESOURCES_DIR, LIB_DIR, u"python")
+PATH_RF_LIBS = join(WORKING_DIR, RESOURCES_DIR, LIB_DIR, u"robot")
PATH_TESTS = join(WORKING_DIR, TESTS_DIR)
# Sections in rst files
-rst_toc = """
+rst_toc = u"""
.. toctree::
"""
-rst_py_module = """
+rst_py_module = u"""
.. automodule:: {}.{}
:members:
:undoc-members:
:show-inheritance:
"""
-rst_rf_suite_setup = """
+rst_rf_suite_setup = u"""
.. robot-settings::
:source: {}
"""
-rst_rf_variables = """
+rst_rf_variables = u"""
.. robot-variables::
:source: {}
"""
-rst_rf_keywords = """
+rst_rf_keywords = u"""
.. robot-keywords::
:source: {}
"""
-rst_rf_tests = """
+rst_rf_tests = u"""
.. robot-tests::
:source: {}
"""
@@ -104,9 +104,9 @@ def create_file_name(path, start):
:returns: File name.
:rtype: str
"""
- dir_list = path.split('/')
+ dir_list = path.split(u"/")
start_index = dir_list.index(start)
- return ".".join(dir_list[start_index:-1]) + ".rst"
+ return u".".join(dir_list[start_index:-1]) + u".rst"
def create_rst_file_names_set(files, start):
@@ -139,7 +139,7 @@ def scan_dir(path):
dirs = list()
items = listdir(path)
for item in items:
- if isfile(join(path, item)) and "__init__" not in item:
+ if isfile(join(path, item)) and u"__init__" not in item:
files.append(item)
elif isdir(join(path, item)):
dirs.append(item)
@@ -158,7 +158,7 @@ def write_toc(fh, path, dirs):
"""
fh.write(rst_toc)
for dir in dirs:
- fh.write(" {}.{}\n".format('.'.join(path), dir))
+ fh.write(f" {u'.'.join(path)}.{dir}\n")
def write_module_title(fh, module_name):
@@ -170,20 +170,20 @@ def write_module_title(fh, module_name):
:type fh: BinaryIO
:type module_name: str
"""
- title = "{} suite".format(module_name)
- fh.write("\n{}\n{}\n".format(title, '-' * len(title)))
+ title = f"{module_name} suite"
+    fh.write(f"\n{title}\n{u'-' * len(title)}\n")
def generate_py_rst_files():
"""Generate all rst files for all python modules."""
- dirs_ignore_list = ["__pycache__", ]
+ dirs_ignore_list = [u"__pycache__", ]
py_libs = get_files(PATH_PY_LIBS, PY_EXT)
file_names = create_rst_file_names_set(py_libs, RESOURCES_DIR)
for file_name in file_names:
- path = join(WORKING_DIR, *file_name.split('.')[:-1])
+ path = join(WORKING_DIR, *file_name.split(u".")[:-1])
dirs, files = scan_dir(path)
for item in dirs_ignore_list:
@@ -194,23 +194,25 @@ def generate_py_rst_files():
break
full_path = join(WORKING_DIR, file_name)
- with open(full_path, mode='a') as fh:
+ with open(full_path, mode="a") as fh:
if getsize(full_path) == 0:
- package = file_name.split('.')[-2]
- fh.write("{}\n".format(package))
- fh.write('=' * len("{}".format(package)))
- module_path = file_name.split('.')[:-1]
+ package = file_name.split(u".")[-2]
+ fh.write(f"{package}\n")
+ fh.write(u"=" * len(f"{package}"))
+ module_path = file_name.split(u".")[:-1]
if dirs:
write_toc(fh, module_path, dirs)
for file in files:
- module_name = file.split('.')[0]
+ module_name = file.split(u".")[0]
write_module_title(fh, module_name)
- fh.write(rst_py_module.format('.'.join(module_path),
- module_name))
+ fh.write(rst_py_module.format(
+ u".".join(module_path), module_name)
+ )
-def generate_rf_rst_files(file_names, incl_tests=True, incl_keywords=True,
- incl_suite_setup=False, incl_variables=False):
+def generate_rf_rst_files(
+ file_names, incl_tests=True, incl_keywords=True, incl_suite_setup=False,
+ incl_variables=False):
"""Generate rst files for the given robot modules.
:param file_names: List of file names to be included in the documentation
@@ -230,20 +232,20 @@ def generate_rf_rst_files(file_names, incl_tests=True, incl_keywords=True,
"""
for file_name in file_names:
- path = join(WORKING_DIR, *file_name.split('.')[:-1])
+ path = join(WORKING_DIR, *file_name.split(u".")[:-1])
dirs, files = scan_dir(path)
full_path = join(WORKING_DIR, file_name)
- with open(full_path, mode='a') as fh:
+ with open(full_path, mode="a") as fh:
if getsize(full_path) == 0:
- package = file_name.split('.')[-2]
- fh.write("{}\n".format(package))
- fh.write('=' * len("{}".format(package)) + '\n')
- module_path = file_name.split('.')[:-1]
+ package = file_name.split(u".")[-2]
+ fh.write(f"{package}\n")
+ fh.write(u"=" * len(f"{package}") + u"\n")
+ module_path = file_name.split(u".")[:-1]
if dirs:
write_toc(fh, module_path, dirs)
for file in files:
- module_name = file.split('.')[0]
+ module_name = file.split(u".")[0]
write_module_title(fh, module_name)
path = join(join(*module_path), module_name + RF_EXT)
if incl_suite_setup:
@@ -273,12 +275,12 @@ def generate_tests_rst_files():
tests = get_files(PATH_TESTS, RF_EXT)
file_names = create_rst_file_names_set(tests, TESTS_DIR)
- generate_rf_rst_files(file_names,
- incl_suite_setup=True,
- incl_variables=True)
+ generate_rf_rst_files(
+ file_names, incl_suite_setup=True, incl_variables=True
+ )
-if __name__ == '__main__':
+if __name__ == u"__main__":
# Generate all rst files:
generate_py_rst_files()
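
Note: the recurring pattern in this patch is replacing str.format() calls
with f-strings. A minimal sketch of the equivalence (values hypothetical,
not part of the patch); the trailing newline matters, which is why the
write in write_module_title() above keeps it:

    title = "module_x"                        # hypothetical module name
    old = "\n{}\n{}\n".format(title, "-" * len(title))
    new = f"\n{title}\n{'-' * len(title)}\n"
    assert old == new                         # identical output, clearer source
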
diff --git a/resources/tools/doc_gen/src/conf.py b/resources/tools/doc_gen/src/conf.py
index 1ebbbe7921..9be0baea53 100644
--- a/resources/tools/doc_gen/src/conf.py
+++ b/resources/tools/doc_gen/src/conf.py
@@ -19,7 +19,7 @@
import os
import sys
-sys.path.insert(0, os.path.abspath('.'))
+sys.path.insert(0, os.path.abspath(u"."))
# -- General configuration ------------------------------------------------
@@ -31,31 +31,31 @@ sys.path.insert(0, os.path.abspath('.'))
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.doctest',
- 'sphinxcontrib_robotdoc',
+ u"sphinx.ext.autodoc",
+ u"sphinx.ext.doctest",
+ u"sphinxcontrib_robotdoc"
]
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = [u"_templates"]
-# The suffix(es) of source filenames.
+# The suffix(es) of source file names.
# You can specify multiple suffix as a list of string:
#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+# source_suffix = [u".rst", u".md"]
+source_suffix = u".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
-master_doc = 'index'
+master_doc = u"index"
# General information about the project.
-project = u'CSIT'
-copyright = u'2018, FD.io'
-author = u'CSIT'
+project = u"CSIT"
+copyright = u"2018, FD.io"
+author = u"CSIT"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -85,7 +85,7 @@ language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = [u"_build", u"Thumbs.db", u".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -107,7 +107,7 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = u"sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -124,8 +124,8 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
-# html_theme = 'alabaster'
-html_theme = 'sphinx_rtd_theme'
+# html_theme = u"alabaster"
+html_theme = u"sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -134,21 +134,21 @@ html_theme = 'sphinx_rtd_theme'
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
-html_theme_path = ['env/lib/python2.7/site-packages/sphinx_rtd_theme']
+html_theme_path = [u"env/lib/python2.7/site-packages/sphinx_rtd_theme"]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
-html_title = u'CSIT Documentation'
+html_title = u"CSIT Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
#
-html_short_title = u'CSIT'
+html_short_title = u"CSIT"
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
-html_logo = 'fdio_logo.png'
+html_logo = u"fdio_logo.png"
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
@@ -159,7 +159,7 @@ html_logo = 'fdio_logo.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = [u"_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
@@ -241,7 +241,7 @@ html_show_sourcelink = True
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
-htmlhelp_basename = 'csitdoc'
+htmlhelp_basename = u"csitdoc"
# -- Options for LaTeX output ---------------------------------------------
@@ -267,8 +267,7 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, 'CSIT.tex', u'CSIT Documentation',
- u'CSIT', 'manual'),
+ (master_doc, u"CSIT.tex", u"CSIT Documentation", u"CSIT", u"manual"),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -309,8 +308,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (master_doc, 'CSIT', u'CSIT Documentation',
- [author], 1)
+ (master_doc, u"CSIT", u"CSIT Documentation", [author], 1)
]
# If true, show URL addresses after external links.
@@ -324,9 +322,9 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- (master_doc, 'CSIT', u'CSIT Documentation',
- author, 'CSIT', 'One line description of project.',
- 'Miscellaneous'),
+ (master_doc, u"CSIT", u"CSIT Documentation",
+ author, u"CSIT", u"One line description of project.",
+ u"Miscellaneous"),
]
# Documents to append as an appendix to all manuals.
diff --git a/resources/tools/integrated/check_crc.py b/resources/tools/integrated/check_crc.py
index 3d5c30a6d6..157bd9e86d 100644
--- a/resources/tools/integrated/check_crc.py
+++ b/resources/tools/integrated/check_crc.py
@@ -27,38 +27,41 @@ from resources.libraries.python.VppApiCrc import VppApiCrcChecker
# TODO: Read FDIO_VPP_DIR environment variable, or some other input,
# instead of using hardcoded relative path?
-API_DIR = op.normpath(op.join(
- op.dirname(op.abspath(__file__)), "..", "..", "..", "..",
- "build-root", "install-vpp-native", "vpp", "share", "vpp", "api"))
+API_DIR = op.normpath(
+ op.join(
+ op.dirname(op.abspath(__file__)), u"..", u"..", u"..", u"..",
+ u"build-root", u"install-vpp-native", u"vpp", u"share", u"vpp", u"api"
+ )
+)
CHECKER = VppApiCrcChecker(API_DIR)
try:
CHECKER.report_initial_conflicts(report_missing=True)
except RuntimeError as err:
- sys.stderr.write("{err!r}\n".format(err=err))
+ sys.stderr.write(f"{err!r}\n")
sys.stderr.write(
- "\n"
- "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
- "\n"
- "VPP CSIT API CHECK FAIL!\n"
- "\n"
- "This means the patch under test has missing messages,\n"
- "or messages with unexpected CRCs compared to what CSIT needs.\n"
- "Either this Change and/or its ancestors were editing .api files,\n"
- "or your chain is not rebased upon the recent enough VPP codebase.\n"
- "\n"
- "Please rebase the patch to see if that fixes the problem.\n"
- "If that fails email csit-dev@lists.fd.io for a new\n"
- "operational branch supporting the api changes.\n"
- "\n"
- "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
+ u"\n"
+ u"@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
+ u"\n"
+ u"VPP CSIT API CHECK FAIL!\n"
+ u"\n"
+ u"This means the patch under test has missing messages,\n"
+ u"or messages with unexpected CRCs compared to what CSIT needs.\n"
+ u"Either this Change and/or its ancestors were editing .api files,\n"
+ u"or your chain is not rebased upon the recent enough VPP codebase.\n"
+ u"\n"
+ u"Please rebase the patch to see if that fixes the problem.\n"
+ u"If that fails email csit-dev@lists.fd.io for a new\n"
+ u"operational branch supporting the api changes.\n"
+ u"\n"
+ u"@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
)
sys.exit(1)
else:
sys.stderr.write(
- "\n"
- "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
- "\n"
- "VPP CSIT API CHECK PASS!\n"
- "\n"
- "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
+ u"\n"
+ u"@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
+ u"\n"
+ u"VPP CSIT API CHECK PASS!\n"
+ u"\n"
+ u"@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\n"
)
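
Note: the banner above reaches sys.stderr.write() as a single argument
because adjacent string literals concatenate at compile time. A minimal
sketch of that mechanism (hypothetical text, not part of the patch):

    banner = (
        u"line one\n"
        u"line two\n"
    )
    assert banner == u"line one\nline two\n"   # literals fused by the parser
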
diff --git a/resources/tools/papi/vpp_papi_provider.py b/resources/tools/papi/vpp_papi_provider.py
index 676f5491dd..6f3b06663a 100755
--- a/resources/tools/papi/vpp_papi_provider.py
+++ b/resources/tools/papi/vpp_papi_provider.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -34,14 +34,13 @@ VPP-stats:
"""
import argparse
-import binascii
import json
import os
import sys
# Client name
-CLIENT_NAME = 'csit_papi'
+CLIENT_NAME = u"csit_papi"
# Sphinx creates auto-generated documentation by importing the python source
@@ -50,7 +49,7 @@ CLIENT_NAME = 'csit_papi'
# the whole vpp api if the user only wishes to generate the test documentation.
try:
- do_import = False if os.getenv("NO_VPP_PAPI") == "1" else True
+    do_import = os.getenv(u"NO_VPP_PAPI") != u"1"
except KeyError:
do_import = True
@@ -61,9 +60,9 @@ if do_import:
# TODO: Find a better way to import papi modules.
modules_path = None
- for root, dirs, files in os.walk('/usr/lib'):
+ for root, dirs, files in os.walk(u"/usr/lib"):
for name in files:
- if name == 'vpp_papi.py':
+ if name == u"vpp_papi.py":
modules_path = os.path.split(root)[0]
break
if modules_path:
@@ -71,7 +70,7 @@ if do_import:
from vpp_papi import VPP
from vpp_papi.vpp_stats import VPPStats
else:
- raise RuntimeError('vpp_papi module not found')
+ raise RuntimeError(u"vpp_papi module not found")
def _convert_reply(api_r):
@@ -89,7 +88,7 @@ def _convert_reply(api_r):
:returns: Processed API reply / a part of API reply.
:rtype: dict
"""
- unwanted_fields = ['count', 'index', 'context']
+ unwanted_fields = [u"count", u"index", u"context"]
def process_value(val):
"""Process value.
@@ -100,29 +99,31 @@ def _convert_reply(api_r):
:rtype: dict or str or int
"""
if isinstance(val, dict):
- for val_k, val_v in val.iteritems():
+ for val_k, val_v in val.items():
val[str(val_k)] = process_value(val_v)
return val
elif isinstance(val, list):
for idx, val_l in enumerate(val):
val[idx] = process_value(val_l)
return val
- elif hasattr(val, '__int__'):
+        elif isinstance(val, bytes):
+            # Return the hex form; without "return" the bytes case yields None.
+            return val.hex()
+ elif hasattr(val, u"__int__"):
return int(val)
- elif hasattr(val, '__str__'):
- return binascii.hexlify(str(val))
+ elif hasattr(val, "__str__"):
+ return str(val).encode(encoding=u"utf-8").hex()
# Next handles parameters not supporting preferred integer or string
# representation to get it logged
- elif hasattr(val, '__repr__'):
+ elif hasattr(val, u"__repr__"):
return repr(val)
else:
return val
reply_dict = dict()
- reply_key = repr(api_r).split('(')[0]
+ reply_key = repr(api_r).split(u"(")[0]
reply_value = dict()
for item in dir(api_r):
- if not item.startswith('_') and item not in unwanted_fields:
+ if not item.startswith(u"_") and item not in unwanted_fields:
reply_value[item] = process_value(getattr(api_r, item))
reply_dict[reply_key] = reply_value
return reply_dict
@@ -141,7 +142,7 @@ def process_json_request(args):
try:
vpp = VPP()
except Exception as err:
- raise RuntimeError('PAPI init failed:\n{err}'.format(err=repr(err)))
+ raise RuntimeError(f"PAPI init failed:\n{err!r}")
reply = list()
@@ -154,15 +155,15 @@ def process_json_request(args):
:rtype: dict or str or int
"""
if isinstance(val, dict):
- for val_k, val_v in val.iteritems():
+ for val_k, val_v in val.items():
val[str(val_k)] = process_value(val_v)
return val
elif isinstance(val, list):
for idx, val_l in enumerate(val):
val[idx] = process_value(val_l)
return val
- elif isinstance(val, unicode):
- return binascii.unhexlify(val)
+ elif isinstance(val, str):
+ return bytes.fromhex(val).decode(encoding=u"utf-8")
elif isinstance(val, int):
return val
else:
@@ -171,8 +172,8 @@ def process_json_request(args):
json_data = json.loads(args.data)
vpp.connect(CLIENT_NAME)
for data in json_data:
- api_name = data['api_name']
- api_args_unicode = data['api_args']
+ api_name = data[u"api_name"]
+ api_args_unicode = data[u"api_args"]
api_reply = dict(api_name=api_name)
api_args = dict()
for a_k, a_v in api_args_unicode.items():
@@ -188,20 +189,18 @@ def process_json_request(args):
else:
converted_reply = _convert_reply(rep)
- api_reply['api_reply'] = converted_reply
+ api_reply[u"api_reply"] = converted_reply
reply.append(api_reply)
except (AttributeError, ValueError) as err:
vpp.disconnect()
- raise RuntimeError('PAPI command {api}({args}) input error:\n{err}'.
- format(api=api_name,
- args=api_args,
- err=repr(err)))
+ raise RuntimeError(
+ f"PAPI command {api_name}({api_args}) input error:\n{err!r}"
+ )
except Exception as err:
vpp.disconnect()
- raise RuntimeError('PAPI command {api}({args}) error:\n{exc}'.
- format(api=api_name,
- args=api_args,
- exc=repr(err)))
+ raise RuntimeError(
+ f"PAPI command {api_name}({api_args}) error:\n{err!r}"
+ )
vpp.disconnect()
return json.dumps(reply)
@@ -220,7 +219,7 @@ def process_stats(args):
try:
stats = VPPStats(args.socket)
except Exception as err:
- raise RuntimeError('PAPI init failed:\n{err}'.format(err=repr(err)))
+ raise RuntimeError(f"PAPI init failed:\n{err!r}")
json_data = json.loads(args.data)
@@ -234,8 +233,7 @@ def process_stats(args):
try:
return json.dumps(reply)
except UnicodeDecodeError as err:
- raise RuntimeError('PAPI reply {reply} error:\n{exc}'.format(
- reply=reply, exc=repr(err)))
+ raise RuntimeError(f"PAPI reply {reply} error:\n{err!r}")
def process_stats_request(args):
@@ -251,16 +249,15 @@ def process_stats_request(args):
try:
stats = VPPStats(args.socket)
except Exception as err:
- raise RuntimeError('PAPI init failed:\n{err}'.format(err=repr(err)))
+ raise RuntimeError(f"PAPI init failed:\n{err!r}")
try:
json_data = json.loads(args.data)
except ValueError as err:
- raise RuntimeError('Input json string is invalid:\n{err}'.
- format(err=repr(err)))
+ raise RuntimeError(f"Input json string is invalid:\n{err!r}")
- papi_fn = getattr(stats, json_data["api_name"])
- reply = papi_fn(**json_data.get("api_args", {}))
+ papi_fn = getattr(stats, json_data[u"api_name"])
+ reply = papi_fn(**json_data.get(u"api_args", {}))
return json.dumps(reply)
@@ -279,32 +276,35 @@ def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
- description=__doc__)
- parser.add_argument("-m", "--method",
- required=True,
- choices=[str(key) for key in process_request.keys()],
- help="Specifies the VPP API methods: 1. request - "
- "simple request / reply; 2. dump - dump function;"
- "3. stats - VPP statistics.")
- parser.add_argument("-d", "--data",
- required=True,
- help="If the method is 'request' or 'dump', data is a "
- "JSON string (list) containing API name(s) and "
- "its/their input argument(s). "
- "If the method is 'stats', data is a JSON string "
- "containing the list of path(s) to the required "
- "data.")
- parser.add_argument("-s", "--socket",
- default="/var/run/vpp/stats.sock",
- help="A file descriptor over the VPP stats Unix domain "
- "socket. It is used only if method=='stats'.")
+ description=__doc__
+ )
+ parser.add_argument(
+ u"-m", u"--method", required=True,
+ choices=[str(key) for key in process_request.keys()],
+ help=u"Specifies the VPP API methods: "
+ u"1. request - simple request / reply; "
+ u"2. dump - dump function;"
+ u"3. stats - VPP statistics."
+ )
+ parser.add_argument(
+ u"-d", u"--data", required=True,
+ help=u"If the method is 'request' or 'dump', data is a JSON string "
+ u"(list) containing API name(s) and its/their input argument(s). "
+ u"If the method is 'stats', data is a JSON string containing t"
+ u"he list of path(s) to the required data."
+ )
+ parser.add_argument(
+ u"-s", u"--socket", default=u"/var/run/vpp/stats.sock",
+ help=u"A file descriptor over the VPP stats Unix domain socket. "
+ u"It is used only if method=='stats'."
+ )
args = parser.parse_args()
return process_request[args.method](args)
-if __name__ == '__main__':
+if __name__ == u"__main__":
sys.stdout.write(main())
sys.stdout.flush()
sys.exit(0)
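
Note: the hunks above replace the Python 2 binascii helpers with bytes
methods. A minimal sketch of the equivalence (standard library only,
payload hypothetical):

    import binascii

    data = b"\xde\xad\xbe\xef"
    # bytes.hex() replaces binascii.hexlify() for logging byte fields:
    assert data.hex() == binascii.hexlify(data).decode() == "deadbeef"
    # bytes.fromhex() replaces binascii.unhexlify() when decoding arguments:
    assert bytes.fromhex("deadbeef") == binascii.unhexlify("deadbeef") == data
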
diff --git a/resources/tools/presentation/environment.py b/resources/tools/presentation/environment.py
index a2fa9a0d5b..7cddb0279c 100644
--- a/resources/tools/presentation/environment.py
+++ b/resources/tools/presentation/environment.py
@@ -24,7 +24,7 @@ import logging
from errors import PresentationError
-class Environment(object):
+class Environment:
"""Setting of the environment:
- set environment variables,
- create directories.
diff --git a/resources/tools/presentation/generator_alerts.py b/resources/tools/presentation/generator_alerts.py
index 3f19230c0f..3a9b5ddfb6 100644
--- a/resources/tools/presentation/generator_alerts.py
+++ b/resources/tools/presentation/generator_alerts.py
@@ -57,7 +57,7 @@ class AlertingError(PresentationError):
format(msg=self._msg, dets=self._details, level=self._level))
-class Alerting(object):
+class Alerting:
"""Class implementing the alerting mechanism.
"""
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index e512cf8c47..46c8b9d5b9 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -1059,7 +1059,7 @@ class ExecutionChecker(ResultVisitor):
pass
-class InputData(object):
+class InputData:
"""Input data
The data is extracted from output.xml files generated by Jenkins jobs and
diff --git a/resources/tools/presentation/specification_parser.py b/resources/tools/presentation/specification_parser.py
index 9852d905ba..16c69ce18c 100644
--- a/resources/tools/presentation/specification_parser.py
+++ b/resources/tools/presentation/specification_parser.py
@@ -26,7 +26,7 @@ from utils import get_last_successful_build_number
from utils import get_last_completed_build_number
-class Specification(object):
+class Specification:
"""Specification of Presentation and analytics layer.
- based on specification specified in the specification YAML file
diff --git a/resources/tools/scripts/compare_perpatch.py b/resources/tools/scripts/compare_perpatch.py
index 9c9bd20023..c2c165c76d 100644
--- a/resources/tools/scripts/compare_perpatch.py
+++ b/resources/tools/scripts/compare_perpatch.py
@@ -44,6 +44,7 @@ def hack(value_list):
ret = tmp[3*eight:-eight]
return tmp # ret
+
iteration = -1
parent_iterations = list()
current_iterations = list()
@@ -52,23 +53,25 @@ while 1:
iteration += 1
parent_lines = list()
current_lines = list()
- filename = "csit_parent/{iter}/results.txt".format(iter=iteration)
+ filename = f"csit_parent/{iteration}/results.txt"
try:
with open(filename) as parent_file:
parent_lines = parent_file.readlines()
except IOError:
break
num_lines = len(parent_lines)
- filename = "csit_current/{iter}/results.txt".format(iter=iteration)
+ filename = f"csit_current/{iteration}/results.txt"
with open(filename) as current_file:
current_lines = current_file.readlines()
if num_lines != len(current_lines):
- print "Number of tests does not match within iteration", iteration
+ print(f"Number of tests does not match within iteration {iteration}")
sys.exit(1)
if num_tests is None:
num_tests = num_lines
elif num_tests != num_lines:
- print "Number of tests does not match previous at iteration", iteration
+ print(
+ f"Number of tests does not match previous at iteration {iteration}"
+ )
sys.exit(1)
parent_iterations.append(parent_lines)
current_iterations.append(current_lines)
@@ -80,13 +83,13 @@ for test_index in range(num_tests):
current_values = list()
for iteration_index in range(len(parent_iterations)):
parent_values.extend(
- json.loads(parent_iterations[iteration_index][test_index]))
+ json.loads(parent_iterations[iteration_index][test_index])
+ )
current_values.extend(
- json.loads(current_iterations[iteration_index][test_index]))
- print "Time-ordered MRR values for parent build: {p}".format(
- p=parent_values)
- print "Time-ordered MRR values for current build: {c}".format(
- c=current_values)
+ json.loads(current_iterations[iteration_index][test_index])
+ )
+ print(f"Time-ordered MRR values for parent build: {parent_values}")
+ print(f"Time-ordered MRR values for current build: {current_values}")
parent_values = hack(parent_values)
current_values = hack(current_values)
parent_max = BitCountingMetadataFactory.find_max_value(parent_values)
@@ -97,35 +100,27 @@ for test_index in range(num_tests):
current_factory = BitCountingMetadataFactory(val_max, parent_stats.avg)
current_stats = current_factory.from_data(current_values)
both_stats = factory.from_data(parent_values + current_values)
- print "Value-ordered MRR values for parent build: {p}".format(
- p=parent_values)
- print "Value-ordered MRR values for current build: {c}".format(
- c=current_values)
+ print(f"Value-ordered MRR values for parent build: {parent_values}")
+ print(f"Value-ordered MRR values for current build: {current_values}")
difference = (current_stats.avg - parent_stats.avg) / parent_stats.avg
- print "Difference of averages relative to parent: {d}%".format(
- d=100 * difference)
- print "Jumpavg representation of parent group: {p}".format(
- p=parent_stats)
- print "Jumpavg representation of current group: {c}".format(
- c=current_stats)
- print "Jumpavg representation of both as one group: {b}".format(
- b=both_stats)
+ print(f"Difference of averages relative to parent: {100 * difference}%")
+ print(f"Jumpavg representation of parent group: {parent_stats}")
+ print(f"Jumpavg representation of current group: {current_stats}")
+ print(f"Jumpavg representation of both as one group: {both_stats}")
bits = parent_stats.bits + current_stats.bits - both_stats.bits
- compared = "longer" if bits >= 0 else "shorter"
- print "Separate groups are {cmp} than single group by {bit} bits".format(
- cmp=compared, bit=abs(bits))
+ compared = u"longer" if bits >= 0 else u"shorter"
+ print(
+ f"Separate groups are {compared} than single group by {abs(bits)} bits"
+ )
classified_list = classifier.classify([parent_stats, current_stats])
if len(classified_list) < 2:
- print "Test test_index {test_index}: normal (no anomaly)".format(
- test_index=test_index)
+ print(f"Test test_index {test_index}: normal (no anomaly)")
continue
anomaly = classified_list[1].metadata.classification
- if anomaly == "regression":
- print "Test test_index {test_index}: anomaly regression".format(
- test_index=test_index)
+ if anomaly == u"regression":
+ print(f"Test test_index {test_index}: anomaly regression")
exit_code = 1
continue
- print "Test test_index {test_index}: anomaly {anomaly}".format(
- test_index=test_index, anomaly=anomaly)
-print "Exit code {code}".format(code=exit_code)
+ print(f"Test test_index {test_index}: anomaly {anomaly}")
+print(f"Exit code {exit_code}")
sys.exit(exit_code)
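
Note: the bits comparison above is a minimum-description-length style test:
if encoding parent and current results as two separate groups costs fewer
bits than encoding them as one merged group, a real change between the
builds is the better explanation. A minimal sketch of the decision rule
with hypothetical bit counts (the real ones come from the jumpavg
statistics shown above):

    parent_bits, current_bits, both_bits = 100.0, 110.0, 230.0
    bits = parent_bits + current_bits - both_bits
    compared = u"longer" if bits >= 0 else u"shorter"
    print(f"Separate groups are {compared} than single group by {abs(bits)} bits")
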
diff --git a/resources/tools/scripts/topo_reservation.py b/resources/tools/scripts/topo_reservation.py
index e7e1ff6bab..73f6d0e815 100755
--- a/resources/tools/scripts/topo_reservation.py
+++ b/resources/tools/scripts/topo_reservation.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
# Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,15 +20,15 @@ simultaneous use of nodes listed in topology file.
As source of truth, TG node from the topology file is used.
"""
-import sys
import argparse
+import sys
import yaml
from resources.libraries.python.ssh import exec_cmd
-RESERVATION_DIR = "/tmp/reservation_dir"
-RESERVATION_NODE = "TG"
+RESERVATION_DIR = u"/tmp/reservation_dir"
+RESERVATION_NODE = u"TG"
def diag_cmd(node, cmd):
@@ -36,10 +36,10 @@ def diag_cmd(node, cmd):
:param node: Node object as parsed from topology file to execute cmd on.
:param cmd: Command to execute.
- :type ssh: dict
+ :type node: dict
:type cmd: str
"""
- print('+ {cmd}'.format(cmd=cmd))
+ print(f"+ {cmd}")
_, stdout, _ = exec_cmd(node, cmd)
print(stdout)
@@ -74,16 +74,18 @@ def main():
    Python returns on encountering an unexpected exception.
"""
parser = argparse.ArgumentParser()
- parser.add_argument("-t", "--topo", required=True,
- help="Topology file")
- parser.add_argument("-c", "--cancel", help="Cancel reservation",
- action="store_true")
- parser.add_argument("-r", "--runtag", required=False, default="Unknown",
- help="Identifier for test run suitable as filename")
+ parser.add_argument(u"-t", u"--topo", required=True, help=u"Topology file")
+ parser.add_argument(
+ u"-c", u"--cancel", help=u"Cancel reservation", action=u"store_true"
+ )
+ parser.add_argument(
+ u"-r", u"--runtag", required=False, default=u"Unknown",
+ help=u"Identifier for test run suitable as filename"
+ )
args = parser.parse_args()
with open(args.topo, "r") as topo_file:
- topology = yaml.load(topo_file.read())['nodes']
+ topology = yaml.safe_load(topo_file.read())[u"nodes"]
# Even if TG is not guaranteed to be a Linux host,
# we are using it, because testing shows SSH access to DUT
@@ -91,39 +93,36 @@ def main():
try:
node = topology[RESERVATION_NODE]
except KeyError:
- print("Topology file does not contain '{node}' node".
- format(node=RESERVATION_NODE))
+ print(f"Topology file does not contain '{RESERVATION_NODE}' node")
return 1
# For system reservation we use mkdir it is an atomic operation and we can
# store additional data (time, client_ID, ..) within reservation directory.
if args.cancel:
- ret, _, err = exec_cmd(node, "rm -r {dir}".format(dir=RESERVATION_DIR))
+ ret, _, err = exec_cmd(node, f"rm -r {RESERVATION_DIR}")
if ret:
- print("Cancellation unsuccessful:\n{err}".format(err=err))
+ print(f"Cancellation unsuccessful:\n{err!r}")
return ret
# Before critical section, output can be outdated already.
- print("Diagnostic commands:")
- # -d and * are to supress "total <size>", see https://askubuntu.com/a/61190
- diag_cmd(node, "ls --full-time -cd '{dir}'/*".format(dir=RESERVATION_DIR))
- print("Attempting testbed reservation.")
+ print(u"Diagnostic commands:")
+ # -d and * are to suppress "total <size>", see https://askubuntu.com/a/61190
+ diag_cmd(node, f"ls --full-time -cd '{RESERVATION_DIR}'/*")
+ print(u"Attempting testbed reservation.")
# Entering critical section.
- ret, _, _ = exec_cmd(node, "mkdir '{dir}'".format(dir=RESERVATION_DIR))
+ ret, _, _ = exec_cmd(node, f"mkdir '{RESERVATION_DIR}'")
# Critical section is over.
if ret:
- _, stdo, _ = exec_cmd(node, "ls '{dir}'/*".format(dir=RESERVATION_DIR))
- print("Testbed already reserved by:\n{stdo}".format(stdo=stdo))
+ _, stdo, _ = exec_cmd(node, f"ls '{RESERVATION_DIR}'/*")
+ print(f"Testbed already reserved by:\n{stdo}")
return 2
# Here the script knows it is the only owner of the testbed.
- print("Reservation success, writing additional info to reservation dir.")
+ print(u"Reservation success, writing additional info to reservation dir.")
ret, _, err = exec_cmd(
- node, "touch '{dir}/{runtag}'"\
- .format(dir=RESERVATION_DIR, runtag=args.runtag))
+ node, f"touch '{RESERVATION_DIR}/{args.runtag}'")
if ret:
- print("Writing test run info failed, but continuing anyway:\n{err}".
- format(err=err))
+ print(f"Writing test run info failed, but continuing anyway:\n{err!r}")
return 0
-if __name__ == "__main__":
+if __name__ == u"__main__":
sys.exit(main())
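
Note: the reservation scheme in this script relies on mkdir being atomic:
of several concurrent callers, exactly one succeeds and becomes the owner.
A minimal local sketch of the same idea (the script itself runs mkdir on
the TG node over SSH; path reused from above for illustration):

    import errno
    import os

    LOCK_DIR = u"/tmp/reservation_dir"
    try:
        os.mkdir(LOCK_DIR)            # atomic: only one caller can create it
        print(u"reservation acquired")
    except OSError as err:
        if err.errno == errno.EEXIST:
            print(u"testbed already reserved")
        else:
            raise
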
diff --git a/resources/tools/topology/update_topology.py b/resources/tools/topology/update_topology.py
index 2e267639ef..4ba3a833d0 100755
--- a/resources/tools/topology/update_topology.py
+++ b/resources/tools/topology/update_topology.py
@@ -20,6 +20,7 @@ extracts MAC address from it."""
import sys
import os
import re
+
from argparse import ArgumentParser
import yaml
@@ -36,12 +37,12 @@ def load_topology(args):
:rtype: dict
"""
data = None
- with open(args.topology, 'r') as stream:
+ with open(args.topology, "r") as stream:
try:
- data = yaml.load(stream)
+ data = yaml.safe_load(stream)
except yaml.YAMLError as exc:
- print 'Failed to load topology file: {0}'.format(args.topology)
- print exc
+ print(f"Failed to load topology file: {args.topology}")
+ print(exc)
raise
return data
@@ -60,10 +61,10 @@ def ssh_no_error(ssh, cmd):
"""
ret, stdo, stde = ssh.exec_command(cmd)
if ret != 0:
- print 'Command execution failed: "{}"'.format(cmd)
- print 'stdout: {0}'.format(stdo)
- print 'stderr: {0}'.format(stde)
- raise RuntimeError('Unexpected ssh command failure')
+ print(f"Command execution failed: '{cmd}'")
+ print(f"stdout: {stdo}")
+ print(f"stderr: {stde}")
+ raise RuntimeError(u"Unexpected ssh command failure")
return stdo
@@ -80,11 +81,12 @@ def update_mac_addresses_for_node(node):
:param node: Node from topology.
:type node: dict
"""
- for port_name, port in node['interfaces'].items():
- if 'driver' not in port:
- err_msg = '{0} port {1} has no driver element, exiting'.format(
- node['host'], port_name)
- raise RuntimeError(err_msg)
+ for port_name, port in node[u"interfaces"].items():
+ if u"driver" not in port:
+ raise RuntimeError(
+ f"{node[u'host']} port {port_name} has no driver element, "
+ f"exiting"
+ )
ssh = SSH()
ssh.connect(node)
@@ -92,33 +94,34 @@ def update_mac_addresses_for_node(node):
# TODO: make following SSH commands into one-liner to save on SSH opers
# First unbind from current driver
- drvr_dir_path = '/sys/bus/pci/devices/{0}/driver'.format(
- port['pci_address'])
- cmd = '''\
- if [ -d {0} ]; then
- echo {1} | sudo tee {0}/unbind ;
+ drvr_dir_path = f"/sys/bus/pci/devices/{port[u'pci_address']}/driver"
+ cmd = f'''\
+ if [ -d {drvr_dir_path} ]; then
+ echo {port[u'pci_address']} | sudo tee {drvr_dir_path}/unbind ;
else
true Do not have to do anything, port already unbound ;
- fi'''.format(drvr_dir_path, port['pci_address'])
+ fi'''
ssh_no_error(ssh, cmd)
# Then bind to the 'driver' from topology for given port
- cmd = 'echo {0} | sudo tee /sys/bus/pci/drivers/{1}/bind'.\
- format(port['pci_address'], port['driver'])
+ cmd = f"echo {port[u'pci_address']} | " \
+ f"sudo tee /sys/bus/pci/drivers/{port[u'driver']}/bind"
ssh_no_error(ssh, cmd)
# Then extract the mac address and store it in the topology
- cmd = 'cat /sys/bus/pci/devices/{0}/net/*/address'.format(
- port['pci_address'])
+ cmd = f"cat /sys/bus/pci/devices/{port['pci_address']}/net/*/address"
mac = ssh_no_error(ssh, cmd).strip()
- pattern = re.compile("^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$")
+ pattern = re.compile(u"^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$")
if not pattern.match(mac):
- raise RuntimeError('MAC address read from host {0} {1} is in '
- 'bad format "{2}"'
- .format(node['host'], port['pci_address'], mac))
- print '{0}: Found MAC address of PCI device {1}: {2}'.format(
- node['host'], port['pci_address'], mac)
- port['mac_address'] = mac
+ raise RuntimeError(
+ f"MAC address read from host {node[u'host']} "
+ f"{port[u'pci_address']} is in bad format '{mac}'"
+ )
+ print(
+ f"{node[u'host']}: Found MAC address of PCI device "
+ f"{port[u'pci_address']}: {mac}"
+ )
+ port[u"mac_address"] = mac
def update_nodes_mac_addresses(topology):
@@ -128,7 +131,7 @@ def update_nodes_mac_addresses(topology):
:param topology: Topology information with nodes.
:type topology: dict
"""
- for node in topology['nodes'].values():
+ for node in topology[u"nodes"].values():
update_mac_addresses_for_node(node)
@@ -145,25 +148,28 @@ def dump_updated_topology(topology, args):
if args.output_file:
if not args.force:
if os.path.isfile(args.output_file):
- print ('File {0} already exists. If you want to overwrite this '
- 'file, add -f as a parameter to this script'.format(
- args.output_file))
+            print(
+ f"File {args.output_file} already exists. If you want to "
+ f"overwrite this file, add -f as a parameter to this script"
+ )
return 1
- with open(args.output_file, 'w') as stream:
+ with open(args.output_file, "w") as stream:
yaml.dump(topology, stream, default_flow_style=False)
else:
- print yaml.dump(topology, default_flow_style=False)
+ print(yaml.dump(topology, default_flow_style=False))
return 0
def main():
"""Main function"""
parser = ArgumentParser()
- parser.add_argument('topology', help="Topology yaml file to read")
- parser.add_argument('--output-file', '-o', help='Output file')
- parser.add_argument('-f', '--force', help='Overwrite existing file',
- action='store_const', const=True)
- parser.add_argument('--verbose', '-v', action='store_true')
+ parser.add_argument(u"topology", help=u"Topology yaml file to read")
+ parser.add_argument(u"--output-file", u"-o", help=u"Output file")
+ parser.add_argument(
+ u"-f", u"--force", help=u"Overwrite existing file",
+ action=u"store_const", const=True
+ )
+ parser.add_argument(u"--verbose", u"-v", action=u"store_true")
args = parser.parse_args()
topology = load_topology(args)
@@ -173,5 +179,5 @@ def main():
return ret
-if __name__ == "__main__":
+if __name__ == u"__main__":
sys.exit(main())
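
Note: both this script and topo_reservation.py switch from yaml.load() to
yaml.safe_load(), which refuses YAML tags that would instantiate arbitrary
Python objects. A minimal sketch with a hypothetical topology snippet:

    import yaml

    doc = u"nodes:\n  TG:\n    host: 10.0.0.1\n"
    data = yaml.safe_load(doc)        # same result as yaml.load() on plain data
    assert data[u"nodes"][u"TG"][u"host"] == u"10.0.0.1"
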
diff --git a/resources/tools/trex/trex_server_info.py b/resources/tools/trex/trex_server_info.py
index b9574957a5..e74a63e09d 100755
--- a/resources/tools/trex/trex_server_info.py
+++ b/resources/tools/trex/trex_server_info.py
@@ -28,10 +28,12 @@ Functionality:
import sys
-sys.path.insert(0, "/opt/trex-core-2.61/scripts/automation/"+\
- "trex_control_plane/interactive/")
+sys.path.insert(
+ 0, u"/opt/trex-core-2.61/scripts/automation/trex_control_plane/interactive/"
+)
from trex.stl.api import *
+
def main():
"""Check server info and quit."""
client = STLClient()
@@ -48,5 +50,5 @@ def main():
client.disconnect()
-if __name__ == "__main__":
+if __name__ == u"__main__":
main()
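
Note: sys.path.insert(0, ...) is used here (and in the other TRex scripts)
so the vendored TRex package shadows any same-named module on the default
path; appending would not guarantee that. A minimal sketch, assuming the
TRex tree exists at that location:

    import sys

    sys.path.insert(
        0, u"/opt/trex-core-2.61/scripts/automation/trex_control_plane/"
           u"interactive/"
    )
    from trex.stl.api import STLClient    # resolved from the prepended path
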
diff --git a/resources/tools/trex/trex_stateless_profile.py b/resources/tools/trex/trex_stateless_profile.py
index b888bcdea9..9233f6a5f1 100755
--- a/resources/tools/trex/trex_stateless_profile.py
+++ b/resources/tools/trex/trex_stateless_profile.py
@@ -22,8 +22,9 @@ import argparse
import json
import sys
-sys.path.insert(0, "/opt/trex-core-2.61/scripts/automation/"
- "trex_control_plane/interactive/")
+sys.path.insert(
+ 0, "/opt/trex-core-2.61/scripts/automation/trex_control_plane/interactive/"
+)
from trex.stl.api import *
@@ -54,11 +55,12 @@ def fmt_latency(lat_min, lat_avg, lat_max, hdrh):
except ValueError:
t_max = int(-1)
- return "/".join(str(tmp) for tmp in (t_min, t_avg, t_max, hdrh))
+ return u"/".join(str(tmp) for tmp in (t_min, t_avg, t_max, hdrh))
-def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
- port_1, latency, async_start=False, traffic_directions=2):
+def simple_burst(
+ profile_file, duration, framesize, rate, warmup_time, port_0, port_1,
+ latency, async_start=False, traffic_directions=2):
"""Send traffic and measure packet loss and latency.
Procedure:
@@ -102,17 +104,18 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
total_sent = 0
lost_a = 0
lost_b = 0
- lat_a = "-1/-1/-1/"
- lat_b = "-1/-1/-1/"
+ lat_a = u"-1/-1/-1/"
+ lat_b = u"-1/-1/-1/"
# Read the profile:
try:
- print("### Profile file:\n{}".format(profile_file))
- profile = STLProfile.load(profile_file, direction=0, port_id=0,
- framesize=framesize)
+ print(f"### Profile file:\n{profile_file}")
+ profile = STLProfile.load(
+ profile_file, direction=0, port_id=0, framesize=framesize
+ )
streams = profile.get_streams()
except STLError as err:
- print("Error while loading profile '{0}' {1}".format(profile_file, err))
+ print(f"Error while loading profile '{profile_file}' {err!r}")
sys.exit(1)
try:
@@ -124,7 +127,7 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
client.reset(ports=[port_0, port_1])
client.remove_all_streams(ports=[port_0, port_1])
- if "macsrc" in profile_file:
+ if u"macsrc" in profile_file:
client.set_port_attr(ports=[port_0, port_1], promiscuous=True)
if isinstance(framesize, int):
client.add_streams(streams[0], ports=[port_0])
@@ -144,7 +147,7 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
latency = False
except STLError:
# Disable latency if NIC does not support requested stream type
- print("##### FAILED to add latency streams #####")
+ print(u"##### FAILED to add latency streams #####")
latency = False
ports = [port_0]
if traffic_directions > 1:
@@ -167,18 +170,16 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
# Read the stats after the test:
stats = client.get_stats()
- print("##### Warmup statistics #####")
- print(json.dumps(stats, indent=4, separators=(',', ': ')))
+ print(u"##### Warmup statistics #####")
+ print(json.dumps(stats, indent=4, separators=(u",", u": ")))
- lost_a = stats[port_0]["opackets"] - stats[port_1]["ipackets"]
+ lost_a = stats[port_0][u"opackets"] - stats[port_1][u"ipackets"]
if traffic_directions > 1:
- lost_b = stats[port_1]["opackets"] - stats[port_0]["ipackets"]
+ lost_b = stats[port_1][u"opackets"] - stats[port_0][u"ipackets"]
- print("\npackets lost from {p_0} --> {p_1}: {v} pkts".format(
- p_0=port_0, p_1=port_1, v=lost_a))
+ print(f"\npackets lost from {port_0} --> {port_1}: {lost_a} pkts")
if traffic_directions > 1:
- print("packets lost from {p_1} --> {p_0}: {v} pkts".format(
- p_0=port_0, p_1=port_1, v=lost_b))
+ print(f"packets lost from {port_1} --> {port_0}: {lost_b} pkts")
# Clear the stats before injecting:
client.clear_stats()
@@ -191,10 +192,10 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
if async_start:
# For async stop, we need to export the current snapshot.
xsnap0 = client.ports[0].get_xstats().reference_stats
- print("Xstats snapshot 0: {s!r}".format(s=xsnap0))
+ print(f"Xstats snapshot 0: {xsnap0!r}")
if traffic_directions > 1:
xsnap1 = client.ports[1].get_xstats().reference_stats
- print("Xstats snapshot 1: {s!r}".format(s=xsnap1))
+ print(f"Xstats snapshot 1: {xsnap1!r}")
else:
# Block until done:
client.wait_on_traffic(ports=ports, timeout=duration+30)
@@ -206,38 +207,36 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
# Read the stats after the test
stats = client.get_stats()
- print("##### Statistics #####")
- print(json.dumps(stats, indent=4, separators=(',', ': ')))
+ print(u"##### Statistics #####")
+ print(json.dumps(stats, indent=4, separators=(u",", u": ")))
- lost_a = stats[port_0]["opackets"] - stats[port_1]["ipackets"]
+ lost_a = stats[port_0][u"opackets"] - stats[port_1][u"ipackets"]
if traffic_directions > 1:
- lost_b = stats[port_1]["opackets"] - stats[port_0]["ipackets"]
+ lost_b = stats[port_1][u"opackets"] - stats[port_0][u"ipackets"]
# Stats index is not a port number, but "pgid".
# TODO: Find out what "pgid" means.
if latency:
- lat_obj = stats["latency"][0]["latency"]
+ lat_obj = stats[u"latency"][0][u"latency"]
lat_a = fmt_latency(
- str(lat_obj["total_min"]), str(lat_obj["average"]),
- str(lat_obj["total_max"]), str(lat_obj["hdrh"]))
+ str(lat_obj[u"total_min"]), str(lat_obj[u"average"]),
+ str(lat_obj[u"total_max"]), str(lat_obj[u"hdrh"]))
if traffic_directions > 1:
- lat_obj = stats["latency"][1]["latency"]
+ lat_obj = stats[u"latency"][1][u"latency"]
lat_b = fmt_latency(
- str(lat_obj["total_min"]), str(lat_obj["average"]),
- str(lat_obj["total_max"]), str(lat_obj["hdrh"]))
+ str(lat_obj[u"total_min"]), str(lat_obj[u"average"]),
+ str(lat_obj[u"total_max"]), str(lat_obj[u"hdrh"]))
if traffic_directions > 1:
- total_sent = stats[0]["opackets"] + stats[1]["opackets"]
- total_rcvd = stats[0]["ipackets"] + stats[1]["ipackets"]
+ total_sent = stats[0][u"opackets"] + stats[1][u"opackets"]
+ total_rcvd = stats[0][u"ipackets"] + stats[1][u"ipackets"]
else:
- total_sent = stats[port_0]["opackets"]
- total_rcvd = stats[port_1]["ipackets"]
+ total_sent = stats[port_0][u"opackets"]
+ total_rcvd = stats[port_1][u"ipackets"]
- print("\npackets lost from {p_0} --> {p_1}: {v} pkts".format(
- p_0=port_0, p_1=port_1, v=lost_a))
+ print(f"\npackets lost from {port_0} --> {port_1}: {lost_a} pkts")
if traffic_directions > 1:
- print("packets lost from {p_1} --> {p_0}: {v} pkts".format(
- p_0=port_0, p_1=port_1, v=lost_b))
+ print(f"packets lost from {port_1} --> {port_0}: {lost_b} pkts")
except STLError as ex_error:
print(ex_error, file=sys.stderr)
@@ -250,11 +249,12 @@ def simple_burst(profile_file, duration, framesize, rate, warmup_time, port_0,
else:
if client:
client.disconnect()
- print("rate={0!r}, totalReceived={1}, totalSent={2}, "
- "frameLoss={3}, latencyStream0(usec)={4}, "
- "latencyStream1(usec)={5}, targetDuration={d!r}".
- format(rate, total_rcvd, total_sent, lost_a + lost_b,
- lat_a, lat_b, d=duration))
+ print(
+ f"rate={rate!r}, totalReceived={total_rcvd}, "
+ f"totalSent={total_sent}, frameLoss={lost_a + lost_b}, "
+ f"latencyStream0(usec)={lat_a}, latencyStream1(usec)={lat_b}, "
+ f"targetDuration={duration!r}"
+ )
def main():
@@ -264,44 +264,46 @@ def main():
function.
"""
parser = argparse.ArgumentParser()
- parser.add_argument("-p", "--profile",
- required=True,
- type=str,
- help="Python traffic profile.")
- parser.add_argument("-d", "--duration",
- required=True,
- type=float,
- help="Duration of traffic run.")
- parser.add_argument("-s", "--frame_size",
- required=True,
- help="Size of a Frame without padding and IPG.")
- parser.add_argument("-r", "--rate",
- required=True,
- help="Traffic rate with included units (%, pps).")
- parser.add_argument("-w", "--warmup_time",
- type=float,
- default=5.0,
- help="Traffic warm-up time in seconds, 0 = disable.")
- parser.add_argument("--port_0",
- required=True,
- type=int,
- help="Port 0 on the traffic generator.")
- parser.add_argument("--port_1",
- required=True,
- type=int,
- help="Port 1 on the traffic generator.")
- parser.add_argument("--async_start",
- action="store_true",
- default=False,
- help="Non-blocking call of the script.")
- parser.add_argument("--latency",
- action="store_true",
- default=False,
- help="Add latency stream.")
- parser.add_argument("--traffic_directions",
- type=int,
- default=2,
- help="Send bi- (2) or uni- (1) directional traffic.")
+ parser.add_argument(
+ u"-p", u"--profile", required=True, type=str,
+ help=u"Python traffic profile."
+ )
+ parser.add_argument(
+ u"-d", u"--duration", required=True, type=float,
+ help=u"Duration of traffic run."
+ )
+ parser.add_argument(
+ u"-s", u"--frame_size", required=True,
+ help=u"Size of a Frame without padding and IPG."
+ )
+ parser.add_argument(
+ u"-r", u"--rate", required=True,
+ help=u"Traffic rate with included units (%, pps)."
+ )
+ parser.add_argument(
+ u"-w", u"--warmup_time", type=float, default=5.0,
+ help=u"Traffic warm-up time in seconds, 0 = disable."
+ )
+ parser.add_argument(
+ u"--port_0", required=True, type=int,
+ help=u"Port 0 on the traffic generator."
+ )
+ parser.add_argument(
+ u"--port_1", required=True, type=int,
+ help=u"Port 1 on the traffic generator."
+ )
+ parser.add_argument(
+ u"--async_start", action=u"store_true", default=False,
+ help=u"Non-blocking call of the script."
+ )
+ parser.add_argument(
+ u"--latency", action=u"store_true", default=False,
+ help=u"Add latency stream."
+ )
+ parser.add_argument(
+ u"--traffic_directions", type=int, default=2,
+ help=u"Send bi- (2) or uni- (1) directional traffic."
+ )
args = parser.parse_args()
@@ -310,17 +312,13 @@ def main():
except ValueError:
framesize = args.frame_size
- simple_burst(profile_file=args.profile,
- duration=args.duration,
- framesize=framesize,
- rate=args.rate,
- warmup_time=args.warmup_time,
- port_0=args.port_0,
- port_1=args.port_1,
- latency=args.latency,
- async_start=args.async_start,
- traffic_directions=args.traffic_directions)
+ simple_burst(
+ profile_file=args.profile, duration=args.duration, framesize=framesize,
+ rate=args.rate, warmup_time=args.warmup_time, port_0=args.port_0,
+ port_1=args.port_1, latency=args.latency, async_start=args.async_start,
+ traffic_directions=args.traffic_directions
+ )
-if __name__ == '__main__':
+if __name__ == u"__main__":
main()
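
Note: per-direction loss above is computed from per-port counters: packets
sent from one port minus packets received on the peer port. A minimal
sketch with hypothetical counter values:

    stats = {0: {u"opackets": 1000, u"ipackets": 990},
             1: {u"opackets": 1000, u"ipackets": 997}}
    lost_a = stats[0][u"opackets"] - stats[1][u"ipackets"]   # 0 --> 1: 3 pkts
    lost_b = stats[1][u"opackets"] - stats[0][u"ipackets"]   # 1 --> 0: 10 pkts
    assert (lost_a, lost_b) == (3, 10)
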
diff --git a/resources/tools/trex/trex_stateless_stop.py b/resources/tools/trex/trex_stateless_stop.py
index 3fc599a5a4..45f4838e9d 100755
--- a/resources/tools/trex/trex_stateless_stop.py
+++ b/resources/tools/trex/trex_stateless_stop.py
@@ -28,12 +28,14 @@ Functionality:
"""
import argparse
-from collections import OrderedDict # Needed to parse xstats representation.
-import sys
import json
+import sys
-sys.path.insert(0, "/opt/trex-core-2.61/scripts/automation/"+\
- "trex_control_plane/interactive/")
+from collections import OrderedDict # Needed to parse xstats representation.
+
+sys.path.insert(
+ 0, u"/opt/trex-core-2.61/scripts/automation/trex_control_plane/interactive/"
+)
from trex.stl.api import *
@@ -41,9 +43,13 @@ def main():
"""Stop traffic if any is running. Report xstats."""
parser = argparse.ArgumentParser()
parser.add_argument(
- "--xstat0", type=str, default="", help="Reference xstat object if any.")
+ u"--xstat0", type=str, default=u"",
+ help=u"Reference xstat object if any."
+ )
parser.add_argument(
- "--xstat1", type=str, default="", help="Reference xstat object if any.")
+ u"--xstat1", type=str, default=u"",
+ help=u"Reference xstat object if any."
+ )
args = parser.parse_args()
client = STLClient()
@@ -71,26 +77,27 @@ def main():
finally:
client.disconnect()
- print("##### statistics port 0 #####")
- print(json.dumps(xstats0, indent=4, separators=(',', ': ')))
- print("##### statistics port 1 #####")
- print(json.dumps(xstats1, indent=4, separators=(',', ': ')))
+ print(u"##### statistics port 0 #####")
+ print(json.dumps(xstats0, indent=4, separators=(u",", u": ")))
+ print(u"##### statistics port 1 #####")
+ print(json.dumps(xstats1, indent=4, separators=(u",", u": ")))
- tx_0, rx_0 = xstats0["tx_good_packets"], xstats0["rx_good_packets"]
- tx_1, rx_1 = xstats1["tx_good_packets"], xstats1["rx_good_packets"]
+ tx_0, rx_0 = xstats0[u"tx_good_packets"], xstats0[u"rx_good_packets"]
+ tx_1, rx_1 = xstats1[u"tx_good_packets"], xstats1[u"rx_good_packets"]
lost_a, lost_b = tx_0 - rx_1, tx_1 - rx_0
- print("\npackets lost from 0 --> 1: {0} pkts".format(lost_a))
- print("packets lost from 1 --> 0: {0} pkts".format(lost_b))
+ print(f"\npackets lost from 0 --> 1: {lost_a} pkts")
+ print(f"packets lost from 1 --> 0: {lost_b} pkts")
total_rcvd, total_sent = rx_0 + rx_1, tx_0 + tx_1
total_lost = total_sent - total_rcvd
# TODO: Add latency.
print(
- "rate='unknown', totalReceived={rec}, totalSent={sen}, frameLoss={los},"
- " latencyStream0(usec)=-1/-1/-1, latencyStream1(usec)=-1/-1/-1,"
- " targetDuration='manual'".format(
- rec=total_rcvd, sen=total_sent, los=total_lost))
+ f"rate='unknown', totalReceived={total_rcvd}, totalSent={total_sent}, "
+ f"frameLoss={total_lost}, latencyStream0(usec)=-1/-1/-1, "
+ f"latencyStream1(usec)=-1/-1/-1, targetDuration='manual'"
+ )
+
-if __name__ == "__main__":
+if __name__ == u"__main__":
main()
diff --git a/resources/tools/wrk/wrk.py b/resources/tools/wrk/wrk.py
index 84d17ee7a1..381e9b9da0 100644
--- a/resources/tools/wrk/wrk.py
+++ b/resources/tools/wrk/wrk.py
@@ -66,18 +66,18 @@ def check_wrk(tg_node):
    command is not available.
"""
- if tg_node['type'] != NodeType.TG:
- raise RuntimeError('Node type is not a TG.')
+ if tg_node[u"type"] != NodeType.TG:
+ raise RuntimeError(u"Node type is not a TG.")
ssh = SSH()
ssh.connect(tg_node)
ret, _, _ = ssh.exec_command(
- "sudo -E "
- "sh -c '{0}/resources/tools/wrk/wrk_utils.sh installed'".
- format(Constants.REMOTE_FW_DIR))
+ f"sudo -E sh -c '{Constants.REMOTE_FW_DIR}/resources/tools/"
+ f"wrk/wrk_utils.sh installed'"
+ )
if int(ret) != 0:
- raise RuntimeError('WRK is not installed on TG node.')
+ raise RuntimeError(u"WRK is not installed on TG node.")
def run_wrk(tg_node, profile_name, tg_numa, test_type, warm_up=False):
@@ -98,102 +98,103 @@ def run_wrk(tg_node, profile_name, tg_numa, test_type, warm_up=False):
:raises: RuntimeError if node type is not a TG.
"""
- if tg_node['type'] != NodeType.TG:
- raise RuntimeError('Node type is not a TG.')
+ if tg_node[u"type"] != NodeType.TG:
+ raise RuntimeError(u"Node type is not a TG.")
# Parse and validate the profile
- profile_path = ("resources/traffic_profiles/wrk/{0}.yaml".
- format(profile_name))
+ profile_path = f"resources/traffic_profiles/wrk/{profile_name}.yaml"
profile = WrkTrafficProfile(profile_path).traffic_profile
cores = CpuUtils.cpu_list_per_node(tg_node, tg_numa)
- first_cpu = cores[profile["first-cpu"]]
+ first_cpu = cores[profile[u"first-cpu"]]
- if len(profile["urls"]) == 1 and profile["cpus"] == 1:
+ if len(profile[u"urls"]) == 1 and profile[u"cpus"] == 1:
params = [
- "traffic_1_url_1_core",
+ u"traffic_1_url_1_core",
str(first_cpu),
- str(profile["nr-of-threads"]),
- str(profile["nr-of-connections"]),
- "{0}s".format(profile["duration"]),
- "'{0}'".format(profile["header"]),
- str(profile["timeout"]),
- str(profile["script"]),
- str(profile["latency"]),
- "'{0}'".format(" ".join(profile["urls"]))
+ str(profile[u"nr-of-threads"]),
+ str(profile[u"nr-of-connections"]),
+ f"{profile[u'duration']}s",
+ f"'{profile[u'header']}'",
+ str(profile[u"timeout"]),
+ str(profile[u"script"]),
+ str(profile[u"latency"]),
+ f"'{u' '.join(profile[u'urls'])}'"
]
if warm_up:
warm_up_params = deepcopy(params)
- warm_up_params[4] = "10s"
- elif len(profile["urls"]) == profile["cpus"]:
+ warm_up_params[4] = u"10s"
+ elif len(profile[u"urls"]) == profile[u"cpus"]:
params = [
- "traffic_n_urls_n_cores",
+ u"traffic_n_urls_n_cores",
str(first_cpu),
- str(profile["nr-of-threads"]),
- str(profile["nr-of-connections"]),
- "{0}s".format(profile["duration"]),
- "'{0}'".format(profile["header"]),
- str(profile["timeout"]),
- str(profile["script"]),
- str(profile["latency"]),
- "'{0}'".format(" ".join(profile["urls"]))
+ str(profile[u"nr-of-threads"]),
+ str(profile[u"nr-of-connections"]),
+ f"{profile[u'duration']}s",
+ f"'{profile[u'header']}'",
+ str(profile[u"timeout"]),
+ str(profile[u"script"]),
+ str(profile[u"latency"]),
+ f"'{u' '.join(profile[u'urls'])}'"
]
if warm_up:
warm_up_params = deepcopy(params)
- warm_up_params[4] = "10s"
+ warm_up_params[4] = u"10s"
else:
params = [
- "traffic_n_urls_m_cores",
+ u"traffic_n_urls_m_cores",
str(first_cpu),
- str(profile["cpus"] / len(profile["urls"])),
- str(profile["nr-of-threads"]),
- str(profile["nr-of-connections"]),
- "{0}s".format(profile["duration"]),
- "'{0}'".format(profile["header"]),
- str(profile["timeout"]),
- str(profile["script"]),
- str(profile["latency"]),
- "'{0}'".format(" ".join(profile["urls"]))
+ str(profile[u"cpus"] // len(profile[u"urls"])),
+ str(profile[u"nr-of-threads"]),
+ str(profile[u"nr-of-connections"]),
+ f"{profile[u'duration']}s",
+ f"'{profile[u'header']}'",
+ str(profile[u"timeout"]),
+ str(profile[u"script"]),
+ str(profile[u"latency"]),
+ f"'{u' '.join(profile[u'urls'])}'"
]
if warm_up:
warm_up_params = deepcopy(params)
- warm_up_params[5] = "10s"
+ warm_up_params[5] = u"10s"
- args = " ".join(params)
+ args = u" ".join(params)
ssh = SSH()
ssh.connect(tg_node)
if warm_up:
- warm_up_args = " ".join(warm_up_params)
+ warm_up_args = u" ".join(warm_up_params)
ret, _, _ = ssh.exec_command(
- "{0}/resources/tools/wrk/wrk_utils.sh {1}".
- format(Constants.REMOTE_FW_DIR, warm_up_args), timeout=1800)
+ f"{Constants.REMOTE_FW_DIR}/resources/tools/wrk/wrk_utils.sh "
+ f"{warm_up_args}", timeout=1800
+ )
if int(ret) != 0:
- raise RuntimeError('wrk runtime error.')
+ raise RuntimeError(u"wrk runtime error.")
sleep(60)
ret, stdout, _ = ssh.exec_command(
- "{0}/resources/tools/wrk/wrk_utils.sh {1}".
- format(Constants.REMOTE_FW_DIR, args), timeout=1800)
+ f"{Constants.REMOTE_FW_DIR}/resources/tools/wrk/wrk_utils.sh {args}",
+ timeout=1800
+ )
if int(ret) != 0:
- raise RuntimeError('wrk runtime error.')
+ raise RuntimeError(u"wrk runtime error.")
stats = _parse_wrk_output(stdout)
- log_msg = "\nMeasured values:\n"
- if test_type == "cps":
- log_msg += "Connections/sec: Avg / Stdev / Max / +/- Stdev\n"
- for item in stats["rps-stats-lst"]:
- log_msg += "{0} / {1} / {2} / {3}\n".format(*item)
- log_msg += "Total cps: {0}cps\n".format(stats["rps-sum"])
- elif test_type == "rps":
- log_msg += "Requests/sec: Avg / Stdev / Max / +/- Stdev\n"
- for item in stats["rps-stats-lst"]:
- log_msg += "{0} / {1} / {2} / {3}\n".format(*item)
- log_msg += "Total rps: {0}rps\n".format(stats["rps-sum"])
- elif test_type == "bw":
- log_msg += "Transfer/sec: {0}Bps".format(stats["bw-sum"])
+ log_msg = u"\nMeasured values:\n"
+ if test_type == u"cps":
+ log_msg += u"Connections/sec: Avg / Stdev / Max / +/- Stdev\n"
+ for item in stats[u"rps-stats-lst"]:
+ log_msg += f"{0} / {1} / {2} / {3}\n".format(*item)
+ log_msg += f"Total cps: {stats[u'rps-sum']}cps\n"
+ elif test_type == u"rps":
+ log_msg += u"Requests/sec: Avg / Stdev / Max / +/- Stdev\n"
+ for item in stats[u"rps-stats-lst"]:
+ log_msg += f"{0} / {1} / {2} / {3}\n".format(*item)
+ log_msg += f"Total rps: {stats[u'rps-sum']}rps\n"
+ elif test_type == u"bw":
+ log_msg += f"Transfer/sec: {stats[u'bw-sum']}Bps"
logger.info(log_msg)
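One conversion above needed more than a mechanical rewrite: `f"{0} / {1} / {2} / {3}\n".format(*item)` is an f-string, so `{0}`..`{3}` are evaluated immediately as the integer literals 0..3 and the trailing `.format(*item)` has no placeholders left to fill. The corrected lines in the hunk index the tuple directly. A minimal sketch of the failure mode:

    item = (4.50, 0.30, 5.10, 99.0)

    # Broken: the f-prefix interpolates the literals first, so every
    # stats line would log "0 / 1 / 2 / 3" regardless of item.
    broken = f"{0} / {1} / {2} / {3}\n".format(*item)
    assert broken == "0 / 1 / 2 / 3\n"

    # Fixed: index the tuple inside the f-string itself.
    fixed = f"{item[0]} / {item[1]} / {item[2]} / {item[3]}\n"
    assert fixed == "4.5 / 0.3 / 5.1 / 99.0\n"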
@@ -210,47 +211,52 @@ def _parse_wrk_output(msg):
:raises: WrkError if the message does not include the results.
"""
- if "Thread Stats" not in msg:
- raise WrkError("The output of wrk does not include the results.")
+ if u"Thread Stats" not in msg:
+ raise WrkError(u"The output of wrk does not include the results.")
msg_lst = msg.splitlines(False)
stats = {
- "latency-dist-lst": list(),
- "latency-stats-lst": list(),
- "rps-stats-lst": list(),
- "rps-lst": list(),
- "bw-lst": list(),
- "rps-sum": 0,
- "bw-sum": None
+ u"latency-dist-lst": list(),
+ u"latency-stats-lst": list(),
+ u"rps-stats-lst": list(),
+ u"rps-lst": list(),
+ u"bw-lst": list(),
+ u"rps-sum": 0,
+ u"bw-sum": None
}
for line in msg_lst:
- if "Latency Distribution" in line:
+ if u"Latency Distribution" in line:
# Latency distribution - 50%, 75%, 90%, 99%
pass
- elif "Latency" in line:
+ elif u"Latency" in line:
# Latency statistics - Avg, Stdev, Max, +/- Stdev
pass
- elif "Req/Sec" in line:
+ elif u"Req/Sec" in line:
# rps statistics - Avg, Stdev, Max, +/- Stdev
- stats["rps-stats-lst"].append((
- _evaluate_number(re.search(REGEX_RPS_STATS, line).group(1)),
- _evaluate_number(re.search(REGEX_RPS_STATS, line).group(2)),
- _evaluate_number(re.search(REGEX_RPS_STATS, line).group(3)),
- _evaluate_number(re.search(REGEX_RPS_STATS, line).group(4))))
- elif "Requests/sec:" in line:
+ stats[u"rps-stats-lst"].append(
+ (
+ _evaluate_number(re.search(REGEX_RPS_STATS, line).group(1)),
+ _evaluate_number(re.search(REGEX_RPS_STATS, line).group(2)),
+ _evaluate_number(re.search(REGEX_RPS_STATS, line).group(3)),
+ _evaluate_number(re.search(REGEX_RPS_STATS, line).group(4))
+ )
+ )
+ elif u"Requests/sec:" in line:
# rps (cps)
- stats["rps-lst"].append(
- _evaluate_number(re.search(REGEX_RPS, line).group(1)))
- elif "Transfer/sec:" in line:
+ stats[u"rps-lst"].append(
+ _evaluate_number(re.search(REGEX_RPS, line).group(1))
+ )
+ elif u"Transfer/sec:" in line:
# BW
- stats["bw-lst"].append(
- _evaluate_number(re.search(REGEX_BW, line).group(1)))
+ stats[u"bw-lst"].append(
+ _evaluate_number(re.search(REGEX_BW, line).group(1))
+ )
- for item in stats["rps-stats-lst"]:
- stats["rps-sum"] += item[0]
- stats["bw-sum"] = sum(stats["bw-lst"])
+ for item in stats[u"rps-stats-lst"]:
+ stats[u"rps-sum"] += item[0]
+ stats[u"bw-sum"] = sum(stats[u"bw-lst"])
return stats
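`_parse_wrk_output` calls `re.search(REGEX_RPS_STATS, line)` four times per matching line; searching once and unpacking the groups is equivalent and avoids rescanning. A hedged sketch with a simplified stand-in pattern (the real `REGEX_RPS_STATS` constant is defined elsewhere in this module and captures the raw tokens, suffixes included):

    import re

    # Simplified stand-in for REGEX_RPS_STATS, for illustration only.
    REGEX_RPS_STATS = re.compile(
        r"Req/Sec\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)"
    )

    line = "    Req/Sec     4.50      0.30      5.10     99.00"
    match = REGEX_RPS_STATS.search(line)
    if match:
        # One search, four groups, instead of four identical searches.
        avg, stdev, peak, within = match.groups()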
@@ -270,23 +276,24 @@ def _evaluate_number(num):
try:
val_num = float(val.group(1))
except ValueError:
- raise WrkError("The output of wrk does not include the results "
- "or the format of results has changed.")
+ raise WrkError(
+ u"The output of wrk does not include the results or the format "
+ u"of results has changed."
+ )
val_mul = val.group(2).lower()
if val_mul:
- if "k" in val_mul:
+ if u"k" in val_mul:
val_num *= 1000
- elif "m" in val_mul:
+ elif u"m" in val_mul:
val_num *= 1000000
- elif "g" in val_mul:
+ elif u"g" in val_mul:
val_num *= 1000000000
- elif "b" in val_mul:
+ elif u"b" in val_mul:
pass
- elif "%" in val_mul:
+ elif u"%" in val_mul:
pass
- elif "" in val_mul:
+ elif u"" in val_mul:
pass
else:
- raise WrkError("The multiplicand {0} is not defined.".
- format(val_mul))
+ raise WrkError(f"The multiplicand {val_mul} is not defined.")
return val_num
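The suffix chain in `_evaluate_number` can also be written as a lookup table, which keeps the error branch reachable (as converted above, `u"" in val_mul` is always true for a non-empty string, so the final `else` never fires). A sketch under the assumption that the suffix is a single character; `scale` is a hypothetical helper, not part of this module:

    from resources.tools.wrk.wrk_errors import WrkError

    # Multiplier per suffix; u"b" and u"%" are deliberate no-ops,
    # matching the if/elif chain above.
    MULTIPLIERS = {u"k": 1e3, u"m": 1e6, u"g": 1e9, u"b": 1.0, u"%": 1.0}

    def scale(val_num, val_mul):
        """Scale a parsed wrk number by its suffix, e.g. 4.5 + 'k' -> 4500.0."""
        if not val_mul:
            return val_num
        try:
            return val_num * MULTIPLIERS[val_mul.lower()]
        except KeyError:
            raise WrkError(f"The multiplicand {val_mul} is not defined.")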
diff --git a/resources/tools/wrk/wrk_errors.py b/resources/tools/wrk/wrk_errors.py
index 3173dd4223..2cdd76815a 100644
--- a/resources/tools/wrk/wrk_errors.py
+++ b/resources/tools/wrk/wrk_errors.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Cisco and/or its affiliates.
+# Copyright (c) 2019 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -29,7 +29,7 @@ class WrkError(Exception):
- relevant data if there are any collected (optional parameter details).
"""
- def __init__(self, msg, details=''):
+ def __init__(self, msg, details=u""):
"""Sets the exception message and the level.
:param msg: Short description of the encountered problem.
diff --git a/resources/tools/wrk/wrk_traffic_profile_parser.py b/resources/tools/wrk/wrk_traffic_profile_parser.py
index 1d40aa3d8a..1994b6195d 100644
--- a/resources/tools/wrk/wrk_traffic_profile_parser.py
+++ b/resources/tools/wrk/wrk_traffic_profile_parser.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Cisco and / or its affiliates.
+# Copyright (c) 2019 Cisco and / or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
@@ -26,22 +26,26 @@ from robot.api import logger
from resources.tools.wrk.wrk_errors import WrkError
-class WrkTrafficProfile(object):
+class WrkTrafficProfile:
"""The wrk traffic profile.
"""
- MANDATORY_PARAMS = ("urls",
- "first-cpu",
- "cpus",
- "duration",
- "nr-of-threads",
- "nr-of-connections")
-
- INTEGER_PARAMS = (("cpus", 1),
- ("first-cpu", 0),
- ("duration", 1),
- ("nr-of-threads", 1),
- ("nr-of-connections", 1))
+ MANDATORY_PARAMS = (
+ u"urls",
+ u"first-cpu",
+ u"cpus",
+ u"duration",
+ u"nr-of-threads",
+ u"nr-of-connections"
+ )
+
+ INTEGER_PARAMS = (
+ (u"cpus", 1),
+ (u"first-cpu", 0),
+ (u"duration", 1),
+ (u"nr-of-threads", 1),
+ (u"nr-of-connections", 1)
+ )
def __init__(self, profile_name):
"""Read the traffic profile from the yaml file.
@@ -57,29 +61,34 @@ class WrkTrafficProfile(object):
self.profile_name = profile_name
try:
- with open(self.profile_name, 'r') as profile_file:
+            with open(self.profile_name, u"r") as profile_file:
self.traffic_profile = load(profile_file)
except IOError as err:
- raise WrkError(msg="An error occurred while opening the file '{0}'."
- .format(self.profile_name),
- details=str(err))
+ raise WrkError(
+ msg=f"An error occurred while opening the file "
+ f"'{self.profile_name}'.", details=str(err)
+ )
except YAMLError as err:
- raise WrkError(msg="An error occurred while parsing the traffic "
- "profile '{0}'.".format(self.profile_name),
- details=str(err))
+ raise WrkError(
+ msg=f"An error occurred while parsing the traffic profile "
+ f"'{self.profile_name}'.", details=str(err)
+ )
self._validate_traffic_profile()
if self.traffic_profile:
- logger.debug("\nThe wrk traffic profile '{0}' is valid.\n".
- format(self.profile_name))
- logger.debug("wrk traffic profile '{0}':".format(self.profile_name))
+ logger.debug(
+ f"\nThe wrk traffic profile '{self.profile_name}' is valid.\n"
+ )
+ logger.debug(f"wrk traffic profile '{self.profile_name}':")
logger.debug(pformat(self.traffic_profile))
else:
- logger.debug("\nThe wrk traffic profile '{0}' is invalid.\n".
- format(self.profile_name))
- raise WrkError("\nThe wrk traffic profile '{0}' is invalid.\n".
- format(self.profile_name))
+ logger.debug(
+ f"\nThe wrk traffic profile '{self.profile_name}' is invalid.\n"
+ )
+ raise WrkError(
+ f"\nThe wrk traffic profile '{self.profile_name}' is invalid.\n"
+ )
def __repr__(self):
return pformat(self.traffic_profile)
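Separately from the string conversion, `load(profile_file)` with no explicit `Loader` is deprecated in PyYAML 5.x and will construct arbitrary tags on untrusted input. A hedged sketch of the same read path using `safe_load`, which covers the plain mappings and lists the wrk profiles use; the profile path here is hypothetical:

    from yaml import YAMLError, safe_load

    from resources.tools.wrk.wrk_errors import WrkError

    profile_name = u"resources/traffic_profiles/wrk/example.yaml"  # hypothetical

    try:
        with open(profile_name, u"r") as profile_file:
            traffic_profile = safe_load(profile_file)
    except (IOError, YAMLError) as err:
        raise WrkError(
            msg=f"An error occurred while loading the profile "
            f"'{profile_name}'.", details=str(err)
        )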
@@ -94,8 +103,9 @@ class WrkTrafficProfile(object):
doc/wrk_lld.rst
"""
- logger.debug("\nValidating the wrk traffic profile '{0}'...\n".
- format(self.profile_name))
+ logger.debug(
+ f"\nValidating the wrk traffic profile '{self.profile_name}'...\n"
+ )
if not (self._validate_mandatory_structure()
and self._validate_mandatory_values()
and self._validate_optional_values()
@@ -110,14 +120,14 @@ class WrkTrafficProfile(object):
"""
# Level 1: Check if the profile is a dictionary:
if not isinstance(self.traffic_profile, dict):
- logger.error("The wrk traffic profile must be a dictionary.")
+ logger.error(u"The wrk traffic profile must be a dictionary.")
return False
# Level 2: Check if all mandatory parameters are present:
is_valid = True
for param in self.MANDATORY_PARAMS:
if self.traffic_profile.get(param, None) is None:
- logger.error("The parameter '{0}' in mandatory.".format(param))
+ logger.error(f"The parameter '{param}' in mandatory.")
is_valid = False
return is_valid
@@ -129,8 +139,8 @@ class WrkTrafficProfile(object):
"""
# Level 3: Mandatory params: Check if urls is a list:
is_valid = True
- if not isinstance(self.traffic_profile["urls"], list):
- logger.error("The parameter 'urls' must be a list.")
+ if not isinstance(self.traffic_profile[u"urls"], list):
+ logger.error(u"The parameter 'urls' must be a list.")
is_valid = False
# Level 3: Mandatory params: Check if integers are not below minimum
@@ -147,60 +157,64 @@ class WrkTrafficProfile(object):
"""
is_valid = True
# Level 4: Optional params: Check if script is present:
- script = self.traffic_profile.get("script", None)
+ script = self.traffic_profile.get(u"script", None)
if script is not None:
if not isinstance(script, str):
- logger.error("The path to LuaJIT script in invalid")
+ logger.error(u"The path to LuaJIT script in invalid")
is_valid = False
else:
if not isfile(script):
- logger.error("The file '{0}' does not exist.".
- format(script))
+ logger.error(f"The file '{script}' does not exist.")
is_valid = False
else:
- self.traffic_profile["script"] = None
- logger.debug("The optional parameter 'LuaJIT script' is not "
- "defined. No problem.")
+ self.traffic_profile[u"script"] = None
+ logger.debug(
+ u"The optional parameter 'LuaJIT script' is not defined. "
+ u"No problem."
+ )
# Level 4: Optional params: Check if header is present:
- header = self.traffic_profile.get("header", None)
+ header = self.traffic_profile.get(u"header", None)
if header is not None:
if isinstance(header, dict):
- header = ", ".join("{0}: {1}".format(*item)
- for item in header.items())
- self.traffic_profile["header"] = header
+ header = u", ".join(
+ f"{0}: {1}".format(*item) for item in header.items()
+ )
+ self.traffic_profile[u"header"] = header
elif not isinstance(header, str):
- logger.error("The parameter 'header' type is not valid.")
+ logger.error(u"The parameter 'header' type is not valid.")
is_valid = False
if not header:
- logger.error("The parameter 'header' is defined but "
- "empty.")
+ logger.error(u"The parameter 'header' is defined but empty.")
is_valid = False
else:
- self.traffic_profile["header"] = None
- logger.debug("The optional parameter 'header' is not defined. "
- "No problem.")
+ self.traffic_profile[u"header"] = None
+ logger.debug(
+ u"The optional parameter 'header' is not defined. No problem."
+ )
# Level 4: Optional params: Check if latency is present:
- latency = self.traffic_profile.get("latency", None)
+ latency = self.traffic_profile.get(u"latency", None)
if latency is not None:
if not isinstance(latency, bool):
- logger.error("The parameter 'latency' must be boolean.")
+ logger.error(u"The parameter 'latency' must be boolean.")
is_valid = False
else:
- self.traffic_profile["latency"] = False
- logger.debug("The optional parameter 'latency' is not defined. "
- "No problem.")
+ self.traffic_profile[u"latency"] = False
+ logger.debug(
+ u"The optional parameter 'latency' is not defined. No problem."
+ )
# Level 4: Optional params: Check if timeout is present:
- if 'timeout' in self.traffic_profile:
- if not self._validate_int_param('timeout', 1):
+ if u"timeout" in self.traffic_profile:
+ if not self._validate_int_param(u"timeout", 1):
is_valid = False
else:
- self.traffic_profile["timeout"] = None
- logger.debug("The optional parameter 'timeout' is not defined. "
- "No problem.")
+ self.traffic_profile[u"timeout"] = None
+ logger.debug(
+ u"The optional parameter 'timeout' is not defined. No problem."
+ )
return is_valid
@@ -211,9 +225,10 @@ class WrkTrafficProfile(object):
:rtype: bool
"""
# Level 5: Check urls and cpus:
- if self.traffic_profile["cpus"] % len(self.traffic_profile["urls"]):
- logger.error("The number of CPUs must be a multiple of the "
- "number of URLs.")
+ if self.traffic_profile[u"cpus"] % len(self.traffic_profile[u"urls"]):
+ logger.error(
+ u"The number of CPUs must be a multiple of the number of URLs."
+ )
return False
return True
@@ -229,7 +244,7 @@ class WrkTrafficProfile(object):
:rtype: bool
"""
value = self._traffic_profile[param]
- if isinstance(value, (str, unicode)):
+ if isinstance(value, str):
if value.isdigit():
value = int(value)
else:
@@ -237,8 +252,9 @@ class WrkTrafficProfile(object):
if isinstance(value, int) and value >= minimum:
self.traffic_profile[param] = value
return True
- logger.error("The parameter '{param}' must be an integer and "
- "at least {minimum}".format(param=param, minimum=minimum))
+ logger.error(
+ f"The parameter '{param}' must be an integer and at least {minimum}"
+ )
return False
@property