author     Tibor Frank <tifrank@cisco.com>    2018-05-14 14:35:46 +0200
committer  Tibor Frank <tifrank@cisco.com>    2018-05-14 12:37:57 +0000
commit     3cd4d56c159f3fd5aa2fe96aeb98201027810d7a (patch)
tree       17fa01edcbcb63996064993f52b77686e8bd37bd
parent     a0c4696596ddbbd0914b1fdd04c2307922b1f7da (diff)
CSIT-1078: Optimize input data files download and processing
Change-Id: I973238d98d549555c0d43c91f9fd96a9209065ac
Signed-off-by: Tibor Frank <tifrank@cisco.com>
(cherry picked from commit 9964155b56020914494f7341c2d1162f2ac9d720)
-rw-r--r--  resources/tools/presentation/conf.py                  |   3
-rw-r--r--  resources/tools/presentation/input_data_parser.py     |  21
-rw-r--r--  resources/tools/presentation/pal.py                   |  18
-rw-r--r--  resources/tools/report_gen/conf.py                    | 222
-rw-r--r--  resources/tools/report_gen/fdio.svg                   |  25
-rw-r--r--  resources/tools/report_gen/requirements.txt           |   6
-rwxr-xr-x  resources/tools/report_gen/run_improvments_tables.py  | 213
-rw-r--r--  resources/tools/report_gen/run_plot.py                | 276
-rw-r--r--  resources/tools/report_gen/run_report.cfg             |  69
-rwxr-xr-x  resources/tools/report_gen/run_report.sh              | 627
-rwxr-xr-x  resources/tools/report_gen/run_robot_data.py          | 485
-rwxr-xr-x  resources/tools/report_gen/run_robot_json_data.py     | 331
-rwxr-xr-x  resources/tools/report_gen/run_robot_teardown_data.py | 635
13 files changed, 14 insertions, 2917 deletions
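
The optimization itself is small: instead of re-parsing every output.xml with ElementTree just to read its "generated" attribute (the removed get_timestamp path), the timestamp is now taken from msg.timestamp while the Robot result is already being visited. Below is a minimal, self-contained sketch of the two approaches, assuming the standard robot.api result model; it is not the PAL code itself, and the "output.xml" path and helper names are illustrative only.

# Sketch only: contrasts the old extra-XML-pass approach with the new
# visit-time approach used by the diff below. Names are illustrative.

import xml.etree.ElementTree as ET

from robot.api import ExecutionResult, ResultVisitor


def generated_via_reparse(file_name):
    """Old approach: parse the whole XML a second time just to read the
    root 'generated' attribute (what the removed get_timestamp branch did)."""
    return ET.parse(file_name).getroot().attrib["generated"]


class TimestampVisitor(ResultVisitor):
    """New approach: pick the timestamp up from a message while the result
    is already being visited, so no extra pass over output.xml is needed."""

    def __init__(self):
        self.generated = None

    def visit_message(self, msg):
        # Remember the first message timestamp seen during the visit.
        if self.generated is None:
            self.generated = msg.timestamp


def generated_via_visitor(file_name):
    visitor = TimestampVisitor()
    ExecutionResult(file_name).visit(visitor)
    return visitor.generated


if __name__ == "__main__":
    print(generated_via_reparse("output.xml"))   # second full XML parse
    print(generated_via_visitor("output.xml"))   # reuses the existing visit

In the actual change, ExecutionChecker records msg.timestamp when it visits the VPP version message, which is why pal.py can now call download_and_parse_data() without a get_timestamp flag for both the report and CPTA outputs.
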
diff --git a/resources/tools/presentation/conf.py b/resources/tools/presentation/conf.py
index 58dd56163a..84890d9c46 100644
--- a/resources/tools/presentation/conf.py
+++ b/resources/tools/presentation/conf.py
@@ -73,7 +73,8 @@ rst_epilog = """
 .. _TRex intallation: https://git.fd.io/csit/tree/resources/tools/trex/trex_installer.sh?h={release}
 .. _TRex driver: https://git.fd.io/csit/tree/resources/tools/trex/trex_stateless_profile.py?h={release}
 .. _VIRL topologies directory: https://git.fd.io/csit/tree/resources/tools/virl/topologies/?h={release}
-.. _VIRL images lists: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/ubuntu/lists/?h={release}
+.. _VIRL ubuntu images lists: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/ubuntu/lists/?h={release}
+.. _VIRL centos images lists: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/centos/lists/?h={release}
 .. _VIRL nested: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/nested/?h={release}
 .. _CSIT Honeycomb Functional Tests Documentation: https://docs.fd.io/csit/{release}/doc/tests.vpp.func.honeycomb.html
 .. _CSIT Honeycomb Performance Tests Documentation: https://docs.fd.io/csit/{release}/doc/tests.vpp.perf.honeycomb.html
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index e12e2fb8df..d0f9eed9a4 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -251,6 +251,7 @@ class ExecutionChecker(ResultVisitor):
             self._version = str(re.search(self.REGEX_VERSION, msg.message).
                                 group(2))
             self._data["metadata"]["version"] = self._version
+            self._data["metadata"]["generated"] = msg.timestamp
             self._msg_type = None
 
     def _get_vat_history(self, msg):
@@ -748,17 +749,14 @@ class InputData(object):
         return self.data[job][build]["tests"]
 
     @staticmethod
-    def _parse_tests(job, build, get_timestamp=False):
+    def _parse_tests(job, build):
         """Process data from robot output.xml file and return JSON structured
         data.
 
         :param job: The name of job which build output data will be processed.
         :param build: The build which output data will be processed.
-        :param get_timestamp: If True, timestamp is read form the xml source
-            file.
         :type job: str
         :type build: dict
-        :type get_timestamp: bool
         :returns: JSON data structure.
         :rtype: dict
         """
@@ -767,10 +765,6 @@ class InputData(object):
             "job": job,
             "build": build
         }
-        if get_timestamp:
-            tree = ET.parse(build["file-name"])
-            root = tree.getroot()
-            metadata["generated"] = root.attrib["generated"]
 
         with open(build["file-name"], 'r') as data_file:
             try:
@@ -784,20 +778,16 @@ class InputData(object):
         return checker.data
 
-    def download_and_parse_data(self, get_timestamp=False):
+    def download_and_parse_data(self):
         """Download the input data files, parse input data from input files
         and store in pandas' Series.
-
-        :param get_timestamp: If True, timestamp is read form the xml source
-            file.
-        :type get_timestamp: bool
         """
 
         logging.info("Downloading and parsing input files ...")
 
         job_data = dict()
         for job, builds in self._cfg.builds.items():
-            logging.info("  Processing data from the job '{0}' ...'".
+            logging.info("  Processing data from the job '{0}' ...".
                          format(job))
             builds_data = dict()
             for build in builds:
@@ -813,8 +803,7 @@ class InputData(object):
                 logging.info("  Processing data from the build '{0}' ...".
                              format(build["build"]))
-                data = InputData._parse_tests(job, build,
-                                              get_timestamp=get_timestamp)
+                data = InputData._parse_tests(job, build)
                 if data is None:
                     logging.error("Input data file from the job '{job}', build "
                                   "'{build}' is damaged. Skipped.".
diff --git a/resources/tools/presentation/pal.py b/resources/tools/presentation/pal.py
index 2268801407..1ccefd3b43 100644
--- a/resources/tools/presentation/pal.py
+++ b/resources/tools/presentation/pal.py
@@ -96,11 +96,7 @@ def main():
     prepare_static_content(spec)
 
     data = InputData(spec)
-
-    if spec.output["output"] == "report":
-        data.download_and_parse_data(get_timestamp=False)
-    elif spec.output["output"] == "CPTA":
-        data.download_and_parse_data(get_timestamp=True)
+    data.download_and_parse_data()
 
     generate_tables(spec, data)
     generate_plots(spec, data)
@@ -114,12 +110,12 @@ def main():
         logging.info("Successfully finished.")
         ret_code = 0
-    # except (KeyError, ValueError, PresentationError) as err:
-    #     logging.info("Finished with an error.")
-    #     logging.critical(str(err))
-    # except Exception as err:
-    #     logging.info("Finished with an unexpected error.")
-    #     logging.critical(str(err))
+    except (KeyError, ValueError, PresentationError) as err:
+        logging.info("Finished with an error.")
+        logging.critical(str(err))
+    except Exception as err:
+        logging.info("Finished with an unexpected error.")
+        logging.critical(str(err))
     finally:
         if spec is not None:
             clean_environment(spec.environment)
diff --git a/resources/tools/report_gen/conf.py b/resources/tools/report_gen/conf.py
deleted file mode 100644
index c0eb9893b4..0000000000
--- a/resources/tools/report_gen/conf.py
+++ /dev/null
@@ -1,222 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# CSIT 17.01 report documentation build configuration file, created by
-# sphinx-quickstart on Sun Jan 15 09:49:36 2017.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-import os
-import sys
-
-sys.path.insert(0, os.path.abspath('.'))
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = ['sphinxcontrib.programoutput',
-              'sphinx.ext.ifconfig']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-source_suffix = ['.rst', '.md']
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'FD.io CSIT'
-copyright = u'2017, FD.io'
-author = u'FD.io CSIT'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-#version = u''
-# The full version, including alpha/beta/rc tags.
-#release = u'' - -rst_epilog = """ -.. |release-1| replace:: rls1704 -.. |vpp-release| replace:: VPP-17.07 release -.. |vpp-release-1| replace:: VPP-17.04 release -.. |dpdk-release| replace:: DPDK 17.05 -.. |trex-release| replace:: TRex v2.25 -.. |virl-image-ubuntu| replace:: ubuntu-16.04.1_2017-02-23_1.8 -.. |virl-image-centos| replace:: centos-7.3-1611_2017-02-23_1.4 - -.. _tag documentation rst file: https://git.fd.io/csit/tree/docs/tag_documentation.rst?h=rls1707 -.. _TRex intallation: https://git.fd.io/csit/tree/resources/tools/trex/trex_installer.sh?h=rls1707 -.. _TRex driver: https://git.fd.io/csit/tree/resources/tools/trex/trex_stateless_profile.py?h=rls1707 -.. _VIRL topologies directory: https://git.fd.io/csit/tree/resources/tools/virl/topologies/?h=rls1707 -.. _VIRL ubuntu images lists: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/ubuntu/lists/?h=rls1707 -.. _VIRL centos images lists: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/centos/lists/?h=rls1707 -.. _VIRL nested: https://git.fd.io/csit/tree/resources/tools/disk-image-builder/nested/?h=rls1707 -.. _CSIT Honeycomb Functional Tests Documentation: https://docs.fd.io/csit/rls1707/doc/tests.vpp.func.honeycomb.html -.. _CSIT Honeycomb Performance Tests Documentation: https://docs.fd.io/csit/rls1707/doc/tests.vpp.perf.honeycomb.html -.. _CSIT DPDK Performance Tests Documentation: https://docs.fd.io/csit/rls1707/doc/tests.dpdk.perf.html -.. _CSIT VPP Functional Tests Documentation: https://docs.fd.io/csit/rls1707/doc/tests.vpp.func.html -.. _CSIT VPP Performance Tests Documentation: https://docs.fd.io/csit/rls1707/doc/tests.vpp.perf.html -.. _CSIT NSH_SFC Functional Tests Documentation: https://docs.fd.io/csit/rls1707/doc/tests.nsh_sfc.func.html -.. _VPP test framework documentation: https://docs.fd.io/vpp/17.07/vpp_make_test/html/ -.. _FD.io test executor vpp performance jobs: https://jenkins.fd.io/view/csit/job/csit-vpp-perf-1707-all -.. _FD.io test executor vpp functional jobs: https://jenkins.fd.io/view/csit/job/csit-vpp-functional-1707-ubuntu1604-virl/lastSuccessfulBuild -.. _FD.io test executor dpdk performance jobs: https://jenkins.fd.io/view/csit/job/csit-dpdk-perf-1707-all -.. _FD.io test executor Honeycomb functional jobs: https://jenkins.fd.io/view/csit/job/hc2vpp-csit-integration-1707-ubuntu1604/lastSuccessfulBuild -.. _FD.io test executor honeycomb performance jobs: https://jenkins.fd.io/view/hc2vpp/job/hc2vpp-csit-perf-master-ubuntu1604/lastSuccessfulBuild -.. _FD.io test executor NSH_SFC functional jobs: https://jenkins.fd.io/view/csit/job/csit-nsh_sfc-verify-func-1707-ubuntu1604-virl/lastSuccessfulBuild -.. _FD.io VPP compile job: https://jenkins.fd.io/view/vpp/job/vpp-merge-1707-ubuntu1604/ -.. _CSIT Testbed Setup: https://git.fd.io/csit/tree/resources/tools/testbed-setup/README.md?h=rls1707 -""" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = 'en' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'sphinx' - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_theme_path = ['env/lib/python2.7/site-packages/sphinx_rtd_theme'] - -html_static_path = ['../../../docs/report/_static'] - -html_context = { - 'css_files': [ - '_static/theme_overrides.css', # overrides for wide tables in RTD theme - ], - } - -# -- Options for LaTeX output --------------------------------------------- - -latex_engine = 'pdflatex' - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - 'papersize': 'a4paper', - - # The font size ('10pt', '11pt' or '12pt'). - # - #'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - 'preamble': r''' - \usepackage{pdfpages} - \usepackage{svg} - \usepackage{charter} - \usepackage[defaultsans]{lato} - \usepackage{inconsolata} - ''', - - # Latex figure (float) alignment - # - 'figure_align': 'H', - - # Latex font setup - # - 'fontpkg': r''' - \renewcommand{\familydefault}{\sfdefault} - ''', - - # Latex other setup - # - 'extraclassoptions': 'openany', - 'sphinxsetup': r''' - TitleColor={RGB}{225,38,40}, - InnerLinkColor={RGB}{62,62,63}, - OuterLinkColor={RGB}{225,38,40}, - shadowsep=0pt, - shadowsize=0pt, - shadowrule=0pt - ''' -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'csit.tex', u'CSIT REPORT', - u'', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# -# latex_logo = 'fdio.pdf' - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# -# latex_use_parts = True - -# If true, show page references after internal links. -# -latex_show_pagerefs = True - -# If true, show URL addresses after external links. -# -latex_show_urls = 'footnote' - -# Documents to append as an appendix to all manuals. -# -# latex_appendices = [] - -# It false, will not define \strong, \code, itleref, \crossref ... but only -# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added -# packages. -# -# latex_keep_old_macro_names = True - -# If false, no module index is generated. -# -# latex_domain_indices = True diff --git a/resources/tools/report_gen/fdio.svg b/resources/tools/report_gen/fdio.svg deleted file mode 100644 index f0dcc87cd4..0000000000 --- a/resources/tools/report_gen/fdio.svg +++ /dev/null @@ -1,25 +0,0 @@ -<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" width="630px" height="394px" version="1.1" style="shape-rendering:geometricPrecision; text-rendering:geometricPrecision; image-rendering:optimizeQuality; fill-rule:evenodd; clip-rule:evenodd"
-viewBox="0 0 94500000 59062500"
- xmlns:xlink="http://www.w3.org/1999/xlink">
- <defs>
- <style type="text/css">
- <![CDATA[
- .fil2 {fill:#3C4641}
- .fil1 {fill:#3E4742}
- .fil0 {fill:#ED3341}
- ]]>
- </style>
- </defs>
- <g id="Layer_x0020_1">
- <path class="fil0" d="M42892251 15970973c234833,2555989 1402971,3784607 3633525,3195163 64614,-17483 464586,-605509 524121,-613778 100406,-13939 -325789,592397 -234951,589562 663981,-21026 1166839,235659 2703054,-1575788 -125567,2507794 385088,1931698 753047,3733577 -1039736,2409632 -1745415,307125 -1486485,4471622 -4668654,-2126 -6967721,-648979 -9839813,2159679 -1571417,1536924 -3134683,5759184 -3805515,8365022 -725760,124740 -1467939,37800 -1887401,510536 -484194,545619 -431747,1408050 35674,1833418 379890,345516 987289,308424 1519678,355793 800769,71348 1142151,473327 1136244,1236651 -6615,848964 -451001,1192826 -1631306,1191054 -3702746,-5316 -8320961,16538 -11713866,45596 -1026624,8741 -1698992,464822 -1746714,1344971 -50676,931416 521640,1260039 1499833,1295831 4286993,156988 8304069,-117653 12887201,141041 783996,103714 1189755,583183 1168256,1106004 -35201,855461 -707923,1099626 -1411594,1145104 -5730598,370440 -13969581,-91665 -19611231,141278 -797344,32839 -1694621,179786 -1702890,1222476 -6615,840459 589798,1177943 1399899,1281538 2377738,7088 5327556,1418 7519956,1890 2596151,591 5192775,4134 7788926,-118l-1007843 3222096 9601909 24334 3086606 -8484328c2943911,-111628 4325029,-862076 6023666,-2197361 944528,-742534 2909773,-3271590 3377903,-4985466l-6699578 -78081 1196016 -3619114c2873627,-19727 7980761,562984 10127447,-748913 1534089,-937440 4106970,-3980458 4422954,-6498411 -1731358,-38863 -8963443,287753 -9479649,-290469 -811991,-1588073 1028633,-2402426 1978121,-3132911 1388678,-1068441 1793846,-1560904 1785578,-3760746 230108,313386 87413,679809 294368,826048 502740,355320 1446913,224319 3038411,773010 1055447,363943 2115855,607399 3274661,404696 606218,-105958 570662,-121196 999692,-407413 -87767,-924683 -247472,-1025089 -709813,-1469003 1446086,654176 966617,1888937 966499,3676168 0,1623983 504512,3115429 -2599,4527259 -1397891,3892219 -3372351,4768234 -4125752,6086745l4834856 25043 -4320540 12061626c-2523504,20318 -10295893,-341499 -12094937,331459 -2551264,954568 -5105126,4809341 -5427844,6962878 3747398,-8151 17429226,243219 20078179,-244755 5769579,-1062534 6837902,-6323231 8783066,-11759344 1441716,-4029244 5291646,-11853371 -1699464,-14191183 -1193535,-382253 -1853145,-295667 -2077228,-395955 -216523,-96863 4489,-368078 -1209600,-1404861 -1196488,-1140379 -417690,-2367934 118716,-3662820 1265001,-3005691 3370461,-2733058 3370343,-6944096 -236,-4388934 2680611,-3141771 3022583,-4763627 307479,-1458135 -1345326,-645317 -2307336,-1215388 -794273,-470610 -170809,-694693 -1801524,-842940l-178251 -1084151c-406114,246999 -713003,925509 -824631,912870 -159705,-18191 -35083,-801478 -213216,-759780 -33075,7796 -73828,19845 -123323,42643 -162068,75009 -368668,285036 -609053,585900 -1161759,1454355 -3112358,5035433 -4642076,5789779 -3004746,1481878 -5309719,91074 -10962709,3162206 -2587056,1405451 -4703383,1914216 -7362259,348233z"/>
- <path class="fil1" d="M81444116 48006591c-917831,-4596598 5486670,-5710989 6489669,-1741753 1221649,4834502 -5678387,5805017 -6489669,1741753zm1873463 -5811986c-6608858,1801052 -4202297,11851718 2867130,10021253 6433324,-1665681 4111577,-11922947 -2867130,-10021253z"/>
- <polygon class="fil2" points="76840667,42255321 75263698,42301508 75172978,52072571 76944144,52066547 "/>
- <path class="fil1" d="M70635206 52314137c1084388,307834 2141370,-714302 1520859,-1977413 -474863,-966853 -2784797,-983627 -2488776,930589 89421,578931 388041,882158 967916,1046824z"/>
- <path class="fil1" d="M76175978 40542272c695402,-45006 1114037,-372448 1062889,-1137426 -18309,-275349 -120251,-539595 -294013,-710876 -609761,-601611 -1840151,-411548 -1952016,509001 -111628,919485 435173,1387733 1183140,1339301z"/>
- <path class="fil0" d="M14086524 36346354l15995897 0c719972,0 1308943,589089 1308943,1308943l0 0c0,719854 -589089,1308943 -1308943,1308943l-15995897 0c-719736,0 -1308943,-588971 -1308943,-1308943l0 0c0,-719972 589089,-1308943 1308943,-1308943z"/>
- <path class="fil0" d="M20652975 31242173l13718919 0c719972,0 1308943,589326 1308943,1308943l0 118c0,719618 -589208,1308943 -1308943,1308943l-13718919 0c-719736,0 -1308943,-588971 -1308943,-1308943l0 -118c0,-719972 588971,-1308943 1308943,-1308943z"/>
- <path class="fil0" d="M6885624 46609763l3154410 0c719972,0 1308943,589208 1308943,1308943l0 0c0,719736 -589326,1308943 -1308943,1308943l-3154410 0c-719736,0 -1308943,-588971 -1308943,-1308943l0 0c0,-719972 588971,-1308943 1308943,-1308943z"/>
- </g>
-</svg>
diff --git a/resources/tools/report_gen/requirements.txt b/resources/tools/report_gen/requirements.txt deleted file mode 100644 index fc0ff8c728..0000000000 --- a/resources/tools/report_gen/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -Sphinx -sphinx-rtd-theme -plotly -lxml==3.5.0 -robotframework==2.9.2 -sphinxcontrib-programoutput diff --git a/resources/tools/report_gen/run_improvments_tables.py b/resources/tools/report_gen/run_improvments_tables.py deleted file mode 100755 index ebdfd60f14..0000000000 --- a/resources/tools/report_gen/run_improvments_tables.py +++ /dev/null @@ -1,213 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2017 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Generate csv files for the chapter "CSIT Release Notes" from json files -generated by Jenkins' jobs. -""" - -from sys import exit as sys_exit -from os import walk -from os.path import join -from math import sqrt -from argparse import ArgumentParser, RawDescriptionHelpFormatter -from json import load - - -EXT_JSON = ".json" -EXT_TMPL = ".template" -EXT_CSV = ".csv" - - -def get_files(path, extension): - """Generates the list of files to process. - - :param path: Path to files. - :param extension: Extension of files to process. If it is the empty string, - all files will be processed. - :type path: str - :type extension: str - :returns: List of files to process. - :rtype: list - """ - - file_list = list() - for root, _, files in walk(path): - for filename in files: - if extension: - if filename.endswith(extension): - file_list.append(join(root, filename)) - else: - file_list.append(join(root, filename)) - - return file_list - - -def parse_args(): - """Parse arguments from cmd line. - - :returns: Parsed arguments. - :rtype ArgumentParser - """ - - parser = ArgumentParser(description=__doc__, - formatter_class=RawDescriptionHelpFormatter) - parser.add_argument("-i", "--input", - required=True, - help="Input folder with data files.") - parser.add_argument("-o", "--output", - required=True, - help="Output folder with csv files and templates for " - "csv files.") - return parser.parse_args() - - -def calculate_stats(data): - """Calculate statistics: - - average, - - standard deviation. - - :param data: Data to process. - :type data: list - :returns: Average and standard deviation. - :rtype: tuple - """ - - if len(data) == 0: - return None, None - - def average(items): - """Calculate average from the items. - - :param items: Average is calculated from these items. - :type items: list - :returns: Average. - :rtype: float - """ - return float(sum(items)) / len(items) - - avg = average(data) - variance = [(x - avg) ** 2 for x in data] - stdev = sqrt(average(variance)) - - return avg, stdev - - -def write_line_to_file(file_handler, item): - """Write a line to the csv file. - - :param file_handler: File handler for the csv file. It must be open for - writing text. - :param item: Item to be written to the file. 
- :type file_handler: BinaryIO - :type item: dict - """ - - mean = "" if item["mean"] is None else "{:.1f}".format(item["mean"]) - stdev = "" if item["stdev"] is None else "{:.1f}".format(item["stdev"]) - change = "" if item["change"] is None else "{:.0f}%".format(item["change"]) - file_handler.write("{},{},{},{}\n".format(item["old"], mean, stdev, change)) - - -def main(): - """Main function to generate csv files for the chapter "CSIT Release Notes" - from json files generated by Jenkins' jobs. - """ - - args = parse_args() - - json_files = get_files(args.input, EXT_JSON) - tmpl_files = get_files(args.output, EXT_TMPL) - - if len(json_files) == 0: - print("No json data to process.") - exit(1) - - if len(tmpl_files) == 0: - print("No template files to process.") - exit(1) - - # Get information from template files - csv_data = list() - for tmpl_file in tmpl_files: - with open(tmpl_file, mode='r') as file_handler: - for line in file_handler: - line_list = line.split(',') - try: - csv_data.append({ - "ID": line_list[0], - "type": line_list[0].rsplit("-", 1)[-1], - "old": ",".join(line_list[1:])[:-1], - "last_old": line_list[-1][:-1], - "rates": list(), - "mean": None, - "stdev": None, - "change": None}) - except IndexError: - pass - - # Update existing data with the new information from json files - for json_file in json_files: - with open(json_file) as file_handler: - tests_data = load(file_handler) - for item in csv_data: - try: - rate = tests_data["data"][item["ID"]]["throughput"]["value"] - item["rates"].append(rate) - except KeyError: - pass - - # Add statistics - for item in csv_data: - mean, stdev = calculate_stats(item["rates"]) - if mean is not None: - mean = float(mean) / 1000000 - old = float(item["last_old"]) if item["last_old"] else None - item["mean"] = mean - item["change"] = ((round(mean, 1) - round(old, 1)) / round(old, 1))\ - * 100 if old else None - item["stdev"] = stdev / 1000000 - - # Sort the list, key = change - csv_data.sort(key=lambda data: data["change"], reverse=True) - - # Write csv files - for tmpl_file in tmpl_files: - csv_file = tmpl_file.replace(EXT_TMPL, EXT_CSV) - with open(csv_file, "w") as file_handler: - for item in csv_data: - if "pdr_" in csv_file \ - and "_others" not in csv_file \ - and item["type"] == "pdrdisc" \ - and item["change"] >= 9.5: - write_line_to_file(file_handler, item) - elif "pdr_" in csv_file \ - and "_others" in csv_file \ - and item["type"] == "pdrdisc" \ - and item["change"] < 9.5: - write_line_to_file(file_handler, item) - elif "ndr_" in csv_file \ - and "_others" not in csv_file \ - and item["type"] == "ndrdisc" \ - and item["change"] >= 9.5: - write_line_to_file(file_handler, item) - elif "ndr_" in csv_file \ - and "_others" in csv_file \ - and item["type"] == "ndrdisc" \ - and item["change"] < 9.5: - write_line_to_file(file_handler, item) - - -if __name__ == "__main__": - sys_exit(main()) diff --git a/resources/tools/report_gen/run_plot.py b/resources/tools/report_gen/run_plot.py deleted file mode 100644 index 0a95396ac9..0000000000 --- a/resources/tools/report_gen/run_plot.py +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2016 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Plot the performance data""" - -import argparse -import operator -import os -import sys -import math - -import plotly.offline as ploff -import plotly.graph_objs as plgo -from lxml import etree - - -def select_files_in_subfolders(directory, ext='xml'): - """Get all files in folder and its subfolders. - - :param dir: Input folder. - :param ext: File extension. - :type dir: str - :type ext: str - :return: List of filex matching the parameters. - :rtype list - """ - for _, _, files in os.walk(directory): - for file in files: - if file.endswith('.%s' % ext): - yield os.path.join(directory, file) - - -def select_files_in_folder(directory, ext='xml'): - """Get all files in folder. - - :param dir: Input folder. - :param ext: File extension. - :type dir: str - :type ext: str - :return: List of filex matching the parameters. - :rtype list - """ - for file in os.listdir(directory): - if file.endswith('.%s' % ext): - yield os.path.join(directory, file) - - -def combine_dicts(first, second, oper=operator.add): - """Combine two dictionaries. - - :param first: First dict. - :param second: Second dict. - :param oper: Operator. - :type first: dict - :type second: dict - :type oper: operator - :return: Combined dictionary. - :rtype dict - """ - - return dict(first.items() + second.items() +\ - [(k, oper(first[k], second[k])) for k in set(second) & set(first)]) - - -def parse_data_pps(args): - """Get PPS data out of XML file into array. - - :param args: Command line parameters. - :type suite: ArgumentParser - :return: X-data and Y-data dictionaries. - :rtype tuple of dict - """ - xdata = [] - ydata_pps = {} - - for i, file in enumerate(sorted(select_files_in_folder(args.input))): - xml_tree = etree.parse(file) - sel = xml_tree.xpath(args.xpath) - if sel: - ydata_pps = combine_dicts(ydata_pps, dict((elem.attrib['name'],\ - (i, float(elem.text))) for elem in sel)) - xdata.append(xml_tree.getroot().attrib['vdevice']) - return xdata, ydata_pps - - -def parse_data_lat(args): - """Get latency data out of XML file into array. - - :param args: Command line parameters. - :type suite: ArgumentParser - :return: X-data and Y-data dictionaries. - :rtype tuple of dict - """ - xdata = [] - ydata_lat = {} - - for i, file in enumerate(sorted(select_files_in_folder(args.input))): - xml_tree = etree.parse(file) - sel = xml_tree.xpath(args.xpath) - if sel: - try: - ydata_lat = combine_dicts(ydata_lat, dict((elem.attrib['name'],\ - (i, elem.attrib[args.latency])) for elem in sel)) - except KeyError: - raise RuntimeError('Retrieving latency data error (PDR?)') - xdata.append(xml_tree.getroot().attrib['vdevice']) - return xdata, ydata_lat - - -def parse_args(): - """Parse arguments from cmd line. - - :return: Parsed arguments. 
- :rtype ArgumentParser - """ - - parser = argparse.ArgumentParser() - parser.add_argument("-x", "--xpath", required=True, - help="Xpath filter") - parser.add_argument("-t", "--title", required=True, - help="Plot title") - parser.add_argument("-l", "--lower", - default=False, - help="Lower boudary of Y-axis") - parser.add_argument("-u", "--upper", - default=False, - help="Upper boudary of Y-axis") - parser.add_argument("-e", "--errorbar", - default=False, - help="Errorbar for Y-axis") - parser.add_argument("-d", "--latency", - choices=['lat_10', 'lat_50', 'lat_100'], - help="Latency to draw") - parser.add_argument("-p", "--plot", - choices=['box', 'scatter'], - default='box', - help="Throughput plot type") - parser.add_argument("-i", "--input", - help="Input folder") - parser.add_argument("-o", "--output", required=True, - help="Output image file name") - return parser.parse_args() - - -def main(): - """Main function.""" - - args = parse_args() - if args.latency: - xdata, ydata = parse_data_lat(args) - else: - xdata, ydata = parse_data_pps(args) - - # Print data into console for debug - print args.title - for data in ydata: - print data + ";" + ";".join(str(val) for val in ydata[data][1::2]) - - if xdata and ydata: - traces = [] - # Add plot traces - for i, suite in enumerate(ydata): - if args.latency: - y_extract = [] - _ = [y_extract.extend([l, l]) for l in ydata[suite][1::2][0].split('/')] - traces.append(plgo.Box( - x=['TGint1-to-SUT1-to-SUT2-to-TGint2', - 'TGint1-to-SUT1-to-SUT2-to-TGint2', - 'TGint1-to-SUT1-to-SUT2-to-TGint2', - 'TGint1-to-SUT1-to-SUT2-to-TGint2', - 'TGint1-to-SUT1-to-SUT2-to-TGint2', - 'TGint1-to-SUT1-to-SUT2-to-TGint2', - 'TGint2-to-SUT2-to-SUT1-to-TGint1', - 'TGint2-to-SUT2-to-SUT1-to-TGint1', - 'TGint2-to-SUT2-to-SUT1-to-TGint1', - 'TGint2-to-SUT2-to-SUT1-to-TGint1', - 'TGint2-to-SUT2-to-SUT1-to-TGint1', - 'TGint2-to-SUT2-to-SUT1-to-TGint1'], - y=y_extract, - name=str(i+1)+'. '+suite.lower().replace('-ndrdisc',''), - boxmean=False, - )) - else: - if args.plot == 'box': - traces.append(plgo.Box( - x=[str(i+1)+'.'] * len(ydata[suite][1::2]), - y=ydata[suite][1::2], - name=str(i+1)+'. '+suite.lower().replace('-ndrdisc',''), - hoverinfo='x+y', - boxpoints='outliers', - whiskerwidth=0, - )) - elif args.plot == 'scatter': - traces.append(plgo.Scatter( - x=ydata[suite][0::2], - y=ydata[suite][1::2], - mode='lines+markers', - name=str(i+1)+'. 
'+suite.lower().replace('-ndrdisc',''), - )) - else: - pass - - # Add plot layout - layout = plgo.Layout( - title='{0}'.format(args.title), - xaxis=dict( - autorange=True, - autotick=False, - fixedrange=False, - gridcolor='rgb(238, 238, 238)', - linecolor='rgb(238, 238, 238)', - linewidth=1, - showgrid=True, - showline=True, - showticklabels=True, - tickcolor='rgb(238, 238, 238)', - tickmode='linear', - title='Indexed Test Cases' if args.plot == 'box'\ - else '', - zeroline=False, - ), - yaxis=dict( - gridcolor='rgb(238, 238, 238)', - hoverformat='' if args.latency else '.4s', - linecolor='rgb(238, 238, 238)', - linewidth=1, - range=[args.lower, args.upper] if args.lower and args.upper\ - else [], - showgrid=True, - showline=True, - showticklabels=True, - tickcolor='rgb(238, 238, 238)', - title='Latency min/avg/max [uSec]' if args.latency\ - else 'Packets Per Second [pps]', - zeroline=False, - ), - boxmode='group', - boxgroupgap=0.5, - autosize=False, - margin=dict( - t=50, - b=20, - l=50, - r=20, - ), - showlegend=True, - legend=dict( - orientation='h', - ), - width=700, - height=1000, - ) - # Create plot - plpl = plgo.Figure(data=traces, layout=layout) - # Export Plot - ploff.plot(plpl, - show_link=False, auto_open=False, - filename='{0}.html'.format(args.output)) - else: - sys.stderr.write('No data found!\n') - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/resources/tools/report_gen/run_report.cfg b/resources/tools/report_gen/run_report.cfg deleted file mode 100644 index b35f490e50..0000000000 --- a/resources/tools/report_gen/run_report.cfg +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash - -declare -r TRUE=0 -declare -r FALSE=1 - -# set default values in config array -typeset -A DIR -typeset -A URL -typeset -A JOB -typeset -A CFG - -CFG[DEBUG]=0 -CFG[BLD_LATEX]=1 -CFG[BLD_HTML]=1 - -DIR[WORKING]=_tmp -DIR[BUILD,HTML]=_build -DIR[BUILD,LATEX]=_build_latex -DIR[RST]=../../../docs/report - -DIR[STATIC]=${DIR[BUILD,HTML]}/_static -DIR[STATIC,VPP]=${DIR[STATIC]}/vpp -DIR[STATIC,DPDK]=${DIR[STATIC]}/dpdk -DIR[STATIC,ARCH]=${DIR[STATIC]}/archive -DIR[STATIC,TREND]=${DIR[STATIC]}/trending - -DIR[PLOT,VPP]=${DIR[WORKING]}/vpp_plot -DIR[PLOT,DPDK]=${DIR[WORKING]}/dpdk_plot - -DIR[DTR]=${DIR[RST]}/detailed_test_results -DIR[DTR,PERF,DPDK]=${DIR[DTR]}/dpdk_performance_results -DIR[DTR,PERF,VPP]=${DIR[DTR]}/vpp_performance_results -DIR[DTR,PERF,HC]=${DIR[DTR]}/honeycomb_performance_results -DIR[DTR,FUNC,VPP]=${DIR[DTR]}/vpp_functional_results -DIR[DTR,FUNC,HC]=${DIR[DTR]}/honeycomb_functional_results -DIR[DTR,FUNC,NSHSFC]=${DIR[DTR]}/nshsfc_functional_results -DIR[DTR,PERF,VPP,IMPRV]=${DIR[RST]}/vpp_performance_tests/performance_improvements - -DIR[DTC]=${DIR[RST]}/test_configuration -DIR[DTC,PERF,VPP]=${DIR[DTC]}/vpp_performance_configuration -DIR[DTC,FUNC,VPP]=${DIR[DTC]}/vpp_functional_configuration - -DIR[DTO]=${DIR[RST]}/test_operational_data -DIR[DTO,PERF,VPP]=${DIR[DTO]}/vpp_performance_operational_data - -DIR[CSS_PATCH_FILE]=${DIR[STATIC]}/theme_overrides.css - -URL[JENKINS,CSIT]='https://jenkins.fd.io/view/csit/job' -URL[JENKINS,HC]='https://jenkins.fd.io/view/hc2vpp/job' - -JOB[PERF,VPP]=csit-vpp-perf-1707-all -JOB[PERF,VPP,BLD]="9 10 13 14 15 16 17 18 19 21" -JOB[PERF,VPP,FBLD]=22 -JOB[PERF,DPDK]=csit-dpdk-perf-1707-all -JOB[PERF,DPDK,BLD]="1 2 3 4 5 6 7 8 9 10" -JOB[PERF,DPDK,FBLD]=10 -JOB[FUNC,VPP]=csit-vpp-functional-1707-ubuntu1604-virl -JOB[FUNC,VPP,BLD]=lastSuccessfulBuild -JOB[PERF,HC]=hc2vpp-csit-perf-master-ubuntu1604 -JOB[PERF,HC,BLD]="8 9" 
-JOB[FUNC,HC]=hc2vpp-csit-integration-1707-ubuntu1604 -JOB[FUNC,HC,BLD]=lastSuccessfulBuild -JOB[FUNC,NSH]=csit-nsh_sfc-verify-func-1707-ubuntu1604-virl -JOB[FUNC,NSH,BLD]=2 - -JOB[1704,PERF,VPP]=csit-vpp-perf-1704-all -JOB[1704,VPP,BLD]="6 7 8 9 10 12 14 15 16 17" -JOB[1704,DPDK]=csit-dpdk-perf-1704-all -JOB[1704,DPDK,BLD]="1 2 3 4 6 7 8 9 10 11" diff --git a/resources/tools/report_gen/run_report.sh b/resources/tools/report_gen/run_report.sh deleted file mode 100755 index ec779adf5d..0000000000 --- a/resources/tools/report_gen/run_report.sh +++ /dev/null @@ -1,627 +0,0 @@ -#!/bin/bash - -set -x - -# Script directory -SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -# Get actual date -DATE=$(date -u '+%d-%b-%Y') - -# Load configuration -source ${SCRIPT_DIR}/run_report.cfg - -# Process parameters -for i in "$@"; do -case $i in - --debug) - CFG[DEBUG]=1 - ;; - --no_latex) - CFG[BLD_LATEX]=0 - ;; - --no_html) - CFG[BLD_HTML]=0 - ;; - *) - # unknown option - ;; -esac -done - -# Install system dependencies -sudo apt-get -y update -sudo apt-get -y install libxml2 libxml2-dev libxslt-dev build-essential \ - zlib1g-dev unzip -if [[ ${CFG[BLD_LATEX]} -eq 1 ]] ; -then - sudo apt-get -y install xvfb texlive-latex-recommended \ - texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra latexmk wkhtmltopdf - sudo sed -i.bak 's/^\(main_memory\s=\s\).*/\110000000/' /usr/share/texlive/texmf-dist/web2c/texmf.cnf -fi - -# Clean-up when finished -trap 'rm -rf ${DIR[WORKING]}; exit' EXIT -trap 'rm -rf ${DIR[WORKING]}; exit' ERR - -# Remove the old build -rm -rf ${DIR[BUILD,HTML]} || true -rm -rf ${DIR[BUILD,LATEX]} || true -rm -rf ${DIR[WORKING]} || true - -# Create working directories -mkdir ${DIR[WORKING]} - -# Create virtual environment -virtualenv ${DIR[WORKING]}/env -. ${DIR[WORKING]}/env/bin/activate - -# Install python dependencies: -pip install -r requirements.txt - -export PYTHONPATH=`pwd` - -# Download raw outputs for plots -echo Downloading raw outputs for plots ... 
-mkdir -p ${DIR[STATIC,VPP]} -mkdir -p ${DIR[STATIC,DPDK]} -mkdir -p ${DIR[STATIC,ARCH]} -mkdir -p ${DIR[STATIC,TREND]} -mkdir -p ${DIR[PLOT,VPP]} -mkdir -p ${DIR[PLOT,DPDK]} - -### VPP PERFORMANCE SOURCE DATA - -#if [[ ${CFG[DEBUG]} -eq 1 ]] ; -# cp ./${JOB[PERF,VPP]}-${JOB[PERF,VPP,FBLD]}.zip ${DIR[STATIC,ARCH]}/${JOB[PERF,VPP]}-${JOB[PERF,VPP,FBLD]}.zip -#fi - -blds=${JOB[PERF,VPP,BLD]} -for i in ${blds[@]}; do - curl --silent ${URL[JENKINS,CSIT]}/${JOB[PERF,VPP]}/${i}/robot/report/output_perf_data.xml \ - --output ${DIR[PLOT,VPP]}/${JOB[PERF,VPP]}-${i}.xml - if [[ ${CFG[DEBUG]} -eq 0 ]] ; - then - curl --fail --silent ${URL[JENKINS,CSIT]}/${JOB[PERF,VPP]}/${i}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[PERF,VPP]}-${i}.zip - fi -done -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - curl --fail --silent ${URL[JENKINS,CSIT]}/${JOB[PERF,VPP]}/${JOB[PERF,VPP,FBLD]}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[PERF,VPP]}-${JOB[PERF,VPP,FBLD]}.zip -fi -# Archive trending -cp ${DIR[PLOT,VPP]}/* ${DIR[STATIC,TREND]} -blds=${JOB[1704,VPP,BLD]} -for i in ${blds[@]}; do - curl --silent ${URL[JENKINS,CSIT]}/${JOB[1704,PERF,VPP]}/${i}/robot/report/output_perf_data.xml \ - --output ${DIR[STATIC,TREND]}/${JOB[1704,PERF,VPP]}-${i}.xml -done - -### DPDK PERFORMANCE SOURCE DATA - -#if [[ ${CFG[DEBUG]} -eq 1 ]] ; -# cp ./${JOB[PERF,DPDK]}-${JOB[PERF,DPDK,FBLD]}.zip ${DIR[STATIC,ARCH]}/${JOB[PERF,DPDK]}-${JOB[PERF,DPDK,FBLD]}.zip -#fi - -blds=${JOB[PERF,DPDK,BLD]} -for i in ${blds[@]}; do - curl --silent ${URL[JENKINS,CSIT]}/${JOB[PERF,DPDK]}/${i}/robot/report/output_perf_data.xml \ - --output ${DIR[PLOT,DPDK]}/${JOB[PERF,DPDK]}-${i}.xml - if [[ ${CFG[DEBUG]} -eq 0 ]] ; - then - curl --fail --silent ${URL[JENKINS,CSIT]}/${JOB[PERF,DPDK]}/${i}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[PERF,DPDK]}-${i}.zip - fi -done -cp ${DIR[PLOT,DPDK]}/* ${DIR[STATIC,TREND]} - -### FUNCTIONAL SOURCE DATA - -#if [[ ${CFG[DEBUG]} -eq 1 ]] ; -# cp ./${JOB[FUNC,VPP]}-${JOB[FUNC,VPP,BLD]}.zip ${DIR[STATIC,ARCH]}/${JOB[FUNC,VPP]}-${JOB[FUNC,VPP,BLD]}.zip -#fi - -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - curl --fail --silent ${URL[JENKINS,CSIT]}/${JOB[FUNC,VPP]}/${JOB[FUNC,VPP,BLD]}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[FUNC,VPP]}-${JOB[FUNC,VPP,BLD]}.zip -fi - -### HONEYCOMB FUNCTIONAL SOURCE DATA - -#if [[ ${CFG[DEBUG]} -eq 1 ]] ; -# cp ./${JOB[FUNC,HC]}-${JOB[FUNC,HC,BLD]}.zip ${DIR[STATIC,ARCH]}/${JOB[FUNC,HC]}-${JOB[FUNC,HC,BLD]}.zip -#fi - -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - curl --fail --silent ${URL[JENKINS,HC]}/${JOB[FUNC,HC]}/${JOB[FUNC,HC,BLD]}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[FUNC,HC]}-${JOB[FUNC,HC,BLD]}.zip -fi - -### HONEYCOMB PERFORMANCE SOURCE DATA - -#if [[ ${CFG[DEBUG]} -eq 1 ]] ; -# cp ./${JOB[PERF,HC]}-${JOB[PERF,HC,BLD]}.zip ${DIR[STATIC,ARCH]}/${JOB[PERF,HC]}-${JOB[PERF,HC,BLD]}.zip -#fi - -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - blds=${JOB[PERF,HC,BLD]} - for i in ${blds[@]}; do - curl --silent ${URL[JENKINS,HC]}/${JOB[PERF,HC]}/${i}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[PERF,HC]}-${i}.zip -done -fi - -### NSH_SFC SOURCE DATA - -#if [[ ${CFG[DEBUG]} -eq 1 ]] ; -# cp ./${JOB[FUNC,NSH]}-${JOB[FUNC,NSH,BLD]}.zip ${DIR[STATIC,ARCH]}/${JOB[FUNC,NSH]}-${JOB[FUNC,NSH,BLD]}.zip -#fi - -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - curl --fail --silent 
${URL[JENKINS,CSIT]}/${JOB[FUNC,NSH]}/${JOB[FUNC,NSH,BLD]}/robot/report/\*zip\*/robot-plugin.zip \ - --output ${DIR[STATIC,ARCH]}/${JOB[FUNC,NSH]}-${JOB[FUNC,NSH,BLD]}.zip -fi - -# Data post processing - -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - # VPP PERF - unzip -o ${DIR[STATIC,ARCH]}/${JOB[PERF,VPP]}-${JOB[PERF,VPP,FBLD]}.zip -d ${DIR[WORKING]}/ - python run_robot_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTR,PERF,VPP]}/vpp_performance_results.rst \ - --formatting rst --start 4 --level 2 - python run_robot_teardown_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTC,PERF,VPP]}/vpp_performance_configuration.rst \ - --data "VAT_H" --formatting rst --start 4 --level 2 - python run_robot_teardown_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTO,PERF,VPP]}/vpp_performance_operational_data.rst \ - --data "SH_RUN" --formatting rst --start 4 --level 2 - - blds=${JOB[PERF,VPP,BLD]} - for i in ${blds[@]}; do - unzip -o ${DIR[STATIC,ARCH]}/${JOB[PERF,VPP]}-${i}.zip -d ${DIR[WORKING]}/ - python run_robot_json_data.py \ - --input ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTR,PERF,VPP,IMPRV]}/${JOB[PERF,VPP]}-${i}.json \ - --vdevice ${i} - done - - # DPDK PERF - unzip -o ${DIR[STATIC,ARCH]}/${JOB[PERF,DPDK]}-${JOB[PERF,DPDK,FBLD]}.zip -d ${DIR[WORKING]}/ - python run_robot_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTR,PERF,DPDK]}/dpdk_performance_results.rst \ - --formatting rst --start 4 --level 2 - - # VPP FUNC - unzip -o ${DIR[STATIC,ARCH]}/${JOB[FUNC,VPP]}-${JOB[FUNC,VPP,BLD]}.zip -d ${DIR[WORKING]}/ - python run_robot_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTR,FUNC,VPP]}/vpp_functional_results.rst \ - --formatting rst --start 5 --level 2 - python run_robot_teardown_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTC,FUNC,VPP]}/vpp_functional_configuration.rst \ - --data "VAT_H" --formatting rst --start 5 --level 2 - - # HC FUNC - unzip -o ${DIR[STATIC,ARCH]}/${JOB[FUNC,HC]}-${JOB[FUNC,HC,BLD]}.zip -d ${DIR[WORKING]}/ - python run_robot_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTR,FUNC,HC]}/honeycomb_functional_results.rst \ - --formatting rst --start 5 --level 2 - - # NSHSFC FUNC - unzip -o ${DIR[STATIC,ARCH]}/${JOB[FUNC,NSH]}-${JOB[FUNC,NSH,BLD]}.zip -d ${DIR[WORKING]}/ - python run_robot_data.py -i ${DIR[WORKING]}/robot-plugin/output.xml \ - --output ${DIR[DTR,FUNC,NSHSFC]}/nshsfc_functional_results.rst \ - --formatting rst --start 5 --level 2 -fi - -# Generate tables for performance improvements -if [[ ${CFG[DEBUG]} -eq 0 ]] ; -then - python run_improvments_tables.py \ - --input ${DIR[DTR,PERF,VPP,IMPRV]} \ - --output ${DIR[DTR,PERF,VPP,IMPRV]} -fi - -# Delete temporary json files -find ${DIR[RST]} -name "*.json" -type f -delete - -# Plot packets per second - -# VPP L2 sel1 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-l2-sel1-ndrdisc \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-l2-sel1-ndrdisc \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath 
'//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-l2-sel1-pdrdisc \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-l2-sel1-pdrdisc \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' - -# VPP L2 sel2 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-l2-sel2-ndrdisc \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and ((contains(@tags,"FEATURE") and contains(@tags,"ACL50") and contains(@tags,"10k_FLOWS"))) and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 8000000 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-l2-sel2-ndrdisc \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and ((contains(@tags,"FEATURE") and contains(@tags,"ACL50") and contains(@tags,"10k_FLOWS"))) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 5000000 --upper 12000000 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-l2-sel2-pdrdisc \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" \ - --xpath '//*[@framesize="64B" and ((contains(@tags,"FEATURE") and contains(@tags,"ACL50") and contains(@tags,"10k_FLOWS"))) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 8000000 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-l2-sel2-pdrdisc \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" \ - --xpath '//*[@framesize="64B" and ((contains(@tags,"FEATURE") and contains(@tags,"ACL50") and contains(@tags,"10k_FLOWS"))) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 5000000 --upper 12000000 - -# VPP IP4 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ethip4-ip4-ndrdisc \ - --title "64B-1t1c-ethip4-ip4[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="64B" and 
(contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP4FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ethip4-ip4-ndrdisc \ - --title "64B-2t2c-ethip4-ip4[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP4FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ethip4-ip4-pdrdisc \ - --title "64B-1t1c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" \ - --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and contains(@tags,"IP4FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ethip4-ip4-pdrdisc \ - --title "64B-2t2c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" \ - --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and contains(@tags,"IP4FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' - -# VPP IP6 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-1t1c-ethip6-ip6-ndrdisc \ - --title "78B-1t1c-ethip6-ip6[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP6FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-2t2c-ethip6-ip6-ndrdisc \ - --title "78B-2t2c-ethip6-ip6[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP6FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-1t1c-ethip6-ip6-pdrdisc \ - --title "78B-1t1c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" \ - --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and contains(@tags,"IP6FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-2t2c-ethip6-ip6-pdrdisc \ - --title "78B-2t2c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" \ - --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and contains(@tags,"IP6FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' - -# VPP IP4_overlay - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output 
${DIR[STATIC,VPP]}/64B-1t1c-ethip4-ndrdisc \ - --title "64B-1t1c-ethip4[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST")) and not(contains(@tags, "IPSECHW"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ethip4-ndrdisc \ - --title "64B-2t2c-ethip4[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST")) and not(contains(@tags, "IPSECHW"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ethip4-pdrdisc \ - --title "64B-1t1c-ethip4[a-z0-9]+-[a-z0-9]*-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST")) and not(contains(@tags, "IPSECHW"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ethip4-pdrdisc \ - --title "64B-2t2c-ethip4[a-z0-9]+-[a-z0-9]*-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST")) and not(contains(@tags, "IPSECHW"))]' - -# VPP IP6_overlay - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-1t1c-ethip6-ndrdisc \ - --title "78B-1t1c-ethip6[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="78B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-2t2c-ethip6-ndrdisc \ - --title "78B-2t2c-ethip6[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="78B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-1t1c-ethip6-pdrdisc \ - --title "78B-1t1c-ethip6[a-z0-9]+-[a-z0-9]*-pdrdisc" \ - --xpath '//*[@framesize="78B" and contains(@tags,"ENCAP") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-2t2c-ethip6-pdrdisc \ - --title "78B-2t2c-ethip6[a-z0-9]+-[a-z0-9]*-pdrdisc" \ - --xpath 
'//*[@framesize="78B" and contains(@tags,"ENCAP") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' - -# VPP VM VHOST - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-vhost-sel1-ndrdisc \ - --title "64B-1t1c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"VHOST") and not(contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-vhost-sel1-ndrdisc \ - --title "64B-2t2c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"VHOST") and not(contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-vhost-sel1-pdrdisc \ - --title "64B-1t1c-.*vhost.*-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"PDRDISC") and contains(@tags,"1T1C") and not(contains(@tags,"NDRDISC")) and contains(@tags,"VHOST") and not(contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-vhost-sel1-pdrdisc \ - --title "64B-2t2c-.*vhost.*-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"PDRDISC") and contains(@tags,"2T2C") and not(contains(@tags,"NDRDISC")) and contains(@tags,"VHOST") and not(contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD"))]' - -# VPP VM VHOST SELECTION - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-vhost-sel2-ndrdisc \ - --title "64B-1t1c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"VHOST") and not(contains(@tags,"VXLAN")) and not(contains(@tags,"IP4FWD")) and not(contains(@tags,"DOT1Q")) and not(contains(name(), "2Vm"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-vhost-sel2-ndrdisc \ - --title "64B-2t2c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"VHOST") and not(contains(@tags,"VXLAN")) and not(contains(@tags,"IP4FWD")) and not(contains(@tags,"DOT1Q")) and not(contains(name(), "2Vm"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-vhost-sel2-pdrdisc \ - --title "64B-1t1c-.*vhost.*-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"PDRDISC") and contains(@tags,"1T1C") and not(contains(@tags,"NDRDISC")) and contains(@tags,"VHOST") and not(contains(@tags,"VXLAN")) and not(contains(@tags,"IP4FWD")) and not(contains(@tags,"DOT1Q")) and not(contains(name(), "2Vm"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-vhost-sel2-pdrdisc \ - --title "64B-2t2c-.*vhost.*-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"PDRDISC") and contains(@tags,"2T2C") and not(contains(@tags,"NDRDISC")) and contains(@tags,"VHOST") and not(contains(@tags,"VXLAN")) and not(contains(@tags,"IP4FWD")) and not(contains(@tags,"DOT1Q")) and 
not(contains(name(), "2Vm"))]' - -# VPP CRYPTO - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ipsechw-ndrdisc \ - --title "64B-1t1c-.*ipsec.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and not(contains(@tags, "VHOST")) and contains(@tags, "IP4FWD") and contains(@tags, "NDRDISC") and contains(@tags, "1T1C") and contains(@tags, "IPSECHW") and (contains(@tags, "IPSECTRAN") or contains(@tags, "IPSECTUN"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ipsechw-ndrdisc \ - --title "64B-2t2c-.*ipsec.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and not(contains(@tags, "VHOST")) and contains(@tags, "IP4FWD") and contains(@tags, "NDRDISC") and contains(@tags, "2T2C") and contains(@tags, "IPSECHW") and (contains(@tags, "IPSECTRAN") or contains(@tags, "IPSECTUN"))]' - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ipsechw-pdrdisc \ - --title "64B-1t1c-.*ipsec.*-pdrdisc" \ - --xpath '//*[@framesize="64B" and not(contains(@tags, "VHOST")) and contains(@tags, "IP4FWD") and contains(@tags, "PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags, "1T1C") and contains(@tags, "IPSECHW") and (contains(@tags, "IPSECTRAN") or contains(@tags, "IPSECTUN"))]' -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ipsechw-pdrdisc \ - --title "64B-2t2c-.*ipsec.*-pdrdisc" \ - --xpath '//*[@framesize="64B" and not(contains(@tags, "VHOST")) and contains(@tags, "IP4FWD") and contains(@tags, "PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags, "2T2C") and contains(@tags, "IPSECHW") and (contains(@tags, "IPSECTRAN") or contains(@tags, "IPSECTUN"))]' - -# DPDK - -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-1t1c-l2-ndrdisc \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-2t2c-l2-ndrdisc \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-1t1c-ipv4-ndrdisc \ - --title "64B-1t1c-ethip4-ip4base-l3fwd-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP4FWD")]' \ - --lower 2000000 --upper 12000000 -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-2t2c-ipv4-ndrdisc \ - --title "64B-2t2c-ethip4-ip4base-l3fwd-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP4FWD")]' \ - --lower 2000000 --upper 12000000 - -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-1t1c-l2-pdrdisc \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and 
not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-2t2c-l2-pdrdisc \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-1t1c-ipv4-pdrdisc \ - --title "64B-1t1c-ethip4-ip4base-l3fwd-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and contains(@tags,"IP4FWD")]' \ - --lower 20000000 --upper 30000000 -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-2t2c-ipv4-pdrdisc \ - --title "64B-2t2c-ethip4-ip4base-l3fwd-pdrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and contains(@tags,"IP4FWD")]' \ - --lower 20000000 --upper 30000000 - -# Plot latency - -# VPP L2 sel1 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-l2-sel1-ndrdisc-lat50 \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-l2-sel1-ndrdisc-lat50 \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50 - -# VPP L2 sel2 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-l2-sel2-ndrdisc-lat50 \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and ((contains(@tags,"FEATURE") and contains(@tags,"ACL50") and contains(@tags,"10k_FLOWS"))) and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-l2-sel2-ndrdisc-lat50 \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and ((contains(@tags,"FEATURE") and contains(@tags,"ACL50") and contains(@tags,"10k_FLOWS"))) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50 - -# VPP IP4 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ethip4-ip4-ndrdisc-lat50 
\ - --title "64B-1t1c-ethip4-ip4[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP4FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ethip4-ip4-ndrdisc-lat50 \ - --title "64B-2t2c-ethip4-ip4[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP4FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' --latency lat_50 - -# VPP IP6 - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-1t1c-ethip6-ip6-ndrdisc-lat50 \ - --title "78B-1t1c-ethip6-ip6[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP6FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-2t2c-ethip6-ip6-ndrdisc-lat50 \ - --title "78B-2t2c-ethip6-ip6[a-z0-9]+-[a-z-]*ndrdisc" \ - --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP6FWD") and not(contains(@tags,"IPSEC")) and not(contains(@tags,"VHOST"))]' --latency lat_50 - -# VPP IP4_overlay - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ethip4-ndrdisc-lat50 \ - --title "64B-1t1c-ethip4[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST")) and not(contains(@tags, "IPSECHW"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ethip4-ndrdisc-lat50 \ - --title "64B-2t2c-ethip4[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST")) and not(contains(@tags, "IPSECHW"))]' --latency lat_50 - -# VPP IP6_overlay - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-1t1c-ethip6-ndrdisc-lat50 \ - --title "78B-1t1c-ethip6[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="78B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/78B-2t2c-ethip6-ndrdisc-lat50 \ - --title "78B-2t2c-ethip6[a-z0-9]+-[a-z0-9]*-ndrdisc" \ - --xpath '//*[@framesize="78B" and contains(@tags,"ENCAP") and contains(@tags,"NDRDISC") and 
contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' --latency lat_50 - -# VPP VM VHOST - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-vhost-sel1-ndrdisc-lat50 \ - --title "64B-1t1c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"VHOST") and not(contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-vhost-sel1-ndrdisc-lat50 \ - --title "64B-2t2c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"VHOST") and not(contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD"))]' --latency lat_50 - -# VPP VM VHOST selection - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-vhost-sel2-ndrdisc-lat50 \ - --title "64B-1t1c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"VHOST") and not(contains(@tags,"VXLAN")) and not(contains(@tags,"IP4FWD")) and not(contains(@tags,"DOT1Q")) and not(contains(name(), "2Vm"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-vhost-sel2-ndrdisc-lat50 \ - --title "64B-2t2c-.*vhost.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"VHOST") and not(contains(@tags,"VXLAN")) and not(contains(@tags,"IP4FWD")) and not(contains(@tags,"DOT1Q")) and not(contains(name(), "2Vm"))]' --latency lat_50 - -# VPP CRYPTO - -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-1t1c-ipsechw-ndrdisc-lat50 \ - --title "64B-1t1c-.*ipsec.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and not(contains(@tags, "VHOST")) and contains(@tags, "IP4FWD") and contains(@tags, "NDRDISC") and contains(@tags, "1T1C") and contains(@tags, "IPSECHW") and (contains(@tags, "IPSECTRAN") or contains(@tags, "IPSECTUN"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,VPP]} \ - --output ${DIR[STATIC,VPP]}/64B-2t2c-ipsechw-ndrdisc-lat50 \ - --title "64B-2t2c-.*ipsec.*-ndrdisc" \ - --xpath '//*[@framesize="64B" and not(contains(@tags, "VHOST")) and contains(@tags, "IP4FWD") and contains(@tags, "NDRDISC") and contains(@tags, "2T2C") and contains(@tags, "IPSECHW") and (contains(@tags, "IPSECTRAN") or contains(@tags, "IPSECTUN"))]' --latency lat_50 - -# DPDK - -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-1t1c-l2-ndrdisc-lat50 \ - --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-2t2c-l2-ndrdisc-lat50 \ - --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or 
contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-1t1c-ipv4-ndrdisc-lat50 \ - --title "64B-1t1c-ethip4-ip4base-l3fwd-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP4FWD")]' --latency lat_50 -python run_plot.py --input ${DIR[PLOT,DPDK]} \ - --output ${DIR[STATIC,DPDK]}/64B-2t2c-ipv4-ndrdisc-lat50 \ - --title "64B-2t2c-ethip4-ip4base-l3fwd-ndrdisc" \ - --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP4FWD")]' --latency lat_50 - -# HTML BUILDER -if [[ ${CFG[BLD_HTML]} -eq 1 ]] ; -then - sphinx-build -v -c . -a -b html -E \ - -D release=$1 -D version="$1 report - $DATE" \ - ${DIR[RST]} ${DIR[BUILD,HTML]}/ - - # Patch the CSS for tables layout - cat - > ${DIR[CSS_PATCH_FILE]} <<"_EOF" -/* override table width restrictions */ -@media screen and (min-width: 767px) { - .wy-table-responsive table td, .wy-table-responsive table th { - white-space: normal !important; - } - - .wy-table-responsive { - font-size: small; - margin-bottom: 24px; - max-width: 100%; - overflow: visible !important; - } -} -_EOF -fi - -# LATEX BUILDER -if [[ ${CFG[BLD_LATEX]} -eq 1 ]] ; -then - # Convert PyPLOT graphs in HTML format to PDF. - for f in ${DIR[STATIC,VPP]}/*; do - xvfb-run -a wkhtmltopdf ${f} ${f%.html}.pdf - done - for f in ${DIR[STATIC,DPDK]}/*; do - xvfb-run -a wkhtmltopdf ${f} ${f%.html}.pdf - done - - # Generate the LaTeX documentation - sphinx-build -v -c . -a -b latex -E \ - -D release=$1 -D version="$1 report - $DATE" \ - ${DIR[RST]} ${DIR[BUILD,LATEX]} - cd ${DIR[BUILD,LATEX]} - pdflatex -shell-escape -interaction nonstopmode csit.tex || true - pdflatex -interaction nonstopmode csit.tex || true - cp csit.pdf ../${DIR[STATIC,ARCH]}/csit_$1.pdf - cd ${SCRIPT_DIR} -fi - -# Create archive -echo Creating csit.report.tar.gz ... -tar -czvf ./csit.report.tar.gz ${DIR[BUILD,HTML]} diff --git a/resources/tools/report_gen/run_robot_data.py b/resources/tools/report_gen/run_robot_data.py deleted file mode 100755 index d5672b4ce5..0000000000 --- a/resources/tools/report_gen/run_robot_data.py +++ /dev/null @@ -1,485 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2017 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Script extracts interested data (name, documentation, message, status) from -robot framework output file (output.xml) and prints in specified format (wiki, -html, rst) to defined output file. - -Supported formats: - - html - - rst - - wiki - -:TODO: - - md - -:Example: - -run_robot_data.py -i "output.xml" -o "tests.rst" -f "rst" -s 3 -l 2 - -The example reads the data from "output.xml", writes the output to "tests.rst" -in rst format. It will start on the 3rd level of xml structure and the generated -document hierarchy will start on the 2nd level. 
-All test suites will be processed. - -:Example: - -run_robot_data.py -i "output.xml" -o "tests.rst" -f "rst" -r "(.*)(lisp)(.*)" - -The example reads the data from "output.xml", writes the output to "tests.rst" -in rst format. It will start on the 1st level of xml structure and the generated -document hierarchy will start on the 1st level (default values). -Only the test suites which match the given regular expression are processed. -""" - -import argparse -import re -import sys -import json -import string - -from robot.api import ExecutionResult, ResultVisitor - - -class ExecutionChecker(ResultVisitor): - """Class to traverse through the test suite structure. - - The functionality implemented in this class generates a json file. Its - structure is: - - [ - { - "level": "Level of the suite, type: str", - "title": "Title of the suite, type: str", - "doc": "Documentation of the suite, type: str", - "table": [ - ["TC name", "TC doc", "message or status"], - ["TC name", "TC doc", "message or status"], - ... other test cases ... - ["Name", "Documentation", "Message or Status"] - ] - }, - ... other test suites ... - ] - - .. note:: The header of the table with TCs is at the and of the table. - """ - - def __init__(self, args): - self.formatting = args.formatting - - def visit_suite(self, suite): - """Implements traversing through the suite and its direct children. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - - if self.start_suite(suite) is not False: - if suite.tests: - sys.stdout.write(',"tests":[') - else: - sys.stdout.write('},') - - suite.suites.visit(self) - suite.tests.visit(self) - - if suite.tests: - if "ndrdisc" in suite.longname.lower(): - hdr = '["Name","Documentation","Message"]' - else: - hdr = '["Name","Documentation","Status"]' - sys.stdout.write(hdr + ']},') - - self.end_suite(suite) - - def start_suite(self, suite): - """Called when suite starts. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - - level = len(suite.longname.split(".")) - sys.stdout.write('{') - sys.stdout.write('"level":"' + str(level) + '",') - sys.stdout.write('"title":"' + suite.name.replace('"', "'") + '",') - sys.stdout.write('"doc":"' + suite.doc.replace('"', "'"). - replace('\n', ' ').replace('\r', ''). - replace('*[', ' |br| *[') + '"') - - def end_suite(self, suite): - """Called when suite ends. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - pass - - def visit_test(self, test): - """Implements traversing through the test. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - if self.start_test(test) is not False: - self.end_test(test) - - def start_test(self, test): - """Called when test starts. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - - name = test.name.replace('"', "'") - doc = test.doc.replace('"', "'").replace('\n', ' ').replace('\r', '').\ - replace('[', ' |br| [') - if any("NDRPDRDISC" in tag for tag in test.tags): - msg = test.message.replace('\n', ' |br| ').replace('\r', ''). \ - replace('"', "'") - - sys.stdout.write('["' + name + '","' + doc + '","' + msg + '"]') - else: - sys.stdout.write( - '["' + name + '","' + doc + '","' + test.status + '"]') - - def end_test(self, test): - """Called when test ends. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - sys.stdout.write(',') - - -def do_html(data, args): - """Generation of a html file from json data. 
- - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - - shift = int(args.level) - start = int(args.start) - - output = open(args.output, 'w') - - output.write('<html>') - for item in data: - if int(item['level']) < start: - continue - level = str(int(item['level']) - start + shift) - output.write('<h' + level + '>' + item['title'].lower() + - '</h' + level + '>') - output.write('<p>' + re.sub(r"(\*)(.*?)(\*)", r"<b>\2</b>", item['doc'], - 0, flags=re.MULTILINE). - replace(' |br| ', '<br>') + '</p>') - try: - output.write(gen_html_table(item['tests'])) - except KeyError: - continue - output.write('</html>') - output.close() - - -def gen_html_table(data): - """Generates a table with TCs' names, documentation and messages / statuses - in html format. There is no css used. - - :param data: Json data representing a table with TCs. - :type data: str - :returns: Table with TCs' names, documentation and messages / statuses in - html format. - :rtype: str - """ - - table = '<table width=100% border=1><tr>' - table += '<th width=30%>' + data[-1][0] + '</th>' - table += '<th width=50%>' + data[-1][1] + '</th>' - table += '<th width=20%>' + data[-1][2] + '</th></tr>' - - for item in data[0:-1]: - table += '<tr>' - for element in item: - table += '<td>' + element.replace(' |br| ', '<br>') + '</td>' - table += '</tr></table>' - - return table - - -def do_rst(data, args): - """Generation of a rst file from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - - hdrs = ['=', '-', '`', "'", '.', '~', '*', '+', '^'] - shift = int(args.level) - start = int(args.start) - - output = open(args.output, 'w') - output.write('\n.. |br| raw:: html\n\n <br />\n\n') - - if (args.title): - output.write(args.title + '\n' + - hdrs[shift - 1] * - len(args.title) + '\n\n') - - for item in data: - if int(item['level']) < start: - continue - if 'ndrchk' in item['title'].lower(): - continue - output.write(item['title'].lower() + '\n' + - hdrs[int(item['level']) - start + shift] * - len(item['title']) + '\n\n') - output.write(item['doc'].replace('*', '**').replace('|br|', '\n\n -') + - '\n\n') - try: - output.write(gen_rst_table(item['tests']) + '\n\n') - except KeyError: - continue - output.close() - - -def gen_rst_table(data): - """Generates a table with TCs' names, documentation and messages / statuses - in rst format. - - :param data: Json data representing a table with TCs. - :type data: str - :returns: Table with TCs' names, documentation and messages / statuses in - rst format. - :rtype: str - """ - - table = [] - # max size of each column - lengths = map(max, zip(*[[len(str(elt)) for elt in item] for item in data])) - - start_of_line = '| ' - vert_separator = ' | ' - end_of_line = ' |' - line_marker = '-' - - meta_template = vert_separator.join(['{{{{{0}:{{{0}}}}}}}'.format(i) - for i in range(len(lengths))]) - template = '{0}{1}{2}'.format(start_of_line, meta_template.format(*lengths), - end_of_line) - # determine top/bottom borders - to_separator = string.maketrans('| ', '+-') - start_of_line = start_of_line.translate(to_separator) - vert_separator = vert_separator.translate(to_separator) - end_of_line = end_of_line.translate(to_separator) - separator = '{0}{1}{2}'.format(start_of_line, vert_separator. 
- join([x * line_marker for x in lengths]), - end_of_line) - # determine header separator - th_separator_tr = string.maketrans('-', '=') - start_of_line = start_of_line.translate(th_separator_tr) - line_marker = line_marker.translate(th_separator_tr) - vertical_separator = vert_separator.translate(th_separator_tr) - end_of_line = end_of_line.translate(th_separator_tr) - th_separator = '{0}{1}{2}'.format(start_of_line, vertical_separator. - join([x * line_marker for x in lengths]), - end_of_line) - # prepare table - table.append(separator) - # set table header - titles = data[-1] - table.append(template.format(*titles)) - table.append(th_separator) - # generate table rows - for item in data[0:-2]: - desc = re.sub(r'(^ \|br\| )', r'', item[1]) - table.append(template.format(item[0], desc, item[2])) - table.append(separator) - desc = re.sub(r'(^ \|br\| )', r'', data[-2][1]) - table.append(template.format(data[-2][0], desc, data[-2][2])) - table.append(separator) - return '\n'.join(table) - - -def do_md(data, args): - """Generation of a rst file from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - raise NotImplementedError("Export to 'md' format is not implemented.") - - -def do_wiki(data, args): - """Generation of a wiki page from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - - shift = int(args.level) - start = int(args.start) - - output = open(args.output, 'w') - - for item in data: - if int(item['level']) < start: - continue - if 'ndrchk' in item['title'].lower(): - continue - mark = "=" * (int(item['level']) - start + shift) + ' ' - output.write(mark + item['title'].lower() + mark + '\n') - output.write(item['doc'].replace('*', "'''").replace('|br|', '\n*') + - '\n') - try: - output.write(gen_wiki_table(item['tests']) + '\n\n') - except KeyError: - continue - output.close() - - -def gen_wiki_table(data): - """Generates a table with TCs' names, documentation and messages / statuses - in wiki format. - - :param data: Json data representing a table with TCs. - :type data: str - :returns: Table with TCs' names, documentation and messages / statuses in - wiki format. - :rtype: str - """ - - table = '{| class="wikitable"\n' - header = "" - for item in data[-1]: - header += '!{}\n'.format(item) - table += header - for item in data[0:-1]: - desc = re.sub(r'(^ \|br\| )', r'', item[1]).replace(' |br| ', '\n\n') - msg = item[2].replace(' |br| ', '\n\n') - table += '|-\n|{}\n|{}\n|{}\n'.format(item[0], desc, msg) - table += '|}\n' - - return table - - -def process_robot_file(args): - """Process data from robot output.xml file and generate defined file type. - - :param args: Parsed arguments. - :type args: ArgumentParser - :return: Nothing. 
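
One implementation detail of process_robot_file (whose body follows) worth spelling out: the visitor streams JSON fragments to a redirected sys.stdout, each suite/test ending with a trailing comma, so the array is terminated with a dummy '{}' element that json.load accepts and pop(-1) then discards. A self-contained sketch of that pattern with Robot Framework taken out of the picture (function and file names are made up):

import json
import sys

def stream_items_as_json(items, path):
    """Write items the way the deleted script does: redirect stdout to a
    file, emit each element with a trailing comma, close the array with a
    dummy {} element, then drop that element after loading."""
    old_stdout = sys.stdout
    sys.stdout = open(path, 'w')
    sys.stdout.write('[')
    for item in items:
        sys.stdout.write(json.dumps(item) + ',')
    sys.stdout.write('{}]')
    sys.stdout.close()
    sys.stdout = old_stdout

    with open(path, 'r') as json_file:
        data = json.load(json_file)
    data.pop(-1)  # discard the dummy terminator
    return data

# stream_items_as_json([{"title": "a"}, {"title": "b"}], "out.json")
# -> [{'title': 'a'}, {'title': 'b'}]
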
- """ - - old_sys_stdout = sys.stdout - sys.stdout = open(args.output + '.json', 'w') - - result = ExecutionResult(args.input) - checker = ExecutionChecker(args) - - sys.stdout.write('[') - result.visit(checker) - sys.stdout.write('{}]') - sys.stdout.close() - sys.stdout = old_sys_stdout - - with open(args.output + '.json', 'r') as json_file: - data = json.load(json_file) - data.pop(-1) - - if args.regex: - results = list() - regex = re.compile(args.regex) - for item in data: - if re.search(regex, item['title'].lower()): - results.append(item) - else: - results = data - - if args.formatting == 'rst': - do_rst(results, args) - elif args.formatting == 'wiki': - do_wiki(results, args) - elif args.formatting == 'html': - do_html(results, args) - elif args.formatting == 'md': - do_md(results, args) - - -def parse_args(): - """Parse arguments from cmd line. - - :return: Parsed arguments. - :rtype ArgumentParser - """ - - parser = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse. - RawDescriptionHelpFormatter) - parser.add_argument("-i", "--input", - required=True, - type=argparse.FileType('r'), - help="Robot XML log file") - parser.add_argument("-o", "--output", - type=str, - required=True, - help="Output file") - parser.add_argument("-f", "--formatting", - required=True, - choices=['html', 'wiki', 'rst', 'md'], - help="Output file format") - parser.add_argument("-s", "--start", - type=int, - default=1, - help="The first level to be taken from xml file") - parser.add_argument("-l", "--level", - type=int, - default=1, - help="The level of the first chapter in generated file") - parser.add_argument("-r", "--regex", - type=str, - default=None, - help="Regular expression used to select test suites. " - "If None, all test suites are selected.") - parser.add_argument("-t", "--title", - type=str, - default=None, - help="Title of the output.") - - return parser.parse_args() - - -if __name__ == "__main__": - sys.exit(process_robot_file(parse_args())) diff --git a/resources/tools/report_gen/run_robot_json_data.py b/resources/tools/report_gen/run_robot_json_data.py deleted file mode 100755 index 708836aef4..0000000000 --- a/resources/tools/report_gen/run_robot_json_data.py +++ /dev/null @@ -1,331 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2017 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Script extracts required data from robot framework output file (output.xml) and -saves it in JSON format. 
Its structure is: - -{ - "metadata": { - "vdevice": "VPP version", - "data-length": int - }, - "data": { - "ID": { - "name": "Test name", - "parent": "Name of the parent of the test", - "tags": ["tag 1", "tag 2", "tag n"], - "type": "PDR" | "NDR", - "throughput": { - "value": int, - "unit": "pps" | "bps" | "percentage" - }, - "latency": { - "direction1": { - "100": { - "min": int, - "avg": int, - "max": int - }, - "50": { # Only for NDR - "min": int, - "avg": int, - "max": int - }, - "10": { # Only for NDR - "min": int, - "avg": int, - "max": int - } - }, - "direction2": { - "100": { - "min": int, - "avg": int, - "max": int - }, - "50": { # Only for NDR - "min": int, - "avg": int, - "max": int - }, - "10": { # Only for NDR - "min": int, - "avg": int, - "max": int - } - } - }, - "lossTolerance": "lossTolerance" # Only for PDR - }, - "ID" { - # next test - } - } -} - -.. note:: ID is the lowercase full path to the test. - -:Example: - -run_robot_json_data.py -i "output.xml" -o "data.json" -v "17.07-rc0~144" - -""" - -import argparse -import re -import sys -import json - -from robot.api import ExecutionResult, ResultVisitor - - -class ExecutionChecker(ResultVisitor): - """Class to traverse through the test suite structure. - - The functionality implemented in this class generates a json structure. - """ - - REGEX_RATE = re.compile(r'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)\s(\w+)') - - REGEX_LAT_NDR = re.compile(r'^[\D\d]*' - r'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\',' - r'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]\s\n' - r'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\',' - r'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]\s\n' - r'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\',' - r'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]') - - REGEX_LAT_PDR = re.compile(r'^[\D\d]*' - r'LAT_\d+%PDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\',' - r'\s\'(-?\d+\/-?\d+\/-?\d+)\'\][\D\d]*') - - REGEX_TOLERANCE = re.compile(r'^[\D\d]*LOSS_ACCEPTANCE:\s(\d*\.\d*)\s' - r'[\D\d]*') - - def __init__(self, **metadata): - """Initialisation. - - :param metadata: Key-value pairs to be included to "metadata" part of - JSON structure. - :type metadata: dict - """ - self._data = { - "metadata": { - }, - "data": { - } - } - - for key, val in metadata.items(): - self._data["metadata"][key] = val - - @property - def data(self): - return self._data - - def _get_latency(self, msg, test_type): - """Get the latency data from the test message. - - :param msg: Message to be parsed. - :param test_type: Type of the test - NDR or PDR. - :type msg: str - :type test_type: str - :returns: Latencies parsed from the message. 
- :rtype: dict - """ - - if test_type == "NDR": - groups = re.search(self.REGEX_LAT_NDR, msg) - groups_range = range(1, 7) - elif test_type == "PDR": - groups = re.search(self.REGEX_LAT_PDR, msg) - groups_range = range(1, 3) - else: - return {} - - latencies = list() - for idx in groups_range: - try: - lat = [int(item) for item in str(groups.group(idx)).split('/')] - except (AttributeError, ValueError): - lat = [-1, -1, -1] - latencies.append(lat) - - keys = ("min", "avg", "max") - latency = { - "direction1": { - }, - "direction2": { - } - } - - latency["direction1"]["100"] = dict(zip(keys, latencies[0])) - latency["direction2"]["100"] = dict(zip(keys, latencies[1])) - if test_type == "NDR": - latency["direction1"]["50"] = dict(zip(keys, latencies[2])) - latency["direction2"]["50"] = dict(zip(keys, latencies[3])) - latency["direction1"]["10"] = dict(zip(keys, latencies[4])) - latency["direction2"]["10"] = dict(zip(keys, latencies[5])) - - return latency - - def visit_suite(self, suite): - """Implements traversing through the suite and its direct children. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - if self.start_suite(suite) is not False: - suite.suites.visit(self) - suite.tests.visit(self) - self.end_suite(suite) - - def start_suite(self, suite): - """Called when suite starts. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - pass - - def end_suite(self, suite): - """Called when suite ends. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - pass - - def visit_test(self, test): - """Implements traversing through the test. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - if self.start_test(test) is not False: - self.end_test(test) - - def start_test(self, test): - """Called when test starts. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - - tags = [str(tag) for tag in test.tags] - if test.status == "PASS" and "NDRPDRDISC" in tags: - - if "NDRDISC" in tags: - test_type = "NDR" - elif "PDRDISC" in tags: - test_type = "PDR" - else: - return - - try: - rate_value = str(re.search( - self.REGEX_RATE, test.message).group(1)) - except AttributeError: - rate_value = "-1" - try: - rate_unit = str(re.search( - self.REGEX_RATE, test.message).group(2)) - except AttributeError: - rate_unit = "-1" - - test_result = dict() - test_result["name"] = test.name.lower() - test_result["parent"] = test.parent.name.lower() - test_result["tags"] = tags - test_result["type"] = test_type - test_result["throughput"] = dict() - test_result["throughput"]["value"] = int(rate_value.split('.')[0]) - test_result["throughput"]["unit"] = rate_unit - test_result["latency"] = self._get_latency(test.message, test_type) - if test_type == "PDR": - test_result["lossTolerance"] = str(re.search( - self.REGEX_TOLERANCE, test.message).group(1)) - - self._data["data"][test.longname.lower()] = test_result - - def end_test(self, test): - """Called when test ends. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - pass - - -def parse_tests(args): - """Process data from robot output.xml file and return JSON data. - - :param args: Parsed arguments. - :type args: ArgumentParser - :returns: JSON data structure. - :rtype: dict - """ - - result = ExecutionResult(args.input) - checker = ExecutionChecker(vdevice=args.vdevice) - result.visit(checker) - - return checker.data - - -def parse_args(): - """Parse arguments from cmd line. 
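
Stepping back to the rate parsing in start_test above: REGEX_RATE pulls the throughput value and its unit out of the FINAL_RATE line of the test message, and start_test then truncates the value to an integer. A small illustration (the sample message text is made up):

import re

# Same pattern as the deleted script's REGEX_RATE.
REGEX_RATE = re.compile(r'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)\s(\w+)')

# Hypothetical test message; real messages carry more surrounding text.
msg = "Searched rates ...\nFINAL_RATE: 11535135.0 pps"

match = re.search(REGEX_RATE, msg)
rate_value = str(match.group(1))   # '11535135.0'
rate_unit = str(match.group(2))    # 'pps'

# As in start_test, the throughput is stored as an integer with its unit.
throughput = {"value": int(rate_value.split('.')[0]), "unit": rate_unit}
assert throughput == {"value": 11535135, "unit": "pps"}
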
- - :returns: Parsed arguments. - :rtype: ArgumentParser - """ - - parser = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse. - RawDescriptionHelpFormatter) - parser.add_argument("-i", "--input", - required=True, - type=argparse.FileType('r'), - help="Robot XML log file.") - parser.add_argument("-o", "--output", - required=True, - type=argparse.FileType('w'), - help="JSON output file") - parser.add_argument("-v", "--vdevice", - required=False, - default="", - type=str, - help="VPP version") - - return parser.parse_args() - - -def main(): - """Main function.""" - - args = parse_args() - json_data = parse_tests(args) - json_data["metadata"]["data-length"] = len(json_data["data"]) - json.dump(json_data, args.output) - -if __name__ == "__main__": - sys.exit(main()) diff --git a/resources/tools/report_gen/run_robot_teardown_data.py b/resources/tools/report_gen/run_robot_teardown_data.py deleted file mode 100755 index df2aae5769..0000000000 --- a/resources/tools/report_gen/run_robot_teardown_data.py +++ /dev/null @@ -1,635 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2017 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Script extracts interested data (name, VAT command history or table from Show -Runtime command) from robot framework output file (output.xml) and prints in -specified format (wiki, html, rst) to defined output file. - -Supported formats: - - html - - rst - - wiki - -:TODO: - - md - -:Example: - -run_robot_teardown_data.py -i "output.xml" -o "tests.rst" -d "VAT_H" -f "rst" --s 3 -l 2 - -The example reads the VAT command history data from "output.xml", writes -the output to "tests.rst" in rst format. It will start on the 3rd level of xml -structure and the generated document hierarchy will start on the 2nd level. - -:Example: - -run_robot_teardown_data.py -i "output.xml" -o "tests.rst" -f "rst" -d "SH_RUN" - -r "(.*)(lisp)(.*)" - -The example reads the data from "output.xml", writes the output to "tests.rst" -in rst format. It will start on the 1st level of xml structure and the generated -document hierarchy will start on the 1st level (default values). -Only the test suites which match the given regular expression are processed. -""" - -import argparse -import re -import sys -import json -import string - -from robot.api import ExecutionResult, ResultVisitor - - -class ExecutionChecker(ResultVisitor): - """Class to traverse through the test suite structure. - - The functionality implemented in this class generates a json file. Its - structure is: - - [ - { - "level": "Level of the suite, type: str", - "title": "Title of the suite, type: str", - "doc": "Documentation of the suite, type: str", - "table": [ - ["TC name", "VAT history or show runtime"], - ["TC name", "VAT history or show runtime"], - ... other test cases ... - ["Name","VAT command history or VPP operational data"] - ] - }, - ... other test suites ... - ] - - .. note:: The header of the table with TCs is at the end of the table. 
- """ - - def __init__(self, args): - self.formatting = args.formatting - self.data = args.data - self.tagin = " |prein| " - self.tagout = " |preout| " - if self.data == "VAT_H": - self.lookup_kw = "Show Vat History On All Duts" - self.column_name = "VPP API Test (VAT) Commands History - " \ - "Commands Used Per Test Case" - elif self.data == "SH_RUN": - self.lookup_kw = "Vpp Show Runtime" - self.column_name = "VPP Operational Data - Outputs of " \ - "'show runtime' at NDR packet rate" - else: - raise ValueError("{0} look-up not implemented.".format(self.data)) - self.lookup_kw_nr = 0 - self.lookup_msg_nr = 0 - - def visit_suite(self, suite): - """Implements traversing through the suite and its direct children. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - - if self.start_suite(suite) is not False: - if suite.tests: - sys.stdout.write(',"tests":[') - else: - sys.stdout.write('},') - - suite.suites.visit(self) - suite.tests.visit(self) - - if suite.tests: - hdr = '["Name","' + self.column_name + '"]' - sys.stdout.write(hdr + ']},') - - self.end_suite(suite) - - def start_suite(self, suite): - """Called when suite starts. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - - level = len(suite.longname.split(".")) - sys.stdout.write('{') - sys.stdout.write('"level":"' + str(level) + '",') - sys.stdout.write('"title":"' + suite.name.replace('"', "'") + '",') - sys.stdout.write('"doc":"' + suite.doc.replace('"', "'"). - replace('\n', ' ').replace('\r', ''). - replace('*[', ' |br| *[') + '"') - - def end_suite(self, suite): - """Called when suite ends. - - :param suite: Suite to process. - :type suite: Suite - :returns: Nothing. - """ - pass - - def visit_test(self, test): - """Implements traversing through the test. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - if self.start_test(test) is not False: - test.keywords.visit(self) - self.end_test(test) - - def start_test(self, test): - """Called when test starts. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - - name = test.name.replace('"', "'") - sys.stdout.write('["' + name + '","' + self.tagin) - - def end_test(self, test): - """Called when test ends. - - :param test: Test to process. - :type test: Test - :returns: Nothing. - """ - sys.stdout.write(self.tagout + '"],') - - def visit_keyword(self, kw): - """Implements traversing through the keyword and its child keywords. - - :param kw: Keyword to process. - :type kw: Keyword - :returns: Nothing. - """ - if self.start_keyword(kw) is not False: - self.end_keyword(kw) - - def start_keyword(self, kw): - """Called when keyword starts. Default implementation does nothing. - - :param kw: Keyword to process. - :type kw: Keyword - :returns: Nothing. - """ - try: - if kw.type == "teardown": - self.lookup_kw_nr = 0 - self.visit_teardown_kw(kw) - except AttributeError: - pass - - def end_keyword(self, kw): - """Called when keyword ends. Default implementation does nothing. - - :param kw: Keyword to process. - :type kw: Keyword - :returns: Nothing. - """ - pass - - def visit_teardown_kw(self, kw): - """Implements traversing through the teardown keyword and its child - keywords. - - :param kw: Keyword to process. - :type kw: Keyword - :returns: Nothing. 
- """ - for keyword in kw.keywords: - if self.start_teardown_kw(keyword) is not False: - self.visit_teardown_kw(keyword) - self.end_teardown_kw(keyword) - - def start_teardown_kw(self, kw): - """Called when teardown keyword starts. Default implementation does - nothing. - - :param kw: Keyword to process. - :type kw: Keyword - :returns: Nothing. - """ - if kw.name.count(self.lookup_kw): - self.lookup_kw_nr += 1 - self.lookup_msg_nr = 0 - kw.messages.visit(self) - - def end_teardown_kw(self, kw): - """Called when keyword ends. Default implementation does nothing. - - :param kw: Keyword to process. - :type kw: Keyword - :returns: Nothing. - """ - pass - - def visit_message(self, msg): - """Implements visiting the message. - - :param msg: Message to process. - :type msg: Message - :returns: Nothing. - """ - if self.start_message(msg) is not False: - self.end_message(msg) - - def start_message(self, msg): - """Called when message starts. Default implementation does nothing. - - :param msg: Message to process. - :type msg: Message - :returns: Nothing. - """ - if self.data == "VAT_H": - self.vat_history(msg) - elif self.data == "SH_RUN": - self.show_run(msg) - - def end_message(self, msg): - """Called when message ends. Default implementation does nothing. - - :param msg: Message to process. - :type msg: Message - :returns: Nothing. - """ - pass - - def vat_history(self, msg): - """Called when extraction of VAT command history is required. - - :param msg: Message to process. - :type msg: Message - :returns: Nothing. - """ - if msg.message.count("VAT command history:"): - self.lookup_msg_nr += 1 - text = re.sub("[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3}.[0-9]{1,3} " - "VAT command history:", "", msg.message, count=1).\ - replace('\n', ' |br| ').replace('\r', '').replace('"', "'") - if self.lookup_msg_nr > 1: - sys.stdout.write(" |br| ") - sys.stdout.write("*DUT" + str(self.lookup_msg_nr) + ":*" + text) - - def show_run(self, msg): - """Called when extraction of VPP operational data (output of CLI command - Show Runtime) is required. - - :param msg: Message to process. - :type msg: Message - :returns: Nothing. - """ - if msg.message.count("vat# Thread "): - self.lookup_msg_nr += 1 - text = msg.message.replace("vat# ", "").\ - replace("return STDOUT ", "").replace('\n', ' |br| ').\ - replace('\r', '').replace('"', "'") - if self.lookup_msg_nr == 1: - sys.stdout.write("*DUT" + str(self.lookup_kw_nr) + - ":* |br| " + text) - - -def do_html(data, args): - """Generation of a html file from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - - shift = int(args.level) - start = int(args.start) - - output = open(args.output, 'w') - - output.write('<html>') - for item in data: - if int(item['level']) < start: - continue - level = str(int(item['level']) - start + shift) - output.write('<h' + level + '>' + item['title'].lower() + - '</h' + level + '>') - output.write('<p>' + re.sub(r"(\*)(.*?)(\*)", r"<b>\2</b>", item['doc'], - 0, flags=re.MULTILINE). - replace(' |br| ', '<br>') + '</p>') - try: - output.write(gen_html_table(item['tests'])) - except KeyError: - continue - output.write('</html>') - output.close() - - -def gen_html_table(data): - """Generates a table with TCs' names and VAT command histories / VPP - operational data in html format. There is no css used. - - :param data: Json data representing a table with TCs. 
- :type data: str - :returns: Table with TCs' names and VAT command histories / VPP operational - data in html format. - :rtype: str - """ - - table = '<table width=100% border=1><tr>' - table += '<th width=30%>' + data[-1][0] + '</th>' - table += '<th width=70%>' + data[-1][1] + '</th></tr>' - - for item in data[0:-1]: - table += '<tr>' - for element in item: - table += '<td>' + re.sub(r"(\*)(.*?)(\*)", r"<b>\2</b>", element, - 0, flags=re.MULTILINE).\ - replace(' |br| ', '<br>').replace(' |prein| ', '<pre>').\ - replace(' |preout| ', '</pre>') + '</td>' - table += '</tr></table>' - - return table - - -def do_rst(data, args): - """Generation of a rst file from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - - hdrs = ['=', '-', '`', "'", '.', '~', '*', '+', '^'] - shift = int(args.level) - start = int(args.start) - - output = open(args.output, 'w') - output.write('\n.. |br| raw:: html\n\n <br />\n\n') - output.write('\n.. |prein| raw:: html\n\n <pre>\n\n') - output.write('\n.. |preout| raw:: html\n\n </pre>\n\n') - - if args.title: - output.write(args.title + '\n' + - hdrs[shift - 1] * - len(args.title) + '\n\n') - - for item in data: - if int(item['level']) < start: - continue - if 'ndrchk' in item['title'].lower(): - continue - output.write(item['title'].lower() + '\n' + - hdrs[int(item['level']) - start + shift] * - len(item['title']) + '\n\n') - output.write(item['doc'].replace('*', '**').replace('|br|', '\n\n -') + - '\n\n') - try: - test_set = list() - for test in item['tests']: - test_data = list() - test_data.append(test[0]) - test_data.append(test[1].replace('*', '**')) - test_set.append(test_data) - output.write(gen_rst_table(test_set) + '\n\n') - except KeyError: - continue - output.close() - - -def gen_rst_table(data): - """Generates a table with TCs' names and VAT command histories / VPP - operational data in rst format. - - :param data: Json data representing a table with TCs. - :type data: str - :returns: Table with TCs' names and VAT command histories / VPP operational - data in rst format. - :rtype: str - """ - - table = [] - # max size of each column - lengths = map(max, zip(*[[len(str(elt)) for elt in item] for item in data])) - - start_of_line = '| ' - vert_separator = ' | ' - end_of_line = ' |' - line_marker = '-' - - meta_template = vert_separator.join(['{{{{{0}:{{{0}}}}}}}'.format(i) - for i in range(len(lengths))]) - template = '{0}{1}{2}'.format(start_of_line, meta_template.format(*lengths), - end_of_line) - # determine top/bottom borders - to_separator = string.maketrans('| ', '+-') - start_of_line = start_of_line.translate(to_separator) - vert_separator = vert_separator.translate(to_separator) - end_of_line = end_of_line.translate(to_separator) - separator = '{0}{1}{2}'.format(start_of_line, vert_separator. - join([x * line_marker for x in lengths]), - end_of_line) - # determine header separator - th_separator_tr = string.maketrans('-', '=') - start_of_line = start_of_line.translate(th_separator_tr) - line_marker = line_marker.translate(th_separator_tr) - vertical_separator = vert_separator.translate(th_separator_tr) - end_of_line = end_of_line.translate(th_separator_tr) - th_separator = '{0}{1}{2}'.format(start_of_line, vertical_separator. 
- join([x * line_marker for x in lengths]), - end_of_line) - # prepare table - table.append(separator) - # set table header - titles = data[-1] - table.append(template.format(*titles)) - table.append(th_separator) - # generate table rows - for item in data[0:-2]: - table.append(template.format(item[0], item[1])) - table.append(separator) - table.append(template.format(data[-2][0], data[-2][1])) - table.append(separator) - return '\n'.join(table) - - -def do_md(data, args): - """Generation of a rst file from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - raise NotImplementedError("Export to 'md' format is not implemented.") - - -def do_wiki(data, args): - """Generation of a wiki page from json data. - - :param data: List of suites from json file. - :param args: Parsed arguments. - :type data: list of dict - :type args: ArgumentParser - :returns: Nothing. - """ - - shift = int(args.level) - start = int(args.start) - - output = open(args.output, 'w') - - for item in data: - if int(item['level']) < start: - continue - if 'ndrchk' in item['title'].lower(): - continue - mark = "=" * (int(item['level']) - start + shift) + ' ' - output.write(mark + item['title'].lower() + mark + '\n') - try: - output.write(gen_wiki_table(item['tests'], mark) + - '\n\n') - except KeyError: - continue - output.close() - - -def gen_wiki_table(data, mark): - """Generates a table with TCs' names and VAT command histories / VPP - operational data in wiki format. - - :param data: Json data representing a table with TCs. - :type data: str - :returns: Table with TCs' names and VAT command histories / VPP operational - data in wiki format. - :rtype: str - """ - - table = '{| class="wikitable"\n' - header = "" - mark = mark[0:-2] + "= " - for item in data[-1]: - header += '!{}\n'.format(item) - table += header - for item in data[0:-1]: - msg = item[1].replace('*', mark).replace(' |br| ', '\n\n').\ - replace(' |prein| ', '<pre>').replace(' |preout| ', '</pre>') - table += '|-\n|{}\n|{}\n'.format(item[0], msg) - table += '|}\n' - - return table - - -def process_robot_file(args): - """Process data from robot output.xml file and generate defined file type. - - :param args: Parsed arguments. - :type args: ArgumentParser - :return: Nothing. - """ - - old_sys_stdout = sys.stdout - sys.stdout = open(args.output + '.json', 'w') - - result = ExecutionResult(args.input) - checker = ExecutionChecker(args) - - sys.stdout.write('[') - result.visit(checker) - sys.stdout.write('{}]') - sys.stdout.close() - sys.stdout = old_sys_stdout - - with open(args.output + '.json', 'r') as json_file: - data = json.load(json_file) - data.pop(-1) - - if args.regex: - results = list() - regex = re.compile(args.regex) - for item in data: - if re.search(regex, item['title'].lower()): - results.append(item) - else: - results = data - - if args.formatting == 'rst': - do_rst(results, args) - elif args.formatting == 'wiki': - do_wiki(results, args) - elif args.formatting == 'html': - do_html(results, args) - elif args.formatting == 'md': - do_md(results, args) - - -def parse_args(): - """Parse arguments from cmd line. - - :return: Parsed arguments. - :rtype ArgumentParser - """ - - parser = argparse.ArgumentParser(description=__doc__, - formatter_class=argparse. 
- RawDescriptionHelpFormatter) - parser.add_argument("-i", "--input", - required=True, - type=argparse.FileType('r'), - help="Robot XML log file") - parser.add_argument("-o", "--output", - type=str, - required=True, - help="Output file") - parser.add_argument("-d", "--data", - type=str, - required=True, - help="Required data: VAT_H (VAT history), SH_RUN " - "(show runtime output)") - parser.add_argument("-f", "--formatting", - required=True, - choices=['html', 'wiki', 'rst', 'md'], - help="Output file format") - parser.add_argument("-s", "--start", - type=int, - default=1, - help="The first level to be taken from xml file") - parser.add_argument("-l", "--level", - type=int, - default=1, - help="The level of the first chapter in generated file") - parser.add_argument("-r", "--regex", - type=str, - default=None, - help="Regular expression used to select test suites. " - "If None, all test suites are selected.") - parser.add_argument("-t", "--title", - type=str, - default=None, - help="Title of the output.") - - return parser.parse_args() - - -if __name__ == "__main__": - sys.exit(process_robot_file(parse_args())) |
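
All three deleted run_robot_*.py scripts are built on the same Robot Framework visitor pattern: instantiate ExecutionResult on output.xml and hand it a ResultVisitor subclass. A stripped-down sketch of that skeleton, independent of the deleted code (class name and file path are placeholders):

#!/usr/bin/python
"""Minimal sketch of the ResultVisitor pattern shared by the deleted
run_robot_*.py scripts; it only counts passed and failed tests."""

from robot.api import ExecutionResult, ResultVisitor


class PassFailCounter(ResultVisitor):
    """Visit every test in output.xml and tally its status."""

    def __init__(self):
        self.passed = 0
        self.failed = 0

    def visit_test(self, test):
        if test.status == "PASS":
            self.passed += 1
        else:
            self.failed += 1


if __name__ == "__main__":
    result = ExecutionResult("output.xml")  # placeholder path
    counter = PassFailCounter()
    result.visit(counter)
    print("passed: {0}, failed: {1}".format(counter.passed, counter.failed))
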