Diffstat (limited to 'resources/tools/presentation')
-rw-r--r--  resources/tools/presentation/doc/pal_lld.rst        |  29
-rw-r--r--  resources/tools/presentation/generator_plots.py     |  73
-rw-r--r--  resources/tools/presentation/input_data_parser.py   |  52
-rw-r--r--  resources/tools/presentation/pal.py                 |  66
-rw-r--r--  resources/tools/presentation/specification.yaml     | 150
5 files changed, 316 insertions(+), 54 deletions(-)
diff --git a/resources/tools/presentation/doc/pal_lld.rst b/resources/tools/presentation/doc/pal_lld.rst
index 64bde3e5fe..7ca3ad43d5 100644
--- a/resources/tools/presentation/doc/pal_lld.rst
+++ b/resources/tools/presentation/doc/pal_lld.rst
@@ -840,6 +840,35 @@ latency in a box chart):
width: 700
height: 1000
+The structure of the section "Plot" is as follows (example of a plot showing
+VPP HTTP server performance in a box chart, using the pre-defined data set
+"plot-vpp-http-server-performance" and the plot layout "plot-cps"):
+
+::
+
+ -
+ type: "plot"
+ title: "VPP HTTP Server Performance"
+ algorithm: "plot_http_server_performance_box"
+ output-file-type: ".html"
+ output-file: "{DIR[STATIC,VPP]}/http-server-performance-cps"
+ data:
+ "plot-vpp-http-server-performance"
+ # Keep this formatting, the filter is enclosed with " (quotation mark) and
+ # each tag is enclosed with ' (apostrophe).
+ filter: "'HTTP' and 'TCP_CPS'"
+ parameters:
+ - "result"
+ - "name"
+ traces:
+ hoverinfo: "x+y"
+ boxpoints: "outliers"
+ whiskerwidth: 0
+ layout:
+ title: "VPP HTTP Server Performance"
+ layout:
+ "plot-cps"
+
Section: file
'''''''''''''
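
The "filter" item in the example above is a boolean expression over test tags. The
evaluation itself is done by InputData.filter_data(); the helper below is only an
illustrative sketch of the idea (a hypothetical function, not the PAL implementation),
assuming each quoted tag can be rewritten into a membership test before the expression
is evaluated:

    import re

    def evaluate_tag_filter(filter_expr, tags):
        """Evaluate a tag filter such as "'HTTP' and 'TCP_CPS'" against a set of tags.

        Each quoted tag is rewritten into a membership test, so the boolean
        operators (and / or / not) in the expression keep their usual meaning.
        Hypothetical helper, for illustration only.
        """
        # "'HTTP' and 'TCP_CPS'" -> "('HTTP' in tags) and ('TCP_CPS' in tags)"
        cond = re.sub(r"'([^']+)'", r"('\1' in tags)", filter_expr)
        return eval(cond, {"__builtins__": None}, {"tags": set(tags)})

    # A test tagged HTTP, TCP and TCP_CPS matches the CPS plot, not the RPS one.
    print(evaluate_tag_filter("'HTTP' and 'TCP_CPS'", {"HTTP", "TCP", "TCP_CPS"}))  # True
    print(evaluate_tag_filter("'HTTP' and 'TCP_RPS'", {"HTTP", "TCP", "TCP_CPS"}))  # False
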
diff --git a/resources/tools/presentation/generator_plots.py b/resources/tools/presentation/generator_plots.py
index 66656679d0..ac77b3d425 100644
--- a/resources/tools/presentation/generator_plots.py
+++ b/resources/tools/presentation/generator_plots.py
@@ -45,7 +45,7 @@ def generate_plots(spec, data):
def plot_performance_box(plot, input_data):
- """Generate the plot(s) with algorithm: table_detailed_test_results
+ """Generate the plot(s) with algorithm: plot_performance_box
specified in the specification file.
:param plot: Plot to generate.
@@ -318,3 +318,74 @@ def plot_throughput_speedup_analysis(plot, input_data):
return
logging.info(" Done.")
+
+
+def plot_http_server_performance_box(plot, input_data):
+ """Generate the plot(s) with algorithm: plot_http_server_performance_box
+ specified in the specification file.
+
+ :param plot: Plot to generate.
+ :param input_data: Data to process.
+ :type plot: pandas.Series
+ :type input_data: InputData
+ """
+
+ logging.info(" Generating the plot {0} ...".
+ format(plot.get("title", "")))
+
+ # Transform the data
+ data = input_data.filter_data(plot)
+ if data is None:
+ logging.error("No data.")
+ return
+
+ # Prepare the data for the plot
+ y_vals = dict()
+ for job in data:
+ for build in job:
+ for test in build:
+ if y_vals.get(test["name"], None) is None:
+ y_vals[test["name"]] = list()
+ try:
+ y_vals[test["name"]].append(test["result"]["value"])
+ except (KeyError, TypeError):
+ y_vals[test["name"]].append(None)
+
+ # Add None to the lists with missing data
+ max_len = 0
+ for val in y_vals.values():
+ if len(val) > max_len:
+ max_len = len(val)
+ for key, val in y_vals.items():
+ if len(val) < max_len:
+ val.extend([None for _ in range(max_len - len(val))])
+
+ # Add plot traces
+ traces = list()
+ df = pd.DataFrame(y_vals)
+ df.head()
+ for i, col in enumerate(df.columns):
+ name = "{0}. {1}".format(i + 1, col.lower().replace('-cps', '').
+ replace('-rps', ''))
+ traces.append(plgo.Box(x=[str(i + 1) + '.'] * len(df[col]),
+ y=df[col],
+ name=name,
+ **plot["traces"]))
+
+ try:
+ # Create plot
+ plpl = plgo.Figure(data=traces, layout=plot["layout"])
+
+ # Export Plot
+ logging.info(" Writing file '{0}{1}'.".
+ format(plot["output-file"], plot["output-file-type"]))
+ ploff.plot(plpl,
+ show_link=False, auto_open=False,
+ filename='{0}{1}'.format(plot["output-file"],
+ plot["output-file-type"]))
+ except PlotlyError as err:
+ logging.error(" Finished with error: {}".
+ format(str(err).replace("\n", " ")))
+ return
+
+ logging.info(" Done.")
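
The data-preparation step above can be illustrated with a small, self-contained example
(the test names and sample values are invented): result lists of unequal length are
padded with None so that they can be loaded into a pandas DataFrame and drawn as box
traces:

    import pandas as pd
    import plotly.graph_objs as plgo
    import plotly.offline as ploff

    # Hypothetical CPS samples collected from several builds; one test case
    # is missing a sample, so its list is shorter than the other one.
    y_vals = {
        "tc01-http-server-cps": [89000, 91000, 90500],
        "tc02-http-server-cps": [70000, 69500],
    }

    # Pad the shorter lists with None -- the same step the function above performs.
    max_len = max(len(vals) for vals in y_vals.values())
    for vals in y_vals.values():
        vals.extend([None] * (max_len - len(vals)))

    df = pd.DataFrame(y_vals)
    traces = [plgo.Box(y=df[col], name=col) for col in df.columns]
    ploff.plot(plgo.Figure(data=traces), filename="example-box.html",
               auto_open=False)
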
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 2351942377..e1763b97d2 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -171,6 +171,8 @@ class ExecutionChecker(ResultVisitor):
REGEX_VERSION = re.compile(r"(stdout: 'vat# vat# Version:)(\s*)(.*)")
+ REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s([0-9]*).*$')
+
def __init__(self, **metadata):
"""Initialisation.
@@ -416,34 +418,46 @@ class ExecutionChecker(ResultVisitor):
test_result["doc"] = replace(doc_str, ' |br| [', '[', maxreplace=1)
test_result["msg"] = test.message.replace('\n', ' |br| '). \
replace('\r', '').replace('"', "'")
- if test.status == "PASS" and "NDRPDRDISC" in tags:
+ if test.status == "PASS" and ("NDRPDRDISC" in tags or "TCP" in tags):
if "NDRDISC" in tags:
test_type = "NDR"
elif "PDRDISC" in tags:
test_type = "PDR"
+ elif "TCP" in tags: # Change to wrk?
+ test_type = "TCP"
else:
return
- try:
- rate_value = str(re.search(
- self.REGEX_RATE, test.message).group(1))
- except AttributeError:
- rate_value = "-1"
- try:
- rate_unit = str(re.search(
- self.REGEX_RATE, test.message).group(2))
- except AttributeError:
- rate_unit = "-1"
-
test_result["type"] = test_type
- test_result["throughput"] = dict()
- test_result["throughput"]["value"] = int(rate_value.split('.')[0])
- test_result["throughput"]["unit"] = rate_unit
- test_result["latency"] = self._get_latency(test.message, test_type)
- if test_type == "PDR":
- test_result["lossTolerance"] = str(re.search(
- self.REGEX_TOLERANCE, test.message).group(1))
+
+ if test_type in ("NDR", "PDR"):
+ try:
+ rate_value = str(re.search(
+ self.REGEX_RATE, test.message).group(1))
+ except AttributeError:
+ rate_value = "-1"
+ try:
+ rate_unit = str(re.search(
+ self.REGEX_RATE, test.message).group(2))
+ except AttributeError:
+ rate_unit = "-1"
+
+ test_result["throughput"] = dict()
+ test_result["throughput"]["value"] = \
+ int(rate_value.split('.')[0])
+ test_result["throughput"]["unit"] = rate_unit
+ test_result["latency"] = \
+ self._get_latency(test.message, test_type)
+ if test_type == "PDR":
+ test_result["lossTolerance"] = str(re.search(
+ self.REGEX_TOLERANCE, test.message).group(1))
+
+ elif test_type in ("TCP", ):
+ groups = re.search(self.REGEX_TCP, test.message)
+ test_result["result"] = dict()
+ test_result["result"]["value"] = int(groups.group(2))
+ test_result["result"]["unit"] = groups.group(1)
else:
test_result["status"] = test.status
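
For illustration, this is what REGEX_TCP extracts from a passing HTTP/TCP test message
(the message text below is a made-up example, not output from a real run); group(1)
supplies the unit and group(2) the value stored in test_result["result"]:

    import re

    REGEX_TCP = re.compile(r'Total\s(rps|cps|throughput):\s([0-9]*).*$')

    message = "Total cps: 93654 connections per second"  # hypothetical test message

    groups = re.search(REGEX_TCP, message)
    if groups is not None:  # re.search() returns None when the message does not match
        result = {"unit": groups.group(1),         # 'cps'
                  "value": int(groups.group(2))}   # 93654
        print(result)
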
diff --git a/resources/tools/presentation/pal.py b/resources/tools/presentation/pal.py
index 14a1937054..6d613e339c 100644
--- a/resources/tools/presentation/pal.py
+++ b/resources/tools/presentation/pal.py
@@ -83,39 +83,39 @@ def main():
logging.critical("Finished with error.")
sys.exit(1)
- #try:
- env = Environment(spec.environment, args.force)
- env.set_environment()
-
- if spec.is_debug:
- if spec.debug["input-format"] == "zip":
- unzip_files(spec)
- else:
- download_data_files(spec)
-
- prepare_static_content(spec)
-
- data = InputData(spec)
- data.read_data()
-
- generate_tables(spec, data)
- generate_plots(spec, data)
- generate_files(spec, data)
- generate_report(args.release, spec)
-
- logging.info("Successfully finished.")
-
- # except (KeyError, ValueError, PresentationError) as err:
- # logging.info("Finished with an error.")
- # logging.critical(str(err))
- # except Exception as err:
- # logging.info("Finished with an error.")
- # logging.critical(str(err))
- #
- # finally:
- # if spec is not None and not spec.is_debug:
- # clean_environment(spec.environment)
- # sys.exit(1)
+ try:
+ env = Environment(spec.environment, args.force)
+ env.set_environment()
+
+ if spec.is_debug:
+ if spec.debug["input-format"] == "zip":
+ unzip_files(spec)
+ else:
+ download_data_files(spec)
+
+ prepare_static_content(spec)
+
+ data = InputData(spec)
+ data.read_data()
+
+ generate_tables(spec, data)
+ generate_plots(spec, data)
+ generate_files(spec, data)
+ generate_report(args.release, spec)
+
+ logging.info("Successfully finished.")
+
+ except (KeyError, ValueError, PresentationError) as err:
+ logging.info("Finished with an error.")
+ logging.critical(str(err))
+ except Exception as err:
+ logging.info("Finished with an unexpected error.")
+ logging.critical(str(err))
+
+ finally:
+ if spec is not None and not spec.is_debug:
+ clean_environment(spec.environment)
+ sys.exit(1)
if __name__ == '__main__':
diff --git a/resources/tools/presentation/specification.yaml b/resources/tools/presentation/specification.yaml
index cb51e8c016..339bf5c2c8 100644
--- a/resources/tools/presentation/specification.yaml
+++ b/resources/tools/presentation/specification.yaml
@@ -87,6 +87,14 @@
-
type: "configuration"
data-sets:
+# TODO: Add the data sources
+ plot-vpp-http-server-performance:
+ csit-vpp-perf-1801-all:
+ - 1
+ - 2
+ - 3
+ - 4
+# TODO: Add the data sources
vpp-meltdown-impact:
csit-vpp-perf-1707-all:
- 9
@@ -96,6 +104,7 @@
- 11
- 12
- 13
+# TODO: Add the data sources
vpp-spectre-impact:
csit-vpp-perf-1707-all:
- 9
@@ -134,7 +143,7 @@
- 18
- 19
- 20
-# TODO:
+# TODO: Add the data sources
csit-vpp-perf-1801-all:
- 13
- 14
@@ -201,7 +210,93 @@
- 1
- 3
- 7
+
plot-layouts:
+
+ plot-cps:
+ xaxis:
+ autorange: True
+ autotick: False
+ fixedrange: False
+ gridcolor: "rgb(238, 238, 238)"
+ linecolor: "rgb(238, 238, 238)"
+ linewidth: 1
+ showgrid: True
+ showline: True
+ showticklabels: True
+ tickcolor: "rgb(238, 238, 238)"
+ tickmode: "linear"
+ title: "Indexed Test Cases"
+ zeroline: False
+ yaxis:
+ gridcolor: "rgb(238, 238, 238)"
+ hoverformat: ".4s"
+ linecolor: "rgb(238, 238, 238)"
+ linewidth: 1
+ range: []
+ rangemode: "tozero"
+ showgrid: True
+ showline: True
+ showticklabels: True
+ tickcolor: "rgb(238, 238, 238)"
+ title: "Connections Per Second [cps]"
+ zeroline: False
+ boxmode: "group"
+ boxgroupgap: 0.5
+ autosize: False
+ margin:
+ t: 50
+ b: 20
+ l: 50
+ r: 20
+ showlegend: True
+ legend:
+ orientation: "h"
+ width: 700
+ height: 1000
+
+ plot-rps:
+ xaxis:
+ autorange: True
+ autotick: False
+ fixedrange: False
+ gridcolor: "rgb(238, 238, 238)"
+ linecolor: "rgb(238, 238, 238)"
+ linewidth: 1
+ showgrid: True
+ showline: True
+ showticklabels: True
+ tickcolor: "rgb(238, 238, 238)"
+ tickmode: "linear"
+ title: "Indexed Test Cases"
+ zeroline: False
+ yaxis:
+ gridcolor: "rgb(238, 238, 238)"
+ hoverformat: ".4s"
+ linecolor: "rgb(238, 238, 238)"
+ linewidth: 1
+ range: []
+ rangemode: "tozero"
+ showgrid: True
+ showline: True
+ showticklabels: True
+ tickcolor: "rgb(238, 238, 238)"
+ title: "Requests Per Second [rps]"
+ zeroline: False
+ boxmode: "group"
+ boxgroupgap: 0.5
+ autosize: False
+ margin:
+ t: 50
+ b: 20
+ l: 50
+ r: 20
+ showlegend: True
+ legend:
+ orientation: "h"
+ width: 700
+ height: 1000
+
plot-throughput:
xaxis:
autorange: True
@@ -242,6 +337,7 @@
orientation: "h"
width: 700
height: 1000
+
plot-latency:
xaxis:
autorange: True
@@ -282,6 +378,7 @@
orientation: "h"
width: 700
height: 1000
+
plot-throughput-speedup-analysis:
xaxis:
autorange: True
@@ -344,6 +441,12 @@
-
build: 2
file: "{DIR[WORKING,DATA]}/output.xml"
+ -
+ build: 3
+ file: "{DIR[WORKING,DATA]}/output.xml"
+ -
+ build: 4
+ file: "{DIR[WORKING,DATA]}/output.xml"
-
type: "static"
@@ -947,6 +1050,51 @@
### P L O T S ###
################################################################################
+# Plots VPP HTTP Server Performance
+-
+ type: "plot"
+ title: "VPP HTTP Server Performance"
+ algorithm: "plot_http_server_performance_box"
+ output-file-type: ".html"
+ output-file: "{DIR[STATIC,VPP]}/http-server-performance-cps"
+ data:
+ "plot-vpp-http-server-performance"
+ # Keep this formatting, the filter is enclosed with " (quotation mark) and
+ # each tag is enclosed with ' (apostrophe).
+ filter: "'HTTP' and 'TCP_CPS'"
+ parameters:
+ - "result"
+ - "name"
+ traces:
+ hoverinfo: "x+y"
+ boxpoints: "outliers"
+ whiskerwidth: 0
+ layout:
+ title: "VPP HTTP Server Performance"
+ layout:
+ "plot-cps"
+
+-
+ type: "plot"
+ title: "VPP HTTP Server Performance"
+ algorithm: "plot_http_server_performance_box"
+ output-file-type: ".html"
+ output-file: "{DIR[STATIC,VPP]}/http-server-performance-rps"
+ data:
+ "plot-vpp-http-server-performance"
+ filter: "'HTTP' and 'TCP_RPS'"
+ parameters:
+ - "result"
+ - "name"
+ traces:
+ hoverinfo: "x+y"
+ boxpoints: "outliers"
+ whiskerwidth: 0
+ layout:
+ title: "VPP HTTP Server Performance"
+ layout:
+ "plot-rps"
+
# Plot Throughput Speedup Analysis
# L2 - 10ge2p1x520 - NDR
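
How the "algorithm" value of each plot specification is resolved to a function in
generator_plots.py is up to generate_plots(); as a rough sketch only (this dispatch is
an assumption, not the actual implementation), it could be a plain name lookup on the
module:

    import logging

    import generator_plots  # the module extended in this change

    def run_plot(plot, input_data):
        """Dispatch one plot specification to the function named by its "algorithm" key.

        Illustrative sketch only; generate_plots() may dispatch differently.
        """
        name = plot.get("algorithm", "")
        algorithm = getattr(generator_plots, name, None)
        if algorithm is None:
            logging.warning("Unknown plot algorithm: %s", name)
            return
        algorithm(plot, input_data)
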