From ae1fe880286d7b0414664bce2b2c7c91c3f543f3 Mon Sep 17 00:00:00 2001 From: Tibor Frank Date: Wed, 20 Jul 2022 15:51:21 +0200 Subject: UTI: Move constatns to a separate file Change-Id: If3796b71d02bcf5a92612585dfa8867e5039f037 Signed-off-by: Tibor Frank --- resources/tools/dash/app/pal/__init__.py | 23 +- resources/tools/dash/app/pal/data/data.py | 4 +- resources/tools/dash/app/pal/data/tooltips.yaml | 40 --- .../tools/dash/app/pal/data/url_processing.py | 99 ------- resources/tools/dash/app/pal/data/utils.py | 69 ----- resources/tools/dash/app/pal/news/layout.py | 14 +- resources/tools/dash/app/pal/news/news.py | 12 +- resources/tools/dash/app/pal/news/tables.py | 8 +- resources/tools/dash/app/pal/report/graphs.py | 144 +--------- resources/tools/dash/app/pal/report/layout.py | 172 ++++-------- resources/tools/dash/app/pal/report/report.py | 14 +- resources/tools/dash/app/pal/routes.py | 12 +- resources/tools/dash/app/pal/stats/layout.py | 17 +- resources/tools/dash/app/pal/stats/stats.py | 14 +- resources/tools/dash/app/pal/trending/graphs.py | 120 ++------- resources/tools/dash/app/pal/trending/layout.py | 144 ++++------ resources/tools/dash/app/pal/trending/trending.py | 14 +- resources/tools/dash/app/pal/utils/__init__.py | 12 + resources/tools/dash/app/pal/utils/constants.py | 299 +++++++++++++++++++++ resources/tools/dash/app/pal/utils/tooltips.yaml | 40 +++ .../tools/dash/app/pal/utils/url_processing.py | 99 +++++++ resources/tools/dash/app/pal/utils/utils.py | 69 +++++ 22 files changed, 701 insertions(+), 738 deletions(-) delete mode 100644 resources/tools/dash/app/pal/data/tooltips.yaml delete mode 100644 resources/tools/dash/app/pal/data/url_processing.py delete mode 100644 resources/tools/dash/app/pal/data/utils.py create mode 100644 resources/tools/dash/app/pal/utils/__init__.py create mode 100644 resources/tools/dash/app/pal/utils/constants.py create mode 100644 resources/tools/dash/app/pal/utils/tooltips.yaml create mode 100644 resources/tools/dash/app/pal/utils/url_processing.py create mode 100644 resources/tools/dash/app/pal/utils/utils.py (limited to 'resources') diff --git a/resources/tools/dash/app/pal/__init__.py b/resources/tools/dash/app/pal/__init__.py index 9f80c5fcaa..0eb2a4e79e 100644 --- a/resources/tools/dash/app/pal/__init__.py +++ b/resources/tools/dash/app/pal/__init__.py @@ -19,21 +19,8 @@ import logging from flask import Flask from flask_assets import Environment +from .utils.constants import Constants as C -# Maximal value of TIME_PERIOD for Trending in days. -# Do not change without a good reason. -MAX_TIME_PERIOD = 180 - -# It defines the time period for Trending in days from now back to the past from -# which data is read to dataframes. -# TIME_PERIOD = None means all data (max MAX_TIME_PERIOD days) is read. -# TIME_PERIOD = MAX_TIME_PERIOD is the default value -TIME_PERIOD = MAX_TIME_PERIOD # [days] - -# List of releases used for iterative data processing. -# The releases MUST be in the order from the current (newest) to the last -# (oldest). -RELEASES=["csit2206", "csit2202", ] def init_app(): """Construct core Flask application with embedded Dash app. @@ -58,10 +45,10 @@ def init_app(): assets.init_app(app) # Set the time period for Trending - if TIME_PERIOD is None or TIME_PERIOD > MAX_TIME_PERIOD: - time_period = MAX_TIME_PERIOD + if C.TIME_PERIOD is None or C.TIME_PERIOD > C.MAX_TIME_PERIOD: + time_period = C.MAX_TIME_PERIOD else: - time_period = TIME_PERIOD + time_period = C.TIME_PERIOD # Import Dash applications. 
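The hunk above replaces the module-level MAX_TIME_PERIOD, TIME_PERIOD and RELEASES globals with attributes of the new Constants class; the clamping of the trending time period itself is unchanged. A minimal, runnable sketch of that logic, with a stub Constants class standing in for pal/utils/constants.py and a hypothetical resolve_time_period() helper (the patch does this inline in init_app()):

```python
# Stub Constants class for illustration only; the real class lives in
# pal/utils/constants.py and carries many more attributes.
class Constants:
    MAX_TIME_PERIOD = 180   # hard upper bound for trending data [days]
    TIME_PERIOD = 180       # None means "read all data up to the maximum"

C = Constants

def resolve_time_period() -> int:
    """Return the effective trending window in days, clamped to the maximum."""
    if C.TIME_PERIOD is None or C.TIME_PERIOD > C.MAX_TIME_PERIOD:
        return C.MAX_TIME_PERIOD
    return C.TIME_PERIOD

print(resolve_time_period())  # -> 180
```

The Dash sub-applications are then imported and initialized below with the resolved value.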
from .news.news import init_news @@ -74,7 +61,7 @@ def init_app(): app = init_trending(app, time_period=time_period) from .report.report import init_report - app = init_report(app, releases=RELEASES) + app = init_report(app, releases=C.RELEASES) return app diff --git a/resources/tools/dash/app/pal/data/data.py b/resources/tools/dash/app/pal/data/data.py index 296db024c0..0956333e34 100644 --- a/resources/tools/dash/app/pal/data/data.py +++ b/resources/tools/dash/app/pal/data/data.py @@ -15,15 +15,13 @@ """ import logging +import awswrangler as wr from yaml import load, FullLoader, YAMLError from datetime import datetime, timedelta from time import time from pytz import UTC from pandas import DataFrame - -import awswrangler as wr - from awswrangler.exceptions import EmptyDataFrame, NoFilesFound diff --git a/resources/tools/dash/app/pal/data/tooltips.yaml b/resources/tools/dash/app/pal/data/tooltips.yaml deleted file mode 100644 index 2086b575a9..0000000000 --- a/resources/tools/dash/app/pal/data/tooltips.yaml +++ /dev/null @@ -1,40 +0,0 @@ -help-area: - The area defines a VPP packet path and lookup type. -help-cadence: - The cadence of the Jenkins job which runs the tests. -help-cores: - Number of cores the DUT uses during the test. -help-download: - Download the selected data as a csv file. -help-dut: - Device Under Test (DUT) - In software networking, “device” denotes a specific - piece of software tasked with packet processing. Such device is surrounded - with other software components (such as operating system kernel). -help-dut-ver: - The version of the Device under Test. -help-framesize: - Frame size - size of an Ethernet Layer-2 frame on the wire, including any VLAN - tags (dot1q, dot1ad) and Ethernet FCS, but excluding Ethernet preamble and - inter-frame gap. Measured in Bytes. -help-infra: - Infrastructure is defined by the toplology (number of nodes), processor - architecture, NIC and driver. -help-normalize: - Normalize the results to CPU frequency 2GHz. The results from AWS environment - are not normalized as we do not know the exact value of CPU frequency. -help-release: - The CSIT release. -help-tbed: - The test bed is defined by toplology (number of nodes) and processor - architecture. -help-test: - The test specification consists of packet encapsulation, VPP packet processing - (packet forwarding mode and packet processing function(s)) and packet - forwarding path. -help-time-period: - Choose a time period for selected tests. -help-ttype: - Main measured variable. -help-url: - URL with current configuration. If there is no "Copy URL" button, use triple - click. diff --git a/resources/tools/dash/app/pal/data/url_processing.py b/resources/tools/dash/app/pal/data/url_processing.py deleted file mode 100644 index 9307015d0d..0000000000 --- a/resources/tools/dash/app/pal/data/url_processing.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright (c) 2022 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""URL decoding and parsing and URL encoding. 
-""" - -import logging - -from base64 import urlsafe_b64encode, urlsafe_b64decode -from urllib.parse import urlencode, urlunparse, urlparse, parse_qs -from zlib import compress, decompress -from zlib import error as ZlibErr -from binascii import Error as BinasciiErr - - -def url_encode(params: dict) -> str: - """Encode the URL parameters and zip them and create the whole URL using - given data. - - :param params: All data necessary to create the URL: - - scheme, - - network location, - - path, - - query, - - parameters. - :type params: dict - :returns: Encoded URL. - :rtype: str - """ - - url_params = params.get("params", None) - if url_params: - encoded_params = urlsafe_b64encode( - compress(urlencode(url_params).encode("utf-8"), level=9) - ).rstrip(b"=").decode("utf-8") - else: - encoded_params = str() - - return urlunparse(( - params.get("scheme", "http"), - params.get("netloc", str()), - params.get("path", str()), - str(), # params - params.get("query", str()), - encoded_params - )) - - -def url_decode(url: str) -> dict: - """Parse the given URL and decode the parameters. - - :param url: URL to be parsed and decoded. - :type url: str - :returns: Paresed URL. - :rtype: dict - """ - - try: - parsed_url = urlparse(url) - except ValueError as err: - logging.warning(f"\nThe url {url} is not valid, ignoring.\n{repr(err)}") - return None - - if parsed_url.fragment: - try: - padding = b"=" * (4 - (len(parsed_url.fragment) % 4)) - params = parse_qs(decompress( - urlsafe_b64decode( - (parsed_url.fragment.encode("utf-8") + padding) - )).decode("utf-8") - ) - except (BinasciiErr, UnicodeDecodeError, ZlibErr) as err: - logging.warning( - f"\nNot possible to decode the parameters from url: {url}" - f"\nEncoded parameters: '{parsed_url.fragment}'" - f"\n{repr(err)}" - ) - return None - else: - params = None - - return { - "scheme": parsed_url.scheme, - "netloc": parsed_url.netloc, - "path": parsed_url.path, - "query": parsed_url.query, - "fragment": parsed_url.fragment, - "params": params - } diff --git a/resources/tools/dash/app/pal/data/utils.py b/resources/tools/dash/app/pal/data/utils.py deleted file mode 100644 index 63c9c1aaa4..0000000000 --- a/resources/tools/dash/app/pal/data/utils.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (c) 2022 Cisco and/or its affiliates. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at: -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -""" - -from numpy import isnan - -from ..jumpavg import classify - - -def classify_anomalies(data): - """Process the data and return anomalies and trending values. - - Gather data into groups with average as trend value. - Decorate values within groups to be normal, - the first value of changed average as a regression, or a progression. - - :param data: Full data set with unavailable samples replaced by nan. - :type data: OrderedDict - :returns: Classification and trend values - :rtype: 3-tuple, list of strings, list of floats and list of floats - """ - # NaN means something went wrong. - # Use 0.0 to cause that being reported as a severe regression. 
- bare_data = [0.0 if isnan(sample) else sample for sample in data.values()] - # TODO: Make BitCountingGroupList a subclass of list again? - group_list = classify(bare_data).group_list - group_list.reverse() # Just to use .pop() for FIFO. - classification = list() - avgs = list() - stdevs = list() - active_group = None - values_left = 0 - avg = 0.0 - stdv = 0.0 - for sample in data.values(): - if isnan(sample): - classification.append("outlier") - avgs.append(sample) - stdevs.append(sample) - continue - if values_left < 1 or active_group is None: - values_left = 0 - while values_left < 1: # Ignore empty groups (should not happen). - active_group = group_list.pop() - values_left = len(active_group.run_list) - avg = active_group.stats.avg - stdv = active_group.stats.stdev - classification.append(active_group.comment) - avgs.append(avg) - stdevs.append(stdv) - values_left -= 1 - continue - classification.append("normal") - avgs.append(avg) - stdevs.append(stdv) - values_left -= 1 - return classification, avgs, stdevs diff --git a/resources/tools/dash/app/pal/news/layout.py b/resources/tools/dash/app/pal/news/layout.py index 2f66ce5c81..c9f2808a14 100644 --- a/resources/tools/dash/app/pal/news/layout.py +++ b/resources/tools/dash/app/pal/news/layout.py @@ -27,7 +27,9 @@ from yaml import load, FullLoader, YAMLError from copy import deepcopy from ..data.data import Data -from ..data.utils import classify_anomalies +from ..utils.constants import Constants as C +from ..utils.utils import classify_anomalies +from ..data.data import Data from .tables import table_news @@ -35,12 +37,6 @@ class Layout: """The layout of the dash app and the callbacks. """ - # The default job displayed when the page is loaded first time. - DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx" - - # Time period for regressions and progressions. - TIME_PERIOD = 21 # [days] - def __init__(self, app: Flask, html_layout_file: str, data_spec_file: str, tooltip_file: str) -> None: """Initialization: @@ -73,7 +69,7 @@ class Layout: data_stats, data_mrr, data_ndrpdr = Data( data_spec_file=self._data_spec_file, debug=True - ).read_stats(days=self.TIME_PERIOD) + ).read_stats(days=C.NEWS_TIME_PERIOD) df_tst_info = pd.concat([data_mrr, data_ndrpdr], ignore_index=True) @@ -95,7 +91,7 @@ class Layout: job_info["tbed"].append("-".join(lst_job[-2:])) self.df_job_info = pd.DataFrame.from_dict(job_info) - self._default = self._set_job_params(self.DEFAULT_JOB) + self._default = self._set_job_params(C.NEWS_DEFAULT_JOB) # Pre-process the data: diff --git a/resources/tools/dash/app/pal/news/news.py b/resources/tools/dash/app/pal/news/news.py index deb00c1810..5e65b62281 100644 --- a/resources/tools/dash/app/pal/news/news.py +++ b/resources/tools/dash/app/pal/news/news.py @@ -14,8 +14,8 @@ """Instantiate the Statistics Dash applocation. 
""" import dash -import dash_bootstrap_components as dbc +from ..utils.constants import Constants as C from .layout import Layout @@ -30,15 +30,15 @@ def init_news(server): dash_app = dash.Dash( server=server, - routes_pathname_prefix=u"/news/", - external_stylesheets=[dbc.themes.LUX], + routes_pathname_prefix=C.NEWS_ROUTES_PATHNAME_PREFIX, + external_stylesheets=C.EXTERNAL_STYLESHEETS ) layout = Layout( app=dash_app, - html_layout_file="pal/templates/news_layout.jinja2", - data_spec_file="pal/data/data.yaml", - tooltip_file="pal/data/tooltips.yaml", + html_layout_file=C.NEWS_HTML_LAYOUT_FILE, + data_spec_file=C.DATA_SPEC_FILE, + tooltip_file=C.TOOLTIP_FILE, ) dash_app.index_string = layout.html_layout dash_app.layout = layout.add_content() diff --git a/resources/tools/dash/app/pal/news/tables.py b/resources/tools/dash/app/pal/news/tables.py index 53b24608d5..6272814165 100644 --- a/resources/tools/dash/app/pal/news/tables.py +++ b/resources/tools/dash/app/pal/news/tables.py @@ -17,9 +17,7 @@ import pandas as pd import dash_bootstrap_components as dbc - -# Time period for regressions and progressions. -TIME_PERIOD = 21 # [days] +from ..utils.constants import Constants as C def table_news(data: pd.DataFrame, job: str) -> list: @@ -57,7 +55,7 @@ def table_news(data: pd.DataFrame, job: str) -> list: class_name="p-0", size="lg", children=( - f"Regressions during the last {TIME_PERIOD} days " + f"Regressions during the last {C.NEWS_TIME_PERIOD} days " f"({len(regressions['Test Name'])})" ) ), @@ -68,7 +66,7 @@ def table_news(data: pd.DataFrame, job: str) -> list: class_name="p-0", size="lg", children=( - f"Progressions during the last {TIME_PERIOD} days " + f"Progressions during the last {C.NEWS_TIME_PERIOD} days " f"({len(progressions['Test Name'])})" ) ), diff --git a/resources/tools/dash/app/pal/report/graphs.py b/resources/tools/dash/app/pal/report/graphs.py index 76aa8b7793..c5d8f8f2d7 100644 --- a/resources/tools/dash/app/pal/report/graphs.py +++ b/resources/tools/dash/app/pal/report/graphs.py @@ -20,75 +20,13 @@ import pandas as pd from copy import deepcopy -import hdrh.histogram -import hdrh.codec - - -_NORM_FREQUENCY = 2.0 # [GHz] -_FREQURENCY = { # [GHz] - "2n-aws": 1.000, - "2n-dnv": 2.000, - "2n-clx": 2.300, - "2n-icx": 2.600, - "2n-skx": 2.500, - "2n-tx2": 2.500, - "2n-zn2": 2.900, - "3n-alt": 3.000, - "3n-aws": 1.000, - "3n-dnv": 2.000, - "3n-icx": 2.600, - "3n-skx": 2.500, - "3n-tsh": 2.200 -} - -_VALUE = { - "mrr": "result_receive_rate_rate_values", - "ndr": "result_ndr_lower_rate_value", - "pdr": "result_pdr_lower_rate_value", - "pdr-lat": "result_latency_forward_pdr_50_avg" -} -_UNIT = { - "mrr": "result_receive_rate_rate_unit", - "ndr": "result_ndr_lower_rate_unit", - "pdr": "result_pdr_lower_rate_unit", - "pdr-lat": "result_latency_forward_pdr_50_unit" -} -_LAT_HDRH = ( # Do not change the order - "result_latency_forward_pdr_0_hdrh", - "result_latency_reverse_pdr_0_hdrh", - "result_latency_forward_pdr_10_hdrh", - "result_latency_reverse_pdr_10_hdrh", - "result_latency_forward_pdr_50_hdrh", - "result_latency_reverse_pdr_50_hdrh", - "result_latency_forward_pdr_90_hdrh", - "result_latency_reverse_pdr_90_hdrh", -) -# This value depends on latency stream rate (9001 pps) and duration (5s). -# Keep it slightly higher to ensure rounding errors to not remove tick mark. 
-PERCENTILE_MAX = 99.999501 - -_GRAPH_LAT_HDRH_DESC = { - "result_latency_forward_pdr_0_hdrh": "No-load.", - "result_latency_reverse_pdr_0_hdrh": "No-load.", - "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.", - "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.", - "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.", - "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.", - "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.", - "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR." -} +from ..utils.constants import Constants as C def _get_color(idx: int) -> str: """ """ - _COLORS = ( - "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A", - "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285", - "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF", - "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051" - ) - return _COLORS[idx % len(_COLORS)] + return C.PLOT_COLORS[idx % len(C.PLOT_COLORS)] def get_short_version(version: str, dut_type: str="vpp") -> str: @@ -182,16 +120,16 @@ def graph_iterative(data: pd.DataFrame, sel:dict, layout: dict, continue phy = itm["phy"].split("-") topo_arch = f"{phy[0]}-{phy[1]}" if len(phy) == 4 else str() - norm_factor = (_NORM_FREQUENCY / _FREQURENCY[topo_arch]) \ + norm_factor = (C.NORM_FREQUENCY / C.FREQUENCY[topo_arch]) \ if normalize else 1.0 if itm["testtype"] == "mrr": - y_data_raw = itm_data[_VALUE[itm["testtype"]]].to_list()[0] + y_data_raw = itm_data[C.VALUE_ITER[itm["testtype"]]].to_list()[0] y_data = [(y * norm_factor) for y in y_data_raw] if len(y_data) > 0: y_tput_max = \ max(y_data) if max(y_data) > y_tput_max else y_tput_max else: - y_data_raw = itm_data[_VALUE[itm["testtype"]]].to_list() + y_data_raw = itm_data[C.VALUE_ITER[itm["testtype"]]].to_list() y_data = [(y * norm_factor) for y in y_data_raw] if y_data: y_tput_max = \ @@ -214,7 +152,7 @@ def graph_iterative(data: pd.DataFrame, sel:dict, layout: dict, show_tput = True if itm["testtype"] == "pdr": - y_lat_row = itm_data[_VALUE["pdr-lat"]].to_list() + y_lat_row = itm_data[C.VALUE_ITER["pdr-lat"]].to_list() y_lat = [(y / norm_factor) for y in y_lat_row] if y_lat: y_lat_max = max(y_lat) if max(y_lat) > y_lat_max else y_lat_max @@ -302,73 +240,3 @@ def table_comparison(data: pd.DataFrame, sel:dict, ) return pd.DataFrame() #table - - -def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure: - """ - """ - - fig = None - - traces = list() - for idx, (lat_name, lat_hdrh) in enumerate(data.items()): - try: - decoded = hdrh.histogram.HdrHistogram.decode(lat_hdrh) - except (hdrh.codec.HdrLengthException, TypeError) as err: - continue - previous_x = 0.0 - prev_perc = 0.0 - xaxis = list() - yaxis = list() - hovertext = list() - for item in decoded.get_recorded_iterator(): - # The real value is "percentile". - # For 100%, we cut that down to "x_perc" to avoid - # infinity. - percentile = item.percentile_level_iterated_to - x_perc = min(percentile, PERCENTILE_MAX) - xaxis.append(previous_x) - yaxis.append(item.value_iterated_to) - hovertext.append( - f"{_GRAPH_LAT_HDRH_DESC[lat_name]}
" - f"Direction: {('W-E', 'E-W')[idx % 2]}
" - f"Percentile: {prev_perc:.5f}-{percentile:.5f}%
" - f"Latency: {item.value_iterated_to}uSec" - ) - next_x = 100.0 / (100.0 - x_perc) - xaxis.append(next_x) - yaxis.append(item.value_iterated_to) - hovertext.append( - f"{_GRAPH_LAT_HDRH_DESC[lat_name]}
" - f"Direction: {('W-E', 'E-W')[idx % 2]}
" - f"Percentile: {prev_perc:.5f}-{percentile:.5f}%
" - f"Latency: {item.value_iterated_to}uSec" - ) - previous_x = next_x - prev_perc = percentile - - traces.append( - go.Scatter( - x=xaxis, - y=yaxis, - name=_GRAPH_LAT_HDRH_DESC[lat_name], - mode="lines", - legendgroup=_GRAPH_LAT_HDRH_DESC[lat_name], - showlegend=bool(idx % 2), - line=dict( - color=_get_color(int(idx/2)), - dash="solid", - width=1 if idx % 2 else 2 - ), - hovertext=hovertext, - hoverinfo="text" - ) - ) - if traces: - fig = go.Figure() - fig.add_traces(traces) - layout_hdrh = layout.get("plot-hdrh-latency", None) - if lat_hdrh: - fig.update_layout(layout_hdrh) - - return fig diff --git a/resources/tools/dash/app/pal/report/layout.py b/resources/tools/dash/app/pal/report/layout.py index e7c8db43ae..164f2d48d9 100644 --- a/resources/tools/dash/app/pal/report/layout.py +++ b/resources/tools/dash/app/pal/report/layout.py @@ -27,8 +27,9 @@ from yaml import load, FullLoader, YAMLError from copy import deepcopy from ast import literal_eval +from ..utils.constants import Constants as C +from ..utils.url_processing import url_decode, url_encode from ..data.data import Data -from ..data.url_processing import url_decode, url_encode from .graphs import graph_iterative, table_comparison, get_short_version @@ -36,51 +37,6 @@ class Layout: """ """ - # If True, clear all inputs in control panel when button "ADD SELECTED" is - # pressed. - CLEAR_ALL_INPUTS = False - - STYLE_DISABLED = {"display": "none"} - STYLE_ENABLED = {"display": "inherit"} - - CL_ALL_DISABLED = [{ - "label": "All", - "value": "all", - "disabled": True - }] - CL_ALL_ENABLED = [{ - "label": "All", - "value": "all", - "disabled": False - }] - - PLACEHOLDER = html.Nobr("") - - DRIVERS = ("avf", "af-xdp", "rdma", "dpdk") - - LABELS = { - "dpdk": "DPDK", - "container_memif": "LXC/DRC Container Memif", - "crypto": "IPSec IPv4 Routing", - "ip4": "IPv4 Routing", - "ip6": "IPv6 Routing", - "ip4_tunnels": "IPv4 Tunnels", - "l2": "L2 Ethernet Switching", - "srv6": "SRv6 Routing", - "vm_vhost": "VMs vhost-user", - "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec", - "nfv_density-vm_vhost-chain_dot1qip4vxlan":"VNF Service Chains Tunnels", - "nfv_density-vm_vhost-chain": "VNF Service Chains Routing", - "nfv_density-dcr_memif-pipeline": "CNF Service Pipelines Routing", - "nfv_density-dcr_memif-chain": "CNF Service Chains Routing", - } - - URL_STYLE = { - "background-color": "#d2ebf5", - "border-color": "#bce1f1", - "color": "#135d7c" - } - def __init__(self, app: Flask, releases: list, html_layout_file: str, graph_layout_file: str, data_spec_file: str, tooltip_file: str) -> None: """ @@ -125,7 +81,7 @@ class Layout: replace("2n-", "") test = lst_test_id[-1] nic = suite.split("-")[0] - for drv in self.DRIVERS: + for drv in C.DRIVERS: if drv in test: driver = drv.replace("-", "_") test = test.replace(f"{drv}-", "") @@ -234,7 +190,7 @@ class Layout: return self._graph_layout def label(self, key: str) -> str: - return self.LABELS.get(key, key) + return C.LABELS.get(key, key) def _show_tooltip(self, id: str, title: str, clipboard_id: str=None) -> list: @@ -359,9 +315,7 @@ class Layout: dbc.Row( # Throughput id="row-graph-tput", class_name="g-0 p-2", - children=[ - self.PLACEHOLDER - ] + children=[C.PLACEHOLDER, ] ), width=6 ), @@ -369,9 +323,7 @@ class Layout: dbc.Row( # Latency id="row-graph-lat", class_name="g-0 p-2", - children=[ - self.PLACEHOLDER - ] + children=[C.PLACEHOLDER, ] ), width=6 ) @@ -380,16 +332,12 @@ class Layout: dbc.Row( # Tables id="row-table", class_name="g-0 p-2", - children=[ - 
self.PLACEHOLDER - ] + children=[C.PLACEHOLDER, ] ), dbc.Row( # Download id="row-btn-download", class_name="g-0 p-2", - children=[ - self.PLACEHOLDER - ] + children=[C.PLACEHOLDER, ] ) ] ) @@ -548,7 +496,7 @@ class Layout: children=[ dbc.Checklist( id="cl-ctrl-framesize-all", - options=self.CL_ALL_DISABLED, + options=C.CL_ALL_DISABLED, inline=True, switch=False ), @@ -579,7 +527,7 @@ class Layout: children=[ dbc.Checklist( id="cl-ctrl-core-all", - options=self.CL_ALL_DISABLED, + options=C.CL_ALL_DISABLED, inline=False, switch=False ) @@ -610,7 +558,7 @@ class Layout: children=[ dbc.Checklist( id="cl-ctrl-testtype-all", - options=self.CL_ALL_DISABLED, + options=C.CL_ALL_DISABLED, inline=True, switch=False ), @@ -675,7 +623,7 @@ class Layout: dbc.Row( id="row-card-sel-tests", class_name="gy-1", - style=self.STYLE_DISABLED, + style=C.STYLE_DISABLED, children=[ dbc.Label( "Selected tests", @@ -692,7 +640,7 @@ class Layout: ), dbc.Row( id="row-btns-sel-tests", - style=self.STYLE_DISABLED, + style=C.STYLE_DISABLED, children=[ dbc.ButtonGroup( class_name="gy-2", @@ -722,12 +670,6 @@ class Layout: class ControlPanel: def __init__(self, panel: dict) -> None: - CL_ALL_DISABLED = [{ - "label": "All", - "value": "all", - "disabled": True - }] - # Defines also the order of keys self._defaults = { "dd-rls-value": str(), @@ -749,15 +691,15 @@ class Layout: "cl-core-options": list(), "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": CL_ALL_DISABLED, + "cl-core-all-options": C.CL_ALL_DISABLED, "cl-framesize-options": list(), "cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": CL_ALL_DISABLED, + "cl-framesize-all-options": C.CL_ALL_DISABLED, "cl-testtype-options": list(), "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": CL_ALL_DISABLED, + "cl-testtype-all-options": C.CL_ALL_DISABLED, "btn-add-disabled": True, "cl-normalize-value": list(), "cl-selected-options": list() @@ -818,10 +760,10 @@ class Layout: (fig_tput, fig_lat) = figs - row_fig_tput = self.PLACEHOLDER - row_fig_lat = self.PLACEHOLDER - row_table = self.PLACEHOLDER - row_btn_dwnld = self.PLACEHOLDER + row_fig_tput = C.PLACEHOLDER + row_fig_lat = C.PLACEHOLDER + row_table = C.PLACEHOLDER + row_btn_dwnld = C.PLACEHOLDER if fig_tput: row_fig_tput = [ @@ -853,7 +795,7 @@ class Layout: class_name="me-1", children=[ dbc.InputGroupText( - style=self.URL_STYLE, + style=C.URL_STYLE, children=self._show_tooltip( "help-url", "URL", "input-url") ), @@ -861,7 +803,7 @@ class Layout: id="input-url", readonly=True, type="url", - style=self.URL_STYLE, + style=C.URL_STYLE, value=url ) ] @@ -1019,15 +961,15 @@ class Layout: "cl-core-options": list(), "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": self.CL_ALL_DISABLED, + "cl-core-all-options": C.CL_ALL_DISABLED, "cl-framesize-options": list(), "cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-framesize-all-options": C.CL_ALL_DISABLED, "cl-testtype-options": list(), "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": self.CL_ALL_DISABLED + "cl-testtype-all-options": C.CL_ALL_DISABLED }) elif trigger_id == "dd-ctrl-dut": try: @@ -1058,15 +1000,15 @@ class Layout: "cl-core-options": list(), "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": self.CL_ALL_DISABLED, + "cl-core-all-options": C.CL_ALL_DISABLED, "cl-framesize-options": list(), 
"cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-framesize-all-options": C.CL_ALL_DISABLED, "cl-testtype-options": list(), "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": self.CL_ALL_DISABLED + "cl-testtype-all-options": C.CL_ALL_DISABLED }) elif trigger_id == "dd-ctrl-dutver": try: @@ -1095,15 +1037,15 @@ class Layout: "cl-core-options": list(), "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": self.CL_ALL_DISABLED, + "cl-core-all-options": C.CL_ALL_DISABLED, "cl-framesize-options": list(), "cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-framesize-all-options": C.CL_ALL_DISABLED, "cl-testtype-options": list(), "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": self.CL_ALL_DISABLED + "cl-testtype-all-options": C.CL_ALL_DISABLED }) elif trigger_id == "dd-ctrl-phy": try: @@ -1131,15 +1073,15 @@ class Layout: "cl-core-options": list(), "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": self.CL_ALL_DISABLED, + "cl-core-all-options": C.CL_ALL_DISABLED, "cl-framesize-options": list(), "cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-framesize-all-options": C.CL_ALL_DISABLED, "cl-testtype-options": list(), "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": self.CL_ALL_DISABLED + "cl-testtype-all-options": C.CL_ALL_DISABLED }) elif trigger_id == "dd-ctrl-area": try: @@ -1164,15 +1106,15 @@ class Layout: "cl-core-options": list(), "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": self.CL_ALL_DISABLED, + "cl-core-all-options": C.CL_ALL_DISABLED, "cl-framesize-options": list(), "cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-framesize-all-options": C.CL_ALL_DISABLED, "cl-testtype-options": list(), "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": self.CL_ALL_DISABLED + "cl-testtype-all-options": C.CL_ALL_DISABLED }) elif trigger_id == "dd-ctrl-test": rls = ctrl_panel.get("dd-rls-value") @@ -1188,17 +1130,17 @@ class Layout: for v in sorted(test["core"])], "cl-core-value": list(), "cl-core-all-value": list(), - "cl-core-all-options": self.CL_ALL_ENABLED, + "cl-core-all-options": C.CL_ALL_ENABLED, "cl-framesize-options": [{"label": v, "value": v} for v in sorted(test["frame-size"])], "cl-framesize-value": list(), "cl-framesize-all-value": list(), - "cl-framesize-all-options": self.CL_ALL_ENABLED, + "cl-framesize-all-options": C.CL_ALL_ENABLED, "cl-testtype-options": [{"label": v, "value": v} for v in sorted(test["test-type"])], "cl-testtype-value": list(), "cl-testtype-all-value": list(), - "cl-testtype-all-options": self.CL_ALL_ENABLED, + "cl-testtype-all-options": C.CL_ALL_ENABLED, }) elif trigger_id == "cl-ctrl-core": val_sel, val_all = self._sync_checklists( @@ -1305,21 +1247,21 @@ class Layout: "testtype": ttype.lower() }) store_sel = sorted(store_sel, key=lambda d: d["id"]) - row_card_sel_tests = self.STYLE_ENABLED - row_btns_sel_tests = self.STYLE_ENABLED - if self.CLEAR_ALL_INPUTS: + row_card_sel_tests = C.STYLE_ENABLED + row_btns_sel_tests = C.STYLE_ENABLED + if C.CLEAR_ALL_INPUTS: ctrl_panel.set(ctrl_panel.defaults) ctrl_panel.set({ "cl-selected-options": 
self._list_tests(store_sel) }) elif trigger_id == "btn-sel-remove-all": _ = btn_remove_all - row_fig_tput = self.PLACEHOLDER - row_fig_lat = self.PLACEHOLDER - row_table = self.PLACEHOLDER - row_btn_dwnld = self.PLACEHOLDER - row_card_sel_tests = self.STYLE_DISABLED - row_btns_sel_tests = self.STYLE_DISABLED + row_fig_tput = C.PLACEHOLDER + row_fig_lat = C.PLACEHOLDER + row_table = C.PLACEHOLDER + row_btn_dwnld = C.PLACEHOLDER + row_card_sel_tests = C.STYLE_DISABLED + row_btns_sel_tests = C.STYLE_DISABLED store_sel = list() ctrl_panel.set({"cl-selected-options": list()}) elif trigger_id == "btn-sel-remove": @@ -1337,8 +1279,8 @@ class Layout: store_sel = literal_eval( url_params.get("store_sel", list())[0]) if store_sel: - row_card_sel_tests = self.STYLE_ENABLED - row_btns_sel_tests = self.STYLE_ENABLED + row_card_sel_tests = C.STYLE_ENABLED + row_btns_sel_tests = C.STYLE_ENABLED if trigger_id in ("btn-ctrl-add", "url", "btn-sel-remove", "cl-ctrl-normalize"): @@ -1358,12 +1300,12 @@ class Layout: "cl-selected-options": self._list_tests(store_sel) }) else: - row_fig_tput = self.PLACEHOLDER - row_fig_lat = self.PLACEHOLDER - row_table = self.PLACEHOLDER - row_btn_dwnld = self.PLACEHOLDER - row_card_sel_tests = self.STYLE_DISABLED - row_btns_sel_tests = self.STYLE_DISABLED + row_fig_tput = C.PLACEHOLDER + row_fig_lat = C.PLACEHOLDER + row_table = C.PLACEHOLDER + row_btn_dwnld = C.PLACEHOLDER + row_card_sel_tests = C.STYLE_DISABLED + row_btns_sel_tests = C.STYLE_DISABLED store_sel = list() ctrl_panel.set({"cl-selected-options": list()}) diff --git a/resources/tools/dash/app/pal/report/report.py b/resources/tools/dash/app/pal/report/report.py index c02b409973..c6008ca595 100644 --- a/resources/tools/dash/app/pal/report/report.py +++ b/resources/tools/dash/app/pal/report/report.py @@ -14,8 +14,8 @@ """Instantiate the Report Dash applocation. """ import dash -import dash_bootstrap_components as dbc +from ..utils.constants import Constants as C from .layout import Layout @@ -30,17 +30,17 @@ def init_report(server, releases): dash_app = dash.Dash( server=server, - routes_pathname_prefix=u"/report/", - external_stylesheets=[dbc.themes.LUX], + routes_pathname_prefix=C.REPORT_ROUTES_PATHNAME_PREFIX, + external_stylesheets=C.EXTERNAL_STYLESHEETS ) layout = Layout( app=dash_app, releases=releases, - html_layout_file="pal/templates/report_layout.jinja2", - graph_layout_file="pal/report/layout.yaml", - data_spec_file="pal/data/data.yaml", - tooltip_file="pal/data/tooltips.yaml" + html_layout_file=C.REPORT_HTML_LAYOUT_FILE, + graph_layout_file=C.REPORT_GRAPH_LAYOUT_FILE, + data_spec_file=C.DATA_SPEC_FILE, + tooltip_file=C.TOOLTIP_FILE, ) dash_app.index_string = layout.html_layout dash_app.layout = layout.add_content() diff --git a/resources/tools/dash/app/pal/routes.py b/resources/tools/dash/app/pal/routes.py index d4cd88ffce..59af748168 100644 --- a/resources/tools/dash/app/pal/routes.py +++ b/resources/tools/dash/app/pal/routes.py @@ -17,14 +17,16 @@ from flask import current_app as app from flask import render_template +from .utils.constants import Constants as C -@app.route("/") + +@app.route(C.APPLICATIN_ROOT) def home(): """Landing page. 
""" return render_template( - "index_layout.jinja2", - title="FD.io CSIT", - description="Performance Dashboard", - template="d-flex h-100 text-center text-white bg-dark" + C.MAIN_HTML_LAYOUT_FILE, + title=C.TITLE, + description=C.DESCRIPTION, + template=C.TEMPLATE ) diff --git a/resources/tools/dash/app/pal/stats/layout.py b/resources/tools/dash/app/pal/stats/layout.py index 5c3758ba76..03707c0394 100644 --- a/resources/tools/dash/app/pal/stats/layout.py +++ b/resources/tools/dash/app/pal/stats/layout.py @@ -28,8 +28,9 @@ from yaml import load, FullLoader, YAMLError from datetime import datetime, timedelta from copy import deepcopy +from ..utils.constants import Constants as C +from ..utils.url_processing import url_decode, url_encode from ..data.data import Data -from ..data.url_processing import url_decode, url_encode from .graphs import graph_statistics, select_data @@ -37,14 +38,6 @@ class Layout: """ """ - DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx" - - URL_STYLE = { - "background-color": "#d2ebf5", - "border-color": "#bce1f1", - "color": "#135d7c" - } - def __init__(self, app: Flask, html_layout_file: str, graph_layout_file: str, data_spec_file: str, tooltip_file: str, time_period: int=None) -> None: @@ -95,7 +88,7 @@ class Layout: job_info["tbed"].append("-".join(lst_job[-2:])) self.df_job_info = pd.DataFrame.from_dict(job_info) - self._default = self._set_job_params(self.DEFAULT_JOB) + self._default = self._set_job_params(C.STATS_DEFAULT_JOB) tst_info = { "job": list(), @@ -431,7 +424,7 @@ class Layout: class_name="me-1", children=[ dbc.InputGroupText( - style=self.URL_STYLE, + style=C.URL_STYLE, children=self._show_tooltip( "help-url", "URL", "input-url") ), @@ -439,7 +432,7 @@ class Layout: id="input-url", readonly=True, type="url", - style=self.URL_STYLE, + style=C.URL_STYLE, value="" ) ] diff --git a/resources/tools/dash/app/pal/stats/stats.py b/resources/tools/dash/app/pal/stats/stats.py index 3da742d61e..560ec53f14 100644 --- a/resources/tools/dash/app/pal/stats/stats.py +++ b/resources/tools/dash/app/pal/stats/stats.py @@ -14,8 +14,8 @@ """Instantiate the Statistics Dash applocation. 
""" import dash -import dash_bootstrap_components as dbc +from ..utils.constants import Constants as C from .layout import Layout @@ -30,16 +30,16 @@ def init_stats(server, time_period=None): dash_app = dash.Dash( server=server, - routes_pathname_prefix=u"/stats/", - external_stylesheets=[dbc.themes.LUX], + routes_pathname_prefix=C.STATS_ROUTES_PATHNAME_PREFIX, + external_stylesheets=C.EXTERNAL_STYLESHEETS ) layout = Layout( app=dash_app, - html_layout_file="pal/templates/stats_layout.jinja2", - graph_layout_file="pal/stats/layout.yaml", - data_spec_file="pal/data/data.yaml", - tooltip_file="pal/data/tooltips.yaml", + html_layout_file=C.STATS_HTML_LAYOUT_FILE, + graph_layout_file=C.STATS_GRAPH_LAYOUT_FILE, + data_spec_file=C.DATA_SPEC_FILE, + tooltip_file=C.TOOLTIP_FILE, time_period=time_period ) dash_app.index_string = layout.html_layout diff --git a/resources/tools/dash/app/pal/trending/graphs.py b/resources/tools/dash/app/pal/trending/graphs.py index a63bebb818..0b4968082f 100644 --- a/resources/tools/dash/app/pal/trending/graphs.py +++ b/resources/tools/dash/app/pal/trending/graphs.py @@ -22,96 +22,14 @@ import hdrh.codec from datetime import datetime -from ..data.utils import classify_anomalies - -_NORM_FREQUENCY = 2.0 # [GHz] -_FREQURENCY = { # [GHz] - "2n-aws": 1.000, - "2n-dnv": 2.000, - "2n-clx": 2.300, - "2n-icx": 2.600, - "2n-skx": 2.500, - "2n-tx2": 2.500, - "2n-zn2": 2.900, - "3n-alt": 3.000, - "3n-aws": 1.000, - "3n-dnv": 2.000, - "3n-icx": 2.600, - "3n-skx": 2.500, - "3n-tsh": 2.200 -} - -_ANOMALY_COLOR = { - "regression": 0.0, - "normal": 0.5, - "progression": 1.0 -} -_COLORSCALE_TPUT = [ - [0.00, "red"], - [0.33, "red"], - [0.33, "white"], - [0.66, "white"], - [0.66, "green"], - [1.00, "green"] -] -_TICK_TEXT_TPUT = ["Regression", "Normal", "Progression"] -_COLORSCALE_LAT = [ - [0.00, "green"], - [0.33, "green"], - [0.33, "white"], - [0.66, "white"], - [0.66, "red"], - [1.00, "red"] -] -_TICK_TEXT_LAT = ["Progression", "Normal", "Regression"] -_VALUE = { - "mrr": "result_receive_rate_rate_avg", - "ndr": "result_ndr_lower_rate_value", - "pdr": "result_pdr_lower_rate_value", - "pdr-lat": "result_latency_forward_pdr_50_avg" -} -_UNIT = { - "mrr": "result_receive_rate_rate_unit", - "ndr": "result_ndr_lower_rate_unit", - "pdr": "result_pdr_lower_rate_unit", - "pdr-lat": "result_latency_forward_pdr_50_unit" -} -_LAT_HDRH = ( # Do not change the order - "result_latency_forward_pdr_0_hdrh", - "result_latency_reverse_pdr_0_hdrh", - "result_latency_forward_pdr_10_hdrh", - "result_latency_reverse_pdr_10_hdrh", - "result_latency_forward_pdr_50_hdrh", - "result_latency_reverse_pdr_50_hdrh", - "result_latency_forward_pdr_90_hdrh", - "result_latency_reverse_pdr_90_hdrh", -) -# This value depends on latency stream rate (9001 pps) and duration (5s). -# Keep it slightly higher to ensure rounding errors to not remove tick mark. -PERCENTILE_MAX = 99.999501 - -_GRAPH_LAT_HDRH_DESC = { - "result_latency_forward_pdr_0_hdrh": "No-load.", - "result_latency_reverse_pdr_0_hdrh": "No-load.", - "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.", - "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.", - "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.", - "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.", - "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.", - "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR." 
-} +from ..utils.constants import Constants as C +from ..utils.utils import classify_anomalies def _get_color(idx: int) -> str: """ """ - _COLORS = ( - "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A", - "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285", - "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF", - "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051" - ) - return _COLORS[idx % len(_COLORS)] + return C.PLOT_COLORS[idx % len(C.PLOT_COLORS)] def _get_hdrh_latencies(row: pd.Series, name: str) -> dict: @@ -119,7 +37,7 @@ def _get_hdrh_latencies(row: pd.Series, name: str) -> dict: """ latencies = {"name": name} - for key in _LAT_HDRH: + for key in C.LAT_HDRH: try: latencies[key] = row[key] except KeyError: @@ -176,7 +94,7 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame, """ """ - df = df.dropna(subset=[_VALUE[ttype], ]) + df = df.dropna(subset=[C.VALUE[ttype], ]) if df.empty: return list() df = df.loc[((df["start_time"] >= start) & (df["start_time"] <= end))] @@ -185,9 +103,9 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame, x_axis = df["start_time"].tolist() if ttype == "pdr-lat": - y_data = [(itm / norm_factor) for itm in df[_VALUE[ttype]].tolist()] + y_data = [(itm / norm_factor) for itm in df[C.VALUE[ttype]].tolist()] else: - y_data = [(itm * norm_factor) for itm in df[_VALUE[ttype]].tolist()] + y_data = [(itm * norm_factor) for itm in df[C.VALUE[ttype]].tolist()] anomalies, trend_avg, trend_stdev = classify_anomalies( {k: v for k, v in zip(x_axis, y_data)} @@ -199,7 +117,7 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame, d_type = "trex" if row["dut_type"] == "none" else row["dut_type"] hover_itm = ( f"date: {row['start_time'].strftime('%Y-%m-%d %H:%M:%S')}
" - f" [{row[_UNIT[ttype]]}]: {y_data[idx]:,.0f}
" + f" [{row[C.UNIT[ttype]]}]: {y_data[idx]:,.0f}
" f"" f"{d_type}-ref: {row['dut_version']}
" f"csit-ref: {row['job']}/{row['build']}
" @@ -277,7 +195,7 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame, if anomaly in ("regression", "progression"): anomaly_x.append(x_axis[idx]) anomaly_y.append(trend_avg[idx]) - anomaly_color.append(_ANOMALY_COLOR[anomaly]) + anomaly_color.append(C.ANOMALY_COLOR[anomaly]) hover_itm = ( f"date: {x_axis[idx].strftime('%Y-%m-%d %H:%M:%S')}
" f"trend [pps]: {trend_avg[idx]:,.0f}
" @@ -301,8 +219,8 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame, "size": 15, "symbol": "circle-open", "color": anomaly_color, - "colorscale": _COLORSCALE_LAT \ - if ttype == "pdr-lat" else _COLORSCALE_TPUT, + "colorscale": C.COLORSCALE_LAT \ + if ttype == "pdr-lat" else C.COLORSCALE_TPUT, "showscale": True, "line": { "width": 2 @@ -314,8 +232,8 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame, "titleside": "right", "tickmode": "array", "tickvals": [0.167, 0.500, 0.833], - "ticktext": _TICK_TEXT_LAT \ - if ttype == "pdr-lat" else _TICK_TEXT_TPUT, + "ticktext": C.TICK_TEXT_LAT \ + if ttype == "pdr-lat" else C.TICK_TEXT_TPUT, "ticks": "", "ticklen": 0, "tickangle": -90, @@ -349,7 +267,7 @@ def graph_trending(data: pd.DataFrame, sel:dict, layout: dict, if normalize: phy = itm["phy"].split("-") topo_arch = f"{phy[0]}-{phy[1]}" if len(phy) == 4 else str() - norm_factor = (_NORM_FREQUENCY / _FREQURENCY[topo_arch]) \ + norm_factor = (C.NORM_FREQUENCY / C.FREQUENCY[topo_arch]) \ if topo_arch else 1.0 else: norm_factor = 1.0 @@ -400,11 +318,11 @@ def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure: # For 100%, we cut that down to "x_perc" to avoid # infinity. percentile = item.percentile_level_iterated_to - x_perc = min(percentile, PERCENTILE_MAX) + x_perc = min(percentile, C.PERCENTILE_MAX) xaxis.append(previous_x) yaxis.append(item.value_iterated_to) hovertext.append( - f"{_GRAPH_LAT_HDRH_DESC[lat_name]}
" + f"{C.GRAPH_LAT_HDRH_DESC[lat_name]}
" f"Direction: {('W-E', 'E-W')[idx % 2]}
" f"Percentile: {prev_perc:.5f}-{percentile:.5f}%
" f"Latency: {item.value_iterated_to}uSec" @@ -413,7 +331,7 @@ def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure: xaxis.append(next_x) yaxis.append(item.value_iterated_to) hovertext.append( - f"{_GRAPH_LAT_HDRH_DESC[lat_name]}
" + f"{C.GRAPH_LAT_HDRH_DESC[lat_name]}
" f"Direction: {('W-E', 'E-W')[idx % 2]}
" f"Percentile: {prev_perc:.5f}-{percentile:.5f}%
" f"Latency: {item.value_iterated_to}uSec" @@ -425,9 +343,9 @@ def graph_hdrh_latency(data: dict, layout: dict) -> go.Figure: go.Scatter( x=xaxis, y=yaxis, - name=_GRAPH_LAT_HDRH_DESC[lat_name], + name=C.GRAPH_LAT_HDRH_DESC[lat_name], mode="lines", - legendgroup=_GRAPH_LAT_HDRH_DESC[lat_name], + legendgroup=C.GRAPH_LAT_HDRH_DESC[lat_name], showlegend=bool(idx % 2), line=dict( color=_get_color(int(idx/2)), diff --git a/resources/tools/dash/app/pal/trending/layout.py b/resources/tools/dash/app/pal/trending/layout.py index d632820b99..48a8e5481e 100644 --- a/resources/tools/dash/app/pal/trending/layout.py +++ b/resources/tools/dash/app/pal/trending/layout.py @@ -30,8 +30,9 @@ from copy import deepcopy from json import loads, JSONDecodeError from ast import literal_eval +from ..utils.constants import Constants as C +from ..utils.url_processing import url_decode, url_encode from ..data.data import Data -from ..data.url_processing import url_decode, url_encode from .graphs import graph_trending, graph_hdrh_latency, \ select_trending_data @@ -40,51 +41,6 @@ class Layout: """ """ - # If True, clear all inputs in control panel when button "ADD SELECTED" is - # pressed. - CLEAR_ALL_INPUTS = False - - STYLE_DISABLED = {"display": "none"} - STYLE_ENABLED = {"display": "inherit"} - - CL_ALL_DISABLED = [{ - "label": "All", - "value": "all", - "disabled": True - }] - CL_ALL_ENABLED = [{ - "label": "All", - "value": "all", - "disabled": False - }] - - PLACEHOLDER = html.Nobr("") - - DRIVERS = ("avf", "af-xdp", "rdma", "dpdk") - - LABELS = { - "dpdk": "DPDK", - "container_memif": "LXC/DRC Container Memif", - "crypto": "IPSec IPv4 Routing", - "ip4": "IPv4 Routing", - "ip6": "IPv6 Routing", - "ip4_tunnels": "IPv4 Tunnels", - "l2": "L2 Ethernet Switching", - "srv6": "SRv6 Routing", - "vm_vhost": "VMs vhost-user", - "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec", - "nfv_density-vm_vhost-chain_dot1qip4vxlan":"VNF Service Chains Tunnels", - "nfv_density-vm_vhost-chain": "VNF Service Chains Routing", - "nfv_density-dcr_memif-pipeline": "CNF Service Pipelines Routing", - "nfv_density-dcr_memif-chain": "CNF Service Chains Routing", - } - - URL_STYLE = { - "background-color": "#d2ebf5", - "border-color": "#bce1f1", - "color": "#135d7c" - } - def __init__(self, app: Flask, html_layout_file: str, graph_layout_file: str, data_spec_file: str, tooltip_file: str, time_period: str=None) -> None: @@ -135,7 +91,7 @@ class Layout: replace("2n-", "") test = lst_test[-1] nic = suite.split("-")[0] - for drv in self.DRIVERS: + for drv in C.DRIVERS: if drv in test: if drv == "af-xdp": driver = "af_xdp" @@ -242,7 +198,7 @@ class Layout: return self._time_period def label(self, key: str) -> str: - return self.LABELS.get(key, key) + return C.LABELS.get(key, key) def _show_tooltip(self, id: str, title: str, clipboard_id: str=None) -> list: @@ -364,21 +320,21 @@ class Layout: id="row-graph-tput", class_name="g-0 p-2", children=[ - self.PLACEHOLDER + C.PLACEHOLDER ] ), dbc.Row( # Latency id="row-graph-lat", class_name="g-0 p-2", children=[ - self.PLACEHOLDER + C.PLACEHOLDER ] ), dbc.Row( # Download id="row-btn-download", class_name="g-0 p-2", children=[ - self.PLACEHOLDER + C.PLACEHOLDER ] ) ] @@ -497,7 +453,7 @@ class Layout: children=[ dbc.Checklist( id="cl-ctrl-framesize-all", - options=self.CL_ALL_DISABLED, + options=C.CL_ALL_DISABLED, inline=True, switch=False ), @@ -528,7 +484,7 @@ class Layout: children=[ dbc.Checklist( id="cl-ctrl-core-all", - options=self.CL_ALL_DISABLED, + options=C.CL_ALL_DISABLED, 
inline=False, switch=False ) @@ -559,7 +515,7 @@ class Layout: children=[ dbc.Checklist( id="cl-ctrl-testtype-all", - options=self.CL_ALL_DISABLED, + options=C.CL_ALL_DISABLED, inline=True, switch=False ), @@ -648,7 +604,7 @@ class Layout: dbc.Row( id="row-card-sel-tests", class_name="gy-1", - style=self.STYLE_DISABLED, + style=C.STYLE_DISABLED, children=[ dbc.Label( "Selected tests", @@ -665,7 +621,7 @@ class Layout: ), dbc.Row( id="row-btns-sel-tests", - style=self.STYLE_DISABLED, + style=C.STYLE_DISABLED, children=[ dbc.ButtonGroup( class_name="gy-2", @@ -695,12 +651,6 @@ class Layout: class ControlPanel: def __init__(self, panel: dict) -> None: - CL_ALL_DISABLED = [{ - "label": "All", - "value": "all", - "disabled": True - }] - # Defines also the order of keys self._defaults = { "dd-ctrl-dut-value": str(), @@ -716,15 +666,15 @@ class Layout: "cl-ctrl-core-options": list(), "cl-ctrl-core-value": list(), "cl-ctrl-core-all-value": list(), - "cl-ctrl-core-all-options": CL_ALL_DISABLED, + "cl-ctrl-core-all-options": C.CL_ALL_DISABLED, "cl-ctrl-framesize-options": list(), "cl-ctrl-framesize-value": list(), "cl-ctrl-framesize-all-value": list(), - "cl-ctrl-framesize-all-options": CL_ALL_DISABLED, + "cl-ctrl-framesize-all-options": C.CL_ALL_DISABLED, "cl-ctrl-testtype-options": list(), "cl-ctrl-testtype-value": list(), "cl-ctrl-testtype-all-value": list(), - "cl-ctrl-testtype-all-options": CL_ALL_DISABLED, + "cl-ctrl-testtype-all-options": C.CL_ALL_DISABLED, "btn-ctrl-add-disabled": True, "cl-normalize-value": list(), "cl-selected-options": list(), @@ -788,9 +738,9 @@ class Layout: (fig_tput, fig_lat) = figs - row_fig_tput = self.PLACEHOLDER - row_fig_lat = self.PLACEHOLDER - row_btn_dwnld = self.PLACEHOLDER + row_fig_tput = C.PLACEHOLDER + row_fig_lat = C.PLACEHOLDER + row_btn_dwnld = C.PLACEHOLDER if fig_tput: row_fig_tput = [ @@ -822,7 +772,7 @@ class Layout: class_name="me-1", children=[ dbc.InputGroupText( - style=self.URL_STYLE, + style=C.URL_STYLE, children=self._show_tooltip( "help-url", "URL", "input-url") ), @@ -830,7 +780,7 @@ class Layout: id="input-url", readonly=True, type="url", - style=self.URL_STYLE, + style=C.URL_STYLE, value=url ) ] @@ -971,15 +921,15 @@ class Layout: "cl-ctrl-core-options": list(), "cl-ctrl-core-value": list(), "cl-ctrl-core-all-value": list(), - "cl-ctrl-core-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-core-all-options": C.CL_ALL_DISABLED, "cl-ctrl-framesize-options": list(), "cl-ctrl-framesize-value": list(), "cl-ctrl-framesize-all-value": list(), - "cl-ctrl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-framesize-all-options": C.CL_ALL_DISABLED, "cl-ctrl-testtype-options": list(), "cl-ctrl-testtype-value": list(), "cl-ctrl-testtype-all-value": list(), - "cl-ctrl-testtype-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-testtype-all-options": C.CL_ALL_DISABLED, }) elif trigger_id == "dd-ctrl-phy": try: @@ -1005,15 +955,15 @@ class Layout: "cl-ctrl-core-options": list(), "cl-ctrl-core-value": list(), "cl-ctrl-core-all-value": list(), - "cl-ctrl-core-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-core-all-options": C.CL_ALL_DISABLED, "cl-ctrl-framesize-options": list(), "cl-ctrl-framesize-value": list(), "cl-ctrl-framesize-all-value": list(), - "cl-ctrl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-framesize-all-options": C.CL_ALL_DISABLED, "cl-ctrl-testtype-options": list(), "cl-ctrl-testtype-value": list(), "cl-ctrl-testtype-all-value": list(), - "cl-ctrl-testtype-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-testtype-all-options": 
C.CL_ALL_DISABLED, }) elif trigger_id == "dd-ctrl-area": try: @@ -1036,15 +986,15 @@ class Layout: "cl-ctrl-core-options": list(), "cl-ctrl-core-value": list(), "cl-ctrl-core-all-value": list(), - "cl-ctrl-core-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-core-all-options": C.CL_ALL_DISABLED, "cl-ctrl-framesize-options": list(), "cl-ctrl-framesize-value": list(), "cl-ctrl-framesize-all-value": list(), - "cl-ctrl-framesize-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-framesize-all-options": C.CL_ALL_DISABLED, "cl-ctrl-testtype-options": list(), "cl-ctrl-testtype-value": list(), "cl-ctrl-testtype-all-value": list(), - "cl-ctrl-testtype-all-options": self.CL_ALL_DISABLED, + "cl-ctrl-testtype-all-options": C.CL_ALL_DISABLED, }) elif trigger_id == "dd-ctrl-test": core_opts = list() @@ -1069,15 +1019,15 @@ class Layout: "cl-ctrl-core-options": core_opts, "cl-ctrl-core-value": list(), "cl-ctrl-core-all-value": list(), - "cl-ctrl-core-all-options": self.CL_ALL_ENABLED, + "cl-ctrl-core-all-options": C.CL_ALL_ENABLED, "cl-ctrl-framesize-options": framesize_opts, "cl-ctrl-framesize-value": list(), "cl-ctrl-framesize-all-value": list(), - "cl-ctrl-framesize-all-options": self.CL_ALL_ENABLED, + "cl-ctrl-framesize-all-options": C.CL_ALL_ENABLED, "cl-ctrl-testtype-options": testtype_opts, "cl-ctrl-testtype-value": list(), "cl-ctrl-testtype-all-value": list(), - "cl-ctrl-testtype-all-options": self.CL_ALL_ENABLED, + "cl-ctrl-testtype-all-options": C.CL_ALL_ENABLED, }) elif trigger_id == "cl-ctrl-core": val_sel, val_all = self._sync_checklists( @@ -1180,17 +1130,17 @@ class Layout: "testtype": ttype.lower() }) store_sel = sorted(store_sel, key=lambda d: d["id"]) - row_card_sel_tests = self.STYLE_ENABLED - row_btns_sel_tests = self.STYLE_ENABLED - if self.CLEAR_ALL_INPUTS: + row_card_sel_tests = C.STYLE_ENABLED + row_btns_sel_tests = C.STYLE_ENABLED + if C.CLEAR_ALL_INPUTS: ctrl_panel.set(ctrl_panel.defaults) elif trigger_id == "btn-sel-remove-all": _ = btn_remove_all - row_fig_tput = self.PLACEHOLDER - row_fig_lat = self.PLACEHOLDER - row_btn_dwnld = self.PLACEHOLDER - row_card_sel_tests = self.STYLE_DISABLED - row_btns_sel_tests = self.STYLE_DISABLED + row_fig_tput = C.PLACEHOLDER + row_fig_lat = C.PLACEHOLDER + row_btn_dwnld = C.PLACEHOLDER + row_card_sel_tests = C.STYLE_DISABLED + row_btns_sel_tests = C.STYLE_DISABLED store_sel = list() ctrl_panel.set({"cl-selected-options": list()}) elif trigger_id == "btn-sel-remove": @@ -1210,8 +1160,8 @@ class Layout: d_start = self._get_date(url_params.get("start", list())[0]) d_end = self._get_date(url_params.get("end", list())[0]) if store_sel: - row_card_sel_tests = self.STYLE_ENABLED - row_btns_sel_tests = self.STYLE_ENABLED + row_card_sel_tests = C.STYLE_ENABLED + row_btns_sel_tests = C.STYLE_ENABLED if trigger_id in ("btn-ctrl-add", "url", "dpr-period", "btn-sel-remove", "cl-ctrl-normalize"): @@ -1226,11 +1176,11 @@ class Layout: "cl-selected-options": self._list_tests(store_sel) }) else: - row_fig_tput = self.PLACEHOLDER - row_fig_lat = self.PLACEHOLDER - row_btn_dwnld = self.PLACEHOLDER - row_card_sel_tests = self.STYLE_DISABLED - row_btns_sel_tests = self.STYLE_DISABLED + row_fig_tput = C.PLACEHOLDER + row_fig_lat = C.PLACEHOLDER + row_btn_dwnld = C.PLACEHOLDER + row_card_sel_tests = C.STYLE_DISABLED + row_btns_sel_tests = C.STYLE_DISABLED store_sel = list() ctrl_panel.set({"cl-selected-options": list()}) diff --git a/resources/tools/dash/app/pal/trending/trending.py b/resources/tools/dash/app/pal/trending/trending.py index 1c64677eea..3697f7150f 
100644 --- a/resources/tools/dash/app/pal/trending/trending.py +++ b/resources/tools/dash/app/pal/trending/trending.py @@ -14,8 +14,8 @@ """Instantiate the Trending Dash applocation. """ import dash -import dash_bootstrap_components as dbc +from ..utils.constants import Constants as C from .layout import Layout @@ -30,16 +30,16 @@ def init_trending(server, time_period=None): dash_app = dash.Dash( server=server, - routes_pathname_prefix=u"/trending/", - external_stylesheets=[dbc.themes.LUX], + routes_pathname_prefix=C.TREND_ROUTES_PATHNAME_PREFIX, + external_stylesheets=C.EXTERNAL_STYLESHEETS ) layout = Layout( app=dash_app, - html_layout_file="pal/templates/trending_layout.jinja2", - graph_layout_file="pal/trending/layout.yaml", - data_spec_file="pal/data/data.yaml", - tooltip_file="pal/data/tooltips.yaml", + html_layout_file=C.TREND_HTML_LAYOUT_FILE, + graph_layout_file=C.TREND_GRAPH_LAYOUT_FILE, + data_spec_file=C.DATA_SPEC_FILE, + tooltip_file=C.TOOLTIP_FILE, time_period=time_period ) dash_app.index_string = layout.html_layout diff --git a/resources/tools/dash/app/pal/utils/__init__.py b/resources/tools/dash/app/pal/utils/__init__.py new file mode 100644 index 0000000000..5692432123 --- /dev/null +++ b/resources/tools/dash/app/pal/utils/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) 2022 Cisco and/or its affiliates. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/resources/tools/dash/app/pal/utils/constants.py b/resources/tools/dash/app/pal/utils/constants.py new file mode 100644 index 0000000000..1c84ba14a6 --- /dev/null +++ b/resources/tools/dash/app/pal/utils/constants.py @@ -0,0 +1,299 @@ +# Copyright (c) 2022 Cisco and/or its affiliates. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Constants used in Dash PAL. + +"Constant" means a value that keeps its value since initialization. The value +does not need to be hard coded here, but can be read from environment variables. +""" + + +import dash_bootstrap_components as dbc + +from dash import html + + +class Constants: + """Constants used in Dash PAL. + """ + + ############################################################################ + # General, application wide constants. + + # The application title. + TITLE = "FD.io CSIT" + + # The application description. + DESCRIPTION = "Performance Dashboard" + + # External stylesheets. + EXTERNAL_STYLESHEETS = [dbc.themes.LUX, ] + + # Top level template for all pages. 
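The Constants class started above keeps every value as a class attribute, and the other modules import it uniformly as `from ..utils.constants import Constants as C`. As its docstring notes, a value does not have to be hard coded; a hedged sketch of sourcing one attribute from the environment instead (the CSIT_TIME_PERIOD variable name is hypothetical, the shipped constants.py hard-codes the value):

```python
# Illustrative only: the shipped constants.py hard-codes TIME_PERIOD, but the
# same attribute could be read from the environment, as the docstring suggests.
# "CSIT_TIME_PERIOD" is a hypothetical variable name, not used by the patch.
import os

class Constants:
    MAX_TIME_PERIOD = 180
    TIME_PERIOD = int(os.environ.get("CSIT_TIME_PERIOD", MAX_TIME_PERIOD))

C = Constants
print(C.TIME_PERIOD)  # 180 unless CSIT_TIME_PERIOD is set
```

The page template and file-path constants continue below.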
+    TEMPLATE = "d-flex h-100 text-center text-white bg-dark"
+
+    # Path and name of the file specifying the HTML layout of the dash
+    # application.
+    MAIN_HTML_LAYOUT_FILE = "index_layout.jinja2"
+
+    # Application root.
+    APPLICATIN_ROOT = "/"
+
+    # Specification file of the data to be downloaded from the parquets.
+    DATA_SPEC_FILE = "pal/data/data.yaml"
+
+    # The file with tooltips.
+    TOOLTIP_FILE = "pal/utils/tooltips.yaml"
+
+    # Maximal value of TIME_PERIOD for data read from the parquets in days.
+    # Do not change without a good reason.
+    MAX_TIME_PERIOD = 180
+
+    # It defines the time period for data read from the parquets in days from
+    # now back to the past.
+    # TIME_PERIOD = None - means all data (max MAX_TIME_PERIOD days) is read.
+    # TIME_PERIOD = MAX_TIME_PERIOD - is the default value.
+    TIME_PERIOD = MAX_TIME_PERIOD # [days]
+
+    # List of releases used for iterative data processing.
+    # The releases MUST be in the order from the current (newest) to the last
+    # (oldest).
+    RELEASES = ["csit2206", "csit2202", ]
+
+    ############################################################################
+    # General, application wide, layout affecting constants.
+
+    # If True, clear all inputs in control panel when button "ADD SELECTED" is
+    # pressed.
+    CLEAR_ALL_INPUTS = False
+
+    # The element is disabled.
+    STYLE_DISABLED = {"display": "none"}
+
+    # The element is enabled and visible.
+    STYLE_ENABLED = {"display": "inherit"}
+
+    # Checklist "All" is disabled.
+    CL_ALL_DISABLED = [
+        {
+            "label": "All",
+            "value": "all",
+            "disabled": True
+        }
+    ]
+
+    # Checklist "All" is enabled, visible and unchecked.
+    CL_ALL_ENABLED = [
+        {
+            "label": "All",
+            "value": "all",
+            "disabled": False
+        }
+    ]
+
+    # Placeholder for any element in the layout.
+    PLACEHOLDER = html.Nobr("")
+
+    # List of drivers used in CSIT.
+    DRIVERS = ("avf", "af-xdp", "rdma", "dpdk")
+
+    # Labels for input elements (dropdowns, ...).
+    LABELS = {
+        "dpdk": "DPDK",
+        "container_memif": "LXC/DRC Container Memif",
+        "crypto": "IPSec IPv4 Routing",
+        "ip4": "IPv4 Routing",
+        "ip6": "IPv6 Routing",
+        "ip4_tunnels": "IPv4 Tunnels",
+        "l2": "L2 Ethernet Switching",
+        "srv6": "SRv6 Routing",
+        "vm_vhost": "VMs vhost-user",
+        "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec",
+        "nfv_density-vm_vhost-chain_dot1qip4vxlan": "VNF Service Chains Tunnels",
+        "nfv_density-vm_vhost-chain": "VNF Service Chains Routing",
+        "nfv_density-dcr_memif-pipeline": "CNF Service Pipelines Routing",
+        "nfv_density-dcr_memif-chain": "CNF Service Chains Routing",
+    }
+
+    # URL style.
+    URL_STYLE = {
+        "background-color": "#d2ebf5",
+        "border-color": "#bce1f1",
+        "color": "#135d7c"
+    }
+
+    ############################################################################
+    # General, normalization constants.
+
+    NORM_FREQUENCY = 2.0 # [GHz]
+    FREQUENCY = { # [GHz]
+        "2n-aws": 1.000,
+        "2n-dnv": 2.000,
+        "2n-clx": 2.300,
+        "2n-icx": 2.600,
+        "2n-skx": 2.500,
+        "2n-tx2": 2.500,
+        "2n-zn2": 2.900,
+        "3n-alt": 3.000,
+        "3n-aws": 1.000,
+        "3n-dnv": 2.000,
+        "3n-icx": 2.600,
+        "3n-skx": 2.500,
+        "3n-tsh": 2.200
+    }
+
+    ############################################################################
+    # General, plots constants.
+
+    PLOT_COLORS = (
+        "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A",
+        "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285",
+        "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF",
+        "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051"
+    )
+
+    # Trending, anomalies.
+    ANOMALY_COLOR = {
+        "regression": 0.0,
+        "normal": 0.5,
+        "progression": 1.0
+    }
+
+    COLORSCALE_TPUT = [
+        [0.00, "red"],
+        [0.33, "red"],
+        [0.33, "white"],
+        [0.66, "white"],
+        [0.66, "green"],
+        [1.00, "green"]
+    ]
+
+    TICK_TEXT_TPUT = ["Regression", "Normal", "Progression"]
+
+    COLORSCALE_LAT = [
+        [0.00, "green"],
+        [0.33, "green"],
+        [0.33, "white"],
+        [0.66, "white"],
+        [0.66, "red"],
+        [1.00, "red"]
+    ]
+
+    TICK_TEXT_LAT = ["Progression", "Normal", "Regression"]
+
+    # Access to the results.
+    VALUE = {
+        "mrr": "result_receive_rate_rate_avg",
+        "ndr": "result_ndr_lower_rate_value",
+        "pdr": "result_pdr_lower_rate_value",
+        "pdr-lat": "result_latency_forward_pdr_50_avg"
+    }
+
+    VALUE_ITER = {
+        "mrr": "result_receive_rate_rate_values",
+        "ndr": "result_ndr_lower_rate_value",
+        "pdr": "result_pdr_lower_rate_value",
+        "pdr-lat": "result_latency_forward_pdr_50_avg"
+    }
+
+    UNIT = {
+        "mrr": "result_receive_rate_rate_unit",
+        "ndr": "result_ndr_lower_rate_unit",
+        "pdr": "result_pdr_lower_rate_unit",
+        "pdr-lat": "result_latency_forward_pdr_50_unit"
+    }
+
+    # Latencies.
+    LAT_HDRH = ( # Do not change the order
+        "result_latency_forward_pdr_0_hdrh",
+        "result_latency_reverse_pdr_0_hdrh",
+        "result_latency_forward_pdr_10_hdrh",
+        "result_latency_reverse_pdr_10_hdrh",
+        "result_latency_forward_pdr_50_hdrh",
+        "result_latency_reverse_pdr_50_hdrh",
+        "result_latency_forward_pdr_90_hdrh",
+        "result_latency_reverse_pdr_90_hdrh",
+    )
+
+    # This value depends on latency stream rate (9001 pps) and duration (5s).
+    # Keep it slightly higher to ensure rounding errors do not remove the tick mark.
+    PERCENTILE_MAX = 99.999501
+
+    GRAPH_LAT_HDRH_DESC = {
+        "result_latency_forward_pdr_0_hdrh": "No-load.",
+        "result_latency_reverse_pdr_0_hdrh": "No-load.",
+        "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.",
+        "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.",
+        "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.",
+        "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.",
+        "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.",
+        "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR."
+    }
+
+    ############################################################################
+    # News.
+
+    # The pathname prefix for the application.
+    NEWS_ROUTES_PATHNAME_PREFIX = "/news/"
+
+    # Path and name of the file specifying the HTML layout of the dash
+    # application.
+    NEWS_HTML_LAYOUT_FILE = "pal/templates/news_layout.jinja2"
+
+    # The default job displayed when the page is loaded for the first time.
+    NEWS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
+
+    # Time period for regressions and progressions.
+    NEWS_TIME_PERIOD = 21 # [days]
+
+    ############################################################################
+    # Report.
+
+    # The pathname prefix for the application.
+    REPORT_ROUTES_PATHNAME_PREFIX = "/report/"
+
+    # Path and name of the file specifying the HTML layout of the dash
+    # application.
+    REPORT_HTML_LAYOUT_FILE = "pal/templates/report_layout.jinja2"
+
+    # Layout of plot.ly graphs.
+    REPORT_GRAPH_LAYOUT_FILE = "pal/report/layout.yaml"
+
+    ############################################################################
+    # Statistics.
+
+    # The pathname prefix for the application.
+    STATS_ROUTES_PATHNAME_PREFIX = "/stats/"
+
+    # Path and name of the file specifying the HTML layout of the dash
+    # application.
+    STATS_HTML_LAYOUT_FILE = "pal/templates/stats_layout.jinja2"
+
+    # Layout of plot.ly graphs.
+    STATS_GRAPH_LAYOUT_FILE = "pal/stats/layout.yaml"
+
+    # The default job displayed when the page is loaded for the first time.
+    STATS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
+
+    ############################################################################
+    # Trending.
+
+    # The pathname prefix for the application.
+    TREND_ROUTES_PATHNAME_PREFIX = "/trending/"
+
+    # Path and name of the file specifying the HTML layout of the dash
+    # application.
+    TREND_HTML_LAYOUT_FILE = "pal/templates/trending_layout.jinja2"
+
+    # Layout of plot.ly graphs.
+    TREND_GRAPH_LAYOUT_FILE = "pal/trending/layout.yaml"
diff --git a/resources/tools/dash/app/pal/utils/tooltips.yaml b/resources/tools/dash/app/pal/utils/tooltips.yaml
new file mode 100644
index 0000000000..2086b575a9
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/tooltips.yaml
@@ -0,0 +1,40 @@
+help-area:
+  The area defines a VPP packet path and lookup type.
+help-cadence:
+  The cadence of the Jenkins job which runs the tests.
+help-cores:
+  Number of cores the DUT uses during the test.
+help-download:
+  Download the selected data as a CSV file.
+help-dut:
+  Device Under Test (DUT) - In software networking, “device” denotes a specific
+  piece of software tasked with packet processing. Such a device is surrounded
+  with other software components (such as operating system kernel).
+help-dut-ver:
+  The version of the Device under Test.
+help-framesize:
+  Frame size - size of an Ethernet Layer-2 frame on the wire, including any VLAN
+  tags (dot1q, dot1ad) and Ethernet FCS, but excluding Ethernet preamble and
+  inter-frame gap. Measured in Bytes.
+help-infra:
+  Infrastructure is defined by the topology (number of nodes), processor
+  architecture, NIC and driver.
+help-normalize:
+  Normalize the results to a CPU frequency of 2 GHz. The results from the AWS
+  environment are not normalized as we do not know the exact CPU frequency.
+help-release:
+  The CSIT release.
+help-tbed:
+  The test bed is defined by topology (number of nodes) and processor
+  architecture.
+help-test:
+  The test specification consists of packet encapsulation, VPP packet processing
+  (packet forwarding mode and packet processing function(s)) and packet
+  forwarding path.
+help-time-period:
+  Choose a time period for selected tests.
+help-ttype:
+  Main measured variable.
+help-url:
+  URL with current configuration. If there is no "Copy URL" button, use triple
+  click.
diff --git a/resources/tools/dash/app/pal/utils/url_processing.py b/resources/tools/dash/app/pal/utils/url_processing.py
new file mode 100644
index 0000000000..9307015d0d
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/url_processing.py
@@ -0,0 +1,99 @@
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""URL decoding, parsing and encoding.
+""" + +import logging + +from base64 import urlsafe_b64encode, urlsafe_b64decode +from urllib.parse import urlencode, urlunparse, urlparse, parse_qs +from zlib import compress, decompress +from zlib import error as ZlibErr +from binascii import Error as BinasciiErr + + +def url_encode(params: dict) -> str: + """Encode the URL parameters and zip them and create the whole URL using + given data. + + :param params: All data necessary to create the URL: + - scheme, + - network location, + - path, + - query, + - parameters. + :type params: dict + :returns: Encoded URL. + :rtype: str + """ + + url_params = params.get("params", None) + if url_params: + encoded_params = urlsafe_b64encode( + compress(urlencode(url_params).encode("utf-8"), level=9) + ).rstrip(b"=").decode("utf-8") + else: + encoded_params = str() + + return urlunparse(( + params.get("scheme", "http"), + params.get("netloc", str()), + params.get("path", str()), + str(), # params + params.get("query", str()), + encoded_params + )) + + +def url_decode(url: str) -> dict: + """Parse the given URL and decode the parameters. + + :param url: URL to be parsed and decoded. + :type url: str + :returns: Paresed URL. + :rtype: dict + """ + + try: + parsed_url = urlparse(url) + except ValueError as err: + logging.warning(f"\nThe url {url} is not valid, ignoring.\n{repr(err)}") + return None + + if parsed_url.fragment: + try: + padding = b"=" * (4 - (len(parsed_url.fragment) % 4)) + params = parse_qs(decompress( + urlsafe_b64decode( + (parsed_url.fragment.encode("utf-8") + padding) + )).decode("utf-8") + ) + except (BinasciiErr, UnicodeDecodeError, ZlibErr) as err: + logging.warning( + f"\nNot possible to decode the parameters from url: {url}" + f"\nEncoded parameters: '{parsed_url.fragment}'" + f"\n{repr(err)}" + ) + return None + else: + params = None + + return { + "scheme": parsed_url.scheme, + "netloc": parsed_url.netloc, + "path": parsed_url.path, + "query": parsed_url.query, + "fragment": parsed_url.fragment, + "params": params + } diff --git a/resources/tools/dash/app/pal/utils/utils.py b/resources/tools/dash/app/pal/utils/utils.py new file mode 100644 index 0000000000..63c9c1aaa4 --- /dev/null +++ b/resources/tools/dash/app/pal/utils/utils.py @@ -0,0 +1,69 @@ +# Copyright (c) 2022 Cisco and/or its affiliates. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +""" + +from numpy import isnan + +from ..jumpavg import classify + + +def classify_anomalies(data): + """Process the data and return anomalies and trending values. + + Gather data into groups with average as trend value. + Decorate values within groups to be normal, + the first value of changed average as a regression, or a progression. + + :param data: Full data set with unavailable samples replaced by nan. + :type data: OrderedDict + :returns: Classification and trend values + :rtype: 3-tuple, list of strings, list of floats and list of floats + """ + # NaN means something went wrong. + # Use 0.0 to cause that being reported as a severe regression. 
+ bare_data = [0.0 if isnan(sample) else sample for sample in data.values()] + # TODO: Make BitCountingGroupList a subclass of list again? + group_list = classify(bare_data).group_list + group_list.reverse() # Just to use .pop() for FIFO. + classification = list() + avgs = list() + stdevs = list() + active_group = None + values_left = 0 + avg = 0.0 + stdv = 0.0 + for sample in data.values(): + if isnan(sample): + classification.append("outlier") + avgs.append(sample) + stdevs.append(sample) + continue + if values_left < 1 or active_group is None: + values_left = 0 + while values_left < 1: # Ignore empty groups (should not happen). + active_group = group_list.pop() + values_left = len(active_group.run_list) + avg = active_group.stats.avg + stdv = active_group.stats.stdev + classification.append(active_group.comment) + avgs.append(avg) + stdevs.append(stdv) + values_left -= 1 + continue + classification.append("normal") + avgs.append(avg) + stdevs.append(stdv) + values_left -= 1 + return classification, avgs, stdevs -- cgit 1.2.3-korg
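For reference, a minimal round-trip sketch of the url_processing helpers added by this change. The import path assumes the new pal.utils package layout (and that the app's dependencies are installed); the host name and parameter values are made up for illustration only.

from pal.utils.url_processing import url_encode, url_decode

# Build a shareable URL: url_encode() urlencodes the parameters, compresses
# them with zlib and base64-encodes the result into the URL fragment.
url = url_encode({
    "scheme": "https",
    "netloc": "csit.example.org",  # hypothetical host
    "path": "/trending/",
    "params": {"testbed": "2n-icx", "ttype": "mrr"}
})

# url_decode() reverses the process; note that parse_qs() returns each value
# as a list.
decoded = url_decode(url)
assert decoded["params"] == {"testbed": ["2n-icx"], "ttype": ["mrr"]}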