Diffstat (limited to 'resources/tools/dash/app/pal/utils')
 -rw-r--r--  resources/tools/dash/app/pal/utils/__init__.py        12
 -rw-r--r--  resources/tools/dash/app/pal/utils/constants.py      299
 -rw-r--r--  resources/tools/dash/app/pal/utils/tooltips.yaml      40
 -rw-r--r--  resources/tools/dash/app/pal/utils/url_processing.py  99
 -rw-r--r--  resources/tools/dash/app/pal/utils/utils.py           69
 5 files changed, 519 insertions(+), 0 deletions(-)
diff --git a/resources/tools/dash/app/pal/utils/__init__.py b/resources/tools/dash/app/pal/utils/__init__.py
new file mode 100644
index 0000000000..5692432123
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/__init__.py
@@ -0,0 +1,12 @@
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/resources/tools/dash/app/pal/utils/constants.py b/resources/tools/dash/app/pal/utils/constants.py
new file mode 100644
index 0000000000..1c84ba14a6
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/constants.py
@@ -0,0 +1,299 @@
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Constants used in Dash PAL.
+
+"Constant" means a value that keeps its value since initialization. The value
+does not need to be hard coded here, but can be read from environment variables.
+"""
+
+
+import dash_bootstrap_components as dbc
+
+from dash import html
+
+
+class Constants:
+ """Constants used in Dash PAL.
+ """
+
+ ############################################################################
+ # General, application wide constants.
+
+ # The application title.
+ TITLE = "FD.io CSIT"
+
+ # The application description.
+ DESCRIPTION = "Performance Dashboard"
+
+ # External stylesheets.
+ EXTERNAL_STYLESHEETS = [dbc.themes.LUX, ]
+
+ # Top level template for all pages.
+ TEMPLATE = "d-flex h-100 text-center text-white bg-dark"
+
+ # Path and name of the file specifying the HTML layout of the dash
+ # application.
+ MAIN_HTML_LAYOUT_FILE = "index_layout.jinja2"
+
+ # Application root.
+ APPLICATIN_ROOT = "/"
+
+    # Specification file of the data to be downloaded from the parquets.
+ DATA_SPEC_FILE = "pal/data/data.yaml"
+
+ # The file with tooltips.
+ TOOLTIP_FILE = "pal/utils/tooltips.yaml"
+
+    # Maximum value of TIME_PERIOD (in days) for data read from the parquets.
+ # Do not change without a good reason.
+ MAX_TIME_PERIOD = 180
+
+    # The time period (in days, from now back to the past) for data read from
+    # the parquets.
+    # TIME_PERIOD = None means all data (at most MAX_TIME_PERIOD days) is read.
+    # TIME_PERIOD = MAX_TIME_PERIOD is the default value.
+ TIME_PERIOD = MAX_TIME_PERIOD # [days]
+
+ # List of releases used for iterative data processing.
+ # The releases MUST be in the order from the current (newest) to the last
+ # (oldest).
+ RELEASES = ["csit2206", "csit2202", ]
+
+ ############################################################################
+ # General, application wide, layout affecting constants.
+
+ # If True, clear all inputs in control panel when button "ADD SELECTED" is
+ # pressed.
+ CLEAR_ALL_INPUTS = False
+
+    # Style of a disabled (hidden) element.
+    STYLE_DISABLED = {"display": "none"}
+
+    # Style of an enabled (visible) element.
+    STYLE_ENABLED = {"display": "inherit"}
+
+ # Checklist "All" is disabled.
+ CL_ALL_DISABLED = [
+ {
+ "label": "All",
+ "value": "all",
+ "disabled": True
+ }
+ ]
+
+ # Checklist "All" is enable, visible and unchecked.
+ CL_ALL_ENABLED = [
+ {
+ "label": "All",
+ "value": "all",
+ "disabled": False
+ }
+ ]
+
+ # Placeholder for any element in the layout.
+ PLACEHOLDER = html.Nobr("")
+
+ # List of drivers used in CSIT.
+ DRIVERS = ("avf", "af-xdp", "rdma", "dpdk")
+
+ # Labels for input elements (dropdowns, ...).
+ LABELS = {
+ "dpdk": "DPDK",
+ "container_memif": "LXC/DRC Container Memif",
+ "crypto": "IPSec IPv4 Routing",
+ "ip4": "IPv4 Routing",
+ "ip6": "IPv6 Routing",
+ "ip4_tunnels": "IPv4 Tunnels",
+ "l2": "L2 Ethernet Switching",
+ "srv6": "SRv6 Routing",
+ "vm_vhost": "VMs vhost-user",
+ "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec",
+ "nfv_density-vm_vhost-chain_dot1qip4vxlan":"VNF Service Chains Tunnels",
+ "nfv_density-vm_vhost-chain": "VNF Service Chains Routing",
+ "nfv_density-dcr_memif-pipeline": "CNF Service Pipelines Routing",
+ "nfv_density-dcr_memif-chain": "CNF Service Chains Routing",
+ }
+
+ # URL style.
+ URL_STYLE = {
+ "background-color": "#d2ebf5",
+ "border-color": "#bce1f1",
+ "color": "#135d7c"
+ }
+
+ ############################################################################
+ # General, normalization constants.
+
+ NORM_FREQUENCY = 2.0 # [GHz]
+ FREQUENCY = { # [GHz]
+ "2n-aws": 1.000,
+ "2n-dnv": 2.000,
+ "2n-clx": 2.300,
+ "2n-icx": 2.600,
+ "2n-skx": 2.500,
+ "2n-tx2": 2.500,
+ "2n-zn2": 2.900,
+ "3n-alt": 3.000,
+ "3n-aws": 1.000,
+ "3n-dnv": 2.000,
+ "3n-icx": 2.600,
+ "3n-skx": 2.500,
+ "3n-tsh": 2.200
+ }
+
+ ############################################################################
+ # General, plots constants.
+
+ PLOT_COLORS = (
+ "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A",
+ "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285",
+ "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF",
+ "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051"
+ )
+
+ # Trending, anomalies.
+ ANOMALY_COLOR = {
+ "regression": 0.0,
+ "normal": 0.5,
+ "progression": 1.0
+ }
+
+ COLORSCALE_TPUT = [
+ [0.00, "red"],
+ [0.33, "red"],
+ [0.33, "white"],
+ [0.66, "white"],
+ [0.66, "green"],
+ [1.00, "green"]
+ ]
+
+ TICK_TEXT_TPUT = ["Regression", "Normal", "Progression"]
+
+ COLORSCALE_LAT = [
+ [0.00, "green"],
+ [0.33, "green"],
+ [0.33, "white"],
+ [0.66, "white"],
+ [0.66, "red"],
+ [1.00, "red"]
+ ]
+
+ TICK_TEXT_LAT = ["Progression", "Normal", "Regression"]
+
+ # Access to the results.
+ VALUE = {
+ "mrr": "result_receive_rate_rate_avg",
+ "ndr": "result_ndr_lower_rate_value",
+ "pdr": "result_pdr_lower_rate_value",
+ "pdr-lat": "result_latency_forward_pdr_50_avg"
+ }
+
+ VALUE_ITER = {
+ "mrr": "result_receive_rate_rate_values",
+ "ndr": "result_ndr_lower_rate_value",
+ "pdr": "result_pdr_lower_rate_value",
+ "pdr-lat": "result_latency_forward_pdr_50_avg"
+ }
+
+ UNIT = {
+ "mrr": "result_receive_rate_rate_unit",
+ "ndr": "result_ndr_lower_rate_unit",
+ "pdr": "result_pdr_lower_rate_unit",
+ "pdr-lat": "result_latency_forward_pdr_50_unit"
+ }
+
+ # Latencies.
+ LAT_HDRH = ( # Do not change the order
+ "result_latency_forward_pdr_0_hdrh",
+ "result_latency_reverse_pdr_0_hdrh",
+ "result_latency_forward_pdr_10_hdrh",
+ "result_latency_reverse_pdr_10_hdrh",
+ "result_latency_forward_pdr_50_hdrh",
+ "result_latency_reverse_pdr_50_hdrh",
+ "result_latency_forward_pdr_90_hdrh",
+ "result_latency_reverse_pdr_90_hdrh",
+ )
+
+ # This value depends on latency stream rate (9001 pps) and duration (5s).
+    # Keep it slightly higher so that rounding errors do not remove a tick mark.
+ PERCENTILE_MAX = 99.999501
+
+ GRAPH_LAT_HDRH_DESC = {
+ "result_latency_forward_pdr_0_hdrh": "No-load.",
+ "result_latency_reverse_pdr_0_hdrh": "No-load.",
+ "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.",
+ "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.",
+ "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.",
+ "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.",
+ "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.",
+ "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR."
+ }
+
+ ############################################################################
+ # News.
+
+ # The pathname prefix for the application.
+ NEWS_ROUTES_PATHNAME_PREFIX = "/news/"
+
+ # Path and name of the file specifying the HTML layout of the dash
+ # application.
+ NEWS_HTML_LAYOUT_FILE = "pal/templates/news_layout.jinja2"
+
+ # The default job displayed when the page is loaded first time.
+ NEWS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
+
+ # Time period for regressions and progressions.
+ NEWS_TIME_PERIOD = 21 # [days]
+
+ ############################################################################
+ # Report.
+
+ # The pathname prefix for the application.
+ REPORT_ROUTES_PATHNAME_PREFIX = "/report/"
+
+ # Path and name of the file specifying the HTML layout of the dash
+ # application.
+ REPORT_HTML_LAYOUT_FILE = "pal/templates/report_layout.jinja2"
+
+ # Layout of plot.ly graphs.
+ REPORT_GRAPH_LAYOUT_FILE = "pal/report/layout.yaml"
+
+ ############################################################################
+ # Statistics.
+
+ # The pathname prefix for the application.
+ STATS_ROUTES_PATHNAME_PREFIX = "/stats/"
+
+ # Path and name of the file specifying the HTML layout of the dash
+ # application.
+ STATS_HTML_LAYOUT_FILE = "pal/templates/stats_layout.jinja2"
+
+ # Layout of plot.ly graphs.
+ STATS_GRAPH_LAYOUT_FILE = "pal/stats/layout.yaml"
+
+ # The default job displayed when the page is loaded first time.
+ STATS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
+
+ ############################################################################
+ # Trending.
+
+ # The pathname prefix for the application.
+ TREND_ROUTES_PATHNAME_PREFIX = "/trending/"
+
+ # Path and name of the file specifying the HTML layout of the dash
+ # application.
+ TREND_HTML_LAYOUT_FILE = "pal/templates/trending_layout.jinja2"
+
+ # Layout of plot.ly graphs.
+ TREND_GRAPH_LAYOUT_FILE = "pal/trending/layout.yaml"
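For context, a minimal sketch of how the normalization constants above might be
used; the helper name normalize_value and the import path are assumptions made
for illustration, not part of this patch.

    from pal.utils.constants import Constants as C  # assumed import path

    def normalize_value(value: float, testbed: str) -> float:
        """Hypothetical helper: scale a measured rate to the 2 GHz reference.

        Assumes results scale linearly with CPU frequency, which is how
        NORM_FREQUENCY and FREQUENCY are expected to be used.
        """
        return value * C.NORM_FREQUENCY / C.FREQUENCY[testbed]

    # Example: 10.0 Mpps measured on "2n-icx" (2.6 GHz) normalizes to
    # 10.0 * 2.0 / 2.6 = 7.69 Mpps.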
diff --git a/resources/tools/dash/app/pal/utils/tooltips.yaml b/resources/tools/dash/app/pal/utils/tooltips.yaml
new file mode 100644
index 0000000000..2086b575a9
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/tooltips.yaml
@@ -0,0 +1,40 @@
+help-area:
+ The area defines a VPP packet path and lookup type.
+help-cadence:
+ The cadence of the Jenkins job which runs the tests.
+help-cores:
+ Number of cores the DUT uses during the test.
+help-download:
+ Download the selected data as a csv file.
+help-dut:
+ Device Under Test (DUT) - In software networking, “device” denotes a specific
+  piece of software tasked with packet processing. Such a device is surrounded
+  by other software components (such as the operating system kernel).
+help-dut-ver:
+ The version of the Device under Test.
+help-framesize:
+ Frame size - size of an Ethernet Layer-2 frame on the wire, including any VLAN
+ tags (dot1q, dot1ad) and Ethernet FCS, but excluding Ethernet preamble and
+ inter-frame gap. Measured in Bytes.
+help-infra:
+  Infrastructure is defined by the topology (number of nodes), processor
+ architecture, NIC and driver.
+help-normalize:
+  Normalize the results to a CPU frequency of 2 GHz. Results from the AWS
+  environment are not normalized as the exact CPU frequency is not known.
+help-release:
+ The CSIT release.
+help-tbed:
+  The test bed is defined by the topology (number of nodes) and processor
+ architecture.
+help-test:
+ The test specification consists of packet encapsulation, VPP packet processing
+ (packet forwarding mode and packet processing function(s)) and packet
+ forwarding path.
+help-time-period:
+ Choose a time period for selected tests.
+help-ttype:
+ Main measured variable.
+help-url:
+  URL with the current configuration. If there is no "Copy URL" button, select
+  the URL with a triple click.
diff --git a/resources/tools/dash/app/pal/utils/url_processing.py b/resources/tools/dash/app/pal/utils/url_processing.py
new file mode 100644
index 0000000000..9307015d0d
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/url_processing.py
@@ -0,0 +1,99 @@
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""URL decoding and parsing and URL encoding.
+"""
+
+import logging
+
+from base64 import urlsafe_b64encode, urlsafe_b64decode
+from urllib.parse import urlencode, urlunparse, urlparse, parse_qs
+from zlib import compress, decompress
+from zlib import error as ZlibErr
+from binascii import Error as BinasciiErr
+
+
+def url_encode(params: dict) -> str:
+ """Encode the URL parameters and zip them and create the whole URL using
+ given data.
+
+ :param params: All data necessary to create the URL:
+ - scheme,
+ - network location,
+ - path,
+ - query,
+ - parameters.
+ :type params: dict
+ :returns: Encoded URL.
+ :rtype: str
+ """
+
+ url_params = params.get("params", None)
+ if url_params:
+ encoded_params = urlsafe_b64encode(
+ compress(urlencode(url_params).encode("utf-8"), level=9)
+ ).rstrip(b"=").decode("utf-8")
+ else:
+ encoded_params = str()
+
+ return urlunparse((
+ params.get("scheme", "http"),
+ params.get("netloc", str()),
+ params.get("path", str()),
+ str(), # params
+ params.get("query", str()),
+ encoded_params
+ ))
+
+
+def url_decode(url: str) -> dict:
+ """Parse the given URL and decode the parameters.
+
+ :param url: URL to be parsed and decoded.
+ :type url: str
+    :returns: Parsed URL, or None if the URL cannot be parsed or decoded.
+    :rtype: dict or None
+ """
+
+ try:
+ parsed_url = urlparse(url)
+ except ValueError as err:
+ logging.warning(f"\nThe url {url} is not valid, ignoring.\n{repr(err)}")
+ return None
+
+ if parsed_url.fragment:
+ try:
+ padding = b"=" * (4 - (len(parsed_url.fragment) % 4))
+ params = parse_qs(decompress(
+ urlsafe_b64decode(
+ (parsed_url.fragment.encode("utf-8") + padding)
+ )).decode("utf-8")
+ )
+ except (BinasciiErr, UnicodeDecodeError, ZlibErr) as err:
+ logging.warning(
+ f"\nNot possible to decode the parameters from url: {url}"
+ f"\nEncoded parameters: '{parsed_url.fragment}'"
+ f"\n{repr(err)}"
+ )
+ return None
+ else:
+ params = None
+
+ return {
+ "scheme": parsed_url.scheme,
+ "netloc": parsed_url.netloc,
+ "path": parsed_url.path,
+ "query": parsed_url.query,
+ "fragment": parsed_url.fragment,
+ "params": params
+ }
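A brief round-trip usage sketch of the two functions above; the import path,
host name and parameter names are illustrative only.

    from pal.utils.url_processing import url_encode, url_decode  # assumed path

    url = url_encode({
        "scheme": "https",
        "netloc": "csit.example.org",  # illustrative host
        "path": "/trending",
        "params": {"testbed": "2n-icx", "ttype": "mrr"}
    })
    # The parameters are urlencoded, zlib-compressed and urlsafe-base64-encoded
    # into the fragment: "https://csit.example.org/trending#<encoded>".

    decoded = url_decode(url)
    # parse_qs returns each value as a list, so:
    # decoded["params"] == {"testbed": ["2n-icx"], "ttype": ["mrr"]}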
diff --git a/resources/tools/dash/app/pal/utils/utils.py b/resources/tools/dash/app/pal/utils/utils.py
new file mode 100644
index 0000000000..63c9c1aaa4
--- /dev/null
+++ b/resources/tools/dash/app/pal/utils/utils.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+"""
+
+from numpy import isnan
+
+from ..jumpavg import classify
+
+
+def classify_anomalies(data):
+ """Process the data and return anomalies and trending values.
+
+ Gather data into groups with average as trend value.
+ Decorate values within groups to be normal,
+ the first value of changed average as a regression, or a progression.
+
+ :param data: Full data set with unavailable samples replaced by nan.
+ :type data: OrderedDict
+    :returns: Classification, trend averages and trend standard deviations.
+    :rtype: 3-tuple: list of str, list of float, list of float
+ """
+ # NaN means something went wrong.
+    # Use 0.0 so that it is reported as a severe regression.
+ bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
+ # TODO: Make BitCountingGroupList a subclass of list again?
+ group_list = classify(bare_data).group_list
+ group_list.reverse() # Just to use .pop() for FIFO.
+ classification = list()
+ avgs = list()
+ stdevs = list()
+ active_group = None
+ values_left = 0
+ avg = 0.0
+ stdv = 0.0
+ for sample in data.values():
+ if isnan(sample):
+ classification.append("outlier")
+ avgs.append(sample)
+ stdevs.append(sample)
+ continue
+ if values_left < 1 or active_group is None:
+ values_left = 0
+ while values_left < 1: # Ignore empty groups (should not happen).
+ active_group = group_list.pop()
+ values_left = len(active_group.run_list)
+ avg = active_group.stats.avg
+ stdv = active_group.stats.stdev
+ classification.append(active_group.comment)
+ avgs.append(avg)
+ stdevs.append(stdv)
+ values_left -= 1
+ continue
+ classification.append("normal")
+ avgs.append(avg)
+ stdevs.append(stdv)
+ values_left -= 1
+ return classification, avgs, stdevs
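A hypothetical call of classify_anomalies; the run names and rates below are
made up, and the jumpavg dependency is assumed to be importable as in the
relative import above.

    from collections import OrderedDict

    # Made-up trend data: run identifier -> measured rate in Mpps;
    # nan marks a missing sample.
    data = OrderedDict([
        ("run-1", 10.1), ("run-2", 10.2), ("run-3", float("nan")),
        ("run-4", 7.9), ("run-5", 8.0),
    ])
    classification, avgs, stdevs = classify_anomalies(data)
    # classification could look like:
    #   ["normal", "normal", "outlier", "regression", "normal"]
    # avgs and stdevs hold the per-sample trend average and standard deviation.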