author    Tibor Frank <tifrank@cisco.com>    2022-07-20 15:51:21 +0200
committer Tibor Frank <tifrank@cisco.com>    2022-07-21 07:45:39 +0000
commit    ae1fe880286d7b0414664bce2b2c7c91c3f543f3 (patch)
tree      5790af6de3884e0aa6918d44f23aca59cf0d86a7 /resources/tools/dash/app/pal/data
parent    739e01de7a65045dc42e6c16406a6d054da72f7b (diff)
UTI: Move constants to a separate file
Change-Id: If3796b71d02bcf5a92612585dfa8867e5039f037
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Diffstat (limited to 'resources/tools/dash/app/pal/data')
-rw-r--r--  resources/tools/dash/app/pal/data/data.py            |   4
-rw-r--r--  resources/tools/dash/app/pal/data/tooltips.yaml      |  40
-rw-r--r--  resources/tools/dash/app/pal/data/url_processing.py  |  99
-rw-r--r--  resources/tools/dash/app/pal/data/utils.py           |  69
4 files changed, 1 insertion(+), 211 deletions(-)
diff --git a/resources/tools/dash/app/pal/data/data.py b/resources/tools/dash/app/pal/data/data.py
index 296db024c0..0956333e34 100644
--- a/resources/tools/dash/app/pal/data/data.py
+++ b/resources/tools/dash/app/pal/data/data.py
@@ -15,15 +15,13 @@
"""
import logging
+import awswrangler as wr
from yaml import load, FullLoader, YAMLError
from datetime import datetime, timedelta
from time import time
from pytz import UTC
from pandas import DataFrame
-
-import awswrangler as wr
-
from awswrangler.exceptions import EmptyDataFrame, NoFilesFound
diff --git a/resources/tools/dash/app/pal/data/tooltips.yaml b/resources/tools/dash/app/pal/data/tooltips.yaml
deleted file mode 100644
index 2086b575a9..0000000000
--- a/resources/tools/dash/app/pal/data/tooltips.yaml
+++ /dev/null
@@ -1,40 +0,0 @@
-help-area:
- The area defines a VPP packet path and lookup type.
-help-cadence:
- The cadence of the Jenkins job which runs the tests.
-help-cores:
- Number of cores the DUT uses during the test.
-help-download:
- Download the selected data as a csv file.
-help-dut:
- Device Under Test (DUT) - In software networking, “device” denotes a specific
- piece of software tasked with packet processing. Such a device is surrounded
- by other software components (such as the operating system kernel).
-help-dut-ver:
- The version of the Device under Test.
-help-framesize:
- Frame size - size of an Ethernet Layer-2 frame on the wire, including any VLAN
- tags (dot1q, dot1ad) and Ethernet FCS, but excluding Ethernet preamble and
- inter-frame gap. Measured in Bytes.
-help-infra:
- Infrastructure is defined by the topology (number of nodes), processor
- architecture, NIC and driver.
-help-normalize:
- Normalize the results to a CPU frequency of 2 GHz. The results from the AWS
- environment are not normalized, as the exact CPU frequency is not known.
-help-release:
- The CSIT release.
-help-tbed:
- The test bed is defined by the topology (number of nodes) and processor
- architecture.
-help-test:
- The test specification consists of packet encapsulation, VPP packet processing
- (packet forwarding mode and packet processing function(s)) and packet
- forwarding path.
-help-time-period:
- Choose a time period for selected tests.
-help-ttype:
- Main measured variable.
-help-url:
- URL with the current configuration. If there is no "Copy URL" button, select
- the URL with a triple click.
diff --git a/resources/tools/dash/app/pal/data/url_processing.py b/resources/tools/dash/app/pal/data/url_processing.py
deleted file mode 100644
index 9307015d0d..0000000000
--- a/resources/tools/dash/app/pal/data/url_processing.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""URL decoding and parsing and URL encoding.
-"""
-
-import logging
-
-from base64 import urlsafe_b64encode, urlsafe_b64decode
-from urllib.parse import urlencode, urlunparse, urlparse, parse_qs
-from zlib import compress, decompress
-from zlib import error as ZlibErr
-from binascii import Error as BinasciiErr
-
-
-def url_encode(params: dict) -> str:
- """Encode the URL parameters and zip them and create the whole URL using
- given data.
-
- :param params: All data necessary to create the URL:
- - scheme,
- - network location,
- - path,
- - query,
- - parameters.
- :type params: dict
- :returns: Encoded URL.
- :rtype: str
- """
-
- url_params = params.get("params", None)
- if url_params:
- encoded_params = urlsafe_b64encode(
- compress(urlencode(url_params).encode("utf-8"), level=9)
- ).rstrip(b"=").decode("utf-8")
- else:
- encoded_params = str()
-
- return urlunparse((
- params.get("scheme", "http"),
- params.get("netloc", str()),
- params.get("path", str()),
- str(), # params
- params.get("query", str()),
- encoded_params
- ))
-
-
-def url_decode(url: str) -> dict:
- """Parse the given URL and decode the parameters.
-
- :param url: URL to be parsed and decoded.
- :type url: str
- :returns: Parsed URL.
- :rtype: dict
- """
-
- try:
- parsed_url = urlparse(url)
- except ValueError as err:
- logging.warning(f"\nThe URL {url} is not valid, ignoring.\n{repr(err)}")
- return None
-
- if parsed_url.fragment:
- try:
- padding = b"=" * (4 - (len(parsed_url.fragment) % 4))
- params = parse_qs(decompress(
- urlsafe_b64decode(
- (parsed_url.fragment.encode("utf-8") + padding)
- )).decode("utf-8")
- )
- except (BinasciiErr, UnicodeDecodeError, ZlibErr) as err:
- logging.warning(
- f"\nNot possible to decode the parameters from url: {url}"
- f"\nEncoded parameters: '{parsed_url.fragment}'"
- f"\n{repr(err)}"
- )
- return None
- else:
- params = None
-
- return {
- "scheme": parsed_url.scheme,
- "netloc": parsed_url.netloc,
- "path": parsed_url.path,
- "query": parsed_url.query,
- "fragment": parsed_url.fragment,
- "params": params
- }
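
For reference, the round-trip behaviour of the two removed helpers can be sketched in a few lines of standalone Python. This is a minimal approximation, not the removed code itself; the parameter names and values below are illustrative assumptions.

# Minimal round-trip sketch of the removed url_encode()/url_decode() logic.
# The parameter dict is illustrative only.
from base64 import urlsafe_b64decode, urlsafe_b64encode
from urllib.parse import parse_qs, urlencode
from zlib import compress, decompress

params = {"testbed": ["2n-icx"], "ttype": ["mrr"]}

# Encode: urlencode -> zlib compress -> URL-safe base64, padding stripped.
fragment = urlsafe_b64encode(
    compress(urlencode(params, doseq=True).encode("utf-8"), level=9)
).rstrip(b"=").decode("utf-8")

# Decode: restore '=' padding to a multiple of four, then reverse the steps.
padding = b"=" * (-len(fragment) % 4)
decoded = parse_qs(
    decompress(urlsafe_b64decode(fragment.encode("utf-8") + padding)).decode("utf-8")
)

assert decoded == params

The compressed, base64-encoded parameters travel in the URL fragment, which is why url_decode() above looks at parsed_url.fragment and re-pads it before decoding.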
diff --git a/resources/tools/dash/app/pal/data/utils.py b/resources/tools/dash/app/pal/data/utils.py
deleted file mode 100644
index 63c9c1aaa4..0000000000
--- a/resources/tools/dash/app/pal/data/utils.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-"""
-
-from numpy import isnan
-
-from ..jumpavg import classify
-
-
-def classify_anomalies(data):
- """Process the data and return anomalies and trending values.
-
- Gather data into groups with average as trend value.
- Decorate values within groups to be normal,
- the first value of changed average as a regression, or a progression.
-
- :param data: Full data set with unavailable samples replaced by nan.
- :type data: OrderedDict
- :returns: Classification and trend values.
- :rtype: 3-tuple: list of strings, list of floats and list of floats
- """
- # NaN means something went wrong.
- # Use 0.0 so that it is reported as a severe regression.
- bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
- # TODO: Make BitCountingGroupList a subclass of list again?
- group_list = classify(bare_data).group_list
- group_list.reverse() # Just to use .pop() for FIFO.
- classification = list()
- avgs = list()
- stdevs = list()
- active_group = None
- values_left = 0
- avg = 0.0
- stdv = 0.0
- for sample in data.values():
- if isnan(sample):
- classification.append("outlier")
- avgs.append(sample)
- stdevs.append(sample)
- continue
- if values_left < 1 or active_group is None:
- values_left = 0
- while values_left < 1: # Ignore empty groups (should not happen).
- active_group = group_list.pop()
- values_left = len(active_group.run_list)
- avg = active_group.stats.avg
- stdv = active_group.stats.stdev
- classification.append(active_group.comment)
- avgs.append(avg)
- stdevs.append(stdv)
- values_left -= 1
- continue
- classification.append("normal")
- avgs.append(avg)
- stdevs.append(stdv)
- values_left -= 1
- return classification, avgs, stdevs
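
Since jumpavg itself is outside this diff, the walk that classify_anomalies() performs over the group list can be illustrated with a stubbed classifier. Everything below (the Group/Stats stand-ins, classify_stub and the sample values) is an assumption made for the example; only the loop mirrors the removed code.

# Illustration of the group-list walk in the removed classify_anomalies().
# Group, Stats and classify_stub are stand-ins for the real jumpavg API.
from collections import OrderedDict
from dataclasses import dataclass
from math import isnan

@dataclass
class Stats:
    avg: float
    stdev: float

@dataclass
class Group:
    run_list: list
    stats: Stats
    comment: str  # e.g. "normal", "regression" or "progression"

def classify_stub(samples):
    # Pretend the classifier found a regression starting at index 3.
    return [
        Group(samples[:3], Stats(avg=10.0, stdev=0.1), "normal"),
        Group(samples[3:], Stats(avg=5.0, stdev=0.2), "regression"),
    ]

data = OrderedDict(
    (f"run{i}", v) for i, v in enumerate([10.1, 9.9, 10.0, 5.1, float("nan"), 4.9])
)
bare = [0.0 if isnan(v) else v for v in data.values()]
groups = classify_stub(bare)
groups.reverse()  # Consume chronologically with .pop().

classification, avgs, left, active = [], [], 0, None
for sample in data.values():
    if isnan(sample):
        classification.append("outlier")  # Unavailable sample.
        avgs.append(sample)
        continue
    if left < 1:
        active = groups.pop()  # First value of a new group gets its comment.
        left = len(active.run_list)
        classification.append(active.comment)
    else:
        classification.append("normal")
    avgs.append(active.stats.avg)
    left -= 1

print(classification)
# -> ['normal', 'normal', 'normal', 'regression', 'outlier', 'normal']

As in the removed code, only the first available sample of each group carries the group's comment; every other sample in the group is reported as "normal" with the group's average as its trend value.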