Diffstat (limited to 'resources/tools/dash/app/pal/utils')
-rw-r--r--  resources/tools/dash/app/pal/utils/__init__.py        |  12
-rw-r--r--  resources/tools/dash/app/pal/utils/constants.py       | 312
-rw-r--r--  resources/tools/dash/app/pal/utils/tooltips.yaml      |  40
-rw-r--r--  resources/tools/dash/app/pal/utils/url_processing.py  |  99
-rw-r--r--  resources/tools/dash/app/pal/utils/utils.py           | 344
5 files changed, 0 insertions, 807 deletions
diff --git a/resources/tools/dash/app/pal/utils/__init__.py b/resources/tools/dash/app/pal/utils/__init__.py
deleted file mode 100644
index 5692432123..0000000000
--- a/resources/tools/dash/app/pal/utils/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/resources/tools/dash/app/pal/utils/constants.py b/resources/tools/dash/app/pal/utils/constants.py
deleted file mode 100644
index cc4a9e0f23..0000000000
--- a/resources/tools/dash/app/pal/utils/constants.py
+++ /dev/null
@@ -1,312 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Constants used in Dash PAL.
-
-"Constant" means a value that keeps its value since initialization. The value
-does not need to be hard coded here, but can be read from environment variables.
-"""
-
-
-import dash_bootstrap_components as dbc
-
-from dash import html
-
-
-class Constants:
- """Constants used in Dash PAL.
- """
-
- ############################################################################
- # General, application wide constants.
-
- # The application title.
- TITLE = "FD.io CSIT"
-
- # The application description.
- DESCRIPTION = "Performance Dashboard"
-
- # External stylesheets.
- EXTERNAL_STYLESHEETS = [dbc.themes.LUX, ]
-
- # Top level template for all pages.
- TEMPLATE = "d-flex h-100 text-center text-white bg-dark"
-
- # Path and name of the file specifying the HTML layout of the dash
- # application.
- MAIN_HTML_LAYOUT_FILE = "index_layout.jinja2"
-
- # Application root.
- APPLICATIN_ROOT = "/"
-
- # Specification file of the data to be downloaded from the parquets.
- DATA_SPEC_FILE = "pal/data/data.yaml"
-
- # The file with tooltips.
- TOOLTIP_FILE = "pal/utils/tooltips.yaml"
-
- # Maximal value of TIME_PERIOD for data read from the parquets in days.
- # Do not change without a good reason.
- MAX_TIME_PERIOD = 180
-
- # The time period, in days counted back from now, for data read from the
- # parquets.
- # TIME_PERIOD = None means all data (at most MAX_TIME_PERIOD days) is read.
- # TIME_PERIOD = MAX_TIME_PERIOD is the default value.
- TIME_PERIOD = MAX_TIME_PERIOD # [days]
-
- # List of releases used for iterative data processing.
- # The releases MUST be in the order from the current (newest) to the last
- # (oldest).
- RELEASES = ["csit2206", "csit2202", ]
-
- ############################################################################
- # General, application wide, layout affecting constants.
-
- # If True, clear all inputs in control panel when button "ADD SELECTED" is
- # pressed.
- CLEAR_ALL_INPUTS = False
-
- # The element is disabled.
- STYLE_DISABLED = {"display": "none"}
-
- # The element is enabled and visible.
- STYLE_ENABLED = {"display": "inherit"}
-
- # Checklist "All" is disabled.
- CL_ALL_DISABLED = [
- {
- "label": "All",
- "value": "all",
- "disabled": True
- }
- ]
-
- # Checklist "All" is enable, visible and unchecked.
- CL_ALL_ENABLED = [
- {
- "label": "All",
- "value": "all",
- "disabled": False
- }
- ]
-
- # Placeholder for any element in the layout.
- PLACEHOLDER = html.Nobr("")
-
- # List of drivers used in CSIT.
- DRIVERS = ("avf", "af-xdp", "rdma", "dpdk")
-
- # Labels for input elements (dropdowns, ...).
- LABELS = {
- "dpdk": "DPDK",
- "container_memif": "LXC/DRC Container Memif",
- "crypto": "IPSec IPv4 Routing",
- "ip4": "IPv4 Routing",
- "ip6": "IPv6 Routing",
- "ip4_tunnels": "IPv4 Tunnels",
- "l2": "L2 Ethernet Switching",
- "srv6": "SRv6 Routing",
- "vm_vhost": "VMs vhost-user",
- "nfv_density-dcr_memif-chain_ipsec": "CNF Service Chains Routing IPSec",
- "nfv_density-vm_vhost-chain_dot1qip4vxlan":"VNF Service Chains Tunnels",
- "nfv_density-vm_vhost-chain": "VNF Service Chains Routing",
- "nfv_density-dcr_memif-pipeline": "CNF Service Pipelines Routing",
- "nfv_density-dcr_memif-chain": "CNF Service Chains Routing",
- }
-
- # URL style.
- URL_STYLE = {
- "background-color": "#d2ebf5",
- "border-color": "#bce1f1",
- "color": "#135d7c"
- }
-
- ############################################################################
- # General, normalization constants.
-
- NORM_FREQUENCY = 2.0 # [GHz]
- FREQUENCY = { # [GHz]
- "2n-aws": 1.000,
- "2n-dnv": 2.000,
- "2n-clx": 2.300,
- "2n-icx": 2.600,
- "2n-skx": 2.500,
- "2n-tx2": 2.500,
- "2n-zn2": 2.900,
- "3n-alt": 3.000,
- "3n-aws": 1.000,
- "3n-dnv": 2.000,
- "3n-icx": 2.600,
- "3n-skx": 2.500,
- "3n-tsh": 2.200
- }
-
- ############################################################################
- # General, plots constants.
-
- PLOT_COLORS = (
- "#1A1110", "#DA2647", "#214FC6", "#01786F", "#BD8260", "#FFD12A",
- "#A6E7FF", "#738276", "#C95A49", "#FC5A8D", "#CEC8EF", "#391285",
- "#6F2DA8", "#FF878D", "#45A27D", "#FFD0B9", "#FD5240", "#DB91EF",
- "#44D7A8", "#4F86F7", "#84DE02", "#FFCFF1", "#614051"
- )
-
- # Trending, anomalies.
- ANOMALY_COLOR = {
- "regression": 0.0,
- "normal": 0.5,
- "progression": 1.0
- }
-
- COLORSCALE_TPUT = [
- [0.00, "red"],
- [0.33, "red"],
- [0.33, "white"],
- [0.66, "white"],
- [0.66, "green"],
- [1.00, "green"]
- ]
-
- TICK_TEXT_TPUT = ["Regression", "Normal", "Progression"]
-
- COLORSCALE_LAT = [
- [0.00, "green"],
- [0.33, "green"],
- [0.33, "white"],
- [0.66, "white"],
- [0.66, "red"],
- [1.00, "red"]
- ]
-
- TICK_TEXT_LAT = ["Progression", "Normal", "Regression"]
-
- # Access to the results.
- VALUE = {
- "mrr": "result_receive_rate_rate_avg",
- "ndr": "result_ndr_lower_rate_value",
- "pdr": "result_pdr_lower_rate_value",
- "pdr-lat": "result_latency_forward_pdr_50_avg"
- }
-
- VALUE_ITER = {
- "mrr": "result_receive_rate_rate_values",
- "ndr": "result_ndr_lower_rate_value",
- "pdr": "result_pdr_lower_rate_value",
- "pdr-lat": "result_latency_forward_pdr_50_avg"
- }
-
- UNIT = {
- "mrr": "result_receive_rate_rate_unit",
- "ndr": "result_ndr_lower_rate_unit",
- "pdr": "result_pdr_lower_rate_unit",
- "pdr-lat": "result_latency_forward_pdr_50_unit"
- }
-
- # Latencies.
- LAT_HDRH = ( # Do not change the order
- "result_latency_forward_pdr_0_hdrh",
- "result_latency_reverse_pdr_0_hdrh",
- "result_latency_forward_pdr_10_hdrh",
- "result_latency_reverse_pdr_10_hdrh",
- "result_latency_forward_pdr_50_hdrh",
- "result_latency_reverse_pdr_50_hdrh",
- "result_latency_forward_pdr_90_hdrh",
- "result_latency_reverse_pdr_90_hdrh",
- )
-
- # This value depends on the latency stream rate (9001 pps) and duration (5s).
- # Keep it slightly higher so that rounding errors do not remove the tick mark.
- PERCENTILE_MAX = 99.999501
-
- GRAPH_LAT_HDRH_DESC = {
- "result_latency_forward_pdr_0_hdrh": "No-load.",
- "result_latency_reverse_pdr_0_hdrh": "No-load.",
- "result_latency_forward_pdr_10_hdrh": "Low-load, 10% PDR.",
- "result_latency_reverse_pdr_10_hdrh": "Low-load, 10% PDR.",
- "result_latency_forward_pdr_50_hdrh": "Mid-load, 50% PDR.",
- "result_latency_reverse_pdr_50_hdrh": "Mid-load, 50% PDR.",
- "result_latency_forward_pdr_90_hdrh": "High-load, 90% PDR.",
- "result_latency_reverse_pdr_90_hdrh": "High-load, 90% PDR."
- }
-
- ############################################################################
- # News.
-
- # The pathname prefix for the application.
- NEWS_ROUTES_PATHNAME_PREFIX = "/news/"
-
- # Path and name of the file specifying the HTML layout of the dash
- # application.
- NEWS_HTML_LAYOUT_FILE = "pal/templates/news_layout.jinja2"
-
- # The default job displayed when the page is loaded first time.
- NEWS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
-
- # Time period for regressions and progressions. Be CAREFUL with this
- # number. Setting it too high causes a long processing time during the
- # application start-up.
- # If NEWS_TIME_PERIOD = 180, it takes approx. 35 minutes to calculate
- # anomalies for all tests.
- NEWS_TIME_PERIOD = 21 # [days]
-
- ############################################################################
- # Report.
-
- # The pathname prefix for the application.
- REPORT_ROUTES_PATHNAME_PREFIX = "/report/"
-
- # Path and name of the file specifying the HTML layout of the dash
- # application.
- REPORT_HTML_LAYOUT_FILE = "pal/templates/report_layout.jinja2"
-
- # Layout of plot.ly graphs.
- REPORT_GRAPH_LAYOUT_FILE = "pal/report/layout.yaml"
-
- # Default name of downloaded file with selected data.
- REPORT_DOWNLOAD_FILE_NAME = "iterative_data.csv"
-
- ############################################################################
- # Statistics.
-
- # The pathname prefix for the application.
- STATS_ROUTES_PATHNAME_PREFIX = "/stats/"
-
- # Path and name of the file specifying the HTML layout of the dash
- # application.
- STATS_HTML_LAYOUT_FILE = "pal/templates/stats_layout.jinja2"
-
- # Layout of plot.ly graphs.
- STATS_GRAPH_LAYOUT_FILE = "pal/stats/layout.yaml"
-
- # The default job displayed when the page is loaded first time.
- STATS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
-
- # Default name of downloaded file with selected data.
- STATS_DOWNLOAD_FILE_NAME = "stats.csv"
-
- ############################################################################
- # Trending.
-
- # The pathname prefix for the application.
- TREND_ROUTES_PATHNAME_PREFIX = "/trending/"
-
- # Path and name of the file specifying the HTML layout of the dash
- # application.
- TREND_HTML_LAYOUT_FILE = "pal/templates/trending_layout.jinja2"
-
- # Layout of plot.ly graphs.
- TREND_GRAPH_LAYOUT_FILE = "pal/trending/layout.yaml"
-
- # Default name of downloaded file with selected data.
- TREND_DOWNLOAD_FILE_NAME = "trending_data.csv"
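
The normalization constants above (NORM_FREQUENCY, FREQUENCY) are consumed elsewhere in the application; the scaling code itself is not part of this diff. A minimal sketch of the usual frequency normalization, assuming the package is importable as "pal" and that results scale linearly with CPU frequency:

    # Hedged sketch; the import path and the linear-scaling assumption are
    # illustrative, not taken from this diff.
    from pal.utils.constants import Constants as C

    def normalize_to_reference(value: float, testbed: str) -> float:
        """Scale a measured value to the 2.0 GHz reference frequency."""
        return value * C.NORM_FREQUENCY / C.FREQUENCY[testbed]

    # Example: a 2n-icx result measured at 2.6 GHz is scaled by 2.0 / 2.6.
    normalize_to_reference(10e6, "2n-icx")  # -> approx. 7.69e6
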
diff --git a/resources/tools/dash/app/pal/utils/tooltips.yaml b/resources/tools/dash/app/pal/utils/tooltips.yaml
deleted file mode 100644
index 2086b575a9..0000000000
--- a/resources/tools/dash/app/pal/utils/tooltips.yaml
+++ /dev/null
@@ -1,40 +0,0 @@
-help-area:
- The area defines a VPP packet path and lookup type.
-help-cadence:
- The cadence of the Jenkins job which runs the tests.
-help-cores:
- Number of cores the DUT uses during the test.
-help-download:
- Download the selected data as a csv file.
-help-dut:
- Device Under Test (DUT) - In software networking, “device” denotes a specific
- piece of software tasked with packet processing. Such a device is surrounded
- by other software components (such as the operating system kernel).
-help-dut-ver:
- The version of the Device under Test.
-help-framesize:
- Frame size - size of an Ethernet Layer-2 frame on the wire, including any VLAN
- tags (dot1q, dot1ad) and Ethernet FCS, but excluding Ethernet preamble and
- inter-frame gap. Measured in Bytes.
-help-infra:
- Infrastructure is defined by the topology (number of nodes), processor
- architecture, NIC and driver.
-help-normalize:
- Normalize the results to a CPU frequency of 2 GHz. The results from the AWS
- environment are not normalized as we do not know the exact CPU frequency.
-help-release:
- The CSIT release.
-help-tbed:
- The test bed is defined by the topology (number of nodes) and processor
- architecture.
-help-test:
- The test specification consists of packet encapsulation, VPP packet processing
- (packet forwarding mode and packet processing function(s)) and packet
- forwarding path.
-help-time-period:
- Choose a time period for selected tests.
-help-ttype:
- Main measured variable.
-help-url:
- URL with the current configuration. If there is no "Copy URL" button, use a
- triple click.
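
These help texts are keyed by the IDs used in the UI; Constants.TOOLTIP_FILE points at this file and utils.show_tooltip() (later in this diff) renders one entry. A minimal sketch of the loading step, assuming PyYAML and that the app's working directory makes C.TOOLTIP_FILE resolvable:

    # Hedged sketch; the actual loading code is not part of this diff.
    import yaml

    from pal.utils.constants import Constants as C
    from pal.utils.utils import show_tooltip

    with open(C.TOOLTIP_FILE, "rt") as file_handler:
        tooltips = yaml.safe_load(file_handler)  # {"help-dut": "...", ...}

    # Returns a list of Dash components: the title, a "?" badge and a tooltip.
    children = show_tooltip(tooltips, "help-dut", "DUT")
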
diff --git a/resources/tools/dash/app/pal/utils/url_processing.py b/resources/tools/dash/app/pal/utils/url_processing.py
deleted file mode 100644
index 9307015d0d..0000000000
--- a/resources/tools/dash/app/pal/utils/url_processing.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""URL decoding and parsing and URL encoding.
-"""
-
-import logging
-
-from base64 import urlsafe_b64encode, urlsafe_b64decode
-from urllib.parse import urlencode, urlunparse, urlparse, parse_qs
-from zlib import compress, decompress
-from zlib import error as ZlibErr
-from binascii import Error as BinasciiErr
-
-
-def url_encode(params: dict) -> str:
- """Encode the URL parameters and zip them and create the whole URL using
- given data.
-
- :param params: All data necessary to create the URL:
- - scheme,
- - network location,
- - path,
- - query,
- - parameters.
- :type params: dict
- :returns: Encoded URL.
- :rtype: str
- """
-
- url_params = params.get("params", None)
- if url_params:
- encoded_params = urlsafe_b64encode(
- compress(urlencode(url_params).encode("utf-8"), level=9)
- ).rstrip(b"=").decode("utf-8")
- else:
- encoded_params = str()
-
- return urlunparse((
- params.get("scheme", "http"),
- params.get("netloc", str()),
- params.get("path", str()),
- str(), # params
- params.get("query", str()),
- encoded_params
- ))
-
-
-def url_decode(url: str) -> dict:
- """Parse the given URL and decode the parameters.
-
- :param url: URL to be parsed and decoded.
- :type url: str
- :returns: Parsed URL.
- :rtype: dict
- """
-
- try:
- parsed_url = urlparse(url)
- except ValueError as err:
- logging.warning(f"\nThe url {url} is not valid, ignoring.\n{repr(err)}")
- return None
-
- if parsed_url.fragment:
- try:
- padding = b"=" * (4 - (len(parsed_url.fragment) % 4))
- params = parse_qs(decompress(
- urlsafe_b64decode(
- (parsed_url.fragment.encode("utf-8") + padding)
- )).decode("utf-8")
- )
- except (BinasciiErr, UnicodeDecodeError, ZlibErr) as err:
- logging.warning(
- f"\nNot possible to decode the parameters from url: {url}"
- f"\nEncoded parameters: '{parsed_url.fragment}'"
- f"\n{repr(err)}"
- )
- return None
- else:
- params = None
-
- return {
- "scheme": parsed_url.scheme,
- "netloc": parsed_url.netloc,
- "path": parsed_url.path,
- "query": parsed_url.query,
- "fragment": parsed_url.fragment,
- "params": params
- }
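
A round-trip example using the two functions above; the host name and parameter values are made up for illustration. Note that parse_qs() returns lists of values, so the decoded parameters come back as single-item lists:

    url = url_encode({
        "scheme": "https",
        "netloc": "csit.example.org",       # illustrative host
        "path": "/trending",
        "params": {"dut": "vpp", "ttype": "mrr"}
    })
    # The parameters are urlencoded, zlib-compressed and urlsafe-base64-encoded
    # into the URL fragment, so url_decode() can recover them:
    decoded = url_decode(url)
    assert decoded["params"] == {"dut": ["vpp"], "ttype": ["mrr"]}
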
diff --git a/resources/tools/dash/app/pal/utils/utils.py b/resources/tools/dash/app/pal/utils/utils.py
deleted file mode 100644
index 9e4eeeb892..0000000000
--- a/resources/tools/dash/app/pal/utils/utils.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Function used by Dash applications.
-"""
-
-import pandas as pd
-import dash_bootstrap_components as dbc
-
-from numpy import isnan
-from dash import dcc
-from datetime import datetime
-
-from ..jumpavg import classify
-from ..utils.constants import Constants as C
-from ..utils.url_processing import url_encode
-
-
-def classify_anomalies(data):
- """Process the data and return anomalies and trending values.
-
- Gather data into groups with average as trend value.
- Decorate values within groups to be normal,
- the first value of changed average as a regression, or a progression.
-
- :param data: Full data set with unavailable samples replaced by nan.
- :type data: OrderedDict
- :returns: Classification and trend values
- :rtype: 3-tuple, list of strings, list of floats and list of floats
- """
- # NaN means something went wrong.
- # Use 0.0 so that it is reported as a severe regression.
- bare_data = [0.0 if isnan(sample) else sample for sample in data.values()]
- # TODO: Make BitCountingGroupList a subclass of list again?
- group_list = classify(bare_data).group_list
- group_list.reverse() # Just to use .pop() for FIFO.
- classification = list()
- avgs = list()
- stdevs = list()
- active_group = None
- values_left = 0
- avg = 0.0
- stdv = 0.0
- for sample in data.values():
- if isnan(sample):
- classification.append("outlier")
- avgs.append(sample)
- stdevs.append(sample)
- continue
- if values_left < 1 or active_group is None:
- values_left = 0
- while values_left < 1: # Ignore empty groups (should not happen).
- active_group = group_list.pop()
- values_left = len(active_group.run_list)
- avg = active_group.stats.avg
- stdv = active_group.stats.stdev
- classification.append(active_group.comment)
- avgs.append(avg)
- stdevs.append(stdv)
- values_left -= 1
- continue
- classification.append("normal")
- avgs.append(avg)
- stdevs.append(stdv)
- values_left -= 1
- return classification, avgs, stdevs
-
-
-def get_color(idx: int) -> str:
- """Returns a color from the list defined in Constants.PLOT_COLORS defined by
- its index.
-
- :param idx: Index of the color.
- :type idx: int
- :returns: Color defined by hex code.
- :rtype: str
- """
- return C.PLOT_COLORS[idx % len(C.PLOT_COLORS)]
-
-
-def show_tooltip(tooltips: dict, id: str, title: str,
- clipboard_id: str = None) -> list:
- """Generate list of elements to display a text (e.g. a title) with a
- tooltip and optionaly with Copy&Paste icon and the clipboard
- functionality enabled.
-
- :param tooltips: Dictionary with tooltips.
- :param id: Tooltip ID.
- :param title: A text for which the tooltip will be displayed.
- :param clipboard_id: If defined, a Copy&Paste icon is displayed and the
- clipboard functionality is enabled.
- :type tooltips: dict
- :type id: str
- :type title: str
- :type clipboard_id: str
- :returns: List of elements to display a text with a tooltip and
- optionaly with Copy&Paste icon.
- :rtype: list
- """
-
- return [
- dcc.Clipboard(target_id=clipboard_id, title="Copy URL") \
- if clipboard_id else str(),
- f"{title} ",
- dbc.Badge(
- id=id,
- children="?",
- pill=True,
- color="white",
- text_color="info",
- class_name="border ms-1",
- ),
- dbc.Tooltip(
- children=tooltips.get(id, str()),
- target=id,
- placement="auto"
- )
- ]
-
-
-def label(key: str) -> str:
- """Returns a label for input elements (dropdowns, ...).
-
- If the label is not defined, the function returns the provided key.
-
- :param key: The key to the label defined in Constants.LABELS.
- :type key: str
- :returns: Label.
- :rtype: str
- """
- return C.LABELS.get(key, key)
-
-
-def sync_checklists(options: list, sel: list, all: list, id: str) -> tuple:
- """Synchronize a checklist with defined "options" with its "All" checklist.
-
- :param options: List of options for the cheklist.
- :param sel: List of selected options.
- :param all: List of selected option from "All" checklist.
- :param id: ID of a checklist to be used for synchronization.
- :returns: Tuple of lists with otions for both checklists.
- :rtype: tuple of lists
- """
- opts = {v["value"] for v in options}
- if id =="all":
- sel = list(opts) if all else list()
- else:
- all = ["all", ] if set(sel) == opts else list()
- return sel, all
-
-
-def list_tests(selection: list) -> list:
- """Transform list of tests to a list of dictionaries usable by checkboxes.
-
- :param selection: List of tests to be displayed in "Selected tests" window.
- :type selection: list
- :returns: List of dictionaries with "label", "value" pairs for a checkbox.
- :rtype: list
- """
- if selection:
- return [{"label": v["id"], "value": v["id"]} for v in selection]
- else:
- return list()
-
-
-def get_date(s_date: str) -> datetime:
- """Transform string reprezentation of date to datetime.datetime data type.
-
- :param s_date: String reprezentation of date.
- :type s_date: str
- :returns: Date as datetime.datetime.
- :rtype: datetime.datetime
- """
- return datetime(int(s_date[0:4]), int(s_date[5:7]), int(s_date[8:10]))
-
-
-def gen_new_url(url_components: dict, params: dict) -> str:
- """Generate a new URL with encoded parameters.
-
- :param url_components: Dictionary with URL elements. It should contain
- "scheme", "netloc" and "path".
- :param params: URL parameters to be encoded into the URL.
- :type url_components: dict
- :type params: dict
- :returns: Encoded URL with parameters.
- :rtype: str
- """
-
- if url_components:
- return url_encode(
- {
- "scheme": url_components.get("scheme", ""),
- "netloc": url_components.get("netloc", ""),
- "path": url_components.get("path", ""),
- "params": params
- }
- )
- else:
- return str()
-
-
-def get_duts(df: pd.DataFrame) -> list:
- """Get the list of DUTs from the pre-processed information about jobs.
-
- :param df: DataFrame with information about jobs.
- :type df: pandas.DataFrame
- :returns: Alphabetically sorted list of DUTs.
- :rtype: list
- """
- return sorted(list(df["dut"].unique()))
-
-
-def get_ttypes(df: pd.DataFrame, dut: str) -> list:
- """Get the list of test types from the pre-processed information about
- jobs.
-
- :param df: DataFrame with information about jobs.
- :param dut: The DUT for which the list of test types will be populated.
- :type df: pandas.DataFrame
- :type dut: str
- :returns: Alphabetically sorted list of test types.
- :rtype: list
- """
- return sorted(list(df.loc[(df["dut"] == dut)]["ttype"].unique()))
-
-
-def get_cadences(df: pd.DataFrame, dut: str, ttype: str) -> list:
- """Get the list of cadences from the pre-processed information about
- jobs.
-
- :param df: DataFrame with information about jobs.
- :param dut: The DUT for which the list of cadences will be populated.
- :param ttype: The test type for which the list of cadences will be
- populated.
- :type df: pandas.DataFrame
- :type dut: str
- :type ttype: str
- :returns: Alphabetically sorted list of cadences.
- :rtype: list
- """
- return sorted(list(df.loc[(
- (df["dut"] == dut) &
- (df["ttype"] == ttype)
- )]["cadence"].unique()))
-
-
-def get_test_beds(df: pd.DataFrame, dut: str, ttype: str, cadence: str) -> list:
- """Get the list of test beds from the pre-processed information about
- jobs.
-
- :param df: DataFrame with information about jobs.
- :param dut: The DUT for which the list of test beds will be populated.
- :param ttype: The test type for which the list of test beds will be
- populated.
- :param cadence: The cadence for which the list of test beds will be
- populated.
- :type df: pandas.DataFrame
- :type dut: str
- :type ttype: str
- :type cadence: str
- :returns: Alphabetically sorted list of test beds.
- :rtype: list
- """
- return sorted(list(df.loc[(
- (df["dut"] == dut) &
- (df["ttype"] == ttype) &
- (df["cadence"] == cadence)
- )]["tbed"].unique()))
-
-
-def get_job(df: pd.DataFrame, dut, ttype, cadence, testbed):
- """Get the name of a job defined by dut, ttype, cadence, test bed.
- Input information comes from the control panel.
-
- :param df: DataFrame with information about jobs.
- :param dut: The DUT for which the job name will be created.
- :param ttype: The test type for which the job name will be created.
- :param cadence: The cadence for which the job name will be created.
- :param testbed: The test bed for which the job name will be created.
- :type df: pandas.DataFrame
- :type dut: str
- :type ttype: str
- :type cadence: str
- :type testbed: str
- :returns: Job name.
- :rtype: str
- """
- return df.loc[(
- (df["dut"] == dut) &
- (df["ttype"] == ttype) &
- (df["cadence"] == cadence) &
- (df["tbed"] == testbed)
- )]["job"].item()
-
-
-def generate_options(opts: list) -> list:
- """Return list of options for radio items in control panel. The items in
- the list are dictionaries with keys "label" and "value".
-
- :param opts: List of options (str) to be used for the generated list.
- :type opts: list
- :returns: List of options (dict).
- :rtype: list
- """
- return [{"label": i, "value": i} for i in opts]
-
-
-def set_job_params(df: pd.DataFrame, job: str) -> dict:
- """Create a dictionary with all options and values for (and from) the
- given job.
-
- :param df: DataFrame with information about jobs.
- :param job: The name of the job for and from which the dictionary will be
- created.
- :type df: pandas.DataFrame
- :type job: str
- :returns: Dictionary with all options and values for (and from) the
- given job.
- :rtype: dict
- """
-
- l_job = job.split("-")
- return {
- "job": job,
- "dut": l_job[1],
- "ttype": l_job[3],
- "cadence": l_job[4],
- "tbed": "-".join(l_job[-2:]),
- "duts": generate_options(get_duts(df)),
- "ttypes": generate_options(get_ttypes(df, l_job[1])),
- "cadences": generate_options(get_cadences(df, l_job[1], l_job[3])),
- "tbeds": generate_options(
- get_test_beds(df, l_job[1], l_job[3], l_job[4]))
- }