-rw-r--r--  VPP_STABLE_VER_UBUNTU_JAMMY                                          2
-rw-r--r--  csit.infra.dash/app/cdash/data/_metadata/coverage_rls2406_device     bin 5373 -> 5726 bytes
-rw-r--r--  csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_mrr       bin 10632 -> 9704 bytes
-rw-r--r--  csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_ndrpdr    bin 15602 -> 15951 bytes
-rw-r--r--  csit.infra.dash/app/cdash/data/_metadata/trending_mrr                bin 11097 -> 11450 bytes
-rw-r--r--  csit.infra.dash/app/cdash/data/_metadata/trending_ndrpdr             bin 16091 -> 16432 bytes
-rw-r--r--  csit.infra.dash/app/cdash/data/data.py                               15
-rw-r--r--  csit.infra.dash/app/cdash/data/data.yaml                             6
-rw-r--r--  csit.infra.dash/app/cdash/stats/layout.py                            19
-rw-r--r--  csit.infra.dash/app/cdash/utils/constants.py                         12
-rw-r--r--  csit.infra.dash/app/cdash/utils/telemetry_data.py                    7
-rw-r--r--  csit.infra.dash/app/cdash/utils/utils.py                             49
-rw-r--r--  csit.infra.dash/docker-compose.yaml                                  1
-rw-r--r--  resources/libraries/bash/function/common.sh                          3
-rw-r--r--  resources/libraries/python/DUTSetup.py                               8
-rw-r--r--  resources/libraries/python/VppConfigGenerator.py                     2
-rw-r--r--  resources/libraries/robot/shared/test_setup.robot                    3
-rw-r--r--  tests/vpp/perf/__init__.robot                                        3
18 files changed, 97 insertions, 33 deletions
diff --git a/VPP_STABLE_VER_UBUNTU_JAMMY b/VPP_STABLE_VER_UBUNTU_JAMMY
index 6701a8bcf7..9b2eefe393 100644
--- a/VPP_STABLE_VER_UBUNTU_JAMMY
+++ b/VPP_STABLE_VER_UBUNTU_JAMMY
@@ -1 +1 @@
-24.10-rc0~59-g1c30d2d8b
\ No newline at end of file
+24.10-rc0~69-gd8efd6e31
\ No newline at end of file
diff --git a/csit.infra.dash/app/cdash/data/_metadata/coverage_rls2406_device b/csit.infra.dash/app/cdash/data/_metadata/coverage_rls2406_device
index f619ce8a8e..011ebba41f 100644
--- a/csit.infra.dash/app/cdash/data/_metadata/coverage_rls2406_device
+++ b/csit.infra.dash/app/cdash/data/_metadata/coverage_rls2406_device
Binary files differ
diff --git a/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_mrr b/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_mrr
index 496a6b72fe..ced78967c5 100644
--- a/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_mrr
+++ b/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_mrr
Binary files differ
diff --git a/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_ndrpdr b/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_ndrpdr
index 39bc301681..cf2b8a116b 100644
--- a/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_ndrpdr
+++ b/csit.infra.dash/app/cdash/data/_metadata/iterative_rls2406_ndrpdr
Binary files differ
diff --git a/csit.infra.dash/app/cdash/data/_metadata/trending_mrr b/csit.infra.dash/app/cdash/data/_metadata/trending_mrr
index 768a9ec21c..06ddbca659 100644
--- a/csit.infra.dash/app/cdash/data/_metadata/trending_mrr
+++ b/csit.infra.dash/app/cdash/data/_metadata/trending_mrr
Binary files differ
diff --git a/csit.infra.dash/app/cdash/data/_metadata/trending_ndrpdr b/csit.infra.dash/app/cdash/data/_metadata/trending_ndrpdr
index 3f8b85c66e..870d8a9f9b 100644
--- a/csit.infra.dash/app/cdash/data/_metadata/trending_ndrpdr
+++ b/csit.infra.dash/app/cdash/data/_metadata/trending_ndrpdr
Binary files differ
diff --git a/csit.infra.dash/app/cdash/data/data.py b/csit.infra.dash/app/cdash/data/data.py
index 2c49992bf8..41033a7758 100644
--- a/csit.infra.dash/app/cdash/data/data.py
+++ b/csit.infra.dash/app/cdash/data/data.py
@@ -400,11 +400,16 @@ class Data:
)
for key in self._data.keys():
logging.info(f"\n\nDataframe {key}:\n")
- self._data[key] = pd.concat(
- data_lists[key],
- ignore_index=True,
- copy=False
- )
+ if len(data_lists[key]) == 0:
+ self._data[key] = pd.DataFrame()
+ elif len(data_lists[key]) == 1:
+ self._data[key] = data_lists[key][0]
+ else:
+ self._data[key] = pd.concat(
+ data_lists[key],
+ ignore_index=True,
+ copy=False
+ )
self._data[key].info(verbose=True, memory_usage="deep")
err_msg = self._validate_columns(key)
if err_msg:
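
The guard added above avoids calling pd.concat() on an empty list, which raises ValueError ("No objects to concatenate"), and skips a needless copy when there is only one dataframe. A standalone sketch of the same pattern (the function name is illustrative, not from the repository):

    import pandas as pd

    def concat_partitions(frames: list) -> pd.DataFrame:
        """Concatenate partition dataframes, tolerating empty input."""
        if not frames:
            # pd.concat([]) raises ValueError, so return an empty frame instead.
            return pd.DataFrame()
        if len(frames) == 1:
            # Nothing to concatenate; reuse the single frame as-is.
            return frames[0]
        return pd.concat(frames, ignore_index=True, copy=False)
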
diff --git a/csit.infra.dash/app/cdash/data/data.yaml b/csit.infra.dash/app/cdash/data/data.yaml
index db7775b210..c1b45536b7 100644
--- a/csit.infra.dash/app/cdash/data/data.yaml
+++ b/csit.infra.dash/app/cdash/data/data.yaml
@@ -19,6 +19,7 @@
- dut_type
- dut_version
- hosts
+ - tg_type
- start_time
- passed
- test_id
@@ -43,6 +44,7 @@
- dut_type
- dut_version
- hosts
+ - tg_type
- start_time
- passed
- test_id
@@ -386,6 +388,7 @@
- build
- dut_type
- dut_version
+ - tg_type
- hosts
- start_time
- passed
@@ -408,6 +411,7 @@
- build
- dut_type
- dut_version
+ - tg_type
- hosts
- start_time
- passed
@@ -512,6 +516,7 @@
- build
- dut_type
- dut_version
+ - tg_type
- start_time
- passed
- test_id
@@ -530,6 +535,7 @@
- build
- dut_type
- dut_version
+ - tg_type
- passed
- test_id
- version
diff --git a/csit.infra.dash/app/cdash/stats/layout.py b/csit.infra.dash/app/cdash/stats/layout.py
index 616a4028e6..823e32659a 100644
--- a/csit.infra.dash/app/cdash/stats/layout.py
+++ b/csit.infra.dash/app/cdash/stats/layout.py
@@ -29,7 +29,8 @@ from yaml import load, FullLoader, YAMLError
from ..utils.constants import Constants as C
from ..utils.control_panel import ControlPanel
from ..utils.utils import show_tooltip, gen_new_url, get_ttypes, get_cadences, \
- get_test_beds, get_job, generate_options, set_job_params, navbar_trending
+ get_test_beds, get_job, generate_options, set_job_params, navbar_trending, \
+ get_url_job, get_url_logs
from ..utils.url_processing import url_decode
from .graphs import graph_statistics, select_data
@@ -92,15 +93,16 @@ class Layout:
"tbed": list()
}
for job in jobs:
+ idx = -3 if "-x-" in job else -2
lst_job = job.split("-")
d_job_info["job"].append(job)
d_job_info["dut"].append(lst_job[1])
d_job_info["ttype"].append(lst_job[3])
d_job_info["cadence"].append(lst_job[4])
- d_job_info["tbed"].append("-".join(lst_job[-2:]))
+ d_job_info["tbed"].append("-".join(lst_job[idx:]))
self._job_info = pd.DataFrame.from_dict(d_job_info)
- self._default = set_job_params(self._job_info, C.STATS_DEFAULT_JOB)
+ self._default = set_job_params(self._job_info, d_job_info["job"][0])
tst_info = {
"job": list(),
@@ -196,7 +198,7 @@ class Layout:
"dd-tbeds-value": self._default["tbed"],
"al-job-children": html.A(
self._default["job"],
- href=f"{C.URL_CICD}{self._default['job']}",
+ href=get_url_job(self._default["job"]),
target="_blank"
)
}
@@ -630,10 +632,7 @@ class Layout:
"dd-tbeds-value": job_params["tbed"],
"al-job-children": html.A(
self._default["job"],
- href=(
- f"{C.URL_CICD}"
- f"{self._default['job']}"
- ),
+ href=get_url_job(self._default["job"]),
target="_blank"
)
},
@@ -654,7 +653,7 @@ class Layout:
{
"al-job-children": html.A(
job,
- href=f"{C.URL_CICD}{job}",
+ href=get_url_job(job),
target="_blank"
)
}
@@ -784,7 +783,7 @@ class Layout:
dbc.Badge(lst_itm[0]),
html.A(
lst_itm[1],
- href=f"{C.URL_LOGS}{lst_itm[1]}",
+ href=get_url_logs(lst_itm[1]),
target="_blank"
)
])
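
Both layout.py and utils.py now pick the testbed suffix with idx = -3 for external "-x-" testbeds, whose names span three dash-separated tokens instead of two. A rough sketch of the parsing, using the job name that used to be STATS_DEFAULT_JOB and a hypothetical "-x-" job:

    def parse_job(job: str) -> dict:
        """Split a CSIT job name into DUT, test type, cadence and testbed."""
        tokens = job.split("-")
        # "-x-" testbed names span three dash-separated tokens, others two.
        idx = -3 if "-x-" in job else -2
        return {
            "dut": tokens[1],
            "ttype": tokens[3],
            "cadence": tokens[4],
            "tbed": "-".join(tokens[idx:]),
        }

    parse_job("csit-vpp-perf-mrr-daily-master-2n-icx")["tbed"]    # "2n-icx"
    parse_job("csit-vpp-perf-mrr-daily-master-2n-x-abc")["tbed"]  # "2n-x-abc" (hypothetical job)
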
diff --git a/csit.infra.dash/app/cdash/utils/constants.py b/csit.infra.dash/app/cdash/utils/constants.py
index bafa7b7f42..3b6e125d8e 100644
--- a/csit.infra.dash/app/cdash/utils/constants.py
+++ b/csit.infra.dash/app/cdash/utils/constants.py
@@ -118,15 +118,18 @@ class Constants:
# External stylesheets.
EXTERNAL_STYLESHEETS = ["/static/dist/css/bootstrap.css", ]
- # URL to Jenkins
+ # CICD type.
+ CICD_TYPE = get_str_from_env("CICD_TYPE", "jenkins")
+
+ # URL to CICD.
URL_CICD = get_str_from_env("URL_CICD", "https://jenkins.fd.io/job/")
- # URL to logs
+ # URL to logs.
URL_LOGS = get_str_from_env(
"URL_LOGS", "https://logs.fd.io/vex-yul-rot-jenkins-1/"
)
- # URL to the documentation
+ # URL to the documentation.
URL_DOC = get_str_from_env("URL_DOC", "https://csit.fd.io/cdocs/")
URL_DOC_TRENDING = URL_DOC + "methodology/trending/analysis/"
URL_DOC_REL_NOTES = URL_DOC + "release_notes/current/"
@@ -490,9 +493,6 @@ class Constants:
# Layout of plot.ly graphs.
STATS_GRAPH_LAYOUT_FILE = "cdash/stats/layout.yaml"
- # The default job displayed when the page is loaded first time.
- STATS_DEFAULT_JOB = "csit-vpp-perf-mrr-daily-master-2n-icx"
-
# Default name of downloaded file with selected data.
STATS_DOWNLOAD_FILE_NAME = "stats.csv"
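
CICD_TYPE joins the other env-backed constants. A minimal sketch of how such a lookup can resolve it, assuming get_str_from_env() maps the name to the CSIT_-prefixed variables seen in docker-compose.yaml below (the real helper already exists in constants.py):

    import os

    def get_str_from_env(name: str, default: str) -> str:
        # Assumed behavior: read the CSIT_-prefixed environment variable,
        # falling back to the given default when it is not set.
        return os.environ.get(f"CSIT_{name}", default)

    CICD_TYPE = get_str_from_env("CICD_TYPE", "jenkins")  # "github" switches URL generation
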
diff --git a/csit.infra.dash/app/cdash/utils/telemetry_data.py b/csit.infra.dash/app/cdash/utils/telemetry_data.py
index 9975874d96..c63ee0057a 100644
--- a/csit.infra.dash/app/cdash/utils/telemetry_data.py
+++ b/csit.infra.dash/app/cdash/utils/telemetry_data.py
@@ -64,7 +64,7 @@ class TelemetryData:
df = pd.concat(lst_items, ignore_index=True, copy=False)
# Use only neccessary data:
- df = df[[
+ df = df[df.columns.intersection([
"job",
"build",
"dut_type",
@@ -81,7 +81,8 @@ class TelemetryData:
"result_ndr_lower_rate_value",
"result_ndr_lower_rate_unit",
"telemetry"
- ]]
+ ])]
+
# Transform metrics from strings to dataframes:
lst_telemetry = list()
for _, row in df.iterrows():
@@ -91,7 +92,7 @@ class TelemetryData:
"value": list(),
"timestamp": list()
}
-
+
# If there is no telemetry data, use empty dictionary
if row["telemetry"] is None or isinstance(row["telemetry"], float):
lst_telemetry.append(pd.DataFrame(data=d_telemetry))
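
The switch to df.columns.intersection() makes the column selection tolerant of inputs that lack some of the listed columns, where a plain df[[...]] would raise KeyError. A small illustration:

    import pandas as pd

    wanted = ["job", "build", "telemetry", "result_ndr_lower_rate_value"]
    df = pd.DataFrame({"job": ["j1"], "build": [42]})

    # df[wanted] would raise KeyError here because two columns are missing;
    # intersection() narrows the selection to the columns actually present.
    subset = df[df.columns.intersection(wanted)]
    # Only the columns that exist in df ('job' and 'build') survive.
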
diff --git a/csit.infra.dash/app/cdash/utils/utils.py b/csit.infra.dash/app/cdash/utils/utils.py
index e203dfbccd..62d4770937 100644
--- a/csit.infra.dash/app/cdash/utils/utils.py
+++ b/csit.infra.dash/app/cdash/utils/utils.py
@@ -286,12 +286,13 @@ def set_job_params(df: pd.DataFrame, job: str) -> dict:
"""
l_job = job.split("-")
+ idx = -3 if "-x-" in job else -2
return {
"job": job,
"dut": l_job[1],
"ttype": l_job[3],
"cadence": l_job[4],
- "tbed": "-".join(l_job[-2:]),
+ "tbed": "-".join(l_job[idx:]),
"duts": generate_options(get_duts(df)),
"ttypes": generate_options(get_ttypes(df, l_job[1])),
"cadences": generate_options(get_cadences(df, l_job[1], l_job[3])),
@@ -720,7 +721,7 @@ def show_trending_graph_data(
dbc.Badge(lst_itm[0]),
html.A(
lst_itm[1],
- href=f"{C.URL_LOGS}{lst_itm[1]}",
+ href=get_url_logs(lst_itm[1]),
target="_blank"
)
])
@@ -852,7 +853,7 @@ def show_iterative_graph_data(
continue
list_group_item = dbc.ListGroupItem([
dbc.Badge(k),
- html.A(v, href=f"{C.URL_LOGS}{v}", target="_blank")
+ html.A(v, href=get_url_logs(v), target="_blank")
])
else:
list_group_item = dbc.ListGroupItem([dbc.Badge(k), v])
@@ -903,3 +904,45 @@ def show_iterative_graph_data(
]
return metadata, graph, True
+
+
+def get_url_job(job: str) -> str:
+ """Generates a URL to CI/CD job.
+
+ :param job: The name of job.
+ :type job: str
+ :raises KeyError: If the job name is not a valid job name.
+ :returns: The URL to CI/CD job.
+ """
+
+ if C.CICD_TYPE == "jenkins":
+ return f"{C.URL_CICD}{job}"
+ elif C.CICD_TYPE == "github":
+ l_j = job.split("-")
+ try:
+ return f"{C.URL_CICD}{l_j[0]}-{l_j[1]}-{l_j[2]}-{l_j[4]}.yml"
+ except KeyError:
+ return str()
+ else:
+ return str()
+
+
+def get_url_logs(job_build: str) -> str:
+ """Generates a URL to CI/CD job and its build.
+
+ :param job_build: The name of job and number of build. Its structure is:
+ "<job name>/<build number>".
+ :type job: str
+ :raises KeyError: If the job name is not a valid job name.
+ :returns: The URL to CI/CD build.
+ """
+
+ if C.CICD_TYPE == "jenkins":
+ return f"{C.URL_LOGS}{job_build}"
+ elif C.CICD_TYPE == "github":
+ try:
+ return f"{C.URL_LOGS}{job_build.split('/')[1]}"
+ except KeyError:
+ return str()
+ else:
+ return str()
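
With the defaults from constants.py (CICD_TYPE "jenkins", URL_CICD "https://jenkins.fd.io/job/", URL_LOGS "https://logs.fd.io/vex-yul-rot-jenkins-1/"), the new helpers behave roughly as below; the build number is illustrative:

    from cdash.utils.utils import get_url_job, get_url_logs  # imported relatively in layout.py

    job = "csit-vpp-perf-mrr-daily-master-2n-icx"

    get_url_job(job)
    # -> "https://jenkins.fd.io/job/csit-vpp-perf-mrr-daily-master-2n-icx"

    get_url_logs(f"{job}/123")
    # -> "https://logs.fd.io/vex-yul-rot-jenkins-1/csit-vpp-perf-mrr-daily-master-2n-icx/123"

    # With CICD_TYPE == "github", get_url_job(job) would instead return
    # "<URL_CICD>csit-vpp-perf-daily.yml" and get_url_logs(f"{job}/123")
    # would return "<URL_LOGS>123".
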
diff --git a/csit.infra.dash/docker-compose.yaml b/csit.infra.dash/docker-compose.yaml
index a3b5e1a683..8ec97fad0f 100644
--- a/csit.infra.dash/docker-compose.yaml
+++ b/csit.infra.dash/docker-compose.yaml
@@ -16,6 +16,7 @@ services:
CSIT_START_DOC: "True"
CSIT_TITLE: "FD.io CSIT"
CSIT_BRAND: "CSIT-Dash"
+ CSIT_CICD_TYPE: "jenkins"
CSIT_URL_CICD: "https://jenkins.fd.io/job/"
CSIT_URL_LOGS: "https://logs.fd.io/vex-yul-rot-jenkins-1/"
CSIT_URL_DOC: "https://csit.fd.io/cdocs/"
diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh
index 4f104dbfd3..8a46fc329d 100644
--- a/resources/libraries/bash/function/common.sh
+++ b/resources/libraries/bash/function/common.sh
@@ -1363,7 +1363,8 @@ function set_environment_variables () {
export TREX_CORE_COUNT=14
;;
*"2n-x-"* | *"3n-x-"* )
- export TREX_CORE_COUNT=2
+ export TREX_CORE_COUNT=6
+ export TREX_PORT_MTU=9000
;;
esac
}
diff --git a/resources/libraries/python/DUTSetup.py b/resources/libraries/python/DUTSetup.py
index f9758c5f9f..64f3b4317c 100644
--- a/resources/libraries/python/DUTSetup.py
+++ b/resources/libraries/python/DUTSetup.py
@@ -298,7 +298,8 @@ class DUTSetup:
return sriov_numvfs
@staticmethod
- def set_sriov_numvfs(node, pf_pci_addr, path="devices", numvfs=0):
+ def set_sriov_numvfs(
+ node, pf_pci_addr, path="devices", numvfs=0, skip_check=True):
"""Init or reset SR-IOV virtual functions by setting its number on PCI
device on DUT. Setting to zero removes all VFs.
@@ -306,10 +307,12 @@ class DUTSetup:
:param pf_pci_addr: Physical Function PCI device address.
:param path: Either device or driver.
:param numvfs: Number of VFs to initialize, 0 - removes the VFs.
+ :param skip_check: Return anyway.
:type node: dict
:type pf_pci_addr: str
:type path: str
:type numvfs: int
+ :type skip_check: bool
:raises RuntimeError: Failed to create VFs on PCI.
"""
cmd = f"test -f /sys/bus/pci/{path}/{pf_pci_addr}/sriov_numvfs"
@@ -320,6 +323,9 @@ class DUTSetup:
# sriov is not supported and we want 0 VFs
# no need to do anything
return
+ if numvfs > 0 and skip_check:
+ # we may be in VM
+ return
raise RuntimeError(
f"Can't configure {numvfs} VFs on {pf_pci_addr} device "
diff --git a/resources/libraries/python/VppConfigGenerator.py b/resources/libraries/python/VppConfigGenerator.py
index fb3df2fc16..971b82606f 100644
--- a/resources/libraries/python/VppConfigGenerator.py
+++ b/resources/libraries/python/VppConfigGenerator.py
@@ -197,7 +197,7 @@ class VppConfigGenerator:
path = ["node", "default", "variant"]
self.add_config_item(self._nodeconfig, variant, path)
- def add_api_segment_gid(self, value="vpp"):
+ def add_api_segment_gid(self, value="testuser"):
"""Add api-segment gid configuration.
:param value: Gid.
diff --git a/resources/libraries/robot/shared/test_setup.robot b/resources/libraries/robot/shared/test_setup.robot
index 22e017fd6f..ae3cfa495f 100644
--- a/resources/libraries/robot/shared/test_setup.robot
+++ b/resources/libraries/robot/shared/test_setup.robot
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -50,3 +50,4 @@
| |
| | ${trex_running}= | Is Trex Running | ${tg}
| | Run Keyword If | not ${trex_running} | Startup Trex | ${tg} | ${osi_layer}
+| | Stop Vpp Service on All Duts | ${nodes}
diff --git a/tests/vpp/perf/__init__.robot b/tests/vpp/perf/__init__.robot
index 04b47f9746..ef7c81980b 100644
--- a/tests/vpp/perf/__init__.robot
+++ b/tests/vpp/perf/__init__.robot
@@ -1,4 +1,4 @@
-# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -29,6 +29,7 @@
| ... | AND | Get CPU Info from All Nodes | ${nodes}
| ... | AND | Update All Interface Data on All Nodes | ${nodes}
| ... | skip_tg=${True}
+| ... | AND | Stop Vpp Service on All Duts | ${nodes}
| ... | AND | Finalize Suite Setup Export
|
| Suite Teardown | Run Keywords | Start Suite Teardown Export