Diffstat (limited to 'resources/libraries/bash/function')
-rw-r--r--  resources/libraries/bash/function/README.txt      20
-rw-r--r--  resources/libraries/bash/function/ansible.sh       12
-rw-r--r--  resources/libraries/bash/function/artifacts.sh     79
-rw-r--r--  resources/libraries/bash/function/branch.sh         6
-rw-r--r--  resources/libraries/bash/function/common.sh       539
-rw-r--r--  resources/libraries/bash/function/device.sh        37
-rw-r--r--  resources/libraries/bash/function/docs.sh         267
-rw-r--r--  resources/libraries/bash/function/dpdk.sh          23
-rw-r--r--  resources/libraries/bash/function/eb_version.sh   159
-rw-r--r--  resources/libraries/bash/function/gather.sh        13
-rw-r--r--  resources/libraries/bash/function/hugo.sh         113
-rwxr-xr-x  resources/libraries/bash/function/nginx.sh          8
-rw-r--r--  resources/libraries/bash/function/per_patch.sh    216
-rw-r--r--  resources/libraries/bash/function/terraform.sh    115
14 files changed, 864 insertions, 743 deletions
diff --git a/resources/libraries/bash/function/README.txt b/resources/libraries/bash/function/README.txt
deleted file mode 100644
index e4eb91565b..0000000000
--- a/resources/libraries/bash/function/README.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-Files in this directory system are to be executed indirectly,
-sourced from other scripts.
-
-In fact, the files should only define functions,
-except perhaps some minimal logic needed to import dependencies.
-The originating function calls should be executed from elsewhere,
-typically from entry scripts.
diff --git a/resources/libraries/bash/function/ansible.sh b/resources/libraries/bash/function/ansible.sh
index 64508bda11..6cf4d16f43 100644
--- a/resources/libraries/bash/function/ansible.sh
+++ b/resources/libraries/bash/function/ansible.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -28,9 +28,12 @@ function ansible_adhoc () {
set -exuo pipefail
case "$FLAVOR" in
- "aws")
+ "aws" | "c6in" | "c6gn" | "c7gn")
INVENTORY_PATH="cloud_inventory"
;;
+ "x-"*)
+ INVENTORY_PATH="external_inventory"
+ ;;
*)
INVENTORY_PATH="lf_inventory"
;;
@@ -70,9 +73,12 @@ function ansible_playbook () {
set -exuo pipefail
case "$FLAVOR" in
- "aws")
+ "aws" | "c6in" | "c6gn" | "c7gn")
INVENTORY_PATH="cloud_inventory"
;;
+ "x-"*)
+ INVENTORY_PATH="external_inventory"
+ ;;
*)
INVENTORY_PATH="lf_inventory"
;;
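
For reference, the inventory routing above relies on bash glob patterns in case arms rather than regular expressions. A minimal standalone sketch of the same dispatch, with illustrative FLAVOR values (the "x-constellation" name is made up):

    # Sketch of the inventory dispatch; FLAVOR values are illustrative.
    for FLAVOR in aws c6in x-constellation skx; do
        case "${FLAVOR}" in
            "aws" | "c6in" | "c6gn" | "c7gn")
                INVENTORY_PATH="cloud_inventory"
                ;;
            "x-"*)
                # Glob prefix match: any externally hosted "x-..." flavor.
                INVENTORY_PATH="external_inventory"
                ;;
            *)
                INVENTORY_PATH="lf_inventory"
                ;;
        esac
        echo "${FLAVOR} -> ${INVENTORY_PATH}"
    done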
diff --git a/resources/libraries/bash/function/artifacts.sh b/resources/libraries/bash/function/artifacts.sh
index 15a4dd2fe1..e4d5dd6fc6 100644
--- a/resources/libraries/bash/function/artifacts.sh
+++ b/resources/libraries/bash/function/artifacts.sh
@@ -1,7 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Copyright (c) 2021 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -26,8 +25,6 @@ function download_artifacts () {
# - REPO_URL - FD.io Packagecloud repository.
# Functions conditionally called (see their documentation for side effects):
# - download_ubuntu_artifacts
- # - download_centos_artifacts
- # - download_opensuse_artifacts
set -exuo pipefail
@@ -46,10 +43,6 @@ function download_artifacts () {
if [ "${os_id}" == "ubuntu" ]; then
download_ubuntu_artifacts || die
- elif [ "${os_id}" == "centos" ]; then
- download_centos_artifacts || die
- elif [ "${os_id}" == "opensuse" ]; then
- download_opensuse_artifacts || die
else
die "${os_id} is not yet supported."
fi
@@ -129,73 +122,3 @@ function download_ubuntu_artifacts () {
}
fi
}
-
-function download_centos_artifacts () {
-
- # Download or install CentOS VPP artifacts from packagecloud.io.
- #
- # Variables read:
- # - REPO_URL - FD.io Packagecloud repository.
- # - VPP_VERSION - VPP version.
- # - INSTALL - Whether install packages (if set to "true") or download only.
- # Default: "false".
-
- set -exuo pipefail
-
- curl -s "${REPO_URL}"/script.rpm.sh | sudo -E bash || {
- die "Packagecloud FD.io repo fetch failed."
- }
- # If version is set we will add suffix.
- artifs=()
- pkgs=(vpp vpp-selinux-policy vpp-devel vpp-lib vpp-plugins vpp-api-python)
- if [ -z "${VPP_VERSION-}" ]; then
- artifs+=(${pkgs[@]})
- else
- artifs+=(${pkgs[@]/%/-${VPP_VERSION-}})
- fi
-
- if [[ "${INSTALL:-false}" == "true" ]]; then
- sudo yum -y install "${artifs[@]}" || {
- die "Install VPP artifact failed."
- }
- else
- sudo yum -y install --downloadonly --downloaddir=. "${artifs[@]}" || {
- die "Download VPP artifacts failed."
- }
- fi
-}
-
-function download_opensuse_artifacts () {
-
- # Download or install OpenSuSE VPP artifacts from packagecloud.io.
- #
- # Variables read:
- # - REPO_URL - FD.io Packagecloud repository.
- # - VPP_VERSION - VPP version.
- # - INSTALL - Whether install packages (if set to "true") or download only.
- # Default: "false".
-
- set -exuo pipefail
-
- curl -s "${REPO_URL}"/script.rpm.sh | sudo -E bash || {
- die "Packagecloud FD.io repo fetch failed."
- }
- # If version is set we will add suffix.
- artifs=()
- pkgs=(vpp vpp-devel vpp-lib vpp-plugins libvpp0)
- if [ -z "${VPP_VERSION-}" ]; then
- artifs+=(${pkgs[@]})
- else
- artifs+=(${pkgs[@]/%/-${VPP_VERSION-}})
- fi
-
- if [[ "${INSTALL:-false}" == "true" ]]; then
- sudo yum -y install "${artifs[@]}" || {
- die "Install VPP artifact failed."
- }
- else
- sudo yum -y install --downloadonly --downloaddir=. "${artifs[@]}" || {
- die "Download VPP artifacts failed."
- }
- fi
-}
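
The removed functions relied on the `${array[@]/%/suffix}` expansion to append a version suffix to every package name at once. A small sketch of that idiom, with example values:

    # Suffix-append expansion as used above; values are examples only.
    pkgs=(vpp vpp-devel vpp-plugins)
    VPP_VERSION="23.02-release"
    artifs=("${pkgs[@]/%/-${VPP_VERSION}}")
    printf '%s\n' "${artifs[@]}"
    # vpp-23.02-release
    # vpp-devel-23.02-release
    # vpp-plugins-23.02-release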
diff --git a/resources/libraries/bash/function/branch.sh b/resources/libraries/bash/function/branch.sh
index ba9cc39c67..37900eab01 100644
--- a/resources/libraries/bash/function/branch.sh
+++ b/resources/libraries/bash/function/branch.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -57,10 +57,6 @@ function checkout_csit_for_vpp () {
set -exuo pipefail
case "${1}" in
- "stable/2009")
- # LTS branch
- branch_id="origin/${1/stable\//oper-rls}_lts"
- ;;
"stable/"*)
branch_id="origin/${1/stable\//oper-rls}"
;;
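
With the LTS special case gone, every "stable/"* branch maps to its operational branch through a single pattern substitution. In isolation (branch name is an example):

    # The substitution from the remaining case arm; branch is an example.
    branch="stable/2306"
    branch_id="origin/${branch/stable\//oper-rls}"
    echo "${branch_id}"   # origin/oper-rls2306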
diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh
index b194c31259..4f104dbfd3 100644
--- a/resources/libraries/bash/function/common.sh
+++ b/resources/libraries/bash/function/common.sh
@@ -1,5 +1,5 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Copyright (c) 2022 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
+# Copyright (c) 2024 PANTHEON.tech and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -45,7 +45,7 @@ function activate_docker_topology () {
device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
case_text="${NODENESS}_${FLAVOR}"
case "${case_text}" in
- "1n_skx" | "1n_tx2")
+ "1n_skx" | "1n_alt" | "1n_spr")
# We execute reservation over csit-shim-dcr (ssh) which runs sourced
# script's functions. Env variables are read from ssh output
# back to localhost for further processing.
@@ -93,6 +93,12 @@ function activate_docker_topology () {
cat ${WORKING_TOPOLOGY} | grep -v password || {
die "Topology read failed!"
}
+
+ # Subfunctions to update data that may depend on topology reserved.
+ set_environment_variables || die
+ select_tags || die
+ compose_robot_arguments || die
+
}
@@ -158,7 +164,7 @@ function check_download_dir () {
# Fail if there are no files visible in ${DOWNLOAD_DIR}.
#
# Variables read:
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# Directories read:
# - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
# Functions called:
@@ -227,7 +233,7 @@ function common_dirs () {
TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
die "Readlink failed."
}
- JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/docs/job_specs") || {
+ JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
die "Readlink failed."
}
RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
@@ -236,9 +242,6 @@ function common_dirs () {
TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
die "Readlink failed."
}
- DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
- die "Readlink failed."
- }
PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
die "Readlink failed."
}
@@ -258,8 +261,12 @@ function common_dirs () {
}
-function compose_pybot_arguments () {
+function compose_robot_arguments () {
+ # This function is called by run_tests function.
+ # The reason is that some jobs (bisect) perform reservation multiple times,
+ # so WORKING_TOPOLOGY can be different each time.
+ #
# Variables read:
# - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
# - DUT - CSIT test/ subdirectory, set while processing tags.
@@ -268,21 +275,23 @@ function compose_pybot_arguments () {
# - TEST_CODE - The test selection string from environment or argument.
# - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
# Variables set:
- # - PYBOT_ARGS - String holding part of all arguments for pybot.
- # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.
+ # - ROBOT_ARGS - String holding part of all arguments for robot.
+ # - EXPANDED_TAGS - Array of strings robot arguments compiled from tags.
set -exuo pipefail
# No explicit check needed with "set -u".
- PYBOT_ARGS=("--loglevel" "TRACE")
- PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
+ ROBOT_ARGS=("--loglevel" "TRACE")
+ ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
+ # TODO: The rest does not need to be recomputed on each reservation.
+ # Refactor TEST_CODE so this part can be called only once.
case "${TEST_CODE}" in
*"device"*)
- PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
+ ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
;;
- *"perf"*)
- PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
+ *"perf"* | *"bisect"*)
+ ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
;;
*)
die "Unknown specification: ${TEST_CODE}"
@@ -319,7 +328,7 @@ function deactivate_docker_topology () {
case_text="${NODENESS}_${FLAVOR}"
case "${case_text}" in
- "1n_skx" | "1n_tx2")
+ "1n_skx" | "1n_alt" | "1n_spr")
ssh="ssh root@172.17.0.1 -p 6022"
env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
# The "declare -f" output is long and boring.
@@ -360,19 +369,19 @@ function die () {
}
-function die_on_pybot_error () {
+function die_on_robot_error () {
# Source this fragment if you want to abort on any failed test case.
#
# Variables read:
- # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
+ # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
# Functions called:
# - die - Print to stderr and exit.
set -exuo pipefail
- if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
- die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
+ if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
+ die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
fi
}
@@ -385,7 +394,7 @@ function generate_tests () {
# within any subdirectory after copying.
# This is a separate function, because this code is called
- # both by autogen checker and entries calling run_pybot.
+ # both by autogen checker and entries calling run_robot.
# Directories read:
# - ${CSIT_DIR}/tests - Used as templates for the generated tests.
@@ -431,78 +440,110 @@ function get_test_code () {
fi
case "${TEST_CODE}" in
- *"1n-vbox"*)
+ *"1n-vbox")
NODENESS="1n"
FLAVOR="vbox"
;;
- *"1n-skx"*)
+ *"1n-skx")
NODENESS="1n"
FLAVOR="skx"
;;
- *"1n-tx2"*)
+ *"1n-spr")
NODENESS="1n"
- FLAVOR="tx2"
+ FLAVOR="spr"
;;
- *"1n-aws"*)
+ *"1n-alt")
+ NODENESS="1n"
+ FLAVOR="alt"
+ ;;
+ *"1n-aws")
NODENESS="1n"
FLAVOR="aws"
;;
- *"2n-aws"*)
+ *"2n-aws")
NODENESS="2n"
FLAVOR="aws"
;;
- *"3n-aws"*)
+ *"3n-aws")
NODENESS="3n"
FLAVOR="aws"
;;
- *"2n-skx"*)
+ *"2n-c7gn")
NODENESS="2n"
- FLAVOR="skx"
+ FLAVOR="c7gn"
;;
- *"3n-skx"*)
+ *"3n-c7gn")
NODENESS="3n"
- FLAVOR="skx"
+ FLAVOR="c7gn"
;;
- *"2n-zn2"*)
+ *"1n-c6in")
+ NODENESS="1n"
+ FLAVOR="c6in"
+ ;;
+ *"2n-c6in")
+ NODENESS="2n"
+ FLAVOR="c6in"
+ ;;
+ *"3n-c6in")
+ NODENESS="3n"
+ FLAVOR="c6in"
+ ;;
+ *"2n-zn2")
NODENESS="2n"
FLAVOR="zn2"
;;
- *"2n-clx"*)
+ *"2n-clx")
NODENESS="2n"
FLAVOR="clx"
;;
- *"2n-icx"*)
+ *"2n-icx")
NODENESS="2n"
FLAVOR="icx"
;;
- *"3n-icx"*)
+ *"2n-spr")
+ NODENESS="2n"
+ FLAVOR="spr"
+ ;;
+ *"3n-icx")
NODENESS="3n"
FLAVOR="icx"
;;
- *"2n-dnv"*)
- NODENESS="2n"
- FLAVOR="dnv"
+ *"3na-spr")
+ NODENESS="3na"
+ FLAVOR="spr"
;;
- *"3n-dnv"*)
- NODENESS="3n"
- FLAVOR="dnv"
+ *"3nb-spr")
+ NODENESS="3nb"
+ FLAVOR="spr"
;;
- *"3n-snr"*)
+ *"3n-snr")
NODENESS="3n"
FLAVOR="snr"
;;
- *"2n-tx2"*)
+ *"3n-icxd")
+ NODENESS="3n"
+ FLAVOR="icxd"
+ ;;
+ *"2n-tx2")
NODENESS="2n"
FLAVOR="tx2"
;;
- *"3n-tsh"*)
+ *"3n-tsh")
NODENESS="3n"
FLAVOR="tsh"
;;
- *"3n-alt"*)
+ *"3n-alt")
NODENESS="3n"
FLAVOR="alt"
;;
+ *"2n-x-"*)
+ NODENESS="2n"
+ FLAVOR="${TEST_CODE#*2n-}"
+ ;;
+ *"3n-x-"*)
+ NODENESS="3n"
+ FLAVOR="${TEST_CODE#*3n-}"
+ ;;
esac
}
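
Dropping the trailing `*` anchors each pattern to the end of TEST_CODE, so a longer testbed name can no longer be captured by a shorter prefix arm. A quick check with an invented job name:

    # Anchored vs unanchored glob suffix matching; job name is invented.
    TEST_CODE="csit-vpp-perf-3n-icxd"
    [[ "${TEST_CODE}" == *"3n-icx" ]] && echo "anchored match"     # no output
    [[ "${TEST_CODE}" == *"3n-icx"* ]] && echo "unanchored match"  # prints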
@@ -516,6 +557,10 @@ function get_test_tag_string () {
# Variables set:
# - TEST_TAG_STRING - The string following trigger word in gerrit comment.
# May be empty, or even not set on event types not adding comment.
+ # - GIT_BISECT_FROM - If bisecttest, the commit hash to bisect from.
+ # Else not set.
+ # Variables exported optionally:
+ # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger.
# TODO: ci-management scripts no longer need to perform this.
@@ -523,6 +568,10 @@ function get_test_tag_string () {
if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
case "${TEST_CODE}" in
+ # Order matters, bisect job contains "perf" in its name.
+ *"bisect"*)
+ trigger="bisecttest"
+ ;;
*"device"*)
trigger="devicetest"
;;
@@ -548,6 +597,18 @@ function get_test_tag_string () {
comment=$(fgrep "${trigger}" <<< "${comment}" || true)
TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
fi
+ if [[ "${trigger}" == "bisecttest" ]]; then
+ # Intentionally without quotes, so spaces delimit elements.
+ test_tag_array=(${TEST_TAG_STRING}) || die "How could this fail?"
+ # First "argument" of bisecttest is a commit hash.
+ GIT_BISECT_FROM="${test_tag_array[0]}" || {
+ die "Bisect job requires commit hash."
+ }
+ # Update the tag string (tag expressions only, no commit hash).
+ TEST_TAG_STRING="${test_tag_array[@]:1}" || {
+ die "Bisect job needs a single test, no default."
+ }
+ fi
if [[ -n "${TEST_TAG_STRING-}" ]]; then
test_tag_array=(${TEST_TAG_STRING})
if [[ "${test_tag_array[0]}" == "icl" ]]; then
@@ -621,42 +682,6 @@ function move_archives () {
}
-function post_process_robot_outputs () {
-
- # Generate INFO level output_info.xml by rebot.
- # Archive UTI raw json outputs.
- #
- # Variables read:
- # - ARCHIVE_DIR - Path to post-processed files.
-
- set -exuo pipefail
-
- # Compress raw json outputs, as they will never be post-processed.
- pushd "${ARCHIVE_DIR}" || die
- if [ -d "tests" ]; then
- # Use deterministic order.
- options+=("--sort=name")
- # We are keeping info outputs where they are.
- # Assuming we want to move anything but info files (and dirs).
- options+=("--exclude=*.info.json")
- tar czf "generated_output_raw.tar.gz" "${options[@]}" "tests" || true
- # Tar can remove when archiving, but chokes (not deterministically)
- # on attempting to remove dirs (not empty as info files are there).
- # So we need to delete the raw files manually.
- find "tests" -type f -name "*.raw.json" -delete || true
- fi
- popd || die
-
- # Generate INFO level output_info.xml for post-processing.
- all_options=("--loglevel" "INFO")
- all_options+=("--log" "none")
- all_options+=("--report" "none")
- all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
- all_options+=("${ARCHIVE_DIR}/output.xml")
- rebot "${all_options[@]}" || true
-}
-
-
function prepare_topology () {
# Prepare virtual testbed topology if needed based on flavor.
@@ -665,6 +690,8 @@ function prepare_topology () {
# - TEST_CODE - String affecting test selection, usually jenkins job name.
# - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
# - FLAVOR - Node flavor string, e.g. "clx" or "skx".
+ # Variables set:
+ # - TERRAFORM_MODULE_DIR - Terraform module directory.
# Functions called:
# - die - Print to stderr and exit.
# - terraform_init - Terraform init topology.
@@ -676,7 +703,29 @@ function prepare_topology () {
case "${case_text}" in
"1n_aws" | "2n_aws" | "3n_aws")
export TF_VAR_testbed_name="${TEST_CODE}"
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+ terraform_init || die "Failed to call terraform init."
+ trap "terraform_destroy" ERR EXIT || {
+ die "Trap attempt failed, please cleanup manually. Aborting!"
+ }
+ terraform_apply || die "Failed to call terraform apply."
+ ;;
+ "2n_c7gn" | "3n_c7gn")
+ export TF_VAR_testbed_name="${TEST_CODE}"
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn"
+ terraform_init || die "Failed to call terraform init."
+ trap "terraform_destroy" ERR EXIT || {
+ die "Trap attempt failed, please cleanup manually. Aborting!"
+ }
+ terraform_apply || die "Failed to call terraform apply."
+ ;;
+ "1n_c6in" | "2n_c6in" | "3n_c6in")
+ export TF_VAR_testbed_name="${TEST_CODE}"
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
terraform_init || die "Failed to call terraform init."
+ trap "terraform_destroy" ERR EXIT || {
+ die "Trap attempt failed, please cleanup manually. Aborting!"
+ }
terraform_apply || die "Failed to call terraform apply."
;;
esac
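
Each flavor arm registers the same cleanup-on-exit pattern; distilled, it looks like this (the echo stands in for the real terraform_destroy):

    # Cleanup-on-failure pattern used per flavor above (stubbed).
    function terraform_destroy () {
        echo "tearing down testbed"   # stub for the real Terraform teardown
    }
    trap "terraform_destroy" ERR EXIT || {
        echo "Trap attempt failed, please cleanup manually. Aborting!" >&2
        exit 1
    }
    # Any command failure (ERR) or script end (EXIT) now triggers the teardown.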
@@ -689,6 +738,9 @@ function reserve_and_cleanup_testbed () {
# When cleanup fails, remove from topologies and keep retrying
# until all topologies are removed.
#
+ # Multiple other functions are called from here,
+ # as they set variables that depend on reserved topology data.
+ #
# Variables read:
# - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
# - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
@@ -757,38 +809,50 @@ function reserve_and_cleanup_testbed () {
echo "Sleeping ${sleep_time}"
sleep "${sleep_time}" || die "Sleep failed."
done
+
+ # Subfunctions to update data that may depend on topology reserved.
+ set_environment_variables || die
+ select_tags || die
+ compose_robot_arguments || die
}
-function run_pybot () {
+function run_robot () {
- # Run pybot with options based on input variables.
- # Generate INFO level output_info.xml by rebot.
- # Archive UTI raw json outputs.
+ # Run robot with options based on input variables.
+ #
+ # Testbed has to be reserved already,
+ # as some data may have changed between reservations,
+ # for example excluded NICs.
#
# Variables read:
# - CSIT_DIR - Path to existing root of local CSIT git repository.
# - ARCHIVE_DIR - Path to store robot result files in.
- # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
+ # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.sh
# - GENERATED_DIR - Tests are assumed to be generated under there.
+ # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
+ # - DUT - CSIT test/ subdirectory, set while processing tags.
+ # - TAGS - Array variable holding selected tag boolean expressions.
+ # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
+ # - TEST_CODE - The test selection string from environment or argument.
# Variables set:
- # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
+ # - ROBOT_ARGS - String holding part of all arguments for robot.
+ # - EXPANDED_TAGS - Array of string robot arguments compiled from tags.
+ # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
# Functions called:
# - die - Print to stderr and exit.
set -exuo pipefail
- all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
+ all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
all_options+=("${EXPANDED_TAGS[@]}")
pushd "${CSIT_DIR}" || die "Change directory operation failed."
set +e
robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
- PYBOT_EXIT_STATUS="$?"
+ ROBOT_EXIT_STATUS="$?"
set -e
- post_process_robot_outputs || die
-
popd || die "Change directory operation failed."
}
@@ -809,9 +873,9 @@ function select_arch_os () {
case "${ID}" in
"ubuntu"*)
case "${VERSION}" in
- *"LTS (Focal Fossa)"*)
- IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
- VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_FOCAL"
+ *"LTS (Jammy Jellyfish)"*)
+ IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
+ VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
PKG_SUFFIX="deb"
;;
*)
@@ -840,6 +904,9 @@ function select_arch_os () {
function select_tags () {
+ # Only to be called from the reservation function,
+ # as resulting tags may change based on topology data.
+ #
# Variables read:
# - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
# - TEST_CODE - String affecting test selection, usually jenkins job name.
@@ -856,7 +923,7 @@ function select_tags () {
# NIC SELECTION
case "${TEST_CODE}" in
- *"1n-aws"*)
+ *"1n-aws"* | *"1n-c6in"*)
start_pattern='^ SUT:'
;;
*)
@@ -879,27 +946,45 @@ function select_tags () {
# Select default NIC tag.
case "${TEST_CODE}" in
- *"3n-dnv"* | *"2n-dnv"*)
- default_nic="nic_intel-x553"
+ *"3n-snr")
+ default_nic="nic_intel-e822cq"
;;
- *"3n-snr"*)
- default_nic="nic_intel-e810xxv"
+ *"3n-icxd")
+ default_nic="nic_intel-e823c"
;;
- *"3n-tsh"*)
+ *"3n-tsh")
default_nic="nic_intel-x520-da2"
;;
- *"3n-icx"* | *"2n-icx"*)
- default_nic="nic_intel-xxv710"
+ *"3n-icx" | *"2n-icx")
+ default_nic="nic_intel-e810cq"
+ ;;
+ *"3na-spr")
+ default_nic="nic_mellanox-cx7veat"
+ ;;
+ *"3nb-spr")
+ default_nic="nic_intel-e810cq"
;;
- *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
+ *"2n-spr")
+ default_nic="nic_intel-e810cq"
+ ;;
+ *"2n-clx" | *"2n-zn2")
default_nic="nic_intel-xxv710"
;;
- *"2n-tx2"* | *"3n-alt"* | *"mrr-daily-master")
+ *"2n-tx2" | *"3n-alt")
default_nic="nic_intel-xl710"
;;
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
default_nic="nic_amazon-nitro-50g"
;;
+ *"2n-c7gn" | *"3n-c7gn")
+ default_nic="nic_amazon-nitro-100g"
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ default_nic="nic_amazon-nitro-200g"
+ ;;
+ *"2n-x-"* | *"3n-x-"*)
+ default_nic="nic_intel-e810cq"
+ ;;
*)
default_nic="nic_intel-x710"
;;
@@ -911,14 +996,22 @@ function select_tags () {
awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
- awk_nics_sub_cmd+='gsub("x553","10ge2p1x553");'
awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
+ awk_nics_sub_cmd+='gsub("2p1cx7veat","200ge2p1cx7veat");'
+ awk_nics_sub_cmd+='gsub("6p3cx7veat","200ge6p3cx7veat");'
+ awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
+ awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");'
+ awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");'
awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
+ awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
+ awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
+ awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");'
awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
+ awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
awk_nics_sub_cmd+='else drv="";'
awk_nics_sub_cmd+='if ($1 =="-") cores="";'
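
The accumulated awk program rewrites short NIC names from job-spec files into full interface tags; one substitution in isolation, with an illustrative input line:

    # One gsub from the accumulated awk program; input line is illustrative.
    echo "64b 1c e810cq drv_avf" | awk '{
        gsub("e810cq","100ge2p1e810cq")   # short NIC name -> full tag
        print
    }'
    # Output: 64b 1c 100ge2p1e810cq drv_avf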
@@ -935,9 +1028,15 @@ function select_tags () {
awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
SELECTION_MODE="--test"
;;
+ *"hoststack-daily"* )
+ readarray -t test_tag_array <<< $(grep -v "#" \
+ ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
+ awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+ SELECTION_MODE="--test"
+ ;;
*"ndrpdr-weekly"* )
readarray -t test_tag_array <<< $(grep -v "#" \
- ${tfd}/mlr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+ ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
SELECTION_MODE="--test"
;;
@@ -953,6 +1052,12 @@ function select_tags () {
awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
SELECTION_MODE="--test"
;;
+ *"soak-weekly"* )
+ readarray -t test_tag_array <<< $(grep -v "#" \
+ ${tfd}/soak_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+ awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+ SELECTION_MODE="--test"
+ ;;
*"report-iterative"* )
test_sets=(${TEST_TAG_STRING//:/ })
# Run only one test set per run
@@ -993,63 +1098,58 @@ function select_tags () {
# Reasons for blacklisting:
# - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
case "${TEST_CODE}" in
- *"1n-vbox"*)
+ *"1n-vbox")
test_tag_array+=("!avf")
test_tag_array+=("!vhost")
test_tag_array+=("!flow")
;;
- *"1n_tx2"*)
+ *"1n-alt")
test_tag_array+=("!flow")
;;
- *"2n-skx"*)
+ *"2n-clx")
test_tag_array+=("!ipsechw")
;;
- *"3n-skx"*)
+ *"2n-icx")
test_tag_array+=("!ipsechw")
- # Not enough nic_intel-xxv710 to support double link tests.
- test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
;;
- *"2n-clx"*)
+ *"2n-spr")
+ ;;
+ *"2n-tx2")
test_tag_array+=("!ipsechw")
;;
- *"2n-icx"*)
+ *"2n-zn2")
test_tag_array+=("!ipsechw")
;;
- *"3n-icx"*)
+ *"3n-alt")
test_tag_array+=("!ipsechw")
- # Not enough nic_intel-xxv710 to support double link tests.
- test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
;;
- *"2n-zn2"*)
+ *"3n-icx")
test_tag_array+=("!ipsechw")
+ test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
;;
- *"2n-dnv"*)
- test_tag_array+=("!memif")
- test_tag_array+=("!srv6_proxy")
- test_tag_array+=("!vhost")
- test_tag_array+=("!vts")
- test_tag_array+=("!drv_avf")
+ *"3n-snr")
;;
- *"2n-tx2"* | *"3n-alt"*)
- test_tag_array+=("!ipsechw")
+ *"3n-icxd")
;;
- *"3n-dnv"*)
- test_tag_array+=("!memif")
- test_tag_array+=("!srv6_proxy")
- test_tag_array+=("!vhost")
- test_tag_array+=("!vts")
- test_tag_array+=("!drv_avf")
+ *"3na-spr")
;;
- *"3n-snr"*)
+ *"3nb-spr")
;;
- *"3n-tsh"*)
- # 3n-tsh only has x520 NICs which don't work with AVF
+ *"3n-tsh")
test_tag_array+=("!drv_avf")
test_tag_array+=("!ipsechw")
;;
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
test_tag_array+=("!ipsechw")
;;
+ *"2n-c7gn" | *"3n-c7gn")
+ test_tag_array+=("!ipsechw")
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ test_tag_array+=("!ipsechw")
+ ;;
+ *"2n-x-"* | *"3n-x-"*)
+ ;;
esac
# We will add excluded NICs.
@@ -1057,19 +1157,17 @@ function select_tags () {
TAGS=()
prefix=""
-
- set +x
- if [[ "${TEST_CODE}" == "vpp-"* ]]; then
- if [[ "${TEST_CODE}" != *"device"* ]]; then
- # Automatic prefixing for VPP perf jobs to limit the NIC used and
- # traffic evaluation to MRR.
- if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
- prefix="${prefix}mrrAND"
- else
- prefix="${prefix}mrrAND${default_nic}AND"
+ if [[ "${TEST_CODE}" != *"daily"* ]]; then
+ if [[ "${TEST_CODE}" == "vpp-"* ]]; then
+ if [[ "${TEST_CODE}" != *"device"* ]]; then
+ # Automatic prefixing for VPP perf jobs to limit the NIC used.
+ if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then
+ prefix="${default_nic}AND"
+ fi
fi
fi
fi
+ set +x
for tag in "${test_tag_array[@]}"; do
if [[ "${tag}" == "!"* ]]; then
# Exclude tags are not prefixed.
@@ -1114,68 +1212,101 @@ function select_topology () {
case_text="${NODENESS}_${FLAVOR}"
case "${case_text}" in
- "1n_vbox")
+ "1n_aws")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
+ TOPOLOGIES_TAGS="1_node_single_link_topo"
+ ;;
+ "1n_c6in")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
+ TOPOLOGIES_TAGS="1_node_single_link_topo"
+ ;;
+ "1n_alt" | "1n_spr")
TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "1n_skx" | "1n_tx2")
+ "1n_vbox")
TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "2n_skx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
- TOPOLOGIES_TAGS="2_node_*_link_topo"
- ;;
- "2n_zn2")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
- TOPOLOGIES_TAGS="2_node_*_link_topo"
+ "2n_aws")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
+ TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_skx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
- TOPOLOGIES_TAGS="3_node_*_link_topo"
+ "2n_c7gn")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml )
+ TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_icx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml )
- TOPOLOGIES_TAGS="3_node_*_link_topo"
+ "2n_c6in")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
+ TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
"2n_clx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml )
TOPOLOGIES_TAGS="2_node_*_link_topo"
;;
"2n_icx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml )
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml )
TOPOLOGIES_TAGS="2_node_*_link_topo"
;;
- "2n_dnv")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
+ "2n_spr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml )
+ TOPOLOGIES_TAGS="2_node_*_link_topo"
+ ;;
+ "2n_tx2")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_dnv")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
+ "2n_zn2")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml )
+ TOPOLOGIES_TAGS="2_node_*_link_topo"
+ ;;
+ "3n_alt")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "3n_tsh")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
+ "3n_aws")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "2n_tx2")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
- TOPOLOGIES_TAGS="2_node_single_link_topo"
+ "3n_c7gn")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "3n_alt")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt*.yaml )
+ "3n_c6in")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "1n_aws")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
- TOPOLOGIES_TAGS="1_node_single_link_topo"
+ "3n_icx")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml )
+ # Trailing underscore is needed to distinguish from 3n_icxd.
+ TOPOLOGIES_TAGS="3_node_*_link_topo"
;;
- "2n_aws")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
+ "3n_icxd")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
+ ;;
+ "3n_snr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
+ ;;
+ "3n_tsh")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
+ ;;
+ "3na_spr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml )
+ TOPOLOGIES_TAGS="3_node_*_link_topo"
+ ;;
+ "3nb_spr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml )
+ TOPOLOGIES_TAGS="3_node_*_link_topo"
+ ;;
+ "2n_x"*)
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_"${FLAVOR}"*.yaml )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_aws")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
+ "3n_x"*)
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_"${FLAVOR}"*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
*)
@@ -1195,6 +1326,9 @@ function set_environment_variables () {
# Depending on testbed topology, overwrite defaults set in the
# resources/libraries/python/Constants.py file
#
+ # Only to be called from the reservation function,
+ # as resulting values may change based on topology data.
+ #
# Variables read:
# - TEST_CODE - String affecting test selection, usually jenkins job name.
# Variables set:
@@ -1203,17 +1337,34 @@ function set_environment_variables () {
set -exuo pipefail
case "${TEST_CODE}" in
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
- # T-Rex 2.88+ workaround for ENA NICs.
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
export TREX_RX_DESCRIPTORS_COUNT=1024
export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
export TREX_CORE_COUNT=6
# Settings to prevent duration stretching.
export PERF_TRIAL_STL_DELAY=0.1
;;
- *"2n-zn2"*)
+ *"2n-c7gn" | *"3n-c7gn")
+ export TREX_RX_DESCRIPTORS_COUNT=1024
+ export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+ export TREX_CORE_COUNT=6
+ # Settings to prevent duration stretching.
+ export PERF_TRIAL_STL_DELAY=0.1
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ export TREX_RX_DESCRIPTORS_COUNT=1024
+ export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+ export TREX_CORE_COUNT=6
+ # Settings to prevent duration stretching.
+ export PERF_TRIAL_STL_DELAY=0.1
+ ;;
+ *"2n-zn2")
# Maciek's workaround for Zen2 with lower amount of cores.
export TREX_CORE_COUNT=14
+ ;;
+ *"2n-x-"* | *"3n-x-"* )
+ export TREX_CORE_COUNT=2
+ ;;
esac
}
@@ -1232,7 +1383,8 @@ function untrap_and_unreserve_testbed () {
# Variables read (by inner function):
# - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
# - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
- # Variables written:
+ # Variables set:
+ # - TERRAFORM_MODULE_DIR - Terraform module directory.
# - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
# Trap unregistered:
# - EXIT - Failure to untrap is reported, but ignored otherwise.
@@ -1253,7 +1405,16 @@ function untrap_and_unreserve_testbed () {
die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
}
case "${TEST_CODE}" in
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+ terraform_destroy || die "Failed to call terraform destroy."
+ ;;
+ *"2n-c7gn" | *"3n-c7gn")
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
+ terraform_destroy || die "Failed to call terraform destroy."
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
terraform_destroy || die "Failed to call terraform destroy."
;;
*)
diff --git a/resources/libraries/bash/function/device.sh b/resources/libraries/bash/function/device.sh
index 86d482068a..4d39cd2de6 100644
--- a/resources/libraries/bash/function/device.sh
+++ b/resources/libraries/bash/function/device.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -173,13 +173,17 @@ function clean_environment () {
}
# Rebind interfaces back to kernel drivers.
+ i=0
for ADDR in ${TG_PCIDEVS[@]}; do
- DRIVER="${TG_DRIVERS[0]}"
+ DRIVER="${TG_DRIVERS[${i}]}"
bind_interfaces_to_driver || die
+ ((i++))
done
+ i=0
for ADDR in ${DUT1_PCIDEVS[@]}; do
- DRIVER="${DUT1_DRIVERS[0]}"
+ DRIVER="${DUT1_DRIVERS[${i}]}"
bind_interfaces_to_driver || die
+ ((i++))
done
}
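
The fix replaces the hard-coded index 0 with a counter, so each PCI address is rebound to the driver it originally had. The parallel-array iteration in isolation, with invented data:

    # Parallel-array iteration matching the fix above; data is invented.
    TG_PCIDEVS=("0000:3b:00.0" "0000:d8:00.0")
    TG_DRIVERS=("i40e" "mlx5_core")
    i=0
    for ADDR in "${TG_PCIDEVS[@]}"; do
        DRIVER="${TG_DRIVERS[${i}]}"   # per-device driver, not element 0
        echo "rebind ${ADDR} -> ${DRIVER}"
        ((i++))
    done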
@@ -312,14 +316,24 @@ function get_available_interfaces () {
dut1_netdev=(ens5 enp175)
ports_per_nic=2
;;
- "1n_tx2")
+ "1n_alt")
# Add Intel Corporation XL710/X710 Virtual Function to the
# whitelist.
- pci_id="0x154c"
- tg_netdev=(enp5)
- dut1_netdev=(enp145)
+ # Add MT2892 Family [ConnectX-6 Dx] Virtual Function to the
+ # whitelist.
+ pci_id="0x154c\|0x101e"
+ tg_netdev=(enp1s0f0 enp1s0f1 enP1p1s0f0)
+ dut1_netdev=(enP3p2s0f0 enP3p2s0f1 enP1p1s0f1)
ports_per_nic=2
;;
+ "1n_spr")
+ # Add Intel Corporation E810 Virtual Function to the
+ # whitelist.
+ pci_id="0x1889"
+ tg_netdev=(enp42s0 enp44s0)
+ dut1_netdev=(enp63s0 enp61s0)
+ ports_per_nic=1
+ ;;
"1n_vbox")
# Add Intel Corporation 82545EM Gigabit Ethernet Controller to the
# whitelist.
@@ -495,7 +509,10 @@ function get_csit_model () {
"0x1572"|"0x154c")
MODEL="Intel-X710"
;;
- "*")
+ "0x101e")
+ MODEL="Mellanox-CX6DX"
+ ;;
+ *)
MODEL="virtual"
esac
fi
@@ -697,7 +714,7 @@ function set_env_variables () {
CSIT_TG_HOST="$(hostname --all-ip-addresses | awk '{print $1}')" || {
die "Reading hostname IP address failed!"
}
- CSIT_TG_PORT="${DCR_PORTS[tg]#*:}"
+ CSIT_TG_PORT="${DCR_PORTS[tg]##*:}"
CSIT_TG_UUID="${DCR_UUIDS[tg]}"
CSIT_TG_ARCH="$(uname -i)" || {
die "Reading machine architecture failed!"
@@ -705,7 +722,7 @@ function set_env_variables () {
CSIT_DUT1_HOST="$(hostname --all-ip-addresses | awk '{print $1}')" || {
die "Reading hostname IP address failed!"
}
- CSIT_DUT1_PORT="${DCR_PORTS[dut1]#*:}"
+ CSIT_DUT1_PORT="${DCR_PORTS[dut1]##*:}"
CSIT_DUT1_UUID="${DCR_UUIDS[dut1]}"
CSIT_DUT1_ARCH="$(uname -i)" || {
die "Reading machine architecture failed!"
diff --git a/resources/libraries/bash/function/docs.sh b/resources/libraries/bash/function/docs.sh
deleted file mode 100644
index cb3f36d21a..0000000000
--- a/resources/libraries/bash/function/docs.sh
+++ /dev/null
@@ -1,267 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -exuo pipefail
-
-
-function die_on_docs_error () {
-
- # Source this fragment if you want to abort on any failure.
- #
- # Variables read:
- # - DOCS_EXIT_STATUS - Set by a generation function.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- if [[ "${DOCS_EXIT_STATUS}" != "0" ]]; then
- die "Failed to generate docs!" "${DOCS_EXIT_STATUS}"
- fi
-}
-
-function generate_docs () {
-
- # Generate docs content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of docs generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/doc_gen || die "Pushd failed!"
-
- BUILD_DIR="_build"
-
- # Remove the old build:
- rm -rf ${BUILD_DIR} || true
- rm -rf /tmp/tmp-csit* || true
-
- export WORKING_DIR=$(mktemp -d /tmp/tmp-csitXXX) || die "export failed"
-
- # Create working directories
- mkdir -p "${BUILD_DIR}" || die "Mkdir failed!"
- mkdir -p "${WORKING_DIR}"/resources/libraries/python/ || die "Mkdir failed!"
- mkdir -p "${WORKING_DIR}"/resources/libraries/robot/ || die "Mkdir failed!"
- mkdir -p "${WORKING_DIR}"/tests/ || die "Mkdir failed!"
-
- # Copy the Sphinx source files:
- cp -r src/* ${WORKING_DIR}/ || die "Copy the Sphinx source files failed!"
-
- # Copy the source files to be processed:
- from_dir="${RESOURCES_DIR}/libraries/python/"
- to_dir="${WORKING_DIR}/resources/libraries/python/"
- dirs="${from_dir} ${to_dir}"
- rsync -ar --include='*/' --include='*.py' --exclude='*' ${dirs} || {
- die "rSync failed!"
- }
-
- from_dir="${RESOURCES_DIR}/libraries/robot/"
- to_dir="${WORKING_DIR}/resources/libraries/robot/"
- dirs="${from_dir} ${to_dir}"
- rsync -ar --include='*/' --include '*.robot' --exclude '*' ${dirs} || {
- die "rSync failed!"
- }
- touch ${to_dir}/index.robot || {
- die "Touch index.robot file failed!"
- }
-
- from_dir="${CSIT_DIR}/tests/"
- to_dir="${WORKING_DIR}/tests/"
- dirs="${from_dir} ${to_dir}"
- rsync -ar --include='*/' --include '*.robot' --exclude '*' ${dirs} || {
- die "rSync failed!"
- }
-
- # to remove GPL licence section
- find "${WORKING_DIR}/tests/" -type f -exec sed -i '/\*\*\*/,$!d' {} \;
-
- find ${WORKING_DIR}/ -type d -exec echo {} \; -exec touch {}/__init__.py \;
-
- python3 gen_rst.py || die "Generate .rst files failed!"
-
- # Generate the documentation:
- DATE=$(date -u '+%d-%b-%Y') || die "Get date failed!"
-
- all_options=("-v")
- all_options+=("-c" "${WORKING_DIR}")
- all_options+=("-a")
- all_options+=("-b" "html")
- all_options+=("-E")
- all_options+=("-D" "version="${GERRIT_BRANCH:-master}"")
- all_options+=("${WORKING_DIR}" "${BUILD_DIR}/")
-
- set +e
- sphinx-build "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-}
-
-function generate_report () {
-
- # Generate report content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # - ${GERRIT_BRANCH} - Gerrit branch used for release tagging.
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of report generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
-
- # Set default values in config array.
- typeset -A CFG
- typeset -A DIR
-
- DIR[WORKING]="_tmp"
-
- # Create working directories.
- mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
-
- export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
-
- all_options=("pal.py")
- all_options+=("--specification" "specifications/report")
- all_options+=("--release" "${GERRIT_BRANCH:-master}")
- all_options+=("--week" $(date "+%V"))
- all_options+=("--logging" "INFO")
- all_options+=("--force")
-
- set +e
- python "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-
-}
-
-function generate_report_local () {
-
- # Generate report from local content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # - ${CSIT_REPORT_FILENAME} - Source filename.
- # - ${CSIT_REPORT_DIRECTORYNAME} - Source directory.
- # - ${CSIT_REPORT_INSTALL_DEPENDENCIES} - Whether to install dependencies.
- # - ${CSIT_REPORT_INSTALL_LATEX} - Whether to install latex.
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of report generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
-
- filename="${CSIT_REPORT_FILENAME-}"
- directoryname="${CSIT_REPORT_DIRECTORYNAME-}"
- install_dependencies="${CSIT_REPORT_INSTALL_DEPENDENCIES:-1}"
- install_latex="${CSIT_REPORT_INSTALL_LATEX:-0}"
-
- # Set default values in config array.
- typeset -A CFG
- typeset -A DIR
-
- DIR[WORKING]="_tmp"
-
- # Install system dependencies.
- if [[ ${install_dependencies} -eq 1 ]] ;
- then
- sudo apt -y update || die "APT update failed!"
- sudo apt -y install libxml2 libxml2-dev libxslt-dev \
- build-essential zlib1g-dev unzip || die "APT install failed!"
- fi
-
- if [[ ${install_latex} -eq 1 ]] ;
- then
- sudo apt -y update || die "APT update failed!"
- sudo apt -y install xvfb texlive-latex-recommended \
- texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra \
- latexmk wkhtmltopdf inkscape || die "APT install failed!"
- target="/usr/share/texlive/texmf-dist/web2c/texmf.cnf"
- sudo sed -i.bak 's/^\(main_memory\s=\s\).*/\110000000/' "${target}" || {
- die "Patching latex failed!"
- }
- fi
-
- # Create working directories.
- mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
-
- export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
-
- all_options=("pal.py")
- all_options+=("--specification" "specifications/report_local")
- all_options+=("--release" "${RELEASE:-master}")
- all_options+=("--week" "${WEEK:-1}")
- all_options+=("--logging" "INFO")
- all_options+=("--force")
- if [[ ${filename} != "" ]]; then
- all_options+=("--input-file" "${filename}")
- fi
- if [[ ${directoryname} != "" ]]; then
- all_options+=("--input-directory" "${directoryname}")
- fi
-
- set +e
- python "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-
-}
-
-function generate_trending () {
-
- # Generate trending content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of trending generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
-
- # Set default values in config array.
- typeset -A DIR
-
- DIR[WORKING]="_tmp"
-
- # Create working directories.
- mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
-
- export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
-
- all_options=("pal.py")
- all_options+=("--specification" "specifications/trending")
- all_options+=("--logging" "INFO")
- all_options+=("--force")
-
- set +e
- python "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-
-}
diff --git a/resources/libraries/bash/function/dpdk.sh b/resources/libraries/bash/function/dpdk.sh
index f013683659..86abb84a02 100644
--- a/resources/libraries/bash/function/dpdk.sh
+++ b/resources/libraries/bash/function/dpdk.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2022 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -96,19 +96,8 @@ function dpdk_compile () {
# Configure generic build - the same used by VPP
meson_options="${meson_options} -Dplatform=generic"
- # Patch L3FWD.
- sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128"
- sed_rxd+="/#define RTE_TEST_RX_DESC_DEFAULT 1024/g"
- sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512"
- sed_txd+="/#define RTE_TEST_TX_DESC_DEFAULT 1024/g"
- sed_file="./main.c"
- pushd examples/l3fwd || die "Pushd failed"
- sed -i "${sed_rxd}" "${sed_file}" || die "Patch failed"
- sed -i "${sed_txd}" "${sed_file}" || die "Patch failed"
- popd || die "Popd failed"
-
# Compile using Meson and Ninja.
- meson ${meson_options} build || {
+ meson setup ${meson_options} build || {
die "Failed to compile DPDK!"
}
ninja -C build || die "Failed to compile DPDK!"
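
Newer Meson releases deprecate invoking meson without an explicit subcommand, hence `meson setup`. The resulting build flow in isolation (only the -Dplatform option from this patch is shown):

    # Explicit Meson configure plus Ninja build, as used above.
    meson setup -Dplatform=generic build || {
        echo "Failed to configure DPDK!" >&2
        exit 1
    }
    ninja -C build || { echo "Failed to compile DPDK!" >&2; exit 1; }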
@@ -201,7 +190,6 @@ function dpdk_l3fwd_compile () {
#
# Variables read:
# - DPDK_DIR - Path to DPDK framework.
- # - CSIT_DIR - Path to CSIT framework.
# Functions called:
# - die - Print to stderr and exit.
@@ -209,14 +197,7 @@ function dpdk_l3fwd_compile () {
pushd "${DPDK_DIR}" || die "Pushd failed"
# Patch L3FWD.
- sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128"
- sed_rxd+="/#define RTE_TEST_RX_DESC_DEFAULT 2048/g"
- sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512"
- sed_txd+="/#define RTE_TEST_TX_DESC_DEFAULT 2048/g"
- sed_file="./main.c"
pushd examples/l3fwd || die "Pushd failed"
- sed -i "${sed_rxd}" "${sed_file}" || die "Patch failed"
- sed -i "${sed_txd}" "${sed_file}" || die "Patch failed"
chmod +x ${1} && source ${1} || die "Patch failed"
popd || die "Popd failed"
diff --git a/resources/libraries/bash/function/eb_version.sh b/resources/libraries/bash/function/eb_version.sh
new file mode 100644
index 0000000000..0393030065
--- /dev/null
+++ b/resources/libraries/bash/function/eb_version.sh
@@ -0,0 +1,159 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+
+function die_on_error () {
+
+ # Source this fragment if you want to abort on any failure.
+ #
+ # Variables read:
+ # - ${CODE_EXIT_STATUS} - Exit status of report generation.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ if [[ "${CODE_EXIT_STATUS}" != "0" ]]; then
+ die "Failed to generate docs!" "${CODE_EXIT_STATUS}"
+ fi
+}
+
+
+function eb_version_deploy () {
+
+ # Deploy Elastic Beanstalk CDash content.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # - ${TERRAFORM_OUTPUT_VAL} - Terraform output value.
+ # Variables set:
+ # - ${CODE_EXIT_STATUS} - Exit status of report generation.
+ # - ${TERRAFORM_OUTPUT_VAR} - Register Terraform output variable name.
+ # Functions called:
+ # - eb_version_verify - Build and verify EB version.
+ # - terraform_apply - Apply EB version by Terraform.
+ # - terraform_output - Get the application name string from Terraform.
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ eb_version_build_verify || die "Failed to call Elastic Beanstalk verify!"
+ terraform_apply || die "Failed to call Terraform apply!"
+
+ TERRAFORM_OUTPUT_VAR="application_version"
+ terraform_output || die "Failed to call Terraform output!"
+
+ #aws --region eu-central-1 elasticbeanstalk update-environment \
+ # --environment-name fdio-csit-dash-env \
+ # --version-label "${TERRAFORM_OUTPUT_VAL}"
+}
+
+
+function eb_version_build_verify () {
+
+ # Build and verify Elastic Beanstalk CDash integrity.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # Variables set:
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module sub-directory.
+ # Functions called:
+ # - hugo_init_modules - Initialize Hugo modules.
+ # - hugo_build_site - Build static site with Hugo.
+ # - terraform_init - Initialize Terraform modules.
+ # - terraform_validate - Validate Terraform code.
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ if ! installed zip; then
+ die "Please install zip!"
+ fi
+
+ hugo_init_modules || die "Failed to call Hugo initialize!"
+ hugo_build_site || die "Failed to call Hugo build!"
+
+ pushd "${CSIT_DIR}"/csit.infra.dash || die "Pushd failed!"
+ pushd app || die "Pushd failed!"
+ find . -type d -name "__pycache__" -exec rm -rf "{}" \;
+ find . -type d -name ".webassets-cache" -exec rm -rf "{}" \;
+ zip -r ../app.zip . || die "Compress failed!"
+ popd || die "Popd failed!"
+ popd || die "Popd failed!"
+
+ TERRAFORM_MODULE_DIR="terraform-aws-fdio-csit-dash-app-base"
+
+ export TF_VAR_application_version="${BUILD_ID}"
+ terraform_init || die "Failed to call Terraform init!"
+ terraform_validate || die "Failed to call Terraform validate!"
+}
+
+
+function generate_report () {
+
+ # Generate report content.
+ #
+ # Variable read:
+ # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
+ # - ${GERRIT_BRANCH} - Gerrit branch used for release tagging.
+ # Variables set:
+ # - ${CODE_EXIT_STATUS} - Exit status of report generation.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
+
+ # Set default values in config array.
+ typeset -A CFG
+ typeset -A DIR
+
+ DIR[WORKING]="_tmp"
+
+ # Create working directories.
+ mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
+
+ export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
+
+ all_options=("pal.py")
+ all_options+=("--specification" "specifications/report")
+ all_options+=("--release" "${GERRIT_BRANCH:-master}")
+ all_options+=("--week" $(date "+%V"))
+ all_options+=("--logging" "INFO")
+ all_options+=("--force")
+
+ set +e
+ python "${all_options[@]}"
+ CODE_EXIT_STATUS="$?"
+ set -e
+}
+
+function installed () {
+
+ # Check if the given utility is installed. Fail if not installed.
+ #
+ # Arguments:
+ # - ${1} - Utility to check.
+ # Returns (implicitly):
+ # - 0 - If command is installed.
+ # - 1 - If command is not installed.
+
+ set -exuo pipefail
+
+ command -v "${1}"
+}
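
The `installed` helper works because the exit status of `command -v` becomes the function's own return status. Usage in brief, mirroring the zip check earlier in this file:

    # The helper's implicit return status drives the install check.
    function installed () {
        command -v "${1}"
    }
    if ! installed zip; then
        echo "Please install zip!" >&2
    fi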
diff --git a/resources/libraries/bash/function/gather.sh b/resources/libraries/bash/function/gather.sh
index 4958e5251b..e432777e32 100644
--- a/resources/libraries/bash/function/gather.sh
+++ b/resources/libraries/bash/function/gather.sh
@@ -1,5 +1,5 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Copyright (c) 2021 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2023 PANTHEON.tech and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -26,7 +26,7 @@ function gather_build () {
# Variables read:
# - TEST_CODE - String affecting test selection, usually jenkins job name.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# Variables set:
# - DUT - CSIT test/ subdirectory containing suites to execute.
# Directories updated:
@@ -92,7 +92,8 @@ function gather_dpdk () {
then
echo "Downloading latest DPDK packages from repo..."
# URL is not in quotes, calling command from variable keeps them.
- wget_command=("wget" "--no-check-certificate" "-nv" "-O" "-")
+ wget_command=("wget" "--no-check-certificate" "--compression=auto")
+ wget_command+=("-nv" "-O" "-")
wget_command+=("${dpdk_repo}")
dpdk_stable_ver="$("${wget_command[@]}" | grep -v "2015"\
| grep -Eo 'dpdk-[^\"]+xz' | tail -1)" || {
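
Building the command as a bash array keeps every option a separate argv word even if a value contains spaces, and lets later code append options conditionally. The idiom in isolation (URL is a placeholder):

    # Array-built command idiom; the URL is a placeholder.
    wget_command=("wget" "--no-check-certificate" "--compression=auto")
    wget_command+=("-nv" "-O" "-")
    wget_command+=("https://example.org/dpdk/")
    "${wget_command[@]}" | head -5   # each element is exactly one argument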
@@ -130,7 +131,7 @@ function gather_vpp () {
# Variables read:
# - BASH_FUNCTION_DIR - Bash directory with functions.
# - TEST_CODE - The test selection string from environment or argument.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - CSIT_DIR - Path to existing root of local CSIT git repository.
# Variables set:
# - VPP_VERSION - VPP stable version under test.
@@ -173,7 +174,7 @@ function gather_vpp () {
;;
"vpp-csit-"*)
# Shorten line.
- pgks="${PKG_SUFFIX}"
+ pkgs="${PKG_SUFFIX}"
# Use locally built packages.
mv "${DOWNLOAD_DIR}"/../*vpp*."${pkgs}" "${DOWNLOAD_DIR}"/ || {
die "Move command failed."
diff --git a/resources/libraries/bash/function/hugo.sh b/resources/libraries/bash/function/hugo.sh
new file mode 100644
index 0000000000..052e8333fb
--- /dev/null
+++ b/resources/libraries/bash/function/hugo.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+
+function go_install () {
+
+ # Install Go.
+
+ OS_ARCH=$(uname -m) || die "Failed to get arch."
+ case "${OS_ARCH}" in
+ x86_64) architecture="amd64" ;;
+ aarch64) architecture="arm64" ;;
+ esac
+
+ go_version="go1.20.2.linux-${architecture}.tar.gz"
+ go_url="https://go.dev/dl"
+ wget "${go_url}/${go_version}"
+ rm -rf "/usr/local/go"
+ tar -C "/usr/local" -xzf "go1.20.2.linux-${architecture}.tar.gz"
+ rm "go1.20.2.linux-${architecture}.tar.gz"
+ export PATH=$PATH:/usr/local/go/bin
+}
+
+
+function hugo_build_site () {
+
+ # Build site via Hugo.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ if ! installed hugo; then
+ die "Please install Hugo!"
+ fi
+
+ pushd "${CSIT_DIR}"/docs || die "Pushd failed!"
+ hugo || die "Failed to run Hugo build!"
+ popd || die "Popd failed!"
+}
+
+
+function hugo_init_modules () {
+
+ # Initialize Hugo modules.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ if ! installed hugo; then
+ die "Please install Hugo!"
+ fi
+
+ hugo_book_url="github.com/alex-shpak/hugo-book"
+ hugo_book_version="v0.0.0-20230424134111-d86d5e70c7c0"
+ hugo_book_link="${hugo_book_url}@${hugo_book_version}"
+ pushd "${CSIT_DIR}"/docs || die "Pushd failed!"
+ export PATH=$PATH:/usr/local/go/bin
+ hugo mod get "${hugo_book_link}" || die "Failed to run Hugo mod!"
+ popd || die "Popd failed!"
+}
+
+
+function hugo_install () {
+
+ # Install Hugo Extended.
+
+ OS_ARCH=$(uname -m) || die "Failed to get arch."
+ case "${OS_ARCH}" in
+ x86_64) architecture="amd64" ;;
+ aarch64) architecture="arm64" ;;
+ *) die "Unsupported architecture: ${OS_ARCH}" ;;
+ esac
+
+ hugo_version="v0.111.3/hugo_extended_0.111.3_linux-${architecture}.deb"
+ hugo_url="https://github.com/gohugoio/hugo/releases/download"
+ hugo_link="${hugo_url}/${hugo_version}"
+ wget -O "hugo.deb" "${hugo_link}" || die "Failed to download Hugo!"
+ dpkg -i "hugo.deb" || die "Failed to install Hugo!"
+ rm "hugo.deb" || die "Failed to remove Hugo package!"
+}
+
+
+function installed () {
+
+ # Check if the given utility is installed.
+ #
+ # Arguments:
+ # - ${1} - Utility to check.
+ # Returns (implicitly):
+ # - 0 - If command is installed.
+ # - 1 - If command is not installed.
+
+ set -exuo pipefail
+
+ command -v "${1}"
+}
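+
+# A hypothetical end-to-end usage from an entry script (an illustration,
+# not part of this patch; it assumes common.sh, which defines die,
+# has been sourced already):
+#
+#   go_install || die
+#   hugo_install || die
+#   hugo_init_modules || die
+#   hugo_build_site || die
+#
+# go_install runs first, since hugo_init_modules needs Go to fetch modules.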
diff --git a/resources/libraries/bash/function/nginx.sh b/resources/libraries/bash/function/nginx.sh
index 122af23852..a2cf8e6514 100755
--- a/resources/libraries/bash/function/nginx.sh
+++ b/resources/libraries/bash/function/nginx.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Intel and/or its affiliates.
+# Copyright (c) 2023 Intel and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -21,7 +21,7 @@ function gather_nginx () {
# Ensure stable NGINX archive is downloaded.
#
# Variables read:
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - NGINX_VER - Version number of Nginx.
set -exuo pipefail
pushd "${DOWNLOAD_DIR}" || die "Pushd failed."
@@ -53,7 +53,7 @@ function common_dirs () {
# Variables set:
# - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
# - CSIT_DIR - Path to CSIT framework.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - NGINX_DIR - Path to NGINX framework.
# - NGINX_VER - Version number of Nginx.
# Functions called:
@@ -121,7 +121,7 @@ function nginx_extract () {
# Variables read:
# - NGINX_DIR - Path to NGINX framework.
# - CSIT_DIR - Path to CSIT framework.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - NGINX_VER - Version number of Nginx.
# Functions called:
# - die - Print to stderr and exit.
diff --git a/resources/libraries/bash/function/per_patch.sh b/resources/libraries/bash/function/per_patch.sh
index 2149d79b52..44bd57da80 100644
--- a/resources/libraries/bash/function/per_patch.sh
+++ b/resources/libraries/bash/function/per_patch.sh
@@ -1,5 +1,5 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Copyright (c) 2022 PANTHEON.tech s.r.o.
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2023 PANTHEON.tech s.r.o.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -18,61 +18,14 @@ set -exuo pipefail
# Generally, the functions assume "common.sh" library has been sourced already.
# Keep functions ordered alphabetically, please.
-function archive_test_results () {
- # Arguments:
- # - ${1}: Directory to archive to. Required. Parent has to exist.
- # Variable set:
- # - TARGET - Target directory.
- # Variables read:
- # - ARCHIVE_DIR - Path to where robot result files are created in.
- # - VPP_DIR - Path to existing directory, root for to relative paths.
- # Directories updated:
- # - ${1} - Created, and robot and parsing files are moved/created there.
- # Functions called:
- # - die - Print to stderr and exit, defined in common.sh
-
- set -exuo pipefail
-
- cd "${VPP_DIR}" || die "Change directory command failed."
- TARGET="$(readlink -f "$1")"
- mkdir -p "${TARGET}" || die "Directory creation failed."
- file_list=("output.xml" "log.html" "report.html")
- file_list+=("tests" "generated_output_raw.tar.gz")
- for filename in "${file_list[@]}"; do
- mv "${ARCHIVE_DIR}/${filename}" "${TARGET}/${filename}" || {
- die "Attempt to move '${filename}' failed."
- }
- done
-}
-
-
-function archive_parse_test_results () {
-
- # Arguments:
- # - ${1}: Directory to archive to. Required. Parent has to exist.
- # Variables read:
- # - TARGET - Target directory.
- # Functions called:
- # - die - Print to stderr and exit, defined in common.sh
- # - archive_test_results - Archiving results.
- # - parse_bmrr_results - See definition in this file.
-
- set -exuo pipefail
-
- archive_test_results "$1" || die
- parse_bmrr_results "${TARGET}" || {
- die "The function should have died on error."
- }
-}
-
-
-function build_vpp_ubuntu_amd64 () {
+function build_vpp_ubuntu () {
# This function uses "make pkg-verify" to build VPP with all dependencies
# in an ARCH/OS aware way. The VPP repo is the SSOT for the build mechanics
# and CSIT only consumes the artifacts. This way, if VPP introduces a change
# in its build mechanics, it will not be blocked by the CSIT repo.
+ #
# Arguments:
# - ${1} - String identifier for echo, can be unset.
# Variables read:
@@ -116,7 +69,6 @@ function compare_test_results () {
# of parent build.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
- # - parse_bmrr_results - See definition in this file.
# Exit code:
# - 0 - If the comparison utility sees no regression (nor data error).
# - 1 - If the comparison utility sees a regression (or data error).
@@ -137,50 +89,109 @@ function initialize_csit_dirs () {
# Variables read:
# - VPP_DIR - Path to WORKSPACE, parent of created directories.
# Directories created:
- # - csit_current - Holding test results of the patch under test (PUT).
- # - csit_parent - Holding test results of parent of PUT.
+ # - csit_{part} - One per argument; see the caller for how each is used.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "csit_current" "csit_parent" || {
- die "Directory deletion failed."
- }
- mkdir -p "csit_current" "csit_parent" || {
- die "Directory creation failed."
- }
+ while true; do
+ if [[ "${#}" -lt "1" ]]; then
+ # All directories created.
+ break
+ fi
+ name_part="${1}" || die
+ shift || die
+ dir_name="csit_${name_part}" || die
+ rm -rf "${dir_name}" || die "Directory deletion failed."
+ mkdir -p "${dir_name}" || die "Directory creation failed."
+ done
}
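+
+# For example (a hypothetical call, names are illustrative only):
+#   initialize_csit_dirs "early" "middle" "late" || die
+# creates fresh csit_early, csit_middle and csit_late directories.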
-function parse_bmrr_results () {
+function main_bisect_loop () {
- # Currently "parsing" is just two greps.
- # TODO: Re-use PAL parsing code, make parsing more general and centralized.
+ # Perform the iterative part of a bisect entry script.
+ #
+ # The logic is too complex to remain in the entry script.
#
+ # At the start, the loop assumes git bisect old/new has just been executed,
+ # and it has been verified that more iterations are needed.
+ # Each iteration cleans the build directory and builds the new mid commit.
+ # Then a testbed is reserved, tests are run, and the testbed is unreserved.
+ # Results are moved from the default to the archive location
+ # (indexed by iteration number) and analyzed.
+ # The new adjective ("old" or "new") is selected,
+ # and git bisect with that adjective is executed.
+ # The symlinks csit_early and csit_late are updated to the tightest bounds.
+ # The git.log file is examined, and if the bisect is finished, the loop ends.
+
+ iteration=0
+ while true
+ do
+ let iteration+=1
+ git clean -dffx "build"/ "build-root"/ || die
+ build_vpp_ubuntu "MIDDLE" || die
+ select_build "build-root" || die
+ check_download_dir || die
+ reserve_and_cleanup_testbed || die
+ run_robot || die
+ move_test_results "csit_middle/${iteration}" || die
+ untrap_and_unreserve_testbed || die
+ rm -vf "csit_mid" || die
+ ln -s -T "csit_middle/${iteration}" "csit_mid" || die
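+ # Exit code convention of compare_bisect.py, as relied on below:
+ # 0 means the middle build behaves like the old bound, 3 means it
+ # behaves like the new bound, anything else is an error.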
+ set +e
+ python3 "${TOOLS_DIR}/integrated/compare_bisect.py"
+ bisect_rc="${?}"
+ set -e
+ if [[ "${bisect_rc}" == "3" ]]; then
+ adjective="new"
+ rm -v "csit_late" || die
+ ln -s -T "csit_middle/${iteration}" "csit_late" || die
+ elif [[ "${bisect_rc}" == "0" ]]; then
+ adjective="old"
+ rm -v "csit_early" || die
+ ln -s -T "csit_middle/${iteration}" "csit_early" || die
+ else
+ die "Unexpected return code: ${bisect_rc}"
+ fi
+ git bisect "${adjective}" | tee "git.log" || die
+ git describe || die
+ git status || die
+ if head -n 1 "git.log" | cut -b -11 | grep -qF "Bisecting:"; then
+ echo "Still bisecting..."
+ else
+ echo "Bisecting done."
+ break
+ fi
+ done
+}
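+
+# A minimal sketch of the expected caller (an assumption for illustration,
+# not part of this patch): the entry script sets the initial bisect bounds
+# before invoking the loop.
+#
+#   git bisect start || die
+#   git bisect new "${TRIGGERED_COMMIT}" || die
+#   git bisect old "${BISECT_FROM_COMMIT}" | tee "git.log" || die
+#   main_bisect_loop || die
+#
+# TRIGGERED_COMMIT and BISECT_FROM_COMMIT are illustrative names only.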
+
+
+function move_test_results () {
+
# Arguments:
- # - ${1} - Path to (existing) directory holding robot output.xml result.
- # Files read:
- # - output.xml - From argument location.
- # Files updated:
- # - results.txt - (Re)created, in argument location.
+ # - ${1}: Directory to archive to. Required. Parent has to exist.
+ # Variable set:
+ # - TARGET - Target archival directory, equivalent to the argument.
+ # Variables read:
+ # - ARCHIVE_DIR - Path to where robot result files are created in.
+ # - VPP_DIR - Path to existing directory, root for relative paths.
+ # Directories updated:
+ # - ${1} - Created, and robot and parsing files are moved/created there.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
- rel_dir="$(readlink -e "${1}")" || die "Readlink failed."
- in_file="${rel_dir}/output.xml"
- out_file="${rel_dir}/results.txt"
- # TODO: Do we need to check echo exit code explicitly?
- echo "Parsing ${in_file} putting results into ${out_file}"
- echo "TODO: Re-use parts of PAL when they support subsample test parsing."
- pattern='Maximum Receive Rate trial results in .*'
- pattern+=' per second: .*\]</status>'
- grep -o "${pattern}" "${in_file}" | grep -o '\[.*\]' > "${out_file}" || {
- die "Some parsing grep command has failed."
- }
+ cd "${VPP_DIR}" || die "Change directory command failed."
+ TARGET="$(readlink -f "$1")"
+ mkdir -p "${TARGET}" || die "Directory creation failed."
+ file_list=("output.xml" "log.html" "report.html" "tests")
+ for filename in "${file_list[@]}"; do
+ mv "${ARCHIVE_DIR}/${filename}" "${TARGET}/${filename}" || die
+ done
}
@@ -209,56 +220,37 @@ function select_build () {
}
-function set_aside_commit_build_artifacts () {
+function set_aside_build_artifacts () {
- # Function is copying VPP built artifacts from actual checkout commit for
- # further use and clean git.
+ # Function used to save VPP .deb artifacts from the currently finished build.
+ #
+ # After the artifacts are copied to the target directory,
+ # the main git tree is cleaned up to not interfere with next build.
+ #
+ # Arguments:
+ # - ${1} - String to derive the target directory name from. Required.
# Variables read:
# - VPP_DIR - Path to existing directory, parent to accessed directories.
# Directories read:
# - build-root - Existing directory with built VPP artifacts (also DPDK).
# Directories updated:
# - ${VPP_DIR} - A local git repository, its build subdirectories get cleaned.
- # - build_current - Old contents removed, content of build-root copied here.
+ # - build_${1} - Old contents removed, content of build-root copied here.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "build_current" || die "Remove operation failed."
- mkdir -p "build_current" || die "Directory creation failed."
- mv "build-root"/*".deb" "build_current"/ || die "Move operation failed."
+ dir_name="build_${1}" || die
+ rm -rf "${dir_name}" || die "Remove operation failed."
+ mkdir -p "${dir_name}" || die "Directory creation failed."
+ mv "build-root"/*".deb" "${dir_name}"/ || die "Move operation failed."
# The previous build could have left some incompatible leftovers,
# e.g. DPDK artifacts of different version (in build/external).
# Also, there usually is a copy of dpdk artifact in build-root.
git clean -dffx "build"/ "build-root"/ || die "Git clean operation failed."
- # Finally, check out the parent commit.
- git checkout HEAD~ || die "Git checkout operation failed."
- # Display any other leftovers.
- git status || die "Git status operation failed."
-}
-
-
-function set_aside_parent_build_artifacts () {
-
- # Function is copying VPP built artifacts from parent checkout commit for
- # further use. Checkout to parent is not part of this function.
- # Variables read:
- # - VPP_DIR - Path to existing directory, parent of accessed directories.
- # Directories read:
- # - build-root - Existing directory with built VPP artifacts (also DPDK).
- # Directories updated:
- # - build_parent - Old directory removed, build-root debs moved here.
- # Functions called:
- # - die - Print to stderr and exit, defined in common.sh
-
- set -exuo pipefail
-
- cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "build_parent" || die "Remove failed."
- mkdir -p "build_parent" || die "Directory creation operation failed."
- mv "build-root"/*".deb" "build_parent"/ || die "Move operation failed."
+ git status || die
}
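+
+# A hypothetical per-patch sequence using the renamed functions
+# (illustration only, not part of this patch):
+#
+#   build_vpp_ubuntu "CURRENT" || die
+#   set_aside_build_artifacts "current" || die
+#   git checkout HEAD~ || die "Git checkout operation failed."
+#   build_vpp_ubuntu "PARENT" || die
+#   set_aside_build_artifacts "parent" || die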
diff --git a/resources/libraries/bash/function/terraform.sh b/resources/libraries/bash/function/terraform.sh
index 1766381f75..2a0e0ed2be 100644
--- a/resources/libraries/bash/function/terraform.sh
+++ b/resources/libraries/bash/function/terraform.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -22,8 +22,7 @@ function terraform_apply () {
#
# Variable read:
# - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
- # - ${NODENESS} - Node multiplicity of desired testbed.
- # - ${FLAVOR} - Node flavor string, see common.sh
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
set -exuo pipefail
@@ -32,24 +31,21 @@ function terraform_apply () {
fi
pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
- pushd "terraform-aws-${NODENESS}-${FLAVOR}-c5n" || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
export TF_LOG=INFO
- trap 'terraform_destroy' ERR || {
- die "Trap attempt failed, please cleanup manually. Aborting!"
- }
terraform apply -no-color -auto-approve || die "Terraform apply failed!"
popd || die "Popd failed!"
popd || die "Popd failed!"
}
+
function terraform_destroy () {
# Run terraform destroy command to tear down the module.
#
# Variable read:
# - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
- # - ${NODENESS} - Node multiplicity of desired testbed.
- # - ${FLAVOR} - Node flavor string, see common.sh
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
set -exuo pipefail
@@ -58,7 +54,7 @@ function terraform_destroy () {
fi
pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
- pushd "terraform-aws-${NODENESS}-${FLAVOR}-c5n" || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
export TF_LOG=INFO
terraform destroy -auto-approve -no-color || die "Terraform destroy failed!"
popd || die "Popd failed!"
@@ -72,37 +68,100 @@ function terraform_init () {
#
# Variable read:
# - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
- # - ${NODENESS} - Node multiplicity of desired testbed.
- # - ${FLAVOR} - Node flavor string, see common.sh
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
set -exuo pipefail
if ! installed terraform; then
- curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add -
- os="$(lsb_release -cs)" || die "Failed to get OS release!"
- repo="deb [arch=amd64] https://apt.releases.hashicorp.com ${os} main"
- sudo apt-add-repository "${repo}" || die "Failed to add repo!"
- apt update -y
- apt install -y terraform
- #die "Please install terraform!"
+ die "Please install terraform!"
fi
pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
- pushd "terraform-aws-${NODENESS}-${FLAVOR}-c5n" || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
- plugin_url="https://github.com/radekg/terraform-provisioner-ansible/"
- plugin_url+="releases/download/v2.5.0/"
- plugin_url+="terraform-provisioner-ansible-linux-amd64_v2.5.0"
- plugin_dir="${HOME}/.terraform.d/plugins/"
- plugin_path+="${plugin_dir}terraform-provisioner-ansible_v2.5.0"
+ #plugin_url="https://github.com/radekg/terraform-provisioner-ansible/"
+ #plugin_url+="releases/download/v2.5.0/"
+ #plugin_url+="terraform-provisioner-ansible-linux-amd64_v2.5.0"
+ #plugin_dir="${HOME}/.terraform.d/plugins/"
+ #plugin_path+="${plugin_dir}terraform-provisioner-ansible_v2.5.0"
- mkdir -p "${plugin_dir}" || die "Failed to create dir!"
- wget -O "${plugin_path}" "${plugin_url}" || die "Failed to download plugin!"
- chmod +x "${plugin_path}" || die "Failed to add execute rights!"
+ #mkdir -p "${plugin_dir}" || die "Failed to create dir!"
+ #wget -O "${plugin_path}" "${plugin_url}" || die "Failed to download plugin!"
+ #chmod +x "${plugin_path}" || die "Failed to add execute rights!"
+ rm -f terraform.tfstate || die "Failed to clear terraform state!"
export TF_LOG=INFO
terraform init || die "Failed to run terraform init!"
+ popd || die "Popd failed!"
+ popd || die "Popd failed!"
+}
+
+function terraform_install () {
+
+ # Install terraform.
+
+ OS_ARCH=$(uname -m) || die "Failed to get arch."
+ case "${OS_ARCH}" in
+ x86_64) architecture="amd64" ;;
+ aarch64) architecture="arm64" ;;
+ *) die "Unsupported architecture: ${OS_ARCH}" ;;
+ esac
+
+ terraform_version="1.4.2"
+ terraform_zip="terraform_${terraform_version}_linux_${architecture}.zip"
+ terraform_url="https://releases.hashicorp.com/terraform"
+ terraform_link="${terraform_url}/${terraform_version}/${terraform_zip}"
+ wget "${terraform_link}" || die "Failed to download Terraform!"
+ unzip "${terraform_zip}" || die "Failed to unzip Terraform!"
+ mv "terraform" "/usr/local/bin" || die "Failed to install Terraform!"
+ rm "${terraform_zip}" || die "Failed to remove Terraform archive!"
+}
+
+
+function terraform_output () {
+
+ # Run terraform output command to read a value from the module.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
+ # - ${TERRAFORM_OUTPUT_VAR} - Name of the terraform output value to read.
+ # Variable set:
+ # - ${TERRAFORM_OUTPUT_VAL} - The value read from terraform output.
+
+ set -exuo pipefail
+
+ if ! installed terraform; then
+ die "Please install terraform!"
+ fi
+
+ pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
+ TERRAFORM_OUTPUT_VAL=$(terraform output --raw "${TERRAFORM_OUTPUT_VAR}") || {
+ die "Terraform output failed!"
+ }
+ popd || die "Popd failed!"
+ popd || die "Popd failed!"
+}
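+
+# Hypothetical usage (variable values are illustrative only):
+#   export TERRAFORM_MODULE_DIR="terraform-aws-1n-c6in"
+#   export TERRAFORM_OUTPUT_VAR="mgmt_ip"
+#   terraform_output || die
+#   echo "${TERRAFORM_OUTPUT_VAL}"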
+
+
+function terraform_validate () {
+
+ # Run terraform validate command to check the module configuration.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
+
+ set -exuo pipefail
+
+ if ! installed terraform; then
+ die "Please install terraform!"
+ fi
+
+ pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
+ export TF_LOG=INFO
+ terraform validate || die "Terraform validate failed!"
popd || die "Popd failed!"
popd || die "Popd failed!"
}
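+
+# A hypothetical lifecycle sketch tying these functions together
+# (the order is an assumption, not mandated by this file):
+#   terraform_install || die
+#   terraform_init || die
+#   terraform_validate || die
+#   terraform_apply || die
+#   terraform_output || die
+#   terraform_destroy || die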