Diffstat (limited to 'resources/libraries/bash')
-rw-r--r--  resources/libraries/bash/config/defaults | 8
-rw-r--r--  resources/libraries/bash/entry/bisect.sh | 188
-rwxr-xr-x  resources/libraries/bash/entry/bootstrap_aws_eb_version_deploy.sh (renamed from resources/libraries/bash/entry/bootstrap_docs.sh) | 10
-rwxr-xr-x  resources/libraries/bash/entry/bootstrap_aws_eb_version_verify.sh (renamed from resources/libraries/bash/entry/bootstrap_trending.sh) | 10
-rwxr-xr-x  resources/libraries/bash/entry/bootstrap_report.sh | 6
-rwxr-xr-x  resources/libraries/bash/entry/bootstrap_verify_perf.sh | 11
-rwxr-xr-x  resources/libraries/bash/entry/bootstrap_vpp_device.sh | 9
-rwxr-xr-x  resources/libraries/bash/entry/patch_l3fwd_flip_routes | 2
-rw-r--r--  resources/libraries/bash/entry/per_patch_device.sh | 17
-rw-r--r--  resources/libraries/bash/entry/per_patch_perf.sh | 29
-rw-r--r--  resources/libraries/bash/entry/tox/README.txt | 14
-rw-r--r--  resources/libraries/bash/entry/tox/copyright_year.sh | 9
-rw-r--r--  resources/libraries/bash/entry/tox/doc_verify.sh | 51
-rw-r--r--  resources/libraries/bash/entry/tox/fix_copyright_year.sh | 55
-rw-r--r--  resources/libraries/bash/entry/tox/model_version.sh | 65
-rw-r--r--  resources/libraries/bash/function/README.txt | 20
-rw-r--r--  resources/libraries/bash/function/ansible.sh | 12
-rw-r--r--  resources/libraries/bash/function/artifacts.sh | 79
-rw-r--r--  resources/libraries/bash/function/branch.sh | 6
-rw-r--r--  resources/libraries/bash/function/common.sh | 539
-rw-r--r--  resources/libraries/bash/function/device.sh | 37
-rw-r--r--  resources/libraries/bash/function/docs.sh | 267
-rw-r--r--  resources/libraries/bash/function/dpdk.sh | 23
-rw-r--r--  resources/libraries/bash/function/eb_version.sh | 159
-rw-r--r--  resources/libraries/bash/function/gather.sh | 13
-rw-r--r--  resources/libraries/bash/function/hugo.sh | 113
-rwxr-xr-x  resources/libraries/bash/function/nginx.sh | 8
-rw-r--r--  resources/libraries/bash/function/per_patch.sh | 216
-rw-r--r--  resources/libraries/bash/function/terraform.sh | 115
-rwxr-xr-x  resources/libraries/bash/k8s_setup.sh | 65
-rw-r--r--  resources/libraries/bash/shell/k8s_utils.sh | 132
31 files changed, 1162 insertions(+), 1126 deletions(-)
diff --git a/resources/libraries/bash/config/defaults b/resources/libraries/bash/config/defaults
deleted file mode 100644
index 6a25a5f04d..0000000000
--- a/resources/libraries/bash/config/defaults
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/bash
-
-typeset -A cfg
-cfg=( # set default values in config array
- [K8S_CALICO]="${SCRIPT_DIR}/../../templates/kubernetes/calico_v2.6.3.yaml"
- [K8S_CONTIV_VPP]="https://raw.githubusercontent.com/contiv/vpp/master/k8s/contiv-vpp.yaml"
- [K8S_CSIT]="${SCRIPT_DIR}/../../templates/kubernetes/csit.yaml"
-)
diff --git a/resources/libraries/bash/entry/bisect.sh b/resources/libraries/bash/entry/bisect.sh
new file mode 100644
index 0000000000..d5cb1d51ba
--- /dev/null
+++ b/resources/libraries/bash/entry/bisect.sh
@@ -0,0 +1,188 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+# This entry script does not change which CSIT branch is used;
+# use the "with_oper_for_vpp.sh" wrapper for that.
+#
+# This script is to be used for locating performance regressions
+# (or breakages, or progressions, or fixes).
+# It uses "git bisect" commands on the VPP repository,
+# between the triggered VPP patch and a commit specified in the first argument
+# of the gerrit comment text.
+# The other arguments are used as tag expressions for selecting tests as usual.
+# Many different result types are supported.
+#
+# Logs are present in the archive directory, but usually the main output
+# is the offending commit as identified by "git bisect", visible in console.
+#
+# While selecting just one testcase is the intended use,
+# this script should be able to deal with multiple testcases as well,
+# grouping all the values together. This usually inflates
+# the standard deviation, but it is not clear how that affects the bisection.
+#
+# For the bisection decision, the jumpavg library is used,
+# deciding whether a shorter description is achieved by forcefully grouping
+# the middle results with the old ones, or with the new ones.
+# If the shortest description is achieved with 3 separate groups,
+# the bisect interval focuses on the biggest relative change
+# (with respect to pairwise maximum).
+#
+# If a test fails, an artificial result is used to distinguish it
+# from normal results. Currently it is the value 1.0, with a multiplicity of 4.
+#
+# Note that if there was a VPP API change that affects tests in the interval,
+# there frequently is no good way for a single CSIT commit to work there.
+# You can try manually reverting the CSIT changes to make tests pass,
+# possibly needing to search over multiple subintervals.
+# Using an older CSIT commit (possibly cherry-picking the bisect Change
+# if it was not present in CSIT compatible with old enough VPP builds)
+# is the fastest solution, but beware of CSIT-induced performance effects
+# (e.g. TRex settings).
+#
+# If a regression happens during a subinterval where the test fails
+# due to a bug in VPP, you may try to create a new commit chain
+# with the fix cherry-picked to the start of the interval.
+# Do not do that as a chain in Gerrit; it would be long, and Gerrit will
+# refuse edits of already merged Changes.
+# Instead, add a block of bash code to do the manipulation
+# on local git history between checkout and bisect.
+#
+# At the start, the script executes the first bisect iteration in an attempt
+# to avoid work if the search interval has only one commit (or is invalid).
+# Only when the work is needed are the earliest and latest commits built
+# and tested. Branches "earliest", "middle" and "latest" are temporarily
+# created as a way to remember which commits to check out.
+#
+# Test results are parsed from json files, and symlinks are used
+# to tell the python script which results to compare.
+#
+# Assumptions:
+# + There is a directory holding VPP repo with patch under test checked out.
+# + It contains csit subdirectory with CSIT code to use (this script is there).
+# + Everything needed to build VPP is already installed locally.
+# Consequences:
+# + Working directory is switched to the VPP repo root.
+# + At the end, VPP repo has checked out and built some commit,
+# as chosen by "git bisect".
+# + Directories build_root, build and csit are reset during the run.
+# + The following directories (relative to VPP repo) are (re)created:
+# ++ csit_{earliest,middle,latest}, build_{earliest,latest},
+# ++ archive, csit/archive, csit/download_dir.
+# + Symlinks csit_{early,late,mid} are also created.
+# Arguments:
+# - ${1} - If present, override JOB_NAME to simplify manual usage.
+
+# "set -eu" handles failures from the following two lines.
+BASH_ENTRY_DIR="$(dirname $(readlink -e "${BASH_SOURCE[0]}"))"
+BASH_FUNCTION_DIR="$(readlink -e "${BASH_ENTRY_DIR}/../function")"
+source "${BASH_FUNCTION_DIR}/common.sh" || {
+ echo "Source failed." >&2
+ exit 1
+}
+source "${BASH_FUNCTION_DIR}/per_patch.sh" || die "Source failed."
+# Cleanup needs ansible.
+source "${BASH_FUNCTION_DIR}/ansible.sh" || die "Source failed."
+common_dirs || die
+check_prerequisites || die
+set_perpatch_vpp_dir || die
+get_test_code "${1-}" || die
+get_test_tag_string || die
+# Unfortunately, git bisect only works at the top of the repo.
+cd "${VPP_DIR}" || die
+
+# Save the current commit.
+git checkout -b "latest"
+# Save the lower bound commit.
+git checkout -b "earliest"
+git reset --hard "${GIT_BISECT_FROM}"
+
+# This is the place for custom code manipulating local git history.
+
+#git checkout -b "alter"
+#...
+#git checkout "latest"
+#git rebase "alter" || git rebase --skip
+#git branch -D "alter"
+
+git bisect start || die
+# TODO: Can we add a trap for "git bisect reset" or even "deactivate",
+# without affecting the inner trap for unreserve and cleanup?
+git checkout "latest"
+git status || die
+git describe || die
+git bisect new || die
+# Performing first iteration early to avoid testing or even building.
+git checkout "earliest" || die "Failed to checkout earliest commit."
+git status || die
+git describe || die
+# The first iteration.
+git bisect old | tee "git.log" || die "Invalid bisect interval?"
+git checkout -b "middle" || die "Failed to create branch: middle"
+git status || die
+git describe || die
+if head -n 1 "git.log" | cut -b -11 | fgrep -q "Bisecting:"; then
+ echo "Building and testing initial bounds."
+else
+ echo "Single commit, no work needed."
+ exit 0
+fi
+# Building latest first, good for avoiding DPDK rebuilds.
+git checkout "latest" || die "Failed to checkout latest commit."
+build_vpp_ubuntu "LATEST" || die
+set_aside_build_artifacts "latest" || die
+git checkout "earliest" || die "Failed to checkout earliest commit."
+git status || die
+git describe || die
+build_vpp_ubuntu "EARLIEST" || die
+set_aside_build_artifacts "earliest" || die
+git checkout "middle" || die "Failed to checkout middle commit."
+git branch -D "earliest" "latest" || die "Failed to remove branches."
+# Done with repo manipulation for now, testing commences.
+initialize_csit_dirs "earliest" "middle" "latest" || die
+set_perpatch_dut || die
+select_topology || die
+select_arch_os || die
+activate_virtualenv "${VPP_DIR}" || die
+generate_tests || die
+archive_tests || die
+
+# TODO: Does it matter which build is tested first?
+
+select_build "build_earliest" || die
+check_download_dir || die
+reserve_and_cleanup_testbed || die
+run_robot || die
+move_test_results "csit_earliest" || die
+ln -s -T "csit_earliest" "csit_early" || die
+
+# Explicit cleanup, in case the previous test left the testbed in a bad shape.
+ansible_playbook "cleanup"
+
+select_build "build_latest" || die
+check_download_dir || die
+run_robot || die
+move_test_results "csit_latest" || die
+ln -s -T "csit_latest" "csit_late" || die
+untrap_and_unreserve_testbed || die
+
+# See function documentation for the logic in the loop.
+main_bisect_loop || die
+# In worst case, the middle branch is still checked out.
+# TODO: Is there a way to ensure "middle" branch is always deleted?
+git branch -D "middle" || true
+# Delete symlinks to prevent duplicate archiving.
+rm -vrf "csit_early" "csit_late" "csit_mid"
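For readers unfamiliar with the "old"/"new" terms used above, here is a minimal standalone sketch of the same bisect mechanics (hypothetical commit hashes and classifier script, not CSIT code):

    git bisect start
    git bisect new "latest_sha"      # upper bound, behavior already changed
    git bisect old "earliest_sha"    # lower bound, behavior still unchanged
    # "git bisect run" classifies each middle commit by exit code:
    # 0 -> old, 125 -> skip, any other code up to 127 -> new.
    git bisect run ./classify_commit.sh
    git bisect reset                 # restore the original checkout

This entry script cannot delegate to "git bisect run", since every iteration needs a fresh VPP build and a full test run, so main_bisect_loop issues the old/new verdicts itself, based on the jumpavg comparison described above.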
diff --git a/resources/libraries/bash/entry/bootstrap_docs.sh b/resources/libraries/bash/entry/bootstrap_aws_eb_version_deploy.sh
index 9d2519ebf3..15d6dae405 100755
--- a/resources/libraries/bash/entry/bootstrap_docs.sh
+++ b/resources/libraries/bash/entry/bootstrap_aws_eb_version_deploy.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -25,8 +25,8 @@ source "${BASH_FUNCTION_DIR}/common.sh" || {
echo "Source failed." >&2
exit 1
}
-source "${BASH_FUNCTION_DIR}/docs.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/eb_version.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/hugo.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/terraform.sh" || die "Source failed."
common_dirs || die
-activate_virtualenv || die
-generate_docs || die
-die_on_docs_error || die
+eb_version_deploy || die
diff --git a/resources/libraries/bash/entry/bootstrap_trending.sh b/resources/libraries/bash/entry/bootstrap_aws_eb_version_verify.sh
index b172a81be5..362a2f78af 100755
--- a/resources/libraries/bash/entry/bootstrap_trending.sh
+++ b/resources/libraries/bash/entry/bootstrap_aws_eb_version_verify.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -25,8 +25,8 @@ source "${BASH_FUNCTION_DIR}/common.sh" || {
echo "Source failed." >&2
exit 1
}
-source "${BASH_FUNCTION_DIR}/docs.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/eb_version.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/hugo.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/terraform.sh" || die "Source failed."
common_dirs || die
-activate_virtualenv || die
-generate_trending || die
-die_on_docs_error || die
+eb_version_build_verify || die
diff --git a/resources/libraries/bash/entry/bootstrap_report.sh b/resources/libraries/bash/entry/bootstrap_report.sh
index 191f910171..47a9d2e3d4 100755
--- a/resources/libraries/bash/entry/bootstrap_report.sh
+++ b/resources/libraries/bash/entry/bootstrap_report.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -25,8 +25,8 @@ source "${BASH_FUNCTION_DIR}/common.sh" || {
echo "Source failed." >&2
exit 1
}
-source "${BASH_FUNCTION_DIR}/docs.sh" || die "Source failed."
+source "${BASH_FUNCTION_DIR}/eb_version.sh" || die "Source failed."
common_dirs || die
activate_virtualenv || die
generate_report || die
-die_on_docs_error || die
+die_on_error || die
diff --git a/resources/libraries/bash/entry/bootstrap_verify_perf.sh b/resources/libraries/bash/entry/bootstrap_verify_perf.sh
index 99813573ea..18dfd08c39 100755
--- a/resources/libraries/bash/entry/bootstrap_verify_perf.sh
+++ b/resources/libraries/bash/entry/bootstrap_verify_perf.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -46,10 +46,7 @@ archive_tests || die
prepare_topology || die
select_topology || die
reserve_and_cleanup_testbed || die
-select_tags || die
-compose_pybot_arguments || die
-set_environment_variables || die
-run_pybot || die
-untrap_and_unreserve_testbed || die
+run_robot || die
move_archives || die
-die_on_pybot_error || die
+untrap_and_unreserve_testbed || die
+die_on_robot_error || die
diff --git a/resources/libraries/bash/entry/bootstrap_vpp_device.sh b/resources/libraries/bash/entry/bootstrap_vpp_device.sh
index ae4c26a1ba..fd6279f8c7 100755
--- a/resources/libraries/bash/entry/bootstrap_vpp_device.sh
+++ b/resources/libraries/bash/entry/bootstrap_vpp_device.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -44,9 +44,6 @@ archive_tests || die
prepare_topology || die
select_topology || die
activate_docker_topology || die
-select_tags || die
-compose_pybot_arguments || die
-set_environment_variables || die
-run_pybot || die
+run_robot || die
move_archives || die
-die_on_pybot_error || die
+die_on_robot_error || die
diff --git a/resources/libraries/bash/entry/patch_l3fwd_flip_routes b/resources/libraries/bash/entry/patch_l3fwd_flip_routes
index 23a6675145..c5eff5d3ca 100755
--- a/resources/libraries/bash/entry/patch_l3fwd_flip_routes
+++ b/resources/libraries/bash/entry/patch_l3fwd_flip_routes
@@ -1,7 +1,7 @@
#!/bin/sh
patch --ignore-whitespace --forward main.c <<"_EOF"
-185,186c185,186
+204,205c204,205
< {RTE_IPV4(198, 18, 0, 0), 24, 0},
< {RTE_IPV4(198, 18, 1, 0), 24, 1},
---
diff --git a/resources/libraries/bash/entry/per_patch_device.sh b/resources/libraries/bash/entry/per_patch_device.sh
index db977b6e96..88d7f13494 100644
--- a/resources/libraries/bash/entry/per_patch_device.sh
+++ b/resources/libraries/bash/entry/per_patch_device.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -39,14 +39,15 @@ source "${BASH_FUNCTION_DIR}/per_patch.sh" || die "Source failed."
common_dirs || die
check_prerequisites || die
set_perpatch_vpp_dir || die
-build_vpp_ubuntu_amd64 "CURRENT" || die
-set_aside_commit_build_artifacts || die
-initialize_csit_dirs || die
+git status || die
+git describe || die
+build_vpp_ubuntu "CURRENT" || die
+initialize_csit_dirs "current" || die
get_test_code "${1-}" || die
get_test_tag_string || die
set_perpatch_dut || die
select_arch_os || die
-select_build "build_current" || die
+select_build "build-root" || die
check_download_dir || die
activate_virtualenv "${VPP_DIR}" || die
generate_tests || die
@@ -54,8 +55,6 @@ archive_tests || die
prepare_topology || die
select_topology || die
activate_docker_topology || die
-select_tags || die
-compose_pybot_arguments || die
-run_pybot || die
+run_robot || die
move_archives || die
-die_on_pybot_error || die
+die_on_robot_error || die
diff --git a/resources/libraries/bash/entry/per_patch_perf.sh b/resources/libraries/bash/entry/per_patch_perf.sh
index 565a566eb0..9a825a007e 100644
--- a/resources/libraries/bash/entry/per_patch_perf.sh
+++ b/resources/libraries/bash/entry/per_patch_perf.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -44,11 +44,16 @@ source "${BASH_FUNCTION_DIR}/ansible.sh" || die "Source failed."
common_dirs || die
check_prerequisites || die
set_perpatch_vpp_dir || die
-build_vpp_ubuntu_amd64 "CURRENT" || die
-set_aside_commit_build_artifacts || die
-build_vpp_ubuntu_amd64 "PARENT" || die
-set_aside_parent_build_artifacts || die
-initialize_csit_dirs || die
+git status || die
+git describe || die
+build_vpp_ubuntu "CURRENT" || die
+set_aside_build_artifacts "current" || die
+git checkout "HEAD~" || die "Failed to checkout parent commit."
+git status || die
+git describe || die
+build_vpp_ubuntu "PARENT" || die
+set_aside_build_artifacts "parent" || die
+initialize_csit_dirs "parent" "current" || die
get_test_code "${1-}" || die
get_test_tag_string || die
set_perpatch_dut || die
@@ -58,8 +63,6 @@ activate_virtualenv "${VPP_DIR}" || die
generate_tests || die
archive_tests || die
reserve_and_cleanup_testbed || die
-select_tags || die
-compose_pybot_arguments || die
# Support for interleaved measurements is kept for future.
iterations=1 # 8
for ((iter=0; iter<iterations; iter++)); do
@@ -71,16 +74,14 @@ for ((iter=0; iter<iterations; iter++)); do
# Testing current first. Good for early failures or for API changes.
select_build "build_current" || die
check_download_dir || die
- run_pybot || die
- archive_parse_test_results "csit_current/${iter}" || die
- die_on_pybot_error || die
+ run_robot || die
+ move_test_results "csit_current/${iter}" || die
# TODO: Use less heavy way to avoid apt remove failures.
ansible_playbook "cleanup" || die
select_build "build_parent" || die
check_download_dir || die
- run_pybot || die
- archive_parse_test_results "csit_parent/${iter}" || die
- die_on_pybot_error || die
+ run_robot || die
+ move_test_results "csit_parent/${iter}" || die
done
untrap_and_unreserve_testbed || die
compare_test_results # The error code becomes this script's error code.
diff --git a/resources/libraries/bash/entry/tox/README.txt b/resources/libraries/bash/entry/tox/README.txt
index e13b60209e..9ce21e93d0 100644
--- a/resources/libraries/bash/entry/tox/README.txt
+++ b/resources/libraries/bash/entry/tox/README.txt
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -14,6 +14,13 @@
This directory contains tox scripts and other files they need.
Generally, a tox script is either a checker (suitable for automated verify)
or a fixer (manually started, risky as uncommitted edits can be lost).
+
+In the tox verify job we want to avoid running fixers,
+as they can affect what other checkers see
+(e.g. the autogen fixer could add more over-long lines).
+That is why we keep fixers separate from checkers in principle,
+even for fairly safe tasks (e.g. bumping copyright years).
+
Each tox script is assumed to be run from tox,
when working directory is set to ${CSIT_DIR}.
@@ -26,14 +33,9 @@ Each checker script should:
+ Only the output suitable for automated processing by an external caller
should be written to stdout.
++ The level of "less verbose" depends on check and state of codebase.
-+ TODO: Should we carefully document which files are
- whitelisted/blacklisted for a particulat check?
Each fixer script should:
+ Perform edits on current filesystem
+ Not assume git is clean (there may be uncommitted edits).
+ Use "git diff HEAD~" to get both comitted and uncomitted edits to analyze.
+ Output whatever it wants (possibly nothing).
-
-TODO: Should checkers be named differently than fixers?
- E.g. both scripts and tox environments start with fix_?
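A minimal checker skeleton consistent with the contract above (a hypothetical tab check, not an actual CSIT script):

    set -exuo pipefail
    # Human-readable details go to stderr; stdout stays machine-readable.
    offending=$(git diff --name-only HEAD~ | xargs -r grep -l $'\t' || true)
    if [[ -n "${offending}" ]]; then
        echo "${offending}"              # stdout: list for automated callers.
        echo "Tab checker: FAIL" >&2
        exit 1
    fi
    echo "Tab checker: PASS" >&2
    exit 0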
diff --git a/resources/libraries/bash/entry/tox/copyright_year.sh b/resources/libraries/bash/entry/tox/copyright_year.sh
index 9ed9fcb653..272763100e 100644
--- a/resources/libraries/bash/entry/tox/copyright_year.sh
+++ b/resources/libraries/bash/entry/tox/copyright_year.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -24,10 +24,7 @@ set -exuo pipefail
# but not the current year (in the same line).
# The offending lines are stored to copyright_year.log (overwriting).
#
-# 3 lines were chosen, because first two lines could be shebang and empty line,
-# and more than 3 lines would start failing on files with multiple copyright
-# holders. There, only the last updating entity needs to bump its year,
-# and put other copyright lines below.
+# 3 lines were chosen, because first two lines could be shebang and empty line.
# "set -eu" handles failures from the following two lines.
BASH_CHECKS_DIR="$(dirname $(readlink -e "${BASH_SOURCE[0]}"))"
@@ -42,7 +39,7 @@ IFS=$'\n'
files=($(git diff --name-only HEAD~ || true))
unset IFS
truncate -s 0 "copyright_year.log" || die
-# A change can have thousands of files, supress console output in the cycle.
+# A change can have thousands of files, so suppress console output for the cycle.
set +x
for fil in "${files[@]}"; do
# Greps do "fail" on 0 line output, we need to ignore that
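The per-file check itself is not visible in this hunk; a plausible sketch of the test the comments describe (flagging files whose first 3 lines mention a 20xx year but not the current one), assuming the same ${fil} and ${year} variables:

    if head -n 3 "${fil}" | grep -E '20[0-9][0-9]' | grep -qv "${year}"; then
        echo "${fil}" >> "copyright_year.log"
    fi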
diff --git a/resources/libraries/bash/entry/tox/doc_verify.sh b/resources/libraries/bash/entry/tox/doc_verify.sh
deleted file mode 100644
index 7eec4b69e4..0000000000
--- a/resources/libraries/bash/entry/tox/doc_verify.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeuo pipefail
-
-# This file should be executed from tox, as the assumend working directory
-# is different from where this file is located.
-# This file does not have executable flag nor shebang,
-# to dissuade non-tox callers.
-
-# "set -eu" handles failures from the following two lines.
-BASH_CHECKS_DIR="$(dirname $(readlink -e "${BASH_SOURCE[0]}"))"
-BASH_FUNCTION_DIR="$(readlink -e "${BASH_CHECKS_DIR}/../../function")"
-source "${BASH_FUNCTION_DIR}/common.sh" || {
- echo "Source failed." >&2
- exit 1
-}
-source "${BASH_FUNCTION_DIR}/docs.sh" || die "Source failed."
-common_dirs || die
-activate_virtualenv || die
-
-# Documentation generation.
-# Here we do store only stderr to file while stdout (inlcuding Xtrace) is
-# printed to console. This way we can track increased errors in future.
-# We do not need to do trap as the env will be closed after tox finished the
-# task.
-exec 3>&1 || die
-export BASH_XTRACEFD="3" || die
-log_file="$(pwd)/doc_verify.log" || die
-
-generate_docs 2> ${log_file} || die
-
-if [[ "${DOCS_EXIT_STATUS}" != 0 ]]; then
- # Failed to generate report.
- warn
- warn "Doc verify checker: FAIL"
- exit 1
-fi
-
-warn
-warn "Doc verify checker: PASS"
diff --git a/resources/libraries/bash/entry/tox/fix_copyright_year.sh b/resources/libraries/bash/entry/tox/fix_copyright_year.sh
new file mode 100644
index 0000000000..d822f272af
--- /dev/null
+++ b/resources/libraries/bash/entry/tox/fix_copyright_year.sh
@@ -0,0 +1,55 @@
+# Copyright (c) 2024 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+# This file should be executed from tox, as the assumed working directory
+# is different from where this file is located.
+# This file does not have executable flag nor shebang,
+# to dissuade non-tox callers.
+
+# This is a fixer script, so be careful before starting it.
+# It is recommended to always commit your recent edits before running this,
+# and use "git diff" after running this to confirm the edits are correct.
+# Otherwise you can lose your edits and introduce bad edits.
+
+# This script runs a variant of the "git diff" command
+# to get the list of edited files, and a few sed commands to edit the year
+# if the "20.." pattern matches in the first 3 lines.
+# There is no detection of "copyright", so edits can apply in surprising places.
+
+# 3 lines were chosen, because first two lines could be shebang and empty line.
+
+# "set -eu" handles failures from the following two lines.
+BASH_CHECKS_DIR="$(dirname $(readlink -e "${BASH_SOURCE[0]}"))"
+BASH_FUNCTION_DIR="$(readlink -e "${BASH_CHECKS_DIR}/../../function")"
+source "${BASH_FUNCTION_DIR}/common.sh" || {
+ echo "Source failed." >&2
+ exit 1
+}
+
+year=$(date +'%Y')
+IFS=$'\n'
+files=($(git diff --name-only HEAD~ || true))
+unset IFS
+# A change can have thousands of files, so suppress console output for the cycle.
+set +x
+for fil in "${files[@]}"; do
+ if [[ -f "${fil}" ]]; then
+ sed -i "1 s/20../${year}/g" "${fil}"
+ sed -i "2 s/20../${year}/g" "${fil}"
+ sed -i "3 s/20../${year}/g" "${fil}"
+ # Else the file was actually deleted and sed would fail.
+ fi
+done
+set -x
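Because the sed expressions match any "20.." in the first 3 lines with no check for the word "copyright", the "surprising places" caveat is easy to demonstrate (hypothetical one-line file, current year assumed to be 2024):

    $ printf 'buffer size 2048, (c) 2021\n' > demo.txt
    $ sed -i "1 s/20../2024/g" demo.txt
    $ cat demo.txt
    buffer size 2024, (c) 2024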
diff --git a/resources/libraries/bash/entry/tox/model_version.sh b/resources/libraries/bash/entry/tox/model_version.sh
deleted file mode 100644
index 2bcc628bfb..0000000000
--- a/resources/libraries/bash/entry/tox/model_version.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -exuo pipefail
-
-# This file should be executed from tox, as the assumed working directory
-# is different from where this file is located.
-# This file does not have executable flag nor shebang,
-# to dissuade non-tox callers.
-
-# This script runs a two grep commands as "if" conditions,
-# using log files to store the data (generated by git commands) to grep,
-# failing when model implementation edits do not come with model version edit.
-# The contents of the log files may be useful when fail cause is not obvious.
-
-# "set -eu" handles failures from the following two lines.
-BASH_CHECKS_DIR="$(dirname $(readlink -e "${BASH_SOURCE[0]}"))"
-BASH_FUNCTION_DIR="$(readlink -e "${BASH_CHECKS_DIR}/../../function")"
-source "${BASH_FUNCTION_DIR}/common.sh" || {
- echo "Source failed." >&2
- exit 1
-}
-
-impl_log="edited_files.log"
-git diff --name-only HEAD~ > "${impl_log}"
-if ! grep -q '^docs/model/current/schema/test_case*' "${impl_log}"; then
- # Failing grep means no model edits.
- warn "No model implementation edits detected."
- warn
- warn "CSIT model version checker: PASS"
- exit 0
-fi
-const_log="constants_edits.log"
-git diff -U0 HEAD~ -- "resources/libraries/python/Constants.py" > "${const_log}"
-if ! grep -q '^\+ MODEL_VERSION = ' "${const_log}"; then
- warn "Model implementation edits without version edit detected!"
- warn "See ${impl_log} and ${const_log} for what was detected."
- warn
- warn "CSIT model version checker: FAIL"
- exit 1
-fi
-doc_log="docs_edits.log"
-git diff -U0 HEAD~ -- "docs/model/current/top.rst" > "${doc_log}"
-if ! grep -q '^\+This document is valid for CSIT model' "${doc_log}"; then
- warn "Model implementation edits without documentation update detected!"
- warn "See ${impl_log}, ${const_log} and ${doc_log} for what was detected."
- warn
- warn "CSIT model version checker: FAIL"
- exit 1
-fi
-# TODO: Check constants and docs are specifying the same version.
-warn "Model version and doc are edited, model implementation edits are allowed."
-warn
-warn "CSIT model version checker: PASS"
-exit 0
diff --git a/resources/libraries/bash/function/README.txt b/resources/libraries/bash/function/README.txt
deleted file mode 100644
index e4eb91565b..0000000000
--- a/resources/libraries/bash/function/README.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-Files in this directory system are to be executed indirectly,
-sourced from other scripts.
-
-In fact, the files should only define functions,
-except perhaps some minimal logic needed to import dependencies.
-The originating function calls should be executed from elsewhere,
-typically from entry scripts.
diff --git a/resources/libraries/bash/function/ansible.sh b/resources/libraries/bash/function/ansible.sh
index 64508bda11..6cf4d16f43 100644
--- a/resources/libraries/bash/function/ansible.sh
+++ b/resources/libraries/bash/function/ansible.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -28,9 +28,12 @@ function ansible_adhoc () {
set -exuo pipefail
case "$FLAVOR" in
- "aws")
+ "aws" | "c6in" | "c6gn" | "c7gn")
INVENTORY_PATH="cloud_inventory"
;;
+ "x-"*)
+ INVENTORY_PATH="external_inventory"
+ ;;
*)
INVENTORY_PATH="lf_inventory"
;;
@@ -70,9 +73,12 @@ function ansible_playbook () {
set -exuo pipefail
case "$FLAVOR" in
- "aws")
+ "aws" | "c6in" | "c6gn" | "c7gn")
INVENTORY_PATH="cloud_inventory"
;;
+ "x-"*)
+ INVENTORY_PATH="external_inventory"
+ ;;
*)
INVENTORY_PATH="lf_inventory"
;;
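The new "x-"* arm relies on bash case globbing, so any external-testbed flavor whose name starts with "x-" maps to the external inventory. In isolation (flavor value hypothetical):

    FLAVOR="x-demo"
    case "${FLAVOR}" in
        "aws" | "c6in" | "c6gn" | "c7gn") echo "cloud_inventory" ;;
        "x-"*) echo "external_inventory" ;;
        *) echo "lf_inventory" ;;
    esac
    # prints: external_inventory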
diff --git a/resources/libraries/bash/function/artifacts.sh b/resources/libraries/bash/function/artifacts.sh
index 15a4dd2fe1..e4d5dd6fc6 100644
--- a/resources/libraries/bash/function/artifacts.sh
+++ b/resources/libraries/bash/function/artifacts.sh
@@ -1,7 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Copyright (c) 2021 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -26,8 +25,6 @@ function download_artifacts () {
# - REPO_URL - FD.io Packagecloud repository.
# Functions conditionally called (see their documentation for side effects):
# - download_ubuntu_artifacts
- # - download_centos_artifacts
- # - download_opensuse_artifacts
set -exuo pipefail
@@ -46,10 +43,6 @@ function download_artifacts () {
if [ "${os_id}" == "ubuntu" ]; then
download_ubuntu_artifacts || die
- elif [ "${os_id}" == "centos" ]; then
- download_centos_artifacts || die
- elif [ "${os_id}" == "opensuse" ]; then
- download_opensuse_artifacts || die
else
die "${os_id} is not yet supported."
fi
@@ -129,73 +122,3 @@ function download_ubuntu_artifacts () {
}
fi
}
-
-function download_centos_artifacts () {
-
- # Download or install CentOS VPP artifacts from packagecloud.io.
- #
- # Variables read:
- # - REPO_URL - FD.io Packagecloud repository.
- # - VPP_VERSION - VPP version.
- # - INSTALL - Whether install packages (if set to "true") or download only.
- # Default: "false".
-
- set -exuo pipefail
-
- curl -s "${REPO_URL}"/script.rpm.sh | sudo -E bash || {
- die "Packagecloud FD.io repo fetch failed."
- }
- # If version is set we will add suffix.
- artifacts=()
- pkgs=(vpp vpp-selinux-policy vpp-devel vpp-lib vpp-plugins vpp-api-python)
- if [ -z "${VPP_VERSION-}" ]; then
- artifs+=(${pkgs[@]})
- else
- artifs+=(${pkgs[@]/%/-${VPP_VERSION-}})
- fi
-
- if [[ "${INSTALL:-false}" == "true" ]]; then
- sudo yum -y install "${artifs[@]}" || {
- die "Install VPP artifact failed."
- }
- else
- sudo yum -y install --downloadonly --downloaddir=. "${artifs[@]}" || {
- die "Download VPP artifacts failed."
- }
- fi
-}
-
-function download_opensuse_artifacts () {
-
- # Download or install OpenSuSE VPP artifacts from packagecloud.io.
- #
- # Variables read:
- # - REPO_URL - FD.io Packagecloud repository.
- # - VPP_VERSION - VPP version.
- # - INSTALL - Whether install packages (if set to "true") or download only.
- # Default: "false".
-
- set -exuo pipefail
-
- curl -s "${REPO_URL}"/script.rpm.sh | sudo -E bash || {
- die "Packagecloud FD.io repo fetch failed."
- }
- # If version is set we will add suffix.
- artifs=()
- pkgs=(vpp vpp-devel vpp-lib vpp-plugins libvpp0)
- if [ -z "${VPP_VERSION-}" ]; then
- artifs+=(${pkgs[@]})
- else
- artifs+=(${pkgs[@]/%/-${VPP_VERSION-}})
- fi
-
- if [[ "${INSTALL:-false}" == "true" ]]; then
- sudo yum -y install "${artifs[@]}" || {
- die "Install VPP artifact failed."
- }
- else
- sudo yum -y install --downloadonly --downloaddir=. "${artifs[@]}" || {
- die "Download VPP artifacts failed."
- }
- fi
-}
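The removed rpm helpers build their package lists with bash pattern substitution over a whole array, appending a version suffix to every element. The idiom in isolation:

    pkgs=(vpp vpp-plugins)
    VPP_VERSION="23.02"
    artifs=("${pkgs[@]/%/-${VPP_VERSION}}")   # empty end-anchor match replaced
    echo "${artifs[@]}"
    # prints: vpp-23.02 vpp-plugins-23.02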
diff --git a/resources/libraries/bash/function/branch.sh b/resources/libraries/bash/function/branch.sh
index ba9cc39c67..37900eab01 100644
--- a/resources/libraries/bash/function/branch.sh
+++ b/resources/libraries/bash/function/branch.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -57,10 +57,6 @@ function checkout_csit_for_vpp () {
set -exuo pipefail
case "${1}" in
- "stable/2009")
- # LTS branch
- branch_id="origin/${1/stable\//oper-rls}_lts"
- ;;
"stable/"*)
branch_id="origin/${1/stable\//oper-rls}"
;;
diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh
index b194c31259..4f104dbfd3 100644
--- a/resources/libraries/bash/function/common.sh
+++ b/resources/libraries/bash/function/common.sh
@@ -1,5 +1,5 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Copyright (c) 2022 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
+# Copyright (c) 2024 PANTHEON.tech and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -45,7 +45,7 @@ function activate_docker_topology () {
device_image="$(< ${CSIT_DIR}/${IMAGE_VER_FILE})"
case_text="${NODENESS}_${FLAVOR}"
case "${case_text}" in
- "1n_skx" | "1n_tx2")
+ "1n_skx" | "1n_alt" | "1n_spr")
# We execute reservation over csit-shim-dcr (ssh) which runs sourced
# script's functions. Env variables are read from ssh output
# back to localhost for further processing.
@@ -93,6 +93,12 @@ function activate_docker_topology () {
cat ${WORKING_TOPOLOGY} | grep -v password || {
die "Topology read failed!"
}
+
+ # Subfunctions to update data that may depend on topology reserved.
+ set_environment_variables || die
+ select_tags || die
+ compose_robot_arguments || die
+
}
@@ -158,7 +164,7 @@ function check_download_dir () {
# Fail if there are no files visible in ${DOWNLOAD_DIR}.
#
# Variables read:
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# Directories read:
# - ${DOWNLOAD_DIR} - Has to be non-empty to proceed.
# Functions called:
@@ -227,7 +233,7 @@ function common_dirs () {
TOPOLOGIES_DIR=$(readlink -e "${CSIT_DIR}/topologies/available") || {
die "Readlink failed."
}
- JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/docs/job_specs") || {
+ JOB_SPECS_DIR=$(readlink -e "${CSIT_DIR}/resources/job_specs") || {
die "Readlink failed."
}
RESOURCES_DIR=$(readlink -e "${CSIT_DIR}/resources") || {
@@ -236,9 +242,6 @@ function common_dirs () {
TOOLS_DIR=$(readlink -e "${RESOURCES_DIR}/tools") || {
die "Readlink failed."
}
- DOC_GEN_DIR=$(readlink -e "${TOOLS_DIR}/doc_gen") || {
- die "Readlink failed."
- }
PYTHON_SCRIPTS_DIR=$(readlink -e "${TOOLS_DIR}/scripts") || {
die "Readlink failed."
}
@@ -258,8 +261,12 @@ function common_dirs () {
}
-function compose_pybot_arguments () {
+function compose_robot_arguments () {
+ # This function is called by the run_tests function.
+ # The reason is that some jobs (bisect) perform reservation multiple times,
+ # so WORKING_TOPOLOGY can be different each time.
+ #
# Variables read:
# - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
# - DUT - CSIT test/ subdirectory, set while processing tags.
@@ -268,21 +275,23 @@ function compose_pybot_arguments () {
# - TEST_CODE - The test selection string from environment or argument.
# - SELECTION_MODE - Selection criteria [test, suite, include, exclude].
# Variables set:
- # - PYBOT_ARGS - String holding part of all arguments for pybot.
- # - EXPANDED_TAGS - Array of strings pybot arguments compiled from tags.
+ # - ROBOT_ARGS - String holding part of all arguments for robot.
+ # - EXPANDED_TAGS - Array of strings robot arguments compiled from tags.
set -exuo pipefail
# No explicit check needed with "set -u".
- PYBOT_ARGS=("--loglevel" "TRACE")
- PYBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
+ ROBOT_ARGS=("--loglevel" "TRACE")
+ ROBOT_ARGS+=("--variable" "TOPOLOGY_PATH:${WORKING_TOPOLOGY}")
+ # TODO: The rest does not need to be recomputed on each reservation.
+ # Refactor TEST_CODE so this part can be called only once.
case "${TEST_CODE}" in
*"device"*)
- PYBOT_ARGS+=("--suite" "tests.${DUT}.device")
+ ROBOT_ARGS+=("--suite" "tests.${DUT}.device")
;;
- *"perf"*)
- PYBOT_ARGS+=("--suite" "tests.${DUT}.perf")
+ *"perf"* | *"bisect"*)
+ ROBOT_ARGS+=("--suite" "tests.${DUT}.perf")
;;
*)
die "Unknown specification: ${TEST_CODE}"
@@ -319,7 +328,7 @@ function deactivate_docker_topology () {
case_text="${NODENESS}_${FLAVOR}"
case "${case_text}" in
- "1n_skx" | "1n_tx2")
+ "1n_skx" | "1n_alt" | "1n_spr")
ssh="ssh root@172.17.0.1 -p 6022"
env_vars=$(env | grep CSIT_ | tr '\n' ' ' ) || die
# The "declare -f" output is long and boring.
@@ -360,19 +369,19 @@ function die () {
}
-function die_on_pybot_error () {
+function die_on_robot_error () {
# Source this fragment if you want to abort on any failed test case.
#
# Variables read:
- # - PYBOT_EXIT_STATUS - Set by a pybot running fragment.
+ # - ROBOT_EXIT_STATUS - Set by a robot running fragment.
# Functions called:
# - die - Print to stderr and exit.
set -exuo pipefail
- if [[ "${PYBOT_EXIT_STATUS}" != "0" ]]; then
- die "Test failures are present!" "${PYBOT_EXIT_STATUS}"
+ if [[ "${ROBOT_EXIT_STATUS}" != "0" ]]; then
+ die "Test failures are present!" "${ROBOT_EXIT_STATUS}"
fi
}
@@ -385,7 +394,7 @@ function generate_tests () {
# within any subdirectory after copying.
# This is a separate function, because this code is called
- # both by autogen checker and entries calling run_pybot.
+ # both by autogen checker and entries calling run_robot.
# Directories read:
# - ${CSIT_DIR}/tests - Used as templates for the generated tests.
@@ -431,78 +440,110 @@ function get_test_code () {
fi
case "${TEST_CODE}" in
- *"1n-vbox"*)
+ *"1n-vbox")
NODENESS="1n"
FLAVOR="vbox"
;;
- *"1n-skx"*)
+ *"1n-skx")
NODENESS="1n"
FLAVOR="skx"
;;
- *"1n-tx2"*)
+ *"1n-spr")
NODENESS="1n"
- FLAVOR="tx2"
+ FLAVOR="spr"
;;
- *"1n-aws"*)
+ *"1n-alt")
+ NODENESS="1n"
+ FLAVOR="alt"
+ ;;
+ *"1n-aws")
NODENESS="1n"
FLAVOR="aws"
;;
- *"2n-aws"*)
+ *"2n-aws")
NODENESS="2n"
FLAVOR="aws"
;;
- *"3n-aws"*)
+ *"3n-aws")
NODENESS="3n"
FLAVOR="aws"
;;
- *"2n-skx"*)
+ *"2n-c7gn")
NODENESS="2n"
- FLAVOR="skx"
+ FLAVOR="c7gn"
;;
- *"3n-skx"*)
+ *"3n-c7gn")
NODENESS="3n"
- FLAVOR="skx"
+ FLAVOR="c7gn"
;;
- *"2n-zn2"*)
+ *"1n-c6in")
+ NODENESS="1n"
+ FLAVOR="c6in"
+ ;;
+ *"2n-c6in")
+ NODENESS="2n"
+ FLAVOR="c6in"
+ ;;
+ *"3n-c6in")
+ NODENESS="3n"
+ FLAVOR="c6in"
+ ;;
+ *"2n-zn2")
NODENESS="2n"
FLAVOR="zn2"
;;
- *"2n-clx"*)
+ *"2n-clx")
NODENESS="2n"
FLAVOR="clx"
;;
- *"2n-icx"*)
+ *"2n-icx")
NODENESS="2n"
FLAVOR="icx"
;;
- *"3n-icx"*)
+ *"2n-spr")
+ NODENESS="2n"
+ FLAVOR="spr"
+ ;;
+ *"3n-icx")
NODENESS="3n"
FLAVOR="icx"
;;
- *"2n-dnv"*)
- NODENESS="2n"
- FLAVOR="dnv"
+ *"3na-spr")
+ NODENESS="3na"
+ FLAVOR="spr"
;;
- *"3n-dnv"*)
- NODENESS="3n"
- FLAVOR="dnv"
+ *"3nb-spr")
+ NODENESS="3nb"
+ FLAVOR="spr"
;;
- *"3n-snr"*)
+ *"3n-snr")
NODENESS="3n"
FLAVOR="snr"
;;
- *"2n-tx2"*)
+ *"3n-icxd")
+ NODENESS="3n"
+ FLAVOR="icxd"
+ ;;
+ *"2n-tx2")
NODENESS="2n"
FLAVOR="tx2"
;;
- *"3n-tsh"*)
+ *"3n-tsh")
NODENESS="3n"
FLAVOR="tsh"
;;
- *"3n-alt"*)
+ *"3n-alt")
NODENESS="3n"
FLAVOR="alt"
;;
+ *"2n-x-"*)
+ NODENESS="2n"
+ FLAVOR="${TEST_CODE#*2n-}"
+ ;;
+ *"3n-x-"*)
+ NODENESS="3n"
+ FLAVOR="${TEST_CODE#*3n-}"
+ ;;
esac
}
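For the new "x-" testbeds, FLAVOR is derived by bash prefix stripping, so it keeps the full "x-..." tail of the job name (job name hypothetical):

    TEST_CODE="csit-vpp-perf-mrr-weekly-2n-x-demo"
    FLAVOR="${TEST_CODE#*2n-}"   # strip the shortest prefix matching "*2n-"
    echo "${FLAVOR}"
    # prints: x-demo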
@@ -516,6 +557,10 @@ function get_test_tag_string () {
# Variables set:
# - TEST_TAG_STRING - The string following trigger word in gerrit comment.
# May be empty, or even not set on event types not adding comment.
+ # - GIT_BISECT_FROM - If bisecttest, the commit hash to bisect from.
+ # Else not set.
+ # Variables exported optionally:
+ # - GRAPH_NODE_VARIANT - Node variant to test with, set if found in trigger.
# TODO: ci-management scripts no longer need to perform this.
@@ -523,6 +568,10 @@ function get_test_tag_string () {
if [[ "${GERRIT_EVENT_TYPE-}" == "comment-added" ]]; then
case "${TEST_CODE}" in
+ # Order matters; the bisect job contains "perf" in its name.
+ *"bisect"*)
+ trigger="bisecttest"
+ ;;
*"device"*)
trigger="devicetest"
;;
@@ -548,6 +597,18 @@ function get_test_tag_string () {
comment=$(fgrep "${trigger}" <<< "${comment}" || true)
TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
fi
+ if [[ "${trigger}" == "bisecttest" ]]; then
+ # Intentionally without quotes, so spaces delimit elements.
+ test_tag_array=(${TEST_TAG_STRING}) || die "How could this fail?"
+ # First "argument" of bisecttest is a commit hash.
+ GIT_BISECT_FROM="${test_tag_array[0]}" || {
+ die "Bisect job requires commit hash."
+ }
+ # Update the tag string (tag expressions only, no commit hash).
+ TEST_TAG_STRING="${test_tag_array[@]:1}" || {
+ die "Bisect job needs a single test, no default."
+ }
+ fi
if [[ -n "${TEST_TAG_STRING-}" ]]; then
test_tag_array=(${TEST_TAG_STRING})
if [[ "${test_tag_array[0]}" == "icl" ]]; then
@@ -621,42 +682,6 @@ function move_archives () {
}
-function post_process_robot_outputs () {
-
- # Generate INFO level output_info.xml by rebot.
- # Archive UTI raw json outputs.
- #
- # Variables read:
- # - ARCHIVE_DIR - Path to post-processed files.
-
- set -exuo pipefail
-
- # Compress raw json outputs, as they will never be post-processed.
- pushd "${ARCHIVE_DIR}" || die
- if [ -d "tests" ]; then
- # Use deterministic order.
- options+=("--sort=name")
- # We are keeping info outputs where they are.
- # Assuming we want to move anything but info files (and dirs).
- options+=("--exclude=*.info.json")
- tar czf "generated_output_raw.tar.gz" "${options[@]}" "tests" || true
- # Tar can remove when archiving, but chokes (not deterministically)
- # on attempting to remove dirs (not empty as info files are there).
- # So we need to delete the raw files manually.
- find "tests" -type f -name "*.raw.json" -delete || true
- fi
- popd || die
-
- # Generate INFO level output_info.xml for post-processing.
- all_options=("--loglevel" "INFO")
- all_options+=("--log" "none")
- all_options+=("--report" "none")
- all_options+=("--output" "${ARCHIVE_DIR}/output_info.xml")
- all_options+=("${ARCHIVE_DIR}/output.xml")
- rebot "${all_options[@]}" || true
-}
-
-
function prepare_topology () {
# Prepare virtual testbed topology if needed based on flavor.
@@ -665,6 +690,8 @@ function prepare_topology () {
# - TEST_CODE - String affecting test selection, usually jenkins job name.
# - NODENESS - Node multiplicity of testbed, either "2n" or "3n".
# - FLAVOR - Node flavor string, e.g. "clx" or "skx".
+ # Variables set:
+ # - TERRAFORM_MODULE_DIR - Terraform module directory.
# Functions called:
# - die - Print to stderr and exit.
# - terraform_init - Terraform init topology.
@@ -676,7 +703,29 @@ function prepare_topology () {
case "${case_text}" in
"1n_aws" | "2n_aws" | "3n_aws")
export TF_VAR_testbed_name="${TEST_CODE}"
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+ terraform_init || die "Failed to call terraform init."
+ trap "terraform_destroy" ERR EXIT || {
+ die "Trap attempt failed, please cleanup manually. Aborting!"
+ }
+ terraform_apply || die "Failed to call terraform apply."
+ ;;
+ "2n_c7gn" | "3n_c7gn")
+ export TF_VAR_testbed_name="${TEST_CODE}"
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c7gn"
+ terraform_init || die "Failed to call terraform init."
+ trap "terraform_destroy" ERR EXIT || {
+ die "Trap attempt failed, please cleanup manually. Aborting!"
+ }
+ terraform_apply || die "Failed to call terraform apply."
+ ;;
+ "1n_c6in" | "2n_c6in" | "3n_c6in")
+ export TF_VAR_testbed_name="${TEST_CODE}"
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-c6in"
terraform_init || die "Failed to call terraform init."
+ trap "terraform_destroy" ERR EXIT || {
+ die "Trap attempt failed, please cleanup manually. Aborting!"
+ }
terraform_apply || die "Failed to call terraform apply."
;;
esac
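Note the trap registered in each branch fires on both ERR and EXIT, so on a failure path terraform_destroy can run twice (once per condition) and presumably needs to tolerate that. A standalone illustration of the semantics:

    set -e
    cleanup () { echo "cleanup"; }
    trap cleanup ERR EXIT
    false   # the ERR trap runs, then the shell exits and the EXIT trap runs too.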
@@ -689,6 +738,9 @@ function reserve_and_cleanup_testbed () {
# When cleanup fails, remove from topologies and keep retrying
# until all topologies are removed.
#
+ # Multiple other functions are called from here,
+ # as they set variables that depend on reserved topology data.
+ #
# Variables read:
# - TOPOLOGIES - Array of paths to topology yaml to attempt reservation on.
# - PYTHON_SCRIPTS_DIR - Path to directory holding the reservation script.
@@ -757,38 +809,50 @@ function reserve_and_cleanup_testbed () {
echo "Sleeping ${sleep_time}"
sleep "${sleep_time}" || die "Sleep failed."
done
+
+ # Subfunctions to update data that may depend on topology reserved.
+ set_environment_variables || die
+ select_tags || die
+ compose_robot_arguments || die
}
-function run_pybot () {
+function run_robot () {
- # Run pybot with options based on input variables.
- # Generate INFO level output_info.xml by rebot.
- # Archive UTI raw json outputs.
+ # Run robot with options based on input variables.
+ #
+ # Testbed has to be reserved already,
+ # as some data may have changed between reservations,
+ # for example excluded NICs.
#
# Variables read:
# - CSIT_DIR - Path to existing root of local CSIT git repository.
# - ARCHIVE_DIR - Path to store robot result files in.
- # - PYBOT_ARGS, EXPANDED_TAGS - See compose_pybot_arguments.sh
+ # - ROBOT_ARGS, EXPANDED_TAGS - See compose_robot_arguments.sh
# - GENERATED_DIR - Tests are assumed to be generated under there.
+ # - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
+ # - DUT - CSIT test/ subdirectory, set while processing tags.
+ # - TAGS - Array variable holding selected tag boolean expressions.
+ # - TOPOLOGIES_TAGS - Tag boolean expression filtering tests for topology.
+ # - TEST_CODE - The test selection string from environment or argument.
# Variables set:
- # - PYBOT_EXIT_STATUS - Exit status of most recent pybot invocation.
+ # - ROBOT_ARGS - String holding part of all arguments for robot.
+ # - EXPANDED_TAGS - Array of string robot arguments compiled from tags.
+ # - ROBOT_EXIT_STATUS - Exit status of most recent robot invocation.
# Functions called:
# - die - Print to stderr and exit.
set -exuo pipefail
- all_options=("--outputdir" "${ARCHIVE_DIR}" "${PYBOT_ARGS[@]}")
+ all_options=("--outputdir" "${ARCHIVE_DIR}" "${ROBOT_ARGS[@]}")
all_options+=("${EXPANDED_TAGS[@]}")
pushd "${CSIT_DIR}" || die "Change directory operation failed."
set +e
robot "${all_options[@]}" "${GENERATED_DIR}/tests/"
- PYBOT_EXIT_STATUS="$?"
+ ROBOT_EXIT_STATUS="$?"
set -e
- post_process_robot_outputs || die
-
popd || die "Change directory operation failed."
}
@@ -809,9 +873,9 @@ function select_arch_os () {
case "${ID}" in
"ubuntu"*)
case "${VERSION}" in
- *"LTS (Focal Fossa)"*)
- IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU"
- VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_FOCAL"
+ *"LTS (Jammy Jellyfish)"*)
+ IMAGE_VER_FILE="VPP_DEVICE_IMAGE_UBUNTU_JAMMY"
+ VPP_VER_FILE="VPP_STABLE_VER_UBUNTU_JAMMY"
PKG_SUFFIX="deb"
;;
*)
@@ -840,6 +904,9 @@ function select_arch_os () {
function select_tags () {
+ # Only to be called from the reservation function,
+ # as resulting tags may change based on topology data.
+ #
# Variables read:
# - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
# - TEST_CODE - String affecting test selection, usually jenkins job name.
@@ -856,7 +923,7 @@ function select_tags () {
# NIC SELECTION
case "${TEST_CODE}" in
- *"1n-aws"*)
+ *"1n-aws"* | *"1n-c6in"*)
start_pattern='^ SUT:'
;;
*)
@@ -879,27 +946,45 @@ function select_tags () {
# Select default NIC tag.
case "${TEST_CODE}" in
- *"3n-dnv"* | *"2n-dnv"*)
- default_nic="nic_intel-x553"
+ *"3n-snr")
+ default_nic="nic_intel-e822cq"
;;
- *"3n-snr"*)
- default_nic="nic_intel-e810xxv"
+ *"3n-icxd")
+ default_nic="nic_intel-e823c"
;;
- *"3n-tsh"*)
+ *"3n-tsh")
default_nic="nic_intel-x520-da2"
;;
- *"3n-icx"* | *"2n-icx"*)
- default_nic="nic_intel-xxv710"
+ *"3n-icx" | *"2n-icx")
+ default_nic="nic_intel-e810cq"
+ ;;
+ *"3na-spr")
+ default_nic="nic_mellanox-cx7veat"
+ ;;
+ *"3nb-spr")
+ default_nic="nic_intel-e810cq"
;;
- *"3n-skx"* | *"2n-skx"* | *"2n-clx"* | *"2n-zn2"*)
+ *"2n-spr")
+ default_nic="nic_intel-e810cq"
+ ;;
+ *"2n-clx" | *"2n-zn2")
default_nic="nic_intel-xxv710"
;;
- *"2n-tx2"* | *"3n-alt"* | *"mrr-daily-master")
+ *"2n-tx2" | *"3n-alt")
default_nic="nic_intel-xl710"
;;
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
default_nic="nic_amazon-nitro-50g"
;;
+ *"2n-c7gn" | *"3n-c7gn")
+ default_nic="nic_amazon-nitro-100g"
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ default_nic="nic_amazon-nitro-200g"
+ ;;
+ *"2n-x-"* | *"3n-x-"*)
+ default_nic="nic_intel-e810cq"
+ ;;
*)
default_nic="nic_intel-x710"
;;
@@ -911,14 +996,22 @@ function select_tags () {
awk_nics_sub_cmd+='gsub("x710","10ge2p1x710");'
awk_nics_sub_cmd+='gsub("xl710","40ge2p1xl710");'
awk_nics_sub_cmd+='gsub("x520-da2","10ge2p1x520");'
- awk_nics_sub_cmd+='gsub("x553","10ge2p1x553");'
awk_nics_sub_cmd+='gsub("cx556a","100ge2p1cx556a");'
+ awk_nics_sub_cmd+='gsub("2p1cx7veat","200ge2p1cx7veat");'
+ awk_nics_sub_cmd+='gsub("6p3cx7veat","200ge6p3cx7veat");'
+ awk_nics_sub_cmd+='gsub("cx6dx","100ge2p1cx6dx");'
awk_nics_sub_cmd+='gsub("e810cq","100ge2p1e810cq");'
+ awk_nics_sub_cmd+='gsub("e822cq","25ge2p1e822cq");'
+ awk_nics_sub_cmd+='gsub("e823c","25ge2p1e823c");'
awk_nics_sub_cmd+='gsub("vic1227","10ge2p1vic1227");'
awk_nics_sub_cmd+='gsub("vic1385","40ge2p1vic1385");'
awk_nics_sub_cmd+='gsub("nitro-50g","50ge1p1ENA");'
+ awk_nics_sub_cmd+='gsub("nitro-100g","100ge1p1ENA");'
+ awk_nics_sub_cmd+='gsub("nitro-200g","200ge1p1ENA");'
+ awk_nics_sub_cmd+='gsub("virtual","1ge1p82540em");'
awk_nics_sub_cmd+='if ($9 =="drv_avf") drv="avf-";'
awk_nics_sub_cmd+='else if ($9 =="drv_rdma_core") drv ="rdma-";'
+ awk_nics_sub_cmd+='else if ($9 =="drv_mlx5_core") drv ="mlx5-";'
awk_nics_sub_cmd+='else if ($9 =="drv_af_xdp") drv ="af-xdp-";'
awk_nics_sub_cmd+='else drv="";'
awk_nics_sub_cmd+='if ($1 =="-") cores="";'
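Each gsub expands a job-spec NIC shorthand into the full test-name prefix. One substitution in isolation:

    echo "e810cq" | awk '{gsub("e810cq","100ge2p1e810cq"); print}'
    # prints: 100ge2p1e810cq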
@@ -935,9 +1028,15 @@ function select_tags () {
awk {"$awk_nics_sub_cmd"} || echo "devicetest") || die
SELECTION_MODE="--test"
;;
+ *"hoststack-daily"* )
+ readarray -t test_tag_array <<< $(grep -v "#" \
+ ${tfd}/hoststack_daily/${DUT}-${NODENESS}-${FLAVOR}.md |
+ awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+ SELECTION_MODE="--test"
+ ;;
*"ndrpdr-weekly"* )
readarray -t test_tag_array <<< $(grep -v "#" \
- ${tfd}/mlr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+ ${tfd}/ndrpdr_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
SELECTION_MODE="--test"
;;
@@ -953,6 +1052,12 @@ function select_tags () {
awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
SELECTION_MODE="--test"
;;
+ *"soak-weekly"* )
+ readarray -t test_tag_array <<< $(grep -v "#" \
+ ${tfd}/soak_weekly/${DUT}-${NODENESS}-${FLAVOR}.md |
+ awk {"$awk_nics_sub_cmd"} || echo "perftest") || die
+ SELECTION_MODE="--test"
+ ;;
*"report-iterative"* )
test_sets=(${TEST_TAG_STRING//:/ })
# Run only one test set per run
@@ -993,63 +1098,58 @@ function select_tags () {
# Reasons for blacklisting:
# - ipsechw - Blacklisted on testbeds without crypto hardware accelerator.
case "${TEST_CODE}" in
- *"1n-vbox"*)
+ *"1n-vbox")
test_tag_array+=("!avf")
test_tag_array+=("!vhost")
test_tag_array+=("!flow")
;;
- *"1n_tx2"*)
+ *"1n-alt")
test_tag_array+=("!flow")
;;
- *"2n-skx"*)
+ *"2n-clx")
test_tag_array+=("!ipsechw")
;;
- *"3n-skx"*)
+ *"2n-icx")
test_tag_array+=("!ipsechw")
- # Not enough nic_intel-xxv710 to support double link tests.
- test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
;;
- *"2n-clx"*)
+ *"2n-spr")
+ ;;
+ *"2n-tx2")
test_tag_array+=("!ipsechw")
;;
- *"2n-icx"*)
+ *"2n-zn2")
test_tag_array+=("!ipsechw")
;;
- *"3n-icx"*)
+ *"3n-alt")
test_tag_array+=("!ipsechw")
- # Not enough nic_intel-xxv710 to support double link tests.
- test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
;;
- *"2n-zn2"*)
+ *"3n-icx")
test_tag_array+=("!ipsechw")
+ test_tag_array+=("!3_node_double_link_topoANDnic_intel-xxv710")
;;
- *"2n-dnv"*)
- test_tag_array+=("!memif")
- test_tag_array+=("!srv6_proxy")
- test_tag_array+=("!vhost")
- test_tag_array+=("!vts")
- test_tag_array+=("!drv_avf")
+ *"3n-snr")
;;
- *"2n-tx2"* | *"3n-alt"*)
- test_tag_array+=("!ipsechw")
+ *"3n-icxd")
;;
- *"3n-dnv"*)
- test_tag_array+=("!memif")
- test_tag_array+=("!srv6_proxy")
- test_tag_array+=("!vhost")
- test_tag_array+=("!vts")
- test_tag_array+=("!drv_avf")
+ *"3na-spr")
;;
- *"3n-snr"*)
+ *"3nb-spr")
;;
- *"3n-tsh"*)
- # 3n-tsh only has x520 NICs which don't work with AVF
+ *"3n-tsh")
test_tag_array+=("!drv_avf")
test_tag_array+=("!ipsechw")
;;
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
test_tag_array+=("!ipsechw")
;;
+ *"2n-c7gn" | *"3n-c7gn")
+ test_tag_array+=("!ipsechw")
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ test_tag_array+=("!ipsechw")
+ ;;
+ *"2n-x-"* | *"3n-x-"*)
+ ;;
esac
# We will add excluded NICs.
@@ -1057,19 +1157,17 @@ function select_tags () {
TAGS=()
prefix=""
-
- set +x
- if [[ "${TEST_CODE}" == "vpp-"* ]]; then
- if [[ "${TEST_CODE}" != *"device"* ]]; then
- # Automatic prefixing for VPP perf jobs to limit the NIC used and
- # traffic evaluation to MRR.
- if [[ "${TEST_TAG_STRING-}" == *"nic_"* ]]; then
- prefix="${prefix}mrrAND"
- else
- prefix="${prefix}mrrAND${default_nic}AND"
+ if [[ "${TEST_CODE}" != *"daily"* ]]; then
+ if [[ "${TEST_CODE}" == "vpp-"* ]]; then
+ if [[ "${TEST_CODE}" != *"device"* ]]; then
+ # Automatic prefixing for VPP perf jobs to limit the NIC used.
+ if [[ "${TEST_TAG_STRING-}" != *"nic_"* ]]; then
+ prefix="${default_nic}AND"
+ fi
fi
fi
fi
+ set +x
for tag in "${test_tag_array[@]}"; do
if [[ "${tag}" == "!"* ]]; then
# Exclude tags are not prefixed.
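Editor's note: a sketch of what the reworked prefixing does, with every value assumed for illustration. Non-daily VPP perf jobs without an explicit nic_ tag get the default NIC prepended; exclude tags stay unprefixed as the comment above states.

    TEST_CODE="vpp-perf-verify-master-2n-icx"   # hypothetical job name
    TEST_TAG_STRING="1cAND64bANDip4base"        # contains no "nic_"
    default_nic="nic_intel-e810cq"
    prefix="${default_nic}AND"                  # what the branch above selects
    echo "${prefix}${TEST_TAG_STRING}"
    # -> nic_intel-e810cqAND1cAND64bANDip4base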
@@ -1114,68 +1212,101 @@ function select_topology () {
case_text="${NODENESS}_${FLAVOR}"
case "${case_text}" in
- "1n_vbox")
+ "1n_aws")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
+ TOPOLOGIES_TAGS="1_node_single_link_topo"
+ ;;
+ "1n_c6in")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-c6in*.yaml )
+ TOPOLOGIES_TAGS="1_node_single_link_topo"
+ ;;
+ "1n_alt" | "1n_spr")
TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "1n_skx" | "1n_tx2")
+ "1n_vbox")
TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*vpp_device*.template )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "2n_skx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_skx*.yaml )
- TOPOLOGIES_TAGS="2_node_*_link_topo"
- ;;
- "2n_zn2")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2*.yaml )
- TOPOLOGIES_TAGS="2_node_*_link_topo"
+ "2n_aws")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
+ TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_skx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_skx*.yaml )
- TOPOLOGIES_TAGS="3_node_*_link_topo"
+ "2n_c7gn")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c7gn*.yaml )
+ TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_icx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx*.yaml )
- TOPOLOGIES_TAGS="3_node_*_link_topo"
+ "2n_c6in")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-c6in*.yaml )
+ TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
"2n_clx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx*.yaml )
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_clx_*.yaml )
TOPOLOGIES_TAGS="2_node_*_link_topo"
;;
"2n_icx")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx*.yaml )
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_icx_*.yaml )
TOPOLOGIES_TAGS="2_node_*_link_topo"
;;
- "2n_dnv")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_dnv*.yaml )
+ "2n_spr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_spr_*.yaml )
+ TOPOLOGIES_TAGS="2_node_*_link_topo"
+ ;;
+ "2n_tx2")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2_*.yaml )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_dnv")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_dnv*.yaml )
+ "2n_zn2")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_zn2_*.yaml )
+ TOPOLOGIES_TAGS="2_node_*_link_topo"
+ ;;
+ "3n_alt")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt_*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "3n_tsh")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh*.yaml )
+ "3n_aws")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "2n_tx2")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_tx2*.yaml )
- TOPOLOGIES_TAGS="2_node_single_link_topo"
+ "3n_c7gn")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c7gn*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "3n_alt")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_alt*.yaml )
+ "3n_c6in")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-c6in*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
- "1n_aws")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*1n-aws*.yaml )
- TOPOLOGIES_TAGS="1_node_single_link_topo"
+ "3n_icx")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml )
+ # Trailing underscore is needed to distinguish from 3n_icxd.
+ TOPOLOGIES_TAGS="3_node_*_link_topo"
;;
- "2n_aws")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n-aws*.yaml )
+ "3n_icxd")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_icxd_*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
+ ;;
+ "3n_snr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_snr_*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
+ ;;
+ "3n_tsh")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_tsh_*.yaml )
+ TOPOLOGIES_TAGS="3_node_single_link_topo"
+ ;;
+ "3na_spr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3na_spr_*.yaml )
+ TOPOLOGIES_TAGS="3_node_*_link_topo"
+ ;;
+ "3nb_spr")
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3nb_spr_*.yaml )
+ TOPOLOGIES_TAGS="3_node_*_link_topo"
+ ;;
+ "2n_x"*)
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*2n_"${FLAVOR}"*.yaml )
TOPOLOGIES_TAGS="2_node_single_link_topo"
;;
- "3n_aws")
- TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n-aws*.yaml )
+ "3n_x"*)
+ TOPOLOGIES=( "${TOPOLOGIES_DIR}"/*3n_"${FLAVOR}"*.yaml )
TOPOLOGIES_TAGS="3_node_single_link_topo"
;;
*)
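Editor's note: as the 3n_icx comment above says, the trailing underscore in the new globs keeps similarly named testbeds apart; a quick sketch of the difference:

    ls "${TOPOLOGIES_DIR}"/*3n_icx*.yaml    # would match 3n_icxd_* files too
    ls "${TOPOLOGIES_DIR}"/*3n_icx_*.yaml   # matches only 3n_icx_* testbeds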
@@ -1195,6 +1326,9 @@ function set_environment_variables () {
# Depending on testbed topology, overwrite defaults set in the
# resources/libraries/python/Constants.py file
#
+ # Only to be called from the reservation function,
+ # as resulting values may change based on topology data.
+ #
# Variables read:
# - TEST_CODE - String affecting test selection, usually jenkins job name.
# Variables set:
@@ -1203,17 +1337,34 @@ function set_environment_variables () {
set -exuo pipefail
case "${TEST_CODE}" in
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
- # T-Rex 2.88+ workaround for ENA NICs.
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
export TREX_RX_DESCRIPTORS_COUNT=1024
export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
export TREX_CORE_COUNT=6
# Settings to prevent duration stretching.
export PERF_TRIAL_STL_DELAY=0.1
;;
- *"2n-zn2"*)
+ *"2n-c7gn" | *"3n-c7gn")
+ export TREX_RX_DESCRIPTORS_COUNT=1024
+ export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+ export TREX_CORE_COUNT=6
+ # Settings to prevent duration stretching.
+ export PERF_TRIAL_STL_DELAY=0.1
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ export TREX_RX_DESCRIPTORS_COUNT=1024
+ export TREX_EXTRA_CMDLINE="--mbuf-factor 19"
+ export TREX_CORE_COUNT=6
+ # Settings to prevent duration stretching.
+ export PERF_TRIAL_STL_DELAY=0.1
+ ;;
+ *"2n-zn2")
# Maciek's workaround for Zen2 with lower amount of cores.
export TREX_CORE_COUNT=14
+ ;;
+ *"2n-x-"* | *"3n-x-"* )
+ export TREX_CORE_COUNT=2
+ ;;
esac
}
@@ -1232,7 +1383,8 @@ function untrap_and_unreserve_testbed () {
# Variables read (by inner function):
# - WORKING_TOPOLOGY - Path to topology yaml file of the reserved testbed.
# - PYTHON_SCRIPTS_DIR - Path to directory holding Python scripts.
- # Variables written:
+ # Variables set:
+ # - TERRAFORM_MODULE_DIR - Terraform module directory.
# - WORKING_TOPOLOGY - Set to empty string on successful unreservation.
# Trap unregistered:
# - EXIT - Failure to untrap is reported, but ignored otherwise.
@@ -1253,7 +1405,16 @@ function untrap_and_unreserve_testbed () {
die "${1:-FAILED TO UNRESERVE, FIX MANUALLY.}" 2
}
case "${TEST_CODE}" in
- *"1n-aws"* | *"2n-aws"* | *"3n-aws"*)
+ *"1n-aws" | *"2n-aws" | *"3n-aws")
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}-c5n"
+ terraform_destroy || die "Failed to call terraform destroy."
+ ;;
+ *"2n-c7gn" | *"3n-c7gn")
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
+ terraform_destroy || die "Failed to call terraform destroy."
+ ;;
+ *"1n-c6in" | *"2n-c6in" | *"3n-c6in")
+ TERRAFORM_MODULE_DIR="terraform-aws-${NODENESS}-${FLAVOR}"
terraform_destroy || die "Failed to call terraform destroy."
;;
*)
diff --git a/resources/libraries/bash/function/device.sh b/resources/libraries/bash/function/device.sh
index 86d482068a..4d39cd2de6 100644
--- a/resources/libraries/bash/function/device.sh
+++ b/resources/libraries/bash/function/device.sh
@@ -1,4 +1,4 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2024 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -173,13 +173,17 @@ function clean_environment () {
}
# Rebind interfaces back to kernel drivers.
+ i=0
for ADDR in ${TG_PCIDEVS[@]}; do
- DRIVER="${TG_DRIVERS[0]}"
+ DRIVER="${TG_DRIVERS[${i}]}"
bind_interfaces_to_driver || die
+ ((i++))
done
+ i=0
for ADDR in ${DUT1_PCIDEVS[@]}; do
- DRIVER="${DUT1_DRIVERS[0]}"
+ DRIVER="${DUT1_DRIVERS[${i}]}"
bind_interfaces_to_driver || die
+ ((i++))
done
}
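Editor's note: the fix above advances through the driver array in step with the PCI address array instead of reusing element 0 for every device; a self-contained sketch with hypothetical values:

    TG_PCIDEVS=("0000:3b:00.0" "0000:5e:00.0")   # made-up PCI addresses
    TG_DRIVERS=("i40e" "mlx5_core")              # per-device kernel drivers
    i=0
    for ADDR in "${TG_PCIDEVS[@]}"; do
        echo "rebinding ${ADDR} to ${TG_DRIVERS[${i}]}"
        ((i++))
    done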
@@ -312,14 +316,24 @@ function get_available_interfaces () {
dut1_netdev=(ens5 enp175)
ports_per_nic=2
;;
- "1n_tx2")
+ "1n_alt")
# Add Intel Corporation XL710/X710 Virtual Function to the
# whitelist.
- pci_id="0x154c"
- tg_netdev=(enp5)
- dut1_netdev=(enp145)
+ # Add MT2892 Family [ConnectX-6 Dx] Virtual Function to the
+ # whitelist.
+ pci_id="0x154c\|0x101e"
+ tg_netdev=(enp1s0f0 enp1s0f1 enP1p1s0f0)
+ dut1_netdev=(enP3p2s0f0 enP3p2s0f1 enP1p1s0f1)
ports_per_nic=2
;;
+ "1n_spr")
+ # Add Intel Corporation E810 Virtual Function to the
+ # whitelist.
+ pci_id="0x1889"
+ tg_netdev=(enp42s0 enp44s0)
+ dut1_netdev=(enp63s0 enp61s0)
+ ports_per_nic=1
+ ;;
"1n_vbox")
# Add Intel Corporation 82545EM Gigabit Ethernet Controller to the
# whitelist.
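Editor's note: the escaped pipe in the new pci_id value is a basic-regex alternation, so a single grep can match either vendor/device ID; a sketch:

    printf '0x154c\n0x101e\n0x1889\n' | grep "0x154c\|0x101e"
    # prints only the first two IDs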
@@ -495,7 +509,10 @@ function get_csit_model () {
"0x1572"|"0x154c")
MODEL="Intel-X710"
;;
- "*")
+ "0x101e")
+ MODEL="Mellanox-CX6DX"
+ ;;
+ *)
MODEL="virtual"
esac
fi
@@ -697,7 +714,7 @@ function set_env_variables () {
CSIT_TG_HOST="$(hostname --all-ip-addresses | awk '{print $1}')" || {
die "Reading hostname IP address failed!"
}
- CSIT_TG_PORT="${DCR_PORTS[tg]#*:}"
+ CSIT_TG_PORT="${DCR_PORTS[tg]##*:}"
CSIT_TG_UUID="${DCR_UUIDS[tg]}"
CSIT_TG_ARCH="$(uname -i)" || {
die "Reading machine architecture failed!"
@@ -705,7 +722,7 @@ function set_env_variables () {
CSIT_DUT1_HOST="$(hostname --all-ip-addresses | awk '{print $1}')" || {
die "Reading hostname IP address failed!"
}
- CSIT_DUT1_PORT="${DCR_PORTS[dut1]#*:}"
+ CSIT_DUT1_PORT="${DCR_PORTS[dut1]##*:}"
CSIT_DUT1_UUID="${DCR_UUIDS[dut1]}"
CSIT_DUT1_ARCH="$(uname -i)" || {
die "Reading machine architecture failed!"
diff --git a/resources/libraries/bash/function/docs.sh b/resources/libraries/bash/function/docs.sh
deleted file mode 100644
index cb3f36d21a..0000000000
--- a/resources/libraries/bash/function/docs.sh
+++ /dev/null
@@ -1,267 +0,0 @@
-#!/usr/bin/env bash
-
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -exuo pipefail
-
-
-function die_on_docs_error () {
-
- # Source this fragment if you want to abort on any failure.
- #
- # Variables read:
- # - DOCS_EXIT_STATUS - Set by a generation function.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- if [[ "${DOCS_EXIT_STATUS}" != "0" ]]; then
- die "Failed to generate docs!" "${DOCS_EXIT_STATUS}"
- fi
-}
-
-function generate_docs () {
-
- # Generate docs content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of docs generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/doc_gen || die "Pushd failed!"
-
- BUILD_DIR="_build"
-
- # Remove the old build:
- rm -rf ${BUILD_DIR} || true
- rm -rf /tmp/tmp-csit* || true
-
- export WORKING_DIR=$(mktemp -d /tmp/tmp-csitXXX) || die "export failed"
-
- # Create working directories
- mkdir -p "${BUILD_DIR}" || die "Mkdir failed!"
- mkdir -p "${WORKING_DIR}"/resources/libraries/python/ || die "Mkdir failed!"
- mkdir -p "${WORKING_DIR}"/resources/libraries/robot/ || die "Mkdir failed!"
- mkdir -p "${WORKING_DIR}"/tests/ || die "Mkdir failed!"
-
- # Copy the Sphinx source files:
- cp -r src/* ${WORKING_DIR}/ || die "Copy the Sphinx source files failed!"
-
- # Copy the source files to be processed:
- from_dir="${RESOURCES_DIR}/libraries/python/"
- to_dir="${WORKING_DIR}/resources/libraries/python/"
- dirs="${from_dir} ${to_dir}"
- rsync -ar --include='*/' --include='*.py' --exclude='*' ${dirs} || {
- die "rSync failed!"
- }
-
- from_dir="${RESOURCES_DIR}/libraries/robot/"
- to_dir="${WORKING_DIR}/resources/libraries/robot/"
- dirs="${from_dir} ${to_dir}"
- rsync -ar --include='*/' --include '*.robot' --exclude '*' ${dirs} || {
- die "rSync failed!"
- }
- touch ${to_dir}/index.robot || {
- die "Touch index.robot file failed!"
- }
-
- from_dir="${CSIT_DIR}/tests/"
- to_dir="${WORKING_DIR}/tests/"
- dirs="${from_dir} ${to_dir}"
- rsync -ar --include='*/' --include '*.robot' --exclude '*' ${dirs} || {
- die "rSync failed!"
- }
-
- # to remove GPL licence section
- find "${WORKING_DIR}/tests/" -type f -exec sed -i '/\*\*\*/,$!d' {} \;
-
- find ${WORKING_DIR}/ -type d -exec echo {} \; -exec touch {}/__init__.py \;
-
- python3 gen_rst.py || die "Generate .rst files failed!"
-
- # Generate the documentation:
- DATE=$(date -u '+%d-%b-%Y') || die "Get date failed!"
-
- all_options=("-v")
- all_options+=("-c" "${WORKING_DIR}")
- all_options+=("-a")
- all_options+=("-b" "html")
- all_options+=("-E")
- all_options+=("-D" "version="${GERRIT_BRANCH:-master}"")
- all_options+=("${WORKING_DIR}" "${BUILD_DIR}/")
-
- set +e
- sphinx-build "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-}
-
-function generate_report () {
-
- # Generate report content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # - ${GERRIT_BRANCH} - Gerrit branch used for release tagging.
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of report generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
-
- # Set default values in config array.
- typeset -A CFG
- typeset -A DIR
-
- DIR[WORKING]="_tmp"
-
- # Create working directories.
- mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
-
- export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
-
- all_options=("pal.py")
- all_options+=("--specification" "specifications/report")
- all_options+=("--release" "${GERRIT_BRANCH:-master}")
- all_options+=("--week" $(date "+%V"))
- all_options+=("--logging" "INFO")
- all_options+=("--force")
-
- set +e
- python "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-
-}
-
-function generate_report_local () {
-
- # Generate report from local content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # - ${CSIT_REPORT_FILENAME} - Source filename.
- # - ${CSIT_REPORT_DIRECTORYNAME} - Source directory.
- # - ${CSIT_REPORT_INSTALL_DEPENDENCIES} - Whether to install dependencies.
- # - ${CSIT_REPORT_INSTALL_LATEX} - Whether to install latex.
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of report generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
-
- filename="${CSIT_REPORT_FILENAME-}"
- directoryname="${CSIT_REPORT_DIRECTORYNAME-}"
- install_dependencies="${CSIT_REPORT_INSTALL_DEPENDENCIES:-1}"
- install_latex="${CSIT_REPORT_INSTALL_LATEX:-0}"
-
- # Set default values in config array.
- typeset -A CFG
- typeset -A DIR
-
- DIR[WORKING]="_tmp"
-
- # Install system dependencies.
- if [[ ${install_dependencies} -eq 1 ]] ;
- then
- sudo apt -y update || die "APT update failed!"
- sudo apt -y install libxml2 libxml2-dev libxslt-dev \
- build-essential zlib1g-dev unzip || die "APT install failed!"
- fi
-
- if [[ ${install_latex} -eq 1 ]] ;
- then
- sudo apt -y update || die "APT update failed!"
- sudo apt -y install xvfb texlive-latex-recommended \
- texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra \
- latexmk wkhtmltopdf inkscape || die "APT install failed!"
- target="/usr/share/texlive/texmf-dist/web2c/texmf.cnf"
- sudo sed -i.bak 's/^\(main_memory\s=\s\).*/\110000000/' "${target}" || {
- die "Patching latex failed!"
- }
- fi
-
- # Create working directories.
- mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
-
- export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
-
- all_options=("pal.py")
- all_options+=("--specification" "specifications/report_local")
- all_options+=("--release" "${RELEASE:-master}")
- all_options+=("--week" "${WEEK:-1}")
- all_options+=("--logging" "INFO")
- all_options+=("--force")
- if [[ ${filename} != "" ]]; then
- all_options+=("--input-file" "${filename}")
- fi
- if [[ ${directoryname} != "" ]]; then
- all_options+=("--input-directory" "${directoryname}")
- fi
-
- set +e
- python "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-
-}
-
-function generate_trending () {
-
- # Generate trending content.
- #
- # Variable read:
- # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
- # Variables set:
- # - DOCS_EXIT_STATUS - Exit status of trending generation.
- # Functions called:
- # - die - Print to stderr and exit.
-
- set -exuo pipefail
-
- pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
-
- # Set default values in config array.
- typeset -A DIR
-
- DIR[WORKING]="_tmp"
-
- # Create working directories.
- mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
-
- export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
-
- all_options=("pal.py")
- all_options+=("--specification" "specifications/trending")
- all_options+=("--logging" "INFO")
- all_options+=("--force")
-
- set +e
- python "${all_options[@]}"
- DOCS_EXIT_STATUS="$?"
- set -e
-
-}
diff --git a/resources/libraries/bash/function/dpdk.sh b/resources/libraries/bash/function/dpdk.sh
index f013683659..86abb84a02 100644
--- a/resources/libraries/bash/function/dpdk.sh
+++ b/resources/libraries/bash/function/dpdk.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2022 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -96,19 +96,8 @@ function dpdk_compile () {
# Configure generic build - the same used by VPP
meson_options="${meson_options} -Dplatform=generic"
- # Patch L3FWD.
- sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128"
- sed_rxd+="/#define RTE_TEST_RX_DESC_DEFAULT 1024/g"
- sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512"
- sed_txd+="/#define RTE_TEST_TX_DESC_DEFAULT 1024/g"
- sed_file="./main.c"
- pushd examples/l3fwd || die "Pushd failed"
- sed -i "${sed_rxd}" "${sed_file}" || die "Patch failed"
- sed -i "${sed_txd}" "${sed_file}" || die "Patch failed"
- popd || die "Popd failed"
-
# Compile using Meson and Ninja.
- meson ${meson_options} build || {
+ meson setup ${meson_options} build || {
die "Failed to compile DPDK!"
}
ninja -C build || die "Failed to compile DPDK!"
@@ -201,7 +190,6 @@ function dpdk_l3fwd_compile () {
#
# Variables read:
# - DPDK_DIR - Path to DPDK framework.
- # - CSIT_DIR - Path to CSIT framework.
# Functions called:
# - die - Print to stderr and exit.
@@ -209,14 +197,7 @@ function dpdk_l3fwd_compile () {
pushd "${DPDK_DIR}" || die "Pushd failed"
# Patch L3FWD.
- sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128"
- sed_rxd+="/#define RTE_TEST_RX_DESC_DEFAULT 2048/g"
- sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512"
- sed_txd+="/#define RTE_TEST_TX_DESC_DEFAULT 2048/g"
- sed_file="./main.c"
pushd examples/l3fwd || die "Pushd failed"
- sed -i "${sed_rxd}" "${sed_file}" || die "Patch failed"
- sed -i "${sed_txd}" "${sed_file}" || die "Patch failed"
chmod +x ${1} && source ${1} || die "Patch failed"
popd || die "Popd failed"
diff --git a/resources/libraries/bash/function/eb_version.sh b/resources/libraries/bash/function/eb_version.sh
new file mode 100644
index 0000000000..0393030065
--- /dev/null
+++ b/resources/libraries/bash/function/eb_version.sh
@@ -0,0 +1,159 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+
+function die_on_error () {
+
+ # Source this fragment if you want to abort on any failure.
+ #
+ # Variables read:
+ # - ${CODE_EXIT_STATUS} - Exit status of report generation.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ if [[ "${CODE_EXIT_STATUS}" != "0" ]]; then
+ die "Failed to generate docs!" "${CODE_EXIT_STATUS}"
+ fi
+}
+
+
+function eb_version_deploy () {
+
+ # Deploy Elastic Beanstalk CDash content.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # - ${TERRAFORM_OUTPUT_VAL} - Terraform output value.
+ # Variables set:
+ # - ${CODE_EXIT_STATUS} - Exit status of report generation.
+ # - ${TERRAFORM_OUTPUT_VAR} - Register Terraform output variable name.
+ # Functions called:
+ # - eb_version_verify - Build and verify EB version.
+ # - terraform_apply - Apply EB version by Terraform.
+ # - terraform_output - Get the application name string from Terraform.
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ eb_version_build_verify || die "Failed to call Elastic Beanstalk verify!"
+ terraform_apply || die "Failed to call Terraform apply!"
+
+ TERRAFORM_OUTPUT_VAR="application_version"
+ terraform_output || die "Failed to call Terraform output!"
+
+ #aws --region eu-central-1 elasticbeanstalk update-environment \
+ # --environment-name fdio-csit-dash-env \
+ # --version-label "${TERRAFORM_OUTPUT_VAL}"
+}
+
+
+function eb_version_build_verify () {
+
+ # Build and verify Elastic Beanstalk CDash integrity.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # - ${BUILD_ID} - Build identifier, exported as the application version.
+ # Variables set:
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module sub-directory.
+ # Functions called:
+ # - hugo_init_modules - Initialize Hugo modules.
+ # - hugo_build_site - Build static site with Hugo.
+ # - terraform_init - Initialize Terraform modules.
+ # - terraform_validate - Validate Terraform code.
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ if ! installed zip; then
+ die "Please install zip!"
+ fi
+
+ hugo_init_modules || die "Failed to call Hugo initialize!"
+ hugo_build_site || die "Failed to call Hugo build!"
+
+ pushd "${CSIT_DIR}"/csit.infra.dash || die "Pushd failed!"
+ pushd app || die "Pushd failed!"
+ find . -type d -name "__pycache__" -exec rm -rf "{}" \;
+ find . -type d -name ".webassets-cache" -exec rm -rf "{}" \;
+ zip -r ../app.zip . || die "Compress failed!"
+ popd || die "Popd failed!"
+ popd || die "Popd failed!"
+
+ TERRAFORM_MODULE_DIR="terraform-aws-fdio-csit-dash-app-base"
+
+ export TF_VAR_application_version="${BUILD_ID}"
+ terraform_init || die "Failed to call Terraform init!"
+ terraform_validate || die "Failed to call Terraform validate!"
+}
+
+
+function generate_report () {
+
+ # Generate report content.
+ #
+ # Variable read:
+ # - ${TOOLS_DIR} - Path to existing resources subdirectory "tools".
+ # - ${GERRIT_BRANCH} - Gerrit branch used for release tagging.
+ # Variables set:
+ # - ${CODE_EXIT_STATUS} - Exit status of report generation.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ set -exuo pipefail
+
+ pushd "${TOOLS_DIR}"/presentation || die "Pushd failed!"
+
+ # Set default values in config array.
+ typeset -A CFG
+ typeset -A DIR
+
+ DIR[WORKING]="_tmp"
+
+ # Create working directories.
+ mkdir "${DIR[WORKING]}" || die "Mkdir failed!"
+
+ export PYTHONPATH=`pwd`:`pwd`/../../../ || die "Export failed!"
+
+ all_options=("pal.py")
+ all_options+=("--specification" "specifications/report")
+ all_options+=("--release" "${GERRIT_BRANCH:-master}")
+ all_options+=("--week" $(date "+%V"))
+ all_options+=("--logging" "INFO")
+ all_options+=("--force")
+
+ set +e
+ python "${all_options[@]}"
+ CODE_EXIT_STATUS="$?"
+ set -e
+}
+
+function installed () {
+
+ # Check if the given utility is installed. Fail if not installed.
+ #
+ # Arguments:
+ # - ${1} - Utility to check.
+ # Returns (implicitly):
+ # - 0 - If command is installed.
+ # - 1 - If command is not installed.
+
+ set -exuo pipefail
+
+ command -v "${1}"
+}
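Editor's note: a minimal usage sketch for this new file, assuming common.sh, hugo.sh and terraform.sh are already sourced and BUILD_ID is exported by the CI job:

    BUILD_ID="12345"                  # hypothetical build number
    eb_version_deploy || die "Deploy failed!"
    echo "${TERRAFORM_OUTPUT_VAL}"    # deployed application version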
diff --git a/resources/libraries/bash/function/gather.sh b/resources/libraries/bash/function/gather.sh
index 4958e5251b..e432777e32 100644
--- a/resources/libraries/bash/function/gather.sh
+++ b/resources/libraries/bash/function/gather.sh
@@ -1,5 +1,5 @@
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Copyright (c) 2021 PANTHEON.tech and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2023 PANTHEON.tech and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -26,7 +26,7 @@ function gather_build () {
# Variables read:
# - TEST_CODE - String affecting test selection, usually jenkins job name.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# Variables set:
# - DUT - CSIT test/ subdirectory containing suites to execute.
# Directories updated:
@@ -92,7 +92,8 @@ function gather_dpdk () {
then
echo "Downloading latest DPDK packages from repo..."
# URL is not in quotes, calling command from variable keeps them.
- wget_command=("wget" "--no-check-certificate" "-nv" "-O" "-")
+ wget_command=("wget" "--no-check-certificate" "--compression=auto")
+ wget_command+=("-nv" "-O" "-")
wget_command+=("${dpdk_repo}")
dpdk_stable_ver="$("${wget_command[@]}" | grep -v "2015"\
| grep -Eo 'dpdk-[^\"]+xz' | tail -1)" || {
@@ -130,7 +131,7 @@ function gather_vpp () {
# Variables read:
# - BASH_FUNCTION_DIR - Bash directory with functions.
# - TEST_CODE - The test selection string from environment or argument.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - CSIT_DIR - Path to existing root of local CSIT git repository.
# Variables set:
# - VPP_VERSION - VPP stable version under test.
@@ -173,7 +174,7 @@ function gather_vpp () {
;;
"vpp-csit-"*)
# Shorten line.
- pgks="${PKG_SUFFIX}"
+ pkgs="${PKG_SUFFIX}"
# Use locally built packages.
mv "${DOWNLOAD_DIR}"/../*vpp*."${pkgs}" "${DOWNLOAD_DIR}"/ || {
die "Move command failed."
diff --git a/resources/libraries/bash/function/hugo.sh b/resources/libraries/bash/function/hugo.sh
new file mode 100644
index 0000000000..052e8333fb
--- /dev/null
+++ b/resources/libraries/bash/function/hugo.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+
+function go_install () {
+
+ # Install Go.
+
+ OS_ARCH=$(uname -m) || die "Failed to get arch."
+ case "${OS_ARCH}" in
+ x86_64) architecture="amd64" ;;
+ aarch64) architecture="arm64" ;;
+ esac
+
+ go_version="go1.20.2.linux-${architecture}.tar.gz"
+ go_url="https://go.dev/dl"
+ wget "${go_url}/${go_version}"
+ rm -rf "/usr/local/go"
+ tar -C "/usr/local" -xzf "go1.20.2.linux-${architecture}.tar.gz"
+ rm "go1.20.2.linux-${architecture}.tar.gz"
+ export PATH=$PATH:/usr/local/go/bin
+}
+
+
+function hugo_build_site () {
+
+ # Build site via Hugo.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ if ! installed hugo; then
+ die "Please install Hugo!"
+ fi
+
+ pushd "${CSIT_DIR}"/docs || die "Pushd failed!"
+ hugo || die "Failed to run Hugo build!"
+ popd || die "Popd failed!"
+}
+
+
+function hugo_init_modules () {
+
+ # Initialize Hugo modules.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory.
+ # Functions called:
+ # - die - Print to stderr and exit.
+
+ if ! installed hugo; then
+ die "Please install Hugo!"
+ fi
+
+ hugo_book_url="github.com/alex-shpak/hugo-book"
+ hugo_book_version="v0.0.0-20230424134111-d86d5e70c7c0"
+ hugo_book_link="${hugo_book_url}@${hugo_book_version}"
+ pushd "${CSIT_DIR}"/docs || die "Pushd failed!"
+ export PATH=$PATH:/usr/local/go/bin
+ hugo mod get "${hugo_book_link}" || die "Failed to run Hugo mod!"
+ popd || die "Popd failed!"
+}
+
+
+function hugo_install () {
+
+ # Install Hugo Extended.
+
+ OS_ARCH=$(uname -m) || die "Failed to get arch."
+ case "${OS_ARCH}" in
+ x86_64) architecture="amd64" ;;
+ aarch64) architecture="arm64" ;;
+ esac
+
+ hugo_version="v0.111.3/hugo_extended_0.111.3_linux-${architecture}.deb"
+ hugo_url="https://github.com/gohugoio/hugo/releases/download"
+ hugo_link="${hugo_url}/${hugo_version}"
+ wget -O "hugo.deb" "${hugo_link}" || die "Failed to install Hugo!"
+ dpkg -i "hugo.deb" || die "Failed to install Hugo!"
+ rm "hugo.deb" || die "Failed to install Hugo!"
+}
+
+
+function installed () {
+
+ # Check if the given utility is installed. Fail if not installed.
+ #
+ # Arguments:
+ # - ${1} - Utility to check.
+ # Returns (implicitly):
+ # - 0 - If command is installed.
+ # - 1 - If command is not installed.
+
+ set -exuo pipefail
+
+ command -v "${1}"
+}
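Editor's note: an assumed bootstrap order on a fresh build machine, using only functions from this new file (a sketch, not a prescribed flow):

    go_install          # Hugo modules need the Go toolchain on PATH
    hugo_install        # install Hugo Extended from the release .deb
    hugo_init_modules   # fetch the hugo-book theme module
    hugo_build_site     # render ${CSIT_DIR}/docs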
diff --git a/resources/libraries/bash/function/nginx.sh b/resources/libraries/bash/function/nginx.sh
index 122af23852..a2cf8e6514 100755
--- a/resources/libraries/bash/function/nginx.sh
+++ b/resources/libraries/bash/function/nginx.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Intel and/or its affiliates.
+# Copyright (c) 2023 Intel and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -21,7 +21,7 @@ function gather_nginx () {
# Ensure stable NGINX archive is downloaded.
#
# Variables read:
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - NGINX_VER - Version number of Nginx.
set -exuo pipefail
pushd "${DOWNLOAD_DIR}" || die "Pushd failed."
@@ -53,7 +53,7 @@ function common_dirs () {
# Variables set:
# - BASH_FUNCTION_DIR - Path to existing directory this file is located in.
# - CSIT_DIR - Path to CSIT framework.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - NGINX_DIR - Path to NGINX framework.
# - NGINX_VER - Version number of Nginx.
# Functions called:
@@ -121,7 +121,7 @@ function nginx_extract () {
# Variables read:
# - NGINX_DIR - Path to NGINX framework.
# - CSIT_DIR - Path to CSIT framework.
- # - DOWNLOAD_DIR - Path to directory pybot takes the build to test from.
+ # - DOWNLOAD_DIR - Path to directory robot takes the build to test from.
# - NGINX_VER - Version number of Nginx.
# Functions called:
# - die - Print to stderr and exit.
diff --git a/resources/libraries/bash/function/per_patch.sh b/resources/libraries/bash/function/per_patch.sh
index 2149d79b52..44bd57da80 100644
--- a/resources/libraries/bash/function/per_patch.sh
+++ b/resources/libraries/bash/function/per_patch.sh
@@ -1,5 +1,5 @@
-# Copyright (c) 2022 Cisco and/or its affiliates.
-# Copyright (c) 2022 PANTHEON.tech s.r.o.
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Copyright (c) 2023 PANTHEON.tech s.r.o.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -18,61 +18,14 @@ set -exuo pipefail
# Generally, the functions assume "common.sh" library has been sourced already.
# Keep functions ordered alphabetically, please.
-function archive_test_results () {
- # Arguments:
- # - ${1}: Directory to archive to. Required. Parent has to exist.
- # Variable set:
- # - TARGET - Target directory.
- # Variables read:
- # - ARCHIVE_DIR - Path to where robot result files are created in.
- # - VPP_DIR - Path to existing directory, root for to relative paths.
- # Directories updated:
- # - ${1} - Created, and robot and parsing files are moved/created there.
- # Functions called:
- # - die - Print to stderr and exit, defined in common.sh
-
- set -exuo pipefail
-
- cd "${VPP_DIR}" || die "Change directory command failed."
- TARGET="$(readlink -f "$1")"
- mkdir -p "${TARGET}" || die "Directory creation failed."
- file_list=("output.xml" "log.html" "report.html")
- file_list+=("tests" "generated_output_raw.tar.gz")
- for filename in "${file_list[@]}"; do
- mv "${ARCHIVE_DIR}/${filename}" "${TARGET}/${filename}" || {
- die "Attempt to move '${filename}' failed."
- }
- done
-}
-
-
-function archive_parse_test_results () {
-
- # Arguments:
- # - ${1}: Directory to archive to. Required. Parent has to exist.
- # Variables read:
- # - TARGET - Target directory.
- # Functions called:
- # - die - Print to stderr and exit, defined in common.sh
- # - archive_test_results - Archiving results.
- # - parse_bmrr_results - See definition in this file.
-
- set -exuo pipefail
-
- archive_test_results "$1" || die
- parse_bmrr_results "${TARGET}" || {
- die "The function should have died on error."
- }
-}
-
-
-function build_vpp_ubuntu_amd64 () {
+function build_vpp_ubuntu () {
# This function is using make pkg-verify to build VPP with all dependencies
# that is ARCH/OS aware. VPP repo is SSOT for building mechanics and CSIT
# is consuming artifacts. This way if VPP will introduce change in building
# mechanics they will not be blocked by CSIT repo.
+ #
# Arguments:
# - ${1} - String identifier for echo, can be unset.
# Variables read:
@@ -116,7 +69,6 @@ function compare_test_results () {
# of parent build.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
- # - parse_bmrr_results - See definition in this file.
# Exit code:
# - 0 - If the comparison utility sees no regression (nor data error).
# - 1 - If the comparison utility sees a regression (or data error).
@@ -137,50 +89,109 @@ function initialize_csit_dirs () {
# Variables read:
# - VPP_DIR - Path to WORKSPACE, parent of created directories.
# Directories created:
- # - csit_current - Holding test results of the patch under test (PUT).
- # - csit_parent - Holding test results of parent of PUT.
+ # - csit_{part} - See the caller what it is used for.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "csit_current" "csit_parent" || {
- die "Directory deletion failed."
- }
- mkdir -p "csit_current" "csit_parent" || {
- die "Directory creation failed."
- }
+ while true; do
+ if [[ ${#} -lt 1 ]]; then
+ # All directories created.
+ break
+ fi
+ name_part="${1}" || die
+ shift || die
+ dir_name="csit_${name_part}" || die
+ rm -rf "${dir_name}" || die "Directory deletion failed."
+ mkdir -p "${dir_name}" || die "Directory creation failed."
+ done
}
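Editor's note: usage sketch; the name parts shown are the ones the bisect loop below expects:

    initialize_csit_dirs "early" "middle" "late" || die
    # recreates ${VPP_DIR}/csit_early, csit_middle and csit_late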
-function parse_bmrr_results () {
+function main_bisect_loop () {
- # Currently "parsing" is just two greps.
- # TODO: Re-use PAL parsing code, make parsing more general and centralized.
+ # Perform the iterative part of the bisect entry script.
+ #
+ # The logic is too complex to remain in the entry script.
#
+ # At the start, the loop assumes git bisect old/new has just been executed,
+ # and verified more iterations are needed.
+ # The iteration cleans the build directory and builds the new mid commit.
+ # Then, testbed is reserved, tests run, and testbed unreserved.
+ # Results are moved from default to archive location
+ # (indexed by iteration number) and analyzed.
+ # The new adjective ("old" or "new") is selected,
+ # and git bisect with the adjective is executed.
+ # The symlinks csit_early and csit_late are updated to the tightest bounds.
+ # The git.log file is examined and if the bisect is finished, loop ends.
+
+ iteration=0
+ while true
+ do
+ let iteration+=1
+ git clean -dffx "build"/ "build-root"/ || die
+ build_vpp_ubuntu "MIDDLE" || die
+ select_build "build-root" || die
+ check_download_dir || die
+ reserve_and_cleanup_testbed || die
+ run_robot || die
+ move_test_results "csit_middle/${iteration}" || die
+ untrap_and_unreserve_testbed || die
+ rm -vf "csit_mid" || die
+ ln -s -T "csit_middle/${iteration}" "csit_mid" || die
+ set +e
+ python3 "${TOOLS_DIR}/integrated/compare_bisect.py"
+ bisect_rc="${?}"
+ set -e
+ if [[ "${bisect_rc}" == "3" ]]; then
+ adjective="new"
+ rm -v "csit_late" || die
+ ln -s -T "csit_middle/${iteration}" "csit_late" || die
+ elif [[ "${bisect_rc}" == "0" ]]; then
+ adjective="old"
+ rm -v "csit_early" || die
+ ln -s -T "csit_middle/${iteration}" "csit_early" || die
+ else
+ die "Unexpected return code: ${bisect_rc}"
+ fi
+ git bisect "${adjective}" | tee "git.log" || die
+ git describe || die
+ git status || die
+ if head -n 1 "git.log" | cut -b -11 | fgrep -q "Bisecting:"; then
+ echo "Still bisecting..."
+ else
+ echo "Bisecting done."
+ break
+ fi
+ done
+}
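Editor's note: the compare_bisect.py return-code contract, as assumed from the loop above:

    # 0 -> middle run resembles the early bound -> git bisect old
    # 3 -> middle run resembles the late bound  -> git bisect new
    # any other value -> die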
+
+
+function move_test_results () {
+
# Arguments:
- # - ${1} - Path to (existing) directory holding robot output.xml result.
- # Files read:
- # - output.xml - From argument location.
- # Files updated:
- # - results.txt - (Re)created, in argument location.
+ # - ${1}: Directory to archive to. Required. Parent has to exist.
+ # Variable set:
+ # - TARGET - Target archival directory, equivalent to the argument.
+ # Variables read:
+ # - ARCHIVE_DIR - Path to where robot result files are created in.
+ # - VPP_DIR - Path to existing directory, root for relative paths.
+ # Directories updated:
+ # - ${1} - Created, and robot and parsing files are moved/created there.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
- rel_dir="$(readlink -e "${1}")" || die "Readlink failed."
- in_file="${rel_dir}/output.xml"
- out_file="${rel_dir}/results.txt"
- # TODO: Do we need to check echo exit code explicitly?
- echo "Parsing ${in_file} putting results into ${out_file}"
- echo "TODO: Re-use parts of PAL when they support subsample test parsing."
- pattern='Maximum Receive Rate trial results in .*'
- pattern+=' per second: .*\]</status>'
- grep -o "${pattern}" "${in_file}" | grep -o '\[.*\]' > "${out_file}" || {
- die "Some parsing grep command has failed."
- }
+ cd "${VPP_DIR}" || die "Change directory command failed."
+ TARGET="$(readlink -f "$1")"
+ mkdir -p "${TARGET}" || die "Directory creation failed."
+ file_list=("output.xml" "log.html" "report.html" "tests")
+ for filename in "${file_list[@]}"; do
+ mv "${ARCHIVE_DIR}/${filename}" "${TARGET}/${filename}" || die
+ done
}
@@ -209,56 +220,37 @@ function select_build () {
}
-function set_aside_commit_build_artifacts () {
+function set_aside_build_artifacts () {
- # Function is copying VPP built artifacts from actual checkout commit for
- # further use and clean git.
+ # Function used to save VPP .deb artifacts from the currently finished build.
+ #
+ # After the artifacts are copied to the target directory,
+ # the main git tree is cleaned up so as not to interfere with the next build.
+ #
+ # Arguments:
+ # - ${1} - String to derive the target directory name from. Required.
# Variables read:
# - VPP_DIR - Path to existing directory, parent to accessed directories.
# Directories read:
# - build-root - Existing directory with built VPP artifacts (also DPDK).
# Directories updated:
# - ${VPP_DIR} - A local git repository, parent commit gets checked out.
- # - build_current - Old contents removed, content of build-root copied here.
+ # - build_${1} - Old contents removed, content of build-root copied here.
# Functions called:
# - die - Print to stderr and exit, defined in common.sh
set -exuo pipefail
cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "build_current" || die "Remove operation failed."
- mkdir -p "build_current" || die "Directory creation failed."
- mv "build-root"/*".deb" "build_current"/ || die "Move operation failed."
+ dir_name="build_${1}" || die
+ rm -rf "${dir_name}" || die "Remove operation failed."
+ mkdir -p "${dir_name}" || die "Directory creation failed."
+ mv "build-root"/*".deb" "${dir_name}"/ || die "Move operation failed."
# The previous build could have left some incompatible leftovers,
# e.g. DPDK artifacts of different version (in build/external).
# Also, there usually is a copy of dpdk artifact in build-root.
git clean -dffx "build"/ "build-root"/ || die "Git clean operation failed."
- # Finally, check out the parent commit.
- git checkout HEAD~ || die "Git checkout operation failed."
- # Display any other leftovers.
- git status || die "Git status operation failed."
-}
-
-
-function set_aside_parent_build_artifacts () {
-
- # Function is copying VPP built artifacts from parent checkout commit for
- # further use. Checkout to parent is not part of this function.
- # Variables read:
- # - VPP_DIR - Path to existing directory, parent of accessed directories.
- # Directories read:
- # - build-root - Existing directory with built VPP artifacts (also DPDK).
- # Directories updated:
- # - build_parent - Old directory removed, build-root debs moved here.
- # Functions called:
- # - die - Print to stderr and exit, defined in common.sh
-
- set -exuo pipefail
-
- cd "${VPP_DIR}" || die "Change directory operation failed."
- rm -rf "build_parent" || die "Remove failed."
- mkdir -p "build_parent" || die "Directory creation operation failed."
- mv "build-root"/*".deb" "build_parent"/ || die "Move operation failed."
+ git status || die
}
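Editor's note: a usage sketch of the parametrized variant in a per-patch flow; the parent checkout now happens in the caller, since it was removed from this function:

    build_vpp_ubuntu "CURRENT" || die
    set_aside_build_artifacts "current" || die   # .debs go to build_current/
    git checkout HEAD~ || die "Git checkout operation failed."
    build_vpp_ubuntu "PARENT" || die
    set_aside_build_artifacts "parent" || die    # .debs go to build_parent/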
diff --git a/resources/libraries/bash/function/terraform.sh b/resources/libraries/bash/function/terraform.sh
index 1766381f75..2a0e0ed2be 100644
--- a/resources/libraries/bash/function/terraform.sh
+++ b/resources/libraries/bash/function/terraform.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -22,8 +22,7 @@ function terraform_apply () {
#
# Variable read:
# - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
- # - ${NODENESS} - Node multiplicity of desired testbed.
- # - ${FLAVOR} - Node flavor string, see common.sh
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
set -exuo pipefail
@@ -32,24 +31,21 @@ function terraform_apply () {
fi
pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
- pushd "terraform-aws-${NODENESS}-${FLAVOR}-c5n" || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
export TF_LOG=INFO
- trap 'terraform_destroy' ERR || {
- die "Trap attempt failed, please cleanup manually. Aborting!"
- }
terraform apply -no-color -auto-approve || die "Terraform apply failed!"
popd || die "Popd failed!"
popd || die "Popd failed!"
}
+
function terraform_destroy () {
# Run terraform destroy command to tear down the module.
#
# Variable read:
# - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
- # - ${NODENESS} - Node multiplicity of desired testbed.
- # - ${FLAVOR} - Node flavor string, see common.sh
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
set -exuo pipefail
@@ -58,7 +54,7 @@ function terraform_destroy () {
fi
pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
- pushd "terraform-aws-${NODENESS}-${FLAVOR}-c5n" || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
export TF_LOG=INFO
terraform destroy -auto-approve -no-color || die "Terraform destroy failed!"
popd || die "Popd failed!"
@@ -72,37 +68,100 @@ function terraform_init () {
#
# Variable read:
# - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
- # - ${NODENESS} - Node multiplicity of desired testbed.
- # - ${FLAVOR} - Node flavor string, see common.sh
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
set -exuo pipefail
if ! installed terraform; then
- curl -fsSL https://apt.releases.hashicorp.com/gpg | sudo apt-key add -
- os="$(lsb_release -cs)" || die "Failed to get OS release!"
- repo="deb [arch=amd64] https://apt.releases.hashicorp.com ${os} main"
- sudo apt-add-repository "${repo}" || die "Failed to add repo!"
- apt update -y
- apt install -y terraform
- #die "Please install terraform!"
+ die "Please install terraform!"
fi
pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
- pushd "terraform-aws-${NODENESS}-${FLAVOR}-c5n" || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
- plugin_url="https://github.com/radekg/terraform-provisioner-ansible/"
- plugin_url+="releases/download/v2.5.0/"
- plugin_url+="terraform-provisioner-ansible-linux-amd64_v2.5.0"
- plugin_dir="${HOME}/.terraform.d/plugins/"
- plugin_path+="${plugin_dir}terraform-provisioner-ansible_v2.5.0"
+ #plugin_url="https://github.com/radekg/terraform-provisioner-ansible/"
+ #plugin_url+="releases/download/v2.5.0/"
+ #plugin_url+="terraform-provisioner-ansible-linux-amd64_v2.5.0"
+ #plugin_dir="${HOME}/.terraform.d/plugins/"
+ #plugin_path+="${plugin_dir}terraform-provisioner-ansible_v2.5.0"
- mkdir -p "${plugin_dir}" || die "Failed to create dir!"
- wget -O "${plugin_path}" "${plugin_url}" || die "Failed to download plugin!"
- chmod +x "${plugin_path}" || die "Failed to add execute rights!"
+ #mkdir -p "${plugin_dir}" || die "Failed to create dir!"
+ #wget -O "${plugin_path}" "${plugin_url}" || die "Failed to download plugin!"
+ #chmod +x "${plugin_path}" || die "Failed to add execute rights!"
+ rm -f terraform.tfstate || die "Failed to clear terraform state!"
export TF_LOG=INFO
terraform init || die "Failed to run terraform init!"
+ popd || die "Popd failed!"
+ popd || die "Popd failed!"
+}
+
+function terraform_install () {
+
+ # Install terraform.
+
+ OS_ARCH=$(uname -m) || die "Failed to get arch."
+ case "${OS_ARCH}" in
+ x86_64) architecture="amd64" ;;
+ aarch64) architecture="arm64" ;;
+ esac
+
+ terraform_version="1.4.2/terraform_1.4.2_linux_${architecture}.zip"
+ terraform_url="https://releases.hashicorp.com/terraform"
+ terraform_link="${terraform_url}/${terraform_version}"
+ wget "${terraform_link}" || die "Failed to install Terraform!"
+ unzip "terraform_1.4.2_linux_${architecture}.zip" || {
+ die "Failed to install Terraform!"
+ }
+ mv "terraform" "/usr/local/bin" || die "Failed to install Terraform!"
+ rm "terraform_1.4.2_linux_${architecture}.zip" || {
+ die "Failed to install Terraform!"
+ }
+}
+
+
+function terraform_output () {
+
+ # Run terraform output command to read module outputs.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
+ # - ${TERRAFORM_OUTPUT_VAR} - Terraform variable to export.
+
+ set -exuo pipefail
+
+ if ! installed terraform; then
+ die "Please install terraform!"
+ fi
+
+ pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
+ TERRAFORM_OUTPUT_VAL=$(terraform output --raw "${TERRAFORM_OUTPUT_VAR}")
+ popd || die "Popd failed!"
+ popd || die "Popd failed!"
+}
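Editor's note: a usage sketch mirroring the eb_version.sh caller:

    TERRAFORM_MODULE_DIR="terraform-aws-fdio-csit-dash-app-base"
    TERRAFORM_OUTPUT_VAR="application_version"
    terraform_output || die "Failed to call Terraform output!"
    echo "${TERRAFORM_OUTPUT_VAL}"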
+
+
+function terraform_validate () {
+
+ # Run terraform validate command to validate the module.
+ #
+ # Variable read:
+ # - ${CSIT_DIR} - CSIT main directory, where terraform modules are located.
+ # - ${TERRAFORM_MODULE_DIR} - Terraform module directory.
+
+ set -exuo pipefail
+
+ if ! installed terraform; then
+ die "Please install terraform!"
+ fi
+
+ pushd "${CSIT_DIR}"/fdio.infra.terraform || die "Pushd failed!"
+ pushd "${TERRAFORM_MODULE_DIR}" || die "Pushd failed!"
+ export TF_LOG=INFO
+ terraform validate || die "Terraform validate failed!"
popd || die "Popd failed!"
popd || die "Popd failed!"
}
diff --git a/resources/libraries/bash/k8s_setup.sh b/resources/libraries/bash/k8s_setup.sh
deleted file mode 100755
index d1d3a38454..0000000000
--- a/resources/libraries/bash/k8s_setup.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xo pipefail
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-# Include
-source ${SCRIPT_DIR}/config/defaults
-source ${SCRIPT_DIR}/shell/k8s_utils.sh
-
-trap "k8s_utils.destroy" ERR
-
-case "$1" in
- prepare)
- # Revert any changes made to this host by 'kubeadm init'
- k8s_utils.destroy
- # Sets up the Kubernetes master
- k8s_utils.prepare
- ;;
- deploy_calico)
- # Revert any changes made to this host by 'kubeadm init'
- k8s_utils.destroy
- # Load kernel modules uio/uio_pci_generic
- sudo modprobe uio
- sudo modprobe uio_pci_generic
- sudo modprobe vfio_pci
- # Sets up the Kubernetes master
- k8s_utils.prepare "--pod-network-cidr=192.168.0.0/16"
- # Apply resources
- k8s_utils.calico_deploy ${cfg[K8S_CALICO]}
- # Dump Kubernetes objects ...
- k8s_utils.dump_all
- ;;
- affinity_non_vpp)
- # Set affinity for all non VPP docker containers to CPU 0
- k8s_utils.affinity_non_vpp
- ;;
- destroy)
- # Revert any changes made to this host by 'kubeadm init'
- k8s_utils.destroy
- ;;
- *)
- echo "usage: $0 function"
- echo "function:"
- echo " prepare"
- echo " deploy_calico"
- echo " affinity_non_vpp"
- echo " destroy"
- exit 1
-esac
-shift
-
-echo Kubernetes setup finished
diff --git a/resources/libraries/bash/shell/k8s_utils.sh b/resources/libraries/bash/shell/k8s_utils.sh
deleted file mode 100644
index b96ec8df6c..0000000000
--- a/resources/libraries/bash/shell/k8s_utils.sh
+++ /dev/null
@@ -1,132 +0,0 @@
-#!/bin/bash
-# Copyright (c) 2021 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-function k8s_utils.destroy {
- # Destroy existing Kubernetes deployment
- kubectl drain $HOSTNAME --delete-local-data --force --ignore-daemonsets
- kubectl delete node $HOSTNAME
-
- # Revert any changes made to this host by 'kubeadm init' or 'kubeadm join'
- sudo kubeadm reset --force && sudo rm -rf $HOME/.kube || \
- { echo "Failed to reset kubeadm"; exit 1; }
-}
-
-function k8s_utils.prepare {
- # Sets up the Kubernetes master
-
- # Disable swap
- sudo swapoff --all
-
- # Set up the Kubernetes master
- sudo -E kubeadm init --token-ttl 0 ${1} || \
- { echo "Failed to init kubeadm"; exit 1; }
-
- # Make cgroup non-exclusive for CPU and MEM
- sudo cgset -r cpuset.cpu_exclusive=0 /kubepods
- sudo cgset -r cpuset.mem_exclusive=0 /kubepods
-
- rm -rf $HOME/.kube
- mkdir -p $HOME/.kube
- sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
- sudo chown $(id -u):$(id -g) $HOME/.kube/config
-}
-
-function k8s_utils.taint {
- # Updates the taints
- kubectl taint nodes --all node-role.kubernetes.io/master- || \
- { echo "Failed to taint nodes"; exit 1; }
-}
-
-function k8s_utils.calico_deploy {
- # Calico yaml URL or file
- k8s_calico=$1
-
- # Apply resources
- kubectl apply -f ${k8s_calico} || \
- { echo "Failed to apply ${k8s_calico}"; exit 1; }
-
- # Update the taints
- k8s_utils.taint
-}
-
-function k8s_utils.contiv_vpp_deploy {
- # Contiv yaml URL or file
- k8s_contiv=$1
- k8s_contiv_patch="kubecon.contiv-vpp-yaml-patch.diff"
-
- # Pull the most recent Docker images
- url="https://raw.githubusercontent.com/contiv/vpp/master/k8s/pull-images.sh"
- bash <(curl -s "${url}")
-
- # Apply resources
- wget ${k8s_contiv}
- patch contiv-vpp.yaml -i ${k8s_contiv_patch} -o - | kubectl apply -f - || \
- { echo "Failed to apply Contiv resources"; exit 1; }
- rm contiv-vpp.yaml
-
- # Update the taints
- k8s_utils.taint
-}
-
-function k8s_utils.cri_shim_install {
- # Install the CRI Shim on host
- url"https://raw.githubusercontent.com/contiv/vpp/master/k8s/cri-install.sh"
- sudo su root -c "bash <(curl -s '${url}')"
-}
-
-function k8s_utils.cri_shim_uninstall {
- # Uninstall the CRI Shim on host
- url="https://raw.githubusercontent.com/contiv/vpp/master/k8s/cri-install.sh"
- sudo su root -c "bash <(curl -s '${url}') --uninstall"
-}
-
-function k8s_utils.kube_proxy_install {
- # Installing custom version of Kube-Proxy to enable Kubernetes services
- url="https://raw.githubusercontent.com/contiv/vpp/master/k8s/"
- url+="proxy-install.sh"
- bash <(curl -s "${url}")
-}
-
-function k8s_utils.apply {
- # Resource yaml URL or file
- k8s_resource=$1
-
- # Apply resources
- kubectl apply -f ${k8s_resource} || \
- { echo "Failed to apply ${k8s_resource}"; exit 1; }
-}
-
-function k8s_utils.resource_delete {
- # Resource yaml URL or file
- k8s_resource=$1
-
- # Delete resources
- kubectl delete -f ${k8s_resource} || \
- { echo "Failed to delete ${k8s_resource}"; exit 1; }
-}
-
-function k8s_utils.affinity_non_vpp {
- # Set affinity for all non VPP docker containers to CPU 0
- command='sudo docker ps --format "{{.ID}} {{.Names}}"'
- command+=" | grep -v vpp | cut -d' ' -f1"
- for i in $(${command}); do
- sudo docker update --cpuset-cpus 0 ${i}
- done
-}
-
-function k8s_utils.dump_all {
- # Dumps the kubernetes objects
- kubectl get all --all-namespaces
- kubectl describe nodes
-}