summaryrefslogtreecommitdiffstats
path: root/jjb/scripts
diff options
context:
space:
mode:
Diffstat (limited to 'jjb/scripts')
-rw-r--r--jjb/scripts/cicn/build-libparc.sh8
-rw-r--r--jjb/scripts/cicn/build-viper.sh8
-rw-r--r--jjb/scripts/cicn/docs-libparc.sh38
-rw-r--r--jjb/scripts/cleanup_vpp_plugin_dev_env.sh31
-rw-r--r--jjb/scripts/create_maven_env.sh15
-rw-r--r--jjb/scripts/csit/cpta.sh87
-rwxr-xr-x[-rw-r--r--]jjb/scripts/csit/device-semiweekly.sh6
-rw-r--r--jjb/scripts/csit/device-verify.sh2
-rw-r--r--jjb/scripts/csit/dmm-functional-virl.sh40
-rw-r--r--jjb/scripts/csit/docs.sh76
-rw-r--r--jjb/scripts/csit/hc2vpp-verify-func.sh49
-rw-r--r--jjb/scripts/csit/nsh_sfc-functional-virl.sh34
-rwxr-xr-x[-rw-r--r--]jjb/scripts/csit/perf-timed.sh8
-rw-r--r--jjb/scripts/csit/perf-verify.sh5
-rw-r--r--jjb/scripts/csit/report.sh76
-rw-r--r--jjb/scripts/csit/terraform-aws-eb-version-deploy.sh (renamed from jjb/scripts/csit/vpp-functional-multilink.sh)19
-rw-r--r--jjb/scripts/csit/terraform-aws-eb-version-verify.sh (renamed from jjb/scripts/csit/nsh_sfc-perf-hw.sh)21
-rw-r--r--jjb/scripts/csit/tldk-functional-virl.sh39
-rw-r--r--jjb/scripts/csit/tox.sh2
-rw-r--r--jjb/scripts/hicn/build-extras.sh9
-rw-r--r--jjb/scripts/hicn/build-vpp-latest.sh7
-rw-r--r--jjb/scripts/hicn/build.sh9
-rw-r--r--jjb/scripts/hicn/checkstyle.sh9
-rw-r--r--jjb/scripts/hicn/docs.sh14
-rw-r--r--jjb/scripts/hicn/functest.sh9
-rw-r--r--jjb/scripts/hicn/sonar.sh8
-rw-r--r--jjb/scripts/maven_push_functions.sh16
-rw-r--r--jjb/scripts/packagecloud_promote.sh15
-rwxr-xr-x[-rw-r--r--]jjb/scripts/packagecloud_push.sh128
-rwxr-xr-xjjb/scripts/post_build_deploy_archives.sh93
-rwxr-xr-xjjb/scripts/post_build_executor_info.sh58
-rw-r--r--jjb/scripts/publish_cov.sh51
-rwxr-xr-xjjb/scripts/publish_docs.sh83
-rw-r--r--jjb/scripts/publish_library_py.sh318
-rw-r--r--jjb/scripts/publish_logs.sh30
-rwxr-xr-xjjb/scripts/setup_executor_env.sh119
-rw-r--r--jjb/scripts/setup_jvpp_dev_env.sh58
-rw-r--r--jjb/scripts/setup_vpp_dpdk_dev_env.sh69
-rwxr-xr-xjjb/scripts/setup_vpp_ext_deps.sh67
-rw-r--r--jjb/scripts/setup_vpp_plugin_dev_env.sh43
-rwxr-xr-x[-rw-r--r--]jjb/scripts/setup_vpp_ubuntu_docker_test.sh62
-rw-r--r--jjb/scripts/terraform_s3_docs_ship.sh99
-rw-r--r--jjb/scripts/vpp/api-checkstyle.sh22
-rw-r--r--jjb/scripts/vpp/build.sh193
-rwxr-xr-x[-rw-r--r--]jjb/scripts/vpp/check_crc.sh7
-rwxr-xr-x[-rw-r--r--]jjb/scripts/vpp/checkstyle.sh35
-rw-r--r--jjb/scripts/vpp/commitmsg.sh19
-rw-r--r--jjb/scripts/vpp/copy_archives.sh38
-rw-r--r--jjb/scripts/vpp/cov-build.sh106
-rw-r--r--jjb/scripts/vpp/coverity.sh32
-rw-r--r--jjb/scripts/vpp/csit-bisect.sh38
-rwxr-xr-x[-rw-r--r--]jjb/scripts/vpp/csit-device.sh7
-rwxr-xr-x[-rw-r--r--]jjb/scripts/vpp/csit-perf.sh7
-rw-r--r--jjb/scripts/vpp/debug-build.sh107
-rwxr-xr-x[-rw-r--r--]jjb/scripts/vpp/docs.sh67
-rwxr-xr-x[-rw-r--r--]jjb/scripts/vpp/docs_spell.sh (renamed from jjb/scripts/csit/vpp-functional-virl.sh)17
-rw-r--r--jjb/scripts/vpp/dpdk-rdma-version-mismatch.sh33
-rw-r--r--jjb/scripts/vpp/gcc-build.sh78
-rw-r--r--jjb/scripts/vpp/make-test-docs.sh49
-rw-r--r--jjb/scripts/vpp/maven-push.sh56
-rw-r--r--jjb/scripts/vpp/sphinx-docs.sh50
-rw-r--r--jjb/scripts/vpp/test-checkstyle.sh16
62 files changed, 1833 insertions, 1082 deletions
diff --git a/jjb/scripts/cicn/build-libparc.sh b/jjb/scripts/cicn/build-libparc.sh
new file mode 100644
index 000000000..967670e3b
--- /dev/null
+++ b/jjb/scripts/cicn/build-libparc.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+echo "---> jjb/scripts/cicn/build-libparc.sh"
+set -euxo pipefail
+IFS=$'\n\t'
+
+pushd libparc/scripts
+bash build-package.sh
+popd
diff --git a/jjb/scripts/cicn/build-viper.sh b/jjb/scripts/cicn/build-viper.sh
new file mode 100644
index 000000000..38018b779
--- /dev/null
+++ b/jjb/scripts/cicn/build-viper.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+echo "---> jjb/scripts/cicn/build-viper.sh"
+set -euxo pipefail
+IFS=$'\n\t'
+
+pushd scripts
+bash build-package.sh
+popd
diff --git a/jjb/scripts/cicn/docs-libparc.sh b/jjb/scripts/cicn/docs-libparc.sh
new file mode 100644
index 000000000..0ca4effb9
--- /dev/null
+++ b/jjb/scripts/cicn/docs-libparc.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+echo "---> jjb/scripts/cicn/docs-libparc.sh"
+set -xe -o pipefail
+
+update_cmake_repo() {
+ cat /etc/resolv.conf
+ echo "nameserver 8.8.8.8" | sudo tee -a /etc/resolv.conf
+ cat /etc/resolv.conf
+
+ CMAKE_INSTALL_SCRIPT_URL="https://cmake.org/files/v3.8/cmake-3.8.0-Linux-x86_64.sh"
+ CMAKE_INSTALL_SCRIPT="/tmp/install_cmake.sh"
+ curl ${CMAKE_INSTALL_SCRIPT_URL} > ${CMAKE_INSTALL_SCRIPT}
+
+ sudo mkdir -p /opt/cmake
+ sudo bash ${CMAKE_INSTALL_SCRIPT} --skip-license --prefix=/opt/cmake
+ export PATH=/opt/cmake/bin:$PATH
+}
+
+cd libparc
+
+[ "$DOC_DIR" ] || DOC_DIR="build/documentation/generated-documentation/html"
+[ "$SITE_DIR" ] || SITE_DIR="build/documentation/deploy-site/"
+[ "$RESOURCES_DIR" ] || RESOURCES_DIR=${SITE_DIR}/src/site/resources
+
+update_cmake_repo
+mkdir -p build
+pushd build
+cmake -DDOC_ONLY=ON ..
+make documentation
+popd
+
+if [[ ${JOB_NAME} == *merge* ]]; then
+ mkdir -p $(dirname ${RESOURCES_DIR})
+ mv -f ${DOC_DIR} ${RESOURCES_DIR}
+ cd ${SITE_DIR}
+ find . -type f '(' -name '*.md5' -o -name '*.dot' -o -name '*.map' ')' -delete
+ cd -
+fi
diff --git a/jjb/scripts/cleanup_vpp_plugin_dev_env.sh b/jjb/scripts/cleanup_vpp_plugin_dev_env.sh
deleted file mode 100644
index 2fcdf7016..000000000
--- a/jjb/scripts/cleanup_vpp_plugin_dev_env.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-set -e -o pipefail
-
-# Figure out what system we are running on
-if [ -f /etc/lsb-release ];then
- . /etc/lsb-release
-elif [ -f /etc/redhat-release ];then
- sudo yum install -y redhat-lsb
- DISTRIB_ID=`lsb_release -si`
- DISTRIB_RELEASE=`lsb_release -sr`
- DISTRIB_CODENAME=`lsb_release -sc`
- DISTRIB_DESCRIPTION=`lsb_release -sd`
-fi
-echo DISTRIB_ID: $DISTRIB_ID
-echo DISTRIB_RELEASE: $DISTRIB_RELEASE
-echo DISTRIB_CODENAME: $DISTRIB_CODENAME
-echo DISTRIB_DESCRIPTION: $DISTRIB_DESCRIPTION
-
-function cleanup {
- # Setup by installing vpp-dev and vpp-lib
- if [ $DISTRIB_ID == "Ubuntu" ]; then
- sudo rm -f /etc/apt/sources.list.d/99fd.io.list
- sudo dpkg -r vpp-dev vpp-lib vpp-dev vpp-lib vpp vpp-dpdk-dev vpp-dpdk-dkms vpp-dbg vpp-ext-deps
- elif [[ $DISTRIB_ID == "CentOS" ]]; then
- sudo rm -f /etc/yum.repos.d/fdio-master.repo
- sudo yum -y remove vpp-devel vpp-lib vpp vpp-ext-deps
- fi
-}
-
-trap cleanup EXIT
-cleanup
diff --git a/jjb/scripts/create_maven_env.sh b/jjb/scripts/create_maven_env.sh
index 48a2da06f..cbc9f868b 100644
--- a/jjb/scripts/create_maven_env.sh
+++ b/jjb/scripts/create_maven_env.sh
@@ -1,3 +1,18 @@
#!/bin/bash
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/create_maven_env.sh"
+
echo "MAVEN_SETTINGS_OPTIONS=-s $SETTINGS_FILE -gs $GLOBAL_SETTINGS_FILE" > maven_env.txt
diff --git a/jjb/scripts/csit/cpta.sh b/jjb/scripts/csit/cpta.sh
deleted file mode 100644
index d6bc187b4..000000000
--- a/jjb/scripts/csit/cpta.sh
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xe -o pipefail
-
-[ "${DOCS_REPO_URL}" ] || DOCS_REPO_URL="https://nexus.fd.io/content/sites/site"
-[ "${PROJECT_PATH}" ] || PROJECT_PATH="io/fd/csit"
-[ "${DOC_DIR}" ] || DOC_DIR="resources/tools/presentation"
-[ "${BUILD_DIR}" ] || BUILD_DIR="${DOC_DIR}/_build"
-[ "${SITE_DIR}" ] || SITE_DIR="build-root/docs/deploy-site"
-[ "${RESOURCES_DIR}" ] || RESOURCES_DIR="${SITE_DIR}/src/site/resources/trending"
-[ "${STATIC_VPP_DIR}" ] || STATIC_VPP_DIR="${RESOURCES_DIR}/_static/vpp"
-[ "${MVN}" ] || MVN="/opt/apache/maven/bin/mvn"
-[ "${FAILED_TESTS}" ] || FAILED_TESTS="${STATIC_VPP_DIR}/trending-failed-tests.txt"
-[ "${REGRESSIONS}" ] || REGRESSIONS="${STATIC_VPP_DIR}/trending-regressions.txt"
-[ "${PROGRESSIONS}" ] || PROGRESSIONS="${STATIC_VPP_DIR}/trending-progressions.txt"
-
-# Create a text file with email body in case the build fails:
-cd "${WORKSPACE}"
-mkdir -p "${STATIC_VPP_DIR}"
-EMAIL_BODY="ERROR: The build number ${BUILD_NUMBER} of the job ${JOB_NAME} failed. For more information see: ${BUILD_URL}"
-echo "${EMAIL_BODY}" > "${FAILED_TESTS}"
-echo "${EMAIL_BODY}" > "${REGRESSIONS}"
-echo "${EMAIL_BODY}" > "${PROGRESSIONS}"
-
-cd "${DOC_DIR}"
-chmod +x ./run_cpta.sh
-STATUS=$(./run_cpta.sh | tail -1)
-
-cd "${WORKSPACE}"
-rm -rf "${SITE_DIR}/"*
-
-mkdir -p "${RESOURCES_DIR}"
-ls "${RESOURCES_DIR}"
-mv -f "${BUILD_DIR}/"* "${RESOURCES_DIR}"
-
-cd "${SITE_DIR}"
-
-cat > pom.xml << EOF
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>io.fd.csit</groupId>
- <artifactId>docs</artifactId>
- <version>1.0.0</version>
- <packaging>pom</packaging>
- <properties>
- <generateReports>false</generateReports>
- </properties>
- <build>
- <extensions>
- <extension>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-webdav-jackrabbit</artifactId>
- <version>2.10</version>
- </extension>
- </extensions>
- </build>
- <distributionManagement>
- <site>
- <id>fdio-site</id>
- <url>dav:${DOCS_REPO_URL}/${PROJECT_PATH}/${GERRIT_BRANCH}</url>
- </site>
- </distributionManagement>
-</project>
-EOF
-
-${MVN} site:site site:deploy -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}" -T 4C
-
-cd -
-
-if [ "${STATUS}" == "PASS" ]; then
- exit 0
-else
- exit 1
-fi
diff --git a/jjb/scripts/csit/device-semiweekly.sh b/jjb/scripts/csit/device-semiweekly.sh
index 10c3ed7a0..1086b5463 100644..100755
--- a/jjb/scripts/csit/device-semiweekly.sh
+++ b/jjb/scripts/csit/device-semiweekly.sh
@@ -13,19 +13,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/csit/device-semiweekly.sh"
+
set -exuo pipefail
# Clone CSIT git repository and proceed with entry script located there.
#
# Variables read:
# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# - GIT_URL - Git clone URL
# - BRANCH_ID - CSIT operational branch to be used for test.
# Directories updated:
# - ${WORKSPACE}/csit - Created, holding a checked out CSIT repository.
# - Multiple other side effects by entry script(s), see CSIT repository.
cd "${WORKSPACE}"
-git clone https://gerrit.fd.io/r/csit --depth=1 --no-single-branch --no-checkout
+git clone "${GIT_URL}/csit" --depth=1 --no-single-branch --no-checkout
# Check BRANCH_ID value.
if [[ -z "${BRANCH_ID-}" ]]; then
echo "BRANCH_ID not provided => 'oper' belonging to master will be used."
@@ -45,3 +48,4 @@ git checkout "${BRANCH_NAME}"
popd
csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
source "${csit_entry_dir}/bootstrap_vpp_device.sh"
+cp -R "${WORKSPACE}/csit/archives" "${WORKSPACE}/archives" || true
diff --git a/jjb/scripts/csit/device-verify.sh b/jjb/scripts/csit/device-verify.sh
index f26e6beba..5ccd2a580 100644
--- a/jjb/scripts/csit/device-verify.sh
+++ b/jjb/scripts/csit/device-verify.sh
@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/csit/device-verify.sh"
+
set -exuo pipefail
csit_entry_dir="${WORKSPACE}/resources/libraries/bash/entry"
diff --git a/jjb/scripts/csit/dmm-functional-virl.sh b/jjb/scripts/csit/dmm-functional-virl.sh
deleted file mode 100644
index fcc0a49ad..000000000
--- a/jjb/scripts/csit/dmm-functional-virl.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2018 Huawei Technologies Co.,Ltd.
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeu -o pipefail
-
-# Clone dmm and start tests
-git clone https://gerrit.fd.io/r/dmm
-
-# If the git clone fails, complain clearly and exit
-if [ $? != 0 ]; then
- echo "Failed to run: git clone https://gerrit.fd.io/r/dmm"
- exit 1
-fi
-
-# execute DMM bootstrap script if it exists
-if [ -e bootstrap-DMM.sh ]
-then
- # make sure that bootstrap-DMM.sh is executable
- chmod +x bootstrap-DMM.sh
- # run the script
- ./bootstrap-DMM.sh
-else
- echo 'ERROR: No bootstrap-DMM.sh found'
- exit 1
-fi
-
-# vim: ts=4 ts=4 sts=4 et : \ No newline at end of file
diff --git a/jjb/scripts/csit/docs.sh b/jjb/scripts/csit/docs.sh
deleted file mode 100644
index ebd8546b8..000000000
--- a/jjb/scripts/csit/docs.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xe -o pipefail
-
-[ "$DOCS_REPO_URL" ] || DOCS_REPO_URL="https://nexus.fd.io/content/sites/site"
-[ "$PROJECT_PATH" ] || PROJECT_PATH=io/fd/csit
-[ "$DOC_DIR" ] || DOC_DIR=resources/tools/doc_gen
-[ "$BUILD_DIR" ] || BUILD_DIR=${DOC_DIR}/_build
-[ "$SITE_DIR" ] || SITE_DIR=build-root/docs/deploy-site
-[ "$RESOURCES_DIR" ] || RESOURCES_DIR=${SITE_DIR}/src/site/resources/doc
-[ "$MVN" ] || MVN="/opt/apache/maven/bin/mvn"
-
-cd ${DOC_DIR}
-chmod +x ./run_doc.sh
-./run_doc.sh ${GERRIT_BRANCH}
-
-retval=$?
-if [ ${retval} -ne "0" ]; then
- echo "Documentation generation failed!"
-exit ${retval}
-fi
-
-if [[ ${JOB_NAME} == *merge* ]]; then
-
- cd ${WORKSPACE}
-
- mkdir -p ${RESOURCES_DIR}
- mv -f ${BUILD_DIR}/* ${RESOURCES_DIR}
- cd ${SITE_DIR}
-
- cat > pom.xml << EOF
- <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>io.fd.csit</groupId>
- <artifactId>docs</artifactId>
- <version>1.0.0</version>
- <packaging>pom</packaging>
- <properties>
- <generateReports>false</generateReports>
- </properties>
- <build>
- <extensions>
- <extension>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-webdav-jackrabbit</artifactId>
- <version>2.10</version>
- </extension>
- </extensions>
- </build>
- <distributionManagement>
- <site>
- <id>fdio-site</id>
- <url>dav:${DOCS_REPO_URL}/${PROJECT_PATH}/${GERRIT_BRANCH}</url>
- </site>
- </distributionManagement>
- </project>
-EOF
-
- ${MVN} site:site site:deploy -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}" -T 4C
-
- cd -
-
-fi
diff --git a/jjb/scripts/csit/hc2vpp-verify-func.sh b/jjb/scripts/csit/hc2vpp-verify-func.sh
deleted file mode 100644
index 3e3c63e07..000000000
--- a/jjb/scripts/csit/hc2vpp-verify-func.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Parse optional arguments from gerrit comment trigger
-for i in ${GERRIT_EVENT_COMMENT_TEXT}; do
- case ${i} in
- *honeycomb=*)
- hc_version=`echo "${i}" | cut -d = -f2-`
- ;;
- *)
- ;;
- esac
-done
-
-# If HC variable is set, check honeycomb version.
-if [[ -n "${hc_version}" ]]; then
- if [[ "${hc_version}" == *"-release" ]]; then
- # we are going to test release build. All release
- # packages should be already present in release repo
- STREAM="release"
- echo "STREAM set to: ${STREAM}"
- fi
-fi
-
-# execute csit bootstrap script if it exists
-if [[ ! -e bootstrap-hc2vpp-integration.sh ]]
-then
- echo 'ERROR: No bootstrap-hc2vpp-integration.sh found'
- exit 1
-else
- # make sure that bootstrap.sh is executable
- chmod +x bootstrap-hc2vpp-integration.sh
- # run the script
- ./bootstrap-hc2vpp-integration.sh ${STREAM} ${OS}
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/scripts/csit/nsh_sfc-functional-virl.sh b/jjb/scripts/csit/nsh_sfc-functional-virl.sh
deleted file mode 100644
index d90003d0c..000000000
--- a/jjb/scripts/csit/nsh_sfc-functional-virl.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeu -o pipefail
-
-# execute nsh_sfc bootstrap script if it exists
-if [ -e bootstrap-nsh_sfc-functional-virl.sh ]
-then
- # make sure that bootstrap-nsh_sfc-functional-virl.sh is executable
- chmod +x bootstrap-nsh_sfc-functional-virl.sh
- # run the script
- if [ ${STREAM} == 'master' ]; then
- ./bootstrap-nsh_sfc-functional-virl.sh ${STREAM} ${OS}
- else
- ./bootstrap-nsh_sfc-functional-virl.sh 'stable.'${STREAM} ${OS}
- fi
-else
- echo 'ERROR: No bootstrap-nsh_sfc-functional-virl.sh found'
- exit 1
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/scripts/csit/perf-timed.sh b/jjb/scripts/csit/perf-timed.sh
index 76fabd58f..10925b04f 100644..100755
--- a/jjb/scripts/csit/perf-timed.sh
+++ b/jjb/scripts/csit/perf-timed.sh
@@ -13,12 +13,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/csit/perf-timed.sh"
+
set -exuo pipefail
# Clone CSIT git repository and proceed with entry script located there.
#
# Variables read:
# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# - GIT_URL - Git clone URL
# - GERRIT_BRANCH - Jenkins configured GERRIT_BRANCH parameter equal to required
# CSIT branch.
# - CSIT_REF - Override ref of CSIT git repository to checkout.
@@ -27,10 +30,10 @@ set -exuo pipefail
# - Multiple other side effects by entry script(s), see CSIT repository.
cd "${WORKSPACE}"
-git clone https://gerrit.fd.io/r/csit --depth=1 --no-single-branch --no-checkout
+git clone "${GIT_URL}/csit" --depth=1 --no-single-branch --no-checkout
pushd "${WORKSPACE}/csit"
if [[ -n "${CSIT_REF-}" ]]; then
- git fetch --depth=1 https://gerrit.fd.io/r/csit "${CSIT_REF}"
+ git fetch --depth=1 "${GIT_URL}/csit" "${CSIT_REF}"
git checkout FETCH_HEAD
else
git checkout "${GERRIT_BRANCH}"
@@ -38,3 +41,4 @@ fi
popd
csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
source "${csit_entry_dir}/with_oper_for_vpp.sh" "bootstrap_verify_perf.sh"
+cp -R "${WORKSPACE}/csit/archives" "${WORKSPACE}/archives" || true
diff --git a/jjb/scripts/csit/perf-verify.sh b/jjb/scripts/csit/perf-verify.sh
index c68a88126..859859414 100644
--- a/jjb/scripts/csit/perf-verify.sh
+++ b/jjb/scripts/csit/perf-verify.sh
@@ -13,11 +13,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/csit/perf-verify.sh"
+
set -exuo pipefail
+# TODO: Figure out how old CSIT branches need the processing here.
if [[ ${GERRIT_EVENT_TYPE} == 'comment-added' ]]; then
TRIGGER=`echo ${GERRIT_EVENT_COMMENT_TEXT} \
- | grep -oE '(perftest$|perftest[[:space:]].+$)'`
+ | grep -oE '(perftest$|perftest[[:space:]].+$)' || true`
else
TRIGGER=''
fi
diff --git a/jjb/scripts/csit/report.sh b/jjb/scripts/csit/report.sh
deleted file mode 100644
index b2ea0f966..000000000
--- a/jjb/scripts/csit/report.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xe -o pipefail
-
-[ "$DOCS_REPO_URL" ] || DOCS_REPO_URL="https://nexus.fd.io/content/sites/site"
-[ "$PROJECT_PATH" ] || PROJECT_PATH=io/fd/csit
-[ "$DOC_DIR" ] || DOC_DIR=resources/tools/presentation
-[ "$BUILD_DIR" ] || BUILD_DIR=${DOC_DIR}/_build
-[ "$SITE_DIR" ] || SITE_DIR=build-root/docs/deploy-site
-[ "$RESOURCES_DIR" ] || RESOURCES_DIR=${SITE_DIR}/src/site/resources/report
-[ "$MVN" ] || MVN="/opt/apache/maven/bin/mvn"
-
-cd ${DOC_DIR}
-chmod +x ./run_report.sh
-./run_report.sh ${GERRIT_BRANCH}
-
-retval=$?
-if [ ${retval} -ne "0" ]; then
- echo "Report generation failed!"
-exit ${retval}
-fi
-
-if [[ ${JOB_NAME} == *merge* ]]; then
-
- cd ${WORKSPACE}
-
- mkdir -p ${RESOURCES_DIR}
- mv -f ${BUILD_DIR}/* ${RESOURCES_DIR}
- cd ${SITE_DIR}
-
- cat > pom.xml << EOF
- <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>io.fd.csit</groupId>
- <artifactId>docs</artifactId>
- <version>1.0.0</version>
- <packaging>pom</packaging>
- <properties>
- <generateReports>false</generateReports>
- </properties>
- <build>
- <extensions>
- <extension>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-webdav-jackrabbit</artifactId>
- <version>2.10</version>
- </extension>
- </extensions>
- </build>
- <distributionManagement>
- <site>
- <id>fdio-site</id>
- <url>dav:${DOCS_REPO_URL}/${PROJECT_PATH}/${GERRIT_BRANCH}</url>
- </site>
- </distributionManagement>
- </project>
-EOF
-
- ${MVN} site:site site:deploy -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}" -T 4C
-
- cd -
-
-fi
diff --git a/jjb/scripts/csit/vpp-functional-multilink.sh b/jjb/scripts/csit/terraform-aws-eb-version-deploy.sh
index 5cf2454eb..3bc683b91 100644
--- a/jjb/scripts/csit/vpp-functional-multilink.sh
+++ b/jjb/scripts/csit/terraform-aws-eb-version-deploy.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright (c) 2020 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -13,16 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# execute csit bootstrap script if it exists
-if [ -e bootstrap-multilink.sh ]
-then
- # make sure that bootstrap.sh is executable
- chmod +x bootstrap-multilink.sh
- # run the script
- ./bootstrap-multilink.sh
-else
- echo 'ERROR: No bootstrap-multilink.sh found'
- exit 1
-fi
+echo "---> jjb/scripts/csit/terraform-aws-eb-version-deploy.sh"
-# vim: ts=4 ts=4 sts=4 et :
+set -exuo pipefail
+
+csit_entry_dir="${WORKSPACE}/resources/libraries/bash/entry"
+source "${csit_entry_dir}/bootstrap_aws_eb_version_deploy.sh"
diff --git a/jjb/scripts/csit/nsh_sfc-perf-hw.sh b/jjb/scripts/csit/terraform-aws-eb-version-verify.sh
index 6f2eb3bdd..7a9f16261 100644
--- a/jjb/scripts/csit/nsh_sfc-perf-hw.sh
+++ b/jjb/scripts/csit/terraform-aws-eb-version-verify.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright (c) 2020 Cisco and/or its affiliates.
+# Copyright (c) 2023 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -13,20 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# execute nsh_sfc bootstrap script if it exists
-if [ ! -e bootstrap-verify-perf-nsh_sfc.sh ]
-then
- echo 'ERROR: No bootstrap-verify-perf-nsh_sfc.sh found'
- exit 1
-fi
+echo "---> jjb/scripts/csit/terraform-aws-eb-version-verify.sh"
-# make sure that bootstrap-verify-perf.sh is executable
-chmod +x bootstrap-verify-perf-nsh_sfc.sh
-# run the script
-if [ ${STREAM} == 'master' ]; then
- ./bootstrap-verify-perf-nsh_sfc.sh ${STREAM} ${OS}
-else
- ./bootstrap-verify-perf-nsh_sfc.sh 'stable.'${STREAM} ${OS}
-fi
+set -exuo pipefail
-# vim: ts=4 ts=4 sts=4 et :
+csit_entry_dir="${WORKSPACE}/resources/libraries/bash/entry"
+source "${csit_entry_dir}/bootstrap_aws_eb_version_verify.sh"
diff --git a/jjb/scripts/csit/tldk-functional-virl.sh b/jjb/scripts/csit/tldk-functional-virl.sh
deleted file mode 100644
index 8e732a517..000000000
--- a/jjb/scripts/csit/tldk-functional-virl.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-# Copyright (c) 2020 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -xeu -o pipefail
-
-# Clone tldk and start tests
-git clone https://gerrit.fd.io/r/tldk
-
-# If the git clone fails, complain clearly and exit
-if [ $? != 0 ]; then
- echo "Failed to run: git clone https://gerrit.fd.io/r/tldk"
- exit 1
-fi
-
-# execute tldk bootstrap script if it exists
-if [ -e bootstrap-TLDK.sh ]
-then
- # make sure that bootstrap-TLDK.sh is executable
- chmod +x bootstrap-TLDK.sh
- # run the script
- ./bootstrap-TLDK.sh
-else
- echo 'ERROR: No bootstrap-TLDK.sh found'
- exit 1
-fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/scripts/csit/tox.sh b/jjb/scripts/csit/tox.sh
index 32ccb5c00..6a0c02b41 100644
--- a/jjb/scripts/csit/tox.sh
+++ b/jjb/scripts/csit/tox.sh
@@ -17,6 +17,8 @@
# We do not use source command, to make sure
# the called script choses the interpreter it needs.
+echo "---> jjb/scripts/csit/tox.sh"
+
set -exuo pipefail
${WORKSPACE}/resources/libraries/bash/entry/tox.sh
diff --git a/jjb/scripts/hicn/build-extras.sh b/jjb/scripts/hicn/build-extras.sh
new file mode 100644
index 000000000..c1e0e3e1e
--- /dev/null
+++ b/jjb/scripts/hicn/build-extras.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# basic build script example
+set -euxo pipefail
+
+echo "---> jjb/scripts/hicn/build-extras.sh"
+
+pushd scripts
+bash ./build-extras.sh
+popd
diff --git a/jjb/scripts/hicn/build-vpp-latest.sh b/jjb/scripts/hicn/build-vpp-latest.sh
new file mode 100644
index 000000000..444f9b699
--- /dev/null
+++ b/jjb/scripts/hicn/build-vpp-latest.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# basic build script example
+set -euxo pipefail
+
+pushd scripts
+bash ./build-packages.sh vpp_master
+popd
diff --git a/jjb/scripts/hicn/build.sh b/jjb/scripts/hicn/build.sh
new file mode 100644
index 000000000..8dd150a74
--- /dev/null
+++ b/jjb/scripts/hicn/build.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# basic build script example
+set -euxo pipefail
+
+echo "---> jjb/scripts/hicn/build.sh"
+
+pushd scripts
+bash ./build-packages.sh
+popd
diff --git a/jjb/scripts/hicn/checkstyle.sh b/jjb/scripts/hicn/checkstyle.sh
new file mode 100644
index 000000000..451950ebb
--- /dev/null
+++ b/jjb/scripts/hicn/checkstyle.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+echo "---> jjb/scripts/hicn/checkstyle.sh"
+
+if [ -f ./scripts/checkstyle.sh ];then
+ bash scripts/checkstyle.sh
+else
+ echo "Cannot find scripts/checkstyle.sh - skipping checkstyle"
+fi
diff --git a/jjb/scripts/hicn/docs.sh b/jjb/scripts/hicn/docs.sh
new file mode 100644
index 000000000..e927f0bbc
--- /dev/null
+++ b/jjb/scripts/hicn/docs.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+set -xe -o pipefail
+
+DOC_DIR="docs/build/html"
+SITE_DIR_ROOT="build/doc"
+SITE_DIR="${SITE_DIR_ROOT}/deploy-site"
+
+echo "---> jjb/scripts/hicn/docs.sh"
+
+bash scripts/build-packages.sh sphinx
+
+mkdir -p "${SITE_DIR_ROOT}"
+mv -f "${DOC_DIR}" "${SITE_DIR}"
+find "${SITE_DIR}" -type f '(' -name '*.md5' -o -name '*.dot' -o -name '*.map' ')' -delete
diff --git a/jjb/scripts/hicn/functest.sh b/jjb/scripts/hicn/functest.sh
new file mode 100644
index 000000000..e520c78ce
--- /dev/null
+++ b/jjb/scripts/hicn/functest.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+# basic build script example
+set -euxo pipefail
+
+echo "---> jjb/scripts/hicn/functest.sh"
+
+pushd scripts
+bash ./functional-tests.sh
+popd
diff --git a/jjb/scripts/hicn/sonar.sh b/jjb/scripts/hicn/sonar.sh
new file mode 100644
index 000000000..2f2060cc3
--- /dev/null
+++ b/jjb/scripts/hicn/sonar.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -euxo pipefail
+
+echo "---> jjb/scripts/hicn/sonar.sh"
+
+pushd scripts
+bash ./build-sonar.sh
+popd
diff --git a/jjb/scripts/maven_push_functions.sh b/jjb/scripts/maven_push_functions.sh
index 6627615a6..d26b71422 100644
--- a/jjb/scripts/maven_push_functions.sh
+++ b/jjb/scripts/maven_push_functions.sh
@@ -1,4 +1,20 @@
#!/bin/bash
+
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/maven_push_functions.sh"
+
set -xe -o pipefail
echo "*******************************************************************"
echo "* STARTING PUSH OF PACKAGES TO REPOS"
diff --git a/jjb/scripts/packagecloud_promote.sh b/jjb/scripts/packagecloud_promote.sh
index 8523e6cf0..6a138b419 100644
--- a/jjb/scripts/packagecloud_promote.sh
+++ b/jjb/scripts/packagecloud_promote.sh
@@ -1,5 +1,20 @@
#!/bin/bash
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/packagecloud_promote.sh"
+
stage_repo="https://packagecloud.io/api/v1/repos/fdio/staging"
curl --netrc-file /home/jenkins/packagecloud_api $stage_repo/packages.json | \
python -mjson.tool >filenames.txt
diff --git a/jjb/scripts/packagecloud_push.sh b/jjb/scripts/packagecloud_push.sh
index 95ea92ed9..80226aa04 100644..100755
--- a/jjb/scripts/packagecloud_push.sh
+++ b/jjb/scripts/packagecloud_push.sh
@@ -1,39 +1,113 @@
#!/bin/bash
-# PCIO_CO is a Jenkins Global Environment variable
-set -x
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/packagecloud_push.sh"
+
+set -euxo pipefail
+
+line="*************************************************************************"
+
+# Nothing was built if this is a merge job being run when
+# the git HEAD id is not the same as the Gerrit New Revision ID
+if [[ ${JOB_NAME} == *merge* ]] && [ -n "${GERRIT_NEWREV:-}" ] &&
+ [ "$GERRIT_NEWREV" != "$GIT_COMMIT" ] ; then
+ echo -e "\n$line\nSkipping package push. A newer patch has been merged.\n$line\n"
+ exit 0
+fi
+
+DRYRUN="${DRYRUN:-}"
+if [ "${DRYRUN,,}" = "true" ] ; then
+ echo -e "\n$line\nSkipping package push because DRYRUN is '${DRYRUN,,}'.\n$line\n"
+ exit 0
+fi
echo "STARTING PACKAGECLOUD PUSH"
sleep 10
-if [ -f /usr/bin/zypper ]; then
- FACTER_OS="openSUSE"
-else
- FACTER_OS=$(/usr/bin/facter operatingsystem)
-fi
+FACTER_OS=$(/usr/bin/facter operatingsystem)
+push_cmd=""
+push_ext_deps_cmd=""
+ext_deps_pkg=""
+downloads_dir="/root/Downloads"
+
+# Build the packagecloud push command strings for Debian/Ubuntu artifacts.
+# Sets globals: push_cmd, push_ext_deps_cmd, ext_deps_pkg, ext_deps_ver.
+# $1 - distro name: "debian" or "ubuntu"; returns 1 on anything else.
+create_deb_push_cmds()
+{
+    local distro="$1"
+
+    if [ "$distro" = "debian" ] || [ "$distro" = "ubuntu" ] ; then
+        FACTER_LSBNAME=$(/usr/bin/facter lsbdistcodename)
+        DEBS=$(find . -type f -iname '*.deb' | grep -v vpp-ext-deps | xargs || true)
+        push_cmd="package_cloud push ${PCIO_CO}/${STREAM}/${distro}/${FACTER_LSBNAME}/main/ ${DEBS}"
+        # Version of the locally installed vpp-ext-deps package (may be empty).
+        ext_deps_ver="$(dpkg -l vpp-ext-deps | mawk '/vpp-ext-deps/{print $3}' || true)"
+        # Look for a matching vpp-ext-deps .deb in the workspace first, then in
+        # the executor downloads cache.  Only search when a version is known --
+        # an unquoted empty $ext_deps_ver would be word-split away and leave
+        # grep with no pattern argument.
+        if [ -n "$ext_deps_ver" ] ; then
+            ext_deps_pkg="$(find . -type f -iname 'vpp-ext-deps*.deb' | grep "$ext_deps_ver" || find "$downloads_dir" -type f -iname 'vpp-ext-deps*.deb' | grep "$ext_deps_ver" || true)"
+        else
+            ext_deps_pkg=""
+        fi
+        # Bug fix: this test was [ -n "$ext_deps_pkg}" ] -- the stray '}'
+        # inside the quotes made the string always non-empty, so the
+        # ext-deps push was enabled even when no package was found.
+        if [ -n "$ext_deps_pkg" ] ; then
+            push_ext_deps_cmd="package_cloud push ${PCIO_CO}/${STREAM}/${distro}/${FACTER_LSBNAME}/main/ ${ext_deps_pkg}"
+        fi
+    else
+        echo "ERROR: Unknown distro: '$distro'"
+        return 1
+    fi
+}
+# Build the packagecloud push command strings for CentOS/EL RPM artifacts.
+# Sets globals: push_cmd, push_ext_deps_cmd, ext_deps_pkg, ext_deps_ver.
+create_rpm_push_cmds()
+{
+    FACTER_OSMAJREL=$(/usr/bin/facter operatingsystemmajrelease)
+    FACTER_ARCH=$(/usr/bin/facter architecture)
+    RPMS=$(find . -type f -iregex '.*/.*\.\(s\)?rpm' | grep -v vpp-ext-deps | xargs || true)
+    push_cmd="package_cloud push ${PCIO_CO}/${STREAM}/el/${FACTER_OSMAJREL}/os/${FACTER_ARCH}/ ${RPMS}"
+    # Version of the installed vpp-ext-deps package (may be empty).
+    ext_deps_ver="$(dnf list vpp-ext-deps | mawk '/vpp-ext-deps/{print $2}' || true)"
+    # Only search for a matching vpp-ext-deps rpm when a version is known;
+    # an unquoted empty $ext_deps_ver would otherwise be word-split away
+    # and leave grep with no pattern argument.
+    if [ -n "$ext_deps_ver" ] ; then
+        ext_deps_pkg="$(find . -type f -iname 'vpp-ext-deps*.rpm' | grep "$ext_deps_ver" || find "$downloads_dir" -type f -iname 'vpp-ext-deps*.rpm' | grep "$ext_deps_ver" || true)"
+    else
+        ext_deps_pkg=""
+    fi
+    if [ -n "$ext_deps_pkg" ] ; then
+        push_ext_deps_cmd="package_cloud push ${PCIO_CO}/${STREAM}/el/${FACTER_OSMAJREL}/os/${FACTER_ARCH}/ ${ext_deps_pkg}"
+    fi
+}
+
+# PCIO_CO and SILO are Jenkins Global Environment variables defined in
+# .../ci-management/jenkins-config/global-vars-*.sh
if [ -f ~/.packagecloud ]; then
case "$FACTER_OS" in
- Ubuntu)
- FACTER_LSBNAME=$(/usr/bin/facter lsbdistcodename)
- DEBS=$(find . -type f -iname '*.deb')
- package_cloud push "${PCIO_CO}/${STREAM}/ubuntu/${FACTER_LSBNAME}/main/" ${DEBS}
- ;;
- CentOS)
- FACTER_OSMAJREL=$(/usr/bin/facter operatingsystemmajrelease)
- FACTER_ARCH=$(/usr/bin/facter architecture)
- RPMS=$(find . -type f -iregex '.*/.*\.\(s\)?rpm')
- package_cloud push "${PCIO_CO}/${STREAM}/el/${FACTER_OSMAJREL}/os/${FACTER_ARCH}/" ${RPMS}
- ;;
- openSUSE)
- # Use /etc/os-release on openSUSE to get $VERSION
- . /etc/os-release
- RPMS=$(find . -type f -iregex '.*/.*\.\(s\)?rpm' | grep -v 'vpp-ext-deps')
- VPP_EXT_RPMS=$(find . -type f -iregex '.*/.*\.\(s\)?rpm' | grep 'vpp-ext-deps')
- package_cloud push "${PCIO_CO}/${STREAM}/opensuse/${VERSION}/" ${RPMS}
- # This file may have already been uploaded. Don't error out if it exists.
- package_cloud push "${PCIO_CO}/${STREAM}/opensuse/${VERSION}/" ${VPP_EXT_RPMS} --skip-errors
- ;;
+ Debian)
+ create_deb_push_cmds debian
+ ;;
+ Ubuntu)
+ create_deb_push_cmds ubuntu
+ ;;
+ CentOS)
+ create_rpm_push_cmds
+ ;;
+ *)
+ echo -e "\n$line\n* ERROR: Unsupported OS '$FACTER_OS'\n* PACKAGECLOUD PUSH FAILED!\n$line\n"
+ exit 1
+ ;;
esac
+ if [ "${SILO,,}" = "sandbox" ] ; then
+ echo "SANDBOX: skipping '$push_cmd'"
+ if [ -n "$push_ext_deps_cmd" ] ; then
+ echo "SANDBOX: skipping '$push_ext_deps_cmd'"
+ fi
+ else
+ $push_cmd
+ if [ -n "$push_ext_deps_cmd" ] ; then
+ $push_ext_deps_cmd || true
+ fi
+ fi
+else
+ echo "ERROR: Missing '~/.packagecloud' for user '$(id)'"
+ echo "PACKAGECLOUD PUSH FAILED!"
+ exit 1
fi
+
+echo -e "\n$line\n* PACKAGECLOUD PUSH COMPLETE\n$line\n"
diff --git a/jjb/scripts/post_build_deploy_archives.sh b/jjb/scripts/post_build_deploy_archives.sh
new file mode 100755
index 000000000..a332f21e5
--- /dev/null
+++ b/jjb/scripts/post_build_deploy_archives.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/post_build_deploy_archives.sh"
+
+set +e # Do not affect the build result if some part of archiving fails.
+WS_ARCHIVES_DIR="$WORKSPACE/archives"
+BUILD_ENV_LOG="$WS_ARCHIVES_DIR/_build-enviroment-variables.log"
+
+if curl --output robot-plugin.zip "$BUILD_URL/robot/report/*zip*/robot-plugin.zip"; then
+ unzip -d ./archives robot-plugin.zip
+fi
+
+# Generate gdb-command script to output vpp stack traceback from core files.
+gdb_cmdfile="/tmp/gdb-commands"
+cat >$gdb_cmdfile <<'__END__'
+# Usage:
+# gdb $BINFILE $CORE -ex 'source -v gdb-commands' -ex quit
+
+set pagination off
+thread apply all bt
+
+define printstack
+ set $i=0
+ while $i < 15
+ frame $i
+ x/i $pc
+ info locals
+ info reg
+ set $i = $i + 1
+ end
+end
+thread apply all printstack
+
+# info proc mappings
+
+__END__
+
+STACKTRACE=""
+# Returns stacktrace filename in STACKTRACE
+generate_vpp_stacktrace_and_delete_core() {
+ local corefile="$1"
+ echo "Uncompressing core file $file"
+ gunzip "$corefile"
+ corefile="${corefile::(-3)}"
+ if grep -qe 'debug' <<< "$WORKSPACE" ; then
+ local binfile="$WORKSPACE/build-root/install-vpp_debug-native/vpp/bin/vpp"
+ else
+ local binfile="$WORKSPACE/build-root/install-vpp-native/vpp/bin/vpp"
+ fi
+
+ echo "Generating stack trace from core file: $corefile"
+ STACKTRACE="${corefile}.stacktrace"
+ gdb "$binfile" $corefile -ex 'source -v /tmp/gdb-commands' -ex quit > $STACKTRACE
+ # remove the core to save space
+ echo "Removing core file: $corefile"
+ rm -f "$corefile"
+ # Dump stacktrace to console log
+ if [ -f "$STACKTRACE" ] ; then
+ echo -e "\n=====[ $STACKTRACE ]=====\n$(cat $STACKTRACE)\n=====[ $STACKTRACE ]=====\n"
+ gzip "$STACKTRACE"
+ else
+ echo "Stacktrace file not generated!"
+ STACKTRACE=""
+ fi
+}
+
+mkdir -p "$WS_ARCHIVES_DIR"
+
+# generate stack trace for VPP core files for upload instead of core file.
+if [ -d "$WORKSPACE/build-root" ] ; then
+    # NUL-delimited read loop so paths containing whitespace survive;
+    # 'for f in $(find ...)' would word-split them.
+    while IFS= read -r -d '' file ; do
+        generate_vpp_stacktrace_and_delete_core "$file"
+    done < <(find "$WS_ARCHIVES_DIR" -type f -name 'core*.gz' -print0)
+fi
+
+# Remove any socket files in archive (they cannot be uploaded).
+find "$WS_ARCHIVES_DIR" -type s -exec rm -rf {} \;
+
+echo "Workspace archived artifacts:"
+ls -alR "$WS_ARCHIVES_DIR"
diff --git a/jjb/scripts/post_build_executor_info.sh b/jjb/scripts/post_build_executor_info.sh
new file mode 100755
index 000000000..d81b613ed
--- /dev/null
+++ b/jjb/scripts/post_build_executor_info.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/post_build_executor_info.sh"
+
+# Output executor runtime attributes [again] in case the job fails prior to
+# running setup_executor_env.sh
+long_line="************************************************************************"
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_ARCH=$(uname -m)
+
+# Requires all nomad client machines to run the following command
+# and mount /scratch/nomad into the docker container:
+# sudo mkdir -p /scratch/nomad && echo "$(hostname)-$(uname -m)" | sudo tee /scratch/nomad/nomad-client
+nomad_client_file="/scratch/nomad/nomad-client"
+if [ -f "$nomad_client_file" ] ; then
+ NOMAD_CLIENT="$(cat $nomad_client_file)"
+else
+ NOMAD_CLIENT="Unknown"
+fi
+
+echo "$long_line"
+echo "Executor Runtime Attributes:"
+echo "OS: $OS_ID-$OS_VERSION_ID"
+echo " $(uname -a)"
+echo "Number CPUs: $(nproc)"
+echo "Arch: $OS_ARCH"
+echo "Nomad Client Hostname: $NOMAD_CLIENT"
+echo "Container ID: $(hostname)"
+echo "$long_line"
+echo -e "lscpu:\n$(lscpu)"
+echo "$long_line"
+echo -e "df -h:\n$(df -h)"
+echo "$long_line"
+echo -e "free -m:\n$(free -m)"
+
+if [ -n "$(which ccache)" ] ; then
+ echo "$long_line"
+ echo "ccache statistics:"
+ [ -n "${CCACHE_DISABLE:-}" ] && echo "CCACHE_DISABLE = '$CCACHE_DISABLE'"
+ [ -n "${CCACHE_DIR:-}" ] && echo "CCACHE_DIR = '$CCACHE_DIR'"
+ ccache -s
+fi
+
+echo "$long_line"
diff --git a/jjb/scripts/publish_cov.sh b/jjb/scripts/publish_cov.sh
new file mode 100644
index 000000000..cdbbcdd86
--- /dev/null
+++ b/jjb/scripts/publish_cov.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> publish_cov.sh"
+
+set -exuo pipefail
+
+CDN_URL="s3-docs-7day.fd.io"
+bucket="vpp-docs-7day-retention"
+# Use the same bucket path as logs so that the code coverage report can be viewed by
+# s/s3-logs/s3-docs-7day/ in the URL after selecting the logs URL from
+# the jenkins job page.
+bucket_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
+
+if [[ ${JOB_NAME} == *verify* ]]; then
+ case "${JOB_NAME}" in
+ *"vpp-cov"*)
+ workspace_dir="${WORKSPACE}/build-root/test-coverage/html"
+ ;;
+ *)
+ die "Unknown job: ${JOB_NAME}"
+ esac
+else
+ die "Unknown job: ${JOB_NAME}"
+fi
+
+export TF_VAR_workspace_dir="$workspace_dir"
+export TF_VAR_bucket_path="$bucket_path"
+export TF_VAR_bucket="$bucket"
+export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials
+export AWS_DEFAULT_REGION="us-east-1"
+
+echo "INFO: archiving test coverage to S3 bucket '$bucket'"
+pushd ..
+terraform init -no-color
+terraform apply -no-color -auto-approve
+popd
+
+echo "S3 Test Coverage: <a href=\"https://${CDN_URL}/${bucket_path}\">https://${CDN_URL}/${bucket_path}</a>"
diff --git a/jjb/scripts/publish_docs.sh b/jjb/scripts/publish_docs.sh
new file mode 100755
index 000000000..63bb5bc89
--- /dev/null
+++ b/jjb/scripts/publish_docs.sh
@@ -0,0 +1,83 @@
+#!/bin/bash
+
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> publish_docs.sh"
+
+set -exuo pipefail
+
+CDN_URL="s3-docs.fd.io"
+bucket="fdio-docs-s3-cloudfront-index"
+
+if [[ ${JOB_NAME} == *merge* ]]; then
+ case "${JOB_NAME}" in
+ *"csit-trending"*)
+ workspace_dir="${WORKSPACE}/resources/tools/presentation/_build"
+ bucket_path="/csit/${GERRIT_BRANCH}/trending/"
+ ;;
+ *"csit-report"*)
+ workspace_dir="${WORKSPACE}/resources/tools/presentation/_build"
+ bucket_path="/csit/${GERRIT_BRANCH}/report/"
+ ;;
+ *"csit-docs"*)
+ workspace_dir="${WORKSPACE}/resources/tools/doc_gen/_build"
+ bucket_path="/csit/${GERRIT_BRANCH}/docs/"
+ ;;
+ *"hicn-docs"*)
+ hicn_release="$(git describe --long --match "v*" | cut -d- -f1 | sed -e 's/^v//')"
+ workspace_dir="${WORKSPACE}/build/doc/deploy-site"
+ bucket_path="/hicn/${hicn_release}/"
+ ;;
+ *"vpp-docs"*)
+ vpp_release="$(${WORKSPACE}/build-root/scripts/version rpm-version)"
+ workspace_dir="${WORKSPACE}/build-root/docs/html"
+ bucket_path="/vpp/${vpp_release}/"
+ ;;
+ *)
+ die "Unknown job: ${JOB_NAME}"
+ esac
+elif [[ ${JOB_NAME} == *verify* ]]; then
+ bucket="vpp-docs-7day-retention"
+ # Use the same bucket path as logs so that the docs can be viewed by
+ # s/s3-logs/s3-docs-7day/ in the URL after selecting the logs URL from
+ # the jenkins job page.
+ bucket_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
+ case "${JOB_NAME}" in
+ *"hicn-docs"*)
+ workspace_dir="${WORKSPACE}/build/doc/deploy-site"
+ ;;
+ *"vpp-docs"*)
+ CDN_URL="s3-docs-7day.fd.io"
+ workspace_dir="${WORKSPACE}/build-root/docs/html"
+ ;;
+ *)
+ die "Unknown job: ${JOB_NAME}"
+ esac
+else
+ die "Unknown job: ${JOB_NAME}"
+fi
+
+export TF_VAR_workspace_dir="$workspace_dir"
+export TF_VAR_bucket_path="$bucket_path"
+export TF_VAR_bucket="$bucket"
+export AWS_SHARED_CREDENTIALS_FILE=$HOME/.aws/credentials
+export AWS_DEFAULT_REGION="us-east-1"
+
+echo "INFO: archiving docs to S3 bucket '$bucket'"
+pushd ..
+terraform init -no-color
+terraform apply -no-color -auto-approve
+popd
+
+echo "S3 docs: <a href=\"https://${CDN_URL}/${bucket_path}\">https://${CDN_URL}/${bucket_path}</a>"
diff --git a/jjb/scripts/publish_library_py.sh b/jjb/scripts/publish_library_py.sh
new file mode 100644
index 000000000..1cbeb23c0
--- /dev/null
+++ b/jjb/scripts/publish_library_py.sh
@@ -0,0 +1,318 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> publish_library_py.sh"
+
+set -exuo pipefail
+
+PYTHON_SCRIPT="/w/workspace/publish_library.py"
+
+pip3 install boto3
+mkdir -p $(dirname "$PYTHON_SCRIPT")
+
+cat >$PYTHON_SCRIPT <<'END_OF_PYTHON_SCRIPT'
+#!/usr/bin/python3
+
+"""S3 publish library."""
+
+import glob
+import gzip
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import boto3
+from botocore.exceptions import ClientError
+import requests
+import six
+
+
+logging.basicConfig(
+ format=u"%(levelname)s: %(message)s",
+ stream=sys.stdout,
+ level=logging.INFO
+)
+logging.getLogger(u"botocore").setLevel(logging.INFO)
+
+
+FILE_TYPE = {
+ u"xml": u"application/xml",
+ u"html": u"text/html",
+ u"txt": u"text/plain",
+ u"log": u"text/plain",
+ u"css": u"text/css",
+ u"md": u"text/markdown",
+ u"rst": u"text/x-rst",
+ u"csv": u"text/csv",
+ u"svg": u"image/svg+xml",
+ u"jpg": u"image/jpeg",
+ u"png": u"image/png",
+ u"gif": u"image/gif",
+ u"js": u"application/javascript",
+ u"pdf": u"application/pdf",
+ u"json": u"application/json",
+ u"otf": u"font/otf",
+ u"ttf": u"font/ttf",
+ u"woff": u"font/woff",
+ u"woff2": u"font/woff2"
+}
+
+
+def compress_text(src_dpath):
+    """Compress all text files in directory.
+
+    Each file matching one of the text-like patterns below is gzip-compressed
+    in place: a ``<name>.gz`` copy is written and the original file is
+    deleted, so subsequent uploads can be served with
+    ``Content-Encoding: gzip``.
+
+    :param src_dpath: Input dir path.
+    :type src_dpath: str
+    """
+    # Remember the caller's cwd; restored after processing.
+    save_dir = os.getcwd()
+    os.chdir(src_dpath)
+
+    compress_types = [
+        "**/*.html",
+        "**/*.log",
+        "**/*.txt",
+        "**/*.xml",
+        "**/*.json"
+    ]
+    paths = []
+    for _type in compress_types:
+        # '**' requires recursive=True to match nested directories.
+        search = os.path.join(src_dpath, _type)
+        paths.extend(glob.glob(search, recursive=True))
+
+    for _file in paths:
+        # glob may follow symlink paths that open can't find
+        if os.path.exists(_file):
+            gz_file = u"{}.gz".format(_file)
+            with open(_file, "rb") as src, gzip.open(gz_file, "wb") as dest:
+                shutil.copyfileobj(src, dest)
+            # Drop the uncompressed original; only the .gz copy is kept.
+            os.remove(_file)
+
+    os.chdir(save_dir)
+
+
+def copy_archives(workspace):
+ """Copy files or directories in a $WORKSPACE/archives to the current
+ directory.
+
+ :params workspace: Workspace directery with archives directory.
+ :type workspace: str
+ """
+ archives_dir = os.path.join(workspace, u"archives")
+ dest_dir = os.getcwd()
+
+ logging.debug(u"Copying files from " + archives_dir + u" to " + dest_dir)
+
+ if os.path.exists(archives_dir):
+ if os.path.isfile(archives_dir):
+ logging.error(u"Target is a file, not a directory.")
+ raise RuntimeError(u"Not a directory.")
+ else:
+ logging.debug("Archives dir {} does exist.".format(archives_dir))
+ for item in os.listdir(archives_dir):
+ src = os.path.join(archives_dir, item)
+ dst = os.path.join(dest_dir, item)
+ try:
+ if os.path.isdir(src):
+ shutil.copytree(src, dst, symlinks=False, ignore=None)
+ else:
+ shutil.copy2(src, dst)
+ except shutil.Error as e:
+ logging.error(e)
+ raise RuntimeError(u"Could not copy " + src)
+ else:
+ logging.error(u"Archives dir does not exist.")
+ raise RuntimeError(u"Missing directory " + archives_dir)
+
+
+def upload(s3_resource, s3_bucket, src_fpath, s3_path):
+ """Upload single file to destination bucket.
+
+ :param s3_resource: S3 storage resource.
+ :param s3_bucket: S3 bucket name.
+ :param src_fpath: Input file path.
+ :param s3_path: Destination file path on remote storage.
+ :type s3_resource: Object
+ :type s3_bucket: str
+ :type src_fpath: str
+ :type s3_path: str
+ """
+ def is_gzip_file(filepath):
+ with open(filepath, u"rb") as test_f:
+ return test_f.read(2) == b"\x1f\x8b"
+
+ if os.path.isdir(src_fpath):
+ return
+ if os.path.isfile(src_fpath):
+ file_name, file_extension = os.path.splitext(src_fpath)
+ content_encoding = u""
+ content_type = u"application/octet-stream"
+ if is_gzip_file(src_fpath):
+ file_name, file_extension = os.path.splitext(file_name)
+ content_encoding = "gzip"
+ content_type = FILE_TYPE.get(
+ file_extension.strip("."),
+ u"application/octet-stream"
+ )
+
+ extra_args = dict()
+ extra_args[u"ContentType"] = content_type
+ if content_encoding:
+ extra_args[u"ContentEncoding"] = content_encoding
+
+ try:
+ s3_resource.Bucket(s3_bucket).upload_file(
+ src_fpath, s3_path, ExtraArgs=extra_args
+ )
+ logging.info(u"Successfully uploaded to " + s3_path)
+ except ClientError as e:
+ logging.error(e)
+
+
+def upload_recursive(s3_resource, s3_bucket, src_fpath, s3_path):
+ """Recursively uploads input folder to destination.
+
+ Example:
+ - s3_bucket: logs.fd.io
+ - src_fpath: /workspace/archives.
+ - s3_path: /hostname/job/id/
+
+ :param s3_resource: S3 storage resource.
+ :param s3_bucket: S3 bucket name.
+ :param src_fpath: Input folder path.
+ :param s3_path: S3 destination path.
+ :type s3_resource: Object
+ :type s3_bucket: str
+ :type src_fpath: str
+ :type s3_path: str
+ """
+ for path, _, files in os.walk(src_fpath):
+ for file in files:
+ _path = path.replace(src_fpath, u"")
+ _src_fpath = path + u"/" + file
+ _s3_path = os.path.normpath(s3_path + u"/" + _path + u"/" + file)
+ upload(
+ s3_resource=s3_resource,
+ s3_bucket=s3_bucket,
+ src_fpath=_src_fpath,
+ s3_path=_s3_path
+ )
+
+
+def deploy_docs(s3_bucket, s3_path, docs_dir):
+ """Ship docs dir content to S3 bucket. Requires the s3 bucket to exist.
+
+ :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
+ :param s3_path: Path on S3 bucket to place the docs. Eg:
+ csit/${GERRIT_BRANCH}/report
+ :param docs_dir: Directory in which to recursively upload content.
+ :type s3_bucket: Object
+ :type s3_path: str
+ :type docs_dir: str
+ """
+ try:
+ s3_resource = boto3.resource(
+ u"s3",
+ endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
+ )
+ except KeyError:
+ s3_resource = boto3.resource(
+ u"s3"
+ )
+
+ upload_recursive(
+ s3_resource=s3_resource,
+ s3_bucket=s3_bucket,
+ src_fpath=docs_dir,
+ s3_path=s3_path
+ )
+
+
+def deploy_s3(s3_bucket, s3_path, build_url, workspace):
+ """Add logs and archives to temp directory to be shipped to S3 bucket.
+ Fetches logs and system information and pushes them and archives to S3
+ for log archiving.
+ Requires the s3 bucket to exist.
+
+ :param s3_bucket: Name of S3 bucket. Eg: lf-project-date
+ :param s3_path: Path on S3 bucket place the logs and archives. Eg:
+ $JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER
+ :param build_url: URL of the Jenkins build. Jenkins typically provides this
+ via the $BUILD_URL environment variable.
+ :param workspace: Directory in which to search, typically in Jenkins this is
+ $WORKSPACE
+ :type s3_bucket: Object
+ :type s3_path: str
+ :type build_url: str
+ :type workspace: str
+ """
+ try:
+ s3_resource = boto3.resource(
+ u"s3",
+ endpoint_url=os.environ[u"AWS_ENDPOINT_URL"]
+ )
+ except KeyError:
+ s3_resource = boto3.resource(
+ u"s3"
+ )
+
+ previous_dir = os.getcwd()
+ work_dir = tempfile.mkdtemp(prefix="backup-s3.")
+ os.chdir(work_dir)
+
+ # Copy archive files to tmp dir.
+ copy_archives(workspace)
+
+ # Create additional build logs.
+ with open(u"_build-details.log", u"w+") as f:
+ f.write(u"build-url: " + build_url)
+
+ # Magic string used to trim console logs at the appropriate level during
+ # wget.
+ MAGIC_STRING = u"-----END_OF_BUILD-----"
+ logging.info(MAGIC_STRING)
+
+ resp = requests.get(build_url + u"/consoleText")
+ with open(u"console.log", u"w+", encoding=u"utf-8") as f:
+ f.write(
+ six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
+ )
+
+ query = u"time=HH:mm:ss&appendLog"
+ resp = requests.get(build_url + u"/timestamps?" + query)
+ with open(u"console-timestamp.log", u"w+", encoding=u"utf-8") as f:
+ f.write(
+ six.text_type(resp.content.decode(u"utf-8").split(MAGIC_STRING)[0])
+ )
+
+ compress_text(work_dir)
+
+ upload_recursive(
+ s3_resource=s3_resource,
+ s3_bucket=s3_bucket,
+ src_fpath=work_dir,
+ s3_path=s3_path
+ )
+
+ os.chdir(previous_dir)
+ shutil.rmtree(work_dir)
+
+
+if __name__ == u"__main__":
+ globals()[sys.argv[1]](*sys.argv[2:])
+
+END_OF_PYTHON_SCRIPT
diff --git a/jjb/scripts/publish_logs.sh b/jjb/scripts/publish_logs.sh
new file mode 100644
index 000000000..a567106ad
--- /dev/null
+++ b/jjb/scripts/publish_logs.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> publish_logs.sh"
+
+PYTHON_SCRIPT="/w/workspace/publish_library.py"
+
+mkdir -p "$WORKSPACE/archives"
+
+s3_path="$JENKINS_HOSTNAME/$JOB_NAME/$BUILD_NUMBER/"
+
+echo "INFO: S3 path $s3_path"
+
+echo "INFO: archiving logs to S3"
+python3 $PYTHON_SCRIPT deploy_s3 "$S3_BUCKET" "$s3_path" \
+ "$BUILD_URL" "$WORKSPACE"
+
+echo "S3 build logs: <a href=\"https://$CDN_URL/$s3_path\">https://$CDN_URL/$s3_path</a>"
diff --git a/jjb/scripts/setup_executor_env.sh b/jjb/scripts/setup_executor_env.sh
new file mode 100755
index 000000000..727824909
--- /dev/null
+++ b/jjb/scripts/setup_executor_env.sh
@@ -0,0 +1,119 @@
+#!/bin/bash
+
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/setup_executor_env.sh"
+
+set -e -o pipefail
+
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_ARCH=$(uname -m)
+dockerfile="/scratch/docker-build/Dockerfile"
+file_delimiter="----- %< -----"
+long_line="************************************************************************"
+downloads_cache="/root/Downloads"
+
+# Requires all nomad client machines to run the following command
+# and mount /scratch/nomad into the docker container:
+# sudo mkdir -p /scratch/nomad && echo "$(hostname)-$(uname -m)" | sudo tee /scratch/nomad/nomad-client
+nomad_client_file="/scratch/nomad/nomad-client"
+if [ -f "$nomad_client_file" ] ; then
+ NOMAD_CLIENT="$(cat $nomad_client_file)"
+else
+ NOMAD_CLIENT="Unknown"
+fi
+
+# Node info
+echo "$long_line"
+echo "Executor Runtime Attributes:"
+echo "OS: $OS_ID-$OS_VERSION_ID"
+echo "Arch: $OS_ARCH"
+echo "Nomad Client Hostname: $NOMAD_CLIENT"
+echo "Container ID: $(hostname)"
+
+echo "$long_line"
+if [ -f "$dockerfile" ] ; then
+ echo -e "Executor Dockerfile: ${dockerfile}\n${file_delimiter}"
+ cat $dockerfile
+ echo "$file_delimiter"
+else
+ echo "Unknown Executor: '$dockerfile' not found!"
+fi
+
+# Performance analysis
+perf_trials=2
+perf_interval=1
+if [ "$OS_ID" == "ubuntu" ] || [ "$OS_ID" = "debian" ] ; then
+ SYSSTAT_PATH="/var/log/sysstat"
+elif [ "$OS_ID" == "centos" ] ; then
+ if [ "$OS_VERSION_ID" = "7" ] ; then
+ SYSSTAT_PATH="/var/log/sa/sa02"
+ else
+ SYSSTAT_PATH="/var/log/sa"
+ fi
+fi
+echo "$long_line"
+echo "Virtual memory stat"
+vmstat ${perf_interval} ${perf_trials}
+echo "CPU time breakdowns per CPU"
+mpstat -P ALL ${perf_interval} ${perf_trials}
+echo "Per-process summary"
+pidstat ${perf_interval} ${perf_trials}
+echo "Block device stats"
+iostat -xz ${perf_interval} ${perf_trials}
+echo "Memory utilization"
+free -m
+echo "Network interface throughput"
+sar -n DEV -o ${SYSSTAT_PATH} ${perf_interval} ${perf_trials}
+echo "TCP metrics"
+sar -n TCP,ETCP -o ${SYSSTAT_PATH} ${perf_interval} ${perf_trials}
+
+# SW stack
+echo "$long_line"
+echo "Executor package list:"
+if [ "$OS_ID" == "ubuntu" ] || [ "$OS_ID" = "debian" ] ; then
+ dpkg-query -W -f='${binary:Package}\t${Version}\n' | column -t || true
+elif [ "$OS_ID" == "centos" ] ; then
+ yum list installed || true
+fi
+
+echo "$long_line"
+echo "Python3 package list:"
+pip3 list 2>/dev/null | column -t || true
+
+echo "$long_line"
+echo "Executor Downloads cache '$downloads_cache':"
+ls -lh "$downloads_cache" || true
+
+echo "$long_line"
+echo "DNS nameserver config in '/etc/resolv.conf':"
+cat /etc/resolv.conf || true
+
+echo "$long_line"
+if [ -n "$(which ccache || true)" ] ; then
+ if [ -z "${CCACHE_DIR:-}" ] || [ ! -d "$CCACHE_DIR" ] ; then
+ echo "CCACHE_DIR='$CCACHE_DIR' is missing, disabling CCACHE..."
+ export CCACHE_DISABLE="1"
+ fi
+ if [ -n "${CCACHE_DISABLE:-}" ] ; then
+ echo "CCACHE_DISABLE = '$CCACHE_DISABLE'"
+ fi
+ echo "ccache statistics:"
+ ccache -s
+else
+ echo "WARNING: ccache is not installed!"
+ export CCACHE_DISABLE="1"
+fi
+echo "$long_line"
diff --git a/jjb/scripts/setup_jvpp_dev_env.sh b/jjb/scripts/setup_jvpp_dev_env.sh
deleted file mode 100644
index 9017f0db6..000000000
--- a/jjb/scripts/setup_jvpp_dev_env.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/bin/bash
-set -e -o pipefail
-
-# Figure out what system we are running on
-if [[ -f /etc/lsb-release ]];then
- . /etc/lsb-release
-elif [[ -f /etc/redhat-release ]];then
- sudo yum install -y redhat-lsb
- DISTRIB_ID=`lsb_release -si`
- DISTRIB_RELEASE=`lsb_release -sr`
- DISTRIB_CODENAME=`lsb_release -sc`
- DISTRIB_DESCRIPTION=`lsb_release -sd`
-fi
-echo DISTRIB_ID: $DISTRIB_ID
-echo DISTRIB_RELEASE: $DISTRIB_RELEASE
-echo DISTRIB_CODENAME: $DISTRIB_CODENAME
-echo DISTRIB_DESCRIPTION: $DISTRIB_DESCRIPTION
-
-JVPP_VERSION=`./version`
-echo JVPP_VERSION: $JVPP_VERSION
-# Check release version
-if [[ "$JVPP_VERSION" == *"-release" ]]; then
- # at the time when JVPP release packages are being build,
- # vpp release packages are already promoted to release repository.
- # Therefore we need to switch to release repository in order to download
- # correct vpp package versions
- STREAM="release"
-fi
-
-function setup {
- if ! [[ -z ${REPO_NAME} ]]; then
- echo "INSTALLING VPP-DPKG-DEV from apt/yum repo"
- REPO_URL="https://packagecloud.io/fdio/${STREAM}"
- echo "REPO_URL: ${REPO_URL}"
- # Setup by installing vpp-dev and vpp-lib
- if [[ "$DISTRIB_ID" == "Ubuntu" ]]; then
- if ! [[ "${STREAM}" == "master" ]]; then
- echo "stable branch - clearing all fdio repos. new one will be installed."
- sudo rm -f /etc/apt/sources.list.d/fdio_*.list
- fi
- if [[ -f /etc/apt/sources.list.d/99fd.io.list ]];then
- echo "Deleting: /etc/apt/sources.list.d/99fd.io.list"
- sudo rm /etc/apt/sources.list.d/99fd.io.list
- fi
- curl -s https://packagecloud.io/install/repositories/fdio/${STREAM}/script.deb.sh | sudo bash
- sudo apt-get -y --force-yes install libvppinfra libvppinfra-dev vpp vpp-dev vpp-plugin-core || true
- elif [[ "$DISTRIB_ID" == "CentOS" ]]; then
- if [[ -f /etc/yum.repos.d/fdio-master.repo ]]; then
- echo "Deleting: /etc/yum.repos.d/fdio-master.repo"
- sudo rm /etc/yum.repos.d/fdio-master.repo
- fi
- curl -s https://packagecloud.io/install/repositories/fdio/${STREAM}/script.rpm.sh | sudo bash
- sudo yum -y install vpp-devel vpp-lib vpp-plugins || true
- fi
- fi
-}
-
-setup \ No newline at end of file
diff --git a/jjb/scripts/setup_vpp_dpdk_dev_env.sh b/jjb/scripts/setup_vpp_dpdk_dev_env.sh
deleted file mode 100644
index c98f6def4..000000000
--- a/jjb/scripts/setup_vpp_dpdk_dev_env.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/bash
-set -e -o pipefail
-
-OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
-OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
-
-function setup {
- if ! [ -z ${REPO_NAME} ]; then
- echo "INSTALLING VPP-DPKG-DEV from apt/yum repo"
- REPO_URL="https://packagecloud.io/fdio/${STREAM}"
- echo "REPO_URL: ${REPO_URL}"
- # Setup by installing vpp-dev and vpp-lib
- if [ "$OS_ID" == "ubuntu" ]; then
- if ! [ "${STREAM}" == "master" ]; then
- echo "tree not master deleting packagecloud repo pointer"
- sudo rm -f /etc/apt/sources.list.d/fdio_master.list
- curl -s https://packagecloud.io/install/repositories/fdio/${STREAM}/script.deb.sh | sudo bash
- fi
- if [ -f /etc/apt/sources.list.d/99fd.io.list ];then
- echo "Deleting: /etc/apt/sources.list.d/99fd.io.list"
- sudo rm /etc/apt/sources.list.d/99fd.io.list
- fi
- sudo apt-get update -qq || true
- sudo apt-get -y --force-yes install vpp-dpdk-dev || true
- sudo apt-get -y --force-yes install vpp-dpdk-dkms || true
- sudo apt-get -y --force-yes install vpp-ext-deps || true
- elif [ "$OS_ID" == "centos" ]; then
- if [ -f /etc/yum.repos.d/fdio-master.repo ]; then
- echo "Deleting: /etc/yum.repos.d/fdio-master.repo"
- sudo rm /etc/yum.repos.d/fdio-master.repo
- fi
- if ! [ "${STREAM}" == "master" ]; then
- echo "tree not master deleting packagecloud repo pointer"
- sudo rm -f /etc/yum.repos.d/fdio_master.repo
- curl -s https://packagecloud.io/install/repositories/fdio/${STREAM}/script.rpm.sh | sudo bash
- fi
- sudo yum -y install vpp-dpdk-devel || true
- sudo yum -y install vpp-ext-deps || true
- elif [ "$OS_ID" == "opensuse" ]; then
- REPO_URL="${NEXUSPROXY}/content/repositories/fd.io.${REPO_NAME}"
- echo "REPO_URL: ${REPO_URL}"
- sudo cat << EOF > fdio-master.repo
-[fdio-master]
-name=fd.io master branch latest merge
-baseurl=${REPO_URL}
-enabled=1
-gpgcheck=0
-EOF
- sudo mv fdio-master.repo /etc/yum/repos.d/fdio-master.repo
- sudo yum -y install vpp-dpdk-devel || true
- sudo yum -y install vpp-ext-deps || true
- elif [ "$OS_ID" == "opensuse-leap" ]; then
- REPO_URL="${NEXUSPROXY}/content/repositories/fd.io.${REPO_NAME}"
- echo "REPO_URL: ${REPO_URL}"
- sudo cat << EOF > fdio-master.repo
-[fdio-master]
-name=fd.io master branch latest merge
-baseurl=${REPO_URL}
-enabled=1
-gpgcheck=0
-EOF
- sudo mv fdio-master.repo /etc/yum/repos.d/fdio-master.repo
- sudo yum -y install vpp-dpdk-devel || true
- sudo yum -y install vpp-ext-deps || true
- fi
- fi
-}
-
-setup
diff --git a/jjb/scripts/setup_vpp_ext_deps.sh b/jjb/scripts/setup_vpp_ext_deps.sh
new file mode 100755
index 000000000..077dca163
--- /dev/null
+++ b/jjb/scripts/setup_vpp_ext_deps.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/setup_vpp_ext_deps.sh"
+
+# Normally we would have the settings in any bash script stricter:
+# set -e -o pipefail
+#
+# But there is a corner case scenario that triggers an error,
+# namely when a new packagecloud repo is created, it is completely
+# empty. Then the installation fails. However, since this
+# script is an optimization, it is okay for it to fail without failing
+# the entire job.
+#
+# Therefore, we do not use the "-e" here.
+
+set -o pipefail
+
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+
+echo "Installing vpp-ext-deps..."
+REPO_URL="https://packagecloud.io/fdio/${STREAM}"
+echo "REPO_URL: $REPO_URL"
+INSTALL_URL="https://packagecloud.io/install/repositories/fdio/${STREAM}"
+echo "INSTALL_URL: $INSTALL_URL"
+
+downloads_dir="/root/Downloads"
+
+# Setup by installing vpp-dev and vpp-lib
+if [ "${OS_ID,,}" == "ubuntu" ] || [ "${OS_ID,,}" == "debian" ] ; then
+ if [ "${STREAM}" != "master" ]; then
+ echo "stream '${STREAM}' is not master: replacing packagecloud apt sources list with stream specific list"
+ sudo apt-get -y remove vpp-ext-deps || true
+ sudo rm -f /etc/apt/sources.list.d/fdio_master.list
+ curl -s $INSTALL_URL/script.deb.sh | sudo bash || true
+ fi
+ sudo apt-get update -qq || true
+ vpp_ext_deps_version="$(apt-cache show vpp-ext-deps | mawk '/Version/ {print $2}' | head -1)"
+ vpp_ext_deps_arch="$(apt-cache show vpp-ext-deps | mawk '/Architecture/ {print $2}' | head -1)"
+ vpp_ext_deps_pkg="vpp-ext-deps_${vpp_ext_deps_version}_${vpp_ext_deps_arch}.deb"
+ if [ -f "$downloads_dir/$vpp_ext_deps_pkg" ] ; then
+ echo "Installing cached vpp-ext-deps pkg: $downloads_dir/$vpp_ext_deps_pkg"
+ sudo dpkg -i "$downloads_dir/$vpp_ext_deps_pkg" || true
+ else
+ echo "Installing vpp-ext-deps from packagecloud.io"
+ force_opts="--allow-downgrades --allow-remove-essential --allow-change-held-packages"
+ sudo apt-get -y $force_opts install vpp-ext-deps || true
+ fi
+ echo "Removing packagecloud.io repository references and running apt-get update"
+ sudo rm -f /etc/apt/sources.list.d/fdio_*.list
+ sudo apt-get update -qq || true
+else
+ echo "ERROR: Unsupported OS '$OS_ID'!"
+fi
diff --git a/jjb/scripts/setup_vpp_plugin_dev_env.sh b/jjb/scripts/setup_vpp_plugin_dev_env.sh
deleted file mode 100644
index 1b92adb84..000000000
--- a/jjb/scripts/setup_vpp_plugin_dev_env.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-set -e -o pipefail
-
-#!/bin/bash
-set -e -o pipefail
-
-# Figure out what system we are running on
-if [ -f /etc/lsb-release ];then
- . /etc/lsb-release
-elif [ -f /etc/redhat-release ];then
- sudo yum install -y redhat-lsb
- DISTRIB_ID=`lsb_release -si`
- DISTRIB_RELEASE=`lsb_release -sr`
- DISTRIB_CODENAME=`lsb_release -sc`
- DISTRIB_DESCRIPTION=`lsb_release -sd`
-fi
-echo DISTRIB_ID: $DISTRIB_ID
-echo DISTRIB_RELEASE: $DISTRIB_RELEASE
-echo DISTRIB_CODENAME: $DISTRIB_CODENAME
-echo DISTRIB_DESCRIPTION: $DISTRIB_DESCRIPTION
-
-function setup {
- REPO_URL="${NEXUSPROXY}/content/repositories/fd.io.${REPO_NAME}"
- echo "REPO_URL: ${REPO_URL}"
- # Setup by installing vpp-dev and vpp-lib
- if [ $DISTRIB_ID == "Ubuntu" ]; then
- echo "deb ${REPO_URL} ./" | sudo tee /etc/apt/sources.list.d/99fd.io.list
- sudo apt-get update
- sudo apt-get -y --force-yes install vpp-dev vpp-lib
- elif [[ $DISTRIB_ID == "CentOS" ]]; then
- sudo cat << EOF > fdio-master.repo
-[fdio-master]
-name=fd.io master branch latest merge
-baseurl=${REPO_URL}
-enabled=1
-gpgcheck=0
-EOF
- sudo mv fdio-master.repo /etc/yum.repos.d/fdio-master.repo
- sudo yum -y install vpp-devel vpp-lib
- fi
-}
-
-setup \ No newline at end of file
diff --git a/jjb/scripts/setup_vpp_ubuntu_docker_test.sh b/jjb/scripts/setup_vpp_ubuntu_docker_test.sh
index bdc1257f6..548ac5660 100644..100755
--- a/jjb/scripts/setup_vpp_ubuntu_docker_test.sh
+++ b/jjb/scripts/setup_vpp_ubuntu_docker_test.sh
@@ -1,17 +1,22 @@
#!/bin/bash
+
##############################################################################
-# Copyright (c) 2018 The Linux Foundation and others.
+# Copyright (c) 2021 The Linux Foundation and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
##############################################################################
+
+echo "---> jjb/scripts/setup_vpp_ubuntu_docker_test.sh"
+
set -e -o pipefail
OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
-if ! [ -z ${DOCKER_TEST} ] ; then
+if [ -n ${DOCKER_TEST} ] ; then
# for 4 cores:
# framework.VppTestCase.MIN_REQ_SHM + (num_cores * framework.VppTestCase.SHM_PER_PROCESS)
# 1073741824 == 1024M (1073741824 >> 20)
@@ -22,55 +27,6 @@ if ! [ -z ${DOCKER_TEST} ] ; then
# that 2048M is enough
MEM=2048M
fi
- sudo mount -o remount /dev/shm -o size=${MEM} || true
- echo "/dev/shm remounted"
-fi
-
-##container server node detection
-grep search /etc/resolv.conf || true
-
-if [ "${OS_ID}" == "ubuntu" ]; then
- dpkg-query -W -f='${binary:Package}\t${Version}\n' || true
- echo "************************************************************************"
- echo "pip list:"
- pip list || true
- echo "************************************************************************"
- echo "Contents of /var/cache/vpp/python/virtualenv/lib/python2.7/site-packages:"
- ls -lth /var/cache/vpp/python/virtualenv/lib/python2.7/site-packages || true
- echo "************************************************************************"
- echo "Contents of br Downloads:"
- ls -lth /w/Downloads || true
- echo "************************************************************************"
- echo "Contents of /w/dpdk for test folks:"
- echo "************************************************************************"
- ls -lth /w/dpdk || true
-elif [ "${OS_ID}" == "centos" ]; then
- yum list installed || true
- pip list || true
-elif [ "${OS_ID}" == "opensuse" ]; then
- yum list installed || true
- pip list || true
-elif [ "${OS_ID}" == "opensuse-leap" ]; then
- yum list installed || true
- pip list || true
-fi
-
-##This will remove any previously installed dpdk for old branch builds
-
-if [ "${GERRIT_BRANCH}" != "master" ]; then
- if [ "${OS_ID}" == "ubuntu" ]; then
- sudo apt-get -y remove vpp-dpdk-dev || true
- sudo apt-get -y remove vpp-dpdk-dkms || true
- sudo apt-get -y remove vpp-ext-deps || true
- elif [ "${OS_ID}" == "centos" ]; then
- sudo yum -y erase vpp-dpdk-devel || true
- sudo yum -y erase vpp-ext-deps || true
- sudo yum clean all || true
- elif [ "${OS_ID}" == "opensuse" ]; then
- sudo yum -y erase vpp-dpdk-devel || true
- sudo yum -y erase vpp-ext-deps || true
- elif [ "${OS_ID}" == "opensuse-leap" ]; then
- sudo yum -y erase vpp-dpdk-devel || true
- sudo yum -y erase vpp-ext-deps || true
- fi
+ sudo mount -o remount /dev/shm -o size=${MEM} || true
+ echo "/dev/shm remounted with size='${MEM}'"
fi
diff --git a/jjb/scripts/terraform_s3_docs_ship.sh b/jjb/scripts/terraform_s3_docs_ship.sh
new file mode 100644
index 000000000..7b2542ae4
--- /dev/null
+++ b/jjb/scripts/terraform_s3_docs_ship.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> terraform_s3_docs_ship.sh"
+
+set -exuo pipefail
+
+cat >"/w/workspace/main.tf" <<'END_OF_TERRAFORM_SCRIPT'
+terraform {
+ required_providers {
+ aws = {
+ source = "hashicorp/aws"
+ version = "5.18.1"
+ }
+ }
+}
+
+provider "aws" {
+ region = "us-east-1"
+ profile = "default"
+ s3_use_path_style = false
+ skip_credentials_validation = true
+ skip_metadata_api_check = true
+ skip_requesting_account_id = true
+}
+
+locals {
+ mime_types = {
+ xml = "application/xml",
+ html = "text/html",
+ txt = "text/plain",
+ log = "text/plain",
+ css = "text/css",
+ md = "text/markdown",
+ rst = "text/x-rst",
+ csv = "text/csv",
+ svg = "image/svg+xml",
+ jpg = "image/jpeg",
+ png = "image/png",
+ gif = "image/gif",
+ js = "application/javascript",
+ pdf = "application/pdf"
+ json = "application/json",
+ otf = "font/otf",
+ ttf = "font/ttf",
+ woff = "font/woff",
+ woff2 = "font/woff2"
+ }
+}
+
+variable "workspace_dir" {
+ description = "Workspace base directory"
+ type = string
+}
+
+variable "file_match_pattern" {
+ description = "File matching pattern"
+ type = string
+ default = "**/*"
+}
+
+variable "bucket" {
+ description = "S3 bucket name"
+ type = string
+}
+
+variable "bucket_path" {
+ description = "S3 bucket path to key"
+ type = string
+}
+
+resource "aws_s3_bucket_object" "object" {
+ for_each = fileset(var.workspace_dir, var.file_match_pattern)
+
+ bucket = var.bucket
+ key = "${var.bucket_path}${each.value}"
+ source = "${var.workspace_dir}/${each.value}"
+
+ cache_control = "no-store,max-age=0,s-maxage=0"
+ etag = filemd5("${var.workspace_dir}/${each.value}")
+ content_type = lookup(
+ local.mime_types,
+ regex("\\.(?P<extension>[A-Za-z0-9]+)$", each.value).extension,
+ "application/octet-stream"
+ )
+}
+END_OF_TERRAFORM_SCRIPT
diff --git a/jjb/scripts/vpp/api-checkstyle.sh b/jjb/scripts/vpp/api-checkstyle.sh
index 90740337e..a1c2f84a1 100644
--- a/jjb/scripts/vpp/api-checkstyle.sh
+++ b/jjb/scripts/vpp/api-checkstyle.sh
@@ -1,5 +1,20 @@
#!/bin/bash
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/api-checkstyle.sh"
+
VPP_CRC_CHECKER="extras/scripts/crcchecker.py"
VPP_CRC_CHECKER_CMD="$VPP_CRC_CHECKER --check-patchset"
@@ -11,13 +26,12 @@ send_notify() {
}
if [ -f $VPP_CRC_CHECKER ]; then
+ # API checker complains if the git repo is not clean.
+ # Help diagnosing those issues easier
+ git --no-pager diff
echo "Running $VPP_CRC_CHECKER_CMD"
if $VPP_CRC_CHECKER_CMD; then
echo "API check successful"
-
- # for now - notify the same room during the monitoring period about the successes as well
- WEBEX_TEAMS_MESSAGE="API check successful for $GERRIT_REFSPEC - see $BUILD_URL"
- send_notify
else
RET_CODE=$?
echo "API check failed: ret code $RET_CODE; please read https://wiki.fd.io/view/VPP/ApiChangeProcess and discuss with ayourtch@gmail.com if unsure how to proceed"
diff --git a/jjb/scripts/vpp/build.sh b/jjb/scripts/vpp/build.sh
index 68fa30d1a..850d61003 100644
--- a/jjb/scripts/vpp/build.sh
+++ b/jjb/scripts/vpp/build.sh
@@ -1,90 +1,139 @@
#!/bin/bash
-# basic build script example
-set -xe -o pipefail
-OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
-OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-echo OS_ID: $OS_ID
-echo OS_VERSION_ID: $OS_VERSION_ID
+echo "---> jjb/scripts/vpp/build.sh"
-# do nothing but print the current slave hostname
-hostname
-export CCACHE_DIR=/tmp/ccache
-if [ -d $CCACHE_DIR ];then
- echo $CCACHE_DIR exists
- du -sk $CCACHE_DIR
-else
- echo $CCACHE_DIR does not exist. This must be a new slave.
-fi
-
-echo "cat /etc/bootstrap.sha"
-if [ -f /etc/bootstrap.sha ];then
- cat /etc/bootstrap.sha
-else
- echo "Cannot find cat /etc/bootstrap.sha"
-fi
+set -euxo pipefail
-echo "cat /etc/bootstrap-functions.sha"
-if [ -f /etc/bootstrap-functions.sha ];then
- cat /etc/bootstrap-functions.sha
-else
- echo "Cannot find cat /etc/bootstrap-functions.sha"
+line="*************************************************************************"
+# Don't build anything if this is a merge job being run when
+# the git HEAD id is not the same as the Gerrit New Revision id.
+if [[ ${JOB_NAME} == *merge* ]] && [ -n "${GERRIT_NEWREV:-}" ] &&
+ [ "$GERRIT_NEWREV" != "$GIT_COMMIT" ] ; then
+ echo -e "\n$line\nSkipping build. A newer patch has been merged.\n$line\n"
+ exit 0
fi
-echo "sha1sum of this script: ${0}"
-sha1sum $0
-
-echo "CC=${CC}"
-echo "IS_CSIT_VPP_JOB=${IS_CSIT_VPP_JOB}"
-# If and only if we are doing verify *after* make verify was made to work
-# and we are not a CSIT job just building packages, then use make verify,
-# else use make pkg-verify.
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_ARCH=$(uname -m)
+DRYRUN="${DRYRUN:-}"
+IS_CSIT_VPP_JOB="${IS_CSIT_VPP_JOB:-}"
+MAKE_PARALLEL_FLAGS="${MAKE_PARALLEL_FLAGS:-}"
+MAKE_PARALLEL_JOBS="${MAKE_PARALLEL_JOBS:-}"
+MAKE_TEST_OS="${MAKE_TEST_OS:-ubuntu-22.04}"
+MAKE_TEST_MULTIWORKER_OS="${MAKE_TEST_MULTIWORKER_OS:-debian-11}"
+VPPAPIGEN_TEST_OS="${VPPAPIGEN_TEST_OS:-${MAKE_TEST_OS}}"
+BUILD_RESULT="SUCCESSFULLY COMPLETED"
+BUILD_ERROR=""
+RETVAL="0"
-if [ "x${MAKE_PARALLEL_FLAGS}" != "x" ]
-then
+if [ -n "${MAKE_PARALLEL_FLAGS}" ] ; then
echo "Building VPP. Number of cores for build set with" \
"MAKE_PARALLEL_FLAGS='${MAKE_PARALLEL_FLAGS}'."
-elif [ "x${MAKE_PARALLEL_JOBS}" != "x" ]
-then
+elif [ -n "${MAKE_PARALLEL_JOBS}" ] ; then
echo "Building VPP. Number of cores for build set with" \
"MAKE_PARALLEL_JOBS='${MAKE_PARALLEL_JOBS}'."
else
- echo "Building VPP. Number of cores not set, " \
- "using build default ($(grep -c ^processor /proc/cpuinfo))."
+ echo "Building VPP. Number of cores not set," \
+ "using build default ($(grep -c ^processor /proc/cpuinfo))."
fi
-if [ "x${MAKE_PARALLEL_JOBS}" != "x" ]
-then
- export TEST_JOBS="${MAKE_PARALLEL_JOBS}"
- echo "Testing VPP with ${TEST_JOBS} cores."
-else
- export TEST_JOBS="auto"
- echo "Testing VPP with automatically calculated number of cores. " \
- "See test logs for the exact number."
-fi
+make_build_test() {
+ if ! make UNATTENDED=yes install-dep ; then
+ BUILD_ERROR="FAILED 'make install-dep'"
+ return
+ fi
+ if ! make UNATTENDED=yes install-ext-deps ; then
+ BUILD_ERROR="FAILED 'make install-ext-deps'"
+ return
+ fi
+ if [ -f extras/scripts/build_static_vppctl.sh ]; then
+ if ! extras/scripts/build_static_vppctl.sh ; then
+ BUILD_ERROR="FAILED 'extras/scripts/build_static_vppctl.sh'"
+ return
+ fi
+ fi
+ if ! make UNATTENDED=yes test-dep ; then
+ BUILD_ERROR="FAILED 'make test-dep'"
+ return
+ fi
+ if ! make UNATTENDED=yes pkg-verify ; then
+ BUILD_ERROR="FAILED 'make pkg-verify'"
+ return
+ fi
+ if [ "${IS_CSIT_VPP_JOB,,}" == "true" ] ; then
+ # CSIT jobs don't need to run make test
+ return
+ fi
+ if [ -n "${MAKE_PARALLEL_JOBS}" ] ; then
+ TEST_JOBS="${MAKE_PARALLEL_JOBS}"
+ echo "Testing VPP with ${TEST_JOBS} cores."
+ else
+ TEST_JOBS="auto"
+ echo "Testing VPP with automatically calculated number of cores. " \
+ "See test logs for the exact number."
+ fi
+ if grep -q "${OS_ID}-${OS_VERSION_ID}" <<< "${VPPAPIGEN_TEST_OS}"; then
+ if ! src/tools/vppapigen/test_vppapigen.py ; then
+ BUILD_ERROR="FAILED src/tools/vppapigen/test_vppapigen.py"
+ return
+ fi
+ fi
+ if grep -q "${OS_ID}-${OS_VERSION_ID}" <<< "${MAKE_TEST_OS}"; then
+ if ! make COMPRESS_FAILED_TEST_LOGS=yes TEST_JOBS="$TEST_JOBS" RETRIES=3 test ; then
+ BUILD_ERROR="FAILED 'make test'"
+ return
+ fi
+ else
+ echo "Skip running 'make test' on ${OS_ID}-${OS_VERSION_ID}"
+ fi
+ if grep -q "${OS_ID}-${OS_VERSION_ID}" <<< "${MAKE_TEST_MULTIWORKER_OS}"; then
+ if git grep -q VPP_WORKER_CONFIG ; then
+ if ! make VPP_WORKER_CONFIG="workers 2" COMPRESS_FAILED_TEST_LOGS=yes \
+ RETRIES=3 TEST_JOBS="$TEST_JOBS" test ; then
+ BUILD_ERROR="FAILED 'make test' with VPP_WORKER_CONFIG='workers 2'"
+ return
+ else
+ echo -e "\n* VPP ${OS_ID^^}-${OS_VERSION_ID}-${OS_ARCH^^}" \
+ "MULTIWORKER MAKE TEST SUCCESSFULLY COMPLETED\n"
+ fi
+ elif git grep -q VPP_WORKER_COUNT ; then
+ if ! make VPP_WORKER_COUNT="2" COMPRESS_FAILED_TEST_LOGS=yes \
+ RETRIES=3 TEST_JOBS="$TEST_JOBS" test ; then
+ BUILD_ERROR="FAILED 'make test' with VPP_WORKER_CONFIG='workers 2'"
+ return
+ else
+ echo -e "\n* VPP ${OS_ID^^}-${OS_VERSION_ID}-${OS_ARCH^^}" \
+ "MULTIWORKER MAKE TEST SUCCESSFULLY COMPLETED\n"
+ fi
+ else
+ echo "Skip running MULTIWORKER MAKE TEST on ${OS_ID}-${OS_VERSION_ID}"
+ fi
+ else
+ echo "Skip running MULTIWORKER MAKE TEST on ${OS_ID}-${OS_VERSION_ID}"
+ fi
+}
-if (git log --oneline | grep 37682e1 > /dev/null 2>&1) && \
- [ "x${IS_CSIT_VPP_JOB}" != "xTrue" ]
-then
- echo "Building using \"make verify\""
- [ "x${DRYRUN}" == "xTrue" ] || make UNATTENDED=yes verify
-else
- echo "Building using \"make pkg-verify\""
- [ "x${DRYRUN}" == "xTrue" ] || make UNATTENDED=yes pkg-verify
+if [ "${DRYRUN,,}" != "true" ] ; then
+ make_build_test
fi
-
-if [ "x${VPP_REPO}" == "x1" ]; then
- if [ "x${REBASE_NEEDED}" == "x1" ]; then
- echo "This patch to vpp is based on an old point in the tree that is likely"
- echo "to fail verify."
- echo "PLEASE REBASE PATCH ON THE CURRENT HEAD OF THE VPP REPO"
- exit 1
- fi
+if [ -n "$BUILD_ERROR" ] ; then
+ BUILD_RESULT="$BUILD_ERROR"
+ RETVAL="1"
fi
-
-local_arch=$(uname -m)
-
-echo "*******************************************************************"
-echo "* VPP ${local_arch^^} BUILD SUCCESSFULLY COMPLETED"
-echo "*******************************************************************"
+echo -e "\n$line\n* VPP ${OS_ID^^}-${OS_VERSION_ID}-${OS_ARCH^^}" \
+ "BUILD $BUILD_RESULT\n$line\n"
+exit $RETVAL
diff --git a/jjb/scripts/vpp/check_crc.sh b/jjb/scripts/vpp/check_crc.sh
index 04e53966d..c0a9d507d 100644..100755
--- a/jjb/scripts/vpp/check_crc.sh
+++ b/jjb/scripts/vpp/check_crc.sh
@@ -13,22 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/vpp/check_crc.sh"
+
set -exuo pipefail
# Clone CSIT git repository and proceed with entry script located there.
#
# Variables read:
# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# - GIT_URL - Git clone URL
# - CSIT_REF - Override ref of CSIT git repository to checkout.
# Directories updated:
# - ${WORKSPACE}/csit - Created, holding a checked out CSIT repository.
# - Multiple other side effects by entry script(s), see CSIT repository.
cd "${WORKSPACE}"
-git clone https://gerrit.fd.io/r/csit --depth=1 --no-single-branch --no-checkout
+git clone "${GIT_URL}/csit" --depth=1 --no-single-branch --no-checkout
pushd "${WORKSPACE}/csit"
if [[ -n "${CSIT_REF-}" ]]; then
- git fetch --depth=1 https://gerrit.fd.io/r/csit "${CSIT_REF}"
+ git fetch --depth=1 "${GIT_URL}/csit" "${CSIT_REF}"
git checkout FETCH_HEAD
else
git checkout HEAD
diff --git a/jjb/scripts/vpp/checkstyle.sh b/jjb/scripts/vpp/checkstyle.sh
index 7c520dd99..00fc8bdb8 100644..100755
--- a/jjb/scripts/vpp/checkstyle.sh
+++ b/jjb/scripts/vpp/checkstyle.sh
@@ -1,5 +1,38 @@
#!/bin/bash
-# jjb/vpp/include-raw-vpp-checkstyle.sh
+
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/checkstyle.sh"
+
+# If mlx_rdma_dpdk_matrix.txt file has been updated in the current changeset,
+# verify the current rdma-core_version and dpdk_version exist in the matrix
+# file
+LINE="*******************************************************************"
+BUILD_EXT_DIR="build/external"
+MATRIX_FILE="$BUILD_EXT_DIR/mlx_rdma_dpdk_matrix.txt"
+PKGS_DIR="$BUILD_EXT_DIR/packages"
+if git show --stat | grep -q "$MATRIX_FILE" ; then
+ RDMA_CORE_VERSION="$(grep rdma-core_version $PKGS_DIR/rdma-core.mk | grep -v '(' | mawk '{print $3}')"
+ DPDK_VERSION="$(grep dpdk_version $PKGS_DIR/dpdk.mk | grep -v '(' | mawk '{print $3}')"
+ CURRENT_MATRIX="rdma=$RDMA_CORE_VERSION dpdk=$DPDK_VERSION"
+ if grep -q "$CURRENT_MATRIX" "$MATRIX_FILE"; then
+ echo -e "$LINE\n* DPDK/RDMA-CORE matrix file update successfully verified\n$LINE"
+ else
+ echo -e "$LINE\n* ERROR: 'rdma=$RDMA_CORE_VERSION dpdk=$DPDK_VERSION' not found in $MATRIX_FILE!\n$LINE"
+ exit 1
+ fi
+fi
if [ -n "$(grep -E '^checkstyle:' Makefile)" ]
then
diff --git a/jjb/scripts/vpp/commitmsg.sh b/jjb/scripts/vpp/commitmsg.sh
index d926ff5dd..479f35136 100644
--- a/jjb/scripts/vpp/commitmsg.sh
+++ b/jjb/scripts/vpp/commitmsg.sh
@@ -1,7 +1,22 @@
#!/bin/bash
-if [ -f extras/scripts/check_commit_msg.sh ];then
- echo "Running extras/scripts/check_commit_msg.sh"
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/commitmsg.sh"
+
+if [ -f extras/scripts/check_commit_msg.sh ] ; then
+ echo "Running extras/scripts/check_commit_msg.sh"
extras/scripts/check_commit_msg.sh
else
echo "Cannot find cat extras/scripts/check_commit_msg.sh - skipping commit message check"
diff --git a/jjb/scripts/vpp/copy_archives.sh b/jjb/scripts/vpp/copy_archives.sh
new file mode 100644
index 000000000..dafcd4aef
--- /dev/null
+++ b/jjb/scripts/vpp/copy_archives.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/copy_archives.sh"
+
+set -xuo pipefail
+set +e
+
+# Copy robot archives from perf job to where archive macro needs them.
+#
+# This has to be a script separate from csit-perf.sh, run as publisher,
+# because otherwise it is not easily possible to ensure this is executed
+# also when there is a test case failure.
+#
+# This can be removed when all CSIT branches use correct archive directory.
+# For fixed CSIT, the copy will fail, so errors are ignored everywhere.
+#
+# Variables read:
+# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# Directories updated:
+# - ${WORKSPACE}/archives/csit_* - Test results for various VPP builds are here.
+# e.g. csit_current and csit_parent for vpp per-patch perf job.
+
+mkdir -p "${WORKSPACE}/archives"
+# Using asterisk as bisect job creates variable number of directories.
+cp -Rv "${WORKSPACE}/csit_"* "${WORKSPACE}/archives"
diff --git a/jjb/scripts/vpp/cov-build.sh b/jjb/scripts/vpp/cov-build.sh
new file mode 100644
index 000000000..15f86c3bf
--- /dev/null
+++ b/jjb/scripts/vpp/cov-build.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/cov-build.sh"
+
+set -euxo pipefail
+
+line="*************************************************************************"
+# Don't build anything if this is a merge job.
+if [[ ${JOB_NAME} == *merge* ]] ; then
+ echo -e "\n$line\nSkipping build."
+ exit 0
+fi
+
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_ARCH=$(uname -m)
+DRYRUN="${DRYRUN:-}"
+IS_CSIT_VPP_JOB="${IS_CSIT_VPP_JOB:-}"
+MAKE_PARALLEL_FLAGS="${MAKE_PARALLEL_FLAGS:-}"
+MAKE_PARALLEL_JOBS="${MAKE_PARALLEL_JOBS:-}"
+MAKE_TEST_OS="${MAKE_TEST_OS:-ubuntu-22.04}"
+MAKE_TEST_MULTIWORKER_OS="${MAKE_TEST_MULTIWORKER_OS:-debian-11}"
+VPPAPIGEN_TEST_OS="${VPPAPIGEN_TEST_OS:-${MAKE_TEST_OS}}"
+BUILD_RESULT="SUCCESSFULLY COMPLETED"
+BUILD_ERROR=""
+RETVAL="0"
+
+if [ -n "${MAKE_PARALLEL_FLAGS}" ] ; then
+ echo "Building VPP. Number of cores for build set with" \
+ "MAKE_PARALLEL_FLAGS='${MAKE_PARALLEL_FLAGS}'."
+elif [ -n "${MAKE_PARALLEL_JOBS}" ] ; then
+ echo "Building VPP. Number of cores for build set with" \
+ "MAKE_PARALLEL_JOBS='${MAKE_PARALLEL_JOBS}'."
+else
+ echo "Building VPP. Number of cores not set," \
+ "using build default ($(grep -c ^processor /proc/cpuinfo))."
+fi
+
+make_test_coverage_report() {
+ if ! make UNATTENDED=yes install-dep ; then
+ BUILD_ERROR="FAILED 'make install-dep'"
+ return
+ fi
+ if ! make UNATTENDED=yes install-ext-deps ; then
+ BUILD_ERROR="FAILED 'make install-ext-deps'"
+ return
+ fi
+ if ! make UNATTENDED=yes test-dep ; then
+ BUILD_ERROR="FAILED 'make test-dep'"
+ return
+ fi
+ if ! make UNATTENDED=yes CCACHE_DISABLE=1 pkg-verify ; then
+ BUILD_ERROR="FAILED 'make pkg-verify'"
+ return
+ fi
+ if [ "${IS_CSIT_VPP_JOB,,}" == "true" ] ; then
+ # CSIT jobs don't need to run make test
+ return
+ fi
+ if [ -n "${MAKE_PARALLEL_JOBS}" ] ; then
+ TEST_JOBS="${MAKE_PARALLEL_JOBS}"
+ echo "Testing VPP with ${TEST_JOBS} cores."
+ else
+ TEST_JOBS="auto"
+ echo "Testing VPP with automatically calculated number of cores. " \
+ "See test logs for the exact number."
+ fi
+ if grep -q "${OS_ID}-${OS_VERSION_ID}" <<< "${VPPAPIGEN_TEST_OS}"; then
+ if ! src/tools/vppapigen/test_vppapigen.py ; then
+ BUILD_ERROR="FAILED src/tools/vppapigen/test_vppapigen.py"
+ return
+ fi
+ fi
+ if grep -q "${OS_ID}-${OS_VERSION_ID}" <<< "${MAKE_TEST_OS}"; then
+ if ! make COMPRESS_FAILED_TEST_LOGS=yes TEST_JOBS="$TEST_JOBS" CCACHE_DISABLE=1 test-cov ; then
+ BUILD_ERROR="FAILED 'make test-cov'"
+ return
+ fi
+ else
+ echo "Skip running 'make test-cov' on ${OS_ID}-${OS_VERSION_ID}"
+ fi
+}
+
+if [ "${DRYRUN,,}" != "true" ] ; then
+ make_test_coverage_report
+fi
+if [ -n "$BUILD_ERROR" ] ; then
+ BUILD_RESULT="$BUILD_ERROR"
+ RETVAL="1"
+fi
+echo -e "\n$line\n* VPP ${OS_ID^^}-${OS_VERSION_ID}-${OS_ARCH^^}" \
+ "TEST COVERAGE REPORT $BUILD_RESULT\n$line\n"
+exit $RETVAL
diff --git a/jjb/scripts/vpp/coverity.sh b/jjb/scripts/vpp/coverity.sh
deleted file mode 100644
index 47a316a89..000000000
--- a/jjb/scripts/vpp/coverity.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-FILE="scan.txt"
-OUTPUT="output.txt"
-
-wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
-dpkg -i google-chrome-stable_current_amd64.deb || true
-apt-get install -f -y
-
-google-chrome --headless --disable-gpu -dump-dom --no-sandbox https://scan.coverity.com/projects/fd-io-vpp > $FILE
-
-grep -i '<dt>Newly detected</dt>' $FILE || exit 42
-
-NEW=$(grep -i -B 1 '<dt>Newly detected</dt>' $FILE | grep -Eo '[0-9]{1,4}')
-ELIM=$(grep -i -B 1 '<dt>Eliminated</dt>' $FILE | grep -Eo '[0-9]{1,4}')
-OUT=$(grep -i -B 1 '<dt>Outstanding</dt>' $FILE | grep -Eo '[0-9]{1,4}')
-
-#ls -lg $FILE
-#cat $FILE
-
-if [ "${OUT}" == "0" ]; then
- echo 'Current outstanding issues are zero' > $OUTPUT
- echo "Newly detected: $NEW" >> $OUTPUT
- echo "Eliminated: $ELIM" >> $OUTPUT
- echo "More details can be found at https://scan.coverity.com/projects/fd-io-vpp/view_defects" >> $OUTPUT
-else
- echo "Current number of outstanding issues are $OUT Failing job"
- echo "Current number of outstanding issues are $OUT" > $OUTPUT
- echo "Newly detected: $NEW" >> $OUTPUT
- echo "Eliminated: $ELIM" >> $OUTPUT
- echo "More details can be found at https://scan.coverity.com/projects/fd-io-vpp/view_defects" >> $OUTPUT
- exit 1
-fi
diff --git a/jjb/scripts/vpp/csit-bisect.sh b/jjb/scripts/vpp/csit-bisect.sh
new file mode 100644
index 000000000..0a264dbc8
--- /dev/null
+++ b/jjb/scripts/vpp/csit-bisect.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+
+# Copyright (c) 2023 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -exuo pipefail
+
+# Clone CSIT git repository and proceed with entry script located there.
+#
+# Variables read:
+# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# - CSIT_REF - Override ref of CSIT git repository to checkout.
+# Directories updated:
+# - ${WORKSPACE}/csit - Created, holding a checked out CSIT repository.
+# - Multiple other side effects by entry script(s), see CSIT repository.
+
+cd "${WORKSPACE}"
+git clone https://gerrit.fd.io/r/csit --depth=1 --no-single-branch --no-checkout
+pushd "${WORKSPACE}/csit"
+if [[ -n "${CSIT_REF-}" ]]; then
+ git fetch --depth=1 https://gerrit.fd.io/r/csit "${CSIT_REF}"
+ git checkout FETCH_HEAD
+else
+ git checkout HEAD
+fi
+popd
+csit_entry_dir="${WORKSPACE}/csit/resources/libraries/bash/entry"
+source "${csit_entry_dir}/with_oper_for_vpp.sh" "bisect.sh"
diff --git a/jjb/scripts/vpp/csit-device.sh b/jjb/scripts/vpp/csit-device.sh
index af54c9dc4..989193001 100644..100755
--- a/jjb/scripts/vpp/csit-device.sh
+++ b/jjb/scripts/vpp/csit-device.sh
@@ -13,22 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/vpp/csit-device.sh"
+
set -exuo pipefail
# Clone CSIT git repository and proceed with entry script located there.
#
# Variables read:
# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# - GIT_URL - Git clone URL
# - CSIT_REF - Override ref of CSIT git repository to checkout.
# Directories updated:
# - ${WORKSPACE}/csit - Created, holding a checked out CSIT repository.
# - Multiple other side effects by entry script(s), see CSIT repository.
cd "${WORKSPACE}"
-git clone https://gerrit.fd.io/r/csit --depth=1 --no-single-branch --no-checkout
+git clone "${GIT_URL}/csit" --depth=1 --no-single-branch --no-checkout
pushd "${WORKSPACE}/csit"
if [[ -n "${CSIT_REF-}" ]]; then
- git fetch --depth=1 https://gerrit.fd.io/r/csit "${CSIT_REF}"
+ git fetch --depth=1 "${GIT_URL}/csit" "${CSIT_REF}"
git checkout FETCH_HEAD
else
git checkout HEAD
diff --git a/jjb/scripts/vpp/csit-perf.sh b/jjb/scripts/vpp/csit-perf.sh
index 17a9d39bc..ee0b500c6 100644..100755
--- a/jjb/scripts/vpp/csit-perf.sh
+++ b/jjb/scripts/vpp/csit-perf.sh
@@ -13,22 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+echo "---> jjb/scripts/vpp/csit-perf.sh"
+
set -exuo pipefail
# Clone CSIT git repository and proceed with entry script located there.
#
# Variables read:
# - WORKSPACE - Jenkins workspace to create csit subdirectory in.
+# - GIT_URL - Git clone URL
# - CSIT_REF - Override ref of CSIT git repository to checkout.
# Directories updated:
# - ${WORKSPACE}/csit - Created, holding a checked out CSIT repository.
# - Multiple other side effects by entry script(s), see CSIT repository.
cd "${WORKSPACE}"
-git clone https://gerrit.fd.io/r/csit --depth=1 --no-single-branch --no-checkout
+git clone "${GIT_URL}/csit" --depth=1 --no-single-branch --no-checkout
pushd "${WORKSPACE}/csit"
if [[ -n "${CSIT_REF-}" ]]; then
- git fetch --depth=1 https://gerrit.fd.io/r/csit "${CSIT_REF}"
+ git fetch --depth=1 "${GIT_URL}/csit" "${CSIT_REF}"
git checkout FETCH_HEAD
else
git checkout HEAD
diff --git a/jjb/scripts/vpp/debug-build.sh b/jjb/scripts/vpp/debug-build.sh
index cdf1d0760..68338df56 100644
--- a/jjb/scripts/vpp/debug-build.sh
+++ b/jjb/scripts/vpp/debug-build.sh
@@ -1,49 +1,78 @@
#!/bin/bash
-# basic build script example
-set -xe -o pipefail
-OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
-OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-echo OS_ID: $OS_ID
-echo OS_VERSION_ID: $OS_VERSION_ID
+echo "---> jjb/scripts/vpp/debug-build.sh"
-# do nothing but print the current slave hostname
-hostname
-
-echo "cat /etc/bootstrap.sha"
-if [ -f /etc/bootstrap.sha ];then
- cat /etc/bootstrap.sha
-else
- echo "Cannot find cat /etc/bootstrap.sha"
-fi
-
-echo "cat /etc/bootstrap-functions.sha"
-if [ -f /etc/bootstrap-functions.sha ];then
- cat /etc/bootstrap-functions.sha
-else
- echo "Cannot find cat /etc/bootstrap-functions.sha"
-fi
-
-echo "sha1sum of this script: ${0}"
-sha1sum $0
+set -euxo pipefail
+line="*************************************************************************"
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_ARCH=$(uname -m)
+DRYRUN="${DRYRUN:-}"
+MAKE_PARALLEL_JOBS="${MAKE_PARALLEL_JOBS:-}"
+BUILD_RESULT="SUCCESSFULLY COMPLETED"
+BUILD_ERROR=""
+RETVAL="0"
# run with ASAN on
+# disable ASAN for now in the debug build - it's broken with PAPI
+# in make test transitioning to unix sockets
# export VPP_EXTRA_CMAKE_ARGS='-DVPP_ENABLE_SANITIZE_ADDR=ON'
-# clang is not working with ASAN right now - see change 27268
-# apparently gcc neither...
-# export CC=gcc
-
-
-
-make UNATTENDED=yes install-dep
-make UNATTENDED=yes install-ext-deps
-make UNATTENDED=yes build
-make UNATTENDED=yes TEST_JOBS=auto test-debug
+make_build_test_debug() {
+ if ! make UNATTENDED=yes install-dep ; then
+ BUILD_ERROR="FAILED 'make install-dep'"
+ return
+ fi
+ if ! make UNATTENDED=yes install-ext-deps ; then
+ BUILD_ERROR="FAILED 'make install-ext-deps'"
+ return
+ fi
+ if ! make UNATTENDED=yes build ; then
+ BUILD_ERROR="FAILED 'make build'"
+ return
+ fi
+ if grep -q "${OS_ID}-${OS_VERSION_ID}" <<< "${MAKE_TEST_OS}"; then
+ if [ -n "${MAKE_PARALLEL_JOBS}" ] ; then
+ TEST_JOBS="${MAKE_PARALLEL_JOBS}"
+ echo "Testing VPP with ${TEST_JOBS} cores."
+ else
+ TEST_JOBS="auto"
+ echo "Testing VPP with automatically calculated number of cores. " \
+ "See test logs for the exact number."
+ fi
+ if ! make UNATTENDED=yes COMPRESS_FAILED_TEST_LOGS=yes \
+ TEST_JOBS="$TEST_JOBS" test-debug ; then
+ BUILD_ERROR="FAILED 'make UNATTENDED=yes COMPRESS_FAILED_TEST_LOGS=yes TEST_JOBS=$TEST_JOBS test-debug'"
+ return
+ fi
+ else
+ echo "Skip running 'make test-debug' on ${OS_ID}-${OS_VERSION_ID}"
+ fi
+}
-
-echo "*******************************************************************"
-echo "* VPP debug/asan test BUILD SUCCESSFULLY COMPLETED"
-echo "*******************************************************************"
+# clang is not working with ASAN right now - see change 27268
+# also, it does not work with gcc-7, we need gcc-8 at least
+# on ubuntu 20.04 executor the gcc is gcc9
+if [ "${DRYRUN,,}" != "true" ] ; then
+ make_build_test_debug
+fi
+if [ -n "$BUILD_ERROR" ] ; then
+ BUILD_RESULT="$BUILD_ERROR"
+ RETVAL="1"
+fi
+echo -e "\n$line\n* VPP ${OS_ID^^}-${OS_VERSION_ID}-${OS_ARCH^^} DEBUG BUILD $BUILD_RESULT\n$line\n"
+exit $RETVAL
diff --git a/jjb/scripts/vpp/docs.sh b/jjb/scripts/vpp/docs.sh
index 608f8f979..52b920e31 100644..100755
--- a/jjb/scripts/vpp/docs.sh
+++ b/jjb/scripts/vpp/docs.sh
@@ -1,50 +1,29 @@
#!/bin/bash
-set -xe -o pipefail
-[ "$DOCS_REPO_URL" ] || DOCS_REPO_URL="https://nexus.fd.io/content/sites/site"
-[ "$PROJECT_PATH" ] || PROJECT_PATH=io/fd/vpp
-[ "$DOC_FILE" ] || DOC_FILE=vpp.docs.zip
-[ "$DOC_DIR" ] || DOC_DIR=build-root/docs/html
-[ "$SITE_DIR" ] || SITE_DIR=build-root/docs/deploy-site/
-[ "$RESOURCES_DIR" ] || RESOURCES_DIR=${SITE_DIR}/src/site/resources
-[ "$MVN" ] || MVN="/opt/apache/maven/bin/mvn"
-[ "$VERSION" ] || VERSION=$(./build-root/scripts/version rpm-version)
-make doxygen
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
-if [[ ${JOB_NAME} == *merge* ]]; then
- mkdir -p $(dirname ${RESOURCES_DIR})
- mv -f ${DOC_DIR} ${RESOURCES_DIR}
- cd ${SITE_DIR}
- find . -type f '(' -name '*.md5' -o -name '*.dot' -o -name '*.map' ')' -delete
- cat > pom.xml << EOF
- <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>io.fd.vpp</groupId>
- <artifactId>docs</artifactId>
- <version>1.0.0</version>
- <packaging>pom</packaging>
+echo "---> jjb/scripts/vpp/docs.sh"
- <properties>
- <generateReports>false</generateReports>
- </properties>
+set -euxo pipefail
- <build>
- <extensions>
- <extension>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-webdav-jackrabbit</artifactId>
- <version>2.10</version>
- </extension>
- </extensions>
- </build>
- <distributionManagement>
- <site>
- <id>fdio-site</id>
- <url>dav:${DOCS_REPO_URL}/${PROJECT_PATH}/${VERSION}</url>
- </site>
- </distributionManagement>
- </project>
-EOF
- ${MVN} -B site:site site:deploy -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}" -T 4C
- cd -
+line="*************************************************************************"
+# Don't build anything if this is a merge job being run when
+# the git HEAD id is not the same as the Gerrit New Revision id.
+if [[ ${JOB_NAME} == *merge* ]] && [ -n "${GERRIT_NEWREV:-}" ] &&
+ [ "$GERRIT_NEWREV" != "$GIT_COMMIT" ] ; then
+ echo -e "\n$line\nSkipping docs build. A newer patch has been merged.\n$line\n"
+ exit 0
fi
+
+make UNATTENDED=yes docs
diff --git a/jjb/scripts/csit/vpp-functional-virl.sh b/jjb/scripts/vpp/docs_spell.sh
index f2e38e8e1..4b6a97851 100644..100755
--- a/jjb/scripts/csit/vpp-functional-virl.sh
+++ b/jjb/scripts/vpp/docs_spell.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright (c) 2020 Cisco and/or its affiliates.
+# Copyright (c) 2022 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
@@ -13,16 +13,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# execute csit bootstrap script if it exists
-if [ -e bootstrap.sh ]
+echo "---> jjb/scripts/vpp/docs_spell.sh"
+
+if grep -qE '^docs\-%' Makefile && grep -qE '^spell' docs/Makefile
then
- # make sure that bootstrap.sh is executable
- chmod +x bootstrap.sh
- # run the script
- ./bootstrap.sh
+ make docs-spell
else
- echo 'ERROR: No bootstrap.sh found'
- exit 1
+ echo "Can't find docs-spell target in Makefile - skipping docs-spell"
fi
-
-# vim: ts=4 ts=4 sts=4 et :
diff --git a/jjb/scripts/vpp/dpdk-rdma-version-mismatch.sh b/jjb/scripts/vpp/dpdk-rdma-version-mismatch.sh
new file mode 100644
index 000000000..b2248064b
--- /dev/null
+++ b/jjb/scripts/vpp/dpdk-rdma-version-mismatch.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/dpdk-rdma-version-mismatch.sh"
+
+set -euxo pipefail
+
+line="*************************************************************************"
+EXTERNAL_BUILD_DIR="$WORKSPACE/build/external"
+RETVAL="0"
+MISMATCH_RESULT="INCLUDED IN"
+
+make -C "$EXTERNAL_BUILD_DIR" build-deb
+source "$EXTERNAL_BUILD_DIR/dpdk_mlx_default.sh" || true
+
+if [ "${DPDK_MLX_DEFAULT-}" = "n" ] ; then
+ MISMATCH_RESULT="MISSING FROM"
+ RETVAL="1"
+fi
+echo -e "\n$line\n* MLX DPDK DRIVER $MISMATCH_RESULT VPP-EXT-DEPS PACKAGE\n$line\n"
+exit $RETVAL
diff --git a/jjb/scripts/vpp/gcc-build.sh b/jjb/scripts/vpp/gcc-build.sh
new file mode 100644
index 000000000..9bd6d4ff7
--- /dev/null
+++ b/jjb/scripts/vpp/gcc-build.sh
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+# Copyright (c) 2022 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/gcc-build.sh"
+
+set -euxo pipefail
+
+line="*************************************************************************"
+OS_ID=$(grep '^ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_VERSION_ID=$(grep '^VERSION_ID=' /etc/os-release | cut -f2- -d= | sed -e 's/\"//g')
+OS_ARCH=$(uname -m)
+DRYRUN="${DRYRUN:-}"
+BUILD_RESULT="SUCCESSFULLY COMPLETED"
+BUILD_ERROR=""
+RETVAL="0"
+export CC=gcc
+
+make_build_release_build_test_gcov_sanity() {
+ if ! make UNATTENDED=yes install-dep ; then
+ BUILD_ERROR="FAILED 'make install-dep'"
+ return
+ fi
+ if ! make UNATTENDED=yes install-ext-deps ; then
+ BUILD_ERROR="FAILED 'make install-ext-deps'"
+ return
+ fi
+ if ! make UNATTENDED=yes CCACHE_DISABLE=1 build-release ; then
+ BUILD_ERROR="FAILED 'make build-release'"
+ return
+ fi
+ if ! make UNATTENDED=yes CCACHE_DISABLE=1 build ; then
+ BUILD_ERROR="FAILED 'make build'"
+ return
+ fi
+ if [ -n "${MAKE_PARALLEL_JOBS}" ] ; then
+ TEST_JOBS="${MAKE_PARALLEL_JOBS}"
+ echo "Testing VPP with ${TEST_JOBS} cores."
+ else
+ TEST_JOBS="auto"
+ echo "Testing VPP with automatically calculated number of cores. " \
+ "See test logs for the exact number."
+ fi
+ # TODO: Add 'smoke test' env var to select smoke test cases
+ # then update this accordingly. For now pick a few basic suites...
+ MAKE_TEST_SUITES="vlib vppinfra vpe_api vapi cli bihash"
+ for suite in $MAKE_TEST_SUITES ; do
+ if ! make UNATTENDED=yes CCACHE_DISABLE=1 TESTS_GCOV=1 TEST_JOBS="$TEST_JOBS" TEST=$suite test ; then
+ BUILD_ERROR="FAILED 'make TEST=$suite test'!"
+ return
+ fi
+ if ! make UNATTENDED=yes CCACHE_DISABLE=1 TESTS_GCOV=1 TEST_JOBS="$TEST_JOBS" TEST=$suite test-debug ; then
+ BUILD_ERROR="FAILED 'make TEST=$suite test-debug'!"
+ return
+ fi
+ done
+}
+
+if [ "${DRYRUN,,}" != "true" ] ; then
+ make_build_release_build_test_gcov_sanity
+fi
+if [ -n "$BUILD_ERROR" ] ; then
+ BUILD_RESULT="$BUILD_ERROR"
+ RETVAL="1"
+fi
+echo -e "\n$line\n* VPP ${OS_ID^^}-${OS_VERSION_ID}-${OS_ARCH^^} GCC BUILD $BUILD_RESULT\n$line\n"
+exit $RETVAL
diff --git a/jjb/scripts/vpp/make-test-docs.sh b/jjb/scripts/vpp/make-test-docs.sh
deleted file mode 100644
index 1f82ab502..000000000
--- a/jjb/scripts/vpp/make-test-docs.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-set -xe -o pipefail
-[ "$DOCS_REPO_URL" ] || DOCS_REPO_URL="https://nexus.fd.io/content/sites/site"
-[ "$PROJECT_PATH" ] || PROJECT_PATH=io/fd/vpp
-[ "$DOC_DIR" ] || DOC_DIR=build-root/build-test/doc/html
-[ "$SITE_DIR" ] || SITE_DIR=build-root/docs/deploy-site
-[ "$RESOURCES_DIR" ] || RESOURCES_DIR=${SITE_DIR}/src/site/resources/vpp_make_test
-[ "$MVN" ] || MVN="/opt/apache/maven/bin/mvn"
-[ "$VERSION" ] || VERSION=$(./build-root/scripts/version rpm-version)
-
-make test-doc
-
-if [[ ${JOB_NAME} == *merge* ]]; then
- mkdir -p ${RESOURCES_DIR}
- mv -f ${DOC_DIR} ${RESOURCES_DIR}
- cd ${SITE_DIR}
-
- cat > pom.xml << EOF
- <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>io.fd.vpp</groupId>
- <artifactId>docs</artifactId>
- <version>1.0.0</version>
- <packaging>pom</packaging>
-
- <properties>
- <generateReports>false</generateReports>
- </properties>
-
- <build>
- <extensions>
- <extension>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-webdav-jackrabbit</artifactId>
- <version>2.10</version>
- </extension>
- </extensions>
- </build>
- <distributionManagement>
- <site>
- <id>fdio-site</id>
- <url>dav:${DOCS_REPO_URL}/${PROJECT_PATH}/${VERSION}</url>
- </site>
- </distributionManagement>
- </project>
-EOF
- ${MVN} -B site:site site:deploy -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}" -T 4C
- cd -
-fi
diff --git a/jjb/scripts/vpp/maven-push.sh b/jjb/scripts/vpp/maven-push.sh
deleted file mode 100644
index 6ce3b2c06..000000000
--- a/jjb/scripts/vpp/maven-push.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-set -xe -o pipefail
-echo "*******************************************************************"
-echo "* STARTING PUSH OF PACKAGES TO REPOS"
-echo "* NOTHING THAT HAPPENS BELOW THIS POINT IS RELATED TO BUILD FAILURE"
-echo "*******************************************************************"
-
-[ "$MVN" ] || MVN="/opt/apache/maven/bin/mvn"
-GROUP_ID="io.fd.${PROJECT}"
-BASEURL="${NEXUSPROXY}/content/repositories/fd.io."
-BASEREPOID='fdio-'
-
-if [ "${OS}" == "ubuntu1604" ]; then
- # Find the files
- JARS=$(find . -type f -iname '*.jar')
- DEBS=$(find . -type f -iname '*.deb')
- for i in $JARS
- do
- push_jar "$i"
- done
-
- for i in $DEBS
- do
- push_deb "$i"
- done
-elif [ "${OS}" == "ubuntu1804" ]; then
- # Find the files
- JARS=$(find . -type f -iname '*.jar')
- DEBS=$(find . -type f -iname '*.deb')
- for i in $JARS
- do
- push_jar "$i"
- done
-
- for i in $DEBS
- do
- push_deb "$i"
- done
-elif [ "${OS}" == "centos7" ]; then
- # Find the files
- RPMS=$(find . -type f -iname '*.rpm')
- SRPMS=$(find . -type f -iname '*.srpm')
- SRCRPMS=$(find . -type f -name '*.src.rpm')
- for i in $RPMS $SRPMS $SRCRPMS
- do
- push_rpm "$i"
- done
-elif [ "${OS}" == "opensuse" ]; then
- # Find the files
- RPMS=$(find . -type f -iname '*.rpm')
- for i in $RPMS
- do
- push_rpm "$i"
- done
-fi
-# vim: ts=4 sw=4 sts=4 et ft=sh :
diff --git a/jjb/scripts/vpp/sphinx-docs.sh b/jjb/scripts/vpp/sphinx-docs.sh
deleted file mode 100644
index 749728731..000000000
--- a/jjb/scripts/vpp/sphinx-docs.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-set -xe -o pipefail
-[ "$DOCS_REPO_URL" ] || DOCS_REPO_URL="https://nexus.fd.io/content/sites/site"
-[ "$PROJECT_PATH" ] || PROJECT_PATH=io/fd/vpp
-[ "$DOC_FILE" ] || DOC_FILE=vpp.docs.zip
-[ "$DOC_DIR" ] || DOC_DIR=./docs/_build/html
-[ "$SITE_DIR" ] || SITE_DIR=build-root/docs/deploy-site
-[ "$RESOURCES_DIR" ] || RESOURCES_DIR=${SITE_DIR}/src/site/resources
-[ "$MVN" ] || MVN="/opt/apache/maven/bin/mvn"
-[ "$VERSION" ] || VERSION=$(./build-root/scripts/version rpm-version)
-
-make docs-venv
-CONFIRM=-y FORCE=--force-yes make docs
-
-if [[ ${JOB_NAME} == *merge* ]]; then
- mkdir -p $(dirname ${RESOURCES_DIR})
- mv -f ${DOC_DIR} ${RESOURCES_DIR}
- cd ${SITE_DIR}
- cat > pom.xml << EOF
- <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>io.fd.vpp</groupId>
- <artifactId>docs</artifactId>
- <version>1.0.0</version>
- <packaging>pom</packaging>
-
- <properties>
- <generateReports>false</generateReports>
- </properties>
-
- <build>
- <extensions>
- <extension>
- <groupId>org.apache.maven.wagon</groupId>
- <artifactId>wagon-webdav-jackrabbit</artifactId>
- <version>2.10</version>
- </extension>
- </extensions>
- </build>
- <distributionManagement>
- <site>
- <id>fdio-site</id>
- <url>dav:${DOCS_REPO_URL}/${PROJECT_PATH}/v${VERSION}</url>
- </site>
- </distributionManagement>
- </project>
-EOF
- ${MVN} -B site:site site:deploy -gs "${GLOBAL_SETTINGS_FILE}" -s "${SETTINGS_FILE}" -T 4C
- cd -
-fi
diff --git a/jjb/scripts/vpp/test-checkstyle.sh b/jjb/scripts/vpp/test-checkstyle.sh
index bdc843114..3f0bc9a06 100644
--- a/jjb/scripts/vpp/test-checkstyle.sh
+++ b/jjb/scripts/vpp/test-checkstyle.sh
@@ -1,5 +1,19 @@
#!/bin/bash
-# jjb/vpp/include-raw-vpp-test-checkstyle.sh
+
+# Copyright (c) 2020 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+echo "---> jjb/scripts/vpp/test-checkstyle.sh"
if [ -n "$(grep -E '^test-checkstyle:' Makefile)" ]
then