author     Vratko Polak <vrpolak@cisco.com>  2021-06-15 14:35:41 +0200
committer  Peter Mikus <pmikus@cisco.com>    2021-06-16 13:29:14 +0000
commit     6da5a6920171682bd5bf6a77517bedfef91cbd0e (patch)
tree       01e631f1a80deb4e26605fcb45cbfce35aae7dc5
parent     46f0194afb8d4c8191b3c412332e3fbb92528c19 (diff)
Line length: Fix recent merges
Not fixing .rst, .md, .yaml, conf.py, .vat, and so on.

Change-Id: Icc585d6dbebc8eb5c483b10326302571e94c614d
Signed-off-by: Vratko Polak <vrpolak@cisco.com>
-rw-r--r--  resources/libraries/bash/entry/check/pylint.sh      3
-rw-r--r--  resources/libraries/bash/function/artifacts.sh      26
-rw-r--r--  resources/libraries/bash/function/common.sh         3
-rw-r--r--  resources/libraries/bash/function/dpdk.sh           12
-rw-r--r--  resources/libraries/bash/function/gather.sh         7
-rw-r--r--  resources/libraries/bash/function/per_patch.sh      2
-rw-r--r--  resources/libraries/bash/shell/k8s_utils.sh         17
-rw-r--r--  resources/libraries/python/HoststackUtil.py         1
-rw-r--r--  resources/libraries/python/VppApiCrc.py             3
-rwxr-xr-x  resources/tools/doc_gen/run_doc.sh                  17
-rw-r--r--  resources/tools/doc_gen/src/Makefile                2
-rw-r--r--  resources/tools/iperf/iperf_client.py               2
-rwxr-xr-x  resources/tools/presentation/run_report_local.sh    6
13 files changed, 64 insertions, 37 deletions
diff --git a/resources/libraries/bash/entry/check/pylint.sh b/resources/libraries/bash/entry/check/pylint.sh
index 18e7d3db72..fbfea4773e 100644
--- a/resources/libraries/bash/entry/check/pylint.sh
+++ b/resources/libraries/bash/entry/check/pylint.sh
@@ -19,7 +19,8 @@ set -exuo pipefail
# to dissuade non-tox callers.
# This script runs pylint and propagates its exit code.
-# Config is taken from pylint.cfg, and proper virtualenv is assumed to be active.
+# Config is taken from pylint.cfg,
+# and proper virtualenv is assumed to be active.
# The pylint output stored to pylint.log (overwriting).
# "set -eu" handles failures from the following two lines.
diff --git a/resources/libraries/bash/function/artifacts.sh b/resources/libraries/bash/function/artifacts.sh
index 3fe6bae428..15a4dd2fe1 100644
--- a/resources/libraries/bash/function/artifacts.sh
+++ b/resources/libraries/bash/function/artifacts.sh
@@ -86,7 +86,7 @@ function download_ubuntu_artifacts () {
repository installation was not successful."
fi
- packages=$(apt-cache -o Dir::Etc::SourceList=${apt_fdio_repo_file} \
+ pkgs=$(apt-cache -o Dir::Etc::SourceList=${apt_fdio_repo_file} \
-o Dir::Etc::SourceParts=${apt_fdio_repo_file} dumpavail \
| grep Package: | cut -d " " -f 2 | grep vpp) || {
die "Retrieval of available VPP packages failed."
@@ -102,7 +102,7 @@ function download_ubuntu_artifacts () {
fi
set +x
- for package in ${packages}; do
+ for package in ${pkgs}; do
# Filter packages with given version
pkg_info=$(apt-cache show -- ${package}) || {
die "apt-cache show on ${package} failed."
@@ -147,19 +147,19 @@ function download_centos_artifacts () {
}
# If version is set we will add suffix.
artifacts=()
- packages=(vpp vpp-selinux-policy vpp-devel vpp-lib vpp-plugins vpp-api-python)
+ pkgs=(vpp vpp-selinux-policy vpp-devel vpp-lib vpp-plugins vpp-api-python)
if [ -z "${VPP_VERSION-}" ]; then
- artifacts+=(${packages[@]})
+ artifs+=(${pkgs[@]})
else
- artifacts+=(${packages[@]/%/-${VPP_VERSION-}})
+ artifs+=(${pkgs[@]/%/-${VPP_VERSION-}})
fi
if [[ "${INSTALL:-false}" == "true" ]]; then
- sudo yum -y install "${artifacts[@]}" || {
+ sudo yum -y install "${artifs[@]}" || {
die "Install VPP artifact failed."
}
else
- sudo yum -y install --downloadonly --downloaddir=. "${artifacts[@]}" || {
+ sudo yum -y install --downloadonly --downloaddir=. "${artifs[@]}" || {
die "Download VPP artifacts failed."
}
fi
@@ -181,20 +181,20 @@ function download_opensuse_artifacts () {
die "Packagecloud FD.io repo fetch failed."
}
# If version is set we will add suffix.
- artifacts=()
- packages=(vpp vpp-devel vpp-lib vpp-plugins libvpp0)
+ artifs=()
+ pkgs=(vpp vpp-devel vpp-lib vpp-plugins libvpp0)
if [ -z "${VPP_VERSION-}" ]; then
- artifacts+=(${packages[@]})
+ artifs+=(${pkgs[@]})
else
- artifacts+=(${packages[@]/%/-${VPP_VERSION-}})
+ artifs+=(${pkgs[@]/%/-${VPP_VERSION-}})
fi
if [[ "${INSTALL:-false}" == "true" ]]; then
- sudo yum -y install "${artifacts[@]}" || {
+ sudo yum -y install "${artifs[@]}" || {
die "Install VPP artifact failed."
}
else
- sudo yum -y install --downloadonly --downloaddir=. "${artifacts[@]}" || {
+ sudo yum -y install --downloadonly --downloaddir=. "${artifs[@]}" || {
die "Download VPP artifacts failed."
}
fi
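Editor's note: the renamed arrays above lean on the bash suffix-appending expansion ${pkgs[@]/%/-suffix}. A short standalone sketch of how that expansion behaves, with made-up package names and version (not values from the diff):

    pkgs=(vpp vpp-devel vpp-plugins)
    VPP_VERSION="21.06-release"               # example value only
    artifs=( "${pkgs[@]/%/-${VPP_VERSION}}" )  # append "-<version>" to each element
    printf '%s\n' "${artifs[@]}"
    # vpp-21.06-release
    # vpp-devel-21.06-release
    # vpp-plugins-21.06-release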
diff --git a/resources/libraries/bash/function/common.sh b/resources/libraries/bash/function/common.sh
index ec95f9227c..ed3b2044d3 100644
--- a/resources/libraries/bash/function/common.sh
+++ b/resources/libraries/bash/function/common.sh
@@ -526,7 +526,8 @@ function get_test_tag_string () {
TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
if [[ -z "${TEST_TAG_STRING-}" ]]; then
# Probably we got a base64 encoded comment.
- comment=$(base64 --decode <<< "${GERRIT_EVENT_COMMENT_TEXT}" || true)
+ comment="${GERRIT_EVENT_COMMENT_TEXT}"
+ comment=$(base64 --decode <<< "${comment}" || true)
comment=$(fgrep "${trigger}" <<< "${comment}" || true)
TEST_TAG_STRING=$("${cmd[@]}" <<< "${comment}" || true)
fi
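Editor's note: the fallback above handles Gerrit comments that arrive base64 encoded. A self-contained sketch of the same decode-then-grep pattern, with a hypothetical trigger word and a comment encoded on the fly (in common.sh both come from Gerrit variables):

    trigger="csit-vpp"                               # hypothetical trigger
    comment=$(base64 <<< "csit-vpp perftest")        # stand-in for the Gerrit comment
    comment=$(base64 --decode <<< "${comment}" || true)
    comment=$(fgrep "${trigger}" <<< "${comment}" || true)
    echo "${comment}"                                # prints: csit-vpp perftest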
diff --git a/resources/libraries/bash/function/dpdk.sh b/resources/libraries/bash/function/dpdk.sh
index 362524349c..3c16372294 100644
--- a/resources/libraries/bash/function/dpdk.sh
+++ b/resources/libraries/bash/function/dpdk.sh
@@ -95,8 +95,10 @@ function dpdk_compile () {
sed -i "${sed_cmd}" "${sed_file}" || die "RTE_MAX_NUMA_NODES Patch failed"
# Patch L3FWD.
- sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128/#define RTE_TEST_RX_DESC_DEFAULT 1024/g"
- sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512/#define RTE_TEST_TX_DESC_DEFAULT 1024/g"
+ sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128"
+ sed_rxd+="/#define RTE_TEST_RX_DESC_DEFAULT 1024/g"
+ sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512"
+ sed_txd+="/#define RTE_TEST_TX_DESC_DEFAULT 1024/g"
sed_file="./main.c"
pushd examples/l3fwd || die "Pushd failed"
sed -i "${sed_rxd}" "${sed_file}" || die "Patch failed"
@@ -207,8 +209,10 @@ function dpdk_l3fwd_compile () {
pushd "${DPDK_DIR}" || die "Pushd failed"
# Patch L3FWD.
- sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128/#define RTE_TEST_RX_DESC_DEFAULT 2048/g"
- sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512/#define RTE_TEST_TX_DESC_DEFAULT 2048/g"
+ sed_rxd="s/^#define RTE_TEST_RX_DESC_DEFAULT 128"
+ sed_rxd+="/#define RTE_TEST_RX_DESC_DEFAULT 2048/g"
+ sed_txd="s/^#define RTE_TEST_TX_DESC_DEFAULT 512"
+ sed_txd+="/#define RTE_TEST_TX_DESC_DEFAULT 2048/g"
sed_file="./main.c"
pushd examples/l3fwd || die "Pushd failed"
sed -i "${sed_rxd}" "${sed_file}" || die "Patch failed"
diff --git a/resources/libraries/bash/function/gather.sh b/resources/libraries/bash/function/gather.sh
index 2112e1be01..e3a6a9d150 100644
--- a/resources/libraries/bash/function/gather.sh
+++ b/resources/libraries/bash/function/gather.sh
@@ -124,7 +124,8 @@ function gather_vpp () {
# - ${CSIT_DIR}/DPDK_STABLE_VER - DPDK version to use
# by csit-vpp not-timed jobs.
# - ${CSIT_DIR}/${VPP_VER_FILE} - Ubuntu VPP version to use.
- # - ../*vpp*.deb|rpm - Relative to ${DOWNLOAD_DIR}, copied for vpp-csit jobs.
+ # - ../*vpp*.deb|rpm - Relative to ${DOWNLOAD_DIR},
+ # copied for vpp-csit jobs.
# Directories updated:
# - ${DOWNLOAD_DIR}, vpp-*.deb files are copied here for vpp-csit jobs.
# - ./ - Assumed ${DOWNLOAD_DIR}, *vpp*.deb|rpm files
@@ -157,8 +158,10 @@ function gather_vpp () {
download_artifacts || die
;;
"vpp-csit-"*)
+ # Shorten line.
+ pkgs="${PKG_SUFFIX}"
# Use locally built packages.
- mv "${DOWNLOAD_DIR}"/../*vpp*."${PKG_SUFFIX}" "${DOWNLOAD_DIR}"/ || {
+ mv "${DOWNLOAD_DIR}"/../*vpp*."${pkgs}" "${DOWNLOAD_DIR}"/ || {
die "Move command failed."
}
;;
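Editor's note: the mv above assumes the ../*vpp*.${pkgs} glob matches at least one locally built package. A hedged sketch of an explicit guard (not part of gather.sh), reusing its die helper and the shortened pkgs variable:

    pkgs="${PKG_SUFFIX}"
    shopt -s nullglob
    built=( "${DOWNLOAD_DIR}"/../*vpp*."${pkgs}" )
    shopt -u nullglob
    [ "${#built[@]}" -gt 0 ] || die "No locally built vpp packages found."
    mv "${built[@]}" "${DOWNLOAD_DIR}"/ || die "Move command failed."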
diff --git a/resources/libraries/bash/function/per_patch.sh b/resources/libraries/bash/function/per_patch.sh
index 76dbf51416..4af3302008 100644
--- a/resources/libraries/bash/function/per_patch.sh
+++ b/resources/libraries/bash/function/per_patch.sh
@@ -96,7 +96,7 @@ function build_vpp_ubuntu_amd64 () {
"using build default ($(grep -c ^processor /proc/cpuinfo))."
fi
- make UNATTENDED=y pkg-verify || die "VPP build using make pkg-verify failed."
+ make UNATTENDED=y pkg-verify || die "VPP build with make pkg-verify failed."
echo "* VPP ${1-} BUILD SUCCESSFULLY COMPLETED" || {
die "Argument not found."
}
diff --git a/resources/libraries/bash/shell/k8s_utils.sh b/resources/libraries/bash/shell/k8s_utils.sh
index c29151059b..b96ec8df6c 100644
--- a/resources/libraries/bash/shell/k8s_utils.sh
+++ b/resources/libraries/bash/shell/k8s_utils.sh
@@ -66,7 +66,8 @@ function k8s_utils.contiv_vpp_deploy {
k8s_contiv_patch="kubecon.contiv-vpp-yaml-patch.diff"
# Pull the most recent Docker images
- bash <(curl -s https://raw.githubusercontent.com/contiv/vpp/master/k8s/pull-images.sh)
+ url="https://raw.githubusercontent.com/contiv/vpp/master/k8s/pull-images.sh"
+ bash <(curl -s "${url}")
# Apply resources
wget ${k8s_contiv}
@@ -80,17 +81,21 @@ function k8s_utils.contiv_vpp_deploy {
function k8s_utils.cri_shim_install {
# Install the CRI Shim on host
- sudo su root -c 'bash <(curl -s https://raw.githubusercontent.com/contiv/vpp/master/k8s/cri-install.sh)'
+ url"https://raw.githubusercontent.com/contiv/vpp/master/k8s/cri-install.sh"
+ sudo su root -c "bash <(curl -s '${url}')"
}
function k8s_utils.cri_shim_uninstall {
# Uninstall the CRI Shim on host
- sudo su root -c 'bash <(curl -s https://raw.githubusercontent.com/contiv/vpp/master/k8s/cri-install.sh) --uninstall'
+ url="https://raw.githubusercontent.com/contiv/vpp/master/k8s/cri-install.sh"
+ sudo su root -c "bash <(curl -s '${url}') --uninstall"
}
function k8s_utils.kube_proxy_install {
# Installing custom version of Kube-Proxy to enable Kubernetes services
- bash <(curl -s https://raw.githubusercontent.com/contiv/vpp/master/k8s/proxy-install.sh)
+ url="https://raw.githubusercontent.com/contiv/vpp/master/k8s/"
+ url+="proxy-install.sh"
+ bash <(curl -s "${url}")
}
function k8s_utils.apply {
@@ -113,7 +118,9 @@ function k8s_utils.resource_delete {
function k8s_utils.affinity_non_vpp {
# Set affinity for all non VPP docker containers to CPU 0
- for i in `sudo docker ps --format "{{.ID}} {{.Names}}" | grep -v vpp | cut -d' ' -f1`; do
+ command='sudo docker ps --format "{{.ID}} {{.Names}}"'
+ command+=" | grep -v vpp | cut -d' ' -f1"
+ for i in $(${command}); do
sudo docker update --cpuset-cpus 0 ${i}
done
}
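Editor's note: one caveat with the refactor above is that a pipeline stored in a plain string is not re-parsed when expanded, so the quotes around the --format template and the | reach the shell as literal words. A sketch of an alternative that keeps lines short by moving the pipeline into a helper function (the function name is made up, not part of k8s_utils.sh):

    function list_non_vpp_containers {
        # Print IDs of all containers whose name does not contain "vpp".
        sudo docker ps --format "{{.ID}} {{.Names}}" | grep -v vpp | cut -d' ' -f1
    }
    for i in $(list_non_vpp_containers); do
        sudo docker update --cpuset-cpus 0 "${i}"
    done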
diff --git a/resources/libraries/python/HoststackUtil.py b/resources/libraries/python/HoststackUtil.py
index 76c75ee867..7e6ba56913 100644
--- a/resources/libraries/python/HoststackUtil.py
+++ b/resources/libraries/python/HoststackUtil.py
@@ -84,6 +84,7 @@ class HoststackUtil():
ip_address = f" {iperf3_attributes[u'ip_address']}" if u"ip_address" \
in iperf3_attributes else u""
iperf3_cmd[u"name"] = u"iperf3"
+ # TODO: Use OptionString library.
iperf3_cmd[u"args"] = f"--{iperf3_attributes[u'role']}{ip_address} " \
f"--interval 0{json_results} " \
f"--version{iperf3_attributes[u'ip_version']}"
diff --git a/resources/libraries/python/VppApiCrc.py b/resources/libraries/python/VppApiCrc.py
index ca76397fc6..0cb8c2b7e7 100644
--- a/resources/libraries/python/VppApiCrc.py
+++ b/resources/libraries/python/VppApiCrc.py
@@ -373,8 +373,7 @@ class VppApiCrcChecker:
if not matching:
self._reported[api_name] = crc
self.log_and_raise(
- f"No active collection contains API {api_name!r} with CRC "
- f"{crc!r}"
+ f"No active collection has API {api_name!r} with CRC {crc!r}"
)
options = self._options[api_name]
options.pop(u"vat_help", None)
diff --git a/resources/tools/doc_gen/run_doc.sh b/resources/tools/doc_gen/run_doc.sh
index d50c5b852d..10cc3e249d 100755
--- a/resources/tools/doc_gen/run_doc.sh
+++ b/resources/tools/doc_gen/run_doc.sh
@@ -21,11 +21,18 @@ mkdir --parents ${WORKING_DIR}/tests/
cp -r src/* ${WORKING_DIR}/
# Copy the source files to be processed:
-rsync -a --include '*/' --include '*.py' --exclude '*' ../../../resources/libraries/python/ ${WORKING_DIR}/resources/libraries/python/
+from_dir="../../../resources/libraries/python/"
+to_dir="${WORKING_DIR}/resources/libraries/python/"
+command="rsync -a --include '*/'"
+${command} --include '*.py' --exclude '*' "${from_dir}" "${to_dir}"
cp ../../../resources/__init__.py ${WORKING_DIR}/resources/
cp ../../../resources/libraries/__init__.py ${WORKING_DIR}/resources/libraries/
-rsync -a --include '*/' --include '*.robot' --exclude '*' ../../../resources/libraries/robot/ ${WORKING_DIR}/resources/libraries/robot/
-rsync -a --include '*/' --include '*.robot' --exclude '*' ../../../tests/ ${WORKING_DIR}/tests/
+from_dir="../../../resources/libraries/robot/"
+to_dir="${WORKING_DIR}/resources/libraries/robot/"
+${command} --include '*.robot' --exclude '*' "${from_dir}" "${to_dir}"
+from_dir="../../../tests/"
+to_dir="${WORKING_DIR}/tests/"
+${command} --include '*.robot' --exclude '*' "${from_dir}" "${to_dir}"
# Create virtual environment:
virtualenv --python=$(which python3) ${WORKING_DIR}/env
@@ -44,7 +51,9 @@ find ./${WORKING_DIR}/env -type f -name '*.rst' | xargs rm -f
# Generate the documentation:
DATE=$(date -u '+%d-%b-%Y')
-sphinx-build -v -c ${WORKING_DIR} -a -b html -E -D release=$1 -D version="$1 documentation - $DATE" ${WORKING_DIR} ${BUILD_DIR}/
+command="sphinx-build -v -c '${WORKING_DIR}' -a -b html -E -D release='$1' -D"
+command+=" version='$1 documentation - $DATE' '${WORKING_DIR}' '${BUILD_DIR}/'"
+${command}
find . -type d -name 'env' | xargs rm -rf
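Editor's note: a similar caveat applies here; the single quotes embedded in the command strings (the rsync --include '*/' and the sphinx-build -D values) are not stripped when ${command} is expanded unquoted, so they arrive at the tools as literal characters. A minimal sketch of an array-based alternative for the rsync case, under the same paths (an assumption, not what run_doc.sh does):

    rsync_cmd=(rsync -a --include "*/")    # each argument stays a separate word
    from_dir="../../../resources/libraries/python/"
    to_dir="${WORKING_DIR}/resources/libraries/python/"
    "${rsync_cmd[@]}" --include "*.py" --exclude "*" "${from_dir}" "${to_dir}"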
diff --git a/resources/tools/doc_gen/src/Makefile b/resources/tools/doc_gen/src/Makefile
index dc34917ca9..087683e4eb 100644
--- a/resources/tools/doc_gen/src/Makefile
+++ b/resources/tools/doc_gen/src/Makefile
@@ -40,7 +40,7 @@ help:
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " doctest to run all doctests embedded in the documentation"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
diff --git a/resources/tools/iperf/iperf_client.py b/resources/tools/iperf/iperf_client.py
index b77dea1179..d719ee485f 100644
--- a/resources/tools/iperf/iperf_client.py
+++ b/resources/tools/iperf/iperf_client.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""This module gets a bandwith limit together with other parameters, reads
+"""This script gets a bandwith limit together with other parameters, reads
the iPerf3 configuration and sends the traffic. At the end, it measures
the packet loss and latency.
"""
diff --git a/resources/tools/presentation/run_report_local.sh b/resources/tools/presentation/run_report_local.sh
index b48d4d2db5..df0e89e5e5 100755
--- a/resources/tools/presentation/run_report_local.sh
+++ b/resources/tools/presentation/run_report_local.sh
@@ -83,8 +83,10 @@ fi
if [[ ${cfg_install_latex} -eq 1 ]] ;
then
sudo apt-get -y install xvfb texlive-latex-recommended \
- texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra latexmk wkhtmltopdf inkscape
- sudo sed -i.bak 's/^\(main_memory\s=\s\).*/\110000000/' /usr/share/texlive/texmf-dist/web2c/texmf.cnf
+ texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra \
+ latexmk wkhtmltopdf inkscape
+ target="/usr/share/texlive/texmf-dist/web2c/texmf.cnf"
+ sudo sed -i.bak 's/^\(main_memory\s=\s\).*/\110000000/' "${target}"
fi
# Create working directories
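Editor's note: a usage remark on the texmf.cnf edit above. Raising main_memory in texmf.cnf generally only takes effect after the TeX formats are rebuilt, so a follow-up step along these lines is usually needed (not part of the script):

    target="/usr/share/texlive/texmf-dist/web2c/texmf.cnf"
    grep '^main_memory' "${target}"    # verify the new limit is in place
    sudo fmtutil-sys --all             # regenerate formats so pdflatex picks it up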