path: root/resources/tools/scripts
author     Tibor Frank <tifrank@cisco.com>     2017-06-20 13:57:08 +0200
committer  Tibor Frank <tifrank@cisco.com>     2017-06-29 12:17:28 +0000
commit     6721e7f09aa95bff6622068332a3f56afad9c87b (patch)
tree       37ef7f40e53f740a62830ab46142aa87342dc56b /resources/tools/scripts
parent     859157b5db45927c7b4bb0b2d575e68805777a86 (diff)
CSIT-687: Directory structure reorganization
Change-Id: I772c9e214be2461adf58124998d272e7d795220f
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Signed-off-by: Maciek Konstantynowicz <mkonstan@cisco.com>
Diffstat (limited to 'resources/tools/scripts')
-rwxr-xr-x  resources/tools/scripts/download_hc_build_pkgs.sh      96
-rwxr-xr-x  resources/tools/scripts/download_hc_pkgs.sh            99
-rwxr-xr-x  resources/tools/scripts/download_install_vpp_pkgs.sh   81
-rw-r--r--  resources/tools/scripts/download_install_vpp_rpms.sh   39
-rwxr-xr-x  resources/tools/scripts/download_nsh_sfc_pkgs.sh       65
-rwxr-xr-x  resources/tools/scripts/rename_robot_keywords.py      243
-rwxr-xr-x  resources/tools/scripts/robot_output_parser.py        208
-rwxr-xr-x  resources/tools/scripts/topo_installation.py          149
-rwxr-xr-x  resources/tools/scripts/topo_reservation.py            65
9 files changed, 1045 insertions, 0 deletions
diff --git a/resources/tools/scripts/download_hc_build_pkgs.sh b/resources/tools/scripts/download_hc_build_pkgs.sh
new file mode 100755
index 0000000000..6683499956
--- /dev/null
+++ b/resources/tools/scripts/download_hc_build_pkgs.sh
@@ -0,0 +1,96 @@
+#!/bin/bash
+
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+STREAM=$1
+OS=$2
+
+# Download the latest VPP and VPP plugin .deb packages
+URL="https://nexus.fd.io/service/local/artifact/maven/content"
+VER="RELEASE"
+VPP_GROUP="io.fd.vpp"
+NSH_GROUP="io.fd.nsh_sfc"
+NSH_ARTIFACTS="vpp-nsh-plugin"
+
+if [ "${OS}" == "ubuntu1404" ]; then
+ OS="ubuntu.trusty.main"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+ VPP_ARTIFACTS="vpp vpp-dbg vpp-dev vpp-dpdk-dkms vpp-lib vpp-plugins vpp-api-java"
+elif [ "${OS}" == "ubuntu1604" ]; then
+ OS="ubuntu.xenial.main"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+ VPP_ARTIFACTS="vpp vpp-dbg vpp-dev vpp-dpdk-dkms vpp-lib vpp-plugins vpp-api-java"
+elif [ "${OS}" == "centos7" ]; then
+ OS="centos7"
+ PACKAGE="rpm rpm.md5"
+ CLASS=""
+ VPP_ARTIFACTS="vpp vpp-debuginfo vpp-devel vpp-lib vpp-plugins vpp-api-java"
+fi
+
+REPO="fd.io.${STREAM}.${OS}"
+
+for ART in ${VPP_ARTIFACTS}; do
+ for PAC in ${PACKAGE}; do
+ curl "${URL}?r=${REPO}&g=${VPP_GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+for ART in ${NSH_ARTIFACTS}; do
+ for PAC in ${PACKAGE}; do
+ curl "${URL}?r=${REPO}&g=${NSH_GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+# verify downloaded packages
+if [ "${OS}" == "centos7" ]; then
+ FILES=*.rpm
+else
+ FILES=*.deb
+fi
+
+for FILE in ${FILES}; do
+ echo " "${FILE} >> ${FILE}.md5
+done
+for MD5FILE in *.md5; do
+ md5sum -c ${MD5FILE} || exit
+ rm ${MD5FILE}
+done
+
+# Install vpp-api-java; this extracts jvpp .jar files into /usr/share/java
+if [ "${OS}" == "centos7" ]; then
+ sudo rpm --nodeps --install vpp-api-java*
+else
+ sudo dpkg --ignore-depends=vpp --install vpp-api-java*
+fi
+
+# Install jvpp .jar files into the local Maven repository so that Maven picks them up when building hc2vpp
+version=`../jvpp-version`
+
+current_dir=`pwd`
+cd /usr/share/java
+
+for item in jvpp*.jar; do
+ # Example filename: jvpp-registry-17.01-20161206.125556-1.jar
+ # ArtifactId = jvpp-registry
+ # Version = 17.01
+ basefile=$(basename -s .jar "$item")
+ artifactId=$(echo "$basefile" | cut -d '-' -f 1-2)
+ mvn install:install-file -Dfile=${item} -DgroupId=io.fd.vpp -DartifactId=${artifactId} -Dversion=${version} -Dpackaging=jar -Dmaven.repo.local=/tmp/r -Dorg.ops4j.pax.url.mvn.localRepository=/tmp/r
+done
+
+cd ${current_dir} \ No newline at end of file
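
A minimal invocation sketch for the script above, assuming it is run from a directory whose parent provides the jvpp-version helper it calls (the stream and OS arguments shown are illustrative, not taken from the change itself):

    ./download_hc_build_pkgs.sh master ubuntu1604

The first argument selects the fd.io.<stream>.<os> Nexus repository, the second must be one of ubuntu1404, ubuntu1604 or centos7.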
diff --git a/resources/tools/scripts/download_hc_pkgs.sh b/resources/tools/scripts/download_hc_pkgs.sh
new file mode 100755
index 0000000000..23e0be4b16
--- /dev/null
+++ b/resources/tools/scripts/download_hc_pkgs.sh
@@ -0,0 +1,99 @@
+#!/bin/bash
+
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+STREAM=$1
+OS=$2
+
+URL="https://nexus.fd.io/service/local/artifact/maven/content"
+VER="RELEASE"
+GROUP="io.fd.vpp"
+HC_GROUP="io.fd.hc2vpp"
+NSH_GROUP="io.fd.nsh_sfc"
+HC_ARTIFACTS="honeycomb"
+NSH_ARTIFACTS="vpp-nsh-plugin"
+
+if [ "${OS}" == "ubuntu1404" ]; then
+ OS="ubuntu.trusty.main"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+ VPP_ARTIFACTS="vpp vpp-dbg vpp-dev vpp-lib vpp-plugins vpp-api-java"
+ DPDK_ARTIFACTS="vpp-dpdk-dkms"
+elif [ "${OS}" == "ubuntu1604" ]; then
+ OS="ubuntu.xenial.main"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+ VPP_ARTIFACTS="vpp vpp-dbg vpp-dev vpp-lib vpp-plugins vpp-api-java"
+ DPDK_ARTIFACTS="vpp-dpdk-dkms"
+elif [ "${OS}" == "centos7" ]; then
+ OS="centos7"
+ PACKAGE="rpm rpm.md5"
+ CLASS=""
+ VPP_ARTIFACTS="vpp vpp-debuginfo vpp-devel vpp-lib vpp-plugins vpp-api-java"
+ DPDK_ARTIFACTS=""
+fi
+
+REPO="fd.io.${STREAM}.${OS}"
+
+# download latest honeycomb, vpp-dpdk and nsh packages
+for ART in ${HC_ARTIFACTS}; do
+ for PAC in ${PACKAGE}; do
+ curl "${URL}?r=${REPO}&g=${HC_GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+for ART in ${DPDK_ARTIFACTS}; do
+ for PAC in ${PACKAGE}; do
+ curl "${URL}?r=${REPO}&g=${GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+for ART in ${NSH_ARTIFACTS}; do
+ for PAC in ${PACKAGE}; do
+ curl "${URL}?r=${REPO}&g=${NSH_GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+# determine VPP dependency
+if [ "${OS}" == "centos7" ]; then
+ VER=`rpm -qpR honeycomb*.rpm | grep 'vpp ' | cut -d ' ' -f 3`
+ VER=${VER}.x86_64
+else
+ VER=`dpkg -I honeycomb*.deb | grep -oP 'vpp \(= \K[^\)]+'`
+ VER=${VER}_amd64
+fi
+
+# download VPP packages
+for ART in ${VPP_ARTIFACTS}; do
+ for PAC in ${PACKAGE}; do
+ curl "${URL}?r=${REPO}&g=${GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+# verify downloaded package
+if [ "${OS}" == "centos7" ]; then
+ FILES=*.rpm
+else
+ FILES=*.deb
+fi
+
+for FILE in ${FILES}; do
+ echo " "${FILE} >> ${FILE}.md5
+done
+for MD5FILE in *.md5; do
+ md5sum -c ${MD5FILE} || exit
+ rm ${MD5FILE}
+done \ No newline at end of file
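
For illustration, with STREAM=master and OS=ubuntu1604 the honeycomb download loop above expands to a Nexus query of roughly this shape (the substitution is done by hand here, it is not part of the change):

    curl "https://nexus.fd.io/service/local/artifact/maven/content?r=fd.io.master.ubuntu.xenial.main&g=io.fd.hc2vpp&a=honeycomb&p=deb&v=RELEASE&c=deb" -O -J

The script then reads the vpp dependency version out of the downloaded honeycomb package and reuses it as the v= parameter when fetching the matching VPP artifacts.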
diff --git a/resources/tools/scripts/download_install_vpp_pkgs.sh b/resources/tools/scripts/download_install_vpp_pkgs.sh
new file mode 100755
index 0000000000..8928a2fee2
--- /dev/null
+++ b/resources/tools/scripts/download_install_vpp_pkgs.sh
@@ -0,0 +1,81 @@
+#!/bin/bash
+
+# Copyright (c) 2016 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+URL="https://nexus.fd.io/service/local/artifact/maven/content"
+VER="RELEASE"
+GROUP="io.fd.vpp"
+
+if [ -f "/etc/redhat-release" ]; then
+ trap 'rm -f *.rpm.md5; exit' EXIT
+ trap 'rm -f *.rpm.md5;rm -f *.rpm; exit' ERR
+
+ VPP_REPO_URL_PATH="./VPP_REPO_URL_CENTOS"
+ if [ -e "$VPP_REPO_URL_PATH" ]; then
+ VPP_REPO_URL=$(cat $VPP_REPO_URL_PATH)
+ REPO=$(echo ${VPP_REPO_URL#https://nexus.fd.io/content/repositories/})
+ REPO=$(echo ${REPO%/io/fd/vpp/})
+ else
+ REPO='fd.io.master.centos7'
+ FILES=*.rpm
+ MD5FILES=*.rpm.md5
+ fi
+
+ ARTIFACTS="vpp vpp-debuginfo vpp-devel vpp-lib vpp-plugins"
+ PACKAGE="rpm rpm.md5"
+ CLASS=""
+ VPP_INSTALL_COMMAND="rpm -ivh *.rpm"
+else
+ trap 'rm -f *.deb.md5; exit' EXIT
+ trap 'rm -f *.deb.md5;rm -f *.deb; exit' ERR
+
+ VPP_REPO_URL_PATH="./VPP_REPO_URL_UBUNTU"
+ if [ -e "$VPP_REPO_URL_PATH" ]; then
+ VPP_REPO_URL=$(cat $VPP_REPO_URL_PATH)
+ REPO=$(echo ${VPP_REPO_URL#https://nexus.fd.io/content/repositories/})
+ REPO=$(echo ${REPO%/io/fd/vpp/})
+ else
+ REPO='fd.io.master.ubuntu.xenial.main'
+ FILES=*.deb
+ MD5FILES=*.deb.md5
+ fi
+
+ ARTIFACTS="vpp vpp-dbg vpp-dev vpp-dpdk-dkms vpp-lib vpp-plugins"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+ VPP_INSTALL_COMMAND="dpkg -i *.deb"
+fi
+
+for ART in ${ARTIFACTS}; do
+ for PAC in $PACKAGE; do
+ curl "${URL}?r=${REPO}&g=${GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+for FILE in ${FILES}; do
+ echo " "${FILE} >> ${FILE}.md5
+done
+
+for MD5FILE in ${MD5FILES}; do
+ md5sum -c ${MD5FILE} || exit
+done
+
+if [ "$1" != "--skip-install" ]; then
+ echo Installing VPP
+ sudo ${VPP_INSTALL_COMMAND}
+else
+ echo VPP Installation skipped
+fi
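
The repository can be overridden by dropping a VPP_REPO_URL_UBUNTU (or VPP_REPO_URL_CENTOS) file into the working directory; the script strips the Nexus prefix and the trailing /io/fd/vpp/ path to recover the repository id. A sketch of seeding such an override before calling the script, with a hypothetical staging repository name in the URL:

    echo "https://nexus.fd.io/content/repositories/fd.io.stable.1707.ubuntu.xenial.main/io/fd/vpp/" > VPP_REPO_URL_UBUNTU
    ./download_install_vpp_pkgs.sh --skip-install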
diff --git a/resources/tools/scripts/download_install_vpp_rpms.sh b/resources/tools/scripts/download_install_vpp_rpms.sh
new file mode 100644
index 0000000000..29e09216db
--- /dev/null
+++ b/resources/tools/scripts/download_install_vpp_rpms.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+# Copyright (c) 2016 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+VER="RELEASE"
+
+VPP_REPO_URL_PATH="./VPP_REPO_URL"
+if [ -e "$VPP_REPO_URL_PATH" ]; then
+ VPP_REPO_URL=$(cat $VPP_REPO_URL_PATH)
+ REPO=$(echo ${VPP_REPO_URL#https://nexus.fd.io/content/repositories/})
+ REPO=$(echo ${REPO%/fd.io.centos7})
+else
+ REPO='https://nexus.fd.io/content/repositories/fd.io.centos7'
+fi
+
+ARTIFACTS="vpp vpp-lib vpp-debuginfo vpp-devel vpp-python-api vpp-plugins"
+
+
+yum-config-manager --add-repo $REPO
+
+if [ "$1" != "--skip-install" ]; then
+ echo Installing VPP
+ sudo yum install -y $ARTIFACTS
+else
+ echo VPP Installation skipped
+fi
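
A minimal invocation sketch for the RPM variant above, assuming it runs with privileges sufficient for yum-config-manager; --skip-install registers the repository but skips the final yum install step:

    ./download_install_vpp_rpms.sh --skip-install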
diff --git a/resources/tools/scripts/download_nsh_sfc_pkgs.sh b/resources/tools/scripts/download_nsh_sfc_pkgs.sh
new file mode 100755
index 0000000000..2261f084e5
--- /dev/null
+++ b/resources/tools/scripts/download_nsh_sfc_pkgs.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+trap 'rm -f *.deb.md5; exit' EXIT
+trap 'rm -f *.deb.md5;rm -f *.deb; exit' ERR
+
+STREAM=$1
+OS=$2
+
+URL="https://nexus.fd.io/service/local/artifact/maven/content"
+VER="RELEASE"
+GROUP="io.fd.vpp"
+NSH_GROUP="io.fd.nsh_sfc"
+VPP_ARTIFACTS="vpp vpp-dbg vpp-dev vpp-dpdk-dkms vpp-lib vpp-plugins vpp-api-java vpp-api-python vpp-api-lua"
+NSH_ARTIFACTS="vpp-nsh-plugin"
+
+if [ "${OS}" == "ubuntu1404" ]; then
+ OS="ubuntu.trusty.main"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+elif [ "${OS}" == "ubuntu1604" ]; then
+ OS="ubuntu.xenial.main"
+ PACKAGE="deb deb.md5"
+ CLASS="deb"
+elif [ "${OS}" == "centos7" ]; then
+ OS="centos7"
+ PACKAGE="rpm rpm.md5"
+ CLASS="rpm"
+fi
+
+REPO="fd.io.${STREAM}.${OS}"
+
+for ART in ${VPP_ARTIFACTS}; do
+ for PAC in $PACKAGE; do
+ curl "${URL}?r=${REPO}&g=${GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+for ART in ${NSH_ARTIFACTS}; do
+ for PAC in $PACKAGE; do
+ curl "${URL}?r=${REPO}&g=${NSH_GROUP}&a=${ART}&p=${PAC}&v=${VER}&c=${CLASS}" -O -J || exit
+ done
+done
+
+for FILE in *.deb; do
+ echo " "${FILE} >> ${FILE}.md5
+done
+
+for MD5FILE in *.md5; do
+ md5sum -c ${MD5FILE} || exit
+done
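
Unlike the build-package variant, this script installs EXIT and ERR traps so that .md5 files (and, on error, the downloaded packages) are cleaned up automatically. A minimal invocation sketch with illustrative arguments:

    ./download_nsh_sfc_pkgs.sh master ubuntu1604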
diff --git a/resources/tools/scripts/rename_robot_keywords.py b/resources/tools/scripts/rename_robot_keywords.py
new file mode 100755
index 0000000000..9f27b4aaec
--- /dev/null
+++ b/resources/tools/scripts/rename_robot_keywords.py
@@ -0,0 +1,243 @@
+#!/usr/bin/python
+
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script renames the given robot keywords in the given directory
+recursively.
+
+Example:
+
+ ./rename_robot_keywords.py -i kws.csv -s ";" -d ~/ws/vpp/git/csit/ -vvv
+
+ Input file "kws.csv" is a CSV file exported from e.g. MS Excel. Its structure
+ must be:
+
+ <Old keyword name><separator><New keyword name>
+
+ One keyword per line.
+
+"""
+
+import argparse
+import sys
+import re
+from os import walk, rename
+from os.path import join
+
+
+def time_interval(func):
+ """Decorator function to measure the time spent by the decorated function.
+
+ :param func: Decorated function.
+ :type func: Callable object.
+ :returns: Wrapper function.
+ :rtype: Callable object.
+ """
+
+ import time
+
+ def wrapper(*args, **kwargs):
+ start = time.clock()
+ result = func(*args, **kwargs)
+ stop = time.clock()
+ print("\nRenaming done in {:.5g} seconds\n".
+ format(stop - start))
+ return result
+ return wrapper
+
+
+def get_files(path, extension):
+ """Generates the list of files to process.
+
+ :param path: Path to files.
+ :param extension: Extension of files to process. If it is the empty string,
+ all files will be processed.
+ :type path: str
+ :type extension: str
+ :returns: List of files to process.
+ :rtype: list
+ """
+
+ file_list = list()
+ for root, dirs, files in walk(path):
+ for filename in files:
+ if extension:
+ if filename.endswith(extension):
+ file_list.append(join(root, filename))
+ else:
+ file_list.append(join(root, filename))
+
+ return file_list
+
+
+def read_keywords(args):
+ """This function reads the keywords from the input file and creates:
+
+ - a dictionary where the key is the old name and the value is the new name,
+ these keywords will be further processed.
+ - a list of keywords which will not be processed, typically keywords with
+ argument(s) in their names.
+ - a list of duplicates - duplicated keyword names or names which are parts
+ of another keyword name; they will not be processed.
+
+ :param args: Parsed arguments.
+ :type args: ArgumentParser
+ :returns: keyword names - dictionary where the key is the old name and the
+ value is the new name; ignored keyword names - list of keywords which will
+ not be processed; duplicates - duplicated keyword names or names which are
+ parts of another keyword name, they will not be processed.
+ :rtype: tuple(dict, list, list)
+ """
+
+ kw_names = dict()
+ ignored_kw_names = list()
+ duplicates = list()
+
+ for line in args.input:
+ old_name, new_name = line.split(args.separator)
+ if '$' in old_name:
+ ignored_kw_names.append((old_name, new_name[:-1]))
+ elif old_name in kw_names.keys():
+ duplicates.append((old_name, new_name[:-1]))
+ else:
+ kw_names[old_name] = new_name[:-1]
+
+ # Remove duplicates:
+ for old_name, _ in duplicates:
+ new_name = kw_names.pop(old_name, None)
+ if new_name:
+ duplicates.append((old_name, new_name))
+
+ # Find KW names which are parts of other KW names:
+ for old_name in kw_names.keys():
+ count = 0
+ for key in kw_names.keys():
+ if old_name in key:
+ count += 1
+ if old_name in kw_names[key]:
+ if old_name != key:
+ count += 1
+ if count > 1:
+ duplicates.append((old_name, kw_names[old_name]))
+ kw_names.pop(old_name)
+
+ return kw_names, ignored_kw_names, duplicates
+
+
+def rename_keywords(file_list, kw_names, args):
+ """Rename the keywords in specified files.
+
+ :param file_list: List of files to be processed.
+ :param kw_names: Dictionary where the key is the old name and the value is
+ the new name
+ :type file_list: list
+ :type kw_names: dict
+ """
+
+ kw_not_found = list()
+
+ for old_name, new_name in kw_names.items():
+ kw_found = False
+ if args.verbosity > 0:
+ print("\nFrom: {}\n To: {}\n".format(old_name, new_name))
+ for file_name in file_list:
+ tmp_file_name = file_name + ".new"
+ with open(file_name) as file_read:
+ file_write = open(tmp_file_name, 'w')
+ occurrences = 0
+ for line in file_read:
+ new_line = re.sub(old_name, new_name, line)
+ file_write.write(new_line)
+ if new_line != line:
+ occurrences += 1
+ if occurrences:
+ kw_found = True
+ if args.verbosity > 1:
+ print(" {:3d}: {}".format(occurrences, file_name))
+ file_write.close()
+ rename(tmp_file_name, file_name)
+ if not kw_found:
+ kw_not_found.append(old_name)
+
+ if args.verbosity > 0:
+ print("\nKeywords not found:")
+ for item in kw_not_found:
+ print(" {}".format(item))
+
+
+def parse_args():
+ """Parse arguments from command line.
+
+ :returns: Parsed arguments.
+ :rtype: ArgumentParser
+ """
+
+ parser = argparse.ArgumentParser(description=__doc__,
+ formatter_class=argparse.
+ RawDescriptionHelpFormatter)
+ parser.add_argument("-i", "--input",
+ required=True,
+ type=argparse.FileType('r'),
+ help="Text file with the old keyword name and the new "
+ "keyword name separated by separator per line.")
+ parser.add_argument("-s", "--separator",
+ default=";",
+ type=str,
+ help="Separator which separates the old and the new "
+ "keyword name.")
+ parser.add_argument("-d", "--dir",
+ required=True,
+ type=str,
+ help="Directory with robot files where the keywords "
+ "should be recursively searched.")
+ parser.add_argument("-v", "--verbosity", action="count",
+ help="Set the output verbosity.")
+ return parser.parse_args()
+
+
+@time_interval
+def main():
+ """Main function."""
+
+ args = parse_args()
+
+ kw_names, ignored_kw_names, duplicates = read_keywords(args)
+
+ file_list = get_files(args.dir, "robot")
+
+ if args.verbosity > 2:
+ print("\nList of files to be processed:")
+ for item in file_list:
+ print(" {}".format(item))
+ print("\n{} files to be processed.\n".format(len(file_list)))
+
+ print("\nList of keywords to be renamed:")
+ for item in kw_names:
+ print(" {}".format(item))
+ print("\n{} keywords to be renamed.\n".format(len(kw_names)))
+
+ rename_keywords(file_list, kw_names, args)
+
+ if args.verbosity >= 0:
+ print("\nIgnored keywords: ({})".format(len(ignored_kw_names)))
+ for old, new in ignored_kw_names:
+ print(" From: {}\n To: {}\n".format(old, new))
+
+ print("\nIgnored duplicates ({}):".format(len(duplicates)))
+ for old, new in duplicates:
+ print(" From: {}\n To: {}\n".format(old, new))
+
+
+if __name__ == "__main__":
+ sys.exit(main())
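
To make the expected input concrete, a hypothetical kws.csv (the keyword names below are invented for illustration) and a matching invocation could look like:

    $ cat kws.csv
    Send Packet And Check Headers;Send packet and verify headers
    Setup All DUT Nodes;Configure all DUT nodes
    $ ./rename_robot_keywords.py -i kws.csv -s ";" -d ~/ws/csit/ -vv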
diff --git a/resources/tools/scripts/robot_output_parser.py b/resources/tools/scripts/robot_output_parser.py
new file mode 100755
index 0000000000..b9ad8f8aa9
--- /dev/null
+++ b/resources/tools/scripts/robot_output_parser.py
@@ -0,0 +1,208 @@
+#!/usr/bin/python
+
+# Copyright (c) 2016 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Script parses the data taken by robot framework (output.xml) and dumps
+interested values into XML output file."""
+
+import argparse
+import re
+import sys
+import xml.etree.ElementTree as ET
+
+from robot.api import ExecutionResult, ResultVisitor
+
+
+class ExecutionChecker(ResultVisitor):
+ """Iterates through test cases."""
+
+ tc_regexp = re.compile(ur'^tc\d+-((\d+)B|IMIX)-(\d)t(\d)c-(.*)')
+ rate_regexp = re.compile(ur'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)[\D\d]*')
+ lat_regexp = re.compile(ur'^[\D\d]*'\
+ ur'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\','\
+ ur'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]\s\n'\
+ ur'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\','\
+ ur'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]\s\n'\
+ ur'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\','\
+ ur'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]')
+
+ def __init__(self, args):
+ self.root = ET.Element('build',
+ attrib={'vdevice': args.vdevice})
+
+ def visit_suite(self, suite):
+ """Implements traversing through the suite and its direct children.
+
+ :param suite: Suite to process.
+ :type suite: Suite
+ :return: Nothing.
+ """
+ if self.start_suite(suite) is not False:
+ suite.suites.visit(self)
+ suite.tests.visit(self)
+ self.end_suite(suite)
+
+ def start_suite(self, suite):
+ """Called when suite starts.
+
+ :param suite: Suite to process.
+ :type suite: Suite
+ :return: Nothing.
+ """
+ pass
+
+ def end_suite(self, suite):
+ """Called when suite ends.
+
+ :param suite: Suite to process.
+ :type suite: Suite
+ :return: Nothing.
+ """
+ pass
+
+ def visit_test(self, test):
+ """Implements traversing through the test.
+
+ :param test: Test to process.
+ :type test: Test
+ :return: Nothing.
+ """
+ if self.start_test(test) is not False:
+ self.end_test(test)
+
+ def start_test(self, test):
+ """Called when test starts.
+
+ :param test: Test to process.
+ :type test: Test
+ :return: Nothing.
+ """
+ if any("NDRPDRDISC" in tag for tag in test.tags):
+ if test.status == 'PASS':
+ tags = []
+ for tag in test.tags:
+ tags.append(tag)
+
+ test_elem = ET.SubElement(
+ self.root, "S" + test.parent.name.replace(" ", ""))
+ test_elem.attrib['name'] = test.parent.name
+ test_elem.attrib['framesize'] = str(re.search(
+ self.tc_regexp, test.name).group(1))
+ test_elem.attrib['threads'] = str(re.search(
+ self.tc_regexp, test.name).group(3))
+ test_elem.attrib['cores'] = str(re.search(
+ self.tc_regexp, test.name).group(4))
+ if any("NDRDISC" in tag for tag in test.tags):
+ try:
+ test_elem.attrib['lat_100'] = str(re.search(
+ self.lat_regexp, test.message).group(1)) + '/' +\
+ str(re.search(self.lat_regexp, test.message).
+ group(2))
+ except AttributeError:
+ test_elem.attrib['lat_100'] = "-1/-1/-1/-1/-1/-1"
+ try:
+ test_elem.attrib['lat_50'] = str(re.search(
+ self.lat_regexp, test.message).group(3)) + '/' +\
+ str(re.search(self.lat_regexp, test.message).
+ group(4))
+ except AttributeError:
+ test_elem.attrib['lat_50'] = "-1/-1/-1/-1/-1/-1"
+ try:
+ test_elem.attrib['lat_10'] = str(re.search(
+ self.lat_regexp, test.message).group(5)) + '/' +\
+ str(re.search(self.lat_regexp, test.message).
+ group(6))
+ except AttributeError:
+ test_elem.attrib['lat_10'] = "-1/-1/-1/-1/-1/-1"
+ test_elem.attrib['tags'] = ', '.join(tags)
+ try:
+ test_elem.text = str(re.search(
+ self.rate_regexp, test.message).group(1))
+ except AttributeError:
+ test_elem.text = "-1"
+
+ def end_test(self, test):
+ """Called when test ends.
+
+ :param test: Test to process.
+ :type test: Test
+ :return: Nothing.
+ """
+ pass
+
+
+def parse_tests(args):
+ """Process data from robot output.xml file and return XML data.
+
+ :param args: Parsed arguments.
+ :type args: ArgumentParser
+
+ :return: XML formatted output.
+ :rtype: ElementTree
+ """
+
+ result = ExecutionResult(args.input)
+ checker = ExecutionChecker(args)
+ result.visit(checker)
+
+ return checker.root
+
+
+def print_error(msg):
+ """Print error message on stderr.
+
+ :param msg: Error message to print.
+ :type msg: str
+ :return: nothing
+ """
+
+ sys.stderr.write(msg + '\n')
+
+
+def parse_args():
+ """Parse arguments from cmd line.
+
+ :return: Parsed arguments.
+ :rtype: ArgumentParser
+ """
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-i", "--input",
+ required=True,
+ type=argparse.FileType('r'),
+ help="Robot XML log file")
+ parser.add_argument("-o", "--output",
+ required=True,
+ type=argparse.FileType('w'),
+ help="XML output file")
+ parser.add_argument("-v", "--vdevice",
+ required=False,
+ default="",
+ type=str,
+ help="VPP version")
+
+ return parser.parse_args()
+
+
+def main():
+ """Main function."""
+
+ args = parse_args()
+
+ root = parse_tests(args)
+ ET.ElementTree.write(ET.ElementTree(root), args.output)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
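
A usage sketch for the parser above, assuming a Robot Framework output.xml from an NDR/PDR performance run is available locally (file names and the vdevice string are illustrative):

    ./robot_output_parser.py -i output.xml -o perf_results.xml -v "vpp-17.07"

Only passed test cases tagged NDRPDRDISC are emitted; each becomes an element carrying frame size, thread and core counts, latency attributes and the measured rate as its text.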
diff --git a/resources/tools/scripts/topo_installation.py b/resources/tools/scripts/topo_installation.py
new file mode 100755
index 0000000000..0488bdae69
--- /dev/null
+++ b/resources/tools/scripts/topo_installation.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script provides copy and installation of VPP build deb packages.
+ As destinations are used all DUT nodes from the topology file."""
+
+import sys
+import argparse
+from yaml import load
+
+from resources.libraries.python.ssh import SSH
+
+
+def ssh_no_error(ssh, cmd, sudo=False):
+ """Execute a command over ssh channel, and log and exit if the command
+ fails.
+
+ :param ssh: SSH() object connected to a node.
+ :param cmd: Command line to execute on remote node.
+ :type ssh: SSH() object
+ :type cmd: str
+ :return: stdout from the SSH command.
+ :rtype: str
+ """
+
+ if sudo:
+ ret, stdo, stde = ssh.exec_command_sudo(cmd, timeout=60)
+ else:
+ ret, stdo, stde = ssh.exec_command(cmd, timeout=60)
+
+ if ret != 0:
+ print 'Command execution failed: "{}"'.format(cmd)
+ print 'stdout: {0}'.format(stdo)
+ print 'stderr: {0}'.format(stde)
+ raise RuntimeError('Unexpected ssh command failure')
+
+ return stdo
+
+
+def ssh_ignore_error(ssh, cmd, sudo=False):
+ """Execute a command over ssh channel, ignore errors.
+
+ :param ssh: SSH() object connected to a node.
+ :param cmd: Command line to execute on remote node.
+ :type ssh: SSH() object
+ :type cmd: str
+ :return: stdout from the SSH command.
+ :rtype: str
+ """
+
+ if sudo:
+ ret, stdo, stde = ssh.exec_command_sudo(cmd)
+ else:
+ ret, stdo, stde = ssh.exec_command(cmd)
+
+ if ret != 0:
+ print 'Command execution failed: "{}"'.format(cmd)
+ print 'stdout: {0}'.format(stdo)
+ print 'stderr: {0}'.format(stde)
+
+ return stdo
+
+
+def main():
+ """Copy and installation of VPP packages."""
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-t", "--topo", required=True,
+ help="Topology file")
+ parser.add_argument("-d", "--directory", required=True,
+ help="Installation directory")
+ parser.add_argument("-p", "--packages", required=False, nargs='+',
+ help="Packages paths to copy")
+ parser.add_argument("-c", "--cancel", help="Cancel installation",
+ action="store_true")
+ args = parser.parse_args()
+ topology_file = args.topo
+ packages = args.packages
+ install_dir = args.directory
+ cancel_installation = args.cancel
+
+ work_file = open(topology_file)
+ topology = load(work_file.read())['nodes']
+
+ ssh = SSH()
+ for node in topology:
+ if topology[node]['type'] == "DUT":
+ print "###TI host: {}".format(topology[node]['host'])
+ ssh.connect(topology[node])
+
+ if cancel_installation:
+ # Remove installation directory on DUT
+ cmd = "rm -r {}".format(install_dir)
+ stdout = ssh_ignore_error(ssh, cmd)
+ print "###TI {}".format(stdout)
+
+ cmd = "dpkg -l | grep vpp"
+ ret, _, _ = ssh.exec_command(cmd)
+ if ret == 0:
+ # Try to fix interrupted installations
+ cmd = 'dpkg --configure -a'
+ stdout = ssh_no_error(ssh, cmd, sudo=True)
+ print "###TI {}".format(stdout)
+ # Try to remove installed vpp.* packages
+ cmd = 'apt-get purge -y "vpp.*"'
+ stdout = ssh_no_error(ssh, cmd, sudo=True)
+ print "###TI {}".format(stdout)
+ else:
+ # Create installation directory on DUT
+ cmd = "rm -r {0}; mkdir {0}".format(install_dir)
+ stdout = ssh_no_error(ssh, cmd)
+ print "###TI {}".format(stdout)
+
+ # Copy packages from local path to installation dir
+ for deb in packages:
+ print "###TI scp: {}".format(deb)
+ ssh.scp(local_path=deb, remote_path=install_dir)
+
+ cmd = "dpkg -l | grep vpp"
+ ret, _, _ = ssh.exec_command(cmd)
+ if ret == 0:
+ # Try to fix interrupted installations
+ cmd = 'dpkg --configure -a'
+ stdout = ssh_no_error(ssh, cmd, sudo=True)
+ print "###TI {}".format(stdout)
+ # Try to remove installed vpp.* packages
+ cmd = 'apt-get purge -y "vpp.*"'
+ stdout = ssh_no_error(ssh, cmd, sudo=True)
+ print "###TI {}".format(stdout)
+
+ # Installation of VPP deb packages
+ cmd = "dpkg -i --force-all {}/*.deb".format(install_dir)
+ stdout = ssh_no_error(ssh, cmd, sudo=True)
+ print "###TI {}".format(stdout)
+
+if __name__ == "__main__":
+ sys.exit(main())
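
A usage sketch for the installer above, assuming it is launched from the CSIT working directory so that resources.libraries.python.ssh is importable; the topology path and package names are illustrative:

    PYTHONPATH=. ./resources/tools/scripts/topo_installation.py \
        -t topologies/available/my_testbed.yaml \
        -d /tmp/vpp_install_dir \
        -p vpp_*.deb vpp-lib_*.deb vpp-plugins_*.deb

Passing -c instead removes the installation directory on each DUT and purges any previously installed vpp packages.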
diff --git a/resources/tools/scripts/topo_reservation.py b/resources/tools/scripts/topo_reservation.py
new file mode 100755
index 0000000000..4b5ed2459c
--- /dev/null
+++ b/resources/tools/scripts/topo_reservation.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script provides simple reservation mechanism to avoid
+ simultaneous use of nodes listed in topology file.
+ As source of truth is used DUT1 node from the topology file."""
+
+import sys
+import argparse
+from resources.libraries.python.ssh import SSH
+from yaml import load
+
+RESERVATION_DIR = "/tmp/reservation_dir"
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-t", "--topo", required=True,
+ help="Topology file")
+ parser.add_argument("-c", "--cancel", help="Cancel reservation",
+ action="store_true")
+ args = parser.parse_args()
+ topology_file = args.topo
+ cancel_reservation = args.cancel
+
+ work_file = open(topology_file)
+ topology = load(work_file.read())['nodes']
+
+ # We use the DUT1 node because we expect DUT1 to be a Linux host.
+ # We do not use the TG node because the TG is not expected to be a Linux-only host.
+ try:
+ tg_node = topology["DUT1"]
+ except KeyError:
+ print "Topology file does not contain 'DUT1' node"
+ return 1
+
+ ssh = SSH()
+ ssh.connect(tg_node)
+
+ # For system reservation we use mkdir because it is an atomic operation, and
+ # we can store additional data (time, client_ID, ...) within the reservation directory.
+ if cancel_reservation:
+ ret, _, err = ssh.exec_command("rm -r {}".format(RESERVATION_DIR))
+ else:
+ ret, _, err = ssh.exec_command("mkdir {}".format(RESERVATION_DIR))
+
+ if ret != 0:
+ print("{} unsuccessful:\n{}".
+ format(("Cancellation " if cancel_reservation else "Reservation"),
+ err))
+ return ret
+
+if __name__ == "__main__":
+ sys.exit(main())
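
A usage sketch for the reservation script above, under the same PYTHONPATH assumption and with an illustrative topology path; the second command releases a reservation made by the first:

    PYTHONPATH=. ./resources/tools/scripts/topo_reservation.py -t topologies/available/my_testbed.yaml
    PYTHONPATH=. ./resources/tools/scripts/topo_reservation.py -t topologies/available/my_testbed.yaml -c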