path: root/resources/tools/scripts
author     Peter Mikus <pmikus@cisco.com>    2019-10-10 15:31:28 +0000
committer  Peter Mikus <pmikus@cisco.com>    2019-11-05 07:23:56 +0000
commit     d01411c3c4af6c724a3800c621804ea979818d6d (patch)
tree       8c2745c25a575c7f637473fe98d3c39c1c8e2b28 /resources/tools/scripts
parent     50d21f72ff61d06641954c22a8bc13c2468388f9 (diff)
Cleanup via Ansible
+ Remove the dependency on the topo_ scripts, which depend on a custom SSH()
  that in turn depends on the framework itself. This way the cleanup is
  independent of failures in our SSH libraries.
+ A simple Ansible command can clean up a machine:
    ansible-playbook --inventory inventories/lf_inventory/hosts site.yaml \
      --limit '10.32.8.18' --tags 'cleanup'
+ Add vpp_device reset and cleanup.
+ Remove historical scripts.

- Still in testing (beta) phase.
- SRIOV cleanup still needs to be added.

Signed-off-by: Peter Mikus <pmikus@cisco.com>
Change-Id: I68e23304c7ad01041f51263c328c6e8d9b555cb7
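For reference, a tag-gated cleanup play of this kind can be expressed roughly as
follows. This is only a minimal sketch: the role names (cleanup, vpp_device) and
the host group (all) are illustrative assumptions, not the actual contents of
site.yaml or the lf_inventory.

    # site.yaml (sketch) -- assumed layout, not the real CSIT playbook
    - hosts: all                    # narrowed at run time with --limit
      become: true
      roles:
        - role: cleanup             # remove stray containers, processes, leftovers
          tags: cleanup
        - role: vpp_device          # vpp_device reset and cleanup
          tags:
            - cleanup
            - vpp_device

Invoked as in the commit message, --limit restricts the play to a single host
(here 10.32.8.18) and --tags 'cleanup' runs only the tasks tagged "cleanup",
so no SSH code from the framework is involved.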
Diffstat (limited to 'resources/tools/scripts')
-rwxr-xr-x  resources/tools/scripts/rename_robot_keywords.py | 243 -
-rwxr-xr-x  resources/tools/scripts/robot_output_parser.py   | 208 -
-rw-r--r--  resources/tools/scripts/topo_container_copy.py   | 137 -
-rwxr-xr-x  resources/tools/scripts/topo_installation.py     | 171 -
4 files changed, 0 insertions(+), 759 deletions(-)
diff --git a/resources/tools/scripts/rename_robot_keywords.py b/resources/tools/scripts/rename_robot_keywords.py
deleted file mode 100755
index 9f27b4aaec..0000000000
--- a/resources/tools/scripts/rename_robot_keywords.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2017 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This script renames the given robot keywords in the given directory
-recursively.
-
-Example:
-
- ./rename_robot_keywords.py -i kws.csv -s ";" -d ~/ws/vpp/git/csit/ -vvv
-
- Input file "kws.csv" is CSV file exported from e.g. MS Excel. Its structure
- must be:
-
- <Old keyword name><separator><New keyword name>
-
- One keyword per line.
-
-"""
-
-import argparse
-import sys
-import re
-from os import walk, rename
-from os.path import join
-
-
-def time_interval(func):
- """Decorator function to measure the time spent by the decorated function.
-
- :param func: Decorated function.
- :type func: Callable object.
- :returns: Wrapper function.
- :rtype: Callable object.
- """
-
- import time
-
- def wrapper(*args, **kwargs):
- start = time.clock()
- result = func(*args, **kwargs)
- stop = time.clock()
- print("\nRenaming done in {:.5g} seconds\n".
- format(stop - start))
- return result
- return wrapper
-
-
-def get_files(path, extension):
- """Generates the list of files to process.
-
- :param path: Path to files.
- :param extension: Extension of files to process. If it is the empty string,
- all files will be processed.
- :type path: str
- :type extension: str
- :returns: List of files to process.
- :rtype: list
- """
-
- file_list = list()
- for root, dirs, files in walk(path):
- for filename in files:
- if extension:
- if filename.endswith(extension):
- file_list.append(join(root, filename))
- else:
- file_list.append(join(root, filename))
-
- return file_list
-
-
-def read_keywords(args):
- """This function reads the keywords from the input file and creates:
-
- - a dictionary where the key is the old name and the value is the new name,
- these keywords will be further processed.
- - a list of keywords which will not be processed, typically keywords with
- argument(s) in its names.
- - a list of duplicates - duplicated keyword names or names which are parts
- of another keyword name, they will not be processed.
-
- :param args: Parsed arguments.
- :type args: ArgumentParser
- :returns: keyword names - dictionary where the key is the old name and the
- value is the new name; ignored keyword names - list of keywords which will
- not be processed; duplicates - duplicated keyword names or names which are
- parts of another keyword name, they will not be processed.
- :rtype: tuple(dict, list, list)
- """
-
- kw_names = dict()
- ignored_kw_names = list()
- duplicates = list()
-
- for line in args.input:
- old_name, new_name = line.split(args.separator)
- if '$' in old_name:
- ignored_kw_names.append((old_name, new_name[:-1]))
- elif old_name in kw_names.keys():
- duplicates.append((old_name, new_name[:-1]))
- else:
- kw_names[old_name] = new_name[:-1]
-
- # Remove duplicates:
- for old_name, _ in duplicates:
- new_name = kw_names.pop(old_name, None)
- if new_name:
- duplicates.append((old_name, new_name))
-
- # Find KW names which are parts of other KW names:
- for old_name in kw_names.keys():
- count = 0
- for key in kw_names.keys():
- if old_name in key:
- count += 1
- if old_name in kw_names[key]:
- if old_name != key:
- count += 1
- if count > 1:
- duplicates.append((old_name, kw_names[old_name]))
- kw_names.pop(old_name)
-
- return kw_names, ignored_kw_names, duplicates
-
-
-def rename_keywords(file_list, kw_names, args):
- """Rename the keywords in specified files.
-
- :param file_list: List of files to be processed.
- :param kw_names: Dictionary where the key is the old name and the value is
- the new name
- :type file_list: list
- :type kw_names: dict
- """
-
- kw_not_found = list()
-
- for old_name, new_name in kw_names.items():
- kw_found = False
- if args.verbosity > 0:
- print("\nFrom: {}\n To: {}\n".format(old_name, new_name))
- for file_name in file_list:
- tmp_file_name = file_name + ".new"
- with open(file_name) as file_read:
- file_write = open(tmp_file_name, 'w')
- occurrences = 0
- for line in file_read:
- new_line = re.sub(old_name, new_name, line)
- file_write.write(new_line)
- if new_line != line:
- occurrences += 1
- if occurrences:
- kw_found = True
- if args.verbosity > 1:
- print(" {:3d}: {}".format(occurrences, file_name))
- file_write.close()
- rename(tmp_file_name, file_name)
- if not kw_found:
- kw_not_found.append(old_name)
-
- if args.verbosity > 0:
- print("\nKeywords not found:")
- for item in kw_not_found:
- print(" {}".format(item))
-
-
-def parse_args():
- """Parse arguments from command line.
-
- :returns: Parsed arguments.
- :rtype: ArgumentParser
- """
-
- parser = argparse.ArgumentParser(description=__doc__,
- formatter_class=argparse.
- RawDescriptionHelpFormatter)
- parser.add_argument("-i", "--input",
- required=True,
- type=argparse.FileType('r'),
- help="Text file with the old keyword name and the new "
- "keyword name separated by separator per line.")
- parser.add_argument("-s", "--separator",
- default=";",
- type=str,
- help="Separator which separates the old and the new "
- "keyword name.")
- parser.add_argument("-d", "--dir",
- required=True,
- type=str,
- help="Directory with robot files where the keywords "
- "should be recursively searched.")
- parser.add_argument("-v", "--verbosity", action="count",
- help="Set the output verbosity.")
- return parser.parse_args()
-
-
-@time_interval
-def main():
- """Main function."""
-
- args = parse_args()
-
- kw_names, ignored_kw_names, duplicates = read_keywords(args)
-
- file_list = get_files(args.dir, "robot")
-
- if args.verbosity > 2:
- print("\nList of files to be processed:")
- for item in file_list:
- print(" {}".format(item))
- print("\n{} files to be processed.\n".format(len(file_list)))
-
- print("\nList of keywords to be renamed:")
- for item in kw_names:
- print(" {}".format(item))
- print("\n{} keywords to be renamed.\n".format(len(kw_names)))
-
- rename_keywords(file_list, kw_names, args)
-
- if args.verbosity >= 0:
- print("\nIgnored keywords: ({})".format(len(ignored_kw_names)))
- for old, new in ignored_kw_names:
- print(" From: {}\n To: {}\n".format(old, new))
-
- print("\nIgnored duplicates ({}):".format(len(duplicates)))
- for old, new in duplicates:
- print(" From: {}\n To: {}\n".format(old, new))
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/resources/tools/scripts/robot_output_parser.py b/resources/tools/scripts/robot_output_parser.py
deleted file mode 100755
index b9ad8f8aa9..0000000000
--- a/resources/tools/scripts/robot_output_parser.py
+++ /dev/null
@@ -1,208 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2016 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Script parses the data taken by robot framework (output.xml) and dumps
-interested values into XML output file."""
-
-import argparse
-import re
-import sys
-import xml.etree.ElementTree as ET
-
-from robot.api import ExecutionResult, ResultVisitor
-
-
-class ExecutionChecker(ResultVisitor):
- """Iterates through test cases."""
-
- tc_regexp = re.compile(ur'^tc\d+-((\d+)B|IMIX)-(\d)t(\d)c-(.*)')
- rate_regexp = re.compile(ur'^[\D\d]*FINAL_RATE:\s(\d+\.\d+)[\D\d]*')
- lat_regexp = re.compile(ur'^[\D\d]*'\
- ur'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\','\
- ur'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]\s\n'\
- ur'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\','\
- ur'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]\s\n'\
- ur'LAT_\d+%NDR:\s\[\'(-?\d+\/-?\d+\/-?\d+)\','\
- ur'\s\'(-?\d+\/-?\d+\/-?\d+)\'\]')
-
- def __init__(self, args):
- self.root = ET.Element('build',
- attrib={'vdevice': args.vdevice})
-
- def visit_suite(self, suite):
- """Implements traversing through the suite and its direct children.
-
- :param suite: Suite to process.
- :type suite: Suite
- :return: Nothing.
- """
- if self.start_suite(suite) is not False:
- suite.suites.visit(self)
- suite.tests.visit(self)
- self.end_suite(suite)
-
- def start_suite(self, suite):
- """Called when suite starts.
-
- :param suite: Suite to process.
- :type suite: Suite
- :return: Nothing.
- """
- pass
-
- def end_suite(self, suite):
- """Called when suite ends.
-
- :param suite: Suite to process.
- :type suite: Suite
- :return: Nothing.
- """
- pass
-
- def visit_test(self, test):
- """Implements traversing through the test.
-
- :param test: Test to process.
- :type test: Test
- :return: Nothing.
- """
- if self.start_test(test) is not False:
- self.end_test(test)
-
- def start_test(self, test):
- """Called when test starts.
-
- :param test: Test to process.
- :type test: Test
- :return: Nothing.
- """
- if any("NDRPDRDISC" in tag for tag in test.tags):
- if test.status == 'PASS':
- tags = []
- for tag in test.tags:
- tags.append(tag)
-
- test_elem = ET.SubElement(
- self.root, "S" + test.parent.name.replace(" ", ""))
- test_elem.attrib['name'] = test.parent.name
- test_elem.attrib['framesize'] = str(re.search(
- self.tc_regexp, test.name).group(1))
- test_elem.attrib['threads'] = str(re.search(
- self.tc_regexp, test.name).group(3))
- test_elem.attrib['cores'] = str(re.search(
- self.tc_regexp, test.name).group(4))
- if any("NDRDISC" in tag for tag in test.tags):
- try:
- test_elem.attrib['lat_100'] = str(re.search(
- self.lat_regexp, test.message).group(1)) + '/' +\
- str(re.search(self.lat_regexp, test.message).
- group(2))
- except AttributeError:
- test_elem.attrib['lat_100'] = "-1/-1/-1/-1/-1/-1"
- try:
- test_elem.attrib['lat_50'] = str(re.search(
- self.lat_regexp, test.message).group(3)) + '/' +\
- str(re.search(self.lat_regexp, test.message).
- group(4))
- except AttributeError:
- test_elem.attrib['lat_50'] = "-1/-1/-1/-1/-1/-1"
- try:
- test_elem.attrib['lat_10'] = str(re.search(
- self.lat_regexp, test.message).group(5)) + '/' +\
- str(re.search(self.lat_regexp, test.message).
- group(6))
- except AttributeError:
- test_elem.attrib['lat_10'] = "-1/-1/-1/-1/-1/-1"
- test_elem.attrib['tags'] = ', '.join(tags)
- try:
- test_elem.text = str(re.search(
- self.rate_regexp, test.message).group(1))
- except AttributeError:
- test_elem.text = "-1"
-
- def end_test(self, test):
- """Called when test ends.
-
- :param test: Test to process.
- :type test: Test
- :return: Nothing.
- """
- pass
-
-
-def parse_tests(args):
- """Process data from robot output.xml file and return XML data.
-
- :param args: Parsed arguments.
- :type args: ArgumentParser
-
- :return: XML formatted output.
- :rtype: ElementTree
- """
-
- result = ExecutionResult(args.input)
- checker = ExecutionChecker(args)
- result.visit(checker)
-
- return checker.root
-
-
-def print_error(msg):
- """Print error message on stderr.
-
- :param msg: Error message to print.
- :type msg: str
- :return: nothing
- """
-
- sys.stderr.write(msg + '\n')
-
-
-def parse_args():
- """Parse arguments from cmd line.
-
- :return: Parsed arguments.
- :rtype ArgumentParser
- """
-
- parser = argparse.ArgumentParser()
- parser.add_argument("-i", "--input",
- required=True,
- type=argparse.FileType('r'),
- help="Robot XML log file")
- parser.add_argument("-o", "--output",
- required=True,
- type=argparse.FileType('w'),
- help="XML output file")
- parser.add_argument("-v", "--vdevice",
- required=False,
- default="",
- type=str,
- help="VPP version")
-
- return parser.parse_args()
-
-
-def main():
- """Main function."""
-
- args = parse_args()
-
- root = parse_tests(args)
- ET.ElementTree.write(ET.ElementTree(root), args.output)
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/resources/tools/scripts/topo_container_copy.py b/resources/tools/scripts/topo_container_copy.py
deleted file mode 100644
index 83599b4444..0000000000
--- a/resources/tools/scripts/topo_container_copy.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2017 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This script provides copy and load of Docker container images.
- As destinations are used all DUT nodes from the topology file."""
-
-import sys
-import argparse
-from yaml import load
-
-from resources.libraries.python.ssh import SSH
-
-
-def ssh_no_error(ssh, cmd, sudo=False):
- """Execute a command over ssh channel, and log and exit if the command
- fails.
-
- :param ssh: SSH() object connected to a node.
- :param cmd: Command line to execute on remote node.
- :param sudo: Run command with sudo privileges.
- :type ssh: SSH() object
- :type cmd: str
- :type sudo: bool
- :returns: stdout from the SSH command.
- :rtype: str
- :raises RuntimeError: In case of unexpected ssh command failure
- """
- if sudo:
- ret, stdo, stde = ssh.exec_command_sudo(cmd, timeout=60)
- else:
- ret, stdo, stde = ssh.exec_command(cmd, timeout=60)
-
- if ret != 0:
- print('Command execution failed: "{}"'.format(cmd))
- print('stdout: {0}'.format(stdo))
- print('stderr: {0}'.format(stde))
- raise RuntimeError('Unexpected ssh command failure')
-
- return stdo
-
-
-def ssh_ignore_error(ssh, cmd, sudo=False):
- """Execute a command over ssh channel, ignore errors.
-
- :param ssh: SSH() object connected to a node.
- :param cmd: Command line to execute on remote node.
- :param sudo: Run command with sudo privileges.
- :type ssh: SSH() object
- :type cmd: str
- :type sudo: bool
- :returns: stdout from the SSH command.
- :rtype: str
- """
- if sudo:
- ret, stdo, stde = ssh.exec_command_sudo(cmd)
- else:
- ret, stdo, stde = ssh.exec_command(cmd)
-
- if ret != 0:
- print('Command execution failed: "{}"'.format(cmd))
- print('stdout: {0}'.format(stdo))
- print('stderr: {0}'.format(stde))
-
- return stdo
-
-
-def main():
- """Copy and load of Docker image."""
- parser = argparse.ArgumentParser()
- parser.add_argument("-t", "--topo", required=True,
- help="Topology file")
- parser.add_argument("-d", "--directory", required=True,
- help="Destination directory")
- parser.add_argument("-i", "--images", required=False, nargs='+',
- help="Images paths to copy")
- parser.add_argument("-c", "--cancel", help="Cancel all",
- action="store_true")
-
- args = parser.parse_args()
- topology_file = args.topo
- images = args.images
- directory = args.directory
- cancel_all = args.cancel
-
- work_file = open(topology_file)
- topology = load(work_file.read())['nodes']
-
- ssh = SSH()
- for node in topology:
- if topology[node]['type'] == "DUT":
- print("###TI host: {host}".format(host=topology[node]['host']))
- ssh.connect(topology[node])
-
- if cancel_all:
- # Remove destination directory on DUT
- cmd = "rm -r {directory}".format(directory=directory)
- stdout = ssh_ignore_error(ssh, cmd)
- print("###TI {stdout}".format(stdout=stdout))
-
- else:
- # Create installation directory on DUT
- cmd = "rm -r {directory}; mkdir {directory}"\
- .format(directory=directory)
- stdout = ssh_no_error(ssh, cmd)
- print("###TI {stdout}".format(stdout=stdout))
-
- # Copy images from local path to destination dir
- for image in images:
- print("###TI scp: {}".format(image))
- ssh.scp(local_path=image, remote_path=directory)
-
- # Load image to Docker.
- cmd = "for f in {directory}/*.tar.gz; do "\
- "sudo docker load -i $f; done".format(directory=directory)
- stdout = ssh_no_error(ssh, cmd)
- print("###TI {}".format(stdout))
-
- # Remove <none> images from Docker.
- cmd = "docker rmi $(sudo docker images -f 'dangling=true' -q)"
- stdout = ssh_ignore_error(ssh, cmd, sudo=True)
- print("###TI {}".format(stdout))
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/resources/tools/scripts/topo_installation.py b/resources/tools/scripts/topo_installation.py
deleted file mode 100755
index 5c91abbd0f..0000000000
--- a/resources/tools/scripts/topo_installation.py
+++ /dev/null
@@ -1,171 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2016 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This script provides copy and installation of VPP build deb packages.
- As destinations are used all DUT nodes from the topology file."""
-
-import sys
-import argparse
-from yaml import load
-
-from resources.libraries.python.ssh import SSH
-
-
-def ssh_no_error(ssh, cmd, sudo=False):
- """Execute a command over ssh channel, and log and exit if the command
- fails.
-
- :param ssh: SSH() object connected to a node.
- :param cmd: Command line to execute on remote node.
- :type ssh: SSH() object
- :type cmd: str
- :return: stdout from the SSH command.
- :rtype: str
- """
-
- if sudo:
- ret, stdo, stde = ssh.exec_command_sudo(cmd, timeout=60)
- else:
- ret, stdo, stde = ssh.exec_command(cmd, timeout=60)
-
- if ret != 0:
- print 'Command execution failed: "{}"'.format(cmd)
- print 'stdout: {0}'.format(stdo)
- print 'stderr: {0}'.format(stde)
- raise RuntimeError('Unexpected ssh command failure')
-
- return stdo
-
-
-def ssh_ignore_error(ssh, cmd, sudo=False):
- """Execute a command over ssh channel, ignore errors.
-
- :param ssh: SSH() object connected to a node.
- :param cmd: Command line to execute on remote node.
- :type ssh: SSH() object
- :type cmd: str
- :return: stdout from the SSH command.
- :rtype: str
- """
-
- if sudo:
- ret, stdo, stde = ssh.exec_command_sudo(cmd)
- else:
- ret, stdo, stde = ssh.exec_command(cmd)
-
- if ret != 0:
- print 'Command execution failed: "{}"'.format(cmd)
- print 'stdout: {0}'.format(stdo)
- print 'stderr: {0}'.format(stde)
-
- return stdo
-
-
-def main():
- """Copy and installation of VPP packages."""
-
- parser = argparse.ArgumentParser()
- parser.add_argument("-t", "--topo", required=True,
- help="Topology file")
- parser.add_argument("-d", "--directory", required=True,
- help="Installation directory")
- parser.add_argument("-p", "--packages", required=False, nargs='+',
- help="Packages paths to copy")
- parser.add_argument("-c", "--cancel", help="Cancel installation",
- action="store_true")
- parser.add_argument("-hc", "--honeycomb", help="Include Honeycomb package.",
- required=False, default=False)
-
- args = parser.parse_args()
- topology_file = args.topo
- packages = args.packages
- install_dir = args.directory
- cancel_installation = args.cancel
- honeycomb = args.honeycomb
-
- work_file = open(topology_file)
- topology = load(work_file.read())['nodes']
-
- def fix_interrupted(package):
- """If there are interrupted installations, clean them up."""
-
- cmd = "dpkg -l | grep {0}".format(package)
- ret, _, _ = ssh.exec_command(cmd)
- if ret == 0:
- # Try to fix interrupted installations
- cmd = 'dpkg --configure -a'
- stdout = ssh_no_error(ssh, cmd, sudo=True)
- print "###TI {}".format(stdout)
- # Try to remove installed packages
- cmd = 'apt-get purge -y "{0}.*"'.format(package)
- stdout = ssh_no_error(ssh, cmd, sudo=True)
- print "###TI {}".format(stdout)
-
- ssh = SSH()
- for node in topology:
- if topology[node]['type'] == "DUT":
- print "###TI host: {}".format(topology[node]['host'])
- ssh.connect(topology[node])
-
- if cancel_installation:
- # Remove installation directory on DUT
- cmd = "rm -r {}".format(install_dir)
- stdout = ssh_ignore_error(ssh, cmd)
- print "###TI {}".format(stdout)
-
- if honeycomb:
- fix_interrupted("honeycomb")
- # remove HC logs
- cmd = "rm -rf /var/log/honeycomb"
- stdout = ssh_ignore_error(ssh, cmd, sudo=True)
- print "###TI {}".format(stdout)
- fix_interrupted("vpp")
-
- else:
- # Create installation directory on DUT
- cmd = "rm -r {0}; mkdir {0}".format(install_dir)
- stdout = ssh_no_error(ssh, cmd)
- print "###TI {}".format(stdout)
-
- if honeycomb:
- smd = "ls ~/honeycomb | grep .deb"
- stdout = ssh_ignore_error(ssh, smd)
- if "honeycomb" in stdout:
- # If custom honeycomb packages exist, use them
- cmd = "cp ~/honeycomb/*.deb {0}".format(install_dir)
- stdout = ssh_no_error(ssh, cmd)
- print "###TI {}".format(stdout)
- else:
- # Copy packages from local path to installation dir
- for deb in packages:
- print "###TI scp: {}".format(deb)
- ssh.scp(local_path=deb, remote_path=install_dir)
- else:
- # Copy packages from local path to installation dir
- for deb in packages:
- print "###TI scp: {}".format(deb)
- ssh.scp(local_path=deb, remote_path=install_dir)
-
- if honeycomb:
- fix_interrupted("honeycomb")
- fix_interrupted("vpp")
-
- # Installation of deb packages
- cmd = "dpkg -i --force-all {}/*.deb".format(install_dir)
- stdout = ssh_no_error(ssh, cmd, sudo=True)
- print "###TI {}".format(stdout)
-
-if __name__ == "__main__":
- sys.exit(main())