path: root/resources
author     Peter Mikus <pmikus@cisco.com>    2018-05-10 08:38:06 +0200
committer  Peter Mikus <pmikus@cisco.com>    2018-05-10 16:51:51 +0000
commit     f1bb434e0392882a49f90ed1847f839e8bf46135 (patch)
tree       eccfe0d765e58f95e11676fc960e8d8d68a62aa1 /resources
parent     abd1c00c657242ac481526d7cccfb53b5a8d86bd (diff)
Cleanup DPDK framework setup
Move the installation of DPDK into a keyword (KW) instead of the framework
setup. This will unify the framework setup and allow future optimizations.

Change-Id: I360ba95a2858e73e4bbb12020567d5d174ab69ca
Signed-off-by: Peter Mikus <pmikus@cisco.com>
Diffstat (limited to 'resources')
-rw-r--r--  resources/libraries/python/DPDK/DPDKTools.py      40
-rw-r--r--  resources/libraries/python/DPDK/SetupDPDKTest.py   234
2 files changed, 38 insertions, 236 deletions
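
The hunks below replace the framework-setup-time install with two new static
methods on DPDKTools, install_dpdk_test() and install_dpdk_test_on_all_duts(),
which run install_dpdk.sh over SSH on the DUT nodes. A minimal sketch of
driving them from Python follows; the node dictionaries here are hypothetical
placeholders, since real ones come from the parsed topology file and also
carry SSH credentials.

    # Minimal sketch (hypothetical node data): drive the new DPDKTools methods
    # directly from Python instead of the deleted SetupDPDKTest module.
    from resources.libraries.python.DPDK.DPDKTools import DPDKTools
    from resources.libraries.python.topology import NodeType

    nodes = {
        'DUT1': {'type': NodeType.DUT, 'host': '192.0.2.1'},  # placeholder
        'DUT2': {'type': NodeType.DUT, 'host': '192.0.2.2'},  # placeholder
        'TG': {'type': NodeType.TG, 'host': '192.0.2.3'},     # placeholder
    }

    # Build and install DPDK on one DUT ...
    DPDKTools.install_dpdk_test(nodes['DUT1'])
    # ... or on every DUT node found in the topology.
    DPDKTools.install_dpdk_test_on_all_duts(nodes)
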
diff --git a/resources/libraries/python/DPDK/DPDKTools.py b/resources/libraries/python/DPDK/DPDKTools.py
index b0e67b7ab8..4bdc68ceee 100644
--- a/resources/libraries/python/DPDK/DPDKTools.py
+++ b/resources/libraries/python/DPDK/DPDKTools.py
@@ -89,5 +89,41 @@ class DPDKTools(object):
ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=600)
if ret_code != 0:
- raise RuntimeError('Failed to cleanup the dpdk at node {name}'
- .format(name=dut_node['host']))
+ raise RuntimeError('Failed to cleanup the dpdk at node {name}'.
+ format(name=dut_node['host']))
+
+ @staticmethod
+ def install_dpdk_test(node):
+ """
+ Prepare the DPDK test environment
+
+ :param node: Dictionary created from topology
+ :type node: dict
+ :returns: nothing
+ :raise RuntimeError: If command returns nonzero return code.
+ """
+ arch = Topology.get_node_arch(node)
+
+ ssh = SSH()
+ ssh.connect(node)
+
+ ret_code, _, _ = ssh.exec_command(
+ '{fwdir}/tests/dpdk/dpdk_scripts/install_dpdk.sh {arch}'.
+ format(fwdir=Constants.REMOTE_FW_DIR, arch=arch), timeout=600)
+
+ if ret_code != 0:
+ raise RuntimeError('Install the DPDK failed')
+
+ @staticmethod
+ def install_dpdk_test_on_all_duts(nodes):
+ """
+ Prepare the DPDK test environment on all DUTs.
+
+ :param nodes: Nodes from topology file.
+ :type nodes: dict
+ :returns: nothing
+ """
+ for node in nodes.values():
+ if node['type'] == NodeType.DUT:
+ DPDKTools.install_dpdk_test(node)
+
diff --git a/resources/libraries/python/DPDK/SetupDPDKTest.py b/resources/libraries/python/DPDK/SetupDPDKTest.py
deleted file mode 100644
index 1e88f8d8c1..0000000000
--- a/resources/libraries/python/DPDK/SetupDPDKTest.py
+++ /dev/null
@@ -1,234 +0,0 @@
-# Copyright (c) 2018 Cisco and/or its affiliates.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at:
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This module exists to provide setup utilities for the framework on topology
-nodes. All tasks required to be run before the actual tests are started is
-supposed to end up here.
-"""
-
-from shlex import split
-from subprocess import Popen, PIPE, call
-from multiprocessing import Pool
-from tempfile import NamedTemporaryFile
-from os.path import basename
-
-from robot.api import logger
-from robot.libraries.BuiltIn import BuiltIn
-
-from resources.libraries.python.ssh import SSH
-from resources.libraries.python.constants import Constants as con
-from resources.libraries.python.topology import NodeType
-from resources.libraries.python.topology import Topology
-
-__all__ = ["SetupDPDKTest"]
-
-
-def pack_framework_dir():
- """Pack the testing WS into temp file, return its name.
-
- :raise RuntimeError: If command returns nonzero return code."""
-
- tmpfile = NamedTemporaryFile(suffix=".tgz", prefix="DPDK-testing-")
- file_name = tmpfile.name
- tmpfile.close()
-
- proc = Popen(
- split("tar --exclude-vcs -zcf {0} .".format(file_name)),
- stdout=PIPE, stderr=PIPE)
- (stdout, stderr) = proc.communicate()
-
- logger.debug(stdout)
- logger.debug(stderr)
-
- return_code = proc.wait()
- if return_code != 0:
- raise RuntimeError("Could not pack testing framework.")
-
- return file_name
-
-
-def copy_tarball_to_node(tarball, node):
- """Copy tarball file from local host to remote node.
-
- :param tarball: Path to tarball to upload.
- :param node: Dictionary created from topology.
- :type tarball: str
- :type node: dict
- :returns: nothing
- """
- logger.console('Copying tarball to {0}'.format(node['host']))
- ssh = SSH()
- ssh.connect(node)
-
- ssh.scp(tarball, "/tmp/")
-
-
-def extract_tarball_at_node(tarball, node):
- """Extract tarball at given node.
-
- Extracts tarball using tar on given node to specific CSIT loocation.
-
- :param tarball: Path to tarball to upload.
- :param node: Dictionary created from topology.
- :type tarball: str
- :type node: dict
- :returns: nothing
- :raise RuntimeError: If command returns nonzero return code.
- """
- logger.console('Extracting tarball to {0} on {1}'.format(
- con.REMOTE_FW_DIR, node['host']))
- ssh = SSH()
- ssh.connect(node)
-
- cmd = 'sudo rm -rf {1}; mkdir {1} ; tar -zxf {0} -C {1}; ' \
- 'rm -f {0}'.format(tarball, con.REMOTE_FW_DIR)
- (ret_code, _, stderr) = ssh.exec_command(cmd, timeout=30)
- if ret_code != 0:
- logger.error('Unpack error: {0}'.format(stderr))
- raise RuntimeError('Failed to unpack {0} at node {1}'.format(
- tarball, node['host']))
-
-
-def create_env_directory_at_node(node):
- """
- Create fresh virtualenv to a directory, install pip requirements.
-
- :param node: Dictionary created from topology, will only install in the TG
- :type node: dict
- :returns: nothing
- :raise RuntimeError: If command returns nonzero return code.
- """
- logger.console('Extracting virtualenv, installing requirements.txt '
- 'on {0}'.format(node['host']))
- ssh = SSH()
- ssh.connect(node)
- (ret_code, stdout, stderr) = ssh.exec_command(
- 'cd {0} && rm -rf env && virtualenv env && '
- '. env/bin/activate && pip install -r requirements.txt'
- .format(con.REMOTE_FW_DIR), timeout=100)
- if ret_code != 0:
- logger.error('Virtualenv creation error: {0}'.format(stdout + stderr))
- raise RuntimeError('Virtualenv setup failed')
- else:
- logger.console('Virtualenv created on {0}'.format(node['host']))
-
-def install_dpdk_test(node):
- """
- Prepare the DPDK test envrionment
-
- :param node: Dictionary created from topology
- :type node: dict
- :returns: nothing
- :raise RuntimeError: If command returns nonzero return code.
- """
- arch = Topology.get_node_arch(node)
- logger.console('Install the DPDK on {0} ({1})'.format(node['host'],
- arch))
-
- ssh = SSH()
- ssh.connect(node)
-
- (ret_code, _, stderr) = ssh.exec_command(
- 'cd {0}/tests/dpdk/dpdk_scripts/ && ./install_dpdk.sh {1}'
- .format(con.REMOTE_FW_DIR, arch), timeout=600)
-
- if ret_code != 0:
- logger.error('Install the DPDK error: {0}'.format(stderr))
- raise RuntimeError('Install the DPDK failed')
- else:
- logger.console('Install the DPDK on {0} success!'.format(node['host']))
-
-def setup_node(args):
- """Run all set-up methods for a node.
-
- This method is used as map_async parameter. It receives tuple with all
- parameters as passed to map_async function.
-
- :param args: All parameters needed to setup one node.
- :type args: tuple
- :returns: True - success, False - error
- :rtype: bool
- """
- tarball, remote_tarball, node = args
-
- # if unset, arch defaults to x86_64
- if 'arch' not in node or not node['arch']:
- node['arch'] = 'x86_64'
-
- try:
- copy_tarball_to_node(tarball, node)
- extract_tarball_at_node(remote_tarball, node)
- if node['type'] == NodeType.DUT:
- install_dpdk_test(node)
- if node['type'] == NodeType.TG:
- create_env_directory_at_node(node)
- except RuntimeError as exc:
- logger.error("Node setup failed, error:'{0}'".format(exc.message))
- return False
- else:
- logger.console('Setup of node {0} done'.format(node['host']))
- return True
-
-def delete_local_tarball(tarball):
- """Delete local tarball to prevent disk pollution.
-
- :param tarball: Path to tarball to upload.
- :type tarball: str
- :returns: nothing
- """
- call(split('sh -c "rm {0} > /dev/null 2>&1"'.format(tarball)))
-
-
-class SetupDPDKTest(object):
- """Setup suite run on topology nodes.
-
- Many VAT/CLI based tests need the scripts at remote hosts before executing
- them. This class packs the whole testing directory and copies it over
- to all nodes in topology under /tmp/
- """
-
- @staticmethod
- def setup_dpdk_test(nodes):
- """Pack the whole directory and extract in temp on each node."""
-
- tarball = pack_framework_dir()
- msg = 'Framework packed to {0}'.format(tarball)
- logger.console(msg)
- logger.trace(msg)
- remote_tarball = "/tmp/{0}".format(basename(tarball))
-
- # Turn off logging since we use multiprocessing
- log_level = BuiltIn().set_log_level('NONE')
- params = ((tarball, remote_tarball, node) for node in nodes.values())
- pool = Pool(processes=len(nodes))
- result = pool.map_async(setup_node, params)
- pool.close()
- pool.join()
-
- # Turn on logging
- BuiltIn().set_log_level(log_level)
-
- logger.info(
- 'Executed node setups in parallel, waiting for processes to end')
- result.wait()
-
- results = result.get()
- node_setup_success = all(results)
- logger.info('Results: {0}'.format(results))
-
- logger.trace('Test framework copied to all topology nodes')
- delete_local_tarball(tarball)
- if node_setup_success:
- logger.console('All nodes are ready')
- else:
- logger.console('Failed to setup dpdk on all the nodes')
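
For context, the deleted SetupDPDKTest module performed the whole per-node
preparation during framework setup: pack the working directory into a
tarball, copy and extract it on every node in parallel via multiprocessing,
then build DPDK on the DUTs and create a virtualenv on the TG. A condensed
sketch of that removed pattern, simplified from the code above (setup_node()
is the deleted helper that copied, extracted and installed per node):

    # Condensed sketch of the removed parallel setup; not new functionality.
    from multiprocessing import Pool

    def setup_all_nodes(nodes, tarball, remote_tarball):
        """Run the deleted module's per-node setup in parallel."""
        params = [(tarball, remote_tarball, node) for node in nodes.values()]
        pool = Pool(processes=len(nodes))
        # setup_node() returns True on success, False on any RuntimeError.
        result = pool.map_async(setup_node, params)
        pool.close()
        pool.join()
        return all(result.get())
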