path: root/scripts/automation/regression/stateful_tests
Diffstat (limited to 'scripts/automation/regression/stateful_tests')
-rw-r--r--   scripts/automation/regression/stateful_tests/__init__.py                 0
-rwxr-xr-x   scripts/automation/regression/stateful_tests/tests_exceptions.py        37
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_client_pkg_test.py    34
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_general_test.py      363
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_imix_test.py         213
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_ipv6_test.py         103
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_nat_test.py          169
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_nbar_test.py         123
-rwxr-xr-x   scripts/automation/regression/stateful_tests/trex_rx_test.py           280
9 files changed, 1322 insertions, 0 deletions
diff --git a/scripts/automation/regression/stateful_tests/__init__.py b/scripts/automation/regression/stateful_tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/__init__.py
diff --git a/scripts/automation/regression/stateful_tests/tests_exceptions.py b/scripts/automation/regression/stateful_tests/tests_exceptions.py
new file mode 100755
index 00000000..360f44a5
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/tests_exceptions.py
@@ -0,0 +1,37 @@
+#!/router/bin/python
+
+class TRexInUseError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+class TRexRunFailedError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+class TRexIncompleteRunError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+class TRexLowCpuUtilError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+class AbnormalResultError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+class ClassificationMissmatchError(Exception):
+ def __init__(self, value):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
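
Reviewer note: the six exception classes above are identical except for their names. Not part of this commit, but a minimal sketch of how they could share a common base class while keeping the same repr-based messages:

class TRexTestError(Exception):
    # shared boilerplate: store the value and render it via repr(), as the classes above do
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)

class TRexInUseError(TRexTestError): pass
class TRexRunFailedError(TRexTestError): pass
# ...and likewise for the remaining four exception types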
diff --git a/scripts/automation/regression/stateful_tests/trex_client_pkg_test.py b/scripts/automation/regression/stateful_tests/trex_client_pkg_test.py
new file mode 100755
index 00000000..892be966
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_client_pkg_test.py
@@ -0,0 +1,34 @@
+#!/router/bin/python
+from .trex_general_test import CTRexGeneral_Test, CTRexScenario
+from misc_methods import run_command
+from nose.plugins.attrib import attr
+
+
+@attr('client_package')
+class CTRexClientPKG_Test(CTRexGeneral_Test):
+ """This class tests TRex client package"""
+
+ def setUp(self):
+ CTRexGeneral_Test.setUp(self)
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = 'config')
+ self.unzip_client_package()
+
+ def run_client_package_stf_example(self, python_version):
+ commands = [
+ 'cd %s' % CTRexScenario.scripts_path,
+ 'source find_python.sh --%s' % python_version,
+ 'which $PYTHON',
+ 'cd trex_client/stf/examples',
+ '$PYTHON stf_example.py -s %s' % self.configuration.trex['trex_name'],
+ ]
+ return_code, _, stderr = run_command("bash -ce '%s'" % '; '.join(commands))
+ if return_code:
+ self.fail('Error in running stf_example using %s: %s' % (python_version, stderr))
+
+ def test_client_python2(self):
+ self.run_client_package_stf_example(python_version = 'python2')
+
+ def test_client_python3(self):
+ self.run_client_package_stf_example(python_version = 'python3')
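
Reviewer note: run_client_package_stf_example() above joins its command list with '; ' and wraps it in a single bash -ce call. With placeholder values (both are assumptions, not taken from the commit) the string handed to run_command() would look roughly like this:

# hypothetical values for illustration only
scripts_path = '/opt/trex/scripts'
trex_name = 'trex-server'
cmd = ("bash -ce 'cd %s; source find_python.sh --python2; which $PYTHON; "
       "cd trex_client/stf/examples; $PYTHON stf_example.py -s %s'" % (scripts_path, trex_name))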
diff --git a/scripts/automation/regression/stateful_tests/trex_general_test.py b/scripts/automation/regression/stateful_tests/trex_general_test.py
new file mode 100755
index 00000000..e968d380
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_general_test.py
@@ -0,0 +1,363 @@
+#!/router/bin/python
+
+__copyright__ = "Copyright 2014"
+
+"""
+Name:
+ trex_general_test.py
+
+
+Description:
+
+ This script provides the functionality to test the performance of the TRex traffic generator.
+ The tested scenario is a TRex TG directly connected to a Cisco router.
+
+::
+
+ Topology:
+
+ ------- --------
+ | | Tx---1gig/10gig----Rx | |
+ | TRex | | router |
+ | | Rx---1gig/10gig----Tx | |
+ ------- --------
+
+"""
+from nose.plugins import Plugin
+from nose.plugins.skip import SkipTest
+import trex
+from trex import CTRexScenario
+import misc_methods
+import sys
+import os
+# from CPlatformUnderTest import *
+from CPlatform import *
+import termstyle
+import threading
+from .tests_exceptions import *
+from platform_cmd_link import *
+import unittest
+from glob import glob
+
+def setUpModule(module):
+ pass
+
+def tearDownModule(module):
+ pass
+
+class CTRexGeneral_Test(unittest.TestCase):
+ """This class defines the general stateful testcase of the TRex traffic generator"""
+ def __init__ (self, *args, **kwargs):
+ sys.stdout.flush()
+ unittest.TestCase.__init__(self, *args, **kwargs)
+ if CTRexScenario.is_test_list:
+ return
+ # Point test object to scenario global object
+ self.configuration = CTRexScenario.configuration
+ self.benchmark = CTRexScenario.benchmark
+ self.trex = CTRexScenario.trex
+ self.stl_trex = CTRexScenario.stl_trex
+ self.trex_crashed = CTRexScenario.trex_crashed
+ self.modes = CTRexScenario.modes
+ self.GAManager = CTRexScenario.GAManager
+ self.no_daemon = CTRexScenario.no_daemon
+ self.skipping = False
+ self.fail_reasons = []
+ if not hasattr(self, 'unsupported_modes'):
+ self.unsupported_modes = []
+ self.is_loopback = True if 'loopback' in self.modes else False
+ self.is_virt_nics = True if 'virt_nics' in self.modes else False
+ self.is_VM = True if 'VM' in self.modes else False
+
+ if not CTRexScenario.is_init:
+ if self.trex and not self.no_daemon: # stateful
+ CTRexScenario.trex_version = self.trex.get_trex_version()
+ if not self.is_loopback:
+ # initialize the scenario based on the received configuration, once per testing session
+ CTRexScenario.router = CPlatform(CTRexScenario.router_cfg['silent_mode'])
+ device_cfg = CDeviceCfg()
+ device_cfg.set_platform_config(CTRexScenario.router_cfg['config_dict'])
+ device_cfg.set_tftp_config(CTRexScenario.router_cfg['tftp_config_dict'])
+ CTRexScenario.router.load_platform_data_from_file(device_cfg)
+ CTRexScenario.router.launch_connection(device_cfg)
+ if CTRexScenario.router_cfg['forceImageReload']:
+ running_image = CTRexScenario.router.get_running_image_details()['image']
+ print('Current router image: %s' % running_image)
+ needed_image = device_cfg.get_image_name()
+ if not CTRexScenario.router.is_image_matches(needed_image):
+ print('Setting router image: %s' % needed_image)
+ CTRexScenario.router.config_tftp_server(device_cfg)
+ CTRexScenario.router.load_platform_image(needed_image)
+ CTRexScenario.router.set_boot_image(needed_image)
+ CTRexScenario.router.reload_platform(device_cfg)
+ CTRexScenario.router.launch_connection(device_cfg)
+ running_image = CTRexScenario.router.get_running_image_details()['image'] # verify image
+ if not CTRexScenario.router.is_image_matches(needed_image):
+ self.fail('Unable to set router image: %s, current image is: %s' % (needed_image, running_image))
+ else:
+ print('Matches needed image: %s' % needed_image)
+ CTRexScenario.router_image = running_image
+
+ if self.modes:
+ print(termstyle.green('\t!!!\tRunning with modes: %s, unsuitable tests will be skipped.\t!!!' % list(self.modes)))
+
+ CTRexScenario.is_init = True
+ print(termstyle.green("Done instantiating TRex scenario!\n"))
+
+# raise RuntimeError('CTRexScenario class is not initialized!')
+ self.router = CTRexScenario.router
+
+
+
+# def assert_dict_eq (self, dict, key, val, error=''):
+# v1 = int(dict[key]))
+# self.assertEqual(v1, int(val), error)
+#
+# def assert_dict_gt (self, d, key, val, error=''):
+# v1 = int(dict[key])
+# self.assert_gt(v1, int(val), error)
+
+ def assertEqual(self, v1, v2, s):
+ if v1 != v2:
+ error = 'ERROR ' + str(v1) + ' != ' + str(v2) + ' ' + s
+ self.fail(error)
+
+ def assert_gt(self, v1, v2, s):
+ if not v1 > v2:
+ error='ERROR {big} < {small} {str}'.format(big = v1, small = v2, str = s)
+ self.fail(error)
+
+ def check_results_eq (self,res,name,val):
+ if res is None:
+ self.fail('TRex results cannot be None !')
+ return
+
+ if name not in res:
+ self.fail('TRex results do not include key %s' % name)
+ return
+
+ if res[name] != float(val):
+ self.fail('TRex results[%s] == %f, not the expected %f' % (name, res[name], val))
+
+ def check_CPU_benchmark (self, trex_res, err = 25, minimal_cpu = 10, maximal_cpu = 85):
+ cpu_util = trex_res.get_avg_steady_state_value('trex-global.data.m_cpu_util_raw')
+ trex_tx_bps = trex_res.get_avg_steady_state_value('trex-global.data.m_tx_bps')
+ expected_norm_cpu = self.get_benchmark_param('bw_per_core')
+ cores = self.get_benchmark_param('cores')
+ ports_count = trex_res.get_ports_count()
+ if not (cpu_util and ports_count and cores):
+ print("Can't calculate CPU benchmark, would divide by zero: cpu util: %s, ports: %s, cores: %s" % (cpu_util, ports_count, cores))
+ test_norm_cpu = -1
+ else:
+ test_norm_cpu = trex_tx_bps / (cpu_util * ports_count * cores * 2.5e6)
+
+ if '1G' in self.modes:
+ minimal_cpu /= 10.0
+
+ if not self.is_virt_nics:
+ if cpu_util > maximal_cpu:
+ self.fail("CPU is too high (%s%%), probably queue full." % cpu_util )
+ #if cpu_util < minimal_cpu:
+ # self.fail("CPU is too low (%s%%), can't verify performance in such low CPU%%." % cpu_util )
+
+ print("TRex CPU utilization: %g%%, norm_cpu is : %g Gb/core" % (round(cpu_util, 2), round(test_norm_cpu, 2)))
+ if test_norm_cpu < 0:
+ return
+
+ if not expected_norm_cpu:
+ expected_norm_cpu = 1
+
+ calc_error_percent = abs(100.0 * test_norm_cpu / expected_norm_cpu - 100)
+ print('Err percent: %s' % calc_error_percent)
+ #if calc_error_percent > err and cpu_util > 10:
+ # self.fail('Expected bw_per_core ratio: %s, got: %g' % (expected_norm_cpu, round(test_norm_cpu)))
+
+ # report benchmarks
+ if self.GAManager:
+ try:
+ pass
+ #setup_test = '%s.%s' % (CTRexScenario.setup_name, self.get_name())
+ #self.GAManager.gaAddAction(Event = 'stateful_test', action = setup_test, label = 'bw_per_core', value = int(test_norm_cpu))
+ #self.GAManager.gaAddAction(Event = 'stateful_test', action = setup_test, label = 'bw_per_core_exp', value = int(expected_norm_cpu))
+ #self.GAManager.emptyAndReportQ()
+ except Exception as e:
+ print('Sending GA failed: %s' % e)
+
+ def check_results_gt (self, res, name, val):
+ if res is None:
+ self.fail('TRex results cannot be None !')
+ return
+
+ if name not in res:
+ self.fail('TRex results do not include key %s' % name)
+ return
+
+ if res[name] < float(val):
+ self.fail('TRex results[%s] == %f, not greater than expected %f' % (name, res[name], val))
+
+ def check_for_trex_crash(self):
+ pass
+
+ def get_benchmark_param (self, param, sub_param = None, test_name = None):
+ if not test_name:
+ test_name = self.get_name()
+ if test_name not in self.benchmark:
+ self.skip('No data in benchmark.yaml for test: %s, param: %s. Skipping.' % (test_name, param))
+ if sub_param:
+ return self.benchmark[test_name][param].get(sub_param)
+ else:
+ return self.benchmark[test_name].get(param)
+
+ def check_general_scenario_results (self, trex_res, check_latency = True):
+
+ try:
+ # check if test is valid
+ if not trex_res.is_done_warmup():
+ self.fail('TRex did not reach warm-up situation. Results are not valid.')
+
+ # check history size is enough
+ if len(trex_res._history) < 5:
+ self.fail('TRex results list is too short. Increase the test duration or check for unexpected stopping.')
+
+ # check TRex number of drops
+ trex_tx_pckt = trex_res.get_last_value("trex-global.data.m_total_tx_pkts")
+ trex_drops = trex_res.get_total_drops()
+ trex_drop_rate = trex_res.get_drop_rate()
+ if (trex_drops > 0.001 * trex_tx_pckt) and (trex_drop_rate > 0.0): # deliberately mask kickoff drops when TRex is first initiated
+ self.fail('Number of packet drops is larger than 0.1% of all traffic')
+
+ # check queue full, queue drop, allocation error
+ m_total_alloc_error = trex_res.get_last_value("trex-global.data.m_total_alloc_error")
+ m_total_queue_full = trex_res.get_last_value("trex-global.data.m_total_queue_full")
+ m_total_queue_drop = trex_res.get_last_value("trex-global.data.m_total_queue_drop")
+ self.assert_gt(1000, m_total_alloc_error, 'Got allocation errors (%s), please review multiplier and templates configuration.' % m_total_alloc_error)
+ self.assert_gt(1000, m_total_queue_drop, 'Too many queue_drops (%s), please review multiplier.' % m_total_queue_drop)
+
+ if self.is_VM:
+ allowed_queue_full = 10000 + trex_tx_pckt / 100
+ else:
+ allowed_queue_full = 1000 + trex_tx_pckt / 1000
+ self.assert_gt(allowed_queue_full, m_total_queue_full, 'Too many queue_full events (%s), please review multiplier.' % m_total_queue_full)
+
+ # # check TRex expected counters
+ #trex_exp_rate = trex_res.get_expected_tx_rate().get('m_tx_expected_bps')
+ #assert trex_exp_rate is not None
+ #trex_exp_gbps = trex_exp_rate/(10**9)
+
+ if check_latency:
+ # check that max latency does not exceed 1 msec
+ if self.configuration.trex['trex_name'] == '10.56.217.210': # temporary workaround for latency issue in kiwi02, remove it ASAP. http://trex-tgn.cisco.com/youtrack/issue/trex-194
+ allowed_latency = 8000
+ elif self.is_VM:
+ allowed_latency = 9999999
+ else: # no excuses, check 1ms
+ allowed_latency = 1000
+ if max(trex_res.get_max_latency().values()) > allowed_latency:
+ self.fail('LatencyError: Maximal latency exceeds %s (usec)' % allowed_latency)
+
+ # check that avg latency does not exceed 1 msec
+ if self.is_VM:
+ allowed_latency = 9999999
+ else: # no excuses, check 1ms
+ allowed_latency = 1000
+ if max(trex_res.get_avg_latency().values()) > allowed_latency:
+ self.fail('LatencyError: Average latency exceeds %s (usec)' % allowed_latency)
+
+ if not self.is_loopback:
+ # check router number of drops --> deliberately masked- need to be figured out!!!!!
+ pkt_drop_stats = self.router.get_drop_stats()
+# assert pkt_drop_stats['total_drops'] < 20
+
+ # check for trex-router packet consistency
+ # TODO: check if it's ok
+ print('router drop stats: %s' % pkt_drop_stats)
+ print('TRex drop stats: %s' % trex_drops)
+ #self.assertEqual(pkt_drop_stats, trex_drops, "TRex's and router's drop stats don't match.")
+
+ except KeyError as e:
+ self.fail(e)
+ #assert False
+
+ # except AssertionError as e:
+ # e.args += ('TRex has crashed!')
+ # raise
+
+ @staticmethod
+ def unzip_client_package():
+ client_pkg_files = glob('%s/trex_client*.tar.gz' % CTRexScenario.scripts_path)
+ if not len(client_pkg_files):
+ raise Exception('Could not find client package')
+ if len(client_pkg_files) > 1:
+ raise Exception('Found more than one client package')
+ if not os.path.exists('%s/trex_client' % CTRexScenario.scripts_path):
+ print('\nUnzipping package')
+ return_code, _, stderr = misc_methods.run_command("tar -xzf %s -C %s" % (client_pkg_files[0], CTRexScenario.scripts_path))
+ if return_code:
+ raise Exception('Could not untar the client package: %s' % stderr)
+ else:
+ print('\nClient package is untarred')
+
+ # We encountered an error; don't fail the test immediately
+ def fail(self, reason = 'Unknown error'):
+ print('Error: %s' % reason)
+ self.fail_reasons.append(reason)
+
+ # skip running of the test, counts as 'passed' but prints 'skipped'
+ def skip(self, message = 'Unknown reason'):
+ print('Skip: %s' % message)
+ self.skipping = True
+ raise SkipTest(message)
+
+ # get name of currently running test
+ def get_name(self):
+ return self._testMethodName
+
+ def setUp(self):
+ test_setup_modes_conflict = self.modes & set(self.unsupported_modes)
+ if test_setup_modes_conflict:
+ self.skip("The test can't run with the following modes of the given setup: %s" % test_setup_modes_conflict)
+ if not self.stl_trex and not self.trex.is_idle():
+ print('Warning: TRex is not idle at setUp, trying to stop it.')
+ self.trex.force_kill(confirm = False)
+ if not self.is_loopback:
+ print('')
+ if not self.stl_trex: # stateful
+ self.router.load_clean_config()
+ self.router.clear_counters()
+ self.router.clear_packet_drop_stats()
+
+ ########################################################################
+ #### DO NOT ADD TESTS TO THIS FILE ####
+ #### Tests added here will run once for EVERY test sub-class ####
+ ########################################################################
+
+ # masked example of such a test; uncomment to see how it affects the run #
+# def test_isInitialized(self):
+# assert CTRexScenario.is_init == True
+ def tearDown(self):
+ if not self.stl_trex and not self.trex.is_idle():
+ print('Warning: TRex is not idle at tearDown, trying to stop it.')
+ self.trex.force_kill(confirm = False)
+ if not self.skipping:
+ # print server logs of test run
+ if self.trex and CTRexScenario.server_logs and not self.no_daemon:
+ try:
+ print(termstyle.green('\n>>>>>>>>>>>>>>> Daemon log <<<<<<<<<<<<<<<'))
+ daemon_log = self.trex.get_trex_daemon_log()
+ log_size = len(daemon_log)
+ print(''.join(daemon_log[CTRexScenario.daemon_log_lines:]))
+ CTRexScenario.daemon_log_lines = log_size
+ except Exception as e:
+ print("Can't get TRex daemon log:", e)
+ try:
+ print(termstyle.green('>>>>>>>>>>>>>>>> Trex log <<<<<<<<<<<<<<<<'))
+ print(''.join(self.trex.get_trex_log()))
+ except Exception as e:
+ print("Can't get TRex log:", e)
+ if len(self.fail_reasons):
+ sys.stdout.flush()
+ raise Exception('The test is failed, reasons:\n%s' % '\n'.join(self.fail_reasons))
+ sys.stdout.flush()
+
+ def check_for_trex_crash(self):
+ pass
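
Reviewer note: a worked example of the bw_per_core normalization in check_CPU_benchmark() above, using made-up numbers:

# illustrative values only
trex_tx_bps = 10e9     # 10 Gbps transmitted
cpu_util    = 50.0     # percent
ports_count = 2
cores       = 4
test_norm_cpu = trex_tx_bps / (cpu_util * ports_count * cores * 2.5e6)    # -> 10.0 Gb/core
expected_norm_cpu = 9.0                                                    # hypothetical 'bw_per_core' benchmark value
calc_error_percent = abs(100.0 * test_norm_cpu / expected_norm_cpu - 100)  # -> ~11.1%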
diff --git a/scripts/automation/regression/stateful_tests/trex_imix_test.py b/scripts/automation/regression/stateful_tests/trex_imix_test.py
new file mode 100755
index 00000000..f8fe0ed1
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_imix_test.py
@@ -0,0 +1,213 @@
+#!/router/bin/python
+from .trex_general_test import CTRexGeneral_Test
+from CPlatform import CStaticRouteConfig
+from .tests_exceptions import *
+#import sys
+import time
+from nose.tools import nottest
+
+class CTRexIMIX_Test(CTRexGeneral_Test):
+ """This class defines the IMIX testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ # super(CTRexIMIX_Test, self).__init__()
+ CTRexGeneral_Test.__init__(self, *args, **kwargs)
+
+ def setUp(self):
+ super(CTRexIMIX_Test, self).setUp() # launch super test class setUp process
+ # CTRexGeneral_Test.setUp(self) # launch super test class setUp process
+ # self.router.clear_counters()
+ pass
+
+ def test_routing_imix_64(self):
+ # test initialization
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+# self.trex.set_yaml_file('cap2/imix_64.yaml')
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+# trex_res = self.trex.run(multiplier = mult, cores = core, duration = 30, l = 1000, p = True)
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 30,
+ f = 'cap2/imix_64.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+
+ # the name intentionally does not match nose's default pattern; to include it, the test must be specified explicitly
+ def dummy(self):
+ ret = self.trex.start_trex(
+ c = 1,
+ m = 1,
+ p = True,
+ nc = True,
+ d = 5,
+ f = 'cap2/imix_fast_1g.yaml',
+ l = 1000,
+ trex_development = True)
+
+ trex_res = self.trex.sample_to_run_finish()
+ print(trex_res)
+
+ def test_routing_imix (self):
+ # test initialization
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+# self.trex.set_yaml_file('cap2/imix_fast_1g.yaml')
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 60,
+ f = 'cap2/imix_fast_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+
+ self.check_CPU_benchmark(trex_res)
+
+
+ def test_static_routing_imix (self):
+ if self.is_loopback:
+ self.skip('In loopback mode the test is the same as test_routing_imix')
+ # test initialization
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+
+ # Configure static routing based on benchmark data input
+ stat_route_dict = self.get_benchmark_param('stat_route_dict')
+ stat_route_obj = CStaticRouteConfig(stat_route_dict)
+ self.router.config_static_routing(stat_route_obj, mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 60,
+ f = 'cap2/imix_fast_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ print("\nLATEST DUMP:")
+ print(trex_res.get_latest_dump())
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+
+
+ def test_static_routing_imix_asymmetric (self):
+ # test initialization
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+
+ # Configure static routing based on benchmark data input
+ stat_route_dict = self.get_benchmark_param('stat_route_dict')
+ stat_route_obj = CStaticRouteConfig(stat_route_dict)
+ self.router.config_static_routing(stat_route_obj, mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ nc = True,
+ d = 100,
+ f = 'cap2/imix_fast_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+
+ self.check_CPU_benchmark(trex_res, minimal_cpu = 25)
+
+
+ def test_jumbo(self, duration = 100, **kwargs):
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces(mtu = 9216)
+ self.router.config_pbr(mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = duration,
+ f = 'cap2/imix_9k.yaml',
+ l = 1000,
+ **kwargs)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res, minimal_cpu = 0, maximal_cpu = 10)
+
+ # don't include it in the regular nose search
+ @nottest
+ def test_warm_up(self):
+ try:
+ self._testMethodName = 'test_jumbo'
+ self.test_jumbo(duration = 5, trex_development = True)
+ except Exception as e:
+ print('Ignoring this error: %s' % e)
+ if self.fail_reasons:
+ print('Ignoring these errors:\n%s' % '\n'.join(self.fail_reasons))
+ self.fail_reasons = []
+
+ def tearDown(self):
+ CTRexGeneral_Test.tearDown(self)
+ # remove nbar config here
+ pass
+
+if __name__ == "__main__":
+ pass
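
Reviewer note: mult and core above come from CTRexScenario.benchmark (benchmark.yaml), keyed by the test name, via get_benchmark_param() in trex_general_test.py. A sketch of that lookup with hypothetical values:

# hypothetical benchmark data; real values live in benchmark.yaml per setup
benchmark = {'test_routing_imix': {'multiplier': 10, 'cores': 2}}
test_name = 'test_routing_imix'
mult = benchmark[test_name].get('multiplier')   # -> 10
core = benchmark[test_name].get('cores')        # -> 2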
diff --git a/scripts/automation/regression/stateful_tests/trex_ipv6_test.py b/scripts/automation/regression/stateful_tests/trex_ipv6_test.py
new file mode 100755
index 00000000..4d6f7953
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_ipv6_test.py
@@ -0,0 +1,103 @@
+#!/router/bin/python
+from .trex_general_test import CTRexGeneral_Test
+from .tests_exceptions import *
+import time
+from nose.tools import assert_equal
+
+class CTRexIPv6_Test(CTRexGeneral_Test):
+ """This class defines the IPv6 testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ super(CTRexIPv6_Test, self).__init__(*args, **kwargs)
+
+ def setUp(self):
+ super(CTRexIPv6_Test, self).setUp() # launch super test class setUp process
+# print " before sleep setup !!"
+# time.sleep(100000);
+# pass
+
+ def test_ipv6_simple(self):
+ if self.is_virt_nics:
+ self.skip('--ipv6 flag does not work correctly with virtual NICs') # TODO: fix
+ # test initialization
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+
+ self.router.config_pbr(mode = "config")
+ self.router.config_ipv6_pbr(mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ cfg = '/etc/trex_cfg_mac.yaml',
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ ipv6 = True,
+ d = 60,
+ f = 'avl/sfr_delay_10_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+
+ self.check_CPU_benchmark (trex_res, 10.0)
+
+ assert True
+
+
+ def test_ipv6_negative (self):
+ if self.is_loopback:
+ self.skip('The test checks IPv6 drops by the device and we are in a loopback setup')
+ # test initialization
+ self.router.configure_basic_interfaces()
+
+ # NOT CONFIGURING IPv6 INTENTIONALLY TO GET DROPS!
+ self.router.config_pbr(mode = "config")
+
+ # same params as test_ipv6_simple
+ mult = self.get_benchmark_param('multiplier', test_name = 'test_ipv6_simple')
+ core = self.get_benchmark_param('cores', test_name = 'test_ipv6_simple')
+
+ ret = self.trex.start_trex(
+ cfg = '/etc/trex_cfg_mac.yaml',
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ ipv6 = True,
+ d = 60,
+ f = 'avl/sfr_delay_10_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ trex_tx_pckt = float(trex_res.get_last_value("trex-global.data.m_total_tx_pkts"))
+ trex_drops = int(trex_res.get_total_drops())
+
+ trex_drop_rate = trex_res.get_drop_rate()
+
+ # make sure that at least 50% of the total transmitted packets failed
+ self.assert_gt((trex_drops/trex_tx_pckt), 0.5, 'packet drop ratio is not high enough')
+
+
+
+ def tearDown(self):
+ CTRexGeneral_Test.tearDown(self)
+ # remove config here
+ pass
+
+if __name__ == "__main__":
+ pass
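
Reviewer note: the pass criterion of test_ipv6_negative above is that at least half of the transmitted packets are dropped by the router. With made-up counters:

trex_tx_pckt = 1000000.0
trex_drops = 700000
drop_ratio = trex_drops / trex_tx_pckt   # 0.7 > 0.5 -> assert_gt passes, drops were expected
# a ratio of, say, 0.3 would fail with 'packet drop ratio is not high enough'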
diff --git a/scripts/automation/regression/stateful_tests/trex_nat_test.py b/scripts/automation/regression/stateful_tests/trex_nat_test.py
new file mode 100755
index 00000000..c23f67c4
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_nat_test.py
@@ -0,0 +1,169 @@
+#!/router/bin/python
+from .trex_general_test import CTRexGeneral_Test
+from .tests_exceptions import *
+import time
+from CPlatform import CStaticRouteConfig, CNatConfig
+from nose.tools import assert_equal
+
+
+class CTRexNoNat_Test(CTRexGeneral_Test):#(unittest.TestCase):
+ """This class defines the NAT testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ super(CTRexNoNat_Test, self).__init__(*args, **kwargs)
+ self.unsupported_modes = ['loopback'] # NAT requires device
+
+ def setUp(self):
+ super(CTRexNoNat_Test, self).setUp() # launch super test class setUp process
+
+ def check_nat_stats (self, nat_stats):
+ pass
+
+
+ def test_nat_learning(self):
+ # test initialization
+ self.router.configure_basic_interfaces()
+
+ stat_route_dict = self.get_benchmark_param('stat_route_dict')
+ stat_route_obj = CStaticRouteConfig(stat_route_dict)
+ self.router.config_static_routing(stat_route_obj, mode = "config")
+
+ self.router.config_nat_verify() # shutdown duplicate interfaces
+
+# self.trex.set_yaml_file('cap2/http_simple.yaml')
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+# trex_res = self.trex.run(multiplier = mult, cores = core, duration = 100, l = 1000, learn_verify = True)
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ learn_verify = True,
+ d = 100,
+ f = 'cap2/http_simple.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ print("\nLATEST DUMP:")
+ print(trex_res.get_latest_dump())
+
+
+ expected_nat_opened = self.get_benchmark_param('nat_opened')
+ learning_stats = trex_res.get_last_value("trex-global.data", ".*nat.*") # extract all nat data
+
+ if self.get_benchmark_param('allow_timeout_dev'):
+ nat_timeout_ratio = float(learning_stats['m_total_nat_time_out']) / learning_stats['m_total_nat_open']
+ if nat_timeout_ratio > 0.005:
+ self.fail('TRex nat_timeout ratio %f exceeds the allowed 0.005 (0.5%%)' % nat_timeout_ratio)
+ else:
+ self.check_results_eq (learning_stats, 'm_total_nat_time_out', 0.0)
+ self.check_results_eq (learning_stats, 'm_total_nat_no_fid', 0.0)
+ self.check_results_gt (learning_stats, 'm_total_nat_learn_error', 0.0)
+#
+ self.check_results_gt (learning_stats, 'm_total_nat_open', expected_nat_opened)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res, minimal_cpu = 10, maximal_cpu = 85)
+
+ def tearDown(self):
+ CTRexGeneral_Test.tearDown(self)
+ pass
+
+
+class CTRexNat_Test(CTRexGeneral_Test):#(unittest.TestCase):
+ """This class defines the NAT testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ super(CTRexNat_Test, self).__init__(*args, **kwargs)
+ self.unsupported_modes = ['loopback'] # NAT requires device
+
+ def setUp(self):
+ super(CTRexNat_Test, self).setUp() # launch super test class setUp process
+ # config nat here
+
+
+ def check_nat_stats (self, nat_stats):
+ pass
+
+
+ def test_nat_simple_mode1(self):
+ self.nat_simple_helper(learn_mode=1)
+
+ def test_nat_simple_mode2(self):
+ self.nat_simple_helper(learn_mode=2)
+
+ def test_nat_simple_mode3(self):
+ self.nat_simple_helper(learn_mode=3)
+
+ def nat_simple_helper(self, learn_mode=1):
+ # test initialization
+ self.router.configure_basic_interfaces()
+
+
+ stat_route_dict = self.get_benchmark_param('stat_route_dict')
+ stat_route_obj = CStaticRouteConfig(stat_route_dict)
+ self.router.config_static_routing(stat_route_obj, mode = "config")
+
+ nat_dict = self.get_benchmark_param('nat_dict')
+ nat_obj = CNatConfig(nat_dict)
+ self.router.config_nat(nat_obj)
+
+# self.trex.set_yaml_file('cap2/http_simple.yaml')
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+# trex_res = self.trex.run(nc=False,multiplier = mult, cores = core, duration = 100, l = 1000, learn = True)
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ learn_mode = learn_mode,
+ d = 100,
+ f = 'cap2/http_simple.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ print("\nLATEST DUMP:")
+ print(trex_res.get_latest_dump())
+
+ trex_nat_stats = trex_res.get_last_value("trex-global.data", ".*nat.*") # extract all nat data
+ if self.get_benchmark_param('allow_timeout_dev'):
+ nat_timeout_ratio = float(trex_nat_stats['m_total_nat_time_out']) / trex_nat_stats['m_total_nat_open']
+ if nat_timeout_ratio > 0.005:
+ self.fail('TRex nat_timeout ratio %f exceeds the allowed 0.005 (0.5%%)' % nat_timeout_ratio)
+ else:
+ self.check_results_eq (trex_nat_stats,'m_total_nat_time_out', 0.0)
+ self.check_results_eq (trex_nat_stats,'m_total_nat_no_fid', 0.0)
+ self.check_results_gt (trex_nat_stats,'m_total_nat_open', 6000)
+
+
+ self.check_general_scenario_results(trex_res, check_latency = False) # NAT can cause latency
+## test_norm_cpu = 2*(trex_res.result['total-tx']/(core*trex_res.result['cpu_utilization']))
+# trex_tx_pckt = trex_res.get_last_value("trex-global.data.m_total_tx_bps")
+# cpu_util = int(trex_res.get_last_value("trex-global.data.m_cpu_util"))
+# test_norm_cpu = 2*(trex_tx_pckt/(core*cpu_util))
+# print "test_norm_cpu is: ", test_norm_cpu
+
+ self.check_CPU_benchmark(trex_res, minimal_cpu = 10, maximal_cpu = 85)
+
+ #if ( abs((test_norm_cpu/self.get_benchmark_param('cpu_to_core_ratio')) - 1) > 0.03):
+ # raise AbnormalResultError('Normalized bandwidth to CPU utilization ratio exceeds 3%')
+
+ nat_stats = self.router.get_nat_stats()
+ print(nat_stats)
+
+ self.assert_gt(nat_stats['total_active_trans'], 5000, 'total active translations is not high enough')
+ self.assert_gt(nat_stats['dynamic_active_trans'], 5000, 'total dynamic active translations is not high enough')
+ self.assertEqual(nat_stats['static_active_trans'], 0, "NAT statistics nat_stats['static_active_trans'] should be zero")
+ self.assert_gt(nat_stats['num_of_hits'], 50000, 'total nat hits is not high enough')
+
+ def tearDown(self):
+ self.router.clear_nat_translations()
+ CTRexGeneral_Test.tearDown(self)
+
+
+if __name__ == "__main__":
+ pass
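
Reviewer note: when allow_timeout_dev is set in the benchmark, the NAT tests above tolerate a small fraction of timed-out translations. Illustrative counters:

m_total_nat_open = 100000
m_total_nat_time_out = 300
nat_timeout_ratio = float(m_total_nat_time_out) / m_total_nat_open   # 0.003 <= 0.005 -> accepted
# 600 timeouts would give 0.006 > 0.005 and fail the test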
diff --git a/scripts/automation/regression/stateful_tests/trex_nbar_test.py b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
new file mode 100755
index 00000000..6611ac96
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
@@ -0,0 +1,123 @@
+#!/router/bin/python
+from .trex_general_test import CTRexGeneral_Test
+from .tests_exceptions import *
+from interfaces_e import IFType
+from nose.tools import nottest
+from misc_methods import print_r
+
+class CTRexNbar_Test(CTRexGeneral_Test):
+ """This class defines the NBAR testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ super(CTRexNbar_Test, self).__init__(*args, **kwargs)
+ self.unsupported_modes = ['loopback'] # obviously no NBar in loopback
+
+ def setUp(self):
+ super(CTRexNbar_Test, self).setUp() # launch super test class setUp process
+# self.router.kill_nbar_flows()
+ self.router.clear_cft_counters()
+ self.router.clear_nbar_stats()
+
+ def match_classification (self):
+ nbar_benchmark = self.get_benchmark_param("nbar_classification")
+ test_classification = self.router.get_nbar_stats()
+ print("TEST CLASSIFICATION:")
+ print(test_classification)
+ missmatchFlag = False
+ missmatchMsg = "NBAR classification contains a mismatch on the following protocols:"
+ fmt = '\n\t{0:15} | Expected: {1:>3.2f}%, Got: {2:>3.2f}%'
+ noise_level = 0.045
+
+ for cl_intf in self.router.get_if_manager().get_if_list(if_type = IFType.Client):
+ client_intf = cl_intf.get_name()
+
+ for protocol, bench in nbar_benchmark.items():
+ if protocol != 'total':
+ try:
+ bench = float(bench)
+ protocol = protocol.replace('_','-')
+ protocol_test_res = test_classification[client_intf]['percentage'][protocol]
+ deviation = 100 * abs(bench/protocol_test_res - 1) # percents
+ difference = abs(bench - protocol_test_res)
+ if (deviation > 10 and difference > noise_level): # allowing 10% deviation and 'noise_level'% difference
+ missmatchFlag = True
+ missmatchMsg += fmt.format(protocol, bench, protocol_test_res)
+ except KeyError as e:
+ missmatchFlag = True
+ print(e)
+ print("Setting missmatchFlag to True.", "\n\tProtocol {0} isn't part of classification results on interface {intf}".format(protocol, intf = client_intf))
+ missmatchMsg += "\n\tProtocol {0} isn't part of classification results on interface {intf}".format( protocol, intf = client_intf )
+ except ZeroDivisionError as e:
+ print("ZeroDivisionError: %s" % protocol)
+ pass
+ if missmatchFlag:
+ self.fail(missmatchMsg)
+
+
+ def test_nbar_simple(self):
+ # test initialization
+ deviation_compare_value = 0.03 # default value of deviation - 3%
+ self.router.configure_basic_interfaces()
+
+ self.router.config_pbr(mode = "config")
+ self.router.config_nbar_pd()
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 100,
+ f = 'avl/sfr_delay_10_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ print("\nLATEST DUMP:")
+ print(trex_res.get_latest_dump())
+
+ self.check_general_scenario_results(trex_res, check_latency = False) # NBAR can cause latency
+ self.check_CPU_benchmark(trex_res)
+ self.match_classification()
+
+
+ # the name intentionally does not match nose's default pattern; to include it, the test must be specified explicitly
+ def NBarLong(self):
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+ self.router.config_nbar_pd()
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 18000, # 5 hours
+ f = 'avl/sfr_delay_10_1g.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you may see all the result keys by simply printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res, check_latency = False)
+
+
+ def tearDown(self):
+ CTRexGeneral_Test.tearDown(self)
+ pass
+
+if __name__ == "__main__":
+ pass
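
Reviewer note: match_classification() above reports a protocol only when both the relative deviation exceeds 10% and the absolute difference exceeds the 0.045% noise level. Illustrative numbers:

bench = 2.00                # expected NBAR share, percent
protocol_test_res = 1.80    # measured share, percent
deviation = 100 * abs(bench / protocol_test_res - 1)   # ~11.1% > 10
difference = abs(bench - protocol_test_res)            # 0.20 > 0.045 -> counted as a mismatch
# a protocol at 0.05% expected vs 0.02% measured deviates by 150% but differs by only 0.03,
# below noise_level, so it is ignored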
diff --git a/scripts/automation/regression/stateful_tests/trex_rx_test.py b/scripts/automation/regression/stateful_tests/trex_rx_test.py
new file mode 100755
index 00000000..161856b1
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/trex_rx_test.py
@@ -0,0 +1,280 @@
+#!/router/bin/python
+from .trex_general_test import CTRexGeneral_Test
+from CPlatform import CStaticRouteConfig, CNatConfig
+from .tests_exceptions import *
+#import sys
+import time
+import copy
+from nose.tools import nottest
+import traceback
+
+class CTRexRx_Test(CTRexGeneral_Test):
+ """This class defines the rx testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ CTRexGeneral_Test.__init__(self, *args, **kwargs)
+ self.unsupported_modes = ['virt_nics'] # TODO: fix
+
+ def setUp(self):
+ CTRexGeneral_Test.setUp(self)
+
+
+ def check_rx_errors(self, trex_res, allow_error_tolerance = True):
+ try:
+ # counters to check
+
+ latency_counters_display = {'m_unsup_prot': 0, 'm_no_magic': 0, 'm_no_id': 0, 'm_seq_error': 0, 'm_length_error': 0, 'm_no_ipv4_option': 0, 'm_tx_pkt_err': 0}
+ rx_counters = {'m_err_drop': 0, 'm_err_aged': 0, 'm_err_no_magic': 0, 'm_err_wrong_pkt_id': 0, 'm_err_fif_seen_twice': 0, 'm_err_open_with_no_fif_pkt': 0, 'm_err_oo_dup': 0, 'm_err_oo_early': 0, 'm_err_oo_late': 0, 'm_err_flow_length_changed': 0}
+
+ # get relevant TRex results
+
+ try:
+ ports_names = trex_res.get_last_value('trex-latecny-v2.data', 'port\-\d+')
+ if not ports_names:
+ raise AbnormalResultError('Could not find ports info in TRex results, path: trex-latecny-v2.data.port-*')
+ for port_name in ports_names:
+ path = 'trex-latecny-v2.data.%s.stats' % port_name
+ port_result = trex_res.get_last_value(path)
+ if not port_result:
+ raise AbnormalResultError('Could not find port stats in TRex results, path: %s' % path)
+ for key in latency_counters_display:
+ latency_counters_display[key] += port_result[key]
+
+ # using the -k flag in TRex produces 1 error per port in the latency counter m_seq_error; allow it until the issue is resolved. For comparison, use a dict with the m_seq_error count reduced.
+ latency_counters_compare = copy.deepcopy(latency_counters_display)
+ latency_counters_compare['m_seq_error'] = max(0, latency_counters_compare['m_seq_error'] - len(ports_names))
+
+ path = 'rx-check.data.stats'
+ rx_check_results = trex_res.get_last_value(path)
+ if not rx_check_results:
+ raise AbnormalResultError('No TRex results by path: %s' % path)
+ for key in rx_counters:
+ rx_counters[key] = rx_check_results[key]
+
+ path = 'rx-check.data.stats.m_total_rx'
+ total_rx = trex_res.get_last_value(path)
+ if total_rx is None:
+ raise AbnormalResultError('No TRex results by path: %s' % path)
+ elif not total_rx:
+ raise AbnormalResultError('Total rx_check (%s) packets is zero.' % path)
+
+ print('Total packets checked: %s' % total_rx)
+ print('Latency counters: %s' % latency_counters_display)
+ print('rx_check counters: %s' % rx_counters)
+
+ except KeyError as e:
+ self.fail('Expected key in TRex result was not found.\n%s' % traceback.format_exc())
+
+ # the check: in loopback expect 0 problems; on other setups allow errors up to <error_tolerance>% of total_rx
+
+ total_errors = sum(rx_counters.values()) + sum(latency_counters_compare.values())
+ error_tolerance = self.get_benchmark_param('error_tolerance')
+ if not error_tolerance:
+ if not allow_error_tolerance:
+ error_tolerance = 0
+ else:
+ error_tolerance = 0.1
+ error_percentage = total_errors * 100.0 / total_rx
+
+ if total_errors > 0:
+ if error_percentage > error_tolerance:
+ self.fail('Too many errors in rx_check (~%s%% of traffic)' % error_percentage)
+ else:
+ print('There are errors in rx_check (%f%%), not exceeding the allowed limit (%s%%)' % (error_percentage, error_tolerance))
+ else:
+ print('No errors in rx_check.')
+ except Exception as e:
+ traceback.print_exc()
+ self.fail('Errors in rx_check: %s' % e)
+
+ def test_rx_check_sfr(self):
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = 'config')
+
+ core = self.get_benchmark_param('cores')
+ mult = self.get_benchmark_param('multiplier')
+ sample_rate = self.get_benchmark_param('rx_sample_rate')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ rx_check = sample_rate,
+ d = 100,
+ f = 'avl/sfr_delay_10_1g_no_bundeling.yaml',
+ l = 1000,
+ k = 10,
+ learn_verify = True,
+ l_pkt_mode = 2)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ #print ("\nLATEST DUMP:")
+ #print trex_res.get_latest_dump()
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+ self.check_rx_errors(trex_res)
+
+
+ def test_rx_check_http(self):
+ if not self.is_loopback:
+ # TODO: skip as test_rx_check_http_negative will cover it
+ #self.skip('This test is covered by test_rx_check_http_negative')
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+ core = self.get_benchmark_param('cores')
+ mult = self.get_benchmark_param('multiplier')
+ sample_rate = self.get_benchmark_param('rx_sample_rate')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ rx_check = sample_rate,
+ d = 100,
+ f = 'cap2/http_simple.yaml',
+ l = 1000,
+ k = 10,
+ learn_verify = True,
+ l_pkt_mode = 2)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+ self.check_rx_errors(trex_res)
+
+
+ def test_rx_check_sfr_ipv6(self):
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = 'config')
+ self.router.config_ipv6_pbr(mode = "config")
+
+ core = self.get_benchmark_param('cores')
+ mult = self.get_benchmark_param('multiplier')
+ sample_rate = self.get_benchmark_param('rx_sample_rate')
+
+ ret = self.trex.start_trex(
+ cfg = '/etc/trex_cfg_mac.yaml',
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ rx_check = sample_rate,
+ d = 100,
+ f = 'avl/sfr_delay_10_1g_no_bundeling.yaml',
+ l = 1000,
+ k = 10,
+ ipv6 = True)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ #print ("\nLATEST DUMP:")
+ #print trex_res.get_latest_dump()
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+ self.check_rx_errors(trex_res)
+
+
+ def test_rx_check_http_ipv6(self):
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+ self.router.config_ipv6_pbr(mode = "config")
+
+ core = self.get_benchmark_param('cores')
+ mult = self.get_benchmark_param('multiplier')
+ sample_rate = self.get_benchmark_param('rx_sample_rate')
+
+ ret = self.trex.start_trex(
+ cfg = '/etc/trex_cfg_mac.yaml',
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ rx_check = sample_rate,
+ d = 100,
+ f = 'cap2/http_simple.yaml',
+ l = 1000,
+ k = 10,
+ ipv6 = True)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+ self.check_rx_errors(trex_res)
+
+ #@nottest
+ def test_rx_check_http_negative(self):
+ if self.is_loopback:
+ self.skip('This test uses NAT, not relevant for loopback')
+
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+ core = self.get_benchmark_param('cores')
+ mult = self.get_benchmark_param('multiplier')
+ sample_rate = self.get_benchmark_param('rx_sample_rate')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ rx_check = sample_rate,
+ d = 60,
+ f = 'cap2/http_simple.yaml',
+ l = 1000,
+ k = 10,
+ learn_verify = True,
+ l_pkt_mode = 2)
+
+ print('Run for 40 seconds, expect no errors')
+ trex_res = self.trex.sample_x_seconds(40)
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+ self.check_rx_errors(trex_res)
+
+ print('Run until finish, expect errors')
+ old_errors = copy.deepcopy(self.fail_reasons)
+ nat_dict = self.get_benchmark_param('nat_dict', test_name = 'test_nat_simple_mode1')
+ nat_obj = CNatConfig(nat_dict)
+ self.router.config_nat(nat_obj)
+ self.router.config_zbf()
+ trex_res = self.trex.sample_to_run_finish()
+ self.router.config_no_zbf()
+ self.router.config_no_nat(nat_obj)
+ #self.router.clear_nat_translations()
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ self.check_rx_errors(trex_res, allow_error_tolerance = False)
+ if self.fail_reasons == old_errors:
+ self.fail('Expected errors here, got none.')
+ else:
+ print('Got errors as expected.')
+ self.fail_reasons = old_errors
+
+ def tearDown(self):
+ CTRexGeneral_Test.tearDown(self)
+ pass
+
+if __name__ == "__main__":
+ pass
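
Reviewer note: check_rx_errors() above sums the latency and rx_check error counters and compares them, as a percentage of total_rx, against a tolerance (0.1% by default when tolerance is allowed, 0 when it is not, as in the negative NAT test). Illustrative numbers:

total_rx = 2000000
total_errors = 1500
error_percentage = total_errors * 100.0 / total_rx   # 0.075% -> below the default 0.1% tolerance, only printed
# with allow_error_tolerance=False and no benchmark override, any error fails the check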