commit 904eacd9be1230efb7ae0ab7997ec131b588ec8a (patch)
tree   8e4bcd1b1a5f683efdb8f3eeb962acefc3201961 /scripts/automation/regression/stateful_tests
parent d2f1c8451e2e8ffc47b208f68f9b16697d706d60 (diff)
parent b81cdb6c2d6d118c1c346e7c8dae6a5e747d867d (diff)
author    imarom <imarom@cisco.com>  2017-01-22 16:20:45 +0200
committer imarom <imarom@cisco.com>  2017-01-22 16:20:45 +0200

Merge branch 'master' into capture

Signed-off-by: imarom <imarom@cisco.com>

Conflicts:
    scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
    scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_jsonrpc_client.py
    scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py
    src/main_dpdk.cpp
Diffstat (limited to 'scripts/automation/regression/stateful_tests')
-rw-r--r--  scripts/automation/regression/stateful_tests/trex_client_cfg_test.py | 68
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_general_test.py    | 73
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_imix_test.py       | 95
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_nbar_test.py       | 24
4 files changed, 216 insertions(+), 44 deletions(-)
diff --git a/scripts/automation/regression/stateful_tests/trex_client_cfg_test.py b/scripts/automation/regression/stateful_tests/trex_client_cfg_test.py
index 852e745d..158f59b9 100644
--- a/scripts/automation/regression/stateful_tests/trex_client_cfg_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_client_cfg_test.py
@@ -1,5 +1,6 @@
#!/router/bin/python
from .trex_general_test import CTRexGeneral_Test, CTRexScenario
+from .trex_nbar_test import CTRexNbarBase
from CPlatform import CStaticRouteConfig
from .tests_exceptions import *
#import sys
@@ -8,44 +9,71 @@ from nose.tools import nottest
# Testing client cfg ARP resolve. Actually, just need to check that TRex run finished with no errors.
# If resolve will fail, TRex will exit with exit code != 0
-class CTRexClientCfg_Test(CTRexGeneral_Test):
+class CTRexClientCfg_Test(CTRexNbarBase):
"""This class defines the IMIX testcase of the TRex traffic generator"""
def __init__(self, *args, **kwargs):
- # super(CTRexClientCfg_Test, self).__init__()
- CTRexGeneral_Test.__init__(self, *args, **kwargs)
+ CTRexNbarBase.__init__(self, *args, **kwargs)
def setUp(self):
if CTRexScenario.setup_name == 'kiwi02':
self.skip("Can't run currently on kiwi02")
+
super(CTRexClientCfg_Test, self).setUp() # launch super test class setUp process
- pass
- def test_client_cfg(self):
- # test initializtion
+ def test_client_cfg_nbar(self):
if self.is_loopback:
- return
- else:
- self.router.configure_basic_interfaces()
- self.router.config_pbr(mode = "config")
-
- ret = self.trex.start_trex(
- c = 1,
- m = 1,
- d = 10,
- f = 'cap2/dns.yaml',
- v = 3,
+ self.skip('No NBAR on loopback')
+
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+ self.router.config_nbar_pd()
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ nc = True,
+ p = True,
+ d = 100,
+ f = 'avl/sfr_delay_10_1g.yaml',
client_cfg = 'automation/regression/cfg/client_cfg.yaml',
l = 1000)
trex_res = self.trex.sample_to_run_finish()
-
print("\nLATEST RESULT OBJECT:")
print(trex_res)
+ self.check_general_scenario_results(trex_res, check_latency = False) # no latency check with client config
+ self.match_classification()
+
+ def test_client_cfg_vlan(self):
+ if self.is_loopback:
+ self.skip('Not relevant on loopback')
+
+ self.router.configure_basic_interfaces(vlan = True)
+ self.router.config_pbr(mode = "config", vlan = True)
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ nc = True,
+ p = True,
+ d = 60,
+ f = 'cap2/dns.yaml',
+ limit_ports = 4,
+ client_cfg = 'automation/regression/cfg/client_cfg_vlan.yaml')
- self.check_general_scenario_results(trex_res)
+ trex_res = self.trex.sample_to_run_finish()
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+ self.check_general_scenario_results(trex_res, check_latency = False) # no latency check with client config
def tearDown(self):
- CTRexGeneral_Test.tearDown(self)
+ CTRexNbarBase.tearDown(self)
pass
if __name__ == "__main__":
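
For orientation, the rewritten test now exercises the client-config ARP-resolve path under an NBAR-classified SFR profile instead of the old bare DNS run. A condensed sketch of the new flow, built only from the calls visible in the diff above (self.trex, self.router and the benchmark keys are the suite's usual fixtures):

    # Condensed sketch of test_client_cfg_nbar (illustrative, mirrors the diff).
    def client_cfg_nbar_flow(self):
        if self.is_loopback:
            self.skip('No NBAR on loopback')          # NBAR needs a real DUT

        # Router side: basic interfaces, PBR, and NBAR protocol discovery
        self.router.configure_basic_interfaces()
        self.router.config_pbr(mode='config')
        self.router.config_nbar_pd()

        # TRex side: SFR profile, per-setup multiplier/cores, plus a client config
        self.trex.start_trex(
            c=self.get_benchmark_param('cores'),
            m=self.get_benchmark_param('multiplier'),
            nc=True, p=True, d=100,
            f='avl/sfr_delay_10_1g.yaml',
            client_cfg='automation/regression/cfg/client_cfg.yaml',
            l=1000)

        trex_res = self.trex.sample_to_run_finish()   # block until the run ends
        self.check_general_scenario_results(trex_res, check_latency=False)
        self.match_classification()                   # NBAR stats vs. benchmark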
diff --git a/scripts/automation/regression/stateful_tests/trex_general_test.py b/scripts/automation/regression/stateful_tests/trex_general_test.py
index f6d2b917..4453fd94 100755
--- a/scripts/automation/regression/stateful_tests/trex_general_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_general_test.py
@@ -28,6 +28,7 @@ from nose.plugins.skip import SkipTest
import trex
from trex import CTRexScenario
import misc_methods
+import pprint
import sys
import os
# from CPlatformUnderTest import *
@@ -60,6 +61,7 @@ class CTRexGeneral_Test(unittest.TestCase):
self.trex_crashed = CTRexScenario.trex_crashed
self.modes = CTRexScenario.modes
self.GAManager = CTRexScenario.GAManager
+ self.elk = CTRexScenario.elk
self.no_daemon = CTRexScenario.no_daemon
self.skipping = False
self.fail_reasons = []
@@ -70,6 +72,21 @@ class CTRexGeneral_Test(unittest.TestCase):
self.is_VM = True if 'VM' in self.modes else False
if not CTRexScenario.is_init:
+ # update the ELK setup info object
+ if self.elk:
+ setup = CTRexScenario.elk_info['info']['setup']
+
+ if self.is_loopback:
+ setup['dut'] = 'loopback'
+ else:
+ setup['dut'] = 'router'
+
+ if self.is_VM:
+ setup['baremetal'] = False
+ setup['hypervisor'] = 'ESXi' # TBD
+ else:
+ setup['baremetal'] = True
+
if self.trex and not self.no_daemon: # stateful
CTRexScenario.trex_version = self.trex.get_trex_version()
if not self.is_loopback:
@@ -81,8 +98,12 @@ class CTRexGeneral_Test(unittest.TestCase):
CTRexScenario.router.load_platform_data_from_file(device_cfg)
CTRexScenario.router.launch_connection(device_cfg)
if CTRexScenario.router_cfg['forceImageReload']:
- running_image = CTRexScenario.router.get_running_image_details()['image']
+ image_d = CTRexScenario.router.get_running_image_details()
+ running_image = image_d['image']
print('Current router image: %s' % running_image)
+ if self.elk:
+ setup['dut'] = image_d.get('model', 'router')
+ print('Current router model: %s' % setup['dut'])
needed_image = device_cfg.get_image_name()
if not CTRexScenario.router.is_image_matches(needed_image):
print('Setting router image: %s' % needed_image)
@@ -107,7 +128,9 @@ class CTRexGeneral_Test(unittest.TestCase):
# raise RuntimeError('CTRexScenario class is not initialized!')
self.router = CTRexScenario.router
-
+ def get_elk_obj(self):
+ obj = trex.copy_elk_info()
+ return obj
# def assert_dict_eq (self, dict, key, val, error=''):
# v1 = int(dict[key]))
@@ -142,9 +165,11 @@ class CTRexGeneral_Test(unittest.TestCase):
def check_CPU_benchmark (self, trex_res, err = 25, minimal_cpu = 10, maximal_cpu = 85):
cpu_util = trex_res.get_avg_steady_state_value('trex-global.data.m_cpu_util_raw')
trex_tx_bps = trex_res.get_avg_steady_state_value('trex-global.data.m_tx_bps')
+ trex_tx_pps = trex_res.get_avg_steady_state_value('trex-global.data.m_tx_pps')
expected_norm_cpu = self.get_benchmark_param('bw_per_core')
cores = self.get_benchmark_param('cores')
ports_count = trex_res.get_ports_count()
+ total_dp_cores = cores * (ports_count // 2) # data-path cores: per-port-pair cores x port pairs
if not (cpu_util and ports_count and cores):
print("Can't calculate CPU benchmark, need to divide by zero: cpu util: %s, ports: %s, cores: %s" % (cpu_util, ports_count, cores))
test_norm_cpu = -1
@@ -172,16 +197,42 @@ class CTRexGeneral_Test(unittest.TestCase):
#if calc_error_precent > err and cpu_util > 10:
# self.fail('Excepted bw_per_core ratio: %s, got: %g' % (expected_norm_cpu, round(test_norm_cpu)))
+ trex_tx_gbps = trex_tx_bps / 1e9
+ trex_tx_mpps = trex_tx_pps / 1e6
+
+ trex_tx_gbps_pc = trex_tx_gbps * 100.0 / (cpu_util * total_dp_cores)
+ trex_tx_mpps_pc = trex_tx_mpps * 100.0 / (cpu_util * total_dp_cores)
+
+ trex_tx_pckt = trex_res.get_last_value("trex-global.data.m_total_tx_pkts")
+ trex_drops = trex_res.get_total_drops()
+ trex_drop_percent = trex_drops * 100.0 / trex_tx_pckt
+
# report benchmarks
- if self.GAManager:
- try:
- pass
- #setup_test = '%s.%s' % (CTRexScenario.setup_name, self.get_name())
- #self.GAManager.gaAddAction(Event = 'stateful_test', action = setup_test, label = 'bw_per_core', value = int(test_norm_cpu))
- #self.GAManager.gaAddAction(Event = 'stateful_test', action = setup_test, label = 'bw_per_core_exp', value = int(expected_norm_cpu))
- #self.GAManager.emptyAndReportQ()
- except Exception as e:
- print('Sending GA failed: %s' % e)
+ if self.elk:
+ elk_obj = self.get_elk_obj()
+ print("Reporting to elk")
+ elk_obj['test'] = { "name" : self.get_name(),
+ "type" : "stateful",
+ "cores" : total_dp_cores,
+ "cpu%" : cpu_util,
+ "mpps" : trex_tx_mpps,
+ "streams_count" : 1,
+ "mpps_pc" : trex_tx_mpps_pc,
+ "gbps_pc" : trex_tx_gbps_pc,
+ "gbps" : trex_tx_gbps,
+ "kcps" : trex_res.get_last_value("trex-global.data.m_tx_cps") / 1000.0,
+ "avg-pktsize" : round(1000.0 * trex_tx_gbps / (8.0 * trex_tx_mpps)),
+ "latency" : { "min" : min(trex_res.get_min_latency().values()),
+ "max" : max(trex_res.get_max_latency().values()),
+ "avr" : max(trex_res.get_avg_latency().values()),
+ "jitter" : max(trex_res.get_jitter_latency().values()),
+ "max-win" : max(trex_res.get_avg_window_latency().values()),
+ "drop-rate" : trex_drop_percent
+ }
+ }
+ pprint.pprint(elk_obj['test'])
+ self.elk.perf.push_data(elk_obj)
+
def check_results_gt (self, res, name, val):
if res is None:
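
The normalization behind the new *_pc fields deserves a worked example: with total_dp_cores = cores * (ports_count // 2) (cores are allocated per port pair), gbps_pc and mpps_pc project the measured throughput onto a single fully-busy data-path core. A standalone sketch with made-up numbers:

    # Worked example of the per-core normalization added above (made-up inputs).
    cores, ports_count = 4, 4                       # cores per port pair, total ports
    total_dp_cores = cores * (ports_count // 2)     # -> 8 data-path cores

    cpu_util    = 50.0                              # avg raw CPU utilization, percent
    trex_tx_bps = 20e9                              # avg TX rate, bits/sec
    trex_tx_pps = 4e6                               # avg TX rate, packets/sec

    trex_tx_gbps = trex_tx_bps / 1e9                # 20 Gb/s
    trex_tx_mpps = trex_tx_pps / 1e6                # 4 Mpps

    # Project onto one fully-busy DP core:
    gbps_pc = trex_tx_gbps * 100.0 / (cpu_util * total_dp_cores)  # 20*100/(50*8) = 5.0
    mpps_pc = trex_tx_mpps * 100.0 / (cpu_util * total_dp_cores)  # 4*100/(50*8)  = 1.0

    # Average packet size recovered from the two rates (bits -> bytes):
    avg_pktsize = round(1000.0 * trex_tx_gbps / (8.0 * trex_tx_mpps))  # 625 bytes

    print(gbps_pc, mpps_pc, avg_pktsize)            # 5.0 1.0 625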
diff --git a/scripts/automation/regression/stateful_tests/trex_imix_test.py b/scripts/automation/regression/stateful_tests/trex_imix_test.py
index f8fe0ed1..5f52fab7 100755
--- a/scripts/automation/regression/stateful_tests/trex_imix_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_imix_test.py
@@ -18,6 +18,99 @@ class CTRexIMIX_Test(CTRexGeneral_Test):
# self.router.clear_counters()
pass
+ def test_short_flow(self):
+ """ short UDP flow with 64B packets, this test with small number of active flows """
+ # test initializtion
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 30,
+ f = 'cap2/cur_flow.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you can see all the result keys by printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+
+ def test_short_flow_high_active(self):
+ """ short UDP flow with 64B packets, this test with 8M active flows """
+ # test initializtion
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+ active_flows = self.get_benchmark_param('active_flows')
+
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 60,
+ active_flows = active_flows,
+ f = 'cap2/cur_flow.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you can see all the result keys by printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+
+ def test_short_flow_high_active2(self):
+ """ short UDP flow with 64B packets, this test with 8M active flows """
+ # test initializtion
+ if not self.is_loopback:
+ self.router.configure_basic_interfaces()
+ self.router.config_pbr(mode = "config")
+
+ mult = self.get_benchmark_param('multiplier')
+ core = self.get_benchmark_param('cores')
+ active_flows = self.get_benchmark_param('active_flows')
+
+
+ ret = self.trex.start_trex(
+ c = core,
+ m = mult,
+ p = True,
+ nc = True,
+ d = 60,
+ active_flows = active_flows,
+ f = 'cap2/cur_flow_single.yaml',
+ l = 1000)
+
+ trex_res = self.trex.sample_to_run_finish()
+
+ # trex_res is a CTRexResult instance and contains the summary of the test results
+ # you can see all the result keys by printing trex_res.result here
+ print("\nLATEST RESULT OBJECT:")
+ print(trex_res)
+
+ self.check_general_scenario_results(trex_res)
+ self.check_CPU_benchmark(trex_res)
+
def test_routing_imix_64(self):
# test initializtion
if not self.is_loopback:
@@ -112,7 +205,7 @@ class CTRexIMIX_Test(CTRexGeneral_Test):
ret = self.trex.start_trex(
c = core,
m = mult,
- p = True,
+ e = True,
nc = True,
d = 60,
f = 'cap2/imix_fast_1g.yaml',
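
The three new short-flow tests differ only in traffic profile, duration, and the optional active_flows argument; everything else is the suite's standard run-and-check pattern. A hypothetical helper (not part of the patch) makes the shared shape explicit:

    # Hypothetical refactor sketch: the common body of the three short-flow tests.
    # Not in the patch; parameter names follow the start_trex calls above.
    def _run_short_flow(self, profile, duration, active_flows=None):
        if not self.is_loopback:                    # DUT setup only with a router
            self.router.configure_basic_interfaces()
            self.router.config_pbr(mode='config')

        kwargs = dict(c=self.get_benchmark_param('cores'),
                      m=self.get_benchmark_param('multiplier'),
                      p=True, nc=True, d=duration, f=profile, l=1000)
        if active_flows is not None:                # only the high-active variants
            kwargs['active_flows'] = active_flows

        self.trex.start_trex(**kwargs)
        trex_res = self.trex.sample_to_run_finish() # block until the run ends
        self.check_general_scenario_results(trex_res)
        self.check_CPU_benchmark(trex_res)

    # e.g. test_short_flow_high_active reduces to:
    #   self._run_short_flow('cap2/cur_flow.yaml', 60,
    #                        self.get_benchmark_param('active_flows'))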
diff --git a/scripts/automation/regression/stateful_tests/trex_nbar_test.py b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
index 6611ac96..a98da9ac 100755
--- a/scripts/automation/regression/stateful_tests/trex_nbar_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
@@ -5,18 +5,7 @@ from interfaces_e import IFType
from nose.tools import nottest
from misc_methods import print_r
-class CTRexNbar_Test(CTRexGeneral_Test):
- """This class defines the NBAR testcase of the TRex traffic generator"""
- def __init__(self, *args, **kwargs):
- super(CTRexNbar_Test, self).__init__(*args, **kwargs)
- self.unsupported_modes = ['loopback'] # obviously no NBar in loopback
-
- def setUp(self):
- super(CTRexNbar_Test, self).setUp() # launch super test class setUp process
-# self.router.kill_nbar_flows()
- self.router.clear_cft_counters()
- self.router.clear_nbar_stats()
-
+class CTRexNbarBase(CTRexGeneral_Test):
def match_classification (self):
nbar_benchmark = self.get_benchmark_param("nbar_classification")
test_classification = self.router.get_nbar_stats()
@@ -52,6 +41,17 @@ class CTRexNbar_Test(CTRexGeneral_Test):
if missmatchFlag:
self.fail(missmatchMsg)
+class CTRexNbar_Test(CTRexNbarBase):
+ """This class defines the NBAR testcase of the TRex traffic generator"""
+ def __init__(self, *args, **kwargs):
+ super(CTRexNbar_Test, self).__init__(*args, **kwargs)
+ self.unsupported_modes = ['loopback'] # obviously no NBar in loopback
+
+ def setUp(self):
+ super(CTRexNbar_Test, self).setUp() # launch super test class setUp process
+# self.router.kill_nbar_flows()
+ self.router.clear_cft_counters()
+ self.router.clear_nbar_stats()
def test_nbar_simple(self):
# test initializtion
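
match_classification (hoisted unchanged into CTRexNbarBase so the client-cfg test can reuse it) compares the router's NBAR protocol mix against the per-setup benchmark and fails on any out-of-tolerance protocol. A hedged sketch of that comparison; the exact shape of the benchmark entries and of get_nbar_stats() is assumed here, not shown in this diff:

    # Hedged sketch of the classification check. Assumes each benchmark entry is
    # {'name': <protocol>, 'percent': <expected %>, 'epsilon': <tolerance>} and
    # that get_nbar_stats() can be reduced to a {protocol: observed %} mapping.
    def match_classification_sketch(self):
        nbar_benchmark = self.get_benchmark_param('nbar_classification')
        measured = self.router.get_nbar_stats()     # assumed: protocol -> percent

        mismatches = []
        for proto in nbar_benchmark:
            name, expected, eps = proto['name'], proto['percent'], proto['epsilon']
            got = measured.get(name)
            if got is None or abs(got - expected) > eps:
                mismatches.append('%s: expected %s%% +/- %s, got %s'
                                  % (name, expected, eps, got))
        if mismatches:
            self.fail('NBAR classification mismatch:\n' + '\n'.join(mismatches))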