author     Yaroslav Brustinov <ybrustin@cisco.com>    2016-06-23 10:37:04 +0300
committer  Yaroslav Brustinov <ybrustin@cisco.com>    2016-06-23 10:37:04 +0300
commit     f2320939a5deec2db2948788479199931e1f9176 (patch)
tree       fc1b12908503d5b7d67cefe34e0c5fb0f908d2a6 /scripts/automation/regression
parent     1eed7e59f23d3ab9b957d9822eefe72877e291da (diff)
parent     d04442ab671f768a1b645fb887d4a9cd575c7852 (diff)
Merge branch 'master' into cpu_per_core

Conflicts:
    scripts/automation/trex_control_plane/server/singleton_daemon.py
Diffstat (limited to 'scripts/automation/regression')
-rw-r--r--  scripts/automation/regression/functional_tests/stl_basic_tests.py  |   6
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_general_test.py  |   6
-rw-r--r--  scripts/automation/regression/stateless_tests/stl_client_test.py   |   2
-rw-r--r--  scripts/automation/regression/stateless_tests/stl_general_test.py  |   3
-rw-r--r--  scripts/automation/regression/stateless_tests/stl_rx_test.py       | 152
-rw-r--r--  scripts/automation/regression/trex.py                              |   2
-rwxr-xr-x  scripts/automation/regression/trex_unit_test.py                    |  45
7 files changed, 195 insertions(+), 21 deletions(-)
diff --git a/scripts/automation/regression/functional_tests/stl_basic_tests.py b/scripts/automation/regression/functional_tests/stl_basic_tests.py
index dbbf2530..a4e28ca9 100644
--- a/scripts/automation/regression/functional_tests/stl_basic_tests.py
+++ b/scripts/automation/regression/functional_tests/stl_basic_tests.py
@@ -119,9 +119,9 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
def run_sim (self, yaml, output, options = "", silent = False, obj = None):
if output:
- user_cmd = "-f {0} -o {1} {2}".format(yaml, output, options)
+ user_cmd = "-f {0} -o {1} {2} -p {3}".format(yaml, output, options, self.scripts_path)
else:
- user_cmd = "-f {0} {1}".format(yaml, options)
+ user_cmd = "-f {0} {1} -p {2}".format(yaml, options, self.scripts_path)
if silent:
user_cmd += " --silent"
@@ -201,6 +201,8 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
["multi_burst_2st_1000pkt.py","-m 1 -l 100",True],
["pcap.py", "-m 1", True],
["pcap_with_vm.py", "-m 1", True],
+ ["flow_stats.py", "-m 1 -l 1", True],
+ ["flow_stats_latency.py", "-m 1 -l 1", True],
# YAML test
["yaml/burst_1000_pkt.yaml","-m 1 -l 100",True],
diff --git a/scripts/automation/regression/stateful_tests/trex_general_test.py b/scripts/automation/regression/stateful_tests/trex_general_test.py
index 86fe93e7..82b1d9d1 100755
--- a/scripts/automation/regression/stateful_tests/trex_general_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_general_test.py
@@ -80,9 +80,9 @@ class CTRexGeneral_Test(unittest.TestCase):
device_cfg.set_tftp_config(CTRexScenario.router_cfg['tftp_config_dict'])
CTRexScenario.router.load_platform_data_from_file(device_cfg)
CTRexScenario.router.launch_connection(device_cfg)
- running_image = CTRexScenario.router.get_running_image_details()['image']
- print('Current router image: %s' % running_image)
if CTRexScenario.router_cfg['forceImageReload']:
+ running_image = CTRexScenario.router.get_running_image_details()['image']
+ print('Current router image: %s' % running_image)
needed_image = device_cfg.get_image_name()
if not CTRexScenario.router.is_image_matches(needed_image):
print('Setting router image: %s' % needed_image)
@@ -96,7 +96,7 @@ class CTRexGeneral_Test(unittest.TestCase):
self.fail('Unable to set router image: %s, current image is: %s' % (needed_image, running_image))
else:
print('Matches needed image: %s' % needed_image)
- CTRexScenario.router_image = running_image
+ CTRexScenario.router_image = running_image
if self.modes:
print(termstyle.green('\t!!!\tRunning with modes: %s, not suitable tests will be skipped.\t!!!' % list(self.modes)))
diff --git a/scripts/automation/regression/stateless_tests/stl_client_test.py b/scripts/automation/regression/stateless_tests/stl_client_test.py
index c5e0c3c6..ed125cde 100644
--- a/scripts/automation/regression/stateless_tests/stl_client_test.py
+++ b/scripts/automation/regression/stateless_tests/stl_client_test.py
@@ -46,6 +46,8 @@ class STLClient_Test(CStlGeneral_Test):
@classmethod
def tearDownClass(cls):
+ if CTRexScenario.stl_init_error:
+ return
# connect back at end of tests
if not cls.is_connected():
CTRexScenario.stl_trex.connect()
diff --git a/scripts/automation/regression/stateless_tests/stl_general_test.py b/scripts/automation/regression/stateless_tests/stl_general_test.py
index 4ff1eec9..82738f96 100644
--- a/scripts/automation/regression/stateless_tests/stl_general_test.py
+++ b/scripts/automation/regression/stateless_tests/stl_general_test.py
@@ -85,7 +85,8 @@ class STLBasic_Test(CStlGeneral_Test):
@nottest
def test_connectivity(self):
if not self.is_loopback:
- CTRexScenario.router.load_clean_config()
+ if CTRexScenario.router_cfg['forceImageReload']:
+ CTRexScenario.router.load_clean_config()
CTRexScenario.router.configure_basic_interfaces()
CTRexScenario.router.config_pbr(mode = "config")
diff --git a/scripts/automation/regression/stateless_tests/stl_rx_test.py b/scripts/automation/regression/stateless_tests/stl_rx_test.py
index 9725e821..84f32081 100644
--- a/scripts/automation/regression/stateless_tests/stl_rx_test.py
+++ b/scripts/automation/regression/stateless_tests/stl_rx_test.py
@@ -3,6 +3,8 @@ from .stl_general_test import CStlGeneral_Test, CTRexScenario
from trex_stl_lib.api import *
import os, sys
+ERROR_LATENCY_TOO_HIGH = 1
+
class STLRX_Test(CStlGeneral_Test):
"""Tests for RX feature"""
@@ -11,8 +13,8 @@ class STLRX_Test(CStlGeneral_Test):
# self.skip('This test makes trex08 and trex09 sick. Fix those ASAP.')
if self.is_virt_nics:
self.skip('Skip this for virtual NICs for now')
- per_driver_params = {"rte_vmxnet3_pmd": [1, 50, 1], "rte_ixgbe_pmd": [30, 5000, 1], "rte_i40e_pmd": [80, 5000, 1],
- "rte_igb_pmd": [80, 500, 1], "rte_em_pmd": [1, 50, 1], "rte_virtio_pmd": [1, 50, 1]}
+ per_driver_params = {"rte_vmxnet3_pmd": [1, 50, 1,False], "rte_ixgbe_pmd": [30, 5000, 1,True,200,400], "rte_i40e_pmd": [80, 5000, 1,True,100,250],
+ "rte_igb_pmd": [80, 500, 1,False], "rte_em_pmd": [1, 50, 1,False], "rte_virtio_pmd": [1, 50, 1,False]}
CStlGeneral_Test.setUp(self)
assert 'bi' in CTRexScenario.stl_ports_map
@@ -22,6 +24,9 @@ class STLRX_Test(CStlGeneral_Test):
self.tx_port, self.rx_port = CTRexScenario.stl_ports_map['bi'][0]
port_info = self.c.get_port_info(ports = self.rx_port)[0]
+ self.speed = port_info['speed']
+
+
cap = port_info['rx']['caps']
if "flow_stats" not in cap or "latency" not in cap:
self.skip('port {0} does not support RX'.format(self.rx_port))
@@ -38,14 +43,55 @@ class STLRX_Test(CStlGeneral_Test):
self.pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/('Your_paylaod_comes_here'))
self.large_pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/('a'*1000))
+ self.pkt_9k = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/('a'*9000))
+
+
+ drv_name=port_info['driver']
+ self.latency_9k_enable=per_driver_params[drv_name][3]
+ if self.latency_9k_enable:
+ self.latency_9k_max_average = per_driver_params[drv_name][4]
+ self.latency_9k_max_latency = per_driver_params[drv_name][5]
+
@classmethod
def tearDownClass(cls):
+ if CTRexScenario.stl_init_error:
+ return
# connect back at end of tests
if not cls.is_connected():
CTRexScenario.stl_trex.connect()
+ def __verify_latency (self, latency_stats,max_latency,max_average):
+
+ error=0;
+ err_latency = latency_stats['err_cntrs']
+ latency = latency_stats['latency']
+
+ for key in err_latency :
+ error +=err_latency[key]
+ if error !=0 :
+ pprint.pprint(err_latency)
+ tmp = 'RX pkts ERROR - one of the error is on'
+ print(tmp)
+ #assert False, tmp
+
+ if latency['average']> max_average:
+ pprint.pprint(latency_stats)
+ tmp = 'Average latency is too high {0} {1} '.format(latency['average'], max_average)
+ print(tmp)
+ return ERROR_LATENCY_TOO_HIGH
+
+ if latency['total_max']> max_latency:
+ pprint.pprint(latency_stats)
+ tmp = 'Max latency is too high {0} {1} '.format(latency['total_max'], max_latency)
+ print(tmp)
+ return ERROR_LATENCY_TOO_HIGH
+
+ return 0
+
+
+
def __verify_flow (self, pg_id, total_pkts, pkt_len, stats):
flow_stats = stats['flow_stats'].get(pg_id)
latency_stats = stats['latency'].get(pg_id)
@@ -78,7 +124,7 @@ class STLRX_Test(CStlGeneral_Test):
tmp = 'TX pkts mismatch - got: {0}, expected: {1}'.format(tx_pkts, total_pkts)
assert False, tmp
- if tx_bytes != (total_pkts * pkt_len):
+ if tx_bytes != (total_pkts * (pkt_len + 4)): # + 4 for ethernet CRC
pprint.pprint(flow_stats)
tmp = 'TX bytes mismatch - got: {0}, expected: {1}'.format(tx_bytes, (total_pkts * pkt_len))
assert False, tmp
@@ -90,7 +136,7 @@ class STLRX_Test(CStlGeneral_Test):
if "rx_bytes" in self.cap:
rx_bytes = flow_stats['rx_bytes'].get(self.rx_port, 0)
- if rx_bytes != (total_pkts * pkt_len) and not self.drops_expected:
+ if rx_bytes != (total_pkts * (pkt_len + 4)) and not self.drops_expected: # +4 for ethernet CRC
pprint.pprint(flow_stats)
tmp = 'RX bytes mismatch - got: {0}, expected: {1}'.format(rx_bytes, (total_pkts * pkt_len))
assert False, tmp
@@ -214,6 +260,104 @@ class STLRX_Test(CStlGeneral_Test):
assert False , '{0}'.format(e)
+
+ def __test_9k_stream(self,pgid,ports,precet,max_latency,avg_latency,duration,pkt_size):
+ my_pg_id=pgid
+ s_ports=ports;
+ all_ports=list(CTRexScenario.stl_ports_map['map'].keys());
+ if ports == None:
+ s_ports=all_ports
+ assert( type(s_ports)==list)
+
+ stream_pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/('a'*pkt_size))
+
+ try:
+ # reset all ports
+ self.c.reset(ports = all_ports)
+
+
+ for pid in s_ports:
+ s1 = STLStream(name = 'rx',
+ packet = self.pkt,
+ flow_stats = STLFlowLatencyStats(pg_id = my_pg_id+pid),
+ mode = STLTXCont(pps = 1000))
+
+ s2 = STLStream(name = 'bulk',
+ packet = stream_pkt,
+ mode = STLTXCont(percentage =precet))
+
+
+ # add both streams to ports
+ self.c.add_streams([s1,s2], ports = [pid])
+
+ self.c.clear_stats()
+
+ self.c.start(ports = s_ports,duration = duration)
+ self.c.wait_on_traffic(ports = s_ports,timeout = duration+10,rx_delay_ms = 100)
+ stats = self.c.get_stats()
+
+ for pid in s_ports:
+ latency_stats = stats['latency'].get(my_pg_id+pid)
+ #pprint.pprint(latency_stats)
+ if self.__verify_latency (latency_stats,max_latency,avg_latency) !=0:
+ return (ERROR_LATENCY_TOO_HIGH);
+
+ return 0
+
+ except STLError as e:
+ assert False , '{0}'.format(e)
+
+
+
+
+
+ # check low latency when you have stream of 9K stream
+ def test_9k_stream(self):
+
+ if self.latency_9k_enable == False:
+ print("SKIP")
+ return
+
+ for i in range(0,5):
+ print("Iteration {0}".format(i));
+ duration=random.randint(10, 70);
+ pgid=random.randint(1, 65000);
+ pkt_size=random.randint(1000, 9000);
+ all_ports = list(CTRexScenario.stl_ports_map['map'].keys());
+
+
+ s_port=random.sample(all_ports, random.randint(1, len(all_ports)) )
+ s_port=sorted(s_port)
+ if self.speed == 40 :
+ # the NIC does not support all full rate in case both port works let's filter odd ports
+ tmp_l=[]
+ for port in s_port:
+ if ((int(port) % 2) ==0):
+ tmp_l.append(port);
+ s_port=tmp_l;
+ if len(s_port)==0:
+ s_port=[0];
+
+ error=1;
+ for j in range(0,5):
+ print(" {4} - duration {0} pgid {1} pkt_size {2} s_port {3} ".format(duration,pgid,pkt_size,s_port,j));
+ if self.__test_9k_stream(pgid,
+ s_port,90,
+ self.latency_9k_max_latency,
+ self.latency_9k_max_average,
+ duration,
+ pkt_size)==0:
+ error=0;
+ break;
+
+ if error:
+ assert False , "Latency too high"
+ else:
+ print("===>Iteration {0} PASS {1}".format(i,j));
+
+
+
+
# this test adds more and more latency streams and re-test with incremental
def test_incremental_latency_streams (self):
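
Editorial note on stl_rx_test.py (not part of the commit): the new trailing entries of per_driver_params are read positionally in setUp() -- index 3 switches the 9k-packet latency test on, indices 4 and 5 are the average and total_max thresholds handed to __verify_latency(). A small sketch that names those fields; the microsecond unit is an assumption (not stated in the hunk), and indices 0-2 are left unnamed because they are consumed elsewhere in the test:

# Sketch only: naming the per_driver_params fields that setUp() reads by index.
from collections import namedtuple

Latency9KParams = namedtuple('Latency9KParams',
                             ['enable',        # index 3: run test_9k_stream at all?
                              'max_average',   # index 4: __verify_latency() average threshold (usec, assumed)
                              'max_latency'])  # index 5: __verify_latency() total_max threshold (usec, assumed)

per_driver_params = {"rte_ixgbe_pmd": [30, 5000, 1, True, 200, 400],
                     "rte_i40e_pmd":  [80, 5000, 1, True, 100, 250],
                     "rte_igb_pmd":   [80,  500, 1, False]}

def latency_9k_params(driver):
    entry = per_driver_params[driver]
    # drivers with enable == False carry no thresholds, mirroring the setUp() logic
    return Latency9KParams(*entry[3:6]) if entry[3] else None

print(latency_9k_params("rte_i40e_pmd"))   # Latency9KParams(enable=True, max_average=100, max_latency=250)
print(latency_9k_params("rte_igb_pmd"))    # None
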
diff --git a/scripts/automation/regression/trex.py b/scripts/automation/regression/trex.py
index a0a1d42d..44f2faba 100644
--- a/scripts/automation/regression/trex.py
+++ b/scripts/automation/regression/trex.py
@@ -38,6 +38,8 @@ class CTRexScenario:
is_copied = False
GAManager = None
no_daemon = False
+ router_image = None
+ debug_image = False
class CTRexRunner:
"""This is an instance for generating a CTRexRunner"""
diff --git a/scripts/automation/regression/trex_unit_test.py b/scripts/automation/regression/trex_unit_test.py
index 0762fc95..915cd682 100755
--- a/scripts/automation/regression/trex_unit_test.py
+++ b/scripts/automation/regression/trex_unit_test.py
@@ -48,6 +48,17 @@ import re
import time
from distutils.dir_util import mkpath
+# override nose's strange representation of setUpClass errors
+def __suite_repr__(self):
+ if hasattr(self.context, '__module__'): # inside class, setUpClass etc.
+ class_repr = nose.suite._strclass(self.context)
+ else: # outside of class, setUpModule etc.
+ class_repr = nose.suite._strclass(self.__class__)
+ return '%s.%s' % (class_repr, getattr(self.context, '__name__', self.context))
+
+nose.suite.ContextSuite.__repr__ = __suite_repr__
+nose.suite.ContextSuite.__str__ = __suite_repr__
+
def check_trex_path(trex_path):
if os.path.isfile('%s/trex_daemon_server' % trex_path):
return os.path.abspath(trex_path)
@@ -132,6 +143,12 @@ class CTRexTestConfiguringPlugin(Plugin):
parser.add_option('--no-daemon', action="store_true", default = False,
dest="no_daemon",
help="Flag that specifies to use running stl server, no need daemons.")
+ parser.add_option('--debug-image', action="store_true", default = False,
+ dest="debug_image",
+ help="Flag that specifies to use t-rex-64-debug as TRex executable.")
+ parser.add_option('--trex-args', action='store', default = '',
+ dest="trex_args",
+ help="Additional TRex arguments (--no-watchdog etc.).")
def configure(self, options, conf):
@@ -168,9 +185,12 @@ class CTRexTestConfiguringPlugin(Plugin):
CTRexScenario.benchmark = self.benchmark
CTRexScenario.modes = set(self.modes)
CTRexScenario.server_logs = self.server_logs
+ CTRexScenario.debug_image = options.debug_image
if not self.no_daemon:
- CTRexScenario.trex = CTRexClient(trex_host = self.configuration.trex['trex_name'],
- verbose = self.json_verbose)
+ CTRexScenario.trex = CTRexClient(trex_host = self.configuration.trex['trex_name'],
+ verbose = self.json_verbose,
+ debug_image = options.debug_image,
+ trex_args = options.trex_args)
if not CTRexScenario.trex.check_master_connectivity():
print('Could not connect to master daemon')
sys.exit(-1)
@@ -202,12 +222,12 @@ class CTRexTestConfiguringPlugin(Plugin):
if not res:
print('Could not restart TRex daemon server')
sys.exit(-1)
+ print('Restarted.')
- trex_cmds = CTRexScenario.trex.get_trex_cmds()
- if trex_cmds:
- if self.kill_running:
- CTRexScenario.trex.kill_all_trexes()
- else:
+ if self.kill_running:
+ CTRexScenario.trex.kill_all_trexes()
+ else:
+ if CTRexScenario.trex.get_trex_cmds():
print('TRex is already running')
sys.exit(-1)
@@ -238,11 +258,11 @@ class CTRexTestConfiguringPlugin(Plugin):
if self.stateful:
CTRexScenario.trex = None
if self.stateless:
- if not self.no_daemon:
+ if self.no_daemon:
+ if CTRexScenario.stl_trex and CTRexScenario.stl_trex.is_connected():
+ CTRexScenario.stl_trex.disconnect()
+ else:
CTRexScenario.trex.force_kill(False)
- if CTRexScenario.stl_trex and CTRexScenario.stl_trex.is_connected():
- CTRexScenario.stl_trex.disconnect()
- #time.sleep(3)
CTRexScenario.stl_trex = None
@@ -256,6 +276,9 @@ def save_setup_info():
setup_info += 'Server: %s, Modes: %s' % (cfg.trex.get('trex_name'), cfg.trex.get('modes'))
if cfg.router:
setup_info += '\nRouter: Model: %s, Image: %s' % (cfg.router.get('model'), CTRexScenario.router_image)
+ if CTRexScenario.debug_image:
+ setup_info += '\nDebug image: %s' % CTRexScenario.debug_image
+
with open('%s/report_%s.info' % (CTRexScenario.report_dir, CTRexScenario.setup_name), 'w') as f:
f.write(setup_info)
except Exception as err:
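
Editorial note on trex_unit_test.py (not part of the commit): the two new command-line options are plain optparse flags whose values are then forwarded to CTRexClient in the hunk above. A self-contained sketch of just the option wiring, with a hypothetical argument list:

# Sketch only: mirrors the two parser.add_option() calls added in the hunk above.
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--debug-image', action='store_true', default=False, dest='debug_image',
                  help='Use t-rex-64-debug as the TRex executable.')
parser.add_option('--trex-args', action='store', default='', dest='trex_args',
                  help='Additional TRex arguments (--no-watchdog etc.).')

options, _ = parser.parse_args(['--debug-image', '--trex-args=--no-watchdog'])
print('%s %s' % (options.debug_image, options.trex_args))   # True --no-watchdog
# trex_unit_test.py then passes these on, as shown in the diff:
#   CTRexClient(trex_host=..., verbose=..., debug_image=options.debug_image,
#               trex_args=options.trex_args)
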