Diffstat (limited to 'scripts/automation/regression/unit_tests')
-rwxr-xr-x  scripts/automation/regression/unit_tests/trex_general_test.py  21
-rwxr-xr-x  scripts/automation/regression/unit_tests/trex_imix_test.py     38
2 files changed, 44 insertions, 15 deletions
diff --git a/scripts/automation/regression/unit_tests/trex_general_test.py b/scripts/automation/regression/unit_tests/trex_general_test.py
index 6a6ad79c..92ece30f 100755
--- a/scripts/automation/regression/unit_tests/trex_general_test.py
+++ b/scripts/automation/regression/unit_tests/trex_general_test.py
@@ -165,12 +165,15 @@ class CTRexGeneral_Test(unittest.TestCase):
             if res[name] != float(val):
                 self.fail('TRex results[%s]==%f and not as expected %f ' % (name, res[name], val))
 
-    def check_CPU_benchmark (self, trex_res, err):
+    def check_CPU_benchmark (self, trex_res, err = 10, minimal_cpu = 30, maximal_cpu = 85):
         #cpu_util = float(trex_res.get_last_value("trex-global.data.m_cpu_util"))
         cpu_util = sum([float(x) for x in trex_res.get_value_list("trex-global.data.m_cpu_util")[-4:-1]]) / 3 # mean of 3 values before last
 
-        if cpu_util < 30 and not self.is_virt_nics:
-            self.fail("CPU is too low (%s%%), can't verify performance in such low CPU%%." % cpu_util )
+        if not self.is_virt_nics:
+            if cpu_util > maximal_cpu:
+                self.fail("CPU is too high (%s%%), probably queue full." % cpu_util )
+            if cpu_util < minimal_cpu:
+                self.fail("CPU is too low (%s%%), can't verify performance in such low CPU%%." % cpu_util )
 
         cores = self.get_benchmark_param('cores')
         trex_tx_bps = trex_res.get_last_value("trex-global.data.m_total_tx_bytes")
@@ -235,17 +238,15 @@ class CTRexGeneral_Test(unittest.TestCase):
         #trex_exp_gbps = trex_exp_rate/(10**9)
 
         if check_latency:
-            # check that max latency does not exceed 1 msec in regular setup or 20ms in VM
-            allowed_latency = 20000 if self.is_VM else 1000
+            # check that max latency does not exceed 1 msec in regular setup or 100ms in VM
+            allowed_latency = 9999999 if self.is_VM else 1000
             if max(trex_res.get_max_latency().values()) > allowed_latency:
-                print 'LatencyError: Maximal latency exceeds %s (usec)' % allowed_latency
-                #raise AbnormalResultError('Maximal latency above 1ms')
+                self.fail('LatencyError: Maximal latency exceeds %s (usec)' % allowed_latency)
 
             # check that avg latency does not exceed 1 msec in regular setup or 3ms in VM
-            allowed_latency = 3000 if self.is_VM else 1000
+            allowed_latency = 9999999 if self.is_VM else 1000
             if max(trex_res.get_avg_latency().values()) > allowed_latency:
-                print 'LatencyError: Average latency exceeds %s (usec)' % allowed_latency
-                #raise AbnormalResultError('Maximal latency above 1ms')
+                self.fail('LatencyError: Average latency exceeds %s (usec)' % allowed_latency)
 
         if not self.is_loopback:
             # check router number of drops --> deliberately masked- need to be figured out!!!!!
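Note on the change above: check_CPU_benchmark now takes overridable bounds (err = 10, minimal_cpu = 30, maximal_cpu = 85) and fails on an over-busy CPU as well as an under-utilized one. A minimal sketch of the new bounds logic, using a hypothetical stub in place of a real CTRexResults object:

    # Sketch only -- mirrors the patched check, outside the test framework.
    class FakeTRexResult(object):
        """Hypothetical stub returning canned CPU-utilization samples."""
        def get_value_list(self, key):
            return [55.0, 60.0, 62.0, 61.0, 10.0]  # last sample is sliced away below

    def check_cpu_bounds(trex_res, minimal_cpu = 30, maximal_cpu = 85):
        # mean of the 3 samples before the last, as in the patched method
        samples = trex_res.get_value_list("trex-global.data.m_cpu_util")[-4:-1]
        cpu_util = sum([float(x) for x in samples]) / 3
        if cpu_util > maximal_cpu:
            raise AssertionError("CPU is too high (%s%%), probably queue full." % cpu_util)
        if cpu_util < minimal_cpu:
            raise AssertionError("CPU is too low (%s%%), can't verify performance." % cpu_util)
        return cpu_util

    print(check_cpu_bounds(FakeTRexResult()))  # 61.0 -- inside the default 30..85 window

The overridable keywords matter for the new jumbo test below, which passes minimal_cpu = 0 and maximal_cpu = 10 because 9k-byte frames fill the pipe at a much lower packet rate, and therefore much lower CPU, than the standard IMIX profiles.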
diff --git a/scripts/automation/regression/unit_tests/trex_imix_test.py b/scripts/automation/regression/unit_tests/trex_imix_test.py
index b56f7f4e..f5ebeb30 100755
--- a/scripts/automation/regression/unit_tests/trex_imix_test.py
+++ b/scripts/automation/regression/unit_tests/trex_imix_test.py
@@ -1,17 +1,16 @@
-
 #!/router/bin/python
 from trex_general_test import CTRexGeneral_Test
 from CPlatform import CStaticRouteConfig
 from tests_exceptions import *
 #import sys
-import time;
+import time
 
 class CTRexIMIX_Test(CTRexGeneral_Test):
     """This class defines the IMIX testcase of the T-Rex traffic generator"""
     def __init__(self, *args, **kwargs):
-        # super(CTRexIMIX_Test, self).__init__()
+        # super(CTRexIMIX_Test, self).__init__()
         CTRexGeneral_Test.__init__(self, *args, **kwargs)
-        pass
+        pass
 
     def setUp(self):
         super(CTRexIMIX_Test, self).setUp() # launch super test class setUp process
@@ -167,9 +166,38 @@ class CTRexIMIX_Test(CTRexGeneral_Test):
 
         self.check_CPU_benchmark(trex_res, 10)
 
+
+    def test_jumbo(self):
+        if not self.is_loopback:
+            self.skip('Verify drops in router') # TODO: verify and remove ASAP
+        self.router.configure_basic_interfaces()
+        self.router.config_pbr(mode = "config")
+
+        mult = self.get_benchmark_param('multiplier')
+        core = self.get_benchmark_param('cores')
+
+        ret = self.trex.start_trex(
+            c = core,
+            m = mult,
+            p = True,
+            nc = True,
+            d = 100,
+            f = 'cap2/imix_9k.yaml',
+            l = 1000)
+
+        trex_res = self.trex.sample_to_run_finish()
+
+        # trex_res is a CTRexResults instance- and contains the summary of the test results
+        # you may see all the results keys by simply calling here for 'print trex_res.result'
+        print ("\nLATEST RESULT OBJECT:")
+        print trex_res
+
+        self.check_general_scenario_results(trex_res)
+        self.check_CPU_benchmark(trex_res, minimal_cpu = 0, maximal_cpu = 10)
+
     def tearDown(self):
         CTRexGeneral_Test.tearDown(self)
-        # remove nbar config here
+        # remove nbar config here
         pass
 
 if __name__ == "__main__":
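For reference, the new test_jumbo follows the same flow as the existing IMIX tests: configure the router, launch traffic via the framework's TRex client wrapper, sample until the run finishes, then validate. A hedged sketch of the launch call with a hypothetical stub client (the kwarg comments reflect the usual t-rex-64 command-line options and are assumptions, not part of the commit):

    class StubTRexClient(object):
        """Hypothetical stand-in for the framework's self.trex wrapper."""
        def start_trex(self, **kwargs):
            # the real wrapper forwards these to the t-rex-64 command line
            print("would launch TRex with %s" % sorted(kwargs.items()))

    StubTRexClient().start_trex(
        c = 2,                    # cores (benchmark 'cores' param)
        m = 1.0,                  # traffic multiplier (benchmark 'multiplier' param)
        p = True,                 # send all of a flow's packets from the same interface
        nc = True,                # no cleanup -- skip flow-close packets at test end
        d = 100,                  # test duration, seconds
        f = 'cap2/imix_9k.yaml',  # jumbo (9k-byte) IMIX traffic profile
        l = 1000)                 # latency stream rate, packets per second

The relaxed CPU window passed afterwards (minimal_cpu = 0, maximal_cpu = 10) fits this profile: jumbo frames carry far more bytes per packet, so the expected per-core load is well below the default 30% floor.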