path: root/scripts/automation
author     Hanoh Haim <hhaim@cisco.com>    2016-03-10 19:32:29 +0200
committer  Hanoh Haim <hhaim@cisco.com>    2016-03-10 19:32:29 +0200
commit     71433c48afeddb37e3c5a8e134e701d71b09f869 (patch)
tree       860cab39c447a426287d0c49a4c0da736297ba3b /scripts/automation
parent     2be2f7e96be26fbe6dd6763f2ec97fb248abb330 (diff)
parent     f24d22eb359753255527430cb8a8b759a424a0df (diff)
merge doc
Diffstat (limited to 'scripts/automation')
-rwxr-xr-x  scripts/automation/regression/aggregate_results.py | 115
-rw-r--r--  scripts/automation/regression/functional_tests/config.yaml (renamed from scripts/automation/regression/unit_tests/functional_tests/config.yaml) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/functional_general_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/functional_general_test.py) | 0
-rw-r--r--  scripts/automation/regression/functional_tests/golden/basic_imix_golden.cap (renamed from scripts/automation/regression/stl/golden/basic_imix_golden.cap) | bin 198474 -> 198474 bytes
-rw-r--r--  scripts/automation/regression/functional_tests/golden/basic_imix_vm_golden.cap (renamed from scripts/automation/regression/stl/golden/basic_imix_vm_golden.cap) | bin 316552 -> 316552 bytes
-rw-r--r--  scripts/automation/regression/functional_tests/golden/basic_tuple_gen_golden.cap (renamed from scripts/automation/regression/stl/golden/basic_tuple_gen_golden.cap) | bin 38024 -> 38024 bytes
-rw-r--r--  scripts/automation/regression/functional_tests/golden/udp_590.cap (renamed from scripts/automation/regression/stl/golden/udp_590.cap) | bin 630 -> 630 bytes
-rwxr-xr-x  scripts/automation/regression/functional_tests/hltapi_stream_builder_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/hltapi_stream_builder_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/misc_methods_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/misc_methods_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/pkt_bld_general_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/pkt_bld_general_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/platform_cmd_cache_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_cmd_cache_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/platform_cmd_link_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_cmd_link_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/platform_device_cfg_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_device_cfg_test.py) | 2
-rwxr-xr-x  scripts/automation/regression/functional_tests/platform_dual_if_obj_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_dual_if_obj_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/functional_tests/platform_if_manager_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_if_manager_test.py) | 2
-rwxr-xr-x  scripts/automation/regression/functional_tests/platform_if_obj_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_if_obj_test.py) | 0
-rw-r--r--  scripts/automation/regression/functional_tests/scapy_pkt_builder_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/scapy_pkt_builder_test.py) | 18
-rw-r--r--  scripts/automation/regression/functional_tests/stl_basic_tests.py (renamed from scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py) | 33
-rwxr-xr-x  scripts/automation/regression/functional_unit_tests.py | 78
-rwxr-xr-x  scripts/automation/regression/misc_methods.py | 45
-rwxr-xr-x  scripts/automation/regression/outer_packages.py | 3
-rw-r--r--  scripts/automation/regression/stateful_tests/__init__.py | 0
-rwxr-xr-x  scripts/automation/regression/stateful_tests/tests_exceptions.py (renamed from scripts/automation/regression/unit_tests/tests_exceptions.py) | 0
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_general_test.py (renamed from scripts/automation/regression/unit_tests/trex_general_test.py) | 49
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_imix_test.py (renamed from scripts/automation/regression/unit_tests/trex_imix_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_ipv6_test.py (renamed from scripts/automation/regression/unit_tests/trex_ipv6_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_nat_test.py (renamed from scripts/automation/regression/unit_tests/trex_nat_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_nbar_test.py (renamed from scripts/automation/regression/unit_tests/trex_nbar_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_rx_test.py (renamed from scripts/automation/regression/unit_tests/trex_rx_test.py) | 0
-rwxr-xr-x  scripts/automation/regression/stateless_tests/stl_examples_test.py | 33
-rw-r--r--  scripts/automation/regression/stateless_tests/stl_general_test.py | 68
-rw-r--r--  scripts/automation/regression/trex.py | 83
-rwxr-xr-x  scripts/automation/regression/trex_unit_test.py | 343
-rwxr-xr-x  scripts/automation/regression/unit_tests/__init__.py | 1
-rw-r--r--  scripts/automation/trex_control_plane/stl/console/trex_tui.py | 38
-rw-r--r--  scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py | 85
-rw-r--r--  scripts/automation/trex_control_plane/stl/examples/stl_imix.py | 22
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py | 18
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py | 29
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py | 96
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py | 550
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py | 24
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py | 4
-rwxr-xr-x  scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py | 10
44 files changed, 1157 insertions, 592 deletions
diff --git a/scripts/automation/regression/aggregate_results.py b/scripts/automation/regression/aggregate_results.py
index 01f9ff56..31929d50 100755
--- a/scripts/automation/regression/aggregate_results.py
+++ b/scripts/automation/regression/aggregate_results.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import xml.etree.ElementTree as ET
+import outer_packages
import argparse
import glob
from pprint import pprint
@@ -9,6 +10,13 @@ import copy
import datetime, time
import cPickle as pickle
import subprocess, shlex
+from ansi2html import Ansi2HTMLConverter
+
+converter = Ansi2HTMLConverter(inline = True)
+convert = converter.convert
+
+def ansi2html(text):
+ return convert(text, full = False)
FUNCTIONAL_CATEGORY = 'Functional' # how to display those categories
ERROR_CATEGORY = 'Error'
@@ -27,9 +35,9 @@ def is_functional_test_name(testname):
#if testname.startswith(('platform_', 'misc_methods_', 'vm_', 'payload_gen_', 'pkt_builder_')):
# return True
#return False
- if testname.startswith('unit_tests.'):
- return False
- return True
+ if testname.startswith('functional_tests.'):
+ return True
+ return False
def is_good_status(text):
return text in ('Successful', 'Fixed', 'Passed', 'True', 'Pass')
@@ -56,15 +64,16 @@ def add_th_th(key, value):
# returns <div> with table of tests under given category.
# category - string with name of category
-# hidden - bool, true = <div> is hidden by CSS
# tests - list of tests, derived from aggregated xml report, changed a little to get easily stdout etc.
+# tests_type - stateful or stateless
# category_info_dir - folder to search for category info file
# expanded - bool, false = outputs (stdout etc.) of tests are hidden by CSS
# brief - bool, true = cut some part of tests outputs (useful for errors section with expanded flag)
-def add_category_of_tests(category, tests, hidden = False, category_info_dir = None, expanded = False, brief = False):
+def add_category_of_tests(category, tests, tests_type = None, category_info_dir = None, expanded = False, brief = False):
is_actual_category = category not in (FUNCTIONAL_CATEGORY, ERROR_CATEGORY)
- html_output = '<div style="display:%s;" id="cat_tglr_%s">\n' % ('none' if hidden else 'block', category)
-
+ category_id = '_'.join([category, tests_type]) if tests_type else category
+ category_name = ' '.join([category, tests_type.capitalize()]) if tests_type else category
+ html_output = ''
if is_actual_category:
html_output += '<br><table class="reference">\n'
@@ -80,6 +89,8 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
else:
html_output += add_th_td('Info:', 'No info')
print 'add_category_of_tests: no category info %s' % category_info_file
+ if tests_type:
+ html_output += add_th_td('Tests type:', tests_type.capitalize())
if len(tests):
total_duration = 0.0
for test in tests:
@@ -88,13 +99,13 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
html_output += '</table>\n'
if not len(tests):
- return html_output + pad_tag('<br><font color=red>No tests!</font>', 'b') + '</div>'
+ return html_output + pad_tag('<br><font color=red>No tests!</font>', 'b')
html_output += '<br>\n<table class="reference" width="100%">\n<tr><th align="left">'
if category == ERROR_CATEGORY:
html_output += 'Setup</th><th align="left">Failed tests:'
else:
- html_output += '%s tests:' % category
+ html_output += '%s tests:' % category_name
html_output += '</th><th align="center">Final Result</th>\n<th align="center">Time (s)</th>\n</tr>\n'
for test in tests:
functional_test = is_functional_test_name(test.attrib['name'])
@@ -103,7 +114,7 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
if category == ERROR_CATEGORY:
test_id = ('err_' + test.attrib['classname'] + test.attrib['name']).replace('.', '_')
else:
- test_id = (category + test.attrib['name']).replace('.', '_')
+ test_id = (category_id + test.attrib['name']).replace('.', '_')
if expanded:
html_output += '<tr>\n<th>'
else:
@@ -128,15 +139,21 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
result, result_text = test.attrib.get('result', ('', ''))
if result_text:
+ start_index_errors_stl = result_text.find('STLError: \n******')
+ if start_index_errors_stl > 0:
+ result_text = result_text[start_index_errors_stl:].strip() # cut traceback
start_index_errors = result_text.find('Exception: The test is failed, reasons:')
if start_index_errors > 0:
result_text = result_text[start_index_errors + 10:].strip() # cut traceback
+ result_text = ansi2html(result_text)
result_text = '<b style="color:000080;">%s:</b><br>%s<br><br>' % (result.capitalize(), result_text.replace('\n', '<br>'))
stderr = '' if brief and result_text else test.get('stderr', '')
if stderr:
+ stderr = ansi2html(stderr)
stderr = '<b style="color:000080;"><text color=000080>Stderr</text>:</b><br>%s<br><br>\n' % stderr.replace('\n', '<br>')
stdout = '' if brief and result_text else test.get('stdout', '')
if stdout:
+ stdout = ansi2html(stdout)
if brief: # cut off server logs
stdout = stdout.split('>>>>>>>>>>>>>>>', 1)[0]
stdout = '<b style="color:000080;">Stdout:</b><br>%s<br><br>\n' % stdout.replace('\n', '<br>')
@@ -147,7 +164,7 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
else:
html_output += '<b style="color:000080;">No output</b></td></tr>'
- html_output += '\n</table>\n</div>'
+ html_output += '\n</table>'
return html_output
style_css = """
@@ -292,35 +309,40 @@ if __name__ == '__main__':
##### aggregate results to 1 single tree
aggregated_root = ET.Element('testsuite')
+ test_types = ('functional', 'stateful', 'stateless')
setups = {}
for job in jobs_list:
- xml_file = '%s/report_%s.xml' % (args.input_dir, job)
- if not os.path.exists(xml_file):
- message = '%s referenced in jobs_list.info does not exist!' % xml_file
+ setups[job] = {}
+ for test_type in test_types:
+ xml_file = '%s/report_%s_%s.xml' % (args.input_dir, job, test_type)
+ if not os.path.exists(xml_file):
+ continue
+ if os.path.basename(xml_file) == os.path.basename(args.output_xmlfile):
+ continue
+ setups[job][test_type] = []
+ print('Processing report: %s.%s' % (job, test_type))
+ tree = ET.parse(xml_file)
+ root = tree.getroot()
+ for key, value in root.attrib.items():
+ if key in aggregated_root.attrib and value.isdigit(): # sum total number of failed tests etc.
+ aggregated_root.attrib[key] = str(int(value) + int(aggregated_root.attrib[key]))
+ else:
+ aggregated_root.attrib[key] = value
+ tests = root.getchildren()
+ if not len(tests): # there should be tests:
+ message = 'No tests in xml %s' % xml_file
+ print message
+ #err.append(message)
+ for test in tests:
+ setups[job][test_type].append(test)
+ test.attrib['name'] = test.attrib['classname'] + '.' + test.attrib['name']
+ test.attrib['classname'] = job
+ aggregated_root.append(test)
+ if not sum([len(x) for x in setups[job].values()]):
+ message = 'No reports from setup %s!' % job
print message
err.append(message)
continue
- if os.path.basename(xml_file) == os.path.basename(args.output_xmlfile):
- continue
- setups[job] = []
- print('Processing setup: %s' % job)
- tree = ET.parse(xml_file)
- root = tree.getroot()
- for key, value in root.attrib.items():
- if key in aggregated_root.attrib and value.isdigit(): # sum total number of failed tests etc.
- aggregated_root.attrib[key] = str(int(value) + int(aggregated_root.attrib[key]))
- else:
- aggregated_root.attrib[key] = value
- tests = root.getchildren()
- if not len(tests): # there should be tests:
- message = 'No tests in xml %s' % xml_file
- print message
- err.append(message)
- for test in tests:
- setups[job].append(test)
- test.attrib['name'] = test.attrib['classname'] + '.' + test.attrib['name']
- test.attrib['classname'] = job
- aggregated_root.append(test)
total_tests_count = int(aggregated_root.attrib.get('tests', 0))
error_tests_count = int(aggregated_root.attrib.get('errors', 0))
@@ -426,7 +448,7 @@ if __name__ == '__main__':
if len(error_tests):
html_output += '\n<button onclick=tgl_cat("cat_tglr_{error}")>{error}</button>'.format(error = ERROR_CATEGORY)
# Setups buttons
- for category, tests in setups.items():
+ for category in setups.keys():
category_arr.append(category)
html_output += '\n<button onclick=tgl_cat("cat_tglr_%s")>%s</button>' % (category_arr[-1], category)
# Functional buttons
@@ -436,13 +458,22 @@ if __name__ == '__main__':
# Adding tests
# Error tests
if len(error_tests):
- html_output += add_category_of_tests(ERROR_CATEGORY, error_tests, hidden=False)
+ html_output += '<div style="display:block;" id="cat_tglr_%s">' % ERROR_CATEGORY
+ html_output += add_category_of_tests(ERROR_CATEGORY, error_tests)
+ html_output += '</div>'
# Setups tests
for category, tests in setups.items():
- html_output += add_category_of_tests(category, tests, hidden=True, category_info_dir=args.input_dir)
+ html_output += '<div style="display:none;" id="cat_tglr_%s">' % category
+ if 'stateful' in tests:
+ html_output += add_category_of_tests(category, tests['stateful'], 'stateful', category_info_dir=args.input_dir)
+ if 'stateless' in tests:
+ html_output += add_category_of_tests(category, tests['stateless'], 'stateless', category_info_dir=(None if 'stateful' in tests else args.input_dir))
+ html_output += '</div>'
# Functional tests
if len(functional_tests):
- html_output += add_category_of_tests(FUNCTIONAL_CATEGORY, functional_tests.values(), hidden=True)
+ html_output += '<div style="display:none;" id="cat_tglr_%s">' % FUNCTIONAL_CATEGORY
+ html_output += add_category_of_tests(FUNCTIONAL_CATEGORY, functional_tests.values())
+ html_output += '</div>'
html_output += '\n\n<script type="text/javascript">\n var category_arr = %s\n' % ['cat_tglr_%s' % x for x in category_arr]
html_output += '''
@@ -524,7 +555,7 @@ if __name__ == '__main__':
for test in error_tests:
if test.attrib['classname'] == category:
failing_category = True
- if failing_category or not len(setups[category]):
+ if failing_category or not len(setups[category]) or not sum([len(x) for x in setups[category]]):
mail_output += '<table class="reference_fail" align=left style="Margin-bottom:10;Margin-right:10;">\n'
else:
mail_output += '<table class="reference" align=left style="Margin-bottom:10;Margin-right:10;">\n'
@@ -549,9 +580,9 @@ if __name__ == '__main__':
if len(error_tests) > 5:
mail_output += '\n<font color=red>More than 5 failed tests, showing brief output.<font>\n<br>'
# show only brief version (cut some info)
- mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, hidden=False, expanded=True, brief=True)
+ mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, expanded=True, brief=True)
else:
- mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, hidden=False, expanded=True)
+ mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, expanded=True)
else:
mail_output += '<table><tr style="font-size:120;color:green;font-family:arial"><td>☺</td><td style="font-size:20">All passed.</td></tr></table>\n'
mail_output += '\n</body>\n</html>'
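
Note: the aggregate_results.py change above routes captured stdout/stderr/result text through ansi2html before embedding it in the HTML report, so colored console output from the regression stays readable in a browser. A minimal sketch of that conversion, using the same Ansi2HTMLConverter settings as the patch (inline styles, fragment-only output); the sample ANSI string is only an illustration:

    from ansi2html import Ansi2HTMLConverter

    converter = Ansi2HTMLConverter(inline = True)   # emit style="..." attributes instead of a separate <style> block

    def ansi2html(text):
        # full = False returns just the converted fragment, suitable for dropping into a report table cell
        return converter.convert(text, full = False)

    colored = '\x1b[31mFAIL\x1b[0m imix stream did not match golden cap'
    print(ansi2html(colored))   # 'FAIL' comes back wrapped in a red-styled <span>, the rest is untouched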
diff --git a/scripts/automation/regression/unit_tests/functional_tests/config.yaml b/scripts/automation/regression/functional_tests/config.yaml
index 4f4c7c40..4f4c7c40 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/config.yaml
+++ b/scripts/automation/regression/functional_tests/config.yaml
diff --git a/scripts/automation/regression/unit_tests/functional_tests/functional_general_test.py b/scripts/automation/regression/functional_tests/functional_general_test.py
index 525b58d2..525b58d2 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/functional_general_test.py
+++ b/scripts/automation/regression/functional_tests/functional_general_test.py
diff --git a/scripts/automation/regression/stl/golden/basic_imix_golden.cap b/scripts/automation/regression/functional_tests/golden/basic_imix_golden.cap
index 6ca32299..6ca32299 100644
--- a/scripts/automation/regression/stl/golden/basic_imix_golden.cap
+++ b/scripts/automation/regression/functional_tests/golden/basic_imix_golden.cap
Binary files differ
diff --git a/scripts/automation/regression/stl/golden/basic_imix_vm_golden.cap b/scripts/automation/regression/functional_tests/golden/basic_imix_vm_golden.cap
index 43ae2368..43ae2368 100644
--- a/scripts/automation/regression/stl/golden/basic_imix_vm_golden.cap
+++ b/scripts/automation/regression/functional_tests/golden/basic_imix_vm_golden.cap
Binary files differ
diff --git a/scripts/automation/regression/stl/golden/basic_tuple_gen_golden.cap b/scripts/automation/regression/functional_tests/golden/basic_tuple_gen_golden.cap
index 7d5e7ec2..7d5e7ec2 100644
--- a/scripts/automation/regression/stl/golden/basic_tuple_gen_golden.cap
+++ b/scripts/automation/regression/functional_tests/golden/basic_tuple_gen_golden.cap
Binary files differ
diff --git a/scripts/automation/regression/stl/golden/udp_590.cap b/scripts/automation/regression/functional_tests/golden/udp_590.cap
index 29302f22..29302f22 100644
--- a/scripts/automation/regression/stl/golden/udp_590.cap
+++ b/scripts/automation/regression/functional_tests/golden/udp_590.cap
Binary files differ
diff --git a/scripts/automation/regression/unit_tests/functional_tests/hltapi_stream_builder_test.py b/scripts/automation/regression/functional_tests/hltapi_stream_builder_test.py
index c6b477aa..c6b477aa 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/hltapi_stream_builder_test.py
+++ b/scripts/automation/regression/functional_tests/hltapi_stream_builder_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/misc_methods_test.py b/scripts/automation/regression/functional_tests/misc_methods_test.py
index 096f86d8..096f86d8 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/misc_methods_test.py
+++ b/scripts/automation/regression/functional_tests/misc_methods_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/pkt_bld_general_test.py b/scripts/automation/regression/functional_tests/pkt_bld_general_test.py
index 5f89eaff..5f89eaff 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/pkt_bld_general_test.py
+++ b/scripts/automation/regression/functional_tests/pkt_bld_general_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_cache_test.py b/scripts/automation/regression/functional_tests/platform_cmd_cache_test.py
index 24ccf7a5..24ccf7a5 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_cache_test.py
+++ b/scripts/automation/regression/functional_tests/platform_cmd_cache_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_link_test.py b/scripts/automation/regression/functional_tests/platform_cmd_link_test.py
index 7a31815b..7a31815b 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_link_test.py
+++ b/scripts/automation/regression/functional_tests/platform_cmd_link_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_device_cfg_test.py b/scripts/automation/regression/functional_tests/platform_device_cfg_test.py
index 890d0cb9..3935a4c5 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_device_cfg_test.py
+++ b/scripts/automation/regression/functional_tests/platform_device_cfg_test.py
@@ -9,7 +9,7 @@ from nose.tools import assert_not_equal
class CDeviceCfg_Test(functional_general_test.CGeneralFunctional_Test):
def setUp(self):
- self.dev_cfg = CDeviceCfg('./unit_tests/functional_tests/config.yaml')
+ self.dev_cfg = CDeviceCfg('./functional_tests/config.yaml')
def test_get_interfaces_cfg(self):
assert_equal (self.dev_cfg.get_interfaces_cfg(),
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_dual_if_obj_test.py b/scripts/automation/regression/functional_tests/platform_dual_if_obj_test.py
index ff54b9ee..ff54b9ee 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_dual_if_obj_test.py
+++ b/scripts/automation/regression/functional_tests/platform_dual_if_obj_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_if_manager_test.py b/scripts/automation/regression/functional_tests/platform_if_manager_test.py
index 7ba6e66e..b09e8d75 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_if_manager_test.py
+++ b/scripts/automation/regression/functional_tests/platform_if_manager_test.py
@@ -9,7 +9,7 @@ from nose.tools import assert_not_equal
class CIfManager_Test(functional_general_test.CGeneralFunctional_Test):
def setUp(self):
- self.dev_cfg = CDeviceCfg('./unit_tests/functional_tests/config.yaml')
+ self.dev_cfg = CDeviceCfg('./functional_tests/config.yaml')
self.if_mng = CIfManager()
# main testing method to check the entire class
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_if_obj_test.py b/scripts/automation/regression/functional_tests/platform_if_obj_test.py
index 534d4170..534d4170 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_if_obj_test.py
+++ b/scripts/automation/regression/functional_tests/platform_if_obj_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/scapy_pkt_builder_test.py b/scripts/automation/regression/functional_tests/scapy_pkt_builder_test.py
index 7e2f6271..eaff9530 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/scapy_pkt_builder_test.py
+++ b/scripts/automation/regression/functional_tests/scapy_pkt_builder_test.py
@@ -80,22 +80,22 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
pkt_builder = CScapyTRexPktBuilder(pkt = pkt);
- assert_equal( pkt_builder.is_def_src_mac () ,True)
- assert_equal( pkt_builder.is_def_dst_mac () ,True)
+ assert_equal( pkt_builder.is_default_src_mac () ,True)
+ assert_equal( pkt_builder.is_default_dst_mac () ,True)
pkt = Ether(src="00:00:00:00:00:01")/IP()/UDP()
pkt_builder = CScapyTRexPktBuilder(pkt = pkt);
- assert_equal( pkt_builder.is_def_src_mac (), False)
- assert_equal( pkt_builder.is_def_dst_mac (), True)
+ assert_equal( pkt_builder.is_default_src_mac (), False)
+ assert_equal( pkt_builder.is_default_dst_mac (), True)
pkt = Ether(dst="00:00:00:00:00:01")/IP()/UDP()
pkt_builder = CScapyTRexPktBuilder(pkt = pkt);
- assert_equal( pkt_builder.is_def_src_mac (),True)
- assert_equal( pkt_builder.is_def_dst_mac (),False)
+ assert_equal( pkt_builder.is_default_src_mac (),True)
+ assert_equal( pkt_builder.is_default_dst_mac (),False)
@@ -299,7 +299,7 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
assert_equal(d['instructions'][4]['pkt_offset'],38)
def test_simple_pkt_loader(self):
- p=RawPcapReader("stl/golden/basic_imix_golden.cap")
+ p=RawPcapReader("functional_tests/golden/basic_imix_golden.cap")
print ""
for pkt in p:
print pkt[1]
@@ -308,7 +308,7 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
def test_simple_pkt_loader1(self):
- pkt_builder = CScapyTRexPktBuilder(pkt = "stl/golden/udp_590.cap", build_raw = False);
+ pkt_builder = CScapyTRexPktBuilder(pkt = "functional_tests/golden/udp_590.cap", build_raw = False);
print ""
pkt_builder.dump_as_hex()
r = pkt_builder.pkt_raw
@@ -322,7 +322,7 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
def test_simple_pkt_loader2(self):
- pkt_builder = CScapyTRexPktBuilder(pkt = "stl/golden/basic_imix_golden.cap");
+ pkt_builder = CScapyTRexPktBuilder(pkt = "functional_tests/golden/basic_imix_golden.cap");
assert_equal(pkt_builder.pkt_layers_desc (), "Ethernet:IP:UDP:Raw");
def test_simple_pkt_loader3(self):
diff --git a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py b/scripts/automation/regression/functional_tests/stl_basic_tests.py
index cd653895..ea515401 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
+++ b/scripts/automation/regression/functional_tests/stl_basic_tests.py
@@ -6,9 +6,10 @@ from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import nottest
from nose.plugins.attrib import attr
-from unit_tests.trex_general_test import CTRexScenario
+from trex import CTRexScenario
from dpkt import pcap
from trex_stl_lib import trex_stl_sim
+from trex_stl_lib.trex_stl_streams import STLProfile
import sys
import os
import subprocess
@@ -73,11 +74,11 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
pkts2 = reader2.readpkts()
assert_equal(len(pkts1), len(pkts2))
-
+
for pkt1, pkt2, i in zip(pkts1, pkts2, xrange(1, len(pkts1))):
ts1 = pkt1[0]
ts2 = pkt2[0]
- if abs(ts1-ts2) > 0.000005: # 5 nsec
+ if abs(ts1-ts2) > 0.000005: # 5 nsec
raise AssertionError("TS error: cap files '{0}', '{1}' differ in cap #{2} - '{3}' vs. '{4}'".format(cap1, cap2, i, ts1, ts2))
if pkt1[1] != pkt2[1]:
@@ -102,7 +103,7 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
- def run_py_profile_path (self, profile, options,silent = False, do_no_remove=False,compare =True, test_generated=True):
+ def run_py_profile_path (self, profile, options,silent = False, do_no_remove=False,compare =True, test_generated=True, do_no_remove_generated = False):
output_cap = "a.pcap"
input_file = os.path.join('stl/', profile)
golden_file = os.path.join('exp',os.path.basename(profile).split('.')[0]+'.pcap');
@@ -118,38 +119,42 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
if compare:
self.compare_caps(output_cap, golden_file)
finally:
- if not do_no_remove:
+ if not do_no_remove:
os.unlink(output_cap)
if test_generated:
try:
- from trex_stl_lib.api import STLProfile # if test is skipped, don't load it
generated_filename = input_file.replace('.py', '_GENERATED.py').replace('.yaml', '_GENERATED.py')
if input_file.endswith('.py'):
profile = STLProfile.load_py(input_file)
elif input_file.endswith('.yaml'):
profile = STLProfile.load_yaml(input_file)
profile.dump_to_code(generated_filename)
+
rc = self.run_sim(generated_filename, output_cap, options, silent)
assert_equal(rc, True)
-
+
if compare:
self.compare_caps(output_cap, golden_file)
+ except Exception as e:
+ print e
finally:
- if not do_no_remove:
+ if not do_no_remove_generated:
os.unlink(generated_filename)
+ os.unlink(generated_filename + 'c')
+ if not do_no_remove:
os.unlink(output_cap)
def test_stl_profiles (self):
- p = [
+ p = [
["udp_1pkt_1mac_override.py","-m 1 -l 50",True],
- ["syn_attack.py","-m 1 -l 50",True], # can't compare random now
+ ["syn_attack.py","-m 1 -l 50",True], # can't compare random now
["udp_1pkt_1mac.py","-m 1 -l 50",True],
["udp_1pkt_mac.py","-m 1 -l 50",True],
["udp_1pkt.py","-m 1 -l 50",True],
["udp_1pkt_tuple_gen.py","-m 1 -l 50",True],
- ["udp_rand_len_9k.py","-m 1 -l 50",True], # can't do the compare
+ ["udp_rand_len_9k.py","-m 1 -l 50",True], # can't do the compare
["udp_1pkt_mpls.py","-m 1 -l 50",True],
["udp_1pkt_mpls_vm.py","-m 1 ",True],
["imix.py","-m 1 -l 100",True],
@@ -195,14 +200,14 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
p1 = [ ["udp_1pkt_range_clients_split_garp.py","-m 1 -l 50",True] ]
-
+
for obj in p:
try:
test_generated = obj[3]
except: # check generated if not said otherwise
test_generated = True
- self.run_py_profile_path (obj[0],obj[1],compare =obj[2], test_generated = test_generated, do_no_remove=True)
+ self.run_py_profile_path (obj[0],obj[1],compare =obj[2], test_generated = test_generated, do_no_remove=True, do_no_remove_generated = False)
def test_hlt_profiles (self):
@@ -231,7 +236,7 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
)
for obj in p:
- self.run_py_profile_path (obj[0], obj[1], compare =obj[2], do_no_remove=True)
+ self.run_py_profile_path (obj[0], obj[1], compare =obj[2], do_no_remove=True, do_no_remove_generated = False)
# valgrind tests - this runs in multi thread as it safe (no output)
def test_valgrind_various_profiles (self):
diff --git a/scripts/automation/regression/functional_unit_tests.py b/scripts/automation/regression/functional_unit_tests.py
deleted file mode 100755
index 30e915c4..00000000
--- a/scripts/automation/regression/functional_unit_tests.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/router/bin/python
-
-__copyright__ = "Copyright 2014"
-
-
-
-import os
-import sys
-import outer_packages
-import nose
-from nose.plugins import Plugin
-import logging
-from rednose import RedNose
-import termstyle
-
-
-
-
-def set_report_dir (report_dir):
- if not os.path.exists(report_dir):
- os.mkdir(report_dir)
-
-if __name__ == "__main__":
-
- # setting defaults. By default we run all the test suite
- specific_tests = False
- disableLogCapture = False
- long_test = False
- report_dir = "reports"
-
- nose_argv= sys.argv + ['-s', '-v', '--exe', '--rednose', '--detailed-errors']
-
-# for arg in sys.argv:
-# if 'unit_tests/' in arg:
-# specific_tests = True
-# if 'log-path' in arg:
-# disableLogCapture = True
-# if arg=='--collect-only': # this is a user trying simply to view the available tests. removing xunit param from nose args
-# nose_argv[5:7] = []
-
-
-
- try:
- result = nose.run(argv = nose_argv, addplugins = [RedNose()])
-
- if (result == True):
- print termstyle.green("""
- ..::''''::..
- .;'' ``;.
- :: :: :: ::
- :: :: :: ::
- :: :: :: ::
- :: .:' :: :: `:. ::
- :: : : ::
- :: `:. .:' ::
- `;..``::::''..;'
- ``::,,,,::''
-
- ___ ___ __________
- / _ \/ _ | / __/ __/ /
- / ___/ __ |_\ \_\ \/_/
- /_/ /_/ |_/___/___(_)
-
- """)
- sys.exit(0)
- else:
- sys.exit(-1)
-
- finally:
- pass
-
-
-
-
-
-
-
-
diff --git a/scripts/automation/regression/misc_methods.py b/scripts/automation/regression/misc_methods.py
index 2341b9be..783858e8 100755
--- a/scripts/automation/regression/misc_methods.py
+++ b/scripts/automation/regression/misc_methods.py
@@ -20,29 +20,28 @@ def mix_string (str):
return str.replace(' ', '_').lower()
# executes given command, returns tuple (return_code, stdout, stderr)
-def run_command(cmd):
- print 'Running command:', cmd
- proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- if stdout:
- print 'Stdout:\n%s' % stdout
- if stderr:
- print 'Stderr:\n%s' % stderr
- print 'Return code: %s' % proc.returncode
- return (proc.returncode, stdout, stderr)
-
-
-def run_remote_command(host, passwd, command_string):
- cmd = 'ssh -tt %s \'sudo sh -c "%s"\'' % (host, command_string)
- print 'Trying connection with ssh...'
- return_code, stdout, stderr = run_command(cmd)
- if return_code == 0:
- return (return_code, stdout, stderr)
- elif passwd is not None:
- print 'Trying connection with expect + sshpass.exp...'
- cmd = 'sshpass.exp %s %s root "%s"' % (passwd, host, command_string)
- return_code, stdout, stderr = run_command(cmd)
- return (return_code, stdout, stderr)
+def run_command(cmd, background = False):
+ if background:
+ print 'Running command in background:', cmd
+ with open(os.devnull, 'w') as tempf:
+ subprocess.Popen(shlex.split(cmd), stdin=tempf, stdout=tempf, stderr=tempf)
+ return (None,)*3
+ else:
+ print 'Running command:', cmd
+ proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ if stdout:
+ print 'Stdout:\n%s' % stdout
+ if proc.returncode:
+ if stderr:
+ print 'Stderr:\n%s' % stderr
+ print 'Return code: %s' % proc.returncode
+ return (proc.returncode, stdout, stderr)
+
+
+def run_remote_command(host, command_string, background = False):
+ cmd = 'ssh -tt %s \'sudo sh -ec "%s"\'' % (host, command_string)
+ return run_command(cmd, background)
def generate_intf_lists (interfacesList):
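
Note: the reworked run_command/run_remote_command pair above adds a background flag (detach the child with all descriptors on /dev/null and return immediately) and drops the password argument from run_remote_command in favor of plain ssh. A short usage sketch under those assumptions; the host name and commands below are placeholders, not part of the patch:

    from misc_methods import run_command, run_remote_command

    # foreground call: blocks until the command exits, returns (return_code, stdout, stderr)
    rc, out, err = run_command('ls /tmp')

    # background call: the child keeps running after we return, the result tuple is (None, None, None)
    run_command('./t-rex-64 -i', background = True)

    # remote variant wraps the command in: ssh -tt <host> 'sudo sh -ec "<command>"'
    rc, out, err = run_remote_command('trex-server.example', 'ps -C _t-rex-64')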
diff --git a/scripts/automation/regression/outer_packages.py b/scripts/automation/regression/outer_packages.py
index 6b7c58f9..f55c247d 100755
--- a/scripts/automation/regression/outer_packages.py
+++ b/scripts/automation/regression/outer_packages.py
@@ -11,7 +11,8 @@ PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(TREX_PATH, 'external_libs'))
PATH_TO_CTRL_PLANE = os.path.abspath(os.path.join(TREX_PATH, 'automation', 'trex_control_plane'))
PATH_STL_API = os.path.abspath(os.path.join(PATH_TO_CTRL_PLANE, 'stl'))
-NIGHTLY_MODULES = ['enum34-1.0.4',
+NIGHTLY_MODULES = ['ansi2html',
+ 'enum34-1.0.4',
'nose-1.3.4',
'rednose-0.4.1',
'progressbar-2.2',
diff --git a/scripts/automation/regression/stateful_tests/__init__.py b/scripts/automation/regression/stateful_tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/__init__.py
diff --git a/scripts/automation/regression/unit_tests/tests_exceptions.py b/scripts/automation/regression/stateful_tests/tests_exceptions.py
index 604efcc8..604efcc8 100755
--- a/scripts/automation/regression/unit_tests/tests_exceptions.py
+++ b/scripts/automation/regression/stateful_tests/tests_exceptions.py
diff --git a/scripts/automation/regression/unit_tests/trex_general_test.py b/scripts/automation/regression/stateful_tests/trex_general_test.py
index f367a397..21f5d8aa 100755
--- a/scripts/automation/regression/unit_tests/trex_general_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_general_test.py
@@ -26,6 +26,7 @@ Description:
from nose.plugins import Plugin
from nose.plugins.skip import SkipTest
import trex
+from trex import CTRexScenario
import misc_methods
import sys
import os
@@ -37,50 +38,14 @@ from tests_exceptions import *
from platform_cmd_link import *
import unittest
-
-class CTRexScenario():
- modes = set() # list of modes of this setup: loopback, virtual etc.
- server_logs = False
- is_test_list = False
- is_init = False
- trex_crashed = False
- configuration = None
- trex = None
- router = None
- router_cfg = None
- daemon_log_lines = 0
- setup_name = None
- setup_dir = None
- router_image = None
- trex_version = None
- scripts_path = None
- benchmark = None
- report_dir = 'reports'
- # logger = None
-
-#scenario = CTRexScenario()
-
def setUpModule(module):
-# print ("") # this is to get a newline after the dots
-# print ("setup_module before anything in this file")
-# # ff = CTRexScenario()
-# scenario.configuration = misc_methods.load_complete_config_file('config/config.yaml')
-# scenario.trex = trex.CTRexRunner(scenario.configuration[0], None)
-# scenario.router = CPlatform(scenario.configuration[1], False, scenario.configuration[2])
-# scenario.router.platform.preCheck()
-# print "Done instantiating trex scenario!"
pass
def tearDownModule(module):
-# print ("") # this is to get a newline after the dots
-# scenario.router.platform.postCheck()
-# print ("teardown_module after anything in this file")
pass
-
-
class CTRexGeneral_Test(unittest.TestCase):
- """This class defines the general testcase of the T-Rex traffic generator"""
+ """This class defines the general stateful testcase of the T-Rex traffic generator"""
def __init__ (self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
if CTRexScenario.is_test_list:
@@ -100,7 +65,8 @@ class CTRexGeneral_Test(unittest.TestCase):
self.is_VM = True if 'VM' in self.modes else False
if not CTRexScenario.is_init:
- CTRexScenario.trex_version = self.trex.get_trex_version()
+ if self.trex: # stateful
+ CTRexScenario.trex_version = self.trex.get_trex_version()
if not self.is_loopback:
# initilize the scenario based on received configuration, once per entire testing session
CTRexScenario.router = CPlatform(CTRexScenario.router_cfg['silent_mode'])
@@ -306,12 +272,13 @@ class CTRexGeneral_Test(unittest.TestCase):
test_setup_modes_conflict = self.modes & set(self.unsupported_modes)
if test_setup_modes_conflict:
self.skip("The test can't run with following modes of given setup: %s " % test_setup_modes_conflict)
- if not self.trex.is_idle():
+ if self.trex and not self.trex.is_idle():
print 'Warning: TRex is not idle at setUp, trying to stop it.'
self.trex.force_kill(confirm = False)
if not self.is_loopback:
print ''
- self.router.load_clean_config()
+ if self.trex: # stateful
+ self.router.load_clean_config()
self.router.clear_counters()
self.router.clear_packet_drop_stats()
@@ -324,6 +291,8 @@ class CTRexGeneral_Test(unittest.TestCase):
# def test_isInitialized(self):
# assert CTRexScenario.is_init == True
def tearDown(self):
+ if not self.trex:
+ return
if not self.trex.is_idle():
print 'Warning: TRex is not idle at tearDown, trying to stop it.'
self.trex.force_kill(confirm = False)
diff --git a/scripts/automation/regression/unit_tests/trex_imix_test.py b/scripts/automation/regression/stateful_tests/trex_imix_test.py
index 43dea900..43dea900 100755
--- a/scripts/automation/regression/unit_tests/trex_imix_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_imix_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_ipv6_test.py b/scripts/automation/regression/stateful_tests/trex_ipv6_test.py
index bffb4754..bffb4754 100755
--- a/scripts/automation/regression/unit_tests/trex_ipv6_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_ipv6_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_nat_test.py b/scripts/automation/regression/stateful_tests/trex_nat_test.py
index e7fe5ca5..e7fe5ca5 100755
--- a/scripts/automation/regression/unit_tests/trex_nat_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_nat_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_nbar_test.py b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
index 74d0227b..74d0227b 100755
--- a/scripts/automation/regression/unit_tests/trex_nbar_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_rx_test.py b/scripts/automation/regression/stateful_tests/trex_rx_test.py
index 37b1c722..37b1c722 100755
--- a/scripts/automation/regression/unit_tests/trex_rx_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_rx_test.py
diff --git a/scripts/automation/regression/stateless_tests/stl_examples_test.py b/scripts/automation/regression/stateless_tests/stl_examples_test.py
new file mode 100755
index 00000000..9e4fffc9
--- /dev/null
+++ b/scripts/automation/regression/stateless_tests/stl_examples_test.py
@@ -0,0 +1,33 @@
+#!/router/bin/python
+from stl_general_test import CStlGeneral_Test, CTRexScenario
+import os, sys
+from misc_methods import run_command
+
+class STLExamples_Test(CStlGeneral_Test):
+ """This class defines the IMIX testcase of the T-Rex traffic generator"""
+
+ def setUp(self):
+ CStlGeneral_Test.setUp(self)
+ # examples connect by their own
+ if self.is_connected():
+ CTRexScenario.stl_trex.disconnect()
+
+ @classmethod
+ def tearDownClass(cls):
+ # connect back at end of tests
+ if not cls.is_connected():
+ CTRexScenario.stl_trex.connect()
+
+ def test_stl_examples(self):
+ examples_dir = '../trex_control_plane/stl/examples'
+ examples_to_test = [
+ 'stl_imix.py',
+ ]
+
+ for example in examples_to_test:
+ return_code, stdout, stderr = run_command("sh -c 'cd %s; %s %s -s %s'" % (examples_dir, sys.executable, example, CTRexScenario.configuration.trex['trex_name']))
+ assert return_code == 0, 'example %s failed.\nstdout: %s\nstderr: %s' % (return_code, stdout, stderr)
+
+ def test_stl_examples1(self):
+ print 'in test_stl_examples1'
+
diff --git a/scripts/automation/regression/stateless_tests/stl_general_test.py b/scripts/automation/regression/stateless_tests/stl_general_test.py
new file mode 100644
index 00000000..435c7eea
--- /dev/null
+++ b/scripts/automation/regression/stateless_tests/stl_general_test.py
@@ -0,0 +1,68 @@
+import os, sys
+import unittest
+from trex import CTRexScenario
+from stateful_tests.trex_general_test import CTRexGeneral_Test
+from trex_stl_lib.api import *
+import time
+from nose.tools import nottest
+
+
+class CStlGeneral_Test(CTRexGeneral_Test):
+ """This class defines the general stateless testcase of the T-Rex traffic generator"""
+
+ #once for all tests under CStlGeneral_Test
+ @classmethod
+ def setUpClass(cls):
+ cls.stl_trex = CTRexScenario.stl_trex
+
+ def setUp(self):
+ CTRexGeneral_Test.setUp(self)
+ # check basic requirements, should be verified at test_connectivity, here only skip test
+ if CTRexScenario.stl_init_error:
+ self.skip(CTRexScenario.stl_init_error)
+
+ @staticmethod
+ def connect(timeout = 20):
+ sys.stdout.write('Connecting')
+ for i in range(timeout):
+ try:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ CTRexScenario.stl_trex.connect()
+ return
+ except:
+ time.sleep(1)
+ CTRexScenario.stl_trex.connect()
+
+ @staticmethod
+ def get_port_count():
+ return CTRexScenario.stl_trex.get_port_count()
+
+ @staticmethod
+ def is_connected():
+ return CTRexScenario.stl_trex.is_connected()
+
+class STLBasic_Test(CStlGeneral_Test):
+ # will run it first explicitly, check connectivity and configure routing
+ @nottest
+ def test_connectivity(self):
+ if not self.is_loopback:
+ CTRexScenario.router.load_clean_config()
+ CTRexScenario.router.configure_basic_interfaces()
+ CTRexScenario.router.config_pbr(mode = "config")
+
+ CTRexScenario.stl_init_error = 'Client could not connect'
+ self.connect()
+ print ''
+ try:
+ stl_map_ports(CTRexScenario.stl_trex)
+ except:
+ pass
+ time.sleep(5)
+ CTRexScenario.stl_init_error = 'Client could not map ports'
+ CTRexScenario.stl_ports_map = stl_map_ports(CTRexScenario.stl_trex)
+ CTRexScenario.stl_init_error = 'Could not determine bidirectional ports'
+ print 'Ports mapping: %s' % CTRexScenario.stl_ports_map
+ if not len(CTRexScenario.stl_ports_map['bi']):
+ raise STLError('No bidirectional ports')
+ CTRexScenario.stl_init_error = None
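
Note: CStlGeneral_Test.connect above retries STLClient.connect() once per second for up to timeout seconds, swallowing failures, then makes one final unguarded attempt so the real exception propagates if the server never came up. The same pattern in isolation (the client object and timeout are placeholders, not part of the patch):

    import sys, time

    def connect_with_retry(client, timeout = 20):
        sys.stdout.write('Connecting')
        for _ in range(timeout):
            try:
                sys.stdout.write('.')
                sys.stdout.flush()
                client.connect()
                return
            except Exception:
                time.sleep(1)
        # final attempt outside the loop: if this one fails, the caller sees the original error
        client.connect()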
diff --git a/scripts/automation/regression/trex.py b/scripts/automation/regression/trex.py
index b9fd87ec..8efa41f6 100644
--- a/scripts/automation/regression/trex.py
+++ b/scripts/automation/regression/trex.py
@@ -8,10 +8,35 @@ import re
import signal
import time
from CProgressDisp import TimedProgressBar
-import unit_tests.trex_general_test
-from unit_tests.tests_exceptions import TRexInUseError
+from stateful_tests.tests_exceptions import TRexInUseError
import datetime
+class CTRexScenario:
+ modes = set() # list of modes of this setup: loopback, virtual etc.
+ server_logs = False
+ is_test_list = False
+ is_init = False
+ is_stl_init = False
+ trex_crashed = False
+ configuration = None
+ trex = None
+ stl_trex = None
+ stl_ports_map = None
+ stl_init_error = None
+ router = None
+ router_cfg = None
+ daemon_log_lines = 0
+ setup_name = None
+ setup_dir = None
+ router_image = None
+ trex_version = None
+ scripts_path = None
+ benchmark = None
+ report_dir = 'reports'
+ # logger = None
+ test_types = {'functional_tests': [], 'stateful_tests': [], 'stateless_tests': []}
+ is_copied = False
+
class CTRexRunner:
"""This is an instance for generating a CTRexRunner"""
@@ -67,7 +92,7 @@ class CTRexRunner:
trex_cmd = trex_cmd_str % (cores,
multiplier,
- duration,
+ duration,
self.yaml)
# self.trex_config['trex_latency'])
@@ -81,8 +106,8 @@ class CTRexRunner:
print "\nT-REX COMMAND: ", trex_cmd
- cmd = 'sshpass.exp %s %s root "cd %s; %s > %s"' % (self.trex_config['trex_password'],
- self.trex_config['trex_name'],
+ cmd = 'sshpass.exp %s %s root "cd %s; %s > %s"' % (self.trex_config['trex_password'],
+ self.trex_config['trex_name'],
self.trex_config['trex_version_path'],
trex_cmd,
export_path)
@@ -91,18 +116,18 @@ class CTRexRunner:
def generate_fetch_cmd (self, result_file_full_path="/tmp/trex.txt"):
""" generate_fetch_cmd(self, result_file_full_path) -> str
-
+
Generates a custom command for which will enable to fetch the resutls of the T-Rex run.
Returns a command (string) to be issued on the trex server.
-
+
Example use: fetch_trex_results() - command that will fetch the content from the default log file- /tmp/trex.txt
fetch_trex_results("/tmp/trex_secondary_file.txt") - command that will fetch the content from a custom log file- /tmp/trex_secondary_file.txt
"""
#dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
script_running_dir = os.path.dirname(os.path.realpath(__file__)) # get the current script working directory so that the sshpass could be accessed.
- cmd = script_running_dir + '/sshpass.exp %s %s root "cat %s"' % (self.trex_config['trex_password'],
- self.trex_config['trex_name'],
- result_file_full_path);
+ cmd = script_running_dir + '/sshpass.exp %s %s root "cat %s"' % (self.trex_config['trex_password'],
+ self.trex_config['trex_name'],
+ result_file_full_path);
return cmd;
@@ -153,10 +178,10 @@ class CTRexRunner:
interrupted = True
if ((end_time - start_time) < 2):
raise TRexInUseError ('T-Rex run failed since T-Rex is used by another process, or due to reachability issues')
- else:
- unit_tests.trex_general_test.CTRexScenario.trex_crashed = True
- # results = subprocess.Popen(cmd, stdout = open(os.devnull, 'wb'),
- # shell=True, preexec_fn=os.setsid)
+ else:
+ CTRexScenario.trex_crashed = True
+ # results = subprocess.Popen(cmd, stdout = open(os.devnull, 'wb'),
+ # shell=True, preexec_fn=os.setsid)
except KeyboardInterrupt:
print "\nT-Rex test interrupted by user during traffic generation!!"
results.killpg(results.pid, signal.SIGTERM) # Send the kill signal to all the process groups
@@ -174,7 +199,7 @@ class CTRexRunner:
sys.stderr.flush()
return None
else:
-
+
if tmp_path:
cmd = self.generate_fetch_cmd( tmp_path )#**kwargs)#results_file_path)
else:
@@ -198,7 +223,7 @@ class CTRexResult():
def __init__ (self, file, buffer = None):
self.file = file
self.buffer = buffer
- self.result = {}
+ self.result = {}
def load_file_lines (self):
@@ -230,7 +255,7 @@ class CTRexResult():
Parameters
----------
- key :
+ key :
Key of the self.result dictionary of the TRexResult instance
val : float
Key of the self.result dictionary of the TRexResult instance
@@ -240,8 +265,8 @@ class CTRexResult():
"""
s = _str.strip()
-
- if s[0]=="G":
+
+ if s[0]=="G":
val = val*1E9
elif s[0]=="M":
val = val*1E6
@@ -262,14 +287,14 @@ class CTRexResult():
def parse (self):
""" parse(self) -> None
- Parse the content of the result file from the TRex test and upload the data into
+ Parse the content of the result file from the TRex test and upload the data into
"""
stop_read = False
d = {
- 'total-tx' : 0,
- 'total-rx' : 0,
- 'total-pps' : 0,
- 'total-cps' : 0,
+ 'total-tx' : 0,
+ 'total-rx' : 0,
+ 'total-pps' : 0,
+ 'total-cps' : 0,
'expected-pps' : 0,
'expected-cps' : 0,
@@ -296,7 +321,7 @@ class CTRexResult():
# # continue to parse !! we try the second
# self.result[key] = val #update latest
- # check if we need to stop reading
+ # check if we need to stop reading
match = re.match(".*latency daemon has stopped.*", line)
if match:
stop_read = True
@@ -307,7 +332,7 @@ class CTRexResult():
key = misc_methods.mix_string(match.group(1))
val = float(match.group(4))
if d.has_key(key):
- if stop_read == False:
+ if stop_read == False:
self.update (key, val, match.group(5))
else:
self.result[key] = val # update latest
@@ -321,7 +346,7 @@ class CTRexResult():
key = misc_methods.mix_string(match.group(1))
val = float(match.group(4))
if d.has_key(key):
- if stop_read == False:
+ if stop_read == False:
self.update (key, val, match.group(5))
else:
self.result[key] = val # update latest
@@ -337,7 +362,7 @@ class CTRexResult():
match = re.match("\W*(\w(\w|[-])+)\W*([:]|[=])\W*(OK)(.*)", line)
if match:
key = misc_methods.mix_string(match.group(1))
- val = 0 # valid
+ val = 0 # valid
self.result[key] = val #update latest
continue
@@ -347,7 +372,7 @@ class CTRexResult():
val = float(match.group(3))
if self.result.has_key(key):
if (self.result[key] < val): # update only if larger than previous value
- self.result[key] = val
+ self.result[key] = val
else:
self.result[key] = val
continue
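
Note: CTRexScenario now lives in trex.py (instead of unit_tests.trex_general_test) and is used purely as class-level shared state, so the stateful, stateless and functional suites can exchange clients, the port map and init flags without importing each other. A small sketch of how tests consume it, with attribute names taken from the class above; skip_if_not_ready and record_connection are hypothetical helpers for illustration:

    from trex import CTRexScenario

    def skip_if_not_ready(test):
        # stateless tests do this in setUp: skip (rather than fail) when the client never connected
        if CTRexScenario.stl_init_error:
            test.skip(CTRexScenario.stl_init_error)

    def record_connection(stl_client, ports_map):
        # test_connectivity stores the shared objects once; later tests just read the class attributes
        CTRexScenario.stl_trex = stl_client
        CTRexScenario.stl_ports_map = ports_map
        CTRexScenario.stl_init_error = None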
diff --git a/scripts/automation/regression/trex_unit_test.py b/scripts/automation/regression/trex_unit_test.py
index 1d75a8b6..c90d5bdc 100755
--- a/scripts/automation/regression/trex_unit_test.py
+++ b/scripts/automation/regression/trex_unit_test.py
@@ -34,14 +34,16 @@ import CustomLogger
import misc_methods
from rednose import RedNose
import termstyle
-from unit_tests.trex_general_test import CTRexScenario
+from trex import CTRexScenario
from client.trex_client import *
from common.trex_exceptions import *
+from trex_stl_lib.api import *
import trex
import socket
from pprint import pprint
import subprocess
import re
+import time
def check_trex_path(trex_path):
if os.path.isfile('%s/trex_daemon_server' % trex_path):
@@ -60,34 +62,44 @@ def get_trex_path():
raise Exception('Could not determine trex_under_test folder, try setting env.var. TREX_UNDER_TEST')
return latest_build_path
-def _start_stop_trex_remote_server(trex_data, command):
- # start t-rex server as daemon process
- # subprocess.call(["/usr/bin/python", "trex_daemon_server", "restart"], cwd = trex_latest_build)
- misc_methods.run_remote_command(trex_data['trex_name'],
- trex_data['trex_password'],
- command)
-
-def start_trex_remote_server(trex_data, kill_running = False):
- if kill_running:
- (return_code, stdout, stderr) = misc_methods.run_remote_command(trex_data['trex_name'],
- trex_data['trex_password'],
- 'ps -u root --format comm,pid,cmd | grep t-rex-64')
- if stdout:
- for process in stdout.split('\n'):
- try:
- proc_name, pid, full_cmd = re.split('\s+', process, maxsplit=2)
- if proc_name.find('t-rex-64') >= 0:
- print 'Killing remote process: %s' % full_cmd
- misc_methods.run_remote_command(trex_data['trex_name'],
- trex_data['trex_password'],
- 'kill %s' % pid)
- except:
- continue
-
- _start_stop_trex_remote_server(trex_data, DAEMON_START_COMMAND)
-
-def stop_trex_remote_server(trex_data):
- _start_stop_trex_remote_server(trex_data, DAEMON_STOP_COMMAND)
+STATEFUL_STOP_COMMAND = './trex_daemon_server stop; sleep 1; ./trex_daemon_server stop; sleep 1'
+STATEFUL_RUN_COMMAND = 'rm /var/log/trex/trex_daemon_server.log; ./trex_daemon_server start; sleep 2; ./trex_daemon_server show'
+TREX_FILES = ('_t-rex-64', '_t-rex-64-o', '_t-rex-64-debug', '_t-rex-64-debug-o')
+
+def trex_remote_command(trex_data, command, background = False, from_scripts = True):
+ if from_scripts:
+ return misc_methods.run_remote_command(trex_data['trex_name'], ('cd %s; ' % CTRexScenario.scripts_path)+ command, background)
+ return misc_methods.run_remote_command(trex_data['trex_name'], command, background)
+
+# 1 = running, 0 - not running
+def check_trex_running(trex_data):
+ commands = []
+ for filename in TREX_FILES:
+ commands.append('ps -C %s > /dev/null' % filename)
+ return_code, _, _ = trex_remote_command(trex_data, ' || '.join(commands), from_scripts = False)
+ return not return_code
+
+def kill_trex_process(trex_data):
+ return_code, stdout, _ = trex_remote_command(trex_data, 'ps -u root --format comm,pid,cmd | grep _t-rex-64 | grep -v grep || true', from_scripts = False)
+ assert return_code == 0, 'last remote command failed'
+ if stdout:
+ for process in stdout.split('\n'):
+ try:
+ proc_name, pid, full_cmd = re.split('\s+', process, maxsplit=2)
+ if proc_name.find('t-rex-64') >= 0:
+ print 'Killing remote process: %s' % full_cmd
+ trex_remote_command(trex_data, 'kill %s' % pid, from_scripts = False)
+ except:
+ continue
+
+def address_to_ip(address):
+ for i in range(10):
+ try:
+ return socket.gethostbyname(address)
+ except:
+ continue
+ return socket.gethostbyname(address)
+
class CTRexTestConfiguringPlugin(Plugin):
def options(self, parser, env = os.environ):
@@ -105,74 +117,124 @@ class CTRexTestConfiguringPlugin(Plugin):
dest='log_path',
help='Specify path for the tests` log to be saved at. Once applied, logs capturing by nose will be disabled.') # Default is CURRENT/WORKING/PATH/trex_log/trex_log.log')
parser.add_option('--verbose-mode', '--verbose_mode', action="store_true", default = False,
- dest="verbose_mode",
+ dest="verbose_mode",
help="Print RPC command and router commands.")
parser.add_option('--server-logs', '--server_logs', action="store_true", default = False,
- dest="server_logs",
+ dest="server_logs",
help="Print server side (TRex and trex_daemon) logs per test.")
parser.add_option('--kill-running', '--kill_running', action="store_true", default = False,
- dest="kill_running",
+ dest="kill_running",
help="Kills running TRex process on remote server (useful for regression).")
- parser.add_option('--functional', action="store_true", default = False,
- dest="functional",
- help="Don't connect to remote server for runnning daemon (For functional tests).")
- parser.add_option('--copy', action="store_true", default = False,
- dest="copy",
- help="Copy TRex server to temp directory and run from there.")
+ parser.add_option('--func', '--functional', action="store_true", default = False,
+ dest="functional",
+ help="Run functional tests.")
+ parser.add_option('--stl', '--stateless', action="store_true", default = False,
+ dest="stateless",
+ help="Run stateless tests.")
+ parser.add_option('--stf', '--stateful', action="store_true", default = False,
+ dest="stateful",
+ help="Run stateful tests.")
+ parser.add_option('--pkg', action="store",
+ dest="pkg",
+ help="Run with given TRex package. Make sure the path available at server machine.")
+ parser.add_option('--no-ssh', '--no_ssh', action="store_true", default = False,
+ dest="no_ssh",
+ help="Flag to disable any ssh to server machine.")
def configure(self, options, conf):
- self.functional = options.functional
self.collect_only = options.collect_only
- if self.functional or self.collect_only:
+ if self.collect_only:
+ return
+ self.functional = options.functional
+ self.stateless = options.stateless
+ self.stateful = options.stateful
+ self.pkg = options.pkg
+ self.no_ssh = options.no_ssh
+ self.verbose_mode = options.verbose_mode
+ if self.functional and (not self.pkg or self.no_ssh):
return
if CTRexScenario.setup_dir and options.config_path:
raise Exception('Please either define --cfg or use env. variable SETUP_DIR, not both.')
if not options.config_path and CTRexScenario.setup_dir:
options.config_path = CTRexScenario.setup_dir
- if options.config_path:
- self.configuration = misc_methods.load_complete_config_file(os.path.join(options.config_path, 'config.yaml'))
- self.benchmark = misc_methods.load_benchmark_config_file(os.path.join(options.config_path, 'benchmark.yaml'))
- self.enabled = True
- else:
+ if not options.config_path:
raise Exception('Please specify path to config.yaml using --cfg parameter or env. variable SETUP_DIR')
+ self.configuration = misc_methods.load_complete_config_file(os.path.join(options.config_path, 'config.yaml'))
+ self.configuration.trex['trex_name'] = address_to_ip(self.configuration.trex['trex_name']) # translate hostname to ip
+ self.benchmark = misc_methods.load_benchmark_config_file(os.path.join(options.config_path, 'benchmark.yaml'))
+ self.enabled = True
self.modes = self.configuration.trex.get('modes', [])
self.kill_running = options.kill_running
self.load_image = options.load_image
- self.verbose_mode = options.verbose_mode
self.clean_config = False if options.skip_clean_config else True
self.server_logs = options.server_logs
if options.log_path:
self.loggerPath = options.log_path
-
- def begin (self):
- if self.functional or self.collect_only:
- return
# initialize CTRexScenario global testing class, to be used by all tests
CTRexScenario.configuration = self.configuration
CTRexScenario.benchmark = self.benchmark
CTRexScenario.modes = set(self.modes)
CTRexScenario.server_logs = self.server_logs
- # launch TRex daemon on relevant setup
- start_trex_remote_server(self.configuration.trex, self.kill_running)
- CTRexScenario.trex = CTRexClient(trex_host = self.configuration.trex['trex_name'], verbose = self.verbose_mode)
+ def begin (self):
+ if self.pkg and not CTRexScenario.is_copied and not self.no_ssh:
+ new_path = '/tmp/trex-scripts'
+ rsync_template = 'rm -rf /tmp/trex-scripts; mkdir -p %s; rsync -Lc %s /tmp; tar -mxzf /tmp/%s -C %s; mv %s/v*.*/* %s'
+ rsync_command = rsync_template % (new_path, self.pkg, os.path.basename(self.pkg), new_path, new_path, new_path)
+ return_code, stdout, stderr = trex_remote_command(self.configuration.trex, rsync_command, from_scripts = False)
+ if return_code:
+ print 'Failed copying'
+ sys.exit(-1)
+ CTRexScenario.scripts_path = new_path
+ CTRexScenario.is_copied = True
+ if self.functional or self.collect_only:
+ return
+ # launch TRex daemon on relevant setup
+ if not self.no_ssh:
+ if self.kill_running:
+ if self.stateful:
+ trex_remote_command(self.configuration.trex, STATEFUL_STOP_COMMAND)
+ kill_trex_process(self.configuration.trex)
+ time.sleep(1)
+ elif check_trex_running(self.configuration.trex):
+ print 'TRex is already running'
+ sys.exit(-1)
+
+
+ if self.stateful:
+ if not self.no_ssh:
+ trex_remote_command(self.configuration.trex, STATEFUL_RUN_COMMAND)
+ CTRexScenario.trex = CTRexClient(trex_host = self.configuration.trex['trex_name'], verbose = self.verbose_mode)
+ elif self.stateless:
+ if not self.no_ssh:
+ trex_remote_command(self.configuration.trex, './t-rex-64 -i', background = True)
+ CTRexScenario.stl_trex = STLClient(username = 'TRexRegression',
+ server = self.configuration.trex['trex_name'],
+ verbose_level = self.verbose_mode)
if 'loopback' not in self.modes:
- CTRexScenario.router_cfg = dict( config_dict = self.configuration.router,
- forceImageReload = self.load_image,
- silent_mode = not self.verbose_mode,
- forceCleanConfig = self.clean_config,
- tftp_config_dict = self.configuration.tftp )
+ CTRexScenario.router_cfg = dict(config_dict = self.configuration.router,
+ forceImageReload = self.load_image,
+ silent_mode = not self.verbose_mode,
+ forceCleanConfig = self.clean_config,
+ tftp_config_dict = self.configuration.tftp)
try:
CustomLogger.setup_custom_logger('TRexLogger', self.loggerPath)
except AttributeError:
CustomLogger.setup_custom_logger('TRexLogger')
-
+
def finalize(self, result):
if self.functional or self.collect_only:
return
- CTRexScenario.is_init = False
- stop_trex_remote_server(self.configuration.trex)
+ CTRexScenario.is_init = False
+ if self.stateful:
+ CTRexScenario.trex = None
+ if self.stateless:
+ CTRexScenario.trex_stl = None
+ if not self.no_ssh:
+ if self.stateful:
+ trex_remote_command(self.configuration.trex, STATEFUL_STOP_COMMAND)
+ kill_trex_process(self.configuration.trex)
def save_setup_info():
@@ -195,102 +257,111 @@ def set_report_dir (report_dir):
if not os.path.exists(report_dir):
os.mkdir(report_dir)
-
if __name__ == "__main__":
-
+
# setting defaults. By default we run all the test suite
specific_tests = False
- disableLogCapture = False
- long_test = False
- xml_name = 'unit_test.xml'
CTRexScenario.report_dir = 'reports'
- CTRexScenario.scripts_path = get_trex_path()
- COMMON_RUN_COMMAND = 'rm /var/log/trex/trex_daemon_server.log; ./trex_daemon_server start; sleep 2; ./trex_daemon_server show'
- COMMON_STOP_COMMAND = './trex_daemon_server stop; sleep 1; ./trex_daemon_server stop; sleep 1'
- if '--copy' in sys.argv:
- new_path = '/tmp/trex_scripts'
- DAEMON_STOP_COMMAND = 'cd %s; %s' % (new_path, COMMON_STOP_COMMAND)
- DAEMON_START_COMMAND = 'mkdir -p %s; cd %s; %s; rsync -L -az %s/ %s; %s' % (new_path, new_path, COMMON_STOP_COMMAND,
- CTRexScenario.scripts_path, new_path, COMMON_RUN_COMMAND)
- else:
- DAEMON_STOP_COMMAND = 'cd %s; %s' % (CTRexScenario.scripts_path, COMMON_STOP_COMMAND)
- DAEMON_START_COMMAND = DAEMON_STOP_COMMAND + COMMON_RUN_COMMAND
-
+ need_to_copy = False
setup_dir = os.getenv('SETUP_DIR', '').rstrip('/')
CTRexScenario.setup_dir = check_setup_path(setup_dir)
+ CTRexScenario.scripts_path = get_trex_path()
if not CTRexScenario.setup_dir:
CTRexScenario.setup_dir = check_setup_path(os.path.join('setups', setup_dir))
-
- if CTRexScenario.setup_dir:
- CTRexScenario.setup_name = os.path.basename(CTRexScenario.setup_dir)
- xml_name = 'report_%s.xml' % CTRexScenario.setup_name
+
nose_argv = ['', '-s', '-v', '--exe', '--rednose', '--detailed-errors']
if '--collect-only' in sys.argv: # this is a user trying simply to view the available tests. no need xunit.
- CTRexScenario.is_test_list = True
+ CTRexScenario.is_test_list = True
+ xml_arg = ''
else:
- nose_argv += ['--with-xunit', '--xunit-file=%s/%s' % (CTRexScenario.report_dir, xml_name)]
+ xml_name = 'unit_test.xml'
+ if CTRexScenario.setup_dir:
+ CTRexScenario.setup_name = os.path.basename(CTRexScenario.setup_dir)
+ xml_name = 'report_%s.xml' % CTRexScenario.setup_name
+ xml_arg= '--xunit-file=%s/%s' % (CTRexScenario.report_dir, xml_name)
set_report_dir(CTRexScenario.report_dir)
+ sys_args = sys.argv[:]
for i, arg in enumerate(sys.argv):
- if 'unit_tests/' in arg:
- specific_tests = True
- sys.argv[i] = arg[arg.find('unit_tests/'):]
if 'log-path' in arg:
- disableLogCapture = True
-
- nose_argv += sys.argv
-
- # Run all of the unit tests or just the selected ones
- if not specific_tests:
- if '--functional' in sys.argv:
- nose_argv += ['unit_tests/functional_tests']
+ nose_argv += ['--nologcapture']
else:
- nose_argv += ['unit_tests']
- if disableLogCapture:
- nose_argv += ['--nologcapture']
+ for tests_type in CTRexScenario.test_types.keys():
+ if tests_type in arg:
+ specific_tests = True
+ CTRexScenario.test_types[tests_type].append(arg[arg.find(tests_type):])
+ sys_args.remove(arg)
+ if not specific_tests:
+ for key in ('--func', '--functional'):
+ if key in sys_args:
+ CTRexScenario.test_types['functional_tests'].append('functional_tests')
+ sys_args.remove(key)
+ for key in ('--stf', '--stateful'):
+ if key in sys_args:
+ CTRexScenario.test_types['stateful_tests'].append('stateful_tests')
+ sys_args.remove(key)
+ for key in ('--stl', '--stateless'):
+ if key in sys_args:
+ CTRexScenario.test_types['stateless_tests'].append('stateless_tests')
+ sys_args.remove(key)
+ # Run all of the tests or just the selected ones
+ if not sum([len(x) for x in CTRexScenario.test_types.values()]):
+ for key in CTRexScenario.test_types.keys():
+ CTRexScenario.test_types[key].append(key)
+
+ nose_argv += sys_args
+
+ config_plugin = CTRexTestConfiguringPlugin()
+ red_nose = RedNose()
+ result = True
try:
- config_plugin = CTRexTestConfiguringPlugin()
- red_nose = RedNose()
- try:
- result = nose.run(argv = nose_argv, addplugins = [red_nose, config_plugin])
- except socket.error: # handle consecutive tests exception, try once again
- print "TRex connectivity error identified. Possibly due to consecutive nightly runs.\nRetrying..."
- result = nose.run(argv = nose_argv, addplugins = [red_nose, config_plugin])
- finally:
- save_setup_info()
-
- if (result == True and not CTRexScenario.is_test_list):
- print termstyle.green("""
- ..::''''::..
- .;'' ``;.
- :: :: :: ::
- :: :: :: ::
- :: :: :: ::
- :: .:' :: :: `:. ::
- :: : : ::
- :: `:. .:' ::
- `;..``::::''..;'
- ``::,,,,::''
-
- ___ ___ __________
- / _ \/ _ | / __/ __/ /
- / ___/ __ |_\ \_\ \/_/
- /_/ /_/ |_/___/___(_)
-
- """)
- sys.exit(0)
- else:
- sys.exit(-1)
-
+ if len(CTRexScenario.test_types['functional_tests']):
+ additional_args = ['--func'] + CTRexScenario.test_types['functional_tests']
+ if xml_arg:
+ additional_args += ['--with-xunit', xml_arg.replace('.xml', '_functional.xml')]
+ result = nose.run(argv = nose_argv + additional_args, addplugins = [red_nose, config_plugin])
+ if len(CTRexScenario.test_types['stateful_tests']):
+ additional_args = ['--stf'] + CTRexScenario.test_types['stateful_tests']
+ if xml_arg:
+ additional_args += ['--with-xunit', xml_arg.replace('.xml', '_stateful.xml')]
+ result = result and nose.run(argv = nose_argv + additional_args, addplugins = [red_nose, config_plugin])
+ if len(CTRexScenario.test_types['stateless_tests']):
+ additional_args = ['--stl', 'stateless_tests/stl_general_test.py:STLBasic_Test.test_connectivity'] + CTRexScenario.test_types['stateless_tests']
+ if xml_arg:
+ additional_args += ['--with-xunit', xml_arg.replace('.xml', '_stateless.xml')]
+ result = result and nose.run(argv = nose_argv + additional_args, addplugins = [red_nose, config_plugin])
finally:
- pass
-
-
-
+ save_setup_info()
+
+ if (result == True and not CTRexScenario.is_test_list):
+ print termstyle.green("""
+ ..::''''::..
+ .;'' ``;.
+ :: :: :: ::
+ :: :: :: ::
+ :: :: :: ::
+ :: .:' :: :: `:. ::
+ :: : : ::
+ :: `:. .:' ::
+ `;..``::::''..;'
+ ``::,,,,::''
+
+ ___ ___ __________
+ / _ \/ _ | / __/ __/ /
+ / ___/ __ |_\ \_\ \/_/
+ /_/ /_/ |_/___/___(_)
+
+ """)
+ sys.exit(0)
+ sys.exit(-1)
+
+
+
+
+
-
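Taken together, the runner changes above split one invocation into up to three nose passes (functional, stateful, stateless), each selected by its own flag and written to its own xunit report suffix. A simplified sketch of that dispatch, assuming a test_types dict shaped like the one CTRexScenario keeps (the helper name is invented for illustration):

    # invented helper mirroring the per-type dispatch in the runner above
    def build_nose_runs(test_types, xml_arg):
        runs = []
        for flag, key in (('--func', 'functional_tests'),
                          ('--stf',  'stateful_tests'),
                          ('--stl',  'stateless_tests')):
            if test_types[key]:
                args = [flag] + test_types[key]
                if xml_arg:
                    # one report per test type, e.g. report_<setup>_functional.xml
                    args += ['--with-xunit', xml_arg.replace('.xml', '_%s.xml' % key.split('_')[0])]
                runs.append(args)
        return runs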
diff --git a/scripts/automation/regression/unit_tests/__init__.py b/scripts/automation/regression/unit_tests/__init__.py
deleted file mode 100755
index 8b137891..00000000
--- a/scripts/automation/regression/unit_tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/scripts/automation/trex_control_plane/stl/console/trex_tui.py b/scripts/automation/trex_control_plane/stl/console/trex_tui.py
index f972b905..02b00b78 100644
--- a/scripts/automation/trex_control_plane/stl/console/trex_tui.py
+++ b/scripts/automation/trex_control_plane/stl/console/trex_tui.py
@@ -8,6 +8,7 @@ from cStringIO import StringIO
from trex_stl_lib.utils.text_opts import *
from trex_stl_lib.utils import text_tables
+from trex_stl_lib import trex_stl_stats
# for STL exceptions
from trex_stl_lib.api import *
@@ -217,6 +218,35 @@ class TrexTUIPort(TrexTUIPanel):
self.stateless_client.clear_stats([self.port_id])
return "port {0}: cleared stats".format(self.port_id)
+
+
+# streams stats
+class TrexTUIStreamsStats(TrexTUIPanel):
+ def __init__ (self, mng):
+ super(TrexTUIStreamsStats, self).__init__(mng, "sstats")
+
+ self.key_actions = OrderedDict()
+
+ self.key_actions['c'] = {'action': self.action_clear, 'legend': 'clear', 'show': True}
+
+
+ def show (self):
+ stats = self.stateless_client._get_formatted_stats(port_id_list = None, stats_mask = trex_stl_stats.SS_COMPAT)
+ # print stats to screen
+ for stat_type, stat_data in stats.iteritems():
+ text_tables.print_table_with_header(stat_data.text_table, stat_type)
+ pass
+
+
+ def get_key_actions (self):
+ return self.key_actions
+
+ def action_clear (self):
+ self.stateless_client.flow_stats.clear_stats()
+
+ return ""
+
+
# log
class TrexTUILog():
def __init__ (self):
@@ -247,10 +277,12 @@ class TrexTUIPanelManager():
self.panels = {}
self.panels['dashboard'] = TrexTUIDashBoard(self)
+ self.panels['sstats'] = TrexTUIStreamsStats(self)
self.key_actions = OrderedDict()
self.key_actions['q'] = {'action': self.action_quit, 'legend': 'quit', 'show': True}
self.key_actions['g'] = {'action': self.action_show_dash, 'legend': 'dashboard', 'show': True}
+ self.key_actions['s'] = {'action': self.action_show_sstats, 'legend': 'streams stats', 'show': True}
for port_id in self.ports:
self.key_actions[str(port_id)] = {'action': self.action_show_port(port_id), 'legend': 'port {0}'.format(port_id), 'show': False}
@@ -352,6 +384,10 @@ class TrexTUIPanelManager():
return action_show_port_x
+ def action_show_sstats (self):
+ self.main_panel = self.panels['sstats']
+ self.init(self.show_log)
+ return ""
# shows a textual top style window
class TrexTUI():
@@ -427,7 +463,7 @@ class TrexTUI():
elif self.state == self.STATE_RECONNECT:
try:
- self.stateless_client.connect("RO")
+ self.stateless_client.connect()
self.state = self.STATE_ACTIVE
except STLError:
self.state = self.STATE_LOST_CONT
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py b/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py
index 3708834e..fa6e67c3 100644
--- a/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py
@@ -14,12 +14,14 @@ def rx_example (tx_port, rx_port, burst_size):
try:
pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/IP()/'a_payload_example')
-
total_pkts = burst_size
s1 = STLStream(name = 'rx',
packet = pkt,
flow_stats = STLFlowStats(pg_id = 5),
- mode = STLTXSingleBurst(total_pkts = total_pkts, bps_L2 = 250000000))
+ mode = STLTXSingleBurst(total_pkts = total_pkts,
+ #pps = total_pkts
+ percentage = 80
+ ))
# connect to server
c.connect()
@@ -30,38 +32,14 @@ def rx_example (tx_port, rx_port, burst_size):
# add both streams to ports
c.add_streams([s1], ports = [tx_port])
- print "injecting {0} packets on port {1}\n".format(total_pkts, tx_port)
- c.clear_stats()
- c.start(ports = [tx_port])
- c.wait_on_traffic(ports = [tx_port])
-
- # no error check - just an example... should be 5
- flow_stats = c.get_stats()['flow_stats'][5]
-
- tx_pkts = flow_stats['tx_pkts'][tx_port]
- tx_bytes = flow_stats['tx_bytes'][tx_port]
- rx_pkts = flow_stats['rx_pkts'][rx_port]
-
- if tx_pkts != total_pkts:
- print "TX pkts mismatch - got: {0}, expected: {1}".format(tx_pkts, total_pkts)
- passed = False
- return
- else:
- print "TX pkts match - {0}".format(tx_pkts)
+ print "\ninjecting {0} packets on port {1}\n".format(total_pkts, tx_port)
- if tx_bytes != (total_pkts * pkt.get_pkt_len()):
- print "TX bytes mismatch - got: {0}, expected: {1}".format(tx_bytes, (total_pkts * len(pkt)))
- passed = False
- return
- else:
- print "TX bytes match - {0}".format(tx_bytes)
-
- if rx_pkts != total_pkts:
- print "RX pkts mismatch - got: {0}, expected: {1}".format(rx_pkts, total_pkts)
- passed = False
- return
- else:
- print "RX pkts match - {0}".format(rx_pkts)
+ for i in range(0, 10):
+ print "\nStarting iteration: {0}:".format(i)
+ rc = rx_iteration(c, tx_port, rx_port, total_pkts, pkt.get_pkt_len())
+ if not rc:
+ passed = False
+ break
except STLError as e:
@@ -76,7 +54,46 @@ def rx_example (tx_port, rx_port, burst_size):
else:
print "\nTest has failed :-(\n"
+# RX one iteration
+def rx_iteration (c, tx_port, rx_port, total_pkts, pkt_len):
+
+ c.clear_stats()
+
+ c.start(ports = [tx_port])
+ c.wait_on_traffic(ports = [tx_port])
+
+ flow_stats = c.get_stats()['flow_stats'].get(5)
+ if not flow_stats:
+ print "no flow stats available"
+ return False
+
+ tx_pkts = flow_stats['tx_pkts'].get(tx_port, 0)
+ tx_bytes = flow_stats['tx_bytes'].get(tx_port, 0)
+ rx_pkts = flow_stats['rx_pkts'].get(rx_port, 0)
+
+ if tx_pkts != total_pkts:
+ print "TX pkts mismatch - got: {0}, expected: {1}".format(tx_pkts, total_pkts)
+ pprint.pprint(flow_stats)
+ return False
+ else:
+ print "TX pkts match - {0}".format(tx_pkts)
+
+ if tx_bytes != (total_pkts * pkt_len):
+ print "TX bytes mismatch - got: {0}, expected: {1}".format(tx_bytes, (total_pkts * pkt_len))
+ pprint.pprint(flow_stats)
+ return False
+ else:
+ print "TX bytes match - {0}".format(tx_bytes)
+
+ if rx_pkts != total_pkts:
+ print "RX pkts mismatch - got: {0}, expected: {1}".format(rx_pkts, total_pkts)
+ pprint.pprint(flow_stats)
+ return False
+ else:
+ print "RX pkts match - {0}".format(rx_pkts)
+
+ return True
# run the tests
-rx_example(tx_port = 0, rx_port = 3, burst_size = 500000)
+rx_example(tx_port = 1, rx_port = 2, burst_size = 500000)
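The rx_iteration helper above reads the per-PG flow stats defensively with .get(). For orientation, the entry it expects under get_stats()['flow_stats'] is keyed by pg_id, then by counter, then by port plus a 'total' key, roughly like this (the counter values are invented):

    # illustrative shape only - values are invented
    flow_stats = {
        5: {
            'tx_pkts':  {1: 500000, 'total': 500000},
            'tx_bytes': {1: 33500000, 'total': 33500000},
            'rx_pkts':  {2: 500000, 'total': 500000},
        }
    }
    tx_pkts = flow_stats[5]['tx_pkts'].get(1, 0)   # per-port lookup with a default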
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_imix.py b/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
index cc7691a3..94165614 100644
--- a/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
@@ -4,6 +4,7 @@ from trex_stl_lib.api import *
import time
import json
from pprint import pprint
+import argparse
# IMIX test
# it maps the ports to sides
@@ -11,11 +12,11 @@ from pprint import pprint
# and attach it to both sides and inject
# at a certain rate for some time
# finally it checks that all packets arrived
-def imix_test ():
+def imix_test (server):
# create client
- c = STLClient()
+ c = STLClient(server = server)
passed = True
@@ -48,7 +49,7 @@ def imix_test ():
# choose rate and start traffic for 10 seconds on 5 mpps
duration = 10
- mult = "5mpps"
+ mult = "30%"
print "Injecting {0} <--> {1} on total rate of '{2}' for {3} seconds".format(dir_0, dir_1, mult, duration)
c.start(ports = (dir_0 + dir_1), mult = mult, duration = duration, total = True)
@@ -78,9 +79,9 @@ def imix_test ():
print "Packets injected from {0}: {1:,}".format(dir_1, dir_1_opackets)
print "\npackets lost from {0} --> {1}: {2:,} pkts".format(dir_0, dir_0, lost_0)
- print "packets lost from {0} --> {1}: {2:,} pkts".format(dir_0, dir_0, lost_0)
+ print "packets lost from {0} --> {1}: {2:,} pkts".format(dir_1, dir_1, lost_1)
- if (lost_0 == 0) and (lost_0 == 0):
+ if (lost_0 <= 0) and (lost_1 <= 0): # less or equal because we might have incoming arps etc.
passed = True
else:
passed = False
@@ -95,10 +96,19 @@ def imix_test ():
if passed:
print "\nTest has passed :-)\n"
+ sys.exit(0)
else:
print "\nTest has failed :-(\n"
+ sys.exit(-1)
+parser = argparse.ArgumentParser(description="Example for TRex Stateless, sending IMIX traffic")
+parser.add_argument('-s', '--server',
+ dest='server',
+ help='Remote TRex server address',
+ default='127.0.0.1',
+ type = str)
+args = parser.parse_args()
# run the tests
-imix_test()
+imix_test(args.server)
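One detail worth spelling out in the loss check above: 'lost' is packets injected on one side minus packets received on the other, so stray frames such as ARP replies can push it negative, which is why the comparison is now <= 0 rather than == 0. A tiny illustration (numbers invented):

    # invented numbers for illustration
    dir_0_opackets = 1000000
    dir_1_ipackets = 1000004          # four extra ARP/LLDP frames arrived
    lost_0 = dir_0_opackets - dir_1_ipackets
    assert lost_0 <= 0                # passes, while an == 0 check would fail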
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
index 36103cae..ae6cb497 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
@@ -178,7 +178,8 @@ class CTRexAsyncClient():
self.connected = True
- rc = self.barrier()
+ # sync all stats data as a baseline from the server
+ rc = self.barrier(baseline = True)
if not rc:
self.disconnect()
return rc
@@ -245,9 +246,11 @@ class CTRexAsyncClient():
name = msg['name']
data = msg['data']
type = msg['type']
+ baseline = msg.get('baseline', False)
+
self.raw_snapshot[name] = data
- self.__dispatch(name, type, data)
+ self.__dispatch(name, type, data, baseline)
# closing of socket must be from the same thread
@@ -268,10 +271,11 @@ class CTRexAsyncClient():
return self.raw_snapshot
# dispatch the message to the right place
- def __dispatch (self, name, type, data):
+ def __dispatch (self, name, type, data, baseline):
+
# stats
if name == "trex-global":
- self.event_handler.handle_async_stats_update(data)
+ self.event_handler.handle_async_stats_update(data, baseline)
# events
elif name == "trex-event":
@@ -282,7 +286,7 @@ class CTRexAsyncClient():
self.handle_async_barrier(type, data)
elif name == "flow_stats":
- self.event_handler.handle_async_rx_stats_event(data)
+ self.event_handler.handle_async_rx_stats_event(data, baseline)
else:
pass
@@ -295,7 +299,7 @@ class CTRexAsyncClient():
# block on barrier for async channel
- def barrier(self, timeout = 5):
+ def barrier(self, timeout = 5, baseline = False):
# set a random key
key = random.getrandbits(32)
@@ -307,7 +311,7 @@ class CTRexAsyncClient():
while not self.async_barrier['ack']:
# inject
- rc = self.stateless_client._transmit("publish_now", params = {'key' : key})
+ rc = self.stateless_client._transmit("publish_now", params = {'key' : key, 'baseline': baseline})
if not rc:
return rc
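The baseline flag threaded through the async channel above exists so that every stats object can pin its reference values to a server-published snapshot instead of whatever happens to arrive first. A condensed sketch of the gating this enables on the receiving side (it mirrors, rather than reproduces, the CTRexStats.update logic later in this patch):

    import copy

    class BaselineGatedStats(object):
        # minimal sketch of the baseline gating idea, not the real class
        def __init__(self):
            self.has_baseline = False
            self.reference_stats = {}
            self.latest_stats = {}

        def update(self, snapshot, baseline):
            # drop updates until the baseline snapshot arrives
            if not self.has_baseline and not baseline:
                return
            self.latest_stats = snapshot
            if not self.has_baseline and baseline:
                self.reference_stats = copy.deepcopy(snapshot)
                self.has_baseline = True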
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
index 130fee2c..c7503ab0 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
@@ -155,12 +155,12 @@ class AsyncEventHandler(object):
pass
- def handle_async_rx_stats_event (self, data):
- self.client.flow_stats.update(data)
+ def handle_async_rx_stats_event (self, data, baseline):
+ self.client.flow_stats.update(data, baseline)
# handles an async stats update from the subscriber
- def handle_async_stats_update(self, dump_data):
+ def handle_async_stats_update(self, dump_data, baseline):
global_stats = {}
port_stats = {}
@@ -182,11 +182,11 @@ class AsyncEventHandler(object):
global_stats[key] = value
# update the general object with the snapshot
- self.client.global_stats.update(global_stats)
+ self.client.global_stats.update(global_stats, baseline)
# update all ports
for port_id, data in port_stats.iteritems():
- self.client.ports[port_id].port_stats.update(data)
+ self.client.ports[port_id].port_stats.update(data, baseline)
# dispatcher for server async events (port started, port stopped and etc.)
@@ -458,10 +458,12 @@ class STLClient(object):
self.server_version,
self.ports)
+ self.flow_stats = trex_stl_stats.CRxStats()
+
self.stats_generator = trex_stl_stats.CTRexInfoGenerator(self.global_stats,
- self.ports)
+ self.ports,
+ self.flow_stats)
- self.flow_stats = trex_stl_stats.CRxStats()
############# private functions - used by the class itself ###########
@@ -736,7 +738,7 @@ class STLClient(object):
# clear stats
- def __clear_stats(self, port_id_list, clear_global):
+ def __clear_stats(self, port_id_list, clear_global, clear_flow_stats):
for port_id in port_id_list:
self.ports[port_id].clear_stats()
@@ -744,6 +746,9 @@ class STLClient(object):
if clear_global:
self.global_stats.clear_stats()
+ if clear_flow_stats:
+ self.flow_stats.clear_stats()
+
self.logger.log_cmd("clearing stats on port(s) {0}:".format(port_id_list))
return RC
@@ -825,6 +830,7 @@ class STLClient(object):
self.ports[port_id].invalidate_stats()
self.global_stats.invalidate()
+ self.flow_stats.invalidate()
return RC_OK()
@@ -1697,7 +1703,7 @@ class STLClient(object):
@__api_check(False)
- def clear_stats (self, ports = None, clear_global = True):
+ def clear_stats (self, ports = None, clear_global = True, clear_flow_stats = True):
"""
clear stats on port(s)
@@ -1708,6 +1714,9 @@ class STLClient(object):
clear_global : bool
clear the global stats
+ clear_flow_stats : bool
+ clear the flow stats
+
:raises:
+ :exc:`STLError`
@@ -1721,7 +1730,7 @@ class STLClient(object):
raise STLArgumentError('clear_global', clear_global)
- rc = self.__clear_stats(ports, clear_global)
+ rc = self.__clear_stats(ports, clear_global, clear_flow_stats)
if not rc:
raise STLError(rc)
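With the extra argument above, callers can clear port and global counters while keeping the per-stream (flow) counters, or the other way around. A usage sketch, assuming c is an already-connected STLClient:

    # hypothetical usage; 'c' is assumed to be a connected STLClient
    c.clear_stats()                                     # ports + global + flow stats
    c.clear_stats(ports = [0, 1], clear_global = False,
                  clear_flow_stats = False)             # only port counters on 0 and 1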
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
index 59a047ec..eac12ebb 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
@@ -1,7 +1,7 @@
import random
import string
import struct
-import socket
+import socket
import json
import yaml
import binascii
@@ -50,13 +50,13 @@ def mac_str_to_num (mac_buffer):
def is_valid_ipv4(ip_addr):
"""
- return buffer in network order
+ return buffer in network order
"""
if type(ip_addr)==str and len(ip_addr) == 4:
return ip_addr
if type(ip_addr)==int :
- ip_addr = socket.inet_ntoa(struct.pack("!I", ip_addr))
+ ip_addr = socket.inet_ntoa(struct.pack("!I", ip_addr))
try:
return socket.inet_pton(socket.AF_INET, ip_addr)
@@ -81,7 +81,7 @@ def is_valid_ipv6(ipv6_addr):
class CTRexScriptsBase(object):
"""
- VM Script base class
+ VM Script base class
"""
def clone (self):
return copy.deepcopy(self)
@@ -105,7 +105,7 @@ class CTRexScFieldRangeValue(CTRexScFieldRangeBase):
"""
range of field value
"""
- def __init__(self, field_name,
+ def __init__(self, field_name,
field_type,
min_value,
max_value
@@ -135,7 +135,7 @@ class CTRexScIpv4SimpleRange(CTRexScFieldRangeBase):
class CTRexScIpv4TupleGen(CTRexScriptsBase):
"""
- range tuple
+ range tuple
"""
FLAGS_ULIMIT_FLOWS =1
@@ -157,7 +157,7 @@ class CTRexScIpv4TupleGen(CTRexScriptsBase):
class CTRexScTrimPacketSize(CTRexScriptsBase):
"""
- trim packet size. field type is CTRexScFieldRangeBase.FILED_TYPES = ["inc","dec","rand"]
+ trim packet size. field type is CTRexScFieldRangeBase.FILED_TYPES = ["inc","dec","rand"]
"""
def __init__(self,field_type="rand",min_pkt_size=None, max_pkt_size=None):
super(CTRexScTrimPacketSize, self).__init__()
@@ -174,7 +174,7 @@ class CTRexScTrimPacketSize(CTRexScriptsBase):
class CTRexScRaw(CTRexScriptsBase):
"""
- raw instructions
+ raw instructions
"""
def __init__(self,list_of_commands=None,split_by_field=None):
super(CTRexScRaw, self).__init__()
@@ -190,7 +190,7 @@ class CTRexScRaw(CTRexScriptsBase):
################################################################################################
-# VM raw instructions
+# VM raw instructions
################################################################################################
class CTRexVmInsBase(object):
@@ -283,7 +283,7 @@ class CTRexVmInsTupleGen(CTRexVmInsBase):
################################################################################################
-#
+#
class CTRexVmEngine(object):
def __init__(self):
@@ -294,7 +294,7 @@ class CTRexVmEngine(object):
self.ins=[]
self.split_by_var = ''
- # return as json
+ # return as json
def get_json (self):
inst_array = [];
# dump it as dict
@@ -352,7 +352,7 @@ class CTRexScapyPktUtl(object):
def _layer_offset(self, name, cnt = 0):
"""
- return offset of layer e.g 'IP',1 will return offfset of layer ip:1
+ return offset of layer, e.g. 'IP',1 will return offset of layer ip:1
"""
save_cnt=cnt
for pkt in self.pkt_iter ():
@@ -367,7 +367,7 @@ class CTRexScapyPktUtl(object):
def layer_offset(self, name, cnt = 0):
"""
- return offset of layer e.g 'IP',1 will return offfset of layer ip:1
+ return offset of layer, e.g. 'IP',1 will return offset of layer ip:1
"""
save_cnt=cnt
for pkt in self.pkt_iter ():
@@ -381,7 +381,7 @@ class CTRexScapyPktUtl(object):
def get_field_offet(self, layer, layer_cnt, field_name):
"""
- return offset of layer e.g 'IP',1 will return offfset of layer ip:1
+ return offset of layer, e.g. 'IP',1 will return offset of layer ip:1
"""
t=self._layer_offset(layer,layer_cnt);
l_offset=t[1];
@@ -397,7 +397,7 @@ class CTRexScapyPktUtl(object):
def get_layer_offet_by_str(self, layer_des):
"""
- return layer offset by string
+ return layer offset by string
:parameters:
@@ -423,14 +423,14 @@ class CTRexScapyPktUtl(object):
def get_field_offet_by_str(self, field_des):
"""
- return field_des (offset,size) layer:cnt.field
- for example
+ return field_des (offset,size) layer:cnt.field
+ for example
802|1Q.vlan get 802.1Q->valn replace | with .
IP.src
IP:0.src (first IP.src like IP.src)
for example IP:1.src for internal IP
- return (offset, size) as tuple
+ return (offset, size) as tuple
"""
@@ -489,19 +489,19 @@ class CTRexVmDescBase(object):
def get_var_ref (self):
'''
virtual function return a ref var name
- '''
+ '''
return None
def get_var_name(self):
'''
virtual function return the varible name if exists
- '''
+ '''
return None
- def compile(self,parent):
+ def compile(self,parent):
'''
virtual function to take parent than has function name_to_offset
- '''
+ '''
pass;
@@ -565,12 +565,12 @@ class CTRexVmDescFlowVar(CTRexVmDescBase):
class CTRexVmDescFixIpv4(CTRexVmDescBase):
def __init__(self, offset):
super(CTRexVmDescFixIpv4, self).__init__()
- self.offset = offset; # could be a name of offset
+ self.offset = offset; # could be a name of offset
def get_obj (self):
return CTRexVmInsFixIpv4(self.offset);
- def compile(self,parent):
+ def compile(self,parent):
if type(self.offset)==str:
self.offset = parent._pkt_layer_offset(self.offset);
@@ -593,7 +593,7 @@ class CTRexVmDescWrFlowVar(CTRexVmDescBase):
def get_obj (self):
return CTRexVmInsWrFlowVar(self.name,self.pkt_offset+self.offset_fixup,self.add_val,self.is_big)
- def compile(self,parent):
+ def compile(self,parent):
if type(self.pkt_offset)==str:
t=parent._name_to_offset(self.pkt_offset)
self.pkt_offset = t[0]
@@ -627,7 +627,7 @@ class CTRexVmDescWrMaskFlowVar(CTRexVmDescBase):
def get_obj (self):
return CTRexVmInsWrMaskFlowVar(self.name,self.pkt_offset+self.offset_fixup,self.pkt_cast_size,self.mask,self.shift,self.add_value,self.is_big)
- def compile(self,parent):
+ def compile(self,parent):
if type(self.pkt_offset)==str:
t=parent._name_to_offset(self.pkt_offset)
self.pkt_offset = t[0]
@@ -680,7 +680,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
When path_relative_to_profile is a True load pcap file from a path relative to the profile
"""
- def __init__(self, pkt = None, pkt_buffer = None, vm = None, path_relative_to_profile = False, build_raw = True, remove_fcs = True):
+ def __init__(self, pkt = None, pkt_buffer = None, vm = None, path_relative_to_profile = False, build_raw = False, remove_fcs = True):
"""
Instantiate a CTRexPktBuilder object
@@ -737,7 +737,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
"""
super(CScapyTRexPktBuilder, self).__init__()
- self.pkt = None # as input
+ self.pkt = None # as input
self.pkt_raw = None # from raw pcap file
self.vm_scripts = [] # list of high level instructions
self.vm_low_level = None
@@ -745,7 +745,8 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
self.metadata=""
self.path_relative_to_profile = path_relative_to_profile
self.remove_fcs = remove_fcs
-
+ self.is_binary_source = pkt_buffer != None
+
if pkt != None and pkt_buffer != None:
raise CTRexPacketBuildException(-15, "packet builder cannot be provided with both pkt and pkt_buffer")
@@ -778,7 +779,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def get_vm_data(self):
"""
- Dumps the instructions
+ Dumps the instructions
:parameters:
None
@@ -792,7 +793,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
assert self.vm_low_level is not None, 'vm_low_level is None, please use compile()'
- return self.vm_low_level.get_json()
+ return self.vm_low_level.get_json()
def dump_pkt(self, encode = True):
"""
@@ -816,7 +817,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
return {'binary': base64.b64encode(pkt_buf) if encode else pkt_buf,
'meta': self.metadata}
-
+
def dump_pkt_to_pcap(self, file_path):
wrpcap(file_path, self._get_pkt_as_str())
@@ -852,7 +853,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def set_pcap_file (self, pcap_file):
"""
- load raw pcap file into a buffer. load only the first packet
+ load raw pcap file into a buffer. load only the first packet
:parameters:
pcap_file : file_name
@@ -898,7 +899,9 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
else:
raise CTRexPacketBuildException(-14, "bad packet" )
- def is_def_src_mac (self):
+ def is_default_src_mac (self):
+ if self.is_binary_source:
+ return True
p = self.pkt
if isinstance(p, Packet):
if isinstance(p,Ether):
@@ -906,7 +909,9 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
return False
return True
- def is_def_dst_mac (self):
+ def is_default_dst_mac (self):
+ if self.is_binary_source:
+ return True
p = self.pkt
if isinstance(p, Packet):
if isinstance(p,Ether):
@@ -918,7 +923,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
if self.pkt == None and self.pkt_raw == None:
raise CTRexPacketBuildException(-14, "Packet is empty")
-
+
self.vm_low_level = CTRexVmEngine()
# compile the VM
@@ -935,7 +940,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
raise CTRexPacketBuildException(-14, "Packet is empty")
####################################################
- # private
+ # private
def _get_pcap_file_path (self,pcap_file_name):
@@ -944,7 +949,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
f_path = pcap_file_name
else:
if self.path_relative_to_profile:
- p = self._get_path_relative_to_profile () # loader
+ p = self._get_path_relative_to_profile () # loader
if p :
f_path=os.path.abspath(os.path.join(os.path.dirname(p),pcap_file_name))
@@ -960,7 +965,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def _compile_raw (self,obj):
- # make sure we have varibles once
+ # make sure we have variables once
vars={};
# add it add var to dit
@@ -979,17 +984,17 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
var_name = desc.get_var_ref()
if var_name :
if not vars.has_key(var_name):
- raise CTRexPacketBuildException(-11,("variable %s does not exists ") % (var_name) );
+ raise CTRexPacketBuildException(-11,("variable %s does not exist ") % (var_name) );
desc.compile(self);
for desc in obj.commands:
self.vm_low_level.add_ins(desc.get_obj());
# set split_by_var
- if obj.split_by_field :
+ if obj.split_by_field :
assert type(obj.split_by_field)==str, "type of split by var should be string"
#if not vars.has_key(obj.split_by_field):
- # raise CTRexPacketBuildException(-11,("variable %s does not exists. change split_by_var args ") % (var_name) );
+ # raise CTRexPacketBuildException(-11,("variable %s does not exist. change split_by_var args ") % (var_name) );
self.vm_low_level.split_by_var = obj.split_by_field
@@ -1008,12 +1013,11 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
# regular scapy packet
elif not self.pkt:
# should not reach here
- raise CTRexPacketBuildException(-11, 'empty packet')
+ raise CTRexPacketBuildException(-11, 'empty packet')
if self.remove_fcs and self.pkt.lastlayer().name == 'Padding':
self.pkt.lastlayer().underlayer.remove_payload()
- if len(self.pkt) < 60: # simulator can write padding with non-zeros, set it explicit
- self.pkt /= Padding('\x00' * (60 - len(self.pkt)))
+
self.pkt.build()
self.is_pkt_built = True
@@ -1036,7 +1040,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
return str(self.pkt)
if self.pkt_raw:
return self.pkt_raw
- raise CTRexPacketBuildException(-11, 'empty packet');
+ raise CTRexPacketBuildException(-11, 'empty packet');
def _add_tuple_gen(self,tuple_gen):
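The new is_binary_source flag above is what lets the renamed is_default_src_mac/is_default_dst_mac short-circuit to True for raw buffers: a byte buffer carries no scapy Ether layer to inspect, so its MACs are always treated as defaults. A minimal sketch of that decision (the helper name is invented, not the builder's API):

    # invented helper echoing the short-circuit above
    def mac_is_default(scapy_ether_layer, is_binary_source, default_mac):
        if is_binary_source:
            # raw buffers are never overridden by the stream
            return True
        # for scapy packets the builder inspects the Ether layer fields
        return scapy_ether_layer.src == default_mac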
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
index c2e318bc..bb877586 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
@@ -13,15 +13,45 @@ import re
import math
import copy
import threading
+import pprint
GLOBAL_STATS = 'g'
PORT_STATS = 'p'
PORT_STATUS = 'ps'
-ALL_STATS_OPTS = {GLOBAL_STATS, PORT_STATS, PORT_STATUS}
+STREAMS_STATS = 's'
+
+ALL_STATS_OPTS = {GLOBAL_STATS, PORT_STATS, PORT_STATUS, STREAMS_STATS}
COMPACT = {GLOBAL_STATS, PORT_STATS}
+SS_COMPAT = {GLOBAL_STATS, STREAMS_STATS}
ExportableStats = namedtuple('ExportableStats', ['raw_data', 'text_table'])
+# deep mrege of dicts dst = src + dst
+def deep_merge_dicts (dst, src):
+ for k, v in src.iteritems():
+ # if not exists - deep copy it
+ if not k in dst:
+ dst[k] = copy.deepcopy(v)
+ else:
+ if isinstance(v, dict):
+ deep_merge_dicts(dst[k], v)
+
+# BPS L1 from pps and BPS L2
+def calc_bps_L1 (bps, pps):
+ if (pps == 0) or (bps == 0):
+ return 0
+
+ factor = bps / (pps * 8.0)
+ return bps * ( 1 + (20 / factor) )
+#
+
+def is_intable (value):
+ try:
+ int(value)
+ return True
+ except ValueError:
+ return False
+
# use to calculate diffs relative to the previous values
# for example, BW
def calculate_diff (samples):
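The calc_bps_L1 helper above derives layer-1 bandwidth by charging each packet the usual 20 bytes of Ethernet preamble plus inter-frame gap on top of its L2 size. A quick worked check (numbers invented):

    # invented numbers: 1 Mpps of 500-byte frames -> 4 Gbps at L2
    pps = 1e6
    bps_l2 = pps * 500 * 8                 # 4.0e9
    factor = bps_l2 / (pps * 8.0)          # average frame size = 500 bytes
    bps_l1 = bps_l2 * (1 + 20 / factor)    # 20 extra bytes per frame -> 4.16e9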
@@ -66,18 +96,23 @@ class CTRexInfoGenerator(object):
STLClient and the ports.
"""
- def __init__(self, global_stats_ref, ports_dict_ref):
+ def __init__(self, global_stats_ref, ports_dict_ref, rx_stats_ref):
self._global_stats = global_stats_ref
self._ports_dict = ports_dict_ref
+ self._rx_stats_ref = rx_stats_ref
def generate_single_statistic(self, port_id_list, statistic_type):
if statistic_type == GLOBAL_STATS:
return self._generate_global_stats()
+
elif statistic_type == PORT_STATS:
return self._generate_port_stats(port_id_list)
- pass
+
elif statistic_type == PORT_STATUS:
return self._generate_port_status(port_id_list)
+
+ elif statistic_type == STREAMS_STATS:
+ return self._generate_streams_stats()
else:
# ignore by returning empty object
return {}
@@ -110,6 +145,90 @@ class CTRexInfoGenerator(object):
return {"global_statistics": ExportableStats(stats_data, stats_table)}
+ def _generate_streams_stats (self):
+
+ streams_keys, sstats_data = self._rx_stats_ref.generate_stats()
+ stream_count = len(streams_keys)
+
+ stats_table = text_tables.TRexTextTable()
+ stats_table.set_cols_align(["l"] + ["r"] * stream_count)
+ stats_table.set_cols_width([10] + [17] * stream_count)
+ stats_table.set_cols_dtype(['t'] + ['t'] * stream_count)
+
+ stats_table.add_rows([[k] + v
+ for k, v in sstats_data.iteritems()],
+ header=False)
+
+ header = ["PG ID"] + [key for key in streams_keys]
+ stats_table.header(header)
+
+ return {"streams_statistics": ExportableStats(sstats_data, stats_table)}
+
+
+
+ per_stream_stats = OrderedDict([("owner", []),
+ ("state", []),
+ ("--", []),
+ ("Tx bps L2", []),
+ ("Tx bps L1", []),
+ ("Tx pps", []),
+ ("Line Util.", []),
+
+ ("---", []),
+ ("Rx bps", []),
+ ("Rx pps", []),
+
+ ("----", []),
+ ("opackets", []),
+ ("ipackets", []),
+ ("obytes", []),
+ ("ibytes", []),
+ ("tx-bytes", []),
+ ("rx-bytes", []),
+ ("tx-pkts", []),
+ ("rx-pkts", []),
+
+ ("-----", []),
+ ("oerrors", []),
+ ("ierrors", []),
+
+ ]
+ )
+
+ total_stats = CPortStats(None)
+
+ for port_obj in relevant_ports:
+ # fetch port data
+ port_stats = port_obj.generate_port_stats()
+
+ total_stats += port_obj.port_stats
+
+ # populate to data structures
+ return_stats_data[port_obj.port_id] = port_stats
+ self.__update_per_field_dict(port_stats, per_field_stats)
+
+ total_cols = len(relevant_ports)
+ header = ["port"] + [port.port_id for port in relevant_ports]
+
+ if (total_cols > 1):
+ self.__update_per_field_dict(total_stats.generate_stats(), per_field_stats)
+ header += ['total']
+ total_cols += 1
+
+ stats_table = text_tables.TRexTextTable()
+ stats_table.set_cols_align(["l"] + ["r"] * total_cols)
+ stats_table.set_cols_width([10] + [17] * total_cols)
+ stats_table.set_cols_dtype(['t'] + ['t'] * total_cols)
+
+ stats_table.add_rows([[k] + v
+ for k, v in per_field_stats.iteritems()],
+ header=False)
+
+ stats_table.header(header)
+
+ return {"streams_statistics": ExportableStats(return_stats_data, stats_table)}
+
+
def _generate_port_stats(self, port_id_list):
relevant_ports = self.__get_relevant_ports(port_id_list)
@@ -131,10 +250,10 @@ class CTRexInfoGenerator(object):
("ipackets", []),
("obytes", []),
("ibytes", []),
- ("tx_bytes", []),
- ("rx_bytes", []),
- ("tx_pkts", []),
- ("rx_pkts", []),
+ ("tx-bytes", []),
+ ("rx-bytes", []),
+ ("tx-pkts", []),
+ ("rx-pkts", []),
("-----", []),
("oerrors", []),
@@ -284,97 +403,94 @@ class CTRexStats(object):
self.last_update_ts = time.time()
self.history = deque(maxlen = 10)
self.lock = threading.Lock()
+ self.has_baseline = False
- def __getitem__(self, item):
- # override this to allow quick and clean access to fields
- if not item in self.latest_stats:
- return "N/A"
-
- # item must exist
- m = re.search('_(([a-z])ps)$', item)
- if m:
- # this is a non-relative item
- unit = m.group(2)
- if unit == "b":
- return self.get(item, format=True, suffix="b/sec")
- elif unit == "p":
- return self.get(item, format=True, suffix="pkt/sec")
- else:
- return self.get(item, format=True, suffix=m.group(1))
-
- m = re.search('^[i|o](a-z+)$', item)
- if m:
- # this is a non-relative item
- type = m.group(1)
- if type == "bytes":
- return self.get_rel(item, format=True, suffix="B")
- elif type == "packets":
- return self.get_rel(item, format=True, suffix="pkts")
- else:
- # do not format with suffix
- return self.get_rel(item, format=True)
-
- # can't match to any known pattern, return N/A
- return "N/A"
+ ######## abstract methods ##########
+ # get stats for user / API
+ def get_stats (self):
+ raise NotImplementedError()
+ # generate format stats (for TUI)
def generate_stats(self):
- # must be implemented by designated classes (such as port/ global stats)
raise NotImplementedError()
- def generate_extended_values (self, snapshot):
+ # called when a snapshot arrives - add more fields
+ def _update (self, snapshot, baseline):
raise NotImplementedError()
- def update(self, snapshot):
-
- # some extended generated values (from base values)
- self.generate_extended_values(snapshot)
-
- # update
- self.latest_stats = snapshot
+ ######## END abstract methods ##########
- with self.lock:
- self.history.append(snapshot)
+ def update(self, snapshot, baseline):
- diff_time = time.time() - self.last_update_ts
+ # no update is valid before baseline
+ if not self.has_baseline and not baseline:
+ return
- # 3 seconds is too much - this is the new reference
- if (not self.reference_stats) or (diff_time > 3):
- self.reference_stats = self.latest_stats
+ # call the underlying method
+ rc = self._update(snapshot)
+ if not rc:
+ return
-
+ # sync one time
+ if not self.has_baseline and baseline:
+ self.reference_stats = copy.deepcopy(self.latest_stats)
+ self.has_baseline = True
- self.last_update_ts = time.time()
+ # save history
+ with self.lock:
+ self.history.append(self.latest_stats)
def clear_stats(self):
- self.reference_stats = self.latest_stats
+ self.reference_stats = copy.deepcopy(self.latest_stats)
def invalidate (self):
self.latest_stats = {}
+
+ def _get (self, src, field, default = None):
+ if isinstance(field, list):
+ # deep
+ value = src
+ for level in field:
+ if not level in value:
+ return default
+ value = value[level]
+ else:
+ # flat
+ if not field in src:
+ return default
+ value = src[field]
+
+ return value
+
def get(self, field, format=False, suffix=""):
- if not field in self.latest_stats:
+ value = self._get(self.latest_stats, field)
+ if value == None:
return "N/A"
- if not format:
- return self.latest_stats[field]
- else:
- return format_num(self.latest_stats[field], suffix)
+
+ return value if not format else format_num(value, suffix)
+
def get_rel(self, field, format=False, suffix=""):
- if not field in self.latest_stats:
+
+ ref_value = self._get(self.reference_stats, field)
+ latest_value = self._get(self.latest_stats, field)
+
+ # latest value is an aggregation - must contain the value
+ if latest_value == None:
return "N/A"
- if not format:
- if not field in self.reference_stats:
- print "REF: " + str(self.reference_stats)
- print "BASE: " + str(self.latest_stats)
+ if ref_value == None:
+ ref_value = 0
+
+ value = latest_value - ref_value
+
+ return value if not format else format_num(value, suffix)
- return (self.latest_stats[field] - self.reference_stats[field])
- else:
- return format_num(self.latest_stats[field] - self.reference_stats[field], suffix)
# get trend for a field
def get_trend (self, field, use_raw = False, percision = 10.0):
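The reworked _get above lets get() and get_rel() take either a flat key or a list that walks nested dictionaries, which is what the RX stats code below relies on. A small standalone sketch of the two access styles (the snapshot contents are invented):

    # invented snapshot layout, echoing how CRxStats stores per-PG counters
    latest = {'m_tx_bps': 4.0e9,
              '5': {'tx_pkts': {'0': 1000, 'total': 1000}}}

    def deep_get(src, field, default = None):
        if isinstance(field, list):
            value = src
            for level in field:
                if level not in value:
                    return default
                value = value[level]
            return value
        return src.get(field, default)

    deep_get(latest, 'm_tx_bps')                   # 4000000000.0
    deep_get(latest, ['5', 'tx_pkts', 'total'])    # 1000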
@@ -458,18 +574,19 @@ class CGlobalStats(CTRexStats):
return stats
- def generate_extended_values (self, snapshot):
+
+ def _update(self, snapshot):
# L1 bps
bps = snapshot.get("m_tx_bps")
pps = snapshot.get("m_tx_pps")
- if pps > 0:
- avg_pkt_size = bps / (pps * 8.0)
- bps_L1 = bps * ( (avg_pkt_size + 20.0) / avg_pkt_size )
- else:
- bps_L1 = 0.0
+ snapshot['m_tx_bps_L1'] = calc_bps_L1(bps, pps)
+
+
+ # simple...
+ self.latest_stats = snapshot
- snapshot['m_tx_bps_L1'] = bps_L1
+ return True
def generate_stats(self):
@@ -568,20 +685,22 @@ class CPortStats(CTRexStats):
return stats
- def generate_extended_values (self, snapshot):
+
+ def _update(self, snapshot):
+
# L1 bps
bps = snapshot.get("m_total_tx_bps")
pps = snapshot.get("m_total_tx_pps")
- if pps > 0:
- avg_pkt_size = bps / (pps * 8.0)
- bps_L1 = bps * ( (avg_pkt_size + 20.0) / avg_pkt_size )
- else:
- bps_L1 = 0.0
-
+ bps_L1 = calc_bps_L1(bps, pps)
snapshot['m_total_tx_bps_L1'] = bps_L1
snapshot['m_percentage'] = (bps_L1 / self._port_obj.get_speed_bps()) * 100
+ # simple...
+ self.latest_stats = snapshot
+
+ return True
+
def generate_stats(self):
@@ -627,10 +746,10 @@ class CPortStats(CTRexStats):
"obytes" : self.get_rel("obytes"),
"ibytes" : self.get_rel("ibytes"),
- "tx_bytes": self.get_rel("obytes", format = True, suffix = "B"),
- "rx_bytes": self.get_rel("ibytes", format = True, suffix = "B"),
- "tx_pkts": self.get_rel("opackets", format = True, suffix = "pkts"),
- "rx_pkts": self.get_rel("ipackets", format = True, suffix = "pkts"),
+ "tx-bytes": self.get_rel("obytes", format = True, suffix = "B"),
+ "rx-bytes": self.get_rel("ibytes", format = True, suffix = "B"),
+ "tx-pkts": self.get_rel("opackets", format = True, suffix = "pkts"),
+ "rx-pkts": self.get_rel("ipackets", format = True, suffix = "pkts"),
"oerrors" : format_num(self.get_rel("oerrors"),
compact = False,
@@ -643,33 +762,260 @@ class CPortStats(CTRexStats):
}
-class CRxStats(object):
+
+
+# RX stats objects - COMPLEX :-(
+class CRxStats(CTRexStats):
def __init__(self):
- self.flow_stats = {}
+ super(CRxStats, self).__init__()
+
+
+ # calculates a diff between previous snapshot
+ # and current one
+ def calculate_diff_sec (self, current, prev):
+ if not 'ts' in current:
+ raise ValueError("INTERNAL ERROR: RX stats snapshot MUST contain 'ts' field")
+
+ if prev:
+ prev_ts = prev['ts']
+ now_ts = current['ts']
+ diff_sec = (now_ts['value'] - prev_ts['value']) / float(now_ts['freq'])
+ else:
+ diff_sec = 0.0
+
+ return diff_sec
+
+
+ # this is the heart of the complex logic
+ def process_single_pg (self, current_pg, prev_pg):
+
+ # start with the previous PG
+ output = copy.deepcopy(prev_pg)
+
+ for field in ['tx_pkts', 'tx_bytes', 'rx_pkts', 'rx_bytes']:
+ # is this the first time? (nothing in prev)
+ if not field in output:
+ output[field] = {}
+
+ # does the current snapshot have this field?
+ if field in current_pg:
+ for port, pv in current_pg[field].iteritems():
+ if not is_intable(port):
+ continue
+
+ output[field][port] = pv
+
+ # sum up
+ total = None
+ for port, pv in output[field].iteritems():
+ if not is_intable(port):
+ continue
+ if total is None:
+ total = 0
+ total += pv
+
+ output[field]['total'] = total
+
+
+ return output
+
+
+ def process_snapshot (self, current, prev):
+
+ # final output
+ output = {}
+
+ # copy timestamp field
+ output['ts'] = current['ts']
+
+ # aggregate all the PG ids (previous and current)
+ pg_ids = filter(is_intable, set(prev.keys() + current.keys()))
+
+ for pg_id in pg_ids:
+
+ current_pg = current.get(pg_id, {})
+
+ # first time - we do not care
+ if current_pg.get('first_time'):
+ # new value - ignore history
+ output[pg_id] = self.process_single_pg(current_pg, {})
+ self.reference_stats[pg_id] = {}
+
+ # 'dry' B/W
+ self.calculate_bw_for_pg(output[pg_id])
+ else:
+ # aggregate the two values
+ prev_pg = prev.get(pg_id, {})
+ output[pg_id] = self.process_single_pg(current_pg, prev_pg)
+
+ # calculate B/W
+ diff_sec = self.calculate_diff_sec(current, prev)
+ self.calculate_bw_for_pg(output[pg_id], prev_pg, diff_sec)
+
+
+ return output
+
+
+
+ def calculate_bw_for_pg (self, pg_current, pg_prev = None, diff_sec = 0.0):
+
+ # if there are no previous values - it's None
+ if (pg_prev == None) or not (diff_sec > 0):
+ pg_current['tx_pps'] = None
+ pg_current['tx_bps'] = None
+ pg_current['tx_bps_L1'] = None
+ pg_current['rx_pps'] = None
+ pg_current['rx_bps'] = None
+ return
+
+
+ # read the current values
+ now_tx_pkts = pg_current['tx_pkts']['total']
+ now_tx_bytes = pg_current['tx_bytes']['total']
+ now_rx_pkts = pg_current['rx_pkts']['total']
+ now_rx_bytes = pg_current['rx_bytes']['total']
+
+ # prev values
+ prev_tx_pkts = pg_prev['tx_pkts']['total']
+ prev_tx_bytes = pg_prev['tx_bytes']['total']
+ prev_rx_pkts = pg_prev['rx_pkts']['total']
+ prev_rx_bytes = pg_prev['rx_bytes']['total']
+
+ # prev B/W
+ prev_tx_pps = pg_prev['tx_pps']
+ prev_tx_bps = pg_prev['tx_bps']
+ prev_rx_pps = pg_prev['rx_pps']
+ prev_rx_bps = pg_prev['rx_bps']
+
+
+ #assert(now_tx_pkts >= prev_tx_pkts)
+ pg_current['tx_pps'] = self.calc_pps(prev_tx_pps, now_tx_pkts, prev_tx_pkts, diff_sec)
+ pg_current['tx_bps'] = self.calc_bps(prev_tx_bps, now_tx_bytes, prev_tx_bytes, diff_sec)
+ pg_current['rx_pps'] = self.calc_pps(prev_rx_pps, now_rx_pkts, prev_rx_pkts, diff_sec)
+ pg_current['rx_bps'] = self.calc_bps(prev_rx_bps, now_rx_bytes, prev_rx_bytes, diff_sec)
+
+ if pg_current['tx_bps'] != None and pg_current['tx_pps'] != None:
+ pg_current['tx_bps_L1'] = calc_bps_L1(pg_current['tx_bps'], pg_current['tx_pps'])
+ else:
+ pg_current['tx_bps_L1'] = None
+
+
+ def calc_pps (self, prev_bw, now, prev, diff_sec):
+ return self.calc_bw(prev_bw, now, prev, diff_sec, False)
+
+
+ def calc_bps (self, prev_bw, now, prev, diff_sec):
+ return self.calc_bw(prev_bw, now, prev, diff_sec, True)
+
+
+ def calc_bw (self, prev_bw, now, prev, diff_sec, is_bps):
+ # B/W is not valid when the values are None
+ if (now is None) or (prev is None):
+ return None
+
+ # calculate the B/W for current snapshot
+ current_bw = (now - prev) / diff_sec
+ if is_bps:
+ current_bw *= 8
+
+ # previous B/W is None? ignore it
+ if prev_bw is None:
+ prev_bw = 0
- def update (self, snapshot):
- self.flow_stats = snapshot
+ return ( (0.5 * prev_bw) + (0.5 * current_bw) )
+
+
+ def _update (self, snapshot):
+
+ # generate a new snapshot
+ new_snapshot = self.process_snapshot(snapshot, self.latest_stats)
+
+ #print new_snapshot
+ # advance
+ self.latest_stats = new_snapshot
+
+
+ return True
+
+
+
+ # for API
def get_stats (self):
stats = {}
- for pg_id, pg_id_data in self.flow_stats.iteritems():
- # ignore non pg ID keys
- try:
- pg_id = int(pg_id)
- except ValueError:
+
+ for pg_id, value in self.latest_stats.iteritems():
+ # skip non ints
+ if not is_intable(pg_id):
continue
- # handle pg id
- stats[pg_id] = {}
- for field, per_port_data in pg_id_data.iteritems():
- stats[pg_id][field] = {}
- for port, value in per_port_data.iteritems():
- stats[pg_id][field][int(port)] = value
+ stats[int(pg_id)] = {}
+ for field in ['tx_pkts', 'tx_bytes', 'rx_pkts']:
+ stats[int(pg_id)][field] = {'total': self.get_rel([pg_id, field, 'total'])}
+
+ for port, pv in value[field].iteritems():
+ try:
+ int(port)
+ except ValueError:
+ continue
+ stats[int(pg_id)][field][int(port)] = self.get_rel([pg_id, field, port])
return stats
+
+ def generate_stats (self):
+
+ # for TUI - maximum 4
+ pg_ids = filter(is_intable, self.latest_stats.keys())[:4]
+ cnt = len(pg_ids)
+
+ formatted_stats = OrderedDict([ ('Tx pps', []),
+ ('Tx bps L2', []),
+ ('Tx bps L1', []),
+ ('---', [''] * cnt),
+ ('Rx pps', []),
+ ('Rx bps', []),
+ ('----', [''] * cnt),
+ ('opackets', []),
+ ('ipackets', []),
+ ('obytes', []),
+ ('ibytes', []),
+ ('-----', [''] * cnt),
+ ('tx_pkts', []),
+ ('rx_pkts', []),
+ ('tx_bytes', []),
+ ('rx_bytes', [])
+ ])
+
+
+
+ # maximum 4
+ for pg_id in pg_ids:
+
+ formatted_stats['Tx pps'].append(self.get([pg_id, 'tx_pps'], format = True, suffix = "pps"))
+ formatted_stats['Tx bps L2'].append(self.get([pg_id, 'tx_bps'], format = True, suffix = "bps"))
+
+ formatted_stats['Tx bps L1'].append(self.get([pg_id, 'tx_bps_L1'], format = True, suffix = "bps"))
+
+ formatted_stats['Rx pps'].append(self.get([pg_id, 'rx_pps'], format = True, suffix = "pps"))
+ formatted_stats['Rx bps'].append(self.get([pg_id, 'rx_bps'], format = True, suffix = "bps"))
+
+ formatted_stats['opackets'].append(self.get_rel([pg_id, 'tx_pkts', 'total']))
+ formatted_stats['ipackets'].append(self.get_rel([pg_id, 'rx_pkts', 'total']))
+ formatted_stats['obytes'].append(self.get_rel([pg_id, 'tx_bytes', 'total']))
+ formatted_stats['ibytes'].append(self.get_rel([pg_id, 'rx_bytes', 'total']))
+ formatted_stats['tx_bytes'].append(self.get_rel([pg_id, 'tx_bytes', 'total'], format = True, suffix = "B"))
+ formatted_stats['rx_bytes'].append(self.get_rel([pg_id, 'rx_bytes', 'total'], format = True, suffix = "B"))
+ formatted_stats['tx_pkts'].append(self.get_rel([pg_id, 'tx_pkts', 'total'], format = True, suffix = "pkts"))
+ formatted_stats['rx_pkts'].append(self.get_rel([pg_id, 'rx_pkts', 'total'], format = True, suffix = "pkts"))
+
+
+
+ return pg_ids, formatted_stats
+
if __name__ == "__main__":
pass
+
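A note on the bandwidth math added above: calc_bw turns two consecutive totals into a rate and then blends it 50/50 with the previous rate, i.e. an exponentially weighted moving average that damps jitter between async snapshots. A standalone sketch (inputs invented):

    def smoothed_rate(prev_rate, now_total, prev_total, diff_sec, is_bps = False):
        # same shape as CRxStats.calc_bw above; inputs are invented
        if now_total is None or prev_total is None:
            return None
        current = (now_total - prev_total) / diff_sec
        if is_bps:
            current *= 8
        if prev_rate is None:
            prev_rate = 0
        return 0.5 * prev_rate + 0.5 * current

    smoothed_rate(None, 2000, 1000, 1.0)      # 500.0 pps on the first sample
    smoothed_rate(500.0, 3000, 2000, 1.0)     # 750.0 pps, converging toward 1000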
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
index e0b25b1d..d582b499 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
@@ -13,21 +13,30 @@ def stl_map_ports (client, ports = None):
# generate streams
base_pkt = CScapyTRexPktBuilder(pkt = Ether()/IP())
-
+
+ # send an initial burst to calm down switches (ARPs etc.)
+ stream = STLStream(packet = base_pkt,
+ mode = STLTXSingleBurst(pps = 100000, total_pkts = 1))
+ client.add_streams(stream, ports)
+
+ client.start(ports, mult = "50%")
+ client.wait_on_traffic(ports)
+ client.reset(ports)
+
tx_pkts = {}
pkts = 1
for port in ports:
tx_pkts[pkts] = port
stream = STLStream(packet = base_pkt,
- mode = STLTXSingleBurst(pps = 100000, total_pkts = pkts))
+ mode = STLTXSingleBurst(pps = 100000, total_pkts = pkts * 3))
client.add_streams(stream, [port])
- pkts = pkts * 2
+ pkts *= 2
# inject
client.clear_stats()
- client.start(ports, mult = "1mpps")
+ client.start(ports, mult = "50%")
client.wait_on_traffic(ports)
stats = client.get_stats()
@@ -40,7 +49,7 @@ def stl_map_ports (client, ports = None):
# actual mapping
for port in ports:
- ipackets = stats[port]["ipackets"]
+ ipackets = int(round(stats[port]["ipackets"] / 3.0)) # majority out of 3 to filter out random noise
table['map'][port] = None
for pkts in tx_pkts.keys():
@@ -48,7 +57,6 @@ def stl_map_ports (client, ports = None):
tx_port = tx_pkts[pkts]
table['map'][port] = tx_port
-
unmapped = list(ports)
while len(unmapped) > 0:
port_a = unmapped.pop(0)
@@ -57,7 +65,9 @@ def stl_map_ports (client, ports = None):
# if unknown - add to the unknown list
if port_b == None:
table['unknown'].append(port_a)
-
+ # self-loop, due to bug?
+ elif port_a == port_b:
+ continue
# bi-directional ports
elif (table['map'][port_b] == port_a):
unmapped.remove(port_b)
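The mapping trick above gives every TX port a unique burst size (1, 2, 4, ... packets, each sent three times), so the receive counter on a port identifies its peer by majority vote even with a little stray traffic. A toy illustration (counts invented):

    # invented example: three TX ports send 1*3, 2*3 and 4*3 packets respectively
    tx_pkts = {1: 0, 2: 1, 4: 2}            # burst size -> sending port id

    def peer_of(ipackets_raw):
        # majority out of 3 repetitions filters a few noise packets
        ipackets = int(round(ipackets_raw / 3.0))
        return tx_pkts.get(ipackets)

    peer_of(12)     # exactly 4*3 received       -> port 2
    peer_of(13)     # one stray ARP frame extra  -> still port 2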
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
index b7368767..2a99be8d 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
@@ -329,7 +329,7 @@ class STLStream(object):
if mac_src_override_by_pkt == None:
int_mac_src_override_by_pkt=0
if packet :
- if packet.is_def_src_mac ()==False:
+ if packet.is_default_src_mac ()==False:
int_mac_src_override_by_pkt=1
else:
@@ -338,7 +338,7 @@ class STLStream(object):
if mac_dst_override_mode == None:
int_mac_dst_override_mode = 0;
if packet :
- if packet.is_def_dst_mac ()==False:
+ if packet.is_default_dst_mac ()==False:
int_mac_dst_override_mode=STLStreamDstMAC_PKT
else:
int_mac_dst_override_mode = int(mac_dst_override_mode);
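The rename above matters because these checks feed the stream's MAC override flags: the source MAC is taken from the packet only when the builder reports a non-default MAC, and binary (buffer/pcap) sources never do. A reduced sketch of that decision (the function name is invented):

    # invented helper mirroring the int_mac_src_override_by_pkt logic above
    def src_override_flag(packet, mac_src_override_by_pkt):
        if mac_src_override_by_pkt is None:
            if packet is not None and packet.is_default_src_mac() == False:
                return 1        # take the source MAC from the packet itself
            return 0            # keep the port's configured source MAC
        return int(mac_src_override_by_pkt)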
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
index 649c192a..0390ac9c 100755
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
@@ -34,7 +34,8 @@ PROMISCUOUS_SWITCH = 21
GLOBAL_STATS = 50
PORT_STATS = 51
PORT_STATUS = 52
-STATS_MASK = 53
+STREAMS_STATS = 53
+STATS_MASK = 54
STREAMS_MASK = 60
# ALL_STREAMS = 61
@@ -312,6 +313,10 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'action': 'store_true',
'help': "Fetch only port status data"}),
+ STREAMS_STATS: ArgumentPack(['-s'],
+ {'action': 'store_true',
+ 'help': "Fetch only streams stats"}),
+
STREAMS_MASK: ArgumentPack(['--streams'],
{"nargs": '+',
'dest':'streams',
@@ -336,7 +341,8 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'required': True}),
STATS_MASK: ArgumentGroup(MUTEX, [GLOBAL_STATS,
PORT_STATS,
- PORT_STATUS],
+ PORT_STATUS,
+ STREAMS_STATS],
{})
}
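Finally, the parsing change above adds '-s' to the mutually exclusive stats mask, so the console's stats command can fetch only the streams statistics. A reduced argparse sketch of that mutual exclusion (option strings shortened for illustration):

    import argparse

    parser = argparse.ArgumentParser(prog = 'stats')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-g', action = 'store_true', help = 'global stats only')
    group.add_argument('-p', action = 'store_true', help = 'port stats only')
    group.add_argument('-s', action = 'store_true', help = 'streams stats only')

    parser.parse_args(['-s'])           # fine
    # parser.parse_args(['-g', '-s'])   # would exit: the options are mutually exclusive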