summaryrefslogtreecommitdiffstats
path: root/scripts
diff options
context:
space:
mode:
authorHanoh Haim <hhaim@cisco.com>2016-03-10 19:32:29 +0200
committerHanoh Haim <hhaim@cisco.com>2016-03-10 19:32:29 +0200
commit71433c48afeddb37e3c5a8e134e701d71b09f869 (patch)
tree860cab39c447a426287d0c49a4c0da736297ba3b /scripts
parent2be2f7e96be26fbe6dd6763f2ec97fb248abb330 (diff)
parentf24d22eb359753255527430cb8a8b759a424a0df (diff)
merge doc
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/automation/regression/aggregate_results.py115
-rw-r--r--scripts/automation/regression/functional_tests/config.yaml (renamed from scripts/automation/regression/unit_tests/functional_tests/config.yaml)0
-rwxr-xr-xscripts/automation/regression/functional_tests/functional_general_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/functional_general_test.py)0
-rw-r--r--scripts/automation/regression/functional_tests/golden/basic_imix_golden.cap (renamed from scripts/automation/regression/stl/golden/basic_imix_golden.cap)bin198474 -> 198474 bytes
-rw-r--r--scripts/automation/regression/functional_tests/golden/basic_imix_vm_golden.cap (renamed from scripts/automation/regression/stl/golden/basic_imix_vm_golden.cap)bin316552 -> 316552 bytes
-rw-r--r--scripts/automation/regression/functional_tests/golden/basic_tuple_gen_golden.cap (renamed from scripts/automation/regression/stl/golden/basic_tuple_gen_golden.cap)bin38024 -> 38024 bytes
-rw-r--r--scripts/automation/regression/functional_tests/golden/udp_590.cap (renamed from scripts/automation/regression/stl/golden/udp_590.cap)bin630 -> 630 bytes
-rwxr-xr-xscripts/automation/regression/functional_tests/hltapi_stream_builder_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/hltapi_stream_builder_test.py)0
-rwxr-xr-xscripts/automation/regression/functional_tests/misc_methods_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/misc_methods_test.py)0
-rwxr-xr-xscripts/automation/regression/functional_tests/pkt_bld_general_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/pkt_bld_general_test.py)0
-rwxr-xr-xscripts/automation/regression/functional_tests/platform_cmd_cache_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_cmd_cache_test.py)0
-rwxr-xr-xscripts/automation/regression/functional_tests/platform_cmd_link_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_cmd_link_test.py)0
-rwxr-xr-xscripts/automation/regression/functional_tests/platform_device_cfg_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_device_cfg_test.py)2
-rwxr-xr-xscripts/automation/regression/functional_tests/platform_dual_if_obj_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_dual_if_obj_test.py)0
-rwxr-xr-xscripts/automation/regression/functional_tests/platform_if_manager_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_if_manager_test.py)2
-rwxr-xr-xscripts/automation/regression/functional_tests/platform_if_obj_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/platform_if_obj_test.py)0
-rw-r--r--scripts/automation/regression/functional_tests/scapy_pkt_builder_test.py (renamed from scripts/automation/regression/unit_tests/functional_tests/scapy_pkt_builder_test.py)18
-rw-r--r--scripts/automation/regression/functional_tests/stl_basic_tests.py (renamed from scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py)33
-rwxr-xr-xscripts/automation/regression/functional_unit_tests.py78
-rwxr-xr-xscripts/automation/regression/misc_methods.py45
-rwxr-xr-xscripts/automation/regression/outer_packages.py3
-rw-r--r--scripts/automation/regression/stateful_tests/__init__.py0
-rwxr-xr-xscripts/automation/regression/stateful_tests/tests_exceptions.py (renamed from scripts/automation/regression/unit_tests/tests_exceptions.py)0
-rwxr-xr-xscripts/automation/regression/stateful_tests/trex_general_test.py (renamed from scripts/automation/regression/unit_tests/trex_general_test.py)49
-rwxr-xr-xscripts/automation/regression/stateful_tests/trex_imix_test.py (renamed from scripts/automation/regression/unit_tests/trex_imix_test.py)0
-rwxr-xr-xscripts/automation/regression/stateful_tests/trex_ipv6_test.py (renamed from scripts/automation/regression/unit_tests/trex_ipv6_test.py)0
-rwxr-xr-xscripts/automation/regression/stateful_tests/trex_nat_test.py (renamed from scripts/automation/regression/unit_tests/trex_nat_test.py)0
-rwxr-xr-xscripts/automation/regression/stateful_tests/trex_nbar_test.py (renamed from scripts/automation/regression/unit_tests/trex_nbar_test.py)0
-rwxr-xr-xscripts/automation/regression/stateful_tests/trex_rx_test.py (renamed from scripts/automation/regression/unit_tests/trex_rx_test.py)0
-rwxr-xr-xscripts/automation/regression/stateless_tests/stl_examples_test.py33
-rw-r--r--scripts/automation/regression/stateless_tests/stl_general_test.py68
-rw-r--r--scripts/automation/regression/trex.py83
-rwxr-xr-xscripts/automation/regression/trex_unit_test.py343
-rwxr-xr-xscripts/automation/regression/unit_tests/__init__.py1
-rw-r--r--scripts/automation/trex_control_plane/stl/console/trex_tui.py38
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py85
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_imix.py22
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py18
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py29
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py96
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py550
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py24
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py4
-rwxr-xr-xscripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py10
-rw-r--r--scripts/exp/pcap.pcapbin476 -> 496 bytes
-rw-r--r--scripts/exp/pcap_with_vm.pcapbin2284 -> 2284 bytes
-rw-r--r--scripts/exp/udp_1pkt_pcap.pcapbin784 -> 784 bytes
-rw-r--r--scripts/exp/udp_1pkt_pcap_relative_path.pcapbin252 -> 252 bytes
-rw-r--r--scripts/exp/udp_3pkt_pcap.pcapbin784 -> 784 bytes
-rwxr-xr-xscripts/external_libs/ansi2html/LICENSE674
-rwxr-xr-xscripts/external_libs/ansi2html/README.rst71
-rwxr-xr-xscripts/external_libs/ansi2html/ansi2html/__init__.py2
-rwxr-xr-xscripts/external_libs/ansi2html/ansi2html/converter.py548
-rwxr-xr-xscripts/external_libs/ansi2html/ansi2html/style.py135
-rwxr-xr-xscripts/external_libs/ansi2html/ansi2html/util.py2
-rw-r--r--scripts/stl/flow_stats.py8
56 files changed, 2595 insertions, 594 deletions
diff --git a/scripts/automation/regression/aggregate_results.py b/scripts/automation/regression/aggregate_results.py
index 01f9ff56..31929d50 100755
--- a/scripts/automation/regression/aggregate_results.py
+++ b/scripts/automation/regression/aggregate_results.py
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import xml.etree.ElementTree as ET
+import outer_packages
import argparse
import glob
from pprint import pprint
@@ -9,6 +10,13 @@ import copy
import datetime, time
import cPickle as pickle
import subprocess, shlex
+from ansi2html import Ansi2HTMLConverter
+
+converter = Ansi2HTMLConverter(inline = True)
+convert = converter.convert
+
+def ansi2html(text):
+ return convert(text, full = False)
FUNCTIONAL_CATEGORY = 'Functional' # how to display those categories
ERROR_CATEGORY = 'Error'
@@ -27,9 +35,9 @@ def is_functional_test_name(testname):
#if testname.startswith(('platform_', 'misc_methods_', 'vm_', 'payload_gen_', 'pkt_builder_')):
# return True
#return False
- if testname.startswith('unit_tests.'):
- return False
- return True
+ if testname.startswith('functional_tests.'):
+ return True
+ return False
def is_good_status(text):
return text in ('Successful', 'Fixed', 'Passed', 'True', 'Pass')
@@ -56,15 +64,16 @@ def add_th_th(key, value):
# returns <div> with table of tests under given category.
# category - string with name of category
-# hidden - bool, true = <div> is hidden by CSS
# tests - list of tests, derived from aggregated xml report, changed a little to get easily stdout etc.
+# tests_type - stateful or stateless
# category_info_dir - folder to search for category info file
# expanded - bool, false = outputs (stdout etc.) of tests are hidden by CSS
# brief - bool, true = cut some part of tests outputs (useful for errors section with expanded flag)
-def add_category_of_tests(category, tests, hidden = False, category_info_dir = None, expanded = False, brief = False):
+def add_category_of_tests(category, tests, tests_type = None, category_info_dir = None, expanded = False, brief = False):
is_actual_category = category not in (FUNCTIONAL_CATEGORY, ERROR_CATEGORY)
- html_output = '<div style="display:%s;" id="cat_tglr_%s">\n' % ('none' if hidden else 'block', category)
-
+ category_id = '_'.join([category, tests_type]) if tests_type else category
+ category_name = ' '.join([category, tests_type.capitalize()]) if tests_type else category
+ html_output = ''
if is_actual_category:
html_output += '<br><table class="reference">\n'
@@ -80,6 +89,8 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
else:
html_output += add_th_td('Info:', 'No info')
print 'add_category_of_tests: no category info %s' % category_info_file
+ if tests_type:
+ html_output += add_th_td('Tests type:', tests_type.capitalize())
if len(tests):
total_duration = 0.0
for test in tests:
@@ -88,13 +99,13 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
html_output += '</table>\n'
if not len(tests):
- return html_output + pad_tag('<br><font color=red>No tests!</font>', 'b') + '</div>'
+ return html_output + pad_tag('<br><font color=red>No tests!</font>', 'b')
html_output += '<br>\n<table class="reference" width="100%">\n<tr><th align="left">'
if category == ERROR_CATEGORY:
html_output += 'Setup</th><th align="left">Failed tests:'
else:
- html_output += '%s tests:' % category
+ html_output += '%s tests:' % category_name
html_output += '</th><th align="center">Final Result</th>\n<th align="center">Time (s)</th>\n</tr>\n'
for test in tests:
functional_test = is_functional_test_name(test.attrib['name'])
@@ -103,7 +114,7 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
if category == ERROR_CATEGORY:
test_id = ('err_' + test.attrib['classname'] + test.attrib['name']).replace('.', '_')
else:
- test_id = (category + test.attrib['name']).replace('.', '_')
+ test_id = (category_id + test.attrib['name']).replace('.', '_')
if expanded:
html_output += '<tr>\n<th>'
else:
@@ -128,15 +139,21 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
result, result_text = test.attrib.get('result', ('', ''))
if result_text:
+ start_index_errors_stl = result_text.find('STLError: \n******')
+ if start_index_errors_stl > 0:
+ result_text = result_text[start_index_errors_stl:].strip() # cut traceback
start_index_errors = result_text.find('Exception: The test is failed, reasons:')
if start_index_errors > 0:
result_text = result_text[start_index_errors + 10:].strip() # cut traceback
+ result_text = ansi2html(result_text)
result_text = '<b style="color:000080;">%s:</b><br>%s<br><br>' % (result.capitalize(), result_text.replace('\n', '<br>'))
stderr = '' if brief and result_text else test.get('stderr', '')
if stderr:
+ stderr = ansi2html(stderr)
stderr = '<b style="color:000080;"><text color=000080>Stderr</text>:</b><br>%s<br><br>\n' % stderr.replace('\n', '<br>')
stdout = '' if brief and result_text else test.get('stdout', '')
if stdout:
+ stdout = ansi2html(stdout)
if brief: # cut off server logs
stdout = stdout.split('>>>>>>>>>>>>>>>', 1)[0]
stdout = '<b style="color:000080;">Stdout:</b><br>%s<br><br>\n' % stdout.replace('\n', '<br>')
@@ -147,7 +164,7 @@ def add_category_of_tests(category, tests, hidden = False, category_info_dir = N
else:
html_output += '<b style="color:000080;">No output</b></td></tr>'
- html_output += '\n</table>\n</div>'
+ html_output += '\n</table>'
return html_output
style_css = """
@@ -292,35 +309,40 @@ if __name__ == '__main__':
##### aggregate results to 1 single tree
aggregated_root = ET.Element('testsuite')
+ test_types = ('functional', 'stateful', 'stateless')
setups = {}
for job in jobs_list:
- xml_file = '%s/report_%s.xml' % (args.input_dir, job)
- if not os.path.exists(xml_file):
- message = '%s referenced in jobs_list.info does not exist!' % xml_file
+ setups[job] = {}
+ for test_type in test_types:
+ xml_file = '%s/report_%s_%s.xml' % (args.input_dir, job, test_type)
+ if not os.path.exists(xml_file):
+ continue
+ if os.path.basename(xml_file) == os.path.basename(args.output_xmlfile):
+ continue
+ setups[job][test_type] = []
+ print('Processing report: %s.%s' % (job, test_type))
+ tree = ET.parse(xml_file)
+ root = tree.getroot()
+ for key, value in root.attrib.items():
+ if key in aggregated_root.attrib and value.isdigit(): # sum total number of failed tests etc.
+ aggregated_root.attrib[key] = str(int(value) + int(aggregated_root.attrib[key]))
+ else:
+ aggregated_root.attrib[key] = value
+ tests = root.getchildren()
+ if not len(tests): # there should be tests:
+ message = 'No tests in xml %s' % xml_file
+ print message
+ #err.append(message)
+ for test in tests:
+ setups[job][test_type].append(test)
+ test.attrib['name'] = test.attrib['classname'] + '.' + test.attrib['name']
+ test.attrib['classname'] = job
+ aggregated_root.append(test)
+ if not sum([len(x) for x in setups[job].values()]):
+ message = 'No reports from setup %s!' % job
print message
err.append(message)
continue
- if os.path.basename(xml_file) == os.path.basename(args.output_xmlfile):
- continue
- setups[job] = []
- print('Processing setup: %s' % job)
- tree = ET.parse(xml_file)
- root = tree.getroot()
- for key, value in root.attrib.items():
- if key in aggregated_root.attrib and value.isdigit(): # sum total number of failed tests etc.
- aggregated_root.attrib[key] = str(int(value) + int(aggregated_root.attrib[key]))
- else:
- aggregated_root.attrib[key] = value
- tests = root.getchildren()
- if not len(tests): # there should be tests:
- message = 'No tests in xml %s' % xml_file
- print message
- err.append(message)
- for test in tests:
- setups[job].append(test)
- test.attrib['name'] = test.attrib['classname'] + '.' + test.attrib['name']
- test.attrib['classname'] = job
- aggregated_root.append(test)
total_tests_count = int(aggregated_root.attrib.get('tests', 0))
error_tests_count = int(aggregated_root.attrib.get('errors', 0))
@@ -426,7 +448,7 @@ if __name__ == '__main__':
if len(error_tests):
html_output += '\n<button onclick=tgl_cat("cat_tglr_{error}")>{error}</button>'.format(error = ERROR_CATEGORY)
# Setups buttons
- for category, tests in setups.items():
+ for category in setups.keys():
category_arr.append(category)
html_output += '\n<button onclick=tgl_cat("cat_tglr_%s")>%s</button>' % (category_arr[-1], category)
# Functional buttons
@@ -436,13 +458,22 @@ if __name__ == '__main__':
# Adding tests
# Error tests
if len(error_tests):
- html_output += add_category_of_tests(ERROR_CATEGORY, error_tests, hidden=False)
+ html_output += '<div style="display:block;" id="cat_tglr_%s">' % ERROR_CATEGORY
+ html_output += add_category_of_tests(ERROR_CATEGORY, error_tests)
+ html_output += '</div>'
# Setups tests
for category, tests in setups.items():
- html_output += add_category_of_tests(category, tests, hidden=True, category_info_dir=args.input_dir)
+ html_output += '<div style="display:none;" id="cat_tglr_%s">' % category
+ if 'stateful' in tests:
+ html_output += add_category_of_tests(category, tests['stateful'], 'stateful', category_info_dir=args.input_dir)
+ if 'stateless' in tests:
+ html_output += add_category_of_tests(category, tests['stateless'], 'stateless', category_info_dir=(None if 'stateful' in tests else args.input_dir))
+ html_output += '</div>'
# Functional tests
if len(functional_tests):
- html_output += add_category_of_tests(FUNCTIONAL_CATEGORY, functional_tests.values(), hidden=True)
+ html_output += '<div style="display:none;" id="cat_tglr_%s">' % FUNCTIONAL_CATEGORY
+ html_output += add_category_of_tests(FUNCTIONAL_CATEGORY, functional_tests.values())
+ html_output += '</div>'
html_output += '\n\n<script type="text/javascript">\n var category_arr = %s\n' % ['cat_tglr_%s' % x for x in category_arr]
html_output += '''
@@ -524,7 +555,7 @@ if __name__ == '__main__':
for test in error_tests:
if test.attrib['classname'] == category:
failing_category = True
- if failing_category or not len(setups[category]):
+ if failing_category or not len(setups[category]) or not sum([len(x) for x in setups[category]]):
mail_output += '<table class="reference_fail" align=left style="Margin-bottom:10;Margin-right:10;">\n'
else:
mail_output += '<table class="reference" align=left style="Margin-bottom:10;Margin-right:10;">\n'
@@ -549,9 +580,9 @@ if __name__ == '__main__':
if len(error_tests) > 5:
mail_output += '\n<font color=red>More than 5 failed tests, showing brief output.<font>\n<br>'
# show only brief version (cut some info)
- mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, hidden=False, expanded=True, brief=True)
+ mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, expanded=True, brief=True)
else:
- mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, hidden=False, expanded=True)
+ mail_output += add_category_of_tests(ERROR_CATEGORY, error_tests, expanded=True)
else:
mail_output += '<table><tr style="font-size:120;color:green;font-family:arial"><td>☺</td><td style="font-size:20">All passed.</td></tr></table>\n'
mail_output += '\n</body>\n</html>'
diff --git a/scripts/automation/regression/unit_tests/functional_tests/config.yaml b/scripts/automation/regression/functional_tests/config.yaml
index 4f4c7c40..4f4c7c40 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/config.yaml
+++ b/scripts/automation/regression/functional_tests/config.yaml
diff --git a/scripts/automation/regression/unit_tests/functional_tests/functional_general_test.py b/scripts/automation/regression/functional_tests/functional_general_test.py
index 525b58d2..525b58d2 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/functional_general_test.py
+++ b/scripts/automation/regression/functional_tests/functional_general_test.py
diff --git a/scripts/automation/regression/stl/golden/basic_imix_golden.cap b/scripts/automation/regression/functional_tests/golden/basic_imix_golden.cap
index 6ca32299..6ca32299 100644
--- a/scripts/automation/regression/stl/golden/basic_imix_golden.cap
+++ b/scripts/automation/regression/functional_tests/golden/basic_imix_golden.cap
Binary files differ
diff --git a/scripts/automation/regression/stl/golden/basic_imix_vm_golden.cap b/scripts/automation/regression/functional_tests/golden/basic_imix_vm_golden.cap
index 43ae2368..43ae2368 100644
--- a/scripts/automation/regression/stl/golden/basic_imix_vm_golden.cap
+++ b/scripts/automation/regression/functional_tests/golden/basic_imix_vm_golden.cap
Binary files differ
diff --git a/scripts/automation/regression/stl/golden/basic_tuple_gen_golden.cap b/scripts/automation/regression/functional_tests/golden/basic_tuple_gen_golden.cap
index 7d5e7ec2..7d5e7ec2 100644
--- a/scripts/automation/regression/stl/golden/basic_tuple_gen_golden.cap
+++ b/scripts/automation/regression/functional_tests/golden/basic_tuple_gen_golden.cap
Binary files differ
diff --git a/scripts/automation/regression/stl/golden/udp_590.cap b/scripts/automation/regression/functional_tests/golden/udp_590.cap
index 29302f22..29302f22 100644
--- a/scripts/automation/regression/stl/golden/udp_590.cap
+++ b/scripts/automation/regression/functional_tests/golden/udp_590.cap
Binary files differ
diff --git a/scripts/automation/regression/unit_tests/functional_tests/hltapi_stream_builder_test.py b/scripts/automation/regression/functional_tests/hltapi_stream_builder_test.py
index c6b477aa..c6b477aa 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/hltapi_stream_builder_test.py
+++ b/scripts/automation/regression/functional_tests/hltapi_stream_builder_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/misc_methods_test.py b/scripts/automation/regression/functional_tests/misc_methods_test.py
index 096f86d8..096f86d8 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/misc_methods_test.py
+++ b/scripts/automation/regression/functional_tests/misc_methods_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/pkt_bld_general_test.py b/scripts/automation/regression/functional_tests/pkt_bld_general_test.py
index 5f89eaff..5f89eaff 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/pkt_bld_general_test.py
+++ b/scripts/automation/regression/functional_tests/pkt_bld_general_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_cache_test.py b/scripts/automation/regression/functional_tests/platform_cmd_cache_test.py
index 24ccf7a5..24ccf7a5 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_cache_test.py
+++ b/scripts/automation/regression/functional_tests/platform_cmd_cache_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_link_test.py b/scripts/automation/regression/functional_tests/platform_cmd_link_test.py
index 7a31815b..7a31815b 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_cmd_link_test.py
+++ b/scripts/automation/regression/functional_tests/platform_cmd_link_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_device_cfg_test.py b/scripts/automation/regression/functional_tests/platform_device_cfg_test.py
index 890d0cb9..3935a4c5 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_device_cfg_test.py
+++ b/scripts/automation/regression/functional_tests/platform_device_cfg_test.py
@@ -9,7 +9,7 @@ from nose.tools import assert_not_equal
class CDeviceCfg_Test(functional_general_test.CGeneralFunctional_Test):
def setUp(self):
- self.dev_cfg = CDeviceCfg('./unit_tests/functional_tests/config.yaml')
+ self.dev_cfg = CDeviceCfg('./functional_tests/config.yaml')
def test_get_interfaces_cfg(self):
assert_equal (self.dev_cfg.get_interfaces_cfg(),
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_dual_if_obj_test.py b/scripts/automation/regression/functional_tests/platform_dual_if_obj_test.py
index ff54b9ee..ff54b9ee 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_dual_if_obj_test.py
+++ b/scripts/automation/regression/functional_tests/platform_dual_if_obj_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_if_manager_test.py b/scripts/automation/regression/functional_tests/platform_if_manager_test.py
index 7ba6e66e..b09e8d75 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_if_manager_test.py
+++ b/scripts/automation/regression/functional_tests/platform_if_manager_test.py
@@ -9,7 +9,7 @@ from nose.tools import assert_not_equal
class CIfManager_Test(functional_general_test.CGeneralFunctional_Test):
def setUp(self):
- self.dev_cfg = CDeviceCfg('./unit_tests/functional_tests/config.yaml')
+ self.dev_cfg = CDeviceCfg('./functional_tests/config.yaml')
self.if_mng = CIfManager()
# main testing method to check the entire class
diff --git a/scripts/automation/regression/unit_tests/functional_tests/platform_if_obj_test.py b/scripts/automation/regression/functional_tests/platform_if_obj_test.py
index 534d4170..534d4170 100755
--- a/scripts/automation/regression/unit_tests/functional_tests/platform_if_obj_test.py
+++ b/scripts/automation/regression/functional_tests/platform_if_obj_test.py
diff --git a/scripts/automation/regression/unit_tests/functional_tests/scapy_pkt_builder_test.py b/scripts/automation/regression/functional_tests/scapy_pkt_builder_test.py
index 7e2f6271..eaff9530 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/scapy_pkt_builder_test.py
+++ b/scripts/automation/regression/functional_tests/scapy_pkt_builder_test.py
@@ -80,22 +80,22 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
pkt_builder = CScapyTRexPktBuilder(pkt = pkt);
- assert_equal( pkt_builder.is_def_src_mac () ,True)
- assert_equal( pkt_builder.is_def_dst_mac () ,True)
+ assert_equal( pkt_builder.is_default_src_mac () ,True)
+ assert_equal( pkt_builder.is_default_dst_mac () ,True)
pkt = Ether(src="00:00:00:00:00:01")/IP()/UDP()
pkt_builder = CScapyTRexPktBuilder(pkt = pkt);
- assert_equal( pkt_builder.is_def_src_mac (), False)
- assert_equal( pkt_builder.is_def_dst_mac (), True)
+ assert_equal( pkt_builder.is_default_src_mac (), False)
+ assert_equal( pkt_builder.is_default_dst_mac (), True)
pkt = Ether(dst="00:00:00:00:00:01")/IP()/UDP()
pkt_builder = CScapyTRexPktBuilder(pkt = pkt);
- assert_equal( pkt_builder.is_def_src_mac (),True)
- assert_equal( pkt_builder.is_def_dst_mac (),False)
+ assert_equal( pkt_builder.is_default_src_mac (),True)
+ assert_equal( pkt_builder.is_default_dst_mac (),False)
@@ -299,7 +299,7 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
assert_equal(d['instructions'][4]['pkt_offset'],38)
def test_simple_pkt_loader(self):
- p=RawPcapReader("stl/golden/basic_imix_golden.cap")
+ p=RawPcapReader("functional_tests/golden/basic_imix_golden.cap")
print ""
for pkt in p:
print pkt[1]
@@ -308,7 +308,7 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
def test_simple_pkt_loader1(self):
- pkt_builder = CScapyTRexPktBuilder(pkt = "stl/golden/udp_590.cap", build_raw = False);
+ pkt_builder = CScapyTRexPktBuilder(pkt = "functional_tests/golden/udp_590.cap", build_raw = False);
print ""
pkt_builder.dump_as_hex()
r = pkt_builder.pkt_raw
@@ -322,7 +322,7 @@ class CTRexPktBuilderSanitySCapy_Test(pkt_bld_general_test.CGeneralPktBld_Test):
def test_simple_pkt_loader2(self):
- pkt_builder = CScapyTRexPktBuilder(pkt = "stl/golden/basic_imix_golden.cap");
+ pkt_builder = CScapyTRexPktBuilder(pkt = "functional_tests/golden/basic_imix_golden.cap");
assert_equal(pkt_builder.pkt_layers_desc (), "Ethernet:IP:UDP:Raw");
def test_simple_pkt_loader3(self):
diff --git a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py b/scripts/automation/regression/functional_tests/stl_basic_tests.py
index cd653895..ea515401 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
+++ b/scripts/automation/regression/functional_tests/stl_basic_tests.py
@@ -6,9 +6,10 @@ from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import nottest
from nose.plugins.attrib import attr
-from unit_tests.trex_general_test import CTRexScenario
+from trex import CTRexScenario
from dpkt import pcap
from trex_stl_lib import trex_stl_sim
+from trex_stl_lib.trex_stl_streams import STLProfile
import sys
import os
import subprocess
@@ -73,11 +74,11 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
pkts2 = reader2.readpkts()
assert_equal(len(pkts1), len(pkts2))
-
+
for pkt1, pkt2, i in zip(pkts1, pkts2, xrange(1, len(pkts1))):
ts1 = pkt1[0]
ts2 = pkt2[0]
- if abs(ts1-ts2) > 0.000005: # 5 nsec
+ if abs(ts1-ts2) > 0.000005: # 5 nsec
raise AssertionError("TS error: cap files '{0}', '{1}' differ in cap #{2} - '{3}' vs. '{4}'".format(cap1, cap2, i, ts1, ts2))
if pkt1[1] != pkt2[1]:
@@ -102,7 +103,7 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
- def run_py_profile_path (self, profile, options,silent = False, do_no_remove=False,compare =True, test_generated=True):
+ def run_py_profile_path (self, profile, options,silent = False, do_no_remove=False,compare =True, test_generated=True, do_no_remove_generated = False):
output_cap = "a.pcap"
input_file = os.path.join('stl/', profile)
golden_file = os.path.join('exp',os.path.basename(profile).split('.')[0]+'.pcap');
@@ -118,38 +119,42 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
if compare:
self.compare_caps(output_cap, golden_file)
finally:
- if not do_no_remove:
+ if not do_no_remove:
os.unlink(output_cap)
if test_generated:
try:
- from trex_stl_lib.api import STLProfile # if test is skipped, don't load it
generated_filename = input_file.replace('.py', '_GENERATED.py').replace('.yaml', '_GENERATED.py')
if input_file.endswith('.py'):
profile = STLProfile.load_py(input_file)
elif input_file.endswith('.yaml'):
profile = STLProfile.load_yaml(input_file)
profile.dump_to_code(generated_filename)
+
rc = self.run_sim(generated_filename, output_cap, options, silent)
assert_equal(rc, True)
-
+
if compare:
self.compare_caps(output_cap, golden_file)
+ except Exception as e:
+ print e
finally:
- if not do_no_remove:
+ if not do_no_remove_generated:
os.unlink(generated_filename)
+ os.unlink(generated_filename + 'c')
+ if not do_no_remove:
os.unlink(output_cap)
def test_stl_profiles (self):
- p = [
+ p = [
["udp_1pkt_1mac_override.py","-m 1 -l 50",True],
- ["syn_attack.py","-m 1 -l 50",True], # can't compare random now
+ ["syn_attack.py","-m 1 -l 50",True], # can't compare random now
["udp_1pkt_1mac.py","-m 1 -l 50",True],
["udp_1pkt_mac.py","-m 1 -l 50",True],
["udp_1pkt.py","-m 1 -l 50",True],
["udp_1pkt_tuple_gen.py","-m 1 -l 50",True],
- ["udp_rand_len_9k.py","-m 1 -l 50",True], # can't do the compare
+ ["udp_rand_len_9k.py","-m 1 -l 50",True], # can't do the compare
["udp_1pkt_mpls.py","-m 1 -l 50",True],
["udp_1pkt_mpls_vm.py","-m 1 ",True],
["imix.py","-m 1 -l 100",True],
@@ -195,14 +200,14 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
p1 = [ ["udp_1pkt_range_clients_split_garp.py","-m 1 -l 50",True] ]
-
+
for obj in p:
try:
test_generated = obj[3]
except: # check generated if not said otherwise
test_generated = True
- self.run_py_profile_path (obj[0],obj[1],compare =obj[2], test_generated = test_generated, do_no_remove=True)
+ self.run_py_profile_path (obj[0],obj[1],compare =obj[2], test_generated = test_generated, do_no_remove=True, do_no_remove_generated = False)
def test_hlt_profiles (self):
@@ -231,7 +236,7 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
)
for obj in p:
- self.run_py_profile_path (obj[0], obj[1], compare =obj[2], do_no_remove=True)
+ self.run_py_profile_path (obj[0], obj[1], compare =obj[2], do_no_remove=True, do_no_remove_generated = False)
# valgrind tests - this runs in multi thread as it safe (no output)
def test_valgrind_various_profiles (self):
diff --git a/scripts/automation/regression/functional_unit_tests.py b/scripts/automation/regression/functional_unit_tests.py
deleted file mode 100755
index 30e915c4..00000000
--- a/scripts/automation/regression/functional_unit_tests.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/router/bin/python
-
-__copyright__ = "Copyright 2014"
-
-
-
-import os
-import sys
-import outer_packages
-import nose
-from nose.plugins import Plugin
-import logging
-from rednose import RedNose
-import termstyle
-
-
-
-
-def set_report_dir (report_dir):
- if not os.path.exists(report_dir):
- os.mkdir(report_dir)
-
-if __name__ == "__main__":
-
- # setting defaults. By default we run all the test suite
- specific_tests = False
- disableLogCapture = False
- long_test = False
- report_dir = "reports"
-
- nose_argv= sys.argv + ['-s', '-v', '--exe', '--rednose', '--detailed-errors']
-
-# for arg in sys.argv:
-# if 'unit_tests/' in arg:
-# specific_tests = True
-# if 'log-path' in arg:
-# disableLogCapture = True
-# if arg=='--collect-only': # this is a user trying simply to view the available tests. removing xunit param from nose args
-# nose_argv[5:7] = []
-
-
-
- try:
- result = nose.run(argv = nose_argv, addplugins = [RedNose()])
-
- if (result == True):
- print termstyle.green("""
- ..::''''::..
- .;'' ``;.
- :: :: :: ::
- :: :: :: ::
- :: :: :: ::
- :: .:' :: :: `:. ::
- :: : : ::
- :: `:. .:' ::
- `;..``::::''..;'
- ``::,,,,::''
-
- ___ ___ __________
- / _ \/ _ | / __/ __/ /
- / ___/ __ |_\ \_\ \/_/
- /_/ /_/ |_/___/___(_)
-
- """)
- sys.exit(0)
- else:
- sys.exit(-1)
-
- finally:
- pass
-
-
-
-
-
-
-
-
diff --git a/scripts/automation/regression/misc_methods.py b/scripts/automation/regression/misc_methods.py
index 2341b9be..783858e8 100755
--- a/scripts/automation/regression/misc_methods.py
+++ b/scripts/automation/regression/misc_methods.py
@@ -20,29 +20,28 @@ def mix_string (str):
return str.replace(' ', '_').lower()
# executes given command, returns tuple (return_code, stdout, stderr)
-def run_command(cmd):
- print 'Running command:', cmd
- proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- if stdout:
- print 'Stdout:\n%s' % stdout
- if stderr:
- print 'Stderr:\n%s' % stderr
- print 'Return code: %s' % proc.returncode
- return (proc.returncode, stdout, stderr)
-
-
-def run_remote_command(host, passwd, command_string):
- cmd = 'ssh -tt %s \'sudo sh -c "%s"\'' % (host, command_string)
- print 'Trying connection with ssh...'
- return_code, stdout, stderr = run_command(cmd)
- if return_code == 0:
- return (return_code, stdout, stderr)
- elif passwd is not None:
- print 'Trying connection with expect + sshpass.exp...'
- cmd = 'sshpass.exp %s %s root "%s"' % (passwd, host, command_string)
- return_code, stdout, stderr = run_command(cmd)
- return (return_code, stdout, stderr)
+def run_command(cmd, background = False):
+ if background:
+ print 'Running command in background:', cmd
+ with open(os.devnull, 'w') as tempf:
+ subprocess.Popen(shlex.split(cmd), stdin=tempf, stdout=tempf, stderr=tempf)
+ return (None,)*3
+ else:
+ print 'Running command:', cmd
+ proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ if stdout:
+ print 'Stdout:\n%s' % stdout
+ if proc.returncode:
+ if stderr:
+ print 'Stderr:\n%s' % stderr
+ print 'Return code: %s' % proc.returncode
+ return (proc.returncode, stdout, stderr)
+
+
+def run_remote_command(host, command_string, background = False):
+ cmd = 'ssh -tt %s \'sudo sh -ec "%s"\'' % (host, command_string)
+ return run_command(cmd, background)
def generate_intf_lists (interfacesList):
diff --git a/scripts/automation/regression/outer_packages.py b/scripts/automation/regression/outer_packages.py
index 6b7c58f9..f55c247d 100755
--- a/scripts/automation/regression/outer_packages.py
+++ b/scripts/automation/regression/outer_packages.py
@@ -11,7 +11,8 @@ PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(TREX_PATH, 'external_libs'))
PATH_TO_CTRL_PLANE = os.path.abspath(os.path.join(TREX_PATH, 'automation', 'trex_control_plane'))
PATH_STL_API = os.path.abspath(os.path.join(PATH_TO_CTRL_PLANE, 'stl'))
-NIGHTLY_MODULES = ['enum34-1.0.4',
+NIGHTLY_MODULES = ['ansi2html',
+ 'enum34-1.0.4',
'nose-1.3.4',
'rednose-0.4.1',
'progressbar-2.2',
diff --git a/scripts/automation/regression/stateful_tests/__init__.py b/scripts/automation/regression/stateful_tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/scripts/automation/regression/stateful_tests/__init__.py
diff --git a/scripts/automation/regression/unit_tests/tests_exceptions.py b/scripts/automation/regression/stateful_tests/tests_exceptions.py
index 604efcc8..604efcc8 100755
--- a/scripts/automation/regression/unit_tests/tests_exceptions.py
+++ b/scripts/automation/regression/stateful_tests/tests_exceptions.py
diff --git a/scripts/automation/regression/unit_tests/trex_general_test.py b/scripts/automation/regression/stateful_tests/trex_general_test.py
index f367a397..21f5d8aa 100755
--- a/scripts/automation/regression/unit_tests/trex_general_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_general_test.py
@@ -26,6 +26,7 @@ Description:
from nose.plugins import Plugin
from nose.plugins.skip import SkipTest
import trex
+from trex import CTRexScenario
import misc_methods
import sys
import os
@@ -37,50 +38,14 @@ from tests_exceptions import *
from platform_cmd_link import *
import unittest
-
-class CTRexScenario():
- modes = set() # list of modes of this setup: loopback, virtual etc.
- server_logs = False
- is_test_list = False
- is_init = False
- trex_crashed = False
- configuration = None
- trex = None
- router = None
- router_cfg = None
- daemon_log_lines = 0
- setup_name = None
- setup_dir = None
- router_image = None
- trex_version = None
- scripts_path = None
- benchmark = None
- report_dir = 'reports'
- # logger = None
-
-#scenario = CTRexScenario()
-
def setUpModule(module):
-# print ("") # this is to get a newline after the dots
-# print ("setup_module before anything in this file")
-# # ff = CTRexScenario()
-# scenario.configuration = misc_methods.load_complete_config_file('config/config.yaml')
-# scenario.trex = trex.CTRexRunner(scenario.configuration[0], None)
-# scenario.router = CPlatform(scenario.configuration[1], False, scenario.configuration[2])
-# scenario.router.platform.preCheck()
-# print "Done instantiating trex scenario!"
pass
def tearDownModule(module):
-# print ("") # this is to get a newline after the dots
-# scenario.router.platform.postCheck()
-# print ("teardown_module after anything in this file")
pass
-
-
class CTRexGeneral_Test(unittest.TestCase):
- """This class defines the general testcase of the T-Rex traffic generator"""
+ """This class defines the general stateful testcase of the T-Rex traffic generator"""
def __init__ (self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
if CTRexScenario.is_test_list:
@@ -100,7 +65,8 @@ class CTRexGeneral_Test(unittest.TestCase):
self.is_VM = True if 'VM' in self.modes else False
if not CTRexScenario.is_init:
- CTRexScenario.trex_version = self.trex.get_trex_version()
+ if self.trex: # stateful
+ CTRexScenario.trex_version = self.trex.get_trex_version()
if not self.is_loopback:
# initilize the scenario based on received configuration, once per entire testing session
CTRexScenario.router = CPlatform(CTRexScenario.router_cfg['silent_mode'])
@@ -306,12 +272,13 @@ class CTRexGeneral_Test(unittest.TestCase):
test_setup_modes_conflict = self.modes & set(self.unsupported_modes)
if test_setup_modes_conflict:
self.skip("The test can't run with following modes of given setup: %s " % test_setup_modes_conflict)
- if not self.trex.is_idle():
+ if self.trex and not self.trex.is_idle():
print 'Warning: TRex is not idle at setUp, trying to stop it.'
self.trex.force_kill(confirm = False)
if not self.is_loopback:
print ''
- self.router.load_clean_config()
+ if self.trex: # stateful
+ self.router.load_clean_config()
self.router.clear_counters()
self.router.clear_packet_drop_stats()
@@ -324,6 +291,8 @@ class CTRexGeneral_Test(unittest.TestCase):
# def test_isInitialized(self):
# assert CTRexScenario.is_init == True
def tearDown(self):
+ if not self.trex:
+ return
if not self.trex.is_idle():
print 'Warning: TRex is not idle at tearDown, trying to stop it.'
self.trex.force_kill(confirm = False)
diff --git a/scripts/automation/regression/unit_tests/trex_imix_test.py b/scripts/automation/regression/stateful_tests/trex_imix_test.py
index 43dea900..43dea900 100755
--- a/scripts/automation/regression/unit_tests/trex_imix_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_imix_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_ipv6_test.py b/scripts/automation/regression/stateful_tests/trex_ipv6_test.py
index bffb4754..bffb4754 100755
--- a/scripts/automation/regression/unit_tests/trex_ipv6_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_ipv6_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_nat_test.py b/scripts/automation/regression/stateful_tests/trex_nat_test.py
index e7fe5ca5..e7fe5ca5 100755
--- a/scripts/automation/regression/unit_tests/trex_nat_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_nat_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_nbar_test.py b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
index 74d0227b..74d0227b 100755
--- a/scripts/automation/regression/unit_tests/trex_nbar_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_nbar_test.py
diff --git a/scripts/automation/regression/unit_tests/trex_rx_test.py b/scripts/automation/regression/stateful_tests/trex_rx_test.py
index 37b1c722..37b1c722 100755
--- a/scripts/automation/regression/unit_tests/trex_rx_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_rx_test.py
diff --git a/scripts/automation/regression/stateless_tests/stl_examples_test.py b/scripts/automation/regression/stateless_tests/stl_examples_test.py
new file mode 100755
index 00000000..9e4fffc9
--- /dev/null
+++ b/scripts/automation/regression/stateless_tests/stl_examples_test.py
@@ -0,0 +1,33 @@
+#!/router/bin/python
+from stl_general_test import CStlGeneral_Test, CTRexScenario
+import os, sys
+from misc_methods import run_command
+
+class STLExamples_Test(CStlGeneral_Test):
+ """This class defines the IMIX testcase of the T-Rex traffic generator"""
+
+ def setUp(self):
+ CStlGeneral_Test.setUp(self)
+ # examples connect by their own
+ if self.is_connected():
+ CTRexScenario.stl_trex.disconnect()
+
+ @classmethod
+ def tearDownClass(cls):
+ # connect back at end of tests
+ if not cls.is_connected():
+ CTRexScenario.stl_trex.connect()
+
+ def test_stl_examples(self):
+ examples_dir = '../trex_control_plane/stl/examples'
+ examples_to_test = [
+ 'stl_imix.py',
+ ]
+
+ for example in examples_to_test:
+ return_code, stdout, stderr = run_command("sh -c 'cd %s; %s %s -s %s'" % (examples_dir, sys.executable, example, CTRexScenario.configuration.trex['trex_name']))
+ assert return_code == 0, 'example %s failed.\nstdout: %s\nstderr: %s' % (return_code, stdout, stderr)
+
+ def test_stl_examples1(self):
+ print 'in test_stl_examples1'
+
diff --git a/scripts/automation/regression/stateless_tests/stl_general_test.py b/scripts/automation/regression/stateless_tests/stl_general_test.py
new file mode 100644
index 00000000..435c7eea
--- /dev/null
+++ b/scripts/automation/regression/stateless_tests/stl_general_test.py
@@ -0,0 +1,68 @@
+import os, sys
+import unittest
+from trex import CTRexScenario
+from stateful_tests.trex_general_test import CTRexGeneral_Test
+from trex_stl_lib.api import *
+import time
+from nose.tools import nottest
+
+
+class CStlGeneral_Test(CTRexGeneral_Test):
+ """This class defines the general stateless testcase of the T-Rex traffic generator"""
+
+ #once for all tests under CStlGeneral_Test
+ @classmethod
+ def setUpClass(cls):
+ cls.stl_trex = CTRexScenario.stl_trex
+
+ def setUp(self):
+ CTRexGeneral_Test.setUp(self)
+ # check basic requirements, should be verified at test_connectivity, here only skip test
+ if CTRexScenario.stl_init_error:
+ self.skip(CTRexScenario.stl_init_error)
+
+ @staticmethod
+ def connect(timeout = 20):
+ sys.stdout.write('Connecting')
+ for i in range(timeout):
+ try:
+ sys.stdout.write('.')
+ sys.stdout.flush()
+ CTRexScenario.stl_trex.connect()
+ return
+ except:
+ time.sleep(1)
+ CTRexScenario.stl_trex.connect()
+
+ @staticmethod
+ def get_port_count():
+ return CTRexScenario.stl_trex.get_port_count()
+
+ @staticmethod
+ def is_connected():
+ return CTRexScenario.stl_trex.is_connected()
+
+class STLBasic_Test(CStlGeneral_Test):
+ # will run it first explicitly, check connectivity and configure routing
+ @nottest
+ def test_connectivity(self):
+ if not self.is_loopback:
+ CTRexScenario.router.load_clean_config()
+ CTRexScenario.router.configure_basic_interfaces()
+ CTRexScenario.router.config_pbr(mode = "config")
+
+ CTRexScenario.stl_init_error = 'Client could not connect'
+ self.connect()
+ print ''
+ try:
+ stl_map_ports(CTRexScenario.stl_trex)
+ except:
+ pass
+ time.sleep(5)
+ CTRexScenario.stl_init_error = 'Client could not map ports'
+ CTRexScenario.stl_ports_map = stl_map_ports(CTRexScenario.stl_trex)
+ CTRexScenario.stl_init_error = 'Could not determine bidirectional ports'
+ print 'Ports mapping: %s' % CTRexScenario.stl_ports_map
+ if not len(CTRexScenario.stl_ports_map['bi']):
+ raise STLError('No bidirectional ports')
+ CTRexScenario.stl_init_error = None
diff --git a/scripts/automation/regression/trex.py b/scripts/automation/regression/trex.py
index b9fd87ec..8efa41f6 100644
--- a/scripts/automation/regression/trex.py
+++ b/scripts/automation/regression/trex.py
@@ -8,10 +8,35 @@ import re
import signal
import time
from CProgressDisp import TimedProgressBar
-import unit_tests.trex_general_test
-from unit_tests.tests_exceptions import TRexInUseError
+from stateful_tests.tests_exceptions import TRexInUseError
import datetime
+class CTRexScenario:
+ modes = set() # list of modes of this setup: loopback, virtual etc.
+ server_logs = False
+ is_test_list = False
+ is_init = False
+ is_stl_init = False
+ trex_crashed = False
+ configuration = None
+ trex = None
+ stl_trex = None
+ stl_ports_map = None
+ stl_init_error = None
+ router = None
+ router_cfg = None
+ daemon_log_lines = 0
+ setup_name = None
+ setup_dir = None
+ router_image = None
+ trex_version = None
+ scripts_path = None
+ benchmark = None
+ report_dir = 'reports'
+ # logger = None
+ test_types = {'functional_tests': [], 'stateful_tests': [], 'stateless_tests': []}
+ is_copied = False
+
class CTRexRunner:
"""This is an instance for generating a CTRexRunner"""
@@ -67,7 +92,7 @@ class CTRexRunner:
trex_cmd = trex_cmd_str % (cores,
multiplier,
- duration,
+ duration,
self.yaml)
# self.trex_config['trex_latency'])
@@ -81,8 +106,8 @@ class CTRexRunner:
print "\nT-REX COMMAND: ", trex_cmd
- cmd = 'sshpass.exp %s %s root "cd %s; %s > %s"' % (self.trex_config['trex_password'],
- self.trex_config['trex_name'],
+ cmd = 'sshpass.exp %s %s root "cd %s; %s > %s"' % (self.trex_config['trex_password'],
+ self.trex_config['trex_name'],
self.trex_config['trex_version_path'],
trex_cmd,
export_path)
@@ -91,18 +116,18 @@ class CTRexRunner:
def generate_fetch_cmd (self, result_file_full_path="/tmp/trex.txt"):
""" generate_fetch_cmd(self, result_file_full_path) -> str
-
+
Generates a custom command for which will enable to fetch the resutls of the T-Rex run.
Returns a command (string) to be issued on the trex server.
-
+
Example use: fetch_trex_results() - command that will fetch the content from the default log file- /tmp/trex.txt
fetch_trex_results("/tmp/trex_secondary_file.txt") - command that will fetch the content from a custom log file- /tmp/trex_secondary_file.txt
"""
#dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
script_running_dir = os.path.dirname(os.path.realpath(__file__)) # get the current script working directory so that the sshpass could be accessed.
- cmd = script_running_dir + '/sshpass.exp %s %s root "cat %s"' % (self.trex_config['trex_password'],
- self.trex_config['trex_name'],
- result_file_full_path);
+ cmd = script_running_dir + '/sshpass.exp %s %s root "cat %s"' % (self.trex_config['trex_password'],
+ self.trex_config['trex_name'],
+ result_file_full_path);
return cmd;
@@ -153,10 +178,10 @@ class CTRexRunner:
interrupted = True
if ((end_time - start_time) < 2):
raise TRexInUseError ('T-Rex run failed since T-Rex is used by another process, or due to reachability issues')
- else:
- unit_tests.trex_general_test.CTRexScenario.trex_crashed = True
- # results = subprocess.Popen(cmd, stdout = open(os.devnull, 'wb'),
- # shell=True, preexec_fn=os.setsid)
+ else:
+ CTRexScenario.trex_crashed = True
+ # results = subprocess.Popen(cmd, stdout = open(os.devnull, 'wb'),
+ # shell=True, preexec_fn=os.setsid)
except KeyboardInterrupt:
print "\nT-Rex test interrupted by user during traffic generation!!"
results.killpg(results.pid, signal.SIGTERM) # Send the kill signal to all the process groups
@@ -174,7 +199,7 @@ class CTRexRunner:
sys.stderr.flush()
return None
else:
-
+
if tmp_path:
cmd = self.generate_fetch_cmd( tmp_path )#**kwargs)#results_file_path)
else:
@@ -198,7 +223,7 @@ class CTRexResult():
def __init__ (self, file, buffer = None):
self.file = file
self.buffer = buffer
- self.result = {}
+ self.result = {}
def load_file_lines (self):
@@ -230,7 +255,7 @@ class CTRexResult():
Parameters
----------
- key :
+ key :
Key of the self.result dictionary of the TRexResult instance
val : float
Key of the self.result dictionary of the TRexResult instance
@@ -240,8 +265,8 @@ class CTRexResult():
"""
s = _str.strip()
-
- if s[0]=="G":
+
+ if s[0]=="G":
val = val*1E9
elif s[0]=="M":
val = val*1E6
@@ -262,14 +287,14 @@ class CTRexResult():
def parse (self):
""" parse(self) -> None
- Parse the content of the result file from the TRex test and upload the data into
+ Parse the content of the result file from the TRex test and upload the data into
"""
stop_read = False
d = {
- 'total-tx' : 0,
- 'total-rx' : 0,
- 'total-pps' : 0,
- 'total-cps' : 0,
+ 'total-tx' : 0,
+ 'total-rx' : 0,
+ 'total-pps' : 0,
+ 'total-cps' : 0,
'expected-pps' : 0,
'expected-cps' : 0,
@@ -296,7 +321,7 @@ class CTRexResult():
# # continue to parse !! we try the second
# self.result[key] = val #update latest
- # check if we need to stop reading
+ # check if we need to stop reading
match = re.match(".*latency daemon has stopped.*", line)
if match:
stop_read = True
@@ -307,7 +332,7 @@ class CTRexResult():
key = misc_methods.mix_string(match.group(1))
val = float(match.group(4))
if d.has_key(key):
- if stop_read == False:
+ if stop_read == False:
self.update (key, val, match.group(5))
else:
self.result[key] = val # update latest
@@ -321,7 +346,7 @@ class CTRexResult():
key = misc_methods.mix_string(match.group(1))
val = float(match.group(4))
if d.has_key(key):
- if stop_read == False:
+ if stop_read == False:
self.update (key, val, match.group(5))
else:
self.result[key] = val # update latest
@@ -337,7 +362,7 @@ class CTRexResult():
match = re.match("\W*(\w(\w|[-])+)\W*([:]|[=])\W*(OK)(.*)", line)
if match:
key = misc_methods.mix_string(match.group(1))
- val = 0 # valid
+ val = 0 # valid
self.result[key] = val #update latest
continue
@@ -347,7 +372,7 @@ class CTRexResult():
val = float(match.group(3))
if self.result.has_key(key):
if (self.result[key] < val): # update only if larger than previous value
- self.result[key] = val
+ self.result[key] = val
else:
self.result[key] = val
continue
diff --git a/scripts/automation/regression/trex_unit_test.py b/scripts/automation/regression/trex_unit_test.py
index 1d75a8b6..c90d5bdc 100755
--- a/scripts/automation/regression/trex_unit_test.py
+++ b/scripts/automation/regression/trex_unit_test.py
@@ -34,14 +34,16 @@ import CustomLogger
import misc_methods
from rednose import RedNose
import termstyle
-from unit_tests.trex_general_test import CTRexScenario
+from trex import CTRexScenario
from client.trex_client import *
from common.trex_exceptions import *
+from trex_stl_lib.api import *
import trex
import socket
from pprint import pprint
import subprocess
import re
+import time
def check_trex_path(trex_path):
if os.path.isfile('%s/trex_daemon_server' % trex_path):
@@ -60,34 +62,44 @@ def get_trex_path():
raise Exception('Could not determine trex_under_test folder, try setting env.var. TREX_UNDER_TEST')
return latest_build_path
-def _start_stop_trex_remote_server(trex_data, command):
- # start t-rex server as daemon process
- # subprocess.call(["/usr/bin/python", "trex_daemon_server", "restart"], cwd = trex_latest_build)
- misc_methods.run_remote_command(trex_data['trex_name'],
- trex_data['trex_password'],
- command)
-
-def start_trex_remote_server(trex_data, kill_running = False):
- if kill_running:
- (return_code, stdout, stderr) = misc_methods.run_remote_command(trex_data['trex_name'],
- trex_data['trex_password'],
- 'ps -u root --format comm,pid,cmd | grep t-rex-64')
- if stdout:
- for process in stdout.split('\n'):
- try:
- proc_name, pid, full_cmd = re.split('\s+', process, maxsplit=2)
- if proc_name.find('t-rex-64') >= 0:
- print 'Killing remote process: %s' % full_cmd
- misc_methods.run_remote_command(trex_data['trex_name'],
- trex_data['trex_password'],
- 'kill %s' % pid)
- except:
- continue
-
- _start_stop_trex_remote_server(trex_data, DAEMON_START_COMMAND)
-
-def stop_trex_remote_server(trex_data):
- _start_stop_trex_remote_server(trex_data, DAEMON_STOP_COMMAND)
+STATEFUL_STOP_COMMAND = './trex_daemon_server stop; sleep 1; ./trex_daemon_server stop; sleep 1'
+STATEFUL_RUN_COMMAND = 'rm /var/log/trex/trex_daemon_server.log; ./trex_daemon_server start; sleep 2; ./trex_daemon_server show'
+TREX_FILES = ('_t-rex-64', '_t-rex-64-o', '_t-rex-64-debug', '_t-rex-64-debug-o')
+
+def trex_remote_command(trex_data, command, background = False, from_scripts = True):
+ if from_scripts:
+ return misc_methods.run_remote_command(trex_data['trex_name'], ('cd %s; ' % CTRexScenario.scripts_path)+ command, background)
+ return misc_methods.run_remote_command(trex_data['trex_name'], command, background)
+
+# 1 = running, 0 - not running
+def check_trex_running(trex_data):
+ commands = []
+ for filename in TREX_FILES:
+ commands.append('ps -C %s > /dev/null' % filename)
+ return_code, _, _ = trex_remote_command(trex_data, ' || '.join(commands), from_scripts = False)
+ return not return_code
+
+def kill_trex_process(trex_data):
+ return_code, stdout, _ = trex_remote_command(trex_data, 'ps -u root --format comm,pid,cmd | grep _t-rex-64 | grep -v grep || true', from_scripts = False)
+ assert return_code == 0, 'last remote command failed'
+ if stdout:
+ for process in stdout.split('\n'):
+ try:
+ proc_name, pid, full_cmd = re.split('\s+', process, maxsplit=2)
+ if proc_name.find('t-rex-64') >= 0:
+ print 'Killing remote process: %s' % full_cmd
+ trex_remote_command(trex_data, 'kill %s' % pid, from_scripts = False)
+ except:
+ continue
+
+def address_to_ip(address):
+ for i in range(10):
+ try:
+ return socket.gethostbyname(address)
+ except:
+ continue
+ return socket.gethostbyname(address)
+
class CTRexTestConfiguringPlugin(Plugin):
def options(self, parser, env = os.environ):
@@ -105,74 +117,124 @@ class CTRexTestConfiguringPlugin(Plugin):
dest='log_path',
help='Specify path for the tests` log to be saved at. Once applied, logs capturing by nose will be disabled.') # Default is CURRENT/WORKING/PATH/trex_log/trex_log.log')
parser.add_option('--verbose-mode', '--verbose_mode', action="store_true", default = False,
- dest="verbose_mode",
+ dest="verbose_mode",
help="Print RPC command and router commands.")
parser.add_option('--server-logs', '--server_logs', action="store_true", default = False,
- dest="server_logs",
+ dest="server_logs",
help="Print server side (TRex and trex_daemon) logs per test.")
parser.add_option('--kill-running', '--kill_running', action="store_true", default = False,
- dest="kill_running",
+ dest="kill_running",
help="Kills running TRex process on remote server (useful for regression).")
- parser.add_option('--functional', action="store_true", default = False,
- dest="functional",
- help="Don't connect to remote server for runnning daemon (For functional tests).")
- parser.add_option('--copy', action="store_true", default = False,
- dest="copy",
- help="Copy TRex server to temp directory and run from there.")
+ parser.add_option('--func', '--functional', action="store_true", default = False,
+ dest="functional",
+ help="Run functional tests.")
+ parser.add_option('--stl', '--stateless', action="store_true", default = False,
+ dest="stateless",
+ help="Run stateless tests.")
+ parser.add_option('--stf', '--stateful', action="store_true", default = False,
+ dest="stateful",
+ help="Run stateful tests.")
+ parser.add_option('--pkg', action="store",
+ dest="pkg",
+ help="Run with given TRex package. Make sure the path available at server machine.")
+ parser.add_option('--no-ssh', '--no_ssh', action="store_true", default = False,
+ dest="no_ssh",
+ help="Flag to disable any ssh to server machine.")
def configure(self, options, conf):
- self.functional = options.functional
self.collect_only = options.collect_only
- if self.functional or self.collect_only:
+ if self.collect_only:
+ return
+ self.functional = options.functional
+ self.stateless = options.stateless
+ self.stateful = options.stateful
+ self.pkg = options.pkg
+ self.no_ssh = options.no_ssh
+ self.verbose_mode = options.verbose_mode
+ if self.functional and (not self.pkg or self.no_ssh):
return
if CTRexScenario.setup_dir and options.config_path:
raise Exception('Please either define --cfg or use env. variable SETUP_DIR, not both.')
if not options.config_path and CTRexScenario.setup_dir:
options.config_path = CTRexScenario.setup_dir
- if options.config_path:
- self.configuration = misc_methods.load_complete_config_file(os.path.join(options.config_path, 'config.yaml'))
- self.benchmark = misc_methods.load_benchmark_config_file(os.path.join(options.config_path, 'benchmark.yaml'))
- self.enabled = True
- else:
+ if not options.config_path:
raise Exception('Please specify path to config.yaml using --cfg parameter or env. variable SETUP_DIR')
+ self.configuration = misc_methods.load_complete_config_file(os.path.join(options.config_path, 'config.yaml'))
+ self.configuration.trex['trex_name'] = address_to_ip(self.configuration.trex['trex_name']) # translate hostname to ip
+ self.benchmark = misc_methods.load_benchmark_config_file(os.path.join(options.config_path, 'benchmark.yaml'))
+ self.enabled = True
self.modes = self.configuration.trex.get('modes', [])
self.kill_running = options.kill_running
self.load_image = options.load_image
- self.verbose_mode = options.verbose_mode
self.clean_config = False if options.skip_clean_config else True
self.server_logs = options.server_logs
if options.log_path:
self.loggerPath = options.log_path
-
- def begin (self):
- if self.functional or self.collect_only:
- return
# initialize CTRexScenario global testing class, to be used by all tests
CTRexScenario.configuration = self.configuration
CTRexScenario.benchmark = self.benchmark
CTRexScenario.modes = set(self.modes)
CTRexScenario.server_logs = self.server_logs
- # launch TRex daemon on relevant setup
- start_trex_remote_server(self.configuration.trex, self.kill_running)
- CTRexScenario.trex = CTRexClient(trex_host = self.configuration.trex['trex_name'], verbose = self.verbose_mode)
+ def begin (self):
+ if self.pkg and not CTRexScenario.is_copied and not self.no_ssh:
+ new_path = '/tmp/trex-scripts'
+ rsync_template = 'rm -rf /tmp/trex-scripts; mkdir -p %s; rsync -Lc %s /tmp; tar -mxzf /tmp/%s -C %s; mv %s/v*.*/* %s'
+ rsync_command = rsync_template % (new_path, self.pkg, os.path.basename(self.pkg), new_path, new_path, new_path)
+ return_code, stdout, stderr = trex_remote_command(self.configuration.trex, rsync_command, from_scripts = False)
+ if return_code:
+ print 'Failed copying'
+ sys.exit(-1)
+ CTRexScenario.scripts_path = new_path
+ CTRexScenario.is_copied = True
+ if self.functional or self.collect_only:
+ return
+ # launch TRex daemon on relevant setup
+ if not self.no_ssh:
+ if self.kill_running:
+ if self.stateful:
+ trex_remote_command(self.configuration.trex, STATEFUL_STOP_COMMAND)
+ kill_trex_process(self.configuration.trex)
+ time.sleep(1)
+ elif check_trex_running(self.configuration.trex):
+ print 'TRex is already running'
+ sys.exit(-1)
+
+
+ if self.stateful:
+ if not self.no_ssh:
+ trex_remote_command(self.configuration.trex, STATEFUL_RUN_COMMAND)
+ CTRexScenario.trex = CTRexClient(trex_host = self.configuration.trex['trex_name'], verbose = self.verbose_mode)
+ elif self.stateless:
+ if not self.no_ssh:
+ trex_remote_command(self.configuration.trex, './t-rex-64 -i', background = True)
+ CTRexScenario.stl_trex = STLClient(username = 'TRexRegression',
+ server = self.configuration.trex['trex_name'],
+ verbose_level = self.verbose_mode)
if 'loopback' not in self.modes:
- CTRexScenario.router_cfg = dict( config_dict = self.configuration.router,
- forceImageReload = self.load_image,
- silent_mode = not self.verbose_mode,
- forceCleanConfig = self.clean_config,
- tftp_config_dict = self.configuration.tftp )
+ CTRexScenario.router_cfg = dict(config_dict = self.configuration.router,
+ forceImageReload = self.load_image,
+ silent_mode = not self.verbose_mode,
+ forceCleanConfig = self.clean_config,
+ tftp_config_dict = self.configuration.tftp)
try:
CustomLogger.setup_custom_logger('TRexLogger', self.loggerPath)
except AttributeError:
CustomLogger.setup_custom_logger('TRexLogger')
-
+
def finalize(self, result):
if self.functional or self.collect_only:
return
- CTRexScenario.is_init = False
- stop_trex_remote_server(self.configuration.trex)
+ CTRexScenario.is_init = False
+ if self.stateful:
+ CTRexScenario.trex = None
+ if self.stateless:
+ CTRexScenario.trex_stl = None
+ if not self.no_ssh:
+ if self.stateful:
+ trex_remote_command(self.configuration.trex, STATEFUL_STOP_COMMAND)
+ kill_trex_process(self.configuration.trex)
def save_setup_info():
@@ -195,102 +257,111 @@ def set_report_dir (report_dir):
if not os.path.exists(report_dir):
os.mkdir(report_dir)
-
if __name__ == "__main__":
-
+
# setting defaults. By default we run all the test suite
specific_tests = False
- disableLogCapture = False
- long_test = False
- xml_name = 'unit_test.xml'
CTRexScenario.report_dir = 'reports'
- CTRexScenario.scripts_path = get_trex_path()
- COMMON_RUN_COMMAND = 'rm /var/log/trex/trex_daemon_server.log; ./trex_daemon_server start; sleep 2; ./trex_daemon_server show'
- COMMON_STOP_COMMAND = './trex_daemon_server stop; sleep 1; ./trex_daemon_server stop; sleep 1'
- if '--copy' in sys.argv:
- new_path = '/tmp/trex_scripts'
- DAEMON_STOP_COMMAND = 'cd %s; %s' % (new_path, COMMON_STOP_COMMAND)
- DAEMON_START_COMMAND = 'mkdir -p %s; cd %s; %s; rsync -L -az %s/ %s; %s' % (new_path, new_path, COMMON_STOP_COMMAND,
- CTRexScenario.scripts_path, new_path, COMMON_RUN_COMMAND)
- else:
- DAEMON_STOP_COMMAND = 'cd %s; %s' % (CTRexScenario.scripts_path, COMMON_STOP_COMMAND)
- DAEMON_START_COMMAND = DAEMON_STOP_COMMAND + COMMON_RUN_COMMAND
-
+ need_to_copy = False
setup_dir = os.getenv('SETUP_DIR', '').rstrip('/')
CTRexScenario.setup_dir = check_setup_path(setup_dir)
+ CTRexScenario.scripts_path = get_trex_path()
if not CTRexScenario.setup_dir:
CTRexScenario.setup_dir = check_setup_path(os.path.join('setups', setup_dir))
-
- if CTRexScenario.setup_dir:
- CTRexScenario.setup_name = os.path.basename(CTRexScenario.setup_dir)
- xml_name = 'report_%s.xml' % CTRexScenario.setup_name
+
nose_argv = ['', '-s', '-v', '--exe', '--rednose', '--detailed-errors']
if '--collect-only' in sys.argv: # this is a user trying simply to view the available tests. no need xunit.
- CTRexScenario.is_test_list = True
+ CTRexScenario.is_test_list = True
+ xml_arg = ''
else:
- nose_argv += ['--with-xunit', '--xunit-file=%s/%s' % (CTRexScenario.report_dir, xml_name)]
+ xml_name = 'unit_test.xml'
+ if CTRexScenario.setup_dir:
+ CTRexScenario.setup_name = os.path.basename(CTRexScenario.setup_dir)
+ xml_name = 'report_%s.xml' % CTRexScenario.setup_name
+ xml_arg= '--xunit-file=%s/%s' % (CTRexScenario.report_dir, xml_name)
set_report_dir(CTRexScenario.report_dir)
+ sys_args = sys.argv[:]
for i, arg in enumerate(sys.argv):
- if 'unit_tests/' in arg:
- specific_tests = True
- sys.argv[i] = arg[arg.find('unit_tests/'):]
if 'log-path' in arg:
- disableLogCapture = True
-
- nose_argv += sys.argv
-
- # Run all of the unit tests or just the selected ones
- if not specific_tests:
- if '--functional' in sys.argv:
- nose_argv += ['unit_tests/functional_tests']
+ nose_argv += ['--nologcapture']
else:
- nose_argv += ['unit_tests']
- if disableLogCapture:
- nose_argv += ['--nologcapture']
+ for tests_type in CTRexScenario.test_types.keys():
+ if tests_type in arg:
+ specific_tests = True
+ CTRexScenario.test_types[tests_type].append(arg[arg.find(tests_type):])
+ sys_args.remove(arg)
+ if not specific_tests:
+ for key in ('--func', '--functional'):
+ if key in sys_args:
+ CTRexScenario.test_types['functional_tests'].append('functional_tests')
+ sys_args.remove(key)
+ for key in ('--stf', '--stateful'):
+ if key in sys_args:
+ CTRexScenario.test_types['stateful_tests'].append('stateful_tests')
+ sys_args.remove(key)
+ for key in ('--stl', '--stateless'):
+ if key in sys_args:
+ CTRexScenario.test_types['stateless_tests'].append('stateless_tests')
+ sys_args.remove(key)
+ # Run all of the tests or just the selected ones
+ if not sum([len(x) for x in CTRexScenario.test_types.values()]):
+ for key in CTRexScenario.test_types.keys():
+ CTRexScenario.test_types[key].append(key)
+
+ nose_argv += sys_args
+
+ config_plugin = CTRexTestConfiguringPlugin()
+ red_nose = RedNose()
+ result = True
try:
- config_plugin = CTRexTestConfiguringPlugin()
- red_nose = RedNose()
- try:
- result = nose.run(argv = nose_argv, addplugins = [red_nose, config_plugin])
- except socket.error: # handle consecutive tests exception, try once again
- print "TRex connectivity error identified. Possibly due to consecutive nightly runs.\nRetrying..."
- result = nose.run(argv = nose_argv, addplugins = [red_nose, config_plugin])
- finally:
- save_setup_info()
-
- if (result == True and not CTRexScenario.is_test_list):
- print termstyle.green("""
- ..::''''::..
- .;'' ``;.
- :: :: :: ::
- :: :: :: ::
- :: :: :: ::
- :: .:' :: :: `:. ::
- :: : : ::
- :: `:. .:' ::
- `;..``::::''..;'
- ``::,,,,::''
-
- ___ ___ __________
- / _ \/ _ | / __/ __/ /
- / ___/ __ |_\ \_\ \/_/
- /_/ /_/ |_/___/___(_)
-
- """)
- sys.exit(0)
- else:
- sys.exit(-1)
-
+ if len(CTRexScenario.test_types['functional_tests']):
+ additional_args = ['--func'] + CTRexScenario.test_types['functional_tests']
+ if xml_arg:
+ additional_args += ['--with-xunit', xml_arg.replace('.xml', '_functional.xml')]
+ result = nose.run(argv = nose_argv + additional_args, addplugins = [red_nose, config_plugin])
+ if len(CTRexScenario.test_types['stateful_tests']):
+ additional_args = ['--stf'] + CTRexScenario.test_types['stateful_tests']
+ if xml_arg:
+ additional_args += ['--with-xunit', xml_arg.replace('.xml', '_stateful.xml')]
+ result = result and nose.run(argv = nose_argv + additional_args, addplugins = [red_nose, config_plugin])
+ if len(CTRexScenario.test_types['stateless_tests']):
+ additional_args = ['--stl', 'stateless_tests/stl_general_test.py:STLBasic_Test.test_connectivity'] + CTRexScenario.test_types['stateless_tests']
+ if xml_arg:
+ additional_args += ['--with-xunit', xml_arg.replace('.xml', '_stateless.xml')]
+ result = result and nose.run(argv = nose_argv + additional_args, addplugins = [red_nose, config_plugin])
finally:
- pass
-
-
-
+ save_setup_info()
+
+ if (result == True and not CTRexScenario.is_test_list):
+ print termstyle.green("""
+ ..::''''::..
+ .;'' ``;.
+ :: :: :: ::
+ :: :: :: ::
+ :: :: :: ::
+ :: .:' :: :: `:. ::
+ :: : : ::
+ :: `:. .:' ::
+ `;..``::::''..;'
+ ``::,,,,::''
+
+ ___ ___ __________
+ / _ \/ _ | / __/ __/ /
+ / ___/ __ |_\ \_\ \/_/
+ /_/ /_/ |_/___/___(_)
+
+ """)
+ sys.exit(0)
+ sys.exit(-1)
+
+
+
+
+
-
diff --git a/scripts/automation/regression/unit_tests/__init__.py b/scripts/automation/regression/unit_tests/__init__.py
deleted file mode 100755
index 8b137891..00000000
--- a/scripts/automation/regression/unit_tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/scripts/automation/trex_control_plane/stl/console/trex_tui.py b/scripts/automation/trex_control_plane/stl/console/trex_tui.py
index f972b905..02b00b78 100644
--- a/scripts/automation/trex_control_plane/stl/console/trex_tui.py
+++ b/scripts/automation/trex_control_plane/stl/console/trex_tui.py
@@ -8,6 +8,7 @@ from cStringIO import StringIO
from trex_stl_lib.utils.text_opts import *
from trex_stl_lib.utils import text_tables
+from trex_stl_lib import trex_stl_stats
# for STL exceptions
from trex_stl_lib.api import *
@@ -217,6 +218,35 @@ class TrexTUIPort(TrexTUIPanel):
self.stateless_client.clear_stats([self.port_id])
return "port {0}: cleared stats".format(self.port_id)
+
+
+# streams stats
+class TrexTUIStreamsStats(TrexTUIPanel):
+ def __init__ (self, mng):
+ super(TrexTUIStreamsStats, self).__init__(mng, "sstats")
+
+ self.key_actions = OrderedDict()
+
+ self.key_actions['c'] = {'action': self.action_clear, 'legend': 'clear', 'show': True}
+
+
+ def show (self):
+ stats = self.stateless_client._get_formatted_stats(port_id_list = None, stats_mask = trex_stl_stats.SS_COMPAT)
+ # print stats to screen
+ for stat_type, stat_data in stats.iteritems():
+ text_tables.print_table_with_header(stat_data.text_table, stat_type)
+ pass
+
+
+ def get_key_actions (self):
+ return self.key_actions
+
+ def action_clear (self):
+ self.stateless_client.flow_stats.clear_stats()
+
+ return ""
+
+
# log
class TrexTUILog():
def __init__ (self):
@@ -247,10 +277,12 @@ class TrexTUIPanelManager():
self.panels = {}
self.panels['dashboard'] = TrexTUIDashBoard(self)
+ self.panels['sstats'] = TrexTUIStreamsStats(self)
self.key_actions = OrderedDict()
self.key_actions['q'] = {'action': self.action_quit, 'legend': 'quit', 'show': True}
self.key_actions['g'] = {'action': self.action_show_dash, 'legend': 'dashboard', 'show': True}
+ self.key_actions['s'] = {'action': self.action_show_sstats, 'legend': 'streams stats', 'show': True}
for port_id in self.ports:
self.key_actions[str(port_id)] = {'action': self.action_show_port(port_id), 'legend': 'port {0}'.format(port_id), 'show': False}
@@ -352,6 +384,10 @@ class TrexTUIPanelManager():
return action_show_port_x
+ def action_show_sstats (self):
+ self.main_panel = self.panels['sstats']
+ self.init(self.show_log)
+ return ""
# shows a textual top style window
class TrexTUI():
@@ -427,7 +463,7 @@ class TrexTUI():
elif self.state == self.STATE_RECONNECT:
try:
- self.stateless_client.connect("RO")
+ self.stateless_client.connect()
self.state = self.STATE_ACTIVE
except STLError:
self.state = self.STATE_LOST_CONT
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py b/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py
index 3708834e..fa6e67c3 100644
--- a/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_flow_stats.py
@@ -14,12 +14,14 @@ def rx_example (tx_port, rx_port, burst_size):
try:
pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/IP()/'a_payload_example')
-
total_pkts = burst_size
s1 = STLStream(name = 'rx',
packet = pkt,
flow_stats = STLFlowStats(pg_id = 5),
- mode = STLTXSingleBurst(total_pkts = total_pkts, bps_L2 = 250000000))
+ mode = STLTXSingleBurst(total_pkts = total_pkts,
+ #pps = total_pkts
+ percentage = 80
+ ))
# connect to server
c.connect()
@@ -30,38 +32,14 @@ def rx_example (tx_port, rx_port, burst_size):
# add both streams to ports
c.add_streams([s1], ports = [tx_port])
- print "injecting {0} packets on port {1}\n".format(total_pkts, tx_port)
- c.clear_stats()
- c.start(ports = [tx_port])
- c.wait_on_traffic(ports = [tx_port])
-
- # no error check - just an example... should be 5
- flow_stats = c.get_stats()['flow_stats'][5]
-
- tx_pkts = flow_stats['tx_pkts'][tx_port]
- tx_bytes = flow_stats['tx_bytes'][tx_port]
- rx_pkts = flow_stats['rx_pkts'][rx_port]
-
- if tx_pkts != total_pkts:
- print "TX pkts mismatch - got: {0}, expected: {1}".format(tx_pkts, total_pkts)
- passed = False
- return
- else:
- print "TX pkts match - {0}".format(tx_pkts)
+ print "\ninjecting {0} packets on port {1}\n".format(total_pkts, tx_port)
- if tx_bytes != (total_pkts * pkt.get_pkt_len()):
- print "TX bytes mismatch - got: {0}, expected: {1}".format(tx_bytes, (total_pkts * len(pkt)))
- passed = False
- return
- else:
- print "TX bytes match - {0}".format(tx_bytes)
-
- if rx_pkts != total_pkts:
- print "RX pkts mismatch - got: {0}, expected: {1}".format(rx_pkts, total_pkts)
- passed = False
- return
- else:
- print "RX pkts match - {0}".format(rx_pkts)
+ for i in range(0, 10):
+ print "\nStarting iteration: {0}:".format(i)
+ rc = rx_iteration(c, tx_port, rx_port, total_pkts, pkt.get_pkt_len())
+ if not rc:
+ passed = False
+ break
except STLError as e:
@@ -76,7 +54,46 @@ def rx_example (tx_port, rx_port, burst_size):
else:
print "\nTest has failed :-(\n"
+# RX one iteration
+def rx_iteration (c, tx_port, rx_port, total_pkts, pkt_len):
+
+ c.clear_stats()
+
+ c.start(ports = [tx_port])
+ c.wait_on_traffic(ports = [tx_port])
+
+ flow_stats = c.get_stats()['flow_stats'].get(5)
+ if not flow_stats:
+ print "no flow stats available"
+ return False
+
+ tx_pkts = flow_stats['tx_pkts'].get(tx_port, 0)
+ tx_bytes = flow_stats['tx_bytes'].get(tx_port, 0)
+ rx_pkts = flow_stats['rx_pkts'].get(rx_port, 0)
+
+ if tx_pkts != total_pkts:
+ print "TX pkts mismatch - got: {0}, expected: {1}".format(tx_pkts, total_pkts)
+ pprint.pprint(flow_stats)
+ return False
+ else:
+ print "TX pkts match - {0}".format(tx_pkts)
+
+ if tx_bytes != (total_pkts * pkt_len):
+ print "TX bytes mismatch - got: {0}, expected: {1}".format(tx_bytes, (total_pkts * pkt_len))
+ pprint.pprint(flow_stats)
+ return False
+ else:
+ print "TX bytes match - {0}".format(tx_bytes)
+
+ if rx_pkts != total_pkts:
+ print "RX pkts mismatch - got: {0}, expected: {1}".format(rx_pkts, total_pkts)
+ pprint.pprint(flow_stats)
+ return False
+ else:
+ print "RX pkts match - {0}".format(rx_pkts)
+
+ return True
# run the tests
-rx_example(tx_port = 0, rx_port = 3, burst_size = 500000)
+rx_example(tx_port = 1, rx_port = 2, burst_size = 500000)
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_imix.py b/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
index cc7691a3..94165614 100644
--- a/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
@@ -4,6 +4,7 @@ from trex_stl_lib.api import *
import time
import json
from pprint import pprint
+import argparse
# IMIX test
# it maps the ports to sides
@@ -11,11 +12,11 @@ from pprint import pprint
# and attach it to both sides and inject
# at a certain rate for some time
# finally it checks that all packets arrived
-def imix_test ():
+def imix_test (server):
# create client
- c = STLClient()
+ c = STLClient(server = server)
passed = True
@@ -48,7 +49,7 @@ def imix_test ():
# choose rate and start traffic for 10 seconds on 5 mpps
duration = 10
- mult = "5mpps"
+ mult = "30%"
print "Injecting {0} <--> {1} on total rate of '{2}' for {3} seconds".format(dir_0, dir_1, mult, duration)
c.start(ports = (dir_0 + dir_1), mult = mult, duration = duration, total = True)
@@ -78,9 +79,9 @@ def imix_test ():
print "Packets injected from {0}: {1:,}".format(dir_1, dir_1_opackets)
print "\npackets lost from {0} --> {1}: {2:,} pkts".format(dir_0, dir_0, lost_0)
- print "packets lost from {0} --> {1}: {2:,} pkts".format(dir_0, dir_0, lost_0)
+ print "packets lost from {0} --> {1}: {2:,} pkts".format(dir_1, dir_1, lost_1)
- if (lost_0 == 0) and (lost_0 == 0):
+ if (lost_0 <= 0) and (lost_1 <= 0): # less or equal because we might have incoming arps etc.
passed = True
else:
passed = False
@@ -95,10 +96,19 @@ def imix_test ():
if passed:
print "\nTest has passed :-)\n"
+ sys.exit(0)
else:
print "\nTest has failed :-(\n"
+ sys.exit(-1)
+parser = argparse.ArgumentParser(description="Example for TRex Stateless, sending IMIX traffic")
+parser.add_argument('-s', '--server',
+ dest='server',
+ help='Remote trex address',
+ default='127.0.0.1',
+ type = str)
+args = parser.parse_args()
# run the tests
-imix_test()
+imix_test(args.server)
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
index 36103cae..ae6cb497 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
@@ -178,7 +178,8 @@ class CTRexAsyncClient():
self.connected = True
- rc = self.barrier()
+ # sync all stats data as a baseline from the server
+ rc = self.barrier(baseline = True)
if not rc:
self.disconnect()
return rc
@@ -245,9 +246,11 @@ class CTRexAsyncClient():
name = msg['name']
data = msg['data']
type = msg['type']
+ baseline = msg.get('baseline', False)
+
self.raw_snapshot[name] = data
- self.__dispatch(name, type, data)
+ self.__dispatch(name, type, data, baseline)
# closing of socket must be from the same thread
@@ -268,10 +271,11 @@ class CTRexAsyncClient():
return self.raw_snapshot
# dispatch the message to the right place
- def __dispatch (self, name, type, data):
+ def __dispatch (self, name, type, data, baseline):
+
# stats
if name == "trex-global":
- self.event_handler.handle_async_stats_update(data)
+ self.event_handler.handle_async_stats_update(data, baseline)
# events
elif name == "trex-event":
@@ -282,7 +286,7 @@ class CTRexAsyncClient():
self.handle_async_barrier(type, data)
elif name == "flow_stats":
- self.event_handler.handle_async_rx_stats_event(data)
+ self.event_handler.handle_async_rx_stats_event(data, baseline)
else:
pass
@@ -295,7 +299,7 @@ class CTRexAsyncClient():
# block on barrier for async channel
- def barrier(self, timeout = 5):
+ def barrier(self, timeout = 5, baseline = False):
# set a random key
key = random.getrandbits(32)
@@ -307,7 +311,7 @@ class CTRexAsyncClient():
while not self.async_barrier['ack']:
# inject
- rc = self.stateless_client._transmit("publish_now", params = {'key' : key})
+ rc = self.stateless_client._transmit("publish_now", params = {'key' : key, 'baseline': baseline})
if not rc:
return rc
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
index 130fee2c..c7503ab0 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
@@ -155,12 +155,12 @@ class AsyncEventHandler(object):
pass
- def handle_async_rx_stats_event (self, data):
- self.client.flow_stats.update(data)
+ def handle_async_rx_stats_event (self, data, baseline):
+ self.client.flow_stats.update(data, baseline)
# handles an async stats update from the subscriber
- def handle_async_stats_update(self, dump_data):
+ def handle_async_stats_update(self, dump_data, baseline):
global_stats = {}
port_stats = {}
@@ -182,11 +182,11 @@ class AsyncEventHandler(object):
global_stats[key] = value
# update the general object with the snapshot
- self.client.global_stats.update(global_stats)
+ self.client.global_stats.update(global_stats, baseline)
# update all ports
for port_id, data in port_stats.iteritems():
- self.client.ports[port_id].port_stats.update(data)
+ self.client.ports[port_id].port_stats.update(data, baseline)
# dispatcher for server async events (port started, port stopped and etc.)
@@ -458,10 +458,12 @@ class STLClient(object):
self.server_version,
self.ports)
+ self.flow_stats = trex_stl_stats.CRxStats()
+
self.stats_generator = trex_stl_stats.CTRexInfoGenerator(self.global_stats,
- self.ports)
+ self.ports,
+ self.flow_stats)
- self.flow_stats = trex_stl_stats.CRxStats()
############# private functions - used by the class itself ###########
@@ -736,7 +738,7 @@ class STLClient(object):
# clear stats
- def __clear_stats(self, port_id_list, clear_global):
+ def __clear_stats(self, port_id_list, clear_global, clear_flow_stats):
for port_id in port_id_list:
self.ports[port_id].clear_stats()
@@ -744,6 +746,9 @@ class STLClient(object):
if clear_global:
self.global_stats.clear_stats()
+ if clear_flow_stats:
+ self.flow_stats.clear_stats()
+
self.logger.log_cmd("clearing stats on port(s) {0}:".format(port_id_list))
return RC
@@ -825,6 +830,7 @@ class STLClient(object):
self.ports[port_id].invalidate_stats()
self.global_stats.invalidate()
+ self.flow_stats.invalidate()
return RC_OK()
@@ -1697,7 +1703,7 @@ class STLClient(object):
@__api_check(False)
- def clear_stats (self, ports = None, clear_global = True):
+ def clear_stats (self, ports = None, clear_global = True, clear_flow_stats = True):
"""
clear stats on port(s)
@@ -1708,6 +1714,9 @@ class STLClient(object):
clear_global : bool
clear the global stats
+ clear_flow_stats : bool
+ clear the flow stats
+
:raises:
+ :exc:`STLError`
@@ -1721,7 +1730,7 @@ class STLClient(object):
raise STLArgumentError('clear_global', clear_global)
- rc = self.__clear_stats(ports, clear_global)
+ rc = self.__clear_stats(ports, clear_global, clear_flow_stats)
if not rc:
raise STLError(rc)
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
index 59a047ec..eac12ebb 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
@@ -1,7 +1,7 @@
import random
import string
import struct
-import socket
+import socket
import json
import yaml
import binascii
@@ -50,13 +50,13 @@ def mac_str_to_num (mac_buffer):
def is_valid_ipv4(ip_addr):
"""
- return buffer in network order
+ return buffer in network order
"""
if type(ip_addr)==str and len(ip_addr) == 4:
return ip_addr
if type(ip_addr)==int :
- ip_addr = socket.inet_ntoa(struct.pack("!I", ip_addr))
+ ip_addr = socket.inet_ntoa(struct.pack("!I", ip_addr))
try:
return socket.inet_pton(socket.AF_INET, ip_addr)
@@ -81,7 +81,7 @@ def is_valid_ipv6(ipv6_addr):
class CTRexScriptsBase(object):
"""
- VM Script base class
+ VM Script base class
"""
def clone (self):
return copy.deepcopy(self)
@@ -105,7 +105,7 @@ class CTRexScFieldRangeValue(CTRexScFieldRangeBase):
"""
range of field value
"""
- def __init__(self, field_name,
+ def __init__(self, field_name,
field_type,
min_value,
max_value
@@ -135,7 +135,7 @@ class CTRexScIpv4SimpleRange(CTRexScFieldRangeBase):
class CTRexScIpv4TupleGen(CTRexScriptsBase):
"""
- range tuple
+ range tuple
"""
FLAGS_ULIMIT_FLOWS =1
@@ -157,7 +157,7 @@ class CTRexScIpv4TupleGen(CTRexScriptsBase):
class CTRexScTrimPacketSize(CTRexScriptsBase):
"""
- trim packet size. field type is CTRexScFieldRangeBase.FILED_TYPES = ["inc","dec","rand"]
+ trim packet size. field type is CTRexScFieldRangeBase.FILED_TYPES = ["inc","dec","rand"]
"""
def __init__(self,field_type="rand",min_pkt_size=None, max_pkt_size=None):
super(CTRexScTrimPacketSize, self).__init__()
@@ -174,7 +174,7 @@ class CTRexScTrimPacketSize(CTRexScriptsBase):
class CTRexScRaw(CTRexScriptsBase):
"""
- raw instructions
+ raw instructions
"""
def __init__(self,list_of_commands=None,split_by_field=None):
super(CTRexScRaw, self).__init__()
@@ -190,7 +190,7 @@ class CTRexScRaw(CTRexScriptsBase):
################################################################################################
-# VM raw instructions
+# VM raw instructions
################################################################################################
class CTRexVmInsBase(object):
@@ -283,7 +283,7 @@ class CTRexVmInsTupleGen(CTRexVmInsBase):
################################################################################################
-#
+#
class CTRexVmEngine(object):
def __init__(self):
@@ -294,7 +294,7 @@ class CTRexVmEngine(object):
self.ins=[]
self.split_by_var = ''
- # return as json
+ # return as json
def get_json (self):
inst_array = [];
# dump it as dict
@@ -352,7 +352,7 @@ class CTRexScapyPktUtl(object):
def _layer_offset(self, name, cnt = 0):
"""
- return offset of layer e.g 'IP',1 will return offfset of layer ip:1
+ return offset of layer e.g 'IP',1 will return offfset of layer ip:1
"""
save_cnt=cnt
for pkt in self.pkt_iter ():
@@ -367,7 +367,7 @@ class CTRexScapyPktUtl(object):
def layer_offset(self, name, cnt = 0):
"""
- return offset of layer e.g 'IP',1 will return offfset of layer ip:1
+ return offset of layer e.g 'IP',1 will return offfset of layer ip:1
"""
save_cnt=cnt
for pkt in self.pkt_iter ():
@@ -381,7 +381,7 @@ class CTRexScapyPktUtl(object):
def get_field_offet(self, layer, layer_cnt, field_name):
"""
- return offset of layer e.g 'IP',1 will return offfset of layer ip:1
+ return offset of layer e.g 'IP',1 will return offfset of layer ip:1
"""
t=self._layer_offset(layer,layer_cnt);
l_offset=t[1];
@@ -397,7 +397,7 @@ class CTRexScapyPktUtl(object):
def get_layer_offet_by_str(self, layer_des):
"""
- return layer offset by string
+ return layer offset by string
:parameters:
@@ -423,14 +423,14 @@ class CTRexScapyPktUtl(object):
def get_field_offet_by_str(self, field_des):
"""
- return field_des (offset,size) layer:cnt.field
- for example
+ return field_des (offset,size) layer:cnt.field
+ for example
802|1Q.vlan get 802.1Q->valn replace | with .
IP.src
IP:0.src (first IP.src like IP.src)
for example IP:1.src for internal IP
- return (offset, size) as tuple
+ return (offset, size) as tuple
"""
@@ -489,19 +489,19 @@ class CTRexVmDescBase(object):
def get_var_ref (self):
'''
virtual function return a ref var name
- '''
+ '''
return None
def get_var_name(self):
'''
virtual function return the varible name if exists
- '''
+ '''
return None
- def compile(self,parent):
+ def compile(self,parent):
'''
virtual function to take parent than has function name_to_offset
- '''
+ '''
pass;
@@ -565,12 +565,12 @@ class CTRexVmDescFlowVar(CTRexVmDescBase):
class CTRexVmDescFixIpv4(CTRexVmDescBase):
def __init__(self, offset):
super(CTRexVmDescFixIpv4, self).__init__()
- self.offset = offset; # could be a name of offset
+ self.offset = offset; # could be a name of offset
def get_obj (self):
return CTRexVmInsFixIpv4(self.offset);
- def compile(self,parent):
+ def compile(self,parent):
if type(self.offset)==str:
self.offset = parent._pkt_layer_offset(self.offset);
@@ -593,7 +593,7 @@ class CTRexVmDescWrFlowVar(CTRexVmDescBase):
def get_obj (self):
return CTRexVmInsWrFlowVar(self.name,self.pkt_offset+self.offset_fixup,self.add_val,self.is_big)
- def compile(self,parent):
+ def compile(self,parent):
if type(self.pkt_offset)==str:
t=parent._name_to_offset(self.pkt_offset)
self.pkt_offset = t[0]
@@ -627,7 +627,7 @@ class CTRexVmDescWrMaskFlowVar(CTRexVmDescBase):
def get_obj (self):
return CTRexVmInsWrMaskFlowVar(self.name,self.pkt_offset+self.offset_fixup,self.pkt_cast_size,self.mask,self.shift,self.add_value,self.is_big)
- def compile(self,parent):
+ def compile(self,parent):
if type(self.pkt_offset)==str:
t=parent._name_to_offset(self.pkt_offset)
self.pkt_offset = t[0]
@@ -680,7 +680,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
When path_relative_to_profile is a True load pcap file from a path relative to the profile
"""
- def __init__(self, pkt = None, pkt_buffer = None, vm = None, path_relative_to_profile = False, build_raw = True, remove_fcs = True):
+ def __init__(self, pkt = None, pkt_buffer = None, vm = None, path_relative_to_profile = False, build_raw = False, remove_fcs = True):
"""
Instantiate a CTRexPktBuilder object
@@ -737,7 +737,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
"""
super(CScapyTRexPktBuilder, self).__init__()
- self.pkt = None # as input
+ self.pkt = None # as input
self.pkt_raw = None # from raw pcap file
self.vm_scripts = [] # list of high level instructions
self.vm_low_level = None
@@ -745,7 +745,8 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
self.metadata=""
self.path_relative_to_profile = path_relative_to_profile
self.remove_fcs = remove_fcs
-
+ self.is_binary_source = pkt_buffer != None
+
if pkt != None and pkt_buffer != None:
raise CTRexPacketBuildException(-15, "packet builder cannot be provided with both pkt and pkt_buffer")
@@ -778,7 +779,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def get_vm_data(self):
"""
- Dumps the instructions
+ Dumps the instructions
:parameters:
None
@@ -792,7 +793,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
assert self.vm_low_level is not None, 'vm_low_level is None, please use compile()'
- return self.vm_low_level.get_json()
+ return self.vm_low_level.get_json()
def dump_pkt(self, encode = True):
"""
@@ -816,7 +817,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
return {'binary': base64.b64encode(pkt_buf) if encode else pkt_buf,
'meta': self.metadata}
-
+
def dump_pkt_to_pcap(self, file_path):
wrpcap(file_path, self._get_pkt_as_str())
@@ -852,7 +853,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def set_pcap_file (self, pcap_file):
"""
- load raw pcap file into a buffer. load only the first packet
+ load raw pcap file into a buffer. load only the first packet
:parameters:
pcap_file : file_name
@@ -898,7 +899,9 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
else:
raise CTRexPacketBuildException(-14, "bad packet" )
- def is_def_src_mac (self):
+ def is_default_src_mac (self):
+ if self.is_binary_source:
+ return True
p = self.pkt
if isinstance(p, Packet):
if isinstance(p,Ether):
@@ -906,7 +909,9 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
return False
return True
- def is_def_dst_mac (self):
+ def is_default_dst_mac (self):
+ if self.is_binary_source:
+ return True
p = self.pkt
if isinstance(p, Packet):
if isinstance(p,Ether):
@@ -918,7 +923,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
if self.pkt == None and self.pkt_raw == None:
raise CTRexPacketBuildException(-14, "Packet is empty")
-
+
self.vm_low_level = CTRexVmEngine()
# compile the VM
@@ -935,7 +940,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
raise CTRexPacketBuildException(-14, "Packet is empty")
####################################################
- # private
+ # private
def _get_pcap_file_path (self,pcap_file_name):
@@ -944,7 +949,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
f_path = pcap_file_name
else:
if self.path_relative_to_profile:
- p = self._get_path_relative_to_profile () # loader
+ p = self._get_path_relative_to_profile () # loader
if p :
f_path=os.path.abspath(os.path.join(os.path.dirname(p),pcap_file_name))
@@ -960,7 +965,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def _compile_raw (self,obj):
- # make sure we have varibles once
+ # make sure we have varibles once
vars={};
# add it add var to dit
@@ -979,17 +984,17 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
var_name = desc.get_var_ref()
if var_name :
if not vars.has_key(var_name):
- raise CTRexPacketBuildException(-11,("variable %s does not exists ") % (var_name) );
+ raise CTRexPacketBuildException(-11,("variable %s does not exists ") % (var_name) );
desc.compile(self);
for desc in obj.commands:
self.vm_low_level.add_ins(desc.get_obj());
# set split_by_var
- if obj.split_by_field :
+ if obj.split_by_field :
assert type(obj.split_by_field)==str, "type of split by var should be string"
#if not vars.has_key(obj.split_by_field):
- # raise CTRexPacketBuildException(-11,("variable %s does not exists. change split_by_var args ") % (var_name) );
+ # raise CTRexPacketBuildException(-11,("variable %s does not exists. change split_by_var args ") % (var_name) );
self.vm_low_level.split_by_var = obj.split_by_field
@@ -1008,12 +1013,11 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
# regular scapy packet
elif not self.pkt:
# should not reach here
- raise CTRexPacketBuildException(-11, 'empty packet')
+ raise CTRexPacketBuildException(-11, 'empty packet')
if self.remove_fcs and self.pkt.lastlayer().name == 'Padding':
self.pkt.lastlayer().underlayer.remove_payload()
- if len(self.pkt) < 60: # simulator can write padding with non-zeros, set it explicit
- self.pkt /= Padding('\x00' * (60 - len(self.pkt)))
+
self.pkt.build()
self.is_pkt_built = True
@@ -1036,7 +1040,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
return str(self.pkt)
if self.pkt_raw:
return self.pkt_raw
- raise CTRexPacketBuildException(-11, 'empty packet');
+ raise CTRexPacketBuildException(-11, 'empty packet');
def _add_tuple_gen(self,tuple_gen):
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
index c2e318bc..bb877586 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
@@ -13,15 +13,45 @@ import re
import math
import copy
import threading
+import pprint
GLOBAL_STATS = 'g'
PORT_STATS = 'p'
PORT_STATUS = 'ps'
-ALL_STATS_OPTS = {GLOBAL_STATS, PORT_STATS, PORT_STATUS}
+STREAMS_STATS = 's'
+
+ALL_STATS_OPTS = {GLOBAL_STATS, PORT_STATS, PORT_STATUS, STREAMS_STATS}
COMPACT = {GLOBAL_STATS, PORT_STATS}
+SS_COMPAT = {GLOBAL_STATS, STREAMS_STATS}
ExportableStats = namedtuple('ExportableStats', ['raw_data', 'text_table'])
+# deep mrege of dicts dst = src + dst
+def deep_merge_dicts (dst, src):
+ for k, v in src.iteritems():
+ # if not exists - deep copy it
+ if not k in dst:
+ dst[k] = copy.deepcopy(v)
+ else:
+ if isinstance(v, dict):
+ deep_merge_dicts(dst[k], v)
+
+# BPS L1 from pps and BPS L2
+def calc_bps_L1 (bps, pps):
+ if (pps == 0) or (bps == 0):
+ return 0
+
+ factor = bps / (pps * 8.0)
+ return bps * ( 1 + (20 / factor) )
+#
+
+def is_intable (value):
+ try:
+ int(value)
+ return True
+ except ValueError:
+ return False
+
# use to calculate diffs relative to the previous values
# for example, BW
def calculate_diff (samples):
@@ -66,18 +96,23 @@ class CTRexInfoGenerator(object):
STLClient and the ports.
"""
- def __init__(self, global_stats_ref, ports_dict_ref):
+ def __init__(self, global_stats_ref, ports_dict_ref, rx_stats_ref):
self._global_stats = global_stats_ref
self._ports_dict = ports_dict_ref
+ self._rx_stats_ref = rx_stats_ref
def generate_single_statistic(self, port_id_list, statistic_type):
if statistic_type == GLOBAL_STATS:
return self._generate_global_stats()
+
elif statistic_type == PORT_STATS:
return self._generate_port_stats(port_id_list)
- pass
+
elif statistic_type == PORT_STATUS:
return self._generate_port_status(port_id_list)
+
+ elif statistic_type == STREAMS_STATS:
+ return self._generate_streams_stats()
else:
# ignore by returning empty object
return {}
@@ -110,6 +145,90 @@ class CTRexInfoGenerator(object):
return {"global_statistics": ExportableStats(stats_data, stats_table)}
+ def _generate_streams_stats (self):
+
+ streams_keys, sstats_data = self._rx_stats_ref.generate_stats()
+ stream_count = len(streams_keys)
+
+ stats_table = text_tables.TRexTextTable()
+ stats_table.set_cols_align(["l"] + ["r"] * stream_count)
+ stats_table.set_cols_width([10] + [17] * stream_count)
+ stats_table.set_cols_dtype(['t'] + ['t'] * stream_count)
+
+ stats_table.add_rows([[k] + v
+ for k, v in sstats_data.iteritems()],
+ header=False)
+
+ header = ["PG ID"] + [key for key in streams_keys]
+ stats_table.header(header)
+
+ return {"streams_statistics": ExportableStats(sstats_data, stats_table)}
+
+
+
+ per_stream_stats = OrderedDict([("owner", []),
+ ("state", []),
+ ("--", []),
+ ("Tx bps L2", []),
+ ("Tx bps L1", []),
+ ("Tx pps", []),
+ ("Line Util.", []),
+
+ ("---", []),
+ ("Rx bps", []),
+ ("Rx pps", []),
+
+ ("----", []),
+ ("opackets", []),
+ ("ipackets", []),
+ ("obytes", []),
+ ("ibytes", []),
+ ("tx-bytes", []),
+ ("rx-bytes", []),
+ ("tx-pkts", []),
+ ("rx-pkts", []),
+
+ ("-----", []),
+ ("oerrors", []),
+ ("ierrors", []),
+
+ ]
+ )
+
+ total_stats = CPortStats(None)
+
+ for port_obj in relevant_ports:
+ # fetch port data
+ port_stats = port_obj.generate_port_stats()
+
+ total_stats += port_obj.port_stats
+
+ # populate to data structures
+ return_stats_data[port_obj.port_id] = port_stats
+ self.__update_per_field_dict(port_stats, per_field_stats)
+
+ total_cols = len(relevant_ports)
+ header = ["port"] + [port.port_id for port in relevant_ports]
+
+ if (total_cols > 1):
+ self.__update_per_field_dict(total_stats.generate_stats(), per_field_stats)
+ header += ['total']
+ total_cols += 1
+
+ stats_table = text_tables.TRexTextTable()
+ stats_table.set_cols_align(["l"] + ["r"] * total_cols)
+ stats_table.set_cols_width([10] + [17] * total_cols)
+ stats_table.set_cols_dtype(['t'] + ['t'] * total_cols)
+
+ stats_table.add_rows([[k] + v
+ for k, v in per_field_stats.iteritems()],
+ header=False)
+
+ stats_table.header(header)
+
+ return {"streams_statistics": ExportableStats(return_stats_data, stats_table)}
+
+
def _generate_port_stats(self, port_id_list):
relevant_ports = self.__get_relevant_ports(port_id_list)
@@ -131,10 +250,10 @@ class CTRexInfoGenerator(object):
("ipackets", []),
("obytes", []),
("ibytes", []),
- ("tx_bytes", []),
- ("rx_bytes", []),
- ("tx_pkts", []),
- ("rx_pkts", []),
+ ("tx-bytes", []),
+ ("rx-bytes", []),
+ ("tx-pkts", []),
+ ("rx-pkts", []),
("-----", []),
("oerrors", []),
@@ -284,97 +403,94 @@ class CTRexStats(object):
self.last_update_ts = time.time()
self.history = deque(maxlen = 10)
self.lock = threading.Lock()
+ self.has_baseline = False
- def __getitem__(self, item):
- # override this to allow quick and clean access to fields
- if not item in self.latest_stats:
- return "N/A"
-
- # item must exist
- m = re.search('_(([a-z])ps)$', item)
- if m:
- # this is a non-relative item
- unit = m.group(2)
- if unit == "b":
- return self.get(item, format=True, suffix="b/sec")
- elif unit == "p":
- return self.get(item, format=True, suffix="pkt/sec")
- else:
- return self.get(item, format=True, suffix=m.group(1))
-
- m = re.search('^[i|o](a-z+)$', item)
- if m:
- # this is a non-relative item
- type = m.group(1)
- if type == "bytes":
- return self.get_rel(item, format=True, suffix="B")
- elif type == "packets":
- return self.get_rel(item, format=True, suffix="pkts")
- else:
- # do not format with suffix
- return self.get_rel(item, format=True)
-
- # can't match to any known pattern, return N/A
- return "N/A"
+ ######## abstract methods ##########
+ # get stats for user / API
+ def get_stats (self):
+ raise NotImplementedError()
+ # generate format stats (for TUI)
def generate_stats(self):
- # must be implemented by designated classes (such as port/ global stats)
raise NotImplementedError()
- def generate_extended_values (self, snapshot):
+ # called when a snapshot arrives - add more fields
+ def _update (self, snapshot, baseline):
raise NotImplementedError()
- def update(self, snapshot):
-
- # some extended generated values (from base values)
- self.generate_extended_values(snapshot)
-
- # update
- self.latest_stats = snapshot
+ ######## END abstract methods ##########
- with self.lock:
- self.history.append(snapshot)
+ def update(self, snapshot, baseline):
- diff_time = time.time() - self.last_update_ts
+ # no update is valid before baseline
+ if not self.has_baseline and not baseline:
+ return
- # 3 seconds is too much - this is the new reference
- if (not self.reference_stats) or (diff_time > 3):
- self.reference_stats = self.latest_stats
+ # call the underlying method
+ rc = self._update(snapshot)
+ if not rc:
+ return
-
+ # sync one time
+ if not self.has_baseline and baseline:
+ self.reference_stats = copy.deepcopy(self.latest_stats)
+ self.has_baseline = True
- self.last_update_ts = time.time()
+ # save history
+ with self.lock:
+ self.history.append(self.latest_stats)
def clear_stats(self):
- self.reference_stats = self.latest_stats
+ self.reference_stats = copy.deepcopy(self.latest_stats)
def invalidate (self):
self.latest_stats = {}
+
+ def _get (self, src, field, default = None):
+ if isinstance(field, list):
+ # deep
+ value = src
+ for level in field:
+ if not level in value:
+ return default
+ value = value[level]
+ else:
+ # flat
+ if not field in src:
+ return default
+ value = src[field]
+
+ return value
+
def get(self, field, format=False, suffix=""):
- if not field in self.latest_stats:
+ value = self._get(self.latest_stats, field)
+ if value == None:
return "N/A"
- if not format:
- return self.latest_stats[field]
- else:
- return format_num(self.latest_stats[field], suffix)
+
+ return value if not format else format_num(value, suffix)
+
def get_rel(self, field, format=False, suffix=""):
- if not field in self.latest_stats:
+
+ ref_value = self._get(self.reference_stats, field)
+ latest_value = self._get(self.latest_stats, field)
+
+ # latest value is an aggregation - must contain the value
+ if latest_value == None:
return "N/A"
- if not format:
- if not field in self.reference_stats:
- print "REF: " + str(self.reference_stats)
- print "BASE: " + str(self.latest_stats)
+ if ref_value == None:
+ ref_value = 0
+
+ value = latest_value - ref_value
+
+ return value if not format else format_num(value, suffix)
- return (self.latest_stats[field] - self.reference_stats[field])
- else:
- return format_num(self.latest_stats[field] - self.reference_stats[field], suffix)
# get trend for a field
def get_trend (self, field, use_raw = False, percision = 10.0):
@@ -458,18 +574,19 @@ class CGlobalStats(CTRexStats):
return stats
- def generate_extended_values (self, snapshot):
+
+ def _update(self, snapshot):
# L1 bps
bps = snapshot.get("m_tx_bps")
pps = snapshot.get("m_tx_pps")
- if pps > 0:
- avg_pkt_size = bps / (pps * 8.0)
- bps_L1 = bps * ( (avg_pkt_size + 20.0) / avg_pkt_size )
- else:
- bps_L1 = 0.0
+ snapshot['m_tx_bps_L1'] = calc_bps_L1(bps, pps)
+
+
+ # simple...
+ self.latest_stats = snapshot
- snapshot['m_tx_bps_L1'] = bps_L1
+ return True
def generate_stats(self):
@@ -568,20 +685,22 @@ class CPortStats(CTRexStats):
return stats
- def generate_extended_values (self, snapshot):
+
+ def _update(self, snapshot):
+
# L1 bps
bps = snapshot.get("m_total_tx_bps")
pps = snapshot.get("m_total_tx_pps")
- if pps > 0:
- avg_pkt_size = bps / (pps * 8.0)
- bps_L1 = bps * ( (avg_pkt_size + 20.0) / avg_pkt_size )
- else:
- bps_L1 = 0.0
-
+ bps_L1 = calc_bps_L1(bps, pps)
snapshot['m_total_tx_bps_L1'] = bps_L1
snapshot['m_percentage'] = (bps_L1 / self._port_obj.get_speed_bps()) * 100
+ # simple...
+ self.latest_stats = snapshot
+
+ return True
+
def generate_stats(self):
@@ -627,10 +746,10 @@ class CPortStats(CTRexStats):
"obytes" : self.get_rel("obytes"),
"ibytes" : self.get_rel("ibytes"),
- "tx_bytes": self.get_rel("obytes", format = True, suffix = "B"),
- "rx_bytes": self.get_rel("ibytes", format = True, suffix = "B"),
- "tx_pkts": self.get_rel("opackets", format = True, suffix = "pkts"),
- "rx_pkts": self.get_rel("ipackets", format = True, suffix = "pkts"),
+ "tx-bytes": self.get_rel("obytes", format = True, suffix = "B"),
+ "rx-bytes": self.get_rel("ibytes", format = True, suffix = "B"),
+ "tx-pkts": self.get_rel("opackets", format = True, suffix = "pkts"),
+ "rx-pkts": self.get_rel("ipackets", format = True, suffix = "pkts"),
"oerrors" : format_num(self.get_rel("oerrors"),
compact = False,
@@ -643,33 +762,260 @@ class CPortStats(CTRexStats):
}
-class CRxStats(object):
+
+
+# RX stats objects - COMPLEX :-(
+class CRxStats(CTRexStats):
def __init__(self):
- self.flow_stats = {}
+ super(CRxStats, self).__init__()
+
+
+ # calculates a diff between previous snapshot
+ # and current one
+ def calculate_diff_sec (self, current, prev):
+ if not 'ts' in current:
+ raise ValueError("INTERNAL ERROR: RX stats snapshot MUST contain 'ts' field")
+
+ if prev:
+ prev_ts = prev['ts']
+ now_ts = current['ts']
+ diff_sec = (now_ts['value'] - prev_ts['value']) / float(now_ts['freq'])
+ else:
+ diff_sec = 0.0
+
+ return diff_sec
+
+
+ # this is the heart of the complex
+ def process_single_pg (self, current_pg, prev_pg):
+
+ # start with the previous PG
+ output = copy.deepcopy(prev_pg)
+
+ for field in ['tx_pkts', 'tx_bytes', 'rx_pkts', 'rx_bytes']:
+            # is it the first time? (nothing in prev)
+ if not field in output:
+ output[field] = {}
+
+            # does the current snapshot have this field?
+ if field in current_pg:
+ for port, pv in current_pg[field].iteritems():
+ if not is_intable(port):
+ continue
+
+ output[field][port] = pv
+
+ # sum up
+ total = None
+ for port, pv in output[field].iteritems():
+ if not is_intable(port):
+ continue
+ if total is None:
+ total = 0
+ total += pv
+
+ output[field]['total'] = total
+
+
+ return output
+
+
+ def process_snapshot (self, current, prev):
+
+ # final output
+ output = {}
+
+ # copy timestamp field
+ output['ts'] = current['ts']
+
+ # aggregate all the PG ids (previous and current)
+ pg_ids = filter(is_intable, set(prev.keys() + current.keys()))
+
+ for pg_id in pg_ids:
+
+ current_pg = current.get(pg_id, {})
+
+ # first time - we do not care
+ if current_pg.get('first_time'):
+ # new value - ignore history
+ output[pg_id] = self.process_single_pg(current_pg, {})
+ self.reference_stats[pg_id] = {}
+
+ # 'dry' B/W
+ self.calculate_bw_for_pg(output[pg_id])
+ else:
+ # aggregate the two values
+ prev_pg = prev.get(pg_id, {})
+ output[pg_id] = self.process_single_pg(current_pg, prev_pg)
+
+ # calculate B/W
+ diff_sec = self.calculate_diff_sec(current, prev)
+ self.calculate_bw_for_pg(output[pg_id], prev_pg, diff_sec)
+
+
+ return output
+
+
+
+ def calculate_bw_for_pg (self, pg_current, pg_prev = None, diff_sec = 0.0):
+
+ # if no previous values - its None
+ if (pg_prev == None) or not (diff_sec > 0):
+ pg_current['tx_pps'] = None
+ pg_current['tx_bps'] = None
+ pg_current['tx_bps_L1'] = None
+ pg_current['rx_pps'] = None
+ pg_current['rx_bps'] = None
+ return
+
+
+ # read the current values
+ now_tx_pkts = pg_current['tx_pkts']['total']
+ now_tx_bytes = pg_current['tx_bytes']['total']
+ now_rx_pkts = pg_current['rx_pkts']['total']
+ now_rx_bytes = pg_current['rx_bytes']['total']
+
+ # prev values
+ prev_tx_pkts = pg_prev['tx_pkts']['total']
+ prev_tx_bytes = pg_prev['tx_bytes']['total']
+ prev_rx_pkts = pg_prev['rx_pkts']['total']
+ prev_rx_bytes = pg_prev['rx_bytes']['total']
+
+ # prev B/W
+ prev_tx_pps = pg_prev['tx_pps']
+ prev_tx_bps = pg_prev['tx_bps']
+ prev_rx_pps = pg_prev['rx_pps']
+ prev_rx_bps = pg_prev['rx_bps']
+
+
+ #assert(now_tx_pkts >= prev_tx_pkts)
+ pg_current['tx_pps'] = self.calc_pps(prev_tx_pps, now_tx_pkts, prev_tx_pkts, diff_sec)
+ pg_current['tx_bps'] = self.calc_bps(prev_tx_bps, now_tx_bytes, prev_tx_bytes, diff_sec)
+ pg_current['rx_pps'] = self.calc_pps(prev_rx_pps, now_rx_pkts, prev_rx_pkts, diff_sec)
+ pg_current['rx_bps'] = self.calc_bps(prev_rx_bps, now_rx_bytes, prev_rx_bytes, diff_sec)
+
+ if pg_current['tx_bps'] != None and pg_current['tx_pps'] != None:
+ pg_current['tx_bps_L1'] = calc_bps_L1(pg_current['tx_bps'], pg_current['tx_pps'])
+ else:
+ pg_current['tx_bps_L1'] = None
+
+
+ def calc_pps (self, prev_bw, now, prev, diff_sec):
+ return self.calc_bw(prev_bw, now, prev, diff_sec, False)
+
+
+ def calc_bps (self, prev_bw, now, prev, diff_sec):
+ return self.calc_bw(prev_bw, now, prev, diff_sec, True)
+
+
+ def calc_bw (self, prev_bw, now, prev, diff_sec, is_bps):
+ # B/W is not valid when the values are None
+ if (now is None) or (prev is None):
+ return None
+
+ # calculate the B/W for current snapshot
+ current_bw = (now - prev) / diff_sec
+ if is_bps:
+ current_bw *= 8
+
+ # previous B/W is None ? ignore it
+ if prev_bw is None:
+ prev_bw = 0
- def update (self, snapshot):
- self.flow_stats = snapshot
+ return ( (0.5 * prev_bw) + (0.5 * current_bw) )
+
+
+ def _update (self, snapshot):
+
+ # generate a new snapshot
+ new_snapshot = self.process_snapshot(snapshot, self.latest_stats)
+
+ #print new_snapshot
+ # advance
+ self.latest_stats = new_snapshot
+
+
+ return True
+
+
+
+ # for API
def get_stats (self):
stats = {}
- for pg_id, pg_id_data in self.flow_stats.iteritems():
- # ignore non pg ID keys
- try:
- pg_id = int(pg_id)
- except ValueError:
+
+ for pg_id, value in self.latest_stats.iteritems():
+ # skip non ints
+ if not is_intable(pg_id):
continue
- # handle pg id
- stats[pg_id] = {}
- for field, per_port_data in pg_id_data.iteritems():
- stats[pg_id][field] = {}
- for port, value in per_port_data.iteritems():
- stats[pg_id][field][int(port)] = value
+ stats[int(pg_id)] = {}
+ for field in ['tx_pkts', 'tx_bytes', 'rx_pkts']:
+ stats[int(pg_id)][field] = {'total': self.get_rel([pg_id, field, 'total'])}
+
+ for port, pv in value[field].iteritems():
+ try:
+ int(port)
+ except ValueError:
+ continue
+ stats[int(pg_id)][field][int(port)] = self.get_rel([pg_id, field, port])
return stats
+
+ def generate_stats (self):
+
+ # for TUI - maximum 4
+ pg_ids = filter(is_intable, self.latest_stats.keys())[:4]
+ cnt = len(pg_ids)
+
+ formatted_stats = OrderedDict([ ('Tx pps', []),
+ ('Tx bps L2', []),
+ ('Tx bps L1', []),
+ ('---', [''] * cnt),
+ ('Rx pps', []),
+ ('Rx bps', []),
+ ('----', [''] * cnt),
+ ('opackets', []),
+ ('ipackets', []),
+ ('obytes', []),
+ ('ibytes', []),
+ ('-----', [''] * cnt),
+ ('tx_pkts', []),
+ ('rx_pkts', []),
+ ('tx_bytes', []),
+ ('rx_bytes', [])
+ ])
+
+
+
+ # maximum 4
+ for pg_id in pg_ids:
+
+ formatted_stats['Tx pps'].append(self.get([pg_id, 'tx_pps'], format = True, suffix = "pps"))
+ formatted_stats['Tx bps L2'].append(self.get([pg_id, 'tx_bps'], format = True, suffix = "bps"))
+
+ formatted_stats['Tx bps L1'].append(self.get([pg_id, 'tx_bps_L1'], format = True, suffix = "bps"))
+
+ formatted_stats['Rx pps'].append(self.get([pg_id, 'rx_pps'], format = True, suffix = "pps"))
+ formatted_stats['Rx bps'].append(self.get([pg_id, 'rx_bps'], format = True, suffix = "bps"))
+
+ formatted_stats['opackets'].append(self.get_rel([pg_id, 'tx_pkts', 'total']))
+ formatted_stats['ipackets'].append(self.get_rel([pg_id, 'rx_pkts', 'total']))
+ formatted_stats['obytes'].append(self.get_rel([pg_id, 'tx_bytes', 'total']))
+ formatted_stats['ibytes'].append(self.get_rel([pg_id, 'rx_bytes', 'total']))
+ formatted_stats['tx_bytes'].append(self.get_rel([pg_id, 'tx_bytes', 'total'], format = True, suffix = "B"))
+ formatted_stats['rx_bytes'].append(self.get_rel([pg_id, 'rx_bytes', 'total'], format = True, suffix = "B"))
+ formatted_stats['tx_pkts'].append(self.get_rel([pg_id, 'tx_pkts', 'total'], format = True, suffix = "pkts"))
+ formatted_stats['rx_pkts'].append(self.get_rel([pg_id, 'rx_pkts', 'total'], format = True, suffix = "pkts"))
+
+
+
+ return pg_ids, formatted_stats
+
if __name__ == "__main__":
pass
+
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
index e0b25b1d..d582b499 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
@@ -13,21 +13,30 @@ def stl_map_ports (client, ports = None):
# generate streams
base_pkt = CScapyTRexPktBuilder(pkt = Ether()/IP())
-
+
+ # send something initial to calm down switches with arps etc.
+ stream = STLStream(packet = base_pkt,
+ mode = STLTXSingleBurst(pps = 100000, total_pkts = 1))
+ client.add_streams(stream, ports)
+
+ client.start(ports, mult = "50%")
+ client.wait_on_traffic(ports)
+ client.reset(ports)
+
tx_pkts = {}
pkts = 1
for port in ports:
tx_pkts[pkts] = port
stream = STLStream(packet = base_pkt,
- mode = STLTXSingleBurst(pps = 100000, total_pkts = pkts))
+ mode = STLTXSingleBurst(pps = 100000, total_pkts = pkts * 3))
client.add_streams(stream, [port])
- pkts = pkts * 2
+ pkts *= 2
# inject
client.clear_stats()
- client.start(ports, mult = "1mpps")
+ client.start(ports, mult = "50%")
client.wait_on_traffic(ports)
stats = client.get_stats()
@@ -40,7 +49,7 @@ def stl_map_ports (client, ports = None):
# actual mapping
for port in ports:
- ipackets = stats[port]["ipackets"]
+ ipackets = int(round(stats[port]["ipackets"] / 3.0)) # majority out of 3 to clean random noises
table['map'][port] = None
for pkts in tx_pkts.keys():
@@ -48,7 +57,6 @@ def stl_map_ports (client, ports = None):
tx_port = tx_pkts[pkts]
table['map'][port] = tx_port
-
unmapped = list(ports)
while len(unmapped) > 0:
port_a = unmapped.pop(0)
@@ -57,7 +65,9 @@ def stl_map_ports (client, ports = None):
# if unknown - add to the unknown list
if port_b == None:
table['unknown'].append(port_a)
-
+ # self-loop, due to bug?
+ elif port_a == port_b:
+ continue
# bi-directional ports
elif (table['map'][port_b] == port_a):
unmapped.remove(port_b)
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
index b7368767..2a99be8d 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
@@ -329,7 +329,7 @@ class STLStream(object):
if mac_src_override_by_pkt == None:
int_mac_src_override_by_pkt=0
if packet :
- if packet.is_def_src_mac ()==False:
+ if packet.is_default_src_mac ()==False:
int_mac_src_override_by_pkt=1
else:
@@ -338,7 +338,7 @@ class STLStream(object):
if mac_dst_override_mode == None:
int_mac_dst_override_mode = 0;
if packet :
- if packet.is_def_dst_mac ()==False:
+ if packet.is_default_dst_mac ()==False:
int_mac_dst_override_mode=STLStreamDstMAC_PKT
else:
int_mac_dst_override_mode = int(mac_dst_override_mode);
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
index 649c192a..0390ac9c 100755
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
@@ -34,7 +34,8 @@ PROMISCUOUS_SWITCH = 21
GLOBAL_STATS = 50
PORT_STATS = 51
PORT_STATUS = 52
-STATS_MASK = 53
+STREAMS_STATS = 53
+STATS_MASK = 54
STREAMS_MASK = 60
# ALL_STREAMS = 61
@@ -312,6 +313,10 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'action': 'store_true',
'help': "Fetch only port status data"}),
+ STREAMS_STATS: ArgumentPack(['-s'],
+ {'action': 'store_true',
+ 'help': "Fetch only streams stats"}),
+
STREAMS_MASK: ArgumentPack(['--streams'],
{"nargs": '+',
'dest':'streams',
@@ -336,7 +341,8 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'required': True}),
STATS_MASK: ArgumentGroup(MUTEX, [GLOBAL_STATS,
PORT_STATS,
- PORT_STATUS],
+ PORT_STATUS,
+ STREAMS_STATS],
{})
}
diff --git a/scripts/exp/pcap.pcap b/scripts/exp/pcap.pcap
index b13275ee..e2465848 100644
--- a/scripts/exp/pcap.pcap
+++ b/scripts/exp/pcap.pcap
Binary files differ
diff --git a/scripts/exp/pcap_with_vm.pcap b/scripts/exp/pcap_with_vm.pcap
index a7a2d2ba..b9476261 100644
--- a/scripts/exp/pcap_with_vm.pcap
+++ b/scripts/exp/pcap_with_vm.pcap
Binary files differ
diff --git a/scripts/exp/udp_1pkt_pcap.pcap b/scripts/exp/udp_1pkt_pcap.pcap
index f150434b..28900217 100644
--- a/scripts/exp/udp_1pkt_pcap.pcap
+++ b/scripts/exp/udp_1pkt_pcap.pcap
Binary files differ
diff --git a/scripts/exp/udp_1pkt_pcap_relative_path.pcap b/scripts/exp/udp_1pkt_pcap_relative_path.pcap
index 56ae1bac..2d8b777c 100644
--- a/scripts/exp/udp_1pkt_pcap_relative_path.pcap
+++ b/scripts/exp/udp_1pkt_pcap_relative_path.pcap
Binary files differ
diff --git a/scripts/exp/udp_3pkt_pcap.pcap b/scripts/exp/udp_3pkt_pcap.pcap
index f9ee2dc4..9e332a23 100644
--- a/scripts/exp/udp_3pkt_pcap.pcap
+++ b/scripts/exp/udp_3pkt_pcap.pcap
Binary files differ
diff --git a/scripts/external_libs/ansi2html/LICENSE b/scripts/external_libs/ansi2html/LICENSE
new file mode 100755
index 00000000..94a9ed02
--- /dev/null
+++ b/scripts/external_libs/ansi2html/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/scripts/external_libs/ansi2html/README.rst b/scripts/external_libs/ansi2html/README.rst
new file mode 100755
index 00000000..eab11608
--- /dev/null
+++ b/scripts/external_libs/ansi2html/README.rst
@@ -0,0 +1,71 @@
+ansi2html
+=========
+
+:Author: Ralph Bean <rbean@redhat.com>
+:Contributor: Robin Schneider <ypid23@aol.de>
+
+.. comment: split here
+
+Convert text with ANSI color codes to HTML or to LaTeX.
+
+.. _pixelbeat: http://www.pixelbeat.org/docs/terminal_colours/
+.. _blackjack: http://www.koders.com/python/fid5D57DD37184B558819D0EE22FCFD67F53078B2A3.aspx
+
+Inspired by and developed off of the work of `pixelbeat`_ and `blackjack`_.
+
+Build Status
+------------
+
+.. |master| image:: https://secure.travis-ci.org/ralphbean/ansi2html.png?branch=master
+ :alt: Build Status - master branch
+ :target: http://travis-ci.org/#!/ralphbean/ansi2html
+
+.. |develop| image:: https://secure.travis-ci.org/ralphbean/ansi2html.png?branch=develop
+ :alt: Build Status - develop branch
+ :target: http://travis-ci.org/#!/ralphbean/ansi2html
+
++----------+-----------+
+| Branch | Status |
++==========+===========+
+| master | |master| |
++----------+-----------+
+| develop | |develop| |
++----------+-----------+
+
+
+Example - Python API
+--------------------
+
+>>> from ansi2html import Ansi2HTMLConverter
+>>> conv = Ansi2HTMLConverter()
+>>> ansi = "".join(sys.stdin.readlines())
+>>> html = conv.convert(ansi)
+
+Example - Shell Usage
+---------------------
+
+::
+
+ $ ls --color=always | ansi2html > directories.html
+ $ sudo tail /var/log/messages | ccze -A | ansi2html > logs.html
+ $ task burndown | ansi2html > burndown.html
+
+See the list of full options with::
+
+ $ ansi2html --help
+
+Get this project:
+-----------------
+
+::
+
+ $ sudo yum install python-ansi2html
+
+Source: http://github.com/ralphbean/ansi2html/
+
+pypi: http://pypi.python.org/pypi/ansi2html/
+
+License
+-------
+
+``ansi2html`` is licensed GPLv3+.
diff --git a/scripts/external_libs/ansi2html/ansi2html/__init__.py b/scripts/external_libs/ansi2html/ansi2html/__init__.py
new file mode 100755
index 00000000..58250b81
--- /dev/null
+++ b/scripts/external_libs/ansi2html/ansi2html/__init__.py
@@ -0,0 +1,2 @@
+from ansi2html.converter import Ansi2HTMLConverter
+__all__ = ['Ansi2HTMLConverter']
diff --git a/scripts/external_libs/ansi2html/ansi2html/converter.py b/scripts/external_libs/ansi2html/ansi2html/converter.py
new file mode 100755
index 00000000..c3e46ce7
--- /dev/null
+++ b/scripts/external_libs/ansi2html/ansi2html/converter.py
@@ -0,0 +1,548 @@
+# encoding: utf-8
+# This file is part of ansi2html
+# Convert ANSI (terminal) colours and attributes to HTML
+# Copyright (C) 2012 Ralph Bean <rbean@redhat.com>
+# Copyright (C) 2013 Sebastian Pipping <sebastian@pipping.org>
+#
+# Inspired by and developed off of the work by pixelbeat and blackjack.
+#
+# This program is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of
+# the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see
+# <http://www.gnu.org/licenses/>.
+
+import re
+import sys
+import optparse
+import pkg_resources
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
+
+from ansi2html.style import get_styles, SCHEME
+import six
+from six.moves import map
+from six.moves import zip
+
+
# SGR (Select Graphic Rendition) parameter codes from the ANSI/ECMA-48
# escape-sequence standard.  _State.adjust() dispatches on these values and
# to_css_classes() turns the non-default ones into CSS class names.
ANSI_FULL_RESET = 0
ANSI_INTENSITY_INCREASED = 1
ANSI_INTENSITY_REDUCED = 2
ANSI_INTENSITY_NORMAL = 22
ANSI_STYLE_ITALIC = 3
ANSI_STYLE_NORMAL = 23
ANSI_BLINK_SLOW = 5
ANSI_BLINK_FAST = 6
ANSI_BLINK_OFF = 25
ANSI_UNDERLINE_ON = 4
ANSI_UNDERLINE_OFF = 24
ANSI_CROSSED_OUT_ON = 9
ANSI_CROSSED_OUT_OFF = 29
ANSI_VISIBILITY_ON = 28
ANSI_VISIBILITY_OFF = 8
# 30-37 / 40-47 pick one of the 8 classic palette colors; 38 / 48 introduce
# an extended (256-color) selection; 39 / 49 restore the default color.
ANSI_FOREGROUND_CUSTOM_MIN = 30
ANSI_FOREGROUND_CUSTOM_MAX = 37
ANSI_FOREGROUND_256 = 38
ANSI_FOREGROUND_DEFAULT = 39
ANSI_BACKGROUND_CUSTOM_MIN = 40
ANSI_BACKGROUND_CUSTOM_MAX = 47
ANSI_BACKGROUND_256 = 48
ANSI_BACKGROUND_DEFAULT = 49
ANSI_NEGATIVE_ON = 7
ANSI_NEGATIVE_OFF = 27


# Minimal LaTeX wrapper for the converted content; interpolated with
# %(title)s and %(content)s by Ansi2HTMLConverter.convert().
# Layout borrowed from http://stackoverflow.com/a/15190498
_latex_template = '''\\documentclass{scrartcl}
\\usepackage[utf8]{inputenc}
\\usepackage{fancyvrb}
\\usepackage[usenames,dvipsnames]{xcolor}
%% \\definecolor{red-sd}{HTML}{7ed2d2}

\\title{%(title)s}

\\fvset{commandchars=\\\\\\{\}}

\\begin{document}

\\begin{Verbatim}
%(content)s
\\end{Verbatim}
\\end{document}
'''

# Full HTML-page wrapper; interpolated with title/style/font_size/content/
# output_encoding by Ansi2HTMLConverter.convert().
_html_template = six.u("""<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=%(output_encoding)s">
<title>%(title)s</title>
<style type="text/css">\n%(style)s\n</style>
</head>
<body class="body_foreground body_background" style="font-size: %(font_size)s;" >
<pre class="ansi2html-content">
%(content)s
</pre>
</body>

</html>
""")
+
class _State(object):
    """Mutable record of the SGR attributes currently in effect.

    One instance is threaded through Ansi2HTMLConverter._apply_regex(); each
    ANSI code updates exactly one attribute, and to_css_classes() renders the
    non-default attributes as a list of CSS class names.
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Return every attribute to its SGR default."""
        self.intensity = ANSI_INTENSITY_NORMAL
        self.style = ANSI_STYLE_NORMAL
        self.blink = ANSI_BLINK_OFF
        self.underline = ANSI_UNDERLINE_OFF
        self.crossedout = ANSI_CROSSED_OUT_OFF
        self.visibility = ANSI_VISIBILITY_ON
        # Colors are (code, extended-parameter) pairs; the parameter is only
        # meaningful for the 256-color codes 38/48.
        self.foreground = (ANSI_FOREGROUND_DEFAULT, None)
        self.background = (ANSI_BACKGROUND_DEFAULT, None)
        self.negative = ANSI_NEGATIVE_OFF

    def adjust(self, ansi_code, parameter=None):
        """Apply a single SGR code, with *parameter* for 256-color codes."""
        simple_groups = (
            ('intensity', (ANSI_INTENSITY_INCREASED, ANSI_INTENSITY_REDUCED,
                           ANSI_INTENSITY_NORMAL)),
            ('style', (ANSI_STYLE_ITALIC, ANSI_STYLE_NORMAL)),
            ('blink', (ANSI_BLINK_SLOW, ANSI_BLINK_FAST, ANSI_BLINK_OFF)),
            ('underline', (ANSI_UNDERLINE_ON, ANSI_UNDERLINE_OFF)),
            ('crossedout', (ANSI_CROSSED_OUT_ON, ANSI_CROSSED_OUT_OFF)),
            ('visibility', (ANSI_VISIBILITY_ON, ANSI_VISIBILITY_OFF)),
            ('negative', (ANSI_NEGATIVE_ON, ANSI_NEGATIVE_OFF)),
        )
        for attr_name, codes in simple_groups:
            if ansi_code in codes:
                setattr(self, attr_name, ansi_code)
                return

        if ansi_code == ANSI_FOREGROUND_256:
            self.foreground = (ansi_code, parameter)
        elif (ANSI_FOREGROUND_CUSTOM_MIN <= ansi_code <= ANSI_FOREGROUND_CUSTOM_MAX
                or ansi_code == ANSI_FOREGROUND_DEFAULT):
            self.foreground = (ansi_code, None)
        elif ansi_code == ANSI_BACKGROUND_256:
            self.background = (ansi_code, parameter)
        elif (ANSI_BACKGROUND_CUSTOM_MIN <= ansi_code <= ANSI_BACKGROUND_CUSTOM_MAX
                or ansi_code == ANSI_BACKGROUND_DEFAULT):
            self.background = (ansi_code, None)
        # Unknown codes are silently ignored.

    def to_css_classes(self):
        """Render every non-default attribute as a CSS class name."""
        classes = []

        non_color_attrs = (
            (self.intensity, ANSI_INTENSITY_NORMAL),
            (self.style, ANSI_STYLE_NORMAL),
            (self.blink, ANSI_BLINK_OFF),
            (self.underline, ANSI_UNDERLINE_OFF),
            (self.crossedout, ANSI_CROSSED_OUT_OFF),
            (self.visibility, ANSI_VISIBILITY_ON),
        )
        for value, default in non_color_attrs:
            if value != default:
                classes.append('ansi%d' % value)

        # Reverse-video swaps foreground and background: the 'inv' prefix (or
        # the inv_* helper classes when the color is default) does the swap.
        inverted = (self.negative == ANSI_NEGATIVE_ON)

        def add_color(color_pair, default, swapped_default_class):
            value, parameter = color_pair
            if value != default:
                prefix = 'inv' if inverted else 'ansi'
                suffix = str(value) if parameter is None \
                    else '%d-%d' % (value, parameter)
                classes.append(prefix + suffix)
            elif inverted:
                classes.append(swapped_default_class)

        add_color(self.foreground, ANSI_FOREGROUND_DEFAULT, 'inv_background')
        add_color(self.background, ANSI_BACKGROUND_DEFAULT, 'inv_foreground')

        return classes
+
+
def linkify(line, latex_mode):
    """Wrap every http(s) URL in *line* in a hyperlink.

    Emits ``\\url{...}`` in LaTeX mode, an ``<a href>`` element otherwise.
    Replacement is plain-text, so a URL occurring twice is wrapped at every
    occurrence.
    """
    found_urls = re.findall(r'https?:\/\/\S+', line)
    for url in found_urls:
        wrapped = ('\\url{%s}' % url) if latex_mode \
            else ('<a href="%s">%s</a>' % (url, url))
        line = line.replace(url, wrapped)
    return line
+
+
+def _needs_extra_newline(text):
+ if not text or text.endswith('\n'):
+ return False
+ return True
+
+
class CursorMoveUp(object):
    # Sentinel yielded by Ansi2HTMLConverter._apply_regex for the "cursor up"
    # escape (CSI ... A); _collapse_cursor reacts to it by discarding the
    # tokens of the preceding output line.
    pass
+
+
class Ansi2HTMLConverter(object):
    """ Convert Ansi color codes to CSS+HTML

    Example:
    >>> conv = Ansi2HTMLConverter()
    >>> ansi = " ".join(sys.stdin.readlines())
    >>> html = conv.convert(ansi)
    """

    def __init__(self,
                 latex=False,
                 inline=False,
                 dark_bg=True,
                 font_size='normal',
                 linkify=False,
                 escaped=True,
                 markup_lines=False,
                 output_encoding='utf-8',
                 scheme='ansi2html',
                 title=''
                 ):
        """Configure the converter.

        :param latex: emit LaTeX markup instead of HTML
        :param inline: attach style="..." attributes instead of CSS classes
        :param dark_bg: render for a dark background
        :param font_size: CSS font-size used by the full-document template
        :param linkify: wrap URLs found in the input in hyperlinks
        :param escaped: escape &, < and > before processing (HTML mode only)
        :param markup_lines: wrap each output line in <span id="line-n">
        :param output_encoding: charset declared in the HTML template
        :param scheme: color palette name (a key of style.SCHEME)
        :param title: document title for the full-document template
        """
        self.latex = latex
        self.inline = inline
        self.dark_bg = dark_bg
        self.font_size = font_size
        self.linkify = linkify
        self.escaped = escaped
        self.markup_lines = markup_lines
        self.output_encoding = output_encoding
        self.scheme = scheme
        self.title = title
        self._attrs = None  # populated by prepare(); attrs() raises until then

        if inline:
            # Index each Rule by its bare class name so inline styles can be
            # looked up while emitting spans.
            self.styles = dict([(item.klass.strip('.'), item) for item in get_styles(self.dark_bg, self.scheme)])

        # CSI escape sequences: ESC '[' <digits/semicolons> <final letter>.
        # BUGFIX: the pattern previously began with '\?' -- almost certainly
        # the unprintable ESC byte (\033) corrupted into '?' in transit --
        # which meant no genuine ANSI escape sequence ever matched.  Restored
        # to ESC.  ('[a-zA-z]' is kept from upstream; it also spans a few
        # punctuation characters, whose sequences _apply_regex simply drops.)
        self.ansi_codes_prog = re.compile('\033\\[' '([\\d;]*)' '([a-zA-z])')

    def apply_regex(self, ansi):
        """Convert *ansi* text to markup and return the combined string."""
        parts = self._apply_regex(ansi)
        parts = self._collapse_cursor(parts)
        parts = list(parts)

        if self.linkify:
            parts = [linkify(part, self.latex) for part in parts]

        combined = "".join(parts)

        # Optionally give every line an addressable anchor (HTML only).
        if self.markup_lines and not self.latex:
            combined = "\n".join([
                """<span id="line-%i">%s</span>""" % (i, line)
                for i, line in enumerate(combined.split('\n'))
            ])

        return combined

    def _apply_regex(self, ansi):
        """Yield alternating text tokens and markup (or CursorMoveUp) tokens.

        Generator: consumes the input once, tracking SGR state in a _State
        and opening/closing one span (or LaTeX group) per state change.
        """
        if self.escaped:
            if self.latex:  # Known Perl function which does this: https://tex.stackexchange.com/questions/34580/escape-character-in-latex/119383#119383
                # LaTeX escaping is not implemented yet; nothing is escaped.
                specials = OrderedDict([
                ])
            else:
                specials = OrderedDict([
                    ('&', '&amp;'),
                    ('<', '&lt;'),
                    ('>', '&gt;'),
                ])
            for pattern, special in specials.items():
                ansi = ansi.replace(pattern, special)

        state = _State()
        inside_span = False
        last_end = 0  # the index of the last end of a code we've seen
        for match in self.ansi_codes_prog.finditer(ansi):
            yield ansi[last_end:match.start()]
            last_end = match.end()

            params, command = match.groups()

            # Only SGR ('m'/'M') and cursor-up ('A') are interpreted; every
            # other escape is swallowed without output.
            if command not in 'mMA':
                continue

            # Special cursor-moving code. The only supported one.
            if command == 'A':
                yield CursorMoveUp
                continue

            try:
                params = list(map(int, params.split(';')))
            except ValueError:
                # Malformed parameter list: treat as a bare reset.
                params = [ANSI_FULL_RESET]

            # Find latest reset marker, skipping over the two extra values
            # that follow a 256-color introducer (38;5;n / 48;5;n).
            last_null_index = None
            skip_after_index = -1
            for i, v in enumerate(params):
                if i <= skip_after_index:
                    continue

                if v == ANSI_FULL_RESET:
                    last_null_index = i
                elif v in (ANSI_FOREGROUND_256, ANSI_BACKGROUND_256):
                    skip_after_index = i + 2

            # Process reset marker, drop everything before
            if last_null_index is not None:
                params = params[last_null_index + 1:]
                if inside_span:
                    inside_span = False
                    if self.latex:
                        yield '}'
                    else:
                        yield '</span>'
                state.reset()

                if not params:
                    continue

            # Turn codes into CSS classes
            skip_after_index = -1
            for i, v in enumerate(params):
                if i <= skip_after_index:
                    continue

                if v in (ANSI_FOREGROUND_256, ANSI_BACKGROUND_256):
                    try:
                        parameter = params[i + 2]
                    except IndexError:
                        # Truncated 256-color sequence: ignore it.
                        continue
                    skip_after_index = i + 2
                else:
                    parameter = None
                state.adjust(v, parameter=parameter)

            if inside_span:
                if self.latex:
                    yield '}'
                else:
                    yield '</span>'
                inside_span = False

            css_classes = state.to_css_classes()
            if not css_classes:
                continue

            if self.inline:
                if self.latex:
                    # LaTeX inline mode only supports the foreground color.
                    style = [self.styles[klass].kwl[0][1] for klass in css_classes if
                             self.styles[klass].kwl[0][0] == 'color']
                    yield '\\textcolor[HTML]{%s}{' % style[0]
                else:
                    style = [self.styles[klass].kw for klass in css_classes if
                             klass in self.styles]
                    yield '<span style="%s">' % "; ".join(style)
            else:
                if self.latex:
                    yield '\\textcolor{%s}{' % " ".join(css_classes)
                else:
                    yield '<span class="%s">' % " ".join(css_classes)
            inside_span = True

        # Trailing text after the final escape, then close any open span.
        yield ansi[last_end:]
        if inside_span:
            if self.latex:
                yield '}'
            else:
                yield '</span>'
            inside_span = False

    def _collapse_cursor(self, parts):
        """ Act on any CursorMoveUp commands by deleting preceding tokens """

        final_parts = []
        for part in parts:

            # Throw out empty string tokens ("")
            if not part:
                continue

            # Go back, deleting every token in the last 'line'
            if part == CursorMoveUp:
                # BUGFIX: guard both pops -- a cursor-up code at the start of
                # the stream (or more ups than lines) used to raise IndexError.
                if final_parts:
                    final_parts.pop()

                while final_parts and '\n' not in final_parts[-1]:
                    final_parts.pop()

                continue

            # Otherwise, just pass this token forward
            final_parts.append(part)

        return final_parts

    def prepare(self, ansi='', ensure_trailing_newline=False):
        """ Load the contents of 'ansi' into this object """

        body = self.apply_regex(ansi)

        if ensure_trailing_newline and _needs_extra_newline(body):
            body += '\n'

        self._attrs = {
            'dark_bg': self.dark_bg,
            'font_size': self.font_size,
            'body': body,
        }

        return self._attrs

    def attrs(self):
        """ Prepare attributes for the template """
        if not self._attrs:
            raise Exception("Method .prepare not yet called.")
        return self._attrs

    def convert(self, ansi, full=True, ensure_trailing_newline=False):
        """Convert *ansi* and return markup.

        With full=True the body is wrapped in the complete HTML (or LaTeX)
        document template; otherwise only the converted body is returned.
        """
        attrs = self.prepare(ansi, ensure_trailing_newline=ensure_trailing_newline)
        if not full:
            return attrs["body"]
        else:
            if self.latex:
                _template = _latex_template
            else:
                _template = _html_template
            return _template % {
                'style': "\n".join(map(str, get_styles(self.dark_bg, self.scheme))),
                'title': self.title,
                'font_size': self.font_size,
                'content': attrs["body"],
                'output_encoding': self.output_encoding,
            }

    def produce_headers(self):
        """Return only the <style> block for this converter's palette."""
        return '<style type="text/css">\n%(style)s\n</style>\n' % {
            'style': "\n".join(map(str, get_styles(self.dark_bg, self.scheme)))
        }
+
+
def main():
    """
    $ ls --color=always | ansi2html > directories.html
    $ sudo tail /var/log/messages | ccze -A | ansi2html > logs.html
    $ task burndown | ansi2html > burndown.html
    """
    # NOTE: the docstring above doubles as the optparse usage text
    # (usage=main.__doc__), so editing it changes the --help output.

    scheme_names = sorted(six.iterkeys(SCHEME))
    version_str = pkg_resources.get_distribution('ansi2html').version
    parser = optparse.OptionParser(
        usage=main.__doc__,
        version="%%prog %s" % version_str)
    parser.add_option(
        "-p", "--partial", dest="partial",
        default=False, action="store_true",
        help="Process lines as them come in. No headers are produced.")
    parser.add_option(
        "-L", "--latex", dest="latex",
        default=False, action="store_true",
        help="Export as LaTeX instead of HTML.")
    parser.add_option(
        "-i", "--inline", dest="inline",
        default=False, action="store_true",
        help="Inline style without headers or template.")
    parser.add_option(
        "-H", "--headers", dest="headers",
        default=False, action="store_true",
        help="Just produce the <style> tag.")
    parser.add_option(
        "-f", '--font-size', dest='font_size', metavar='SIZE',
        default="normal",
        help="Set the global font size in the output.")
    parser.add_option(
        "-l", '--light-background', dest='light_background',
        default=False, action="store_true",
        help="Set output to 'light background' mode.")
    parser.add_option(
        "-a", '--linkify', dest='linkify',
        default=False, action="store_true",
        help="Transform URLs into <a> links.")
    parser.add_option(
        "-u", '--unescape', dest='escaped',
        default=True, action="store_false",
        help="Do not escape XML tags found in the input.")
    parser.add_option(
        "-m", '--markup-lines', dest="markup_lines",
        default=False, action="store_true",
        help="Surround lines with <span id='line-n'>..</span>.")
    parser.add_option(
        '--input-encoding', dest='input_encoding', metavar='ENCODING',
        default='utf-8',
        help="Specify input encoding")
    parser.add_option(
        '--output-encoding', dest='output_encoding', metavar='ENCODING',
        default='utf-8',
        help="Specify output encoding")
    parser.add_option(
        '-s', '--scheme', dest='scheme', metavar='SCHEME',
        default='ansi2html', choices=scheme_names,
        help=("Specify color palette scheme. Default: %%default. Choices: %s"
              % scheme_names))
    parser.add_option(
        '-t', '--title', dest='output_title',
        default='',
        help="Specify output title")

    opts, args = parser.parse_args()

    # Map CLI flags onto the converter's keyword interface.  Note the flag
    # inversion: --light-background clears dark_bg.
    conv = Ansi2HTMLConverter(
        latex=opts.latex,
        inline=opts.inline,
        dark_bg=not opts.light_background,
        font_size=opts.font_size,
        linkify=opts.linkify,
        escaped=opts.escaped,
        markup_lines=opts.markup_lines,
        output_encoding=opts.output_encoding,
        scheme=opts.scheme,
        title=opts.output_title,
    )

    def _read(input_bytes):
        if six.PY3:
            # Python 3's sys.stdin is a text stream, so lines arrive as str
            # already decoded by the interpreter; --input-encoding only takes
            # effect on Python 2's byte-oriented stdin below.
            return input_bytes
        else:
            return input_bytes.decode(opts.input_encoding)

    def _print(output_unicode, end='\n'):
        # Prefer the raw byte buffer when available so --output-encoding is
        # honored regardless of the terminal's configured encoding.
        if hasattr(sys.stdout, 'buffer'):
            output_bytes = (output_unicode + end).encode(opts.output_encoding)
            sys.stdout.buffer.write(output_bytes)
        elif not six.PY3:
            sys.stdout.write((output_unicode + end).encode(opts.output_encoding))
        else:
            sys.stdout.write(output_unicode + end)

    # Produce only the headers and quit
    if opts.headers:
        _print(conv.produce_headers(), end='')
        return

    # --partial and --inline both suppress the full document template.
    full = not bool(opts.partial or opts.inline)
    if six.PY3:
        output = conv.convert("".join(sys.stdin.readlines()), full=full, ensure_trailing_newline=True)
        _print(output, end='')
    else:
        output = conv.convert(six.u("").join(
            map(_read, sys.stdin.readlines())
        ), full=full, ensure_trailing_newline=True)
        _print(output, end='')
diff --git a/scripts/external_libs/ansi2html/ansi2html/style.py b/scripts/external_libs/ansi2html/ansi2html/style.py
new file mode 100755
index 00000000..fe95b966
--- /dev/null
+++ b/scripts/external_libs/ansi2html/ansi2html/style.py
@@ -0,0 +1,135 @@
+# This file is part of ansi2html.
+# Copyright (C) 2012 Kuno Woudt <kuno@frob.nl>
+# Copyright (C) 2013 Sebastian Pipping <sebastian@pipping.org>
+#
+# This program is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of
+# the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see
+# <http://www.gnu.org/licenses/>.
+
+
class Rule(object):
    """One CSS rule: a selector (*klass*) plus declarations from keyword args.

    Keyword names have underscores turned into hyphens (font_weight ->
    font-weight) and are emitted in sorted order, so output is deterministic.

    Attributes:
        kw:  joined declaration string, e.g. 'color: #fff; font-weight: bold'
        kwl: list of (property, value-minus-first-char) pairs; dropping the
             first character strips the leading '#' from hex colors, as the
             LaTeX \\textcolor[HTML]{...} output path requires.
    """

    def __init__(self, klass, **kw):
        self.klass = klass
        ordered_names = sorted(kw.keys())
        declarations = ['%s: %s' % (name.replace('_', '-'), kw[name])
                        for name in ordered_names]
        self.kw = '; '.join(declarations).strip()
        self.kwl = [(name.replace('_', '-'), kw[name][1:])
                    for name in ordered_names]

    def __str__(self):
        return '{0} {{ {1}; }}'.format(self.klass, self.kw)
+
+
def index(r, g, b):
    """Return the 256-color palette index (as a string) for 6x6x6 cube coords."""
    return str(16 + 36 * r + 6 * g + b)
+
+
def color(r, g, b):
    """Return the '#rrggbb' hex color for 6x6x6 cube coords (42 per step)."""
    channels = tuple(42 * c for c in (r, g, b))
    return "#%.2x%.2x%.2x" % channels
+
+
def level(grey):
    """Return the '#rrggbb' hex color for greyscale ramp step *grey* (0-23)."""
    value = (grey * 10) + 8
    return "#%.2x%.2x%.2x" % (value, value, value)
+
+
def index2(grey):
    """Return the 256-color palette index (as a string) for grey step *grey*."""
    return str(grey + 232)
+
# http://en.wikipedia.org/wiki/ANSI_escape_code#Colors
# Each scheme is a 16-entry tuple: the 8 normal colors followed by their
# 8 bright variants, in standard ANSI order.
SCHEME = {
    # black red green brown/yellow blue magenta cyan grey/white
    'ansi2html': (
        "#000316", "#aa0000", "#00aa00", "#aa5500",
        "#0000aa", "#E850A8", "#00aaaa", "#F5F1DE",
        "#7f7f7f", "#ff0000", "#00ff00", "#ffff00",
        "#5c5cff", "#ff00ff", "#00ffff", "#ffffff"),

    'xterm': (
        "#000000", "#cd0000", "#00cd00", "#cdcd00",
        "#0000ee", "#cd00cd", "#00cdcd", "#e5e5e5",
        "#7f7f7f", "#ff0000", "#00ff00", "#ffff00",
        "#5c5cff", "#ff00ff", "#00ffff", "#ffffff"),

    # OS X Terminal has identical normal and bright colors, hence the '* 2'.
    'osx': (
        "#000000", "#c23621", "#25bc24", "#adad27",
        "#492ee1", "#d338d3", "#33bbc8", "#cbcccd") * 2,

    # http://ethanschoonover.com/solarized
    'solarized': (
        "#262626", "#d70000", "#5f8700", "#af8700",
        "#0087ff", "#af005f", "#00afaf", "#e4e4e4",
        "#1c1c1c", "#d75f00", "#585858", "#626262",
        "#808080", "#5f5faf", "#8a8a8a", "#ffffd7"),
    }
+
+
def get_styles(dark_bg=True, scheme='ansi2html'):
    """Return the full list of CSS Rule objects for one palette.

    :param dark_bg: pick foreground/background defaults for a dark background
    :param scheme: a key of SCHEME naming the 16-color palette to use
    """
    rules = [
        Rule('.ansi2html-content', white_space='pre-wrap', word_wrap='break-word', display='inline'),
        Rule('.body_foreground', color=('#000000', '#AAAAAA')[dark_bg]),
        Rule('.body_background', background_color=('#AAAAAA', '#000000')[dark_bg]),
        Rule('.body_foreground > .bold,.bold > .body_foreground, body.body_foreground > pre > .bold',
             color=('#000000', '#FFFFFF')[dark_bg], font_weight=('bold', 'normal')[dark_bg]),
        Rule('.inv_foreground', color=('#000000', '#FFFFFF')[not dark_bg]),
        Rule('.inv_background', background_color=('#AAAAAA', '#000000')[not dark_bg]),
        Rule('.ansi1', font_weight='bold'),
        Rule('.ansi2', font_weight='lighter'),
        Rule('.ansi3', font_style='italic'),
        Rule('.ansi4', text_decoration='underline'),
        Rule('.ansi5', text_decoration='blink'),
        Rule('.ansi6', text_decoration='blink'),
        Rule('.ansi8', visibility='hidden'),
        Rule('.ansi9', text_decoration='line-through'),
    ]

    palette = SCHEME[scheme]

    # Classic 8-color foregrounds (SGR 30-37) and their inverse-video twins.
    for i in range(8):
        rules.append(Rule('.ansi3%s' % i, color=palette[i]))
        rules.append(Rule('.inv3%s' % i, background_color=palette[i]))
    # Classic 8-color backgrounds (SGR 40-47).
    for i in range(8):
        rules.append(Rule('.ansi4%s' % i, background_color=palette[i]))
        rules.append(Rule('.inv4%s' % i, color=palette[i]))

    # The same palette re-exposed through 256-color syntax (38;5;n / 48;5;n).
    for i in range(len(palette)):
        rules.append(Rule('.ansi38-%s' % i, color=palette[i]))
        rules.append(Rule('.inv38-%s' % i, background_color=palette[i]))
    for i in range(len(palette)):
        rules.append(Rule('.ansi48-%s' % i, background_color=palette[i]))
        rules.append(Rule('.inv48-%s' % i, color=palette[i]))

    # 6x6x6 color cube (256-color indexes 16-231).  Loop nesting order
    # (green, red, blue) is preserved so the emitted rule order is stable.
    for g in range(6):
        for r in range(6):
            for b in range(6):
                cube_index = index(r, g, b)
                cube_color = color(r, g, b)
                rules.append(Rule(".ansi38-%s" % cube_index, color=cube_color))
                rules.append(Rule(".inv38-%s" % cube_index, background=cube_color))
                rules.append(Rule(".ansi48-%s" % cube_index, background=cube_color))
                rules.append(Rule(".inv48-%s" % cube_index, color=cube_color))

    # 24-step greyscale ramp (indexes 232-255).
    for grey in range(24):
        grey_index = index2(grey)
        grey_color = level(grey)
        rules.append(Rule('.ansi38-%s' % grey_index, color=grey_color))
        rules.append(Rule('.inv38-%s' % grey_index, background=grey_color))
        rules.append(Rule('.ansi48-%s' % grey_index, background=grey_color))
        rules.append(Rule('.inv48-%s' % grey_index, color=grey_color))

    return rules
diff --git a/scripts/external_libs/ansi2html/ansi2html/util.py b/scripts/external_libs/ansi2html/ansi2html/util.py
new file mode 100755
index 00000000..20ea0441
--- /dev/null
+++ b/scripts/external_libs/ansi2html/ansi2html/util.py
@@ -0,0 +1,2 @@
def read_to_unicode(obj):
    """Read every line from byte-oriented file-like *obj*, decoding as UTF-8."""
    raw_lines = obj.readlines()
    return [raw.decode('utf-8') for raw in raw_lines]
diff --git a/scripts/stl/flow_stats.py b/scripts/stl/flow_stats.py
index 05d7a9f7..e2701d23 100644
--- a/scripts/stl/flow_stats.py
+++ b/scripts/stl/flow_stats.py
@@ -6,8 +6,12 @@ class STLS1(object):
def get_streams (self, direction = 0):
return [STLStream(packet = STLPktBuilder(pkt ="stl/yaml/udp_64B_no_crc.pcap"), # path relative to pwd
- mode = STLTXCont(pps=10),
- flow_stats = STLFlowStats(pg_id = 7))
+ mode = STLTXCont(pps=1000),
+ flow_stats = STLFlowStats(pg_id = 7)),
+
+ STLStream(packet = STLPktBuilder(pkt ="stl/yaml/udp_594B_no_crc.pcap"), # path relative to pwd
+ mode = STLTXCont(pps=5000),
+ flow_stats = STLFlowStats(pg_id = 12))
]