-rw-r--r--  .gitignore | 1
-rwxr-xr-x  linux_dpdk/ws_main.py | 19
-rwxr-xr-x  scripts/automation/regression/CPlatform.py | 15
-rwxr-xr-x  scripts/automation/regression/trex_unit_test.py | 4
-rwxr-xr-x  scripts/automation/trex_control_plane/client/__init__.py | 1
-rwxr-xr-x  scripts/automation/trex_control_plane/client/trex_adv_client.py | 70
-rw-r--r--  scripts/automation/trex_control_plane/client/trex_root_path.py | 15
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/__init__.py | 1
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/external_packages.py | 72
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/trex_yaml_gen.py | 212
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/yaml_utils.py | 163
-rwxr-xr-x  scripts/automation/trex_control_plane/dirtree_no_files.txt | 11
-rwxr-xr-x  scripts/automation/trex_control_plane/dirtree_with_files.txt | 31
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/CCustomLogger.py | 100
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/__init__.py | 1
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/external_packages.py | 28
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/general_utils.py (renamed from scripts/automation/trex_control_plane/client_utils/general_utils.py) | 0
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/outer_packages.py | 30
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/text_opts.py | 192
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/trex_client.py | 1216
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/trex_daemon_server.py | 79
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/trex_exceptions.py | 140
-rw-r--r--  scripts/automation/trex_control_plane/stf/trex_status.py | 8
-rwxr-xr-x  scripts/automation/trex_control_plane/stf/trex_status_e.py | 11
-rw-r--r--  scripts/automation/trex_control_plane/stl/examples/hlt_udp_simple.py | 12
-rw-r--r--  scripts/automation/trex_control_plane/stl/examples/stl_pcap.py | 52
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py | 9
-rwxr-xr-x  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_hltapi.py | 6
-rw-r--r--  scripts/external_libs/scapy-2.3.1/python2/scapy/data.py | 1
-rw-r--r--  scripts/external_libs/scapy-2.3.1/python3/scapy/data.py | 1
-rwxr-xr-x  scripts/find_python.sh | 27
-rwxr-xr-x  scripts/run_functional_tests | 7
32 files changed, 1918 insertions, 617 deletions
diff --git a/.gitignore b/.gitignore
index 39995917..0b02fb21 100644
--- a/.gitignore
+++ b/.gitignore
@@ -82,6 +82,7 @@ src/GTAGS
#generated
+*.bak
*.pyc
__pycache__
*_GENERATED.py
diff --git a/linux_dpdk/ws_main.py b/linux_dpdk/ws_main.py
index 2aa06e3b..6310a8c2 100755
--- a/linux_dpdk/ws_main.py
+++ b/linux_dpdk/ws_main.py
@@ -546,7 +546,14 @@ dpdk_includes_path =''' ../src/
DPDK_FLAGS=['-D_GNU_SOURCE', '-DPF_DRIVER', '-DX722_SUPPORT', '-DVF_DRIVER', '-DINTEGRATED_VF'];
-
+client_external_libs = [
+ 'enum34-1.0.4',
+ 'jsonrpclib-pelix-0.2.5',
+ 'pyyaml-3.11',
+ 'pyzmq-14.5.0',
+ 'scapy-2.3.1',
+ 'texttable-0.8.4',
+ ]
RELEASE_ = "release"
@@ -956,6 +963,16 @@ def release(bld, custom_dir = None):
os.system("cp -rv %s %s " %(src_file,dest_file));
os.system("chmod 755 %s " %(dest_file));
+ # create client package
+ os.system('mkdir -p %s/trex_client/external_libs' % exec_p)
+ os.system('touch %s/trex_client/__init__.py' % exec_p)
+ for ext_lib in client_external_libs:
+ os.system('cp ../scripts/external_libs/%s %s/trex_client/external_libs/ -r' % (ext_lib, exec_p))
+ os.system('cp ../scripts/automation/trex_control_plane/stf %s/trex_client/ -r' % exec_p)
+ os.system('cp ../scripts/automation/trex_control_plane/stl/trex_stl_lib %s/trex_client/stl -r' % exec_p)
+ shutil.make_archive(os.path.join(exec_p, 'trex_client'), 'gztar', exec_p, 'trex_client')
+ os.system('rm -r %s/trex_client' % exec_p)
+
rel=get_build_num ()
os.system('cd %s/..;tar --exclude="*.pyc" -zcvf %s/%s.tar.gz %s' %(exec_p,os.getcwd(),rel,rel))
os.system("mv %s/%s.tar.gz %s" % (os.getcwd(),rel,exec_p));
diff --git a/scripts/automation/regression/CPlatform.py b/scripts/automation/regression/CPlatform.py
index 6741d5c1..da056d23 100755
--- a/scripts/automation/regression/CPlatform.py
+++ b/scripts/automation/regression/CPlatform.py
@@ -650,7 +650,7 @@ class CPlatform(object):
command = "dir {drive}: | include {image}".format(drive = search_drive, image = img_name)
response = self.cmd_link.run_single_command(command, timeout = 10)
if CShowParser.parse_image_existence(response, img_name):
- self.needed_image_path = '%s:%s' % (search_drive, img_name)
+ self.needed_image_path = '%s:/%s' % (search_drive, img_name)
print('Found image in platform:', self.needed_image_path)
return True
return False
@@ -734,8 +734,10 @@ class CPlatform(object):
boot_img_cmd = "boot system flash %s" % self.needed_image_path
config_register_cmd = "config-register 0x2021"
- cache.add('CONF', ["no boot system", boot_img_cmd, config_register_cmd])
- self.cmd_link.run_single_command( cache )
+ cache.add('CONF', ["no boot system", boot_img_cmd, config_register_cmd, '\r'])
+ response = self.cmd_link.run_single_command( cache )
+ print("RESPONSE:")
+ print(response)
self.save_config_to_startup_config()
def is_image_matches(self, needed_image):
@@ -777,7 +779,9 @@ class CPlatform(object):
Copies running-config into startup-config.
"""
- self.cmd_link.run_single_command('wr')
+ cache = CCommandCache()
+ cache.add('EXEC', ['wr', '\r'] )
+ self.cmd_link.run_single_command(cache)
def reload_platform(self, device_cfg_obj):
""" reload_platform(self) -> None
@@ -814,8 +818,9 @@ class CPlatform(object):
time.sleep(30)
self.reload_connection(device_cfg_obj)
- finally:
progress_thread.join()
+ except Exception as e:
+ print e
def get_if_manager(self):
return self.if_mngr
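For reference, the command-cache pattern these hunks switch to, as a hedged sketch placed inside a CPlatform method; CCommandCache and cmd_link.run_single_command are assumed to behave exactly as shown in the hunks above.

# inside a CPlatform method: batch EXEC and CONF commands into one device round trip
cache = CCommandCache()
cache.add('EXEC', ['wr', '\r'])                     # save running-config to startup-config
boot_img_cmd = "boot system flash %s" % self.needed_image_path
cache.add('CONF', ["no boot system", boot_img_cmd, "config-register 0x2021", '\r'])
response = self.cmd_link.run_single_command(cache)  # device output for the whole batch
print(response)
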
diff --git a/scripts/automation/regression/trex_unit_test.py b/scripts/automation/regression/trex_unit_test.py
index 4348d004..fb666382 100755
--- a/scripts/automation/regression/trex_unit_test.py
+++ b/scripts/automation/regression/trex_unit_test.py
@@ -36,8 +36,8 @@ import misc_methods
from rednose import RedNose
import termstyle
from trex import CTRexScenario
-from client.trex_client import *
-from common.trex_exceptions import *
+from stf.trex_client import *
+from stf.trex_exceptions import *
from trex_stl_lib.api import *
import trex
import socket
diff --git a/scripts/automation/trex_control_plane/client/__init__.py b/scripts/automation/trex_control_plane/client/__init__.py
deleted file mode 100755
index e1d24710..00000000
--- a/scripts/automation/trex_control_plane/client/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ["trex_client"]
diff --git a/scripts/automation/trex_control_plane/client/trex_adv_client.py b/scripts/automation/trex_control_plane/client/trex_adv_client.py
deleted file mode 100755
index bf7ccf58..00000000
--- a/scripts/automation/trex_control_plane/client/trex_adv_client.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/router/bin/python
-
-import trex_client
-from jsonrpclib import ProtocolError, AppError
-
-class CTRexAdvClient(trex_client.CTRexClient):
- def __init__ (self, trex_host, max_history_size = 100, trex_daemon_port = 8090, trex_zmq_port = 4500, verbose = False):
- super(CTRexAdvClient, self).__init__(trex_host, max_history_size, trex_daemon_port, trex_zmq_port, verbose)
- pass
-
- # TRex KIWI advanced methods
- def start_quick_trex(self, pcap_file, d, delay, dual, ipv6, times, interfaces):
- try:
- return self.server.start_quick_trex(pcap_file = pcap_file, duration = d, dual = dual, delay = delay, ipv6 = ipv6, times = times, interfaces = interfaces)
- except AppError as err:
- self.__handle_AppError_exception(err.args[0])
- except ProtocolError:
- raise
- finally:
- self.prompt_verbose_data()
-
- def stop_quick_trex(self):
- try:
- return self.server.stop_quick_trex()
- except AppError as err:
- self.__handle_AppError_exception(err.args[0])
- except ProtocolError:
- raise
- finally:
- self.prompt_verbose_data()
-
-# def is_running(self):
-# pass
-
- def get_running_stats(self):
- try:
- return self.server.get_running_stats()
- except AppError as err:
- self.__handle_AppError_exception(err.args[0])
- except ProtocolError:
- raise
- finally:
- self.prompt_verbose_data()
-
- def clear_counters(self):
- try:
- return self.server.clear_counters()
- except AppError as err:
- self.__handle_AppError_exception(err.args[0])
- except ProtocolError:
- raise
- finally:
- self.prompt_verbose_data()
-
-
-if __name__ == "__main__":
- trex = CTRexAdvClient('trex-dan', trex_daemon_port = 8383, verbose = True)
- print trex.start_quick_trex(delay = 10,
- dual = True,
- d = 20,
- interfaces = ["gig0/0/1", "gig0/0/2"],
- ipv6 = False,
- pcap_file="avl/http_browsing.pcap",
- times=3)
- print trex.stop_quick_trex()
- print trex.get_running_stats()
- print trex.clear_counters()
- pass
-
-
diff --git a/scripts/automation/trex_control_plane/client/trex_root_path.py b/scripts/automation/trex_control_plane/client/trex_root_path.py
deleted file mode 100644
index de4ec03b..00000000
--- a/scripts/automation/trex_control_plane/client/trex_root_path.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/router/bin/python
-
-import os
-import sys
-
-def add_root_to_path ():
- """adds trex_control_plane root dir to script path, up to `depth` parent dirs"""
- root_dirname = 'trex_control_plane'
- file_path = os.path.dirname(os.path.realpath(__file__))
-
- components = file_path.split(os.sep)
- sys.path.append( str.join(os.sep, components[:components.index(root_dirname)+1]) )
- return
-
-add_root_to_path()
diff --git a/scripts/automation/trex_control_plane/client_utils/__init__.py b/scripts/automation/trex_control_plane/client_utils/__init__.py
deleted file mode 100755
index c38c2cca..00000000
--- a/scripts/automation/trex_control_plane/client_utils/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ["general_utils", "trex_yaml_gen"]
diff --git a/scripts/automation/trex_control_plane/client_utils/external_packages.py b/scripts/automation/trex_control_plane/client_utils/external_packages.py
deleted file mode 100755
index c682dc18..00000000
--- a/scripts/automation/trex_control_plane/client_utils/external_packages.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/router/bin/python
-
-import sys
-import os
-import warnings
-
-CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
-ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory
-PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
-
-CLIENT_UTILS_MODULES = ['dpkt-1.8.6',
- 'yaml-3.11',
- 'texttable-0.8.4',
- 'scapy-2.3.1'
- ]
-
-def import_client_utils_modules():
-
- # must be in a higher priority
- sys.path.insert(0, PATH_TO_PYTHON_LIB)
-
- sys.path.append(ROOT_PATH)
- import_module_list(CLIENT_UTILS_MODULES)
-
-
-def import_module_list(modules_list):
- assert(isinstance(modules_list, list))
- for p in modules_list:
- full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
- fix_path = os.path.normcase(full_path)
- sys.path.insert(1, full_path)
-
-
- import_platform_dirs()
-
-
-
-def import_platform_dirs ():
- # handle platform dirs
-
- # try fedora 18 first and then cel5.9
- # we are using the ZMQ module to determine the right platform
-
- full_path = os.path.join(PATH_TO_PYTHON_LIB, 'platform/fedora18')
- fix_path = os.path.normcase(full_path)
- sys.path.insert(0, full_path)
- try:
- # try to import and delete it from the namespace
- import zmq
- del zmq
- return
- except:
- sys.path.pop(0)
- pass
-
- full_path = os.path.join(PATH_TO_PYTHON_LIB, 'platform/cel59')
- fix_path = os.path.normcase(full_path)
- sys.path.insert(0, full_path)
- try:
- # try to import and delete it from the namespace
- import zmq
- del zmq
- return
-
- except:
- sys.path.pop(0)
- sys.modules['zmq'] = None
- warnings.warn("unable to determine platform type for ZMQ import")
-
-
-
-import_client_utils_modules()
diff --git a/scripts/automation/trex_control_plane/client_utils/trex_yaml_gen.py b/scripts/automation/trex_control_plane/client_utils/trex_yaml_gen.py
deleted file mode 100755
index c26fef29..00000000
--- a/scripts/automation/trex_control_plane/client_utils/trex_yaml_gen.py
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/router/bin/python
-
-import pprint
-import yaml
-import os
-# import bisect
-
-class CTRexYaml(object):
- """
- This class functions as a YAML generator according to TRex YAML format.
-
- CTRexYaml is compatible with both Python 2 and Python 3.
- """
- YAML_TEMPLATE = [{'cap_info': [],
- 'duration': 10.0,
- 'generator': {'clients_end': '16.0.1.255',
- 'clients_per_gb': 201,
- 'clients_start': '16.0.0.1',
- 'distribution': 'seq',
- 'dual_port_mask': '1.0.0.0',
- 'min_clients': 101,
- 'servers_end': '48.0.0.255',
- 'servers_start': '48.0.0.1',
- 'tcp_aging': 1,
- 'udp_aging': 1},
- 'mac' : [0x00,0x00,0x00,0x01,0x00,0x00]}]
- PCAP_TEMPLATE = {'cps': 1.0,
- 'ipg': 10000,
- 'name': '',
- 'rtt': 10000,
- 'w': 1}
-
- def __init__ (self, trex_files_path):
- """
- The initialization of this class creates a CTRexYaml object with **empty** 'cap-info', and with default client-server configuration.
-
- Use class methods to add and assign pcap files and export the data to a YAML file.
-
- :parameters:
- trex_files_path : str
- a path (on TRex server side) for the pcap files using which TRex can access it.
-
- """
- self.yaml_obj = list(CTRexYaml.YAML_TEMPLATE)
- self.empty_cap = True
- self.file_list = []
- self.yaml_dumped = False
- self.trex_files_path = trex_files_path
-
- def add_pcap_file (self, local_pcap_path):
- """
- Adds a .pcap file with recorded traffic to the yaml object by linking the file with 'cap-info' template key fields.
-
- :parameters:
- local_pcap_path : str
- a path (on client side) for the pcap file to be added.
-
- :return:
- + The index of the inserted item (as int) if item added successfully
- + -1 if pcap file already exists in 'cap_info'.
-
- """
- new_pcap = dict(CTRexYaml.PCAP_TEMPLATE)
- new_pcap['name'] = self.trex_files_path + os.path.basename(local_pcap_path)
- if self.get_pcap_idx(new_pcap['name']) != -1:
- # pcap already exists in 'cap_info'
- return -1
- else:
- self.yaml_obj[0]['cap_info'].append(new_pcap)
- if self.empty_cap:
- self.empty_cap = False
- self.file_list.append(local_pcap_path)
- return ( len(self.yaml_obj[0]['cap_info']) - 1)
-
-
- def get_pcap_idx (self, pcap_name):
- """
- Checks if a certain .pcap file has been added into the yaml object.
-
- :parameters:
- pcap_name : str
- the name of the pcap file to be searched
-
- :return:
- + The index of the pcap file (as int) if exists
- + -1 if not exists.
-
- """
- comp_pcap = pcap_name if pcap_name.startswith(self.trex_files_path) else (self.trex_files_path + pcap_name)
- for idx, pcap in enumerate(self.yaml_obj[0]['cap_info']):
- print (pcap['name'] == comp_pcap)
- if pcap['name'] == comp_pcap:
- return idx
- # pcap file wasn't found
- return -1
-
- def dump_as_python_obj (self):
- """
- dumps with nice indentation the pythonic format (dictionaries and lists) of the currently built yaml object.
-
- :parameters:
- None
-
- :return:
- None
-
- """
- pprint.pprint(self.yaml_obj)
-
- def dump(self):
- """
- dumps with nice indentation the YAML format of the currently built yaml object.
-
- :parameters:
- None
-
- :return:
- None
-
- """
- print (yaml.safe_dump(self.yaml_obj, default_flow_style = False))
-
- def to_yaml(self, filename):
- """
- Exports to YAML file the built configuration into an actual YAML file.
-
- :parameters:
- filename : str
- a path (on client side, including filename) to store the generated yaml file.
-
- :return:
- None
-
- :raises:
- + :exc:`ValueError`, in case no pcap files has been added to the object.
- + :exc:`EnvironmentError`, in case of any IO error of writing to the files or OSError when trying to open it for writing.
-
- """
- if self.empty_cap:
- raise ValueError("No .pcap file has been assigned to yaml object. Must add at least one")
- else:
- try:
- with open(filename, 'w') as yaml_file:
- yaml_file.write( yaml.safe_dump(self.yaml_obj, default_flow_style = False) )
- self.yaml_dumped = True
- self.file_list.append(filename)
- except EnvironmentError as inst:
- raise
-
- def set_cap_info_param (self, param, value, seq):
- """
- Set cap-info parameters' value of a specific pcap file.
-
- :parameters:
- param : str
- the name of the parameters to be set.
- value : int/float
- the desired value to be set to `param` key.
- seq : int
- an index to the relevant caps array to be changed (index supplied when adding new pcap file, see :func:`add_pcap_file`).
-
- :return:
- **True** on success
-
- :raises:
- :exc:`IndexError`, in case an out-of range index was given.
-
- """
- try:
- self.yaml_obj[0]['cap_info'][seq][param] = value
-
- return True
- except IndexError:
- return False
-
- def set_generator_param (self, param, value):
- """
- Set generator parameters' value of the yaml object.
-
- :parameters:
- param : str
- the name of the parameters to be set.
- value : int/float/str
- the desired value to be set to `param` key.
-
- :return:
- None
-
- """
- self.yaml_obj[0]['generator'][param] = value
-
- def get_file_list(self):
- """
- Returns a list of all files related to the YAML object, including the YAML filename itself.
-
- .. tip:: This method is especially useful for listing all the files that should be pushed to TRex server as part of the same yaml selection.
-
- :parameters:
- None
-
- :return:
- a list of filepaths, each is a local client-machine file path.
-
- """
- if not self.yaml_dumped:
- print ("WARNING: .yaml file wasn't dumped yet. Files list contains only .pcap files")
- return self.file_list
-
-
-
-if __name__ == "__main__":
- pass
diff --git a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
deleted file mode 100755
index 776a51a7..00000000
--- a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
+++ /dev/null
@@ -1,163 +0,0 @@
-
-"""
-Dan Klein
-Cisco Systems, Inc.
-
-Copyright (c) 2015-2015 Cisco Systems, Inc.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import traceback
-import sys
-import yaml
-
-
-class CTRexYAMLLoader(object):
- TYPE_DICT = {"double":float,
- "int":int,
- "array":list,
- "string":str,
- "boolean":bool}
-
- def __init__(self, yaml_ref_file_path):
- self.yaml_path = yaml_ref_file_path
- self.ref_obj = None
-
- def check_term_param_type(self, val, val_field, ref_val, multiplier):
- # print val, val_field, ref_val
- tmp_type = ref_val.get('type')
- if isinstance(tmp_type, list):
- # item can be one of multiple types
- # print "multiple choice!"
- python_types = set()
- for t in tmp_type:
- if t in self.TYPE_DICT:
- python_types.add(self.TYPE_DICT.get(t))
- else:
- return False, TypeError("Unknown resolving for type {0}".format(t))
- # print "python legit types: ", python_types
- if type(val) not in python_types:
- return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
- else:
- # WE'RE OK!
- return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
- else:
- # this is a single type field
- python_type = self.TYPE_DICT.get(tmp_type)
- if not isinstance(val, python_type):
- return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
- else:
- # WE'RE OK!
- return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
-
- def get_reference_default(self, root_obj, sub_obj, key):
- # print root_obj, sub_obj, key
- if sub_obj:
- ref_field = self.ref_obj.get(root_obj).get(sub_obj).get(key)
- else:
- ref_field = self.ref_obj.get(root_obj).get(key)
- if 'has_default' in ref_field:
- if ref_field.get('has_default'):
- # WE'RE OK!
- return True, ref_field.get('default')
- else:
- # This is a mandatory field!
- return False, ValueError("The {0} field is mandatory and must be specified explicitly".format(key))
- else:
- return False, ValueError("The {0} field has no indication about default value".format(key))
-
- def validate_yaml(self, evaluated_obj, root_obj, fill_defaults=True, multiplier=1):
- if isinstance(evaluated_obj, dict) and evaluated_obj.keys() == [root_obj]:
- evaluated_obj = evaluated_obj.get(root_obj)
- if not self.ref_obj:
- self.ref_obj = load_yaml_to_obj(self.yaml_path)
- # self.load_reference()
- ref_item = self.ref_obj.get(root_obj)
- if ref_item is not None:
- try:
- typed_obj = [False, None] # first item stores validity (multiple object "shapes"), second stored type
- if "type" in evaluated_obj:
- ref_item = ref_item[evaluated_obj.get("type")]
- # print "lower resolution with typed object"
- typed_obj = [True, evaluated_obj.get("type")]
- if isinstance(ref_item, dict) and "type" not in ref_item: # this is not a terminal
- result_obj = {}
- if typed_obj[0]:
- result_obj["type"] = typed_obj[1]
- # print "processing dictionary non-terminal value"
- for k, v in ref_item.items():
- # print "processing element '{0}' with value '{1}'".format(k,v)
- if k in evaluated_obj:
- # validate with ref obj
- # print "found in evaluated object!"
- tmp_type = v.get('type')
- # print tmp_type
- # print evaluated_obj
- if tmp_type == "object":
- # go deeper into nesting hierarchy
- # print "This is an object type, recursion!"
- result_obj[k] = self.validate_yaml(evaluated_obj.get(k), k, fill_defaults, multiplier)
- else:
- # validation on terminal type
- # print "Validating terminal type %s" % k
- res_ok, data = self.check_term_param_type(evaluated_obj.get(k), k, v, multiplier)
- if res_ok:
- # data field contains the value to save
- result_obj[k] = data
- else:
- # data var contains the exception to throw
- raise data
- elif fill_defaults:
- # complete missing values with default value, if exists
- sub_obj = typed_obj[1] if typed_obj[0] else None
- res_ok, data = self.get_reference_default(root_obj, sub_obj, k)
- if res_ok:
- # data field contains the value to save
- result_obj[k] = data
- else:
- # data var contains the exception to throw
- raise data
- return result_obj
- elif isinstance(ref_item, list):
- # currently not handling list objects
- return NotImplementedError("List object are currently unsupported")
- else:
- raise TypeError("Unknown parse tree object type.")
- except KeyError as e:
- raise
- else:
- raise KeyError("The given root_key '{key}' does not exists on reference object".format(key=root_obj))
-
- @staticmethod
- def _calc_final_value(val, multiplier, multiply):
- def to_num(s):
- try:
- return int(s)
- except ValueError:
- return float(s)
- if multiply:
- return val * to_num(multiplier)
- else:
- return val
-
-
-def load_yaml_to_obj(file_path):
- try:
- return yaml.load(file(file_path, 'r'))
- except yaml.YAMLError as e:
- raise
- except Exception as e:
- raise
-
-def yaml_exporter(file_path):
- pass
-
-if __name__ == "__main__":
- pass
diff --git a/scripts/automation/trex_control_plane/dirtree_no_files.txt b/scripts/automation/trex_control_plane/dirtree_no_files.txt
deleted file mode 100755
index b87c4167..00000000
--- a/scripts/automation/trex_control_plane/dirtree_no_files.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-trex_control_plane/
-|-- Client
-|-- Server
-|-- common
-`-- python_lib
- |-- enum34-1.0.4
- |-- jsonrpclib-0.1.3
- |-- lockfile-0.10.2
- |-- python-daemon-2.0.5
- `-- zmq
-
diff --git a/scripts/automation/trex_control_plane/dirtree_with_files.txt b/scripts/automation/trex_control_plane/dirtree_with_files.txt
deleted file mode 100755
index 5ce7cdfc..00000000
--- a/scripts/automation/trex_control_plane/dirtree_with_files.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-trex_control_plane/
-|-- Client
-| |-- __init__.py
-| |-- outer_packages.py
-| |-- python_lib
-| `-- trex_client.py
-|-- Server
-| |-- CCustomLogger.py
-| |-- outer_packages.py
-| |-- trex_daemon_server
-| |-- trex_daemon_server.py
-| |-- trex_launch_thread.py
-| |-- trex_server.py
-| `-- zmq_monitor_thread.py
-|-- __init__.py
-|-- common
-| |-- __init__.py
-| |-- __init__.pyc
-| |-- trex_status_e.py
-| `-- trex_status_e.pyc
-|-- dirtree_no_files.txt
-|-- dirtree_with_files.txt
-`-- python_lib
- |-- __init__.py
- |-- enum34-1.0.4
- |-- jsonrpclib-0.1.3
- |-- lockfile-0.10.2
- |-- python-daemon-2.0.5
- |-- zmq
- `-- zmq_fedora.tar.gz
-
diff --git a/scripts/automation/trex_control_plane/stf/CCustomLogger.py b/scripts/automation/trex_control_plane/stf/CCustomLogger.py
new file mode 100755
index 00000000..ecf7d519
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/CCustomLogger.py
@@ -0,0 +1,100 @@
+
+import sys
+import os
+import logging
+
+
+def setup_custom_logger(name, log_path = None):
+ # first make sure path availabe
+# if log_path is None:
+# log_path = os.getcwd()+'/trex_log.log'
+# else:
+# directory = os.path.dirname(log_path)
+# if not os.path.exists(directory):
+# os.makedirs(directory)
+ logging.basicConfig(level = logging.INFO,
+ format = '%(asctime)s %(name)-10s %(module)-20s %(levelname)-8s %(message)s',
+ datefmt = '%m-%d %H:%M')
+# filename= log_path,
+# filemode= 'w')
+#
+# # define a Handler which writes INFO messages or higher to the sys.stderr
+# consoleLogger = logging.StreamHandler()
+# consoleLogger.setLevel(logging.ERROR)
+# # set a format which is simpler for console use
+# formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
+# # tell the handler to use this format
+# consoleLogger.setFormatter(formatter)
+#
+# # add the handler to the logger
+# logging.getLogger(name).addHandler(consoleLogger)
+
+def setup_daemon_logger (name, log_path = None):
+ # first make sure path availabe
+ logging.basicConfig(level = logging.INFO,
+ format = '%(asctime)s %(name)-10s %(module)-20s %(levelname)-8s %(message)s',
+ datefmt = '%m-%d %H:%M',
+ filename= log_path,
+ filemode= 'w')
+
+class CustomLogger(object):
+
+ def __init__(self, log_filename):
+ # Store the original stdout and stderr
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ self.stdout_fd = os.dup(sys.stdout.fileno())
+ self.devnull = os.open('/dev/null', os.O_WRONLY)
+ self.log_file = open(log_filename, 'w')
+ self.silenced = False
+ self.pending_log_file_prints = 0
+
+ # silence all prints from stdout
+ def silence(self):
+ os.dup2(self.devnull, sys.stdout.fileno())
+ self.silenced = True
+
+ # restore stdout status
+ def restore(self):
+ sys.stdout.flush()
+ sys.stderr.flush()
+ # Restore normal stdout
+ os.dup2(self.stdout_fd, sys.stdout.fileno())
+ self.silenced = False
+
+ #print a message to the log (both stdout / log file)
+ def log(self, text, force = False, newline = True):
+ self.log_file.write((text + "\n") if newline else text)
+ self.pending_log_file_prints += 1
+
+ if (self.pending_log_file_prints >= 10):
+ self.log_file.flush()
+ self.pending_log_file_prints = 0
+
+ self.console(text, force, newline)
+
+ # print a message to the console alone
+ def console(self, text, force = False, newline = True):
+ _text = (text + "\n") if newline else text
+ # if we are silenced and not forced - go home
+ if self.silenced and not force:
+ return
+
+ if self.silenced:
+ os.write(self.stdout_fd, _text)
+ else:
+ sys.stdout.write(_text)
+
+ sys.stdout.flush()
+
+ # flush
+ def flush(self):
+ sys.stdout.flush()
+ self.log_file.flush()
+
+ def __exit__(self, type, value, traceback):
+ sys.stdout.flush()
+ self.log_file.flush()
+ os.close(self.devnull)
+ os.close(self.log_file)
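A short usage sketch of the CustomLogger class added above (the log file path is arbitrary); it mirrors the silence/restore/force semantics of the class.

logger = CustomLogger('/tmp/trex_run.log')      # hypothetical log path

logger.log('starting run')                      # written to stdout and to the log file
logger.silence()                                # stdout now redirected to /dev/null
logger.log('sampling...')                       # still recorded in the log file
logger.console('fatal error', force = True)     # forced console print even while silenced
logger.restore()                                # stdout restored
logger.flush()
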
diff --git a/scripts/automation/trex_control_plane/stf/__init__.py b/scripts/automation/trex_control_plane/stf/__init__.py
new file mode 100755
index 00000000..5a1da046
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/__init__.py
@@ -0,0 +1 @@
+__all__ = ["trex_status_e", "trex_exceptions"]
diff --git a/scripts/automation/trex_control_plane/stf/external_packages.py b/scripts/automation/trex_control_plane/stf/external_packages.py
new file mode 100755
index 00000000..7353c397
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/external_packages.py
@@ -0,0 +1,28 @@
+#!/router/bin/python
+
+import sys
+import os
+
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory
+PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
+
+CLIENT_UTILS_MODULES = ['yaml-3.11'
+ ]
+
+def import_common_modules():
+ # must be in a higher priority
+ sys.path.insert(0, PATH_TO_PYTHON_LIB)
+ sys.path.append(ROOT_PATH)
+ import_module_list(CLIENT_UTILS_MODULES)
+
+
+def import_module_list(modules_list):
+ assert(isinstance(modules_list, list))
+ for p in modules_list:
+ full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
+ fix_path = os.path.normcase(full_path)
+ sys.path.insert(1, full_path)
+
+import_common_modules()
+
diff --git a/scripts/automation/trex_control_plane/client_utils/general_utils.py b/scripts/automation/trex_control_plane/stf/general_utils.py
index d2521f02..d2521f02 100755
--- a/scripts/automation/trex_control_plane/client_utils/general_utils.py
+++ b/scripts/automation/trex_control_plane/stf/general_utils.py
diff --git a/scripts/automation/trex_control_plane/stf/outer_packages.py b/scripts/automation/trex_control_plane/stf/outer_packages.py
new file mode 100755
index 00000000..5e29f8d6
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/outer_packages.py
@@ -0,0 +1,30 @@
+#!/router/bin/python
+
+import sys
+import os
+
+
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+PARENT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir, 'external_libs'))
+SCRIPTS_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir, os.pardir, os.pardir, 'external_libs'))
+
+CLIENT_MODULES = ['enum34-1.0.4',
+ 'jsonrpclib-pelix-0.2.5',
+# 'termstyle',
+# 'yaml-3.11'
+ ]
+
+
+def import_module_list(ext_libs_path):
+ for p in CLIENT_MODULES:
+ full_path = os.path.join(ext_libs_path, p)
+ if not os.path.exists(full_path):
+ raise Exception('Library %s is absent in path %s' % (p, ext_libs_path))
+ sys.path.insert(1, full_path)
+
+if os.path.exists(PARENT_PATH):
+ import_module_list(PARENT_PATH)
+elif os.path.exists(SCRIPTS_PATH):
+ import_module_list(SCRIPTS_PATH)
+else:
+ raise Exception('Could not find external libs in path: %s' % [PARENT_PATH, SCRIPTS_PATH])
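outer_packages.py is imported purely for its side effect, mirroring how trex_client.py below consumes it; a minimal sketch:

import outer_packages      # side effect: prepends the bundled external_libs packages to sys.path
import jsonrpclib          # resolved from jsonrpclib-pelix-0.2.5
from enum import Enum      # resolved from enum34-1.0.4 on Python 2
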
diff --git a/scripts/automation/trex_control_plane/stf/text_opts.py b/scripts/automation/trex_control_plane/stf/text_opts.py
new file mode 100755
index 00000000..78a0ab1f
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/text_opts.py
@@ -0,0 +1,192 @@
+import json
+import re
+
+TEXT_CODES = {'bold': {'start': '\x1b[1m',
+ 'end': '\x1b[22m'},
+ 'cyan': {'start': '\x1b[36m',
+ 'end': '\x1b[39m'},
+ 'blue': {'start': '\x1b[34m',
+ 'end': '\x1b[39m'},
+ 'red': {'start': '\x1b[31m',
+ 'end': '\x1b[39m'},
+ 'magenta': {'start': '\x1b[35m',
+ 'end': '\x1b[39m'},
+ 'green': {'start': '\x1b[32m',
+ 'end': '\x1b[39m'},
+ 'yellow': {'start': '\x1b[33m',
+ 'end': '\x1b[39m'},
+ 'underline': {'start': '\x1b[4m',
+ 'end': '\x1b[24m'}}
+
+class TextCodesStripper:
+ keys = [re.escape(v['start']) for k,v in TEXT_CODES.iteritems()]
+ keys += [re.escape(v['end']) for k,v in TEXT_CODES.iteritems()]
+ pattern = re.compile("|".join(keys))
+
+ @staticmethod
+ def strip (s):
+ return re.sub(TextCodesStripper.pattern, '', s)
+
+def format_num (size, suffix = "", compact = True, opts = ()):
+ txt = "NaN"
+
+ if type(size) == str:
+ return "N/A"
+
+ u = ''
+
+ if compact:
+ for unit in ['','K','M','G','T','P']:
+ if abs(size) < 1000.0:
+ u = unit
+ break
+ size /= 1000.0
+
+ if isinstance(size, float):
+ txt = "%3.2f" % (size)
+ else:
+ txt = "{:,}".format(size)
+
+ if u or suffix:
+ txt += " {:}{:}".format(u, suffix)
+
+ if isinstance(opts, tuple):
+ return format_text(txt, *opts)
+ else:
+ return format_text(txt, (opts))
+
+
+
+def format_time (t_sec):
+ if t_sec < 0:
+ return "infinite"
+
+ if t_sec < 1:
+ # low numbers
+ for unit in ['ms', 'usec', 'ns']:
+ t_sec *= 1000.0
+ if t_sec >= 1.0:
+ return '{:,.2f} [{:}]'.format(t_sec, unit)
+
+ return "NaN"
+
+ else:
+ # seconds
+ if t_sec < 60.0:
+ return '{:,.2f} [{:}]'.format(t_sec, 'sec')
+
+ # minutes
+ t_sec /= 60.0
+ if t_sec < 60.0:
+ return '{:,.2f} [{:}]'.format(t_sec, 'minutes')
+
+ # hours
+ t_sec /= 60.0
+ if t_sec < 24.0:
+ return '{:,.2f} [{:}]'.format(t_sec, 'hours')
+
+ # days
+ t_sec /= 24.0
+ return '{:,.2f} [{:}]'.format(t_sec, 'days')
+
+
+def format_percentage (size):
+ return "%0.2f %%" % (size)
+
+def bold(text):
+ return text_attribute(text, 'bold')
+
+
+def cyan(text):
+ return text_attribute(text, 'cyan')
+
+
+def blue(text):
+ return text_attribute(text, 'blue')
+
+
+def red(text):
+ return text_attribute(text, 'red')
+
+
+def magenta(text):
+ return text_attribute(text, 'magenta')
+
+
+def green(text):
+ return text_attribute(text, 'green')
+
+def yellow(text):
+ return text_attribute(text, 'yellow')
+
+def underline(text):
+ return text_attribute(text, 'underline')
+
+
+def text_attribute(text, attribute):
+ if isinstance(text, str):
+ return "{start}{txt}{stop}".format(start=TEXT_CODES[attribute]['start'],
+ txt=text,
+ stop=TEXT_CODES[attribute]['end'])
+ elif isinstance(text, unicode):
+ return u"{start}{txt}{stop}".format(start=TEXT_CODES[attribute]['start'],
+ txt=text,
+ stop=TEXT_CODES[attribute]['end'])
+ else:
+ raise Exception("not a string")
+
+
+FUNC_DICT = {'blue': blue,
+ 'bold': bold,
+ 'green': green,
+ 'yellow': yellow,
+ 'cyan': cyan,
+ 'magenta': magenta,
+ 'underline': underline,
+ 'red': red}
+
+
+def format_text(text, *args):
+ return_string = text
+ for i in args:
+ func = FUNC_DICT.get(i)
+ if func:
+ return_string = func(return_string)
+
+ return return_string
+
+
+def format_threshold (value, red_zone, green_zone):
+ if value >= red_zone[0] and value <= red_zone[1]:
+ return format_text("{0}".format(value), 'red')
+
+ if value >= green_zone[0] and value <= green_zone[1]:
+ return format_text("{0}".format(value), 'green')
+
+ return "{0}".format(value)
+
+# pretty print for JSON
+def pretty_json (json_str, use_colors = True):
+ pretty_str = json.dumps(json.loads(json_str), indent = 4, separators=(',', ': '), sort_keys = True)
+
+ if not use_colors:
+ return pretty_str
+
+ try:
+ # int numbers
+ pretty_str = re.sub(r'([ ]*:[ ]+)(\-?[1-9][0-9]*[^.])',r'\1{0}'.format(blue(r'\2')), pretty_str)
+ # float
+ pretty_str = re.sub(r'([ ]*:[ ]+)(\-?[1-9][0-9]*\.[0-9]+)',r'\1{0}'.format(magenta(r'\2')), pretty_str)
+ # # strings
+ #
+ pretty_str = re.sub(r'([ ]*:[ ]+)("[^"]*")',r'\1{0}'.format(red(r'\2')), pretty_str)
+ pretty_str = re.sub(r"('[^']*')", r'{0}\1{1}'.format(TEXT_CODES['magenta']['start'],
+ TEXT_CODES['red']['start']), pretty_str)
+ except :
+ pass
+
+ return pretty_str
+
+
+if __name__ == "__main__":
+ pass
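A brief usage sketch of the formatting helpers defined above (values are illustrative):

print(format_text('TRex is running', 'green', 'bold'))    # ANSI green + bold
print(format_num(1532000, suffix = 'pps'))                # compact units -> "1.53 Mpps"
print(format_num(1532000, suffix = 'pps', opts = ('yellow', 'bold')))
print(format_time(0.0025))                                # -> "2.50 [ms]"
print(format_threshold(95, [90, 100], [0, 50]))           # 95 falls in the red zone
print(pretty_json('{"tx_pps": 1532000, "state": "Running"}'))
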
diff --git a/scripts/automation/trex_control_plane/stf/trex_client.py b/scripts/automation/trex_control_plane/stf/trex_client.py
new file mode 100755
index 00000000..919253d1
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/trex_client.py
@@ -0,0 +1,1216 @@
+#!/router/bin/python
+
+# internal libs
+import sys
+import os
+import socket
+import errno
+import time
+import re
+import copy
+import binascii
+from distutils.util import strtobool
+from collections import deque, OrderedDict
+from json import JSONDecoder
+import traceback
+
+try:
+ from . import outer_packages
+ from .trex_status_e import TRexStatus
+ from .trex_exceptions import *
+ from .trex_exceptions import exception_handler
+ from .general_utils import *
+except Exception as e: # is __main__
+ import outer_packages
+ from trex_status_e import TRexStatus
+ from trex_exceptions import *
+ from trex_exceptions import exception_handler
+ from general_utils import *
+
+# external libs
+import jsonrpclib
+from jsonrpclib import ProtocolError, AppError
+from enum import Enum
+
+
+
+class CTRexClient(object):
+ """
+ This class defines the client side of the RESTfull interaction with TRex
+ """
+
+ def __init__(self, trex_host, max_history_size = 100, trex_daemon_port = 8090, trex_zmq_port = 4500, verbose = False):
+ """
+ Instantiate a TRex client object, and connecting it to listening daemon-server
+
+ :parameters:
+ trex_host : str
+ a string of the TRex ip address or hostname.
+ max_history_size : int
+ a number to set the maximum history size of a single TRex run. Each sampling adds a new item to history.
+
+ default value : **100**
+ trex_daemon_port : int
+ the port number on which the trex-daemon server can be reached
+
+ default value: **8090**
+ trex_zmq_port : int
+ the port number on which trex's zmq module will interact with daemon server
+
+ default value: **4500**
+ verbose : bool
+ sets a verbose output on supported class method.
+
+ default value : **False**
+
+ :raises:
+ socket errors, in case server could not be reached.
+
+ """
+ try:
+ self.trex_host = socket.gethostbyname(trex_host)
+ except: # give it another try
+ self.trex_host = socket.gethostbyname(trex_host)
+ self.trex_daemon_port = trex_daemon_port
+ self.trex_zmq_port = trex_zmq_port
+ self.seq = None
+ self.verbose = verbose
+ self.result_obj = CTRexResult(max_history_size)
+ self.decoder = JSONDecoder()
+ self.trex_server_path = "http://{hostname}:{port}/".format( hostname = self.trex_host, port = trex_daemon_port )
+ self.__verbose_print("Connecting to TRex @ {trex_path} ...".format( trex_path = self.trex_server_path ) )
+ self.history = jsonrpclib.history.History()
+ self.server = jsonrpclib.Server(self.trex_server_path, history = self.history)
+ self.check_server_connectivity()
+ self.__verbose_print("Connection established successfully!")
+ self._last_sample = time.time()
+ self.__default_user = get_current_user()
+
+
+ def add (self, x, y):
+ try:
+ return self.server.add(x,y)
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def start_trex (self, f, d, block_to_success = True, timeout = 40, user = None, trex_development = False, **trex_cmd_options):
+ """
+ Request to start a TRex run on server.
+
+ :parameters:
+ f : str
+ a path (on server) for the injected traffic data (.yaml file)
+ d : int
+ the desired duration of the test. must be at least 30 seconds long.
+ block_to_success : bool
+ determine if this method blocks until TRex changes state from 'Starting' to either 'Idle' or 'Running'
+
+ default value : **True**
+ timeout : int
+ maximum time (in seconds) to wait in blocking state until TRex changes state from 'Starting' to either 'Idle' or 'Running'
+
+ default value: **40**
+ user : str
+ the identity of the the run issuer.
+ trex_cmd_options : key, val
+ sets desired TRex options using key=val syntax, separated by comma.
+ for keys with no value, state key=True
+
+ :return:
+ **True** on success
+
+ :raises:
+ + :exc:`ValueError`, in case 'd' parameter inserted with wrong value.
+ + :exc:`trex_exceptions.TRexError`, in case one of the trex_cmd_options raised an exception at server.
+ + :exc:`trex_exceptions.TRexInUseError`, in case TRex is already taken.
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case TRex is reserved for another user than the one trying start TRex.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ user = user or self.__default_user
+ try:
+ d = int(d)
+ if d < 30 and not trex_development: # test duration should be at least 30 seconds, unless trex_development flag is specified.
+ raise ValueError
+ except ValueError:
+ raise ValueError('d parameter must be integer, specifying how long TRex run, and must be larger than 30 secs.')
+
+ trex_cmd_options.update( {'f' : f, 'd' : d} )
+ if not trex_cmd_options.get('l'):
+ self.result_obj.latency_checked = False
+ if 'k' in trex_cmd_options:
+ timeout += int(trex_cmd_options['k']) # during 'k' seconds TRex stays in 'Starting' state
+
+ self.result_obj.clear_results()
+ try:
+ issue_time = time.time()
+ retval = self.server.start_trex(trex_cmd_options, user, block_to_success, timeout)
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ if retval!=0:
+ self.seq = retval # update seq num only on successful submission
+ return True
+ else: # TRex is has been started by another user
+ raise TRexInUseError('TRex is already being used by another user or process. Try again once TRex is back in IDLE state.')
+
+ def stop_trex (self):
+ """
+ Request to stop a TRex run on server.
+
+ The request is only valid if the stop initiator is the same client as the TRex run initiator.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** on successful termination
+ + **False** if request issued but TRex wasn't running.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case TRex ir running but started by another user.
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ return self.server.stop_trex(self.seq)
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def force_kill (self, confirm = True):
+ """
+ Force killing of running TRex process (if exists) on the server.
+
+ .. tip:: This method is a safety method and **overrides any running or reserved resources**, and as such isn't designed to be used on a regular basis.
+ Always consider using :func:`trex_client.CTRexClient.stop_trex` instead.
+
+ In the end of this method, TRex will return to IDLE state with no reservation.
+
+ :parameters:
+ confirm : bool
+ Prompt a user confirmation before continue terminating TRex session
+
+ :return:
+ + **True** on successful termination
+ + **False** otherwise.
+
+ :raises:
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ if confirm:
+ prompt = "WARNING: This will terminate active TRex session indiscriminately.\nAre you sure? "
+ sys.stdout.write('%s [y/n]\n' % prompt)
+ while True:
+ try:
+ if strtobool(user_input().lower()):
+ break
+ else:
+ return
+ except ValueError:
+ sys.stdout.write('Please respond with \'y\' or \'n\'.\n')
+ try:
+ return self.server.force_trex_kill()
+ except AppError as err:
+ # Silence any kind of application errors- by design
+ return False
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def wait_until_kickoff_finish(self, timeout = 40):
+ """
+ Block the client application until TRex changes state from 'Starting' to either 'Idle' or 'Running'
+
+ The request is only valid if the stop initiator is the same client as the TRex run initiator.
+
+ :parameters:
+ timeout : int
+ maximum time (in seconds) to wait in blocking state until TRex changes state from 'Starting' to either 'Idle' or 'Running'
+
+ :return:
+ + **True** on successful termination
+ + **False** if request issued but TRex wasn't running.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ .. note:: Exceptions are throws only when start_trex did not block in the first place, i.e. `block_to_success` parameter was set to `False`
+
+ """
+
+ try:
+ return self.server.wait_until_kickoff_finish(timeout)
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def is_running (self, dump_out = False):
+ """
+ Poll for TRex running status.
+
+ If TRex is running, a history item will be added into result_obj and processed.
+
+ .. tip:: This method is especially useful for iterating until TRex run is finished.
+
+ :parameters:
+ dump_out : dict
+ if passed, the pointer object is cleared and the latest dump stored in it.
+
+ :return:
+ + **True** if TRex is running.
+ + **False** if TRex is not running.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case JSON stream decoding error.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ res = self.get_running_info()
+ if res == {}:
+ return False
+ if (dump_out != False) and (isinstance(dump_out, dict)): # save received dump to given 'dump_out' pointer
+ dump_out.clear()
+ dump_out.update(res)
+ return True
+ except TRexWarning as err:
+ if err.code == -12: # TRex is either still at 'Starting' state or in Idle state, however NO error occured
+ return False
+ except TRexException:
+ raise
+ except ProtocolError as err:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def is_idle (self):
+ """
+ Poll for TRex running status, check if TRex is in Idle state.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** if TRex is idle.
+ + **False** if TRex is starting or running.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case JSON stream decoding error.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ if self.get_running_status()['state'] == TRexStatus.Idle:
+ return True
+ return False
+ except TRexException:
+ raise
+ except ProtocolError as err:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def get_trex_files_path (self):
+ """
+ Fetches the local path in which files are stored when pushed to TRex server from client.
+
+ :parameters:
+ None
+
+ :return:
+ string representation of the desired path
+
+ .. note:: The returned path represents a path on the TRex server **local machine**
+
+ :raises:
+ ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ return (self.server.get_files_path() + '/')
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def get_running_status (self):
+ """
+ Fetches the current TRex status.
+
+ If available, a verbose data will accompany the state itself.
+
+ :parameters:
+ None
+
+ :return:
+ dictionary with 'state' and 'verbose' keys.
+
+ :raises:
+ ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ res = self.server.get_running_status()
+ res['state'] = TRexStatus(res['state'])
+ return res
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def get_running_info (self):
+ """
+ Performs single poll of TRex running data and process it into the result object (named `result_obj`).
+
+ .. tip:: This method will throw an exception if TRex isn't running. Always consider using :func:`trex_client.CTRexClient.is_running` which handles a single poll operation in safer manner.
+
+ :parameters:
+ None
+
+ :return:
+ dictionary containing the most updated data dump from TRex.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case JSON stream decoding error.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ if not self.is_query_relevance():
+ # if requested in timeframe smaller than the original sample rate, return the last known data without interacting with server
+ return self.result_obj.get_latest_dump()
+ else:
+ try:
+ latest_dump = self.decoder.decode( self.server.get_running_info() ) # latest dump is not a dict, but json string. decode it.
+ self.result_obj.update_result_data(latest_dump)
+ return latest_dump
+ except TypeError as inst:
+ raise TypeError('JSON-RPC data decoding failed. Check out incoming JSON stream.')
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def sample_until_condition (self, condition_func, time_between_samples = 5):
+ """
+ Automatically sets ongoing sampling of TRex data, with sampling rate described by time_between_samples.
+
+ On each fetched dump, the condition_func is applied on the result objects, and if returns True, the sampling will stop.
+
+ :parameters:
+ condition_func : function
+ function that operates on result_obj and checks if a condition has been met
+
+ .. note:: `condition_finc` is applied on `CTRexResult` object. Make sure to design a relevant method.
+ time_between_samples : int
+ determines the time between each sample of the server
+
+ default value : **5**
+
+ :return:
+ the first result object (see :class:`CTRexResult` for further details) of the TRex run on which the condition has been met.
+
+ :raises:
+ + :exc:`UserWarning`, in case the condition_func method condition hasn't been met
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case JSON stream decoding error.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+ + :exc:`Exception`, in case the condition_func suffered from any kind of exception
+
+ """
+ # make sure TRex is running. raise exceptions here if any
+ self.wait_until_kickoff_finish()
+ try:
+ while self.is_running():
+ results = self.get_result_obj()
+ if condition_func(results):
+ # if condition satisfied, stop TRex and return result object
+ self.stop_trex()
+ return results
+ time.sleep(time_between_samples)
+ except TRexWarning:
+ # means we're back to Idle state, and didn't meet our condition
+ raise UserWarning("TRex results condition wasn't met during TRex run.")
+ except Exception:
+ # this could come from provided method 'condition_func'
+ raise
+
+ def sample_to_run_finish (self, time_between_samples = 5):
+ """
+ Automatically sets automatically sampling of TRex data with sampling rate described by time_between_samples until TRex run finished.
+
+ :parameters:
+ time_between_samples : int
+ determines the time between each sample of the server
+
+ default value : **5**
+
+ :return:
+ the latest result object (see :class:`CTRexResult` for further details) with sampled data.
+
+ :raises:
+ + :exc:`UserWarning`, in case the condition_func method condition hasn't been met
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case JSON stream decoding error.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ self.wait_until_kickoff_finish()
+
+ try:
+ while self.is_running():
+ time.sleep(time_between_samples)
+ except TRexWarning:
+ pass
+ results = self.get_result_obj()
+ return results
+
+ def sample_x_seconds (self, sample_time, time_between_samples = 5):
+ """
+ Automatically sets ongoing sampling of TRex data for sample_time seconds, with sampling rate described by time_between_samples.
+ Does not stop the TRex afterwards!
+
+ .. tip:: Useful for changing the device (Router, ASA etc.) configuration after given time.
+
+ :parameters:
+ sample_time : int
+ sample the TRex this number of seconds
+
+ time_between_samples : int
+ determines the time between each sample of the server
+
+ default value : **5**
+
+ :return:
+ the first result object (see :class:`CTRexResult` for further details) of the TRex run after given sample_time.
+
+ :raises:
+ + :exc:`UserWarning`, in case the TRex run ended before sample_time duration
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case one of failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case JSON stream decoding error.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ # make sure TRex is running. raise exceptions here if any
+ self.wait_until_kickoff_finish()
+ elapsed_time = 0
+ while self.is_running():
+ if elapsed_time >= sample_time:
+ return self.get_result_obj()
+ time.sleep(time_between_samples)
+ elapsed_time += time_between_samples
+ raise UserWarning("TRex has stopped at %s seconds (before expected %s seconds)\nTry increasing test duration or decreasing sample_time" % (elapsed_time, sample_time))
+
+ def get_result_obj (self, copy_obj = True):
+ """
+ Returns the result object of the trex_client's instance.
+
+ By default, returns a **copy** of the objects (so that changes to the original object are masked).
+
+ :parameters:
+ copy_obj : bool
+ False means that a reference to the original (possibly changing) object are passed
+
+ defaul value : **True**
+
+ :return:
+ the latest result object (see :class:`CTRexResult` for further details) with sampled data.
+
+ """
+ if copy_obj:
+ return copy.deepcopy(self.result_obj)
+ else:
+ return self.result_obj
+
+ def is_reserved (self):
+ """
+ Checks if TRex is currently reserved to any user or not.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** if TRex is reserved.
+ + **False** otherwise.
+
+ :raises:
+ ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ return self.server.is_reserved()
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def get_trex_daemon_log (self):
+ """
+ Get Trex daemon log.
+
+ :return:
+ String representation of TRex daemon log
+
+ :raises:
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case file could not be read.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ return binascii.a2b_base64(self.server.get_trex_daemon_log())
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def get_trex_log (self):
+ """
+ Get TRex CLI output log
+
+ :return:
+ String representation of TRex log
+
+ :raises:
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case file could not be fetched at server side.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ try:
+ return binascii.a2b_base64(self.server.get_trex_log())
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def get_trex_version (self):
+ """
+ Get TRex version details.
+
+ :return:
+ Trex details (Version, User, Date, Uuid, Git SHA) as ordered dictionary
+
+ :raises:
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case TRex version could not be determined.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+ + General Exception is case one of the keys is missing in response
+ """
+
+ try:
+ version_dict = OrderedDict()
+ result_lines = binascii.a2b_base64(self.server.get_trex_version()).split('\n')
+ for line in result_lines:
+ if not line:
+ continue
+ key, value = line.strip().split(':', 1)
+ version_dict[key.strip()] = value.strip()
+ for key in ('Version', 'User', 'Date', 'Uuid', 'Git SHA'):
+ if key not in version_dict:
+ raise Exception('get_trex_version: got server response without key: {0}'.format(key))
+ return version_dict
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def reserve_trex (self, user = None):
+ """
+ Reserves the usage of TRex to a certain user.
+
+ When TRex is reserved, it can't be reserved.
+
+ :parameters:
+ user : str
+ a username of the desired owner of TRex
+
+ default: current logged user
+
+ :return:
+ **True** if reservation made successfully
+
+ :raises:
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case TRex is reserved for another user than the one trying to make the reservation.
+ + :exc:`trex_exceptions.TRexInUseError`, in case TRex is currently running.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ username = user or self.__default_user
+ try:
+ return self.server.reserve_trex(user = username)
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
+ def cancel_reservation (self, user = None):
+ """
+ Cancels a current reservation of TRex to a certain user.
+
+ When TRex is reserved, no other user can start new TRex runs.
+
+
+ :parameters:
+ user : str
+ a username of the desired owner of TRex
+
+ default: current logged user
+
+ :return:
+ + **True** if reservation canceled successfully,
+ + **False** if there was no reservation at all.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexRequestDenied`, in case TRex is reserved for another user than the one trying to cancel the reservation.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+
+ username = user or self.__default_user
+ try:
+ return self.server.cancel_reservation(user = username)
+ except AppError as err:
+ self._handle_AppError_exception(err.args[0])
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
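+ # Usage sketch for the reservation API above (hypothetical username;
+ # assumes a connected CTRexClient instance named `trex`):
+ #
+ #   if trex.reserve_trex(user = 'dave'):
+ #       ...                               # run while TRex is reserved to 'dave'
+ #       trex.cancel_reservation(user = 'dave')
+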
+ def push_files (self, filepaths):
+ """
+ Pushes a file (or a list of files) to be stored locally on the server.
+
+ :parameters:
+ filepaths : str or list
+ a path to a file to be pushed to the server.
+ If a list of paths is passed, all of them will be pushed to the server.
+
+ :return:
+ + **True** if file(s) copied successfully.
+ + **False** otherwise.
+
+ :raises:
+ + :exc:`IOError`, in case a specified file wasn't found or could not be accessed.
+ + ProtocolError, in case of error in JSON-RPC protocol.
+
+ """
+ paths_list = None
+ if isinstance(filepaths, str):
+ paths_list = [filepaths]
+ elif isinstance(filepaths, list):
+ paths_list = filepaths
+ else:
+ raise TypeError("filepaths argument must be of type str or list")
+
+ for filepath in paths_list:
+ try:
+ if not os.path.exists(filepath):
+ raise IOError(errno.ENOENT, "The requested `{fname}` file wasn't found. Operation aborted.".format(
+ fname = filepath) )
+ else:
+ filename = os.path.basename(filepath)
+ with open(filepath, 'rb') as f:
+ file_content = f.read()
+ self.server.push_file(filename, binascii.b2a_base64(file_content))
+ finally:
+ self.prompt_verbose_data()
+ return True
+
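+ # Usage sketch (hypothetical file paths; assumes a connected CTRexClient
+ # instance named `trex`):
+ #
+ #   trex.push_files('cap2/dns.yaml')                      # single file
+ #   trex.push_files(['cap2/dns.yaml', 'cap2/dns.pcap'])   # list of files
+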
+ def is_query_relevance(self):
+ """
+ Checks whether enough time has passed since the previous server query (asking for live running data).
+
+ .. note:: The minimum allowed time between two consecutive samples is 0.5 seconds.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** if more than 0.5 seconds have passed since the last server query.
+ + **False** otherwise.
+
+ """
+ cur_time = time.time()
+ if cur_time-self._last_sample < 0.5:
+ return False
+ else:
+ self._last_sample = cur_time
+ return True
+
+ def call_server_mathod_safely (self, method_to_call):
+ try:
+ return method_to_call()
+ except socket.error as e:
+ if e.errno == errno.ECONNREFUSED:
+ raise SocketError(errno.ECONNREFUSED, "Connection from TRex server was refused. Please make sure the server is up.")
+
+ def check_server_connectivity (self):
+ """
+ Checks for valid server connectivity.
+ """
+ try:
+ socket.gethostbyname(self.trex_host)
+ return self.server.connectivity_check()
+ except socket.gaierror as e:
+ raise socket.gaierror(e.errno, "Could not resolve server hostname. Please make sure the hostname was entered correctly.")
+ except socket.error as e:
+ if e.errno == errno.ECONNREFUSED:
+ raise socket.error(errno.ECONNREFUSED, "Connection from TRex server was refused. Please make sure the server is up.")
+ finally:
+ self.prompt_verbose_data()
+
+ def prompt_verbose_data(self):
+ """
+ This method prints any available verbose data, but only if the `verbose` option has been turned on.
+ """
+ if self.verbose:
+ print ('\n')
+ print ("(*) JSON-RPC request:", self.history.request)
+ print ("(*) JSON-RPC response:", self.history.response)
+
+ def __verbose_print(self, print_str):
+ """
+ This private method prints the `print_str` string only if the self.verbose flag is turned on.
+
+ :parameters:
+ print_str : str
+ a string to be printed
+
+ :returns:
+ None
+ """
+ if self.verbose:
+ print (print_str)
+
+
+
+ def _handle_AppError_exception(self, err):
+ """
+ This private method triggers TRex-dedicated exception generation in case a general application error (AppError) has been raised.
+ """
+ # handle known exceptions based on known error codes.
+ # if error code is not known, raise ProtocolError
+ raise exception_handler.gen_exception(err)
+
+
+class CTRexResult(object):
+ """
+ A class containing all results received from TRex.
+
+ On top of containing the results, this class offers easier data access and extended result-processing options
+ """
+ def __init__(self, max_history_size):
+ """
+ Instantiate a TRex result object
+
+ :parameters:
+ max_history_size : int
+ a number to set the maximum history size of a single TRex run. Each sampling adds a new item to history.
+
+ """
+ self._history = deque(maxlen = max_history_size)
+ self.clear_results()
+ self.latency_checked = True
+
+ def __repr__(self):
+ return ("Is valid history? {arg}\n".format( arg = self.is_valid_hist() ) +
+ "Done warmup? {arg}\n".format( arg = self.is_done_warmup() ) +
+ "Expected tx rate: {arg}\n".format( arg = self.get_expected_tx_rate() ) +
+ "Current tx rate: {arg}\n".format( arg = self.get_current_tx_rate() ) +
+ "Maximum latency: {arg}\n".format( arg = self.get_max_latency() ) +
+ "Average latency: {arg}\n".format( arg = self.get_avg_latency() ) +
+ "Average window latency: {arg}\n".format( arg = self.get_avg_window_latency() ) +
+ "Total drops: {arg}\n".format( arg = self.get_total_drops() ) +
+ "Drop rate: {arg}\n".format( arg = self.get_drop_rate() ) +
+ "History size so far: {arg}\n".format( arg = len(self._history) ) )
+
+ def get_expected_tx_rate (self):
+ """
+ Fetches the expected TX rate in various units representation
+
+ :parameters:
+ None
+
+ :return:
+ dictionary containing the expected TX rate, where the key is the measurement units, and the value is the measurement value.
+
+ """
+ return self._expected_tx_rate
+
+ def get_current_tx_rate (self):
+ """
+ Fetches the current TX rate in various units representation
+
+ :parameters:
+ None
+
+ :return:
+ dictionary containing the current TX rate, where the key is the measurement units, and the value is the measurement value.
+
+ """
+ return self._current_tx_rate
+
+ def get_max_latency (self):
+ """
+ Fetches the maximum latency measured on each of the interfaces
+
+ :parameters:
+ None
+
+ :return:
+ dictionary containing the maximum latency, where the key is the measurement interface (`c` indicates client), and the value is the measurement value.
+
+ """
+ return self._max_latency
+
+ def get_avg_latency (self):
+ """
+ Fetches the average latency measured on each of the interfaces from the start of TRex run
+
+ :parameters:
+ None
+
+ :return:
+ dictionary containing the average latency, where the key is the measurement interface (`c` indicates client), and the value is the measurement value.
+
+ The `all` key represents the average of all interfaces' average
+
+ """
+ return self._avg_latency
+
+ def get_avg_window_latency (self):
+ """
+ Fetches the average latency measured on each of the interfaces from all the samples currently stored in the window.
+
+ :parameters:
+ None
+
+ :return:
+ dictionary containing the average latency, where the key is the measurement interface (`c` indicates client), and the value is the measurement value.
+
+ The `all` key represents the average of all interfaces' average
+
+ """
+ return self._avg_window_latency
+
+ def get_total_drops (self):
+ """
+ Fetches the total number of drops identified from the moment TRex run began.
+
+ :parameters:
+ None
+
+ :return:
+ total drops count (as int)
+
+ """
+ return self._total_drops
+
+ def get_drop_rate (self):
+ """
+ Fetches the most recent drop rate in pkts/sec units.
+
+ :parameters:
+ None
+
+ :return:
+ current drop rate (as float)
+
+ """
+ return self._drop_rate
+
+ def is_valid_hist (self):
+ """
+ Checks if the result object contains valid data.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** if history is valid.
+ + **False** otherwise.
+
+ """
+ return self.valid
+
+ def set_valid_hist (self, valid_stat = True):
+ """
+ Sets the result object's validity status.
+
+ :parameters:
+ valid_stat : bool
+ defines the validity status
+
+ default value : **True**
+
+ :return:
+ None
+
+ """
+ self.valid = valid_stat
+
+ def is_done_warmup (self):
+ """
+ Checks whether the latest TX-rate results indicate that TRex has reached its expected TX-rate.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** if expected TX-rate has been reached.
+ + **False** otherwise.
+
+ """
+ return self._done_warmup
+
+ def get_last_value (self, tree_path_to_key, regex = None):
+ """
+ A dynamic getter from the latest sampled data item stored in the result object.
+
+ :parameters:
+ tree_path_to_key : str
+ defines a path to desired data.
+
+ .. tip:: | Use '.' to enter one level deeper in dictionary hierarchy.
+ | Use '[i]' to access the i'th indexed object of an array.
+
+ regex : str
+ a regex to filter results out of a multiple-results set.
+
+ The filter applies only to keys of dictionary type.
+
+ default value : **None**
+
+ :return:
+ + a list of values relevant to the specified path
+ + None if no results were fetched or the history isn't valid.
+
+ """
+ if not self.is_valid_hist():
+ return None
+ else:
+ return CTRexResult.__get_value_by_path(self._history[len(self._history)-1], tree_path_to_key, regex)
+
+ def get_value_list (self, tree_path_to_key, regex = None, filter_none = True):
+ """
+ A dynamic getter from all sampled data items stored in the result object.
+
+ :parameters:
+ tree_path_to_key : str
+ defines a path to desired data.
+
+ .. tip:: | Use '.' to enter one level deeper in dictionary hierarchy.
+ | Use '[i]' to access the i'th indexed object of an array.
+
+ regex : str
+ a regex to filter results out of a multiple-results set.
+
+ The filter applies only to keys of dictionary type.
+
+ default value : **None**
+
+ filter_none : bool
+ specify whether None results should be filtered out or not.
+
+ default value : **True**
+
+ :return:
+ + a list of values relevant to the specified path. Each item on the list refers to a single server sample.
+ + None if no results were fetched or the history isn't valid.
+ """
+
+ if not self.is_valid_hist():
+ return None
+ else:
+ raw_list = list( map(lambda x: CTRexResult.__get_value_by_path(x, tree_path_to_key, regex), self._history) )
+ if filter_none:
+ return list(filter(lambda x: x is not None, raw_list))
+ else:
+ return raw_list
+
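+ # Sketch of the tree-path syntax used by the two getters above (assumes a
+ # populated CTRexResult instance named `res`; the dump keys shown are the
+ # ones referenced later in update_result_data):
+ #
+ #   res.get_last_value("trex-global.data.m_total_tx_pkts")
+ #   res.get_value_list("trex-global.data", regex = "m_tx_\w+")
+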
+ def get_latest_dump(self):
+ """
+ A getter to the latest sampled data item stored in the result object.
+
+ :parameters:
+ None
+
+ :return:
+ + a dictionary of the latest data item
+ + an empty dictionary if history is empty.
+
+ """
+ history_size = len(self._history)
+ if history_size != 0:
+ return self._history[len(self._history) - 1]
+ else:
+ return {}
+
+ def update_result_data (self, latest_dump):
+ """
+ Integrates a `latest_dump` dictionary into the CTRexResult object.
+
+ :parameters:
+ latest_dump : dict
+ a dictionary with the items desired to be integrated into the object history and stats
+
+ :return:
+ None
+
+ """
+ # add latest dump to history
+ if latest_dump != {}:
+ self._history.append(latest_dump)
+ if not self.valid:
+ self.valid = True
+
+ # parse important fields and calculate averages and others
+ if self._expected_tx_rate is None:
+ # get the expected data only once since it doesn't change
+ self._expected_tx_rate = CTRexResult.__get_value_by_path(latest_dump, "trex-global.data", "m_tx_expected_\w+")
+
+ self._current_tx_rate = CTRexResult.__get_value_by_path(latest_dump, "trex-global.data", "m_tx_(?!expected_)\w+")
+ if not self._done_warmup and self._expected_tx_rate is not None:
+ # check for up to 2% change between expected and actual
+ if (self._current_tx_rate['m_tx_bps']/self._expected_tx_rate['m_tx_expected_bps'] > 0.98):
+ self._done_warmup = True
+
+ # handle latency data
+ if self.latency_checked:
+ latency_pre = "trex-latency"
+ self._max_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), "max-")#None # TBC
+ # support old typo
+ if self._max_latency is None:
+ latency_pre = "trex-latecny"
+ self._max_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), "max-")
+
+ self._avg_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), "avg-")#None # TBC
+ self._avg_latency = CTRexResult.__avg_all_and_rename_keys(self._avg_latency)
+
+ avg_win_latency_list = self.get_value_list("{latency}.data".format(latency = latency_pre), "avg-")
+ self._avg_window_latency = CTRexResult.__calc_latency_win_stats(avg_win_latency_list)
+
+ tx_pkts = CTRexResult.__get_value_by_path(latest_dump, "trex-global.data.m_total_tx_pkts")
+ rx_pkts = CTRexResult.__get_value_by_path(latest_dump, "trex-global.data.m_total_rx_pkts")
+ if tx_pkts is not None and rx_pkts is not None:
+ self._total_drops = tx_pkts - rx_pkts
+ self._drop_rate = CTRexResult.__get_value_by_path(latest_dump, "trex-global.data.m_rx_drop_bps")
+
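+ # Minimal sketch of feeding samples into the object (assumes `sample` is a
+ # dump dictionary in the server's format, obtained elsewhere; not a complete
+ # example):
+ #
+ #   res = CTRexResult(max_history_size = 100)
+ #   res.update_result_data(sample)
+ #   if res.is_done_warmup():
+ #       print(res.get_drop_rate())
+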
+ def clear_results (self):
+ """
+ Clears all results and sets the history's validity to `False`
+
+ :parameters:
+ None
+
+ :return:
+ None
+
+ """
+ self.valid = False
+ self._done_warmup = False
+ self._expected_tx_rate = None
+ self._current_tx_rate = None
+ self._max_latency = None
+ self._avg_latency = None
+ self._avg_window_latency = None
+ self._total_drops = None
+ self._drop_rate = None
+ self._history.clear()
+
+ @staticmethod
+ def __get_value_by_path (dct, tree_path, regex = None):
+ try:
+ for i, p in re.findall(r'(\d+)|([\w|-]+)', tree_path):
+ dct = dct[p or int(i)]
+ if regex is not None and isinstance(dct, dict):
+ res = {}
+ for key,val in dct.items():
+ match = re.match(regex, key)
+ if match:
+ res[key]=val
+ return res
+ else:
+ return dct
+ except (KeyError, TypeError):
+ return None
+
+ @staticmethod
+ def __calc_latency_win_stats (latency_win_list):
+ res = {'all' : None }
+ port_dict = {'all' : []}
+ list( map(lambda x: CTRexResult.__update_port_dict(x, port_dict), latency_win_list) )
+
+ # finally, calculate averages for each list
+ res['all'] = float("%.3f" % (sum(port_dict['all'])/float(len(port_dict['all']))) )
+ port_dict.pop('all')
+ for port, avg_list in port_dict.items():
+ res[port] = float("%.3f" % (sum(avg_list)/float(len(avg_list))) )
+
+ return res
+
+ @staticmethod
+ def __update_port_dict (src_avg_dict, dest_port_dict):
+ all_list = src_avg_dict.values()
+ dest_port_dict['all'].extend(all_list)
+ for key, val in src_avg_dict.items():
+ reg_res = re.match("avg-(\d+)", key)
+ if reg_res:
+ tmp_key = "port"+reg_res.group(1)
+ if tmp_key in dest_port_dict:
+ dest_port_dict[tmp_key].append(val)
+ else:
+ dest_port_dict[tmp_key] = [val]
+
+ @staticmethod
+ def __avg_all_and_rename_keys (src_dict):
+ res = {}
+ all_list = src_dict.values()
+ res['all'] = float("%.3f" % (sum(all_list)/float(len(all_list))) )
+ for key, val in src_dict.items():
+ reg_res = re.match("avg-(\d+)", key)
+ if reg_res:
+ tmp_key = "port"+reg_res.group(1)
+ res[tmp_key] = val # don't touch original fields values
+ return res
+
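+ # Example of the renaming performed by __avg_all_and_rename_keys (input ->
+ # output), kept as a comment sketch since the method is private:
+ #   {'avg-1': 5.0, 'avg-2': 7.0}  ->  {'all': 6.0, 'port1': 5.0, 'port2': 7.0}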
+
+
+if __name__ == "__main__":
+ pass
+
diff --git a/scripts/automation/trex_control_plane/stf/trex_daemon_server.py b/scripts/automation/trex_control_plane/stf/trex_daemon_server.py
new file mode 100755
index 00000000..9784d42a
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/trex_daemon_server.py
@@ -0,0 +1,79 @@
+#!/usr/bin/python
+
+import outer_packages
+import daemon
+from trex_server import do_main_program, trex_parser
+import CCustomLogger
+
+import logging
+import time
+import sys
+import os, errno
+import grp
+import signal
+from daemon import runner
+from extended_daemon_runner import ExtendedDaemonRunner
+import lockfile
+
+class TRexServerApp(object):
+ def __init__(self):
+ TRexServerApp.create_working_dirs()
+ self.stdin_path = '/dev/null'
+ self.stdout_path = '/dev/tty' # All standard prints will be redirected here.
+ self.stderr_path = "/var/log/trex/trex_daemon_server.log" # All stderr and log messages will be redirected here
+ self.pidfile_path = '/var/run/trex/trex_daemon_server.pid'
+ self.pidfile_timeout = 5 # timeout in seconds
+
+ def run(self):
+ do_main_program()
+
+
+ @staticmethod
+ def create_working_dirs():
+ if not os.path.exists('/var/log/trex'):
+ os.mkdir('/var/log/trex')
+ if not os.path.exists('/var/run/trex'):
+ os.mkdir('/var/run/trex')
+
+
+
+def main ():
+
+ trex_app = TRexServerApp()
+
+ # setup the logger
+ default_log_path = '/var/log/trex/trex_daemon_server.log'
+
+ try:
+ CCustomLogger.setup_daemon_logger('TRexServer', default_log_path)
+ logger = logging.getLogger('TRexServer')
+ logger.setLevel(logging.INFO)
+ formatter = logging.Formatter("%(asctime)s %(name)-10s %(module)-20s %(levelname)-8s %(message)s")
+ handler = logging.FileHandler("/var/log/trex/trex_daemon_server.log")
+ logger.addHandler(handler)
+ except EnvironmentError as e:
+ if e.errno == errno.EACCES: # catching permission denied error
+ print "Launching user must have sudo privileges in order to run TRex daemon.\nTerminating daemon process."
+ exit(-1)
+
+ daemon_runner = ExtendedDaemonRunner(trex_app, trex_parser)
+
+ #This ensures that the logger file handle does not get closed during daemonization
+ daemon_runner.daemon_context.files_preserve=[handler.stream]
+
+ try:
+ if not set(['start', 'stop']).isdisjoint(set(sys.argv)):
+ print "Logs are saved at: {log_path}".format( log_path = default_log_path )
+ daemon_runner.do_action()
+
+ except lockfile.LockTimeout as inst:
+ logger.error(inst)
+ print inst
+ print """
+ Please try again once the timeout has been reached.
+ If this error continues, consider killing the process manually and restart the daemon."""
+
+
+if __name__ == "__main__":
+ main()
diff --git a/scripts/automation/trex_control_plane/stf/trex_exceptions.py b/scripts/automation/trex_control_plane/stf/trex_exceptions.py
new file mode 100755
index 00000000..0de38411
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/trex_exceptions.py
@@ -0,0 +1,140 @@
+#!/router/bin/python
+
+#from rpc_exceptions import RPCExceptionHandler, WrappedRPCError
+
+from jsonrpclib import Fault, ProtocolError, AppError
+
+class RPCError(Exception):
+ """
+ This is the general RPC error exception class from which :exc:`trex_exceptions.TRexException` inherits.
+
+ Every exception in this class has an error format following the JSON-RPC convention: code, message and data.
+
+ """
+ def __init__(self, code, message, remote_data = None):
+ self.code = code
+ self.msg = message or self._default_message
+ self.data = remote_data
+ self.args = (code, self.msg, remote_data)
+
+ def __str__(self):
+ return self.__repr__()
+ def __repr__(self):
+ if self.args[2] is not None:
+ return u"[errcode:%r] %r. Extended data: %r" % (self.args[0], self.args[1], self.args[2])
+ else:
+ return u"[errcode:%r] %r" % (self.args[0], self.args[1])
+
+class TRexException(RPCError):
+ """
+ This is the most general TRex exception.
+
+ All exceptions inheriting from this class have an error code and a default message which describes the most common use case of the error.
+
+ This exception isn't used by default; it is raised only when an error unrelated to ProtocolError occurs and can't be resolved to any of the derived exceptions.
+
+ """
+ code = -10
+ _default_message = 'TRex encountered an unexpected error. please contact TRex dev team.'
+ # api_name = 'TRex'
+
+class TRexError(TRexException):
+ """
+ Indicates a general TRex run failure, e.g. due to wrong input parameters or reachability issues.
+
+ This exception isn't used by default; it is raised only when an error unrelated to ProtocolError occurs and can't be resolved to any of the derived exceptions.
+ """
+ code = -11
+ _default_message = 'TRex run failed due to wrong input parameters, or due to reachability issues.'
+
+class TRexWarning(TRexException):
+ """ Indicates a warning from TRex server. When this exception raises it normally used to indicate required data isn't ready yet """
+ code = -12
+ _default_message = 'TRex is starting (data is not available yet).'
+
+class TRexRequestDenied(TRexException):
+ """ Indicates the desired reques was denied by the server """
+ code = -33
+ _default_message = 'TRex desired request denied because the requested resource is already taken. Try again once TRex is back in IDLE state.'
+
+class TRexInUseError(TRexException):
+ """
+ Indicates that TRex is currently in use
+
+ """
+ code = -13
+ _default_message = 'TRex is already being used by another user or process. Try again once TRex is back in IDLE state.'
+
+class TRexRunFailedError(TRexException):
+ """ Indicates that TRex has failed due to some reason. This Exception is used when TRex process itself terminates due to unknown reason """
+ code = -14
+ _default_message = ''
+
+class TRexIncompleteRunError(TRexException):
+ """
+ Indicates that TRex has failed for some reason.
+ This exception is used when the TRex process itself terminated with an error, or was terminated by external intervention in the OS.
+
+ """
+ code = -15
+ _default_message = 'TRex run was terminated unexpectedly by outer process or by the hosting OS'
+
+EXCEPTIONS = [TRexException, TRexError, TRexWarning, TRexInUseError, TRexRequestDenied, TRexRunFailedError, TRexIncompleteRunError]
+
+class CExceptionHandler(object):
+ """
+ CExceptionHandler is responsible for generating TRex API related exceptions on the client side.
+ """
+ def __init__(self, exceptions):
+ """
+ Instantiate a CExceptionHandler object
+
+ :parameters:
+
+ exceptions : list
+ a list of all TRex acceptable exception objects.
+
+ default list:
+ - :exc:`trex_exceptions.TRexException`
+ - :exc:`trex_exceptions.TRexError`
+ - :exc:`trex_exceptions.TRexWarning`
+ - :exc:`trex_exceptions.TRexInUseError`
+ - :exc:`trex_exceptions.TRexRequestDenied`
+ - :exc:`trex_exceptions.TRexRunFailedError`
+ - :exc:`trex_exceptions.TRexIncompleteRunError`
+
+ """
+ if isinstance(exceptions, type):
+ exceptions = [ exceptions, ]
+ self.exceptions = exceptions
+ self.exceptions_dict = dict((e.code, e) for e in self.exceptions)
+
+ def gen_exception (self, err):
+ """
+ Generates an exception based on a general ProtocolError exception object `err`.
+
+ :parameters:
+
+ err : exception
+ a ProtocolError exception raised by :class:`trex_client.CTRexClient` class
+
+ :return:
+ A TRex exception from the exception list defined in class creation.
+
+ If no matching exception is found, a general TRexException is returned.
+
+ """
+ code, message, data = err
+ try:
+ exp = self.exceptions_dict[code]
+ return exp(exp.code, message, data)
+ except KeyError:
+ # revert to TRexException when an unknown application error is raised
+ return TRexException(err)
+
+
+exception_handler = CExceptionHandler( EXCEPTIONS )
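+
+# Usage sketch: map a known application error code to its TRex exception
+# (the tuple layout mirrors the unpacking done in gen_exception above):
+#
+#   exc = exception_handler.gen_exception((-12, 'TRex is starting', None))
+#   isinstance(exc, TRexWarning)   # True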
+
diff --git a/scripts/automation/trex_control_plane/stf/trex_status.py b/scripts/automation/trex_control_plane/stf/trex_status.py
new file mode 100644
index 00000000..f132720c
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/trex_status.py
@@ -0,0 +1,8 @@
+#!/router/bin/python
+
+# define the states a TRex instance can be in during its lifetime
+# TRexStatus = Enum('TRexStatus', 'Idle Starting Running')
+
+IDLE = 1
+STARTING = 2
+RUNNING = 3
diff --git a/scripts/automation/trex_control_plane/stf/trex_status_e.py b/scripts/automation/trex_control_plane/stf/trex_status_e.py
new file mode 100755
index 00000000..79a25acc
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stf/trex_status_e.py
@@ -0,0 +1,11 @@
+#!/router/bin/python
+
+try:
+ from . import outer_packages
+except:
+ import outer_packages
+from enum import Enum
+
+
+# define the states a TRex instance can be in during its lifetime
+TRexStatus = Enum('TRexStatus', 'Idle Starting Running')
diff --git a/scripts/automation/trex_control_plane/stl/examples/hlt_udp_simple.py b/scripts/automation/trex_control_plane/stl/examples/hlt_udp_simple.py
index 56e2005a..bdec9999 100644
--- a/scripts/automation/trex_control_plane/stl/examples/hlt_udp_simple.py
+++ b/scripts/automation/trex_control_plane/stl/examples/hlt_udp_simple.py
@@ -67,15 +67,15 @@ if __name__ == "__main__":
args = parser.parse_args();
hltapi = CTRexHltApi()
- print 'Connecting to TRex'
+ print('Connecting to TRex')
res = hltapi.connect(device = args.server, port_list = [0, 1], reset = True, break_locks = True)
check_res(res)
ports = res['port_handle']
if len(ports) < 2:
error('Should have at least 2 ports for this test')
- print 'Connected, acquired ports: %s' % ports
+ print('Connected, acquired ports: %s' % ports)
- print 'Creating traffic'
+ print('Creating traffic')
res = hltapi.traffic_config(mode = 'create', bidirectional = True,
port_handle = ports[0], port_handle2 = ports[1],
@@ -91,12 +91,12 @@ if __name__ == "__main__":
)
check_res(res)
- print 'Starting traffic'
+ print('Starting traffic')
res = hltapi.traffic_control(action = 'run', port_handle = ports[:2])
check_res(res)
wait_with_progress(args.duration)
- print 'Stopping traffic'
+ print('Stopping traffic')
res = hltapi.traffic_control(action = 'stop', port_handle = ports[:2])
check_res(res)
@@ -107,4 +107,4 @@ if __name__ == "__main__":
res = hltapi.cleanup_session(port_handle = 'all')
check_res(res)
- print 'Done'
+ print('Done')
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_pcap.py b/scripts/automation/trex_control_plane/stl/examples/stl_pcap.py
index 317f44c7..45ddc24b 100644
--- a/scripts/automation/trex_control_plane/stl/examples/stl_pcap.py
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_pcap.py
@@ -1,7 +1,7 @@
import stl_path
from trex_stl_lib.api import *
import argparse
-
+import sys
def create_vm (ip_start, ip_end):
vm =[
@@ -17,11 +17,23 @@ def create_vm (ip_start, ip_end):
return vm
-
-def inject_pcap (pcap_file, port, loop_count, ipg_usec, use_vm):
+# warning: might make test slow
+def alter_streams(streams, remove_fcs, vlan_id):
+ for stream in streams:
+ packet = Ether(stream.pkt)
+ if vlan_id >= 0 and vlan_id <= 4096:
+ packet_l3 = packet.payload
+ packet = Ether() / Dot1Q(vlan = vlan_id) / packet_l3
+ if remove_fcs and packet.lastlayer().name == 'Padding':
+ packet.lastlayer().underlayer.remove_payload()
+ packet = STLPktBuilder(packet)
+ stream.fields['packet'] = packet.dump_pkt()
+ stream.pkt = base64.b64decode(stream.fields['packet']['binary'])
+
+def inject_pcap (pcap_file, server, port, loop_count, ipg_usec, use_vm, remove_fcs, vlan_id):
# create client
- c = STLClient()
+ c = STLClient(server = server)
try:
if use_vm:
@@ -32,13 +44,16 @@ def inject_pcap (pcap_file, port, loop_count, ipg_usec, use_vm):
profile = STLProfile.load_pcap(pcap_file, ipg_usec = ipg_usec, loop_count = loop_count, vm = vm)
print("Loaded pcap {0} with {1} packets...\n".format(pcap_file, len(profile)))
+ streams = profile.get_streams()
+ if remove_fcs or (vlan_id >= 0 and vlan_id <= 4096):
+ alter_streams(streams, remove_fcs, vlan_id)
# uncomment this for simulator run
- #STLSim().run(profile.get_streams(), outfile = 'out.cap')
+ #STLSim().run(profile.get_streams(), outfile = '/auto/srg-sce-swinfra-usr/emb/users/ybrustin/out.pcap')
c.connect()
c.reset(ports = [port])
- stream_ids = c.add_streams(profile.get_streams(), ports = [port])
+ stream_ids = c.add_streams(streams, ports = [port])
c.clear_stats()
@@ -51,6 +66,7 @@ def inject_pcap (pcap_file, port, loop_count, ipg_usec, use_vm):
except STLError as e:
print(e)
+ sys.exit(1)
finally:
c.disconnect()
@@ -59,17 +75,22 @@ def inject_pcap (pcap_file, port, loop_count, ipg_usec, use_vm):
def setParserOptions():
parser = argparse.ArgumentParser(prog="stl_pcap.py")
- parser.add_argument("-f", help = "pcap file to inject",
+ parser.add_argument("-f", "--file", help = "pcap file to inject",
dest = "pcap",
required = True,
type = str)
- parser.add_argument("-p", help = "port to inject on",
+ parser.add_argument("-s", "--server", help = "TRex server address",
+ dest = "server",
+ default = 'localhost',
+ type = str)
+
+ parser.add_argument("-p", "--port", help = "port to inject on",
dest = "port",
required = True,
type = int)
- parser.add_argument("-n", help = "How many times to inject pcap [default is 1, 0 means forever]",
+ parser.add_argument("-n", "--number", help = "How many times to inject pcap [default is 1, 0 means forever]",
dest = "loop_count",
default = 1,
type = int)
@@ -79,19 +100,28 @@ def setParserOptions():
default = 10.0,
type = float)
-
parser.add_argument("-x", help = "Iterate over IP dest",
dest = "use_vm",
default = False,
action = "store_true")
+ parser.add_argument("-r", "--remove-fcs", help = "Remove FCS if exists. Limited by Scapy capabilities.",
+ dest = "remove",
+ default = False,
+ action = "store_true")
+
+ parser.add_argument("-v", "--vlan", help = "Add VLAN header with this ID. Limited by Scapy capabilities.",
+ dest = "vlan",
+ default = -1,
+ type = int)
+
return parser
def main ():
parser = setParserOptions()
options = parser.parse_args()
- inject_pcap(options.pcap, options.port, options.loop_count, options.ipg, options.use_vm)
+ inject_pcap(options.pcap, options.server, options.port, options.loop_count, options.ipg, options.use_vm, options.remove, options.vlan)
# inject pcap
if __name__ == '__main__':
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py
index c614c4bd..d6d66ec3 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py
@@ -7,11 +7,14 @@ import platform
TREX_STL_EXT_PATH = os.environ.get('TREX_STL_EXT_PATH')
# take default
-if not TREX_STL_EXT_PATH:
+if not TREX_STL_EXT_PATH or not os.path.exists(TREX_STL_EXT_PATH):
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+ TREX_STL_EXT_PATH = os.path.normpath(os.path.join(CURRENT_PATH, os.pardir, 'external_libs'))
+if not os.path.exists(TREX_STL_EXT_PATH):
# ../../../../external_libs
- TREX_STL_EXT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir, os.pardir, os.pardir, os.pardir, 'external_libs'))
-
+ TREX_STL_EXT_PATH = os.path.normpath(os.path.join(CURRENT_PATH, os.pardir, os.pardir, os.pardir, os.pardir, 'external_libs'))
+if not os.path.exists(TREX_STL_EXT_PATH):
+ raise Exception('Could not determine path of external_libs, try setting TREX_STL_EXT_PATH variable')
# the modules required
# py-dep requires python2/python3 directories
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_hltapi.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_hltapi.py
index 9387c3a6..b506137b 100755
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_hltapi.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_hltapi.py
@@ -174,7 +174,7 @@ import socket
import copy
from collections import defaultdict
-from trex_stl_lib.api import *
+from .api import *
from .trex_stl_types import *
from .utils.common import get_number
@@ -249,7 +249,7 @@ def print_brief_stats(res):
title_str = ' '*3
tx_str = 'TX:'
rx_str = 'RX:'
- for port_id, stat in res.iteritems():
+ for port_id, stat in res.items():
if type(port_id) is not int:
continue
title_str += ' '*10 + 'Port%s' % port_id
@@ -663,7 +663,7 @@ class CTRexHltApi(object):
stats = self.trex_client.get_stats(port_handle)
except Exception as e:
return HLT_ERR('Could not retrieve stats: %s' % e if isinstance(e, STLError) else traceback.format_exc())
- for port_id, stat_dict in stats.iteritems():
+ for port_id, stat_dict in stats.items():
if is_integer(port_id):
hlt_stats_dict[port_id] = {
'aggregate': {
diff --git a/scripts/external_libs/scapy-2.3.1/python2/scapy/data.py b/scripts/external_libs/scapy-2.3.1/python2/scapy/data.py
index de01cfc9..9c5bb6bd 100644
--- a/scripts/external_libs/scapy-2.3.1/python2/scapy/data.py
+++ b/scripts/external_libs/scapy-2.3.1/python2/scapy/data.py
@@ -105,6 +105,7 @@ def load_services(filename):
spaces = re.compile("[ \t]+|\n")
tdct=DADict(_name="%s-tcp"%filename)
udct=DADict(_name="%s-udp"%filename)
+ return tdct,udct
try:
f=open(filename)
for l in f:
diff --git a/scripts/external_libs/scapy-2.3.1/python3/scapy/data.py b/scripts/external_libs/scapy-2.3.1/python3/scapy/data.py
index fc92ebe2..2aded28a 100644
--- a/scripts/external_libs/scapy-2.3.1/python3/scapy/data.py
+++ b/scripts/external_libs/scapy-2.3.1/python3/scapy/data.py
@@ -105,6 +105,7 @@ def load_services(filename):
spaces = re.compile("[ \t]+|\n")
tdct=DADict(_name="%s-tcp"%filename)
udct=DADict(_name="%s-udp"%filename)
+ return tdct,udct
try:
f=open(filename)
for l in f:
diff --git a/scripts/find_python.sh b/scripts/find_python.sh
index 929e873d..e9607fe5 100755
--- a/scripts/find_python.sh
+++ b/scripts/find_python.sh
@@ -24,12 +24,27 @@ function find_python {
exit -1
}
-if [ -z "$PYTHON" ]; then
- # for development here - move us to python 3 for now
- if [ "$USER" == "imarom" ] || [ "$USER" == "hhaim" ] || [ "$USER" == "ybrustin" ] || [ "$USER" == "ibarnea" ]; then
- PYTHON=/auto/proj-pcube-b/apps/PL-b/tools/python3.4/bin/python3
- else
- find_python
+function find_python3 {
+ MACHINE_PYTHON=python3
+ ITAY_PYTHON=/auto/proj-pcube-b/apps/PL-b/tools/python3.4/bin/python3
+ PYTHON3=$MACHINE_PYTHON
+ PCHECK=`$PYTHON3 -c "import sys; ver = sys.version_info[0] * 10 + sys.version_info[1];sys.exit(ver != 34)"`
+ if [ $? -eq 0 ]; then
+ return
+ fi
+ PYTHON3=$ITAY_PYTHON
+ PCHECK=`$PYTHON3 -c "import sys; ver = sys.version_info[0] * 10 + sys.version_info[1];sys.exit(ver != 34)"`
+ if [ $? -eq 0 ]; then
+ return
fi
+ echo "*** $PYTHON3 - python version does not match, 3.4 is required"
+ exit -1
+}
+
+if [ -z "$PYTHON" ]; then
+ find_python
fi
+if [ -z "$PYTHON3" ]; then
+ find_python3
+fi
diff --git a/scripts/run_functional_tests b/scripts/run_functional_tests
index 995b1b0d..6e6a00a1 100755
--- a/scripts/run_functional_tests
+++ b/scripts/run_functional_tests
@@ -1,12 +1,10 @@
#!/bin/bash
-#source find_python.sh
+source find_python.sh
cd automation/regression
-PYTHON=/usr/bin/python2
-PYTHON3=/auto/proj-pcube-b/apps/PL-b/tools/python3.4/bin/python3
-
# Python 2
+echo Python2 test
$PYTHON trex_unit_test.py --functional $@
if [ $? -eq 0 ]; then
printf "\n$PYTHON test succeeded\n\n"
@@ -16,6 +14,7 @@ else
fi
# Python 3
+echo Python3 test
$PYTHON3 trex_unit_test.py --functional $@
if [ $? -eq 0 ]; then
printf "\n$PYTHON3 test succeeded\n\n"