Diffstat (limited to 'scripts/automation/trex_control_plane')
-rwxr-xr-x  scripts/automation/trex_control_plane/client/trex_adv_client.py                  2
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/external_packages.py          3
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py           294
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/packet_builder.py             8
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/yaml_utils.py               163
-rwxr-xr-x  scripts/automation/trex_control_plane/common/external_packages.py               28
-rwxr-xr-x  scripts/automation/trex_control_plane/common/rpc_defaults.yaml                 115
-rw-r--r--  scripts/automation/trex_control_plane/common/trex_status.py                      8
-rwxr-xr-x  scripts/automation/trex_control_plane/common/trex_streams.py                   248
-rwxr-xr-x  [-rw-r--r--]  scripts/automation/trex_control_plane/console/trex_console.py    248
-rwxr-xr-x  [-rw-r--r--]  scripts/automation/trex_control_plane/console/trex_root_path.py    0
-rw-r--r--  scripts/automation/trex_control_plane/console/trex_status.py                     2
-rwxr-xr-x  scripts/automation/trex_control_plane/examples/client_interactive_example.py     2
-rw-r--r--  scripts/automation/trex_control_plane/examples/interactive_stateless.py         14
-rwxr-xr-x  scripts/automation/trex_control_plane/unit_tests/control_plane_general_test.py   2
-rwxr-xr-x  scripts/automation/trex_control_plane/unit_tests/control_plane_unit_test.py      2
-rwxr-xr-x  scripts/automation/trex_control_plane/unit_tests/functional_test.py             24
17 files changed, 1112 insertions, 51 deletions
diff --git a/scripts/automation/trex_control_plane/client/trex_adv_client.py b/scripts/automation/trex_control_plane/client/trex_adv_client.py
index b3fe3dad..bf7ccf58 100755
--- a/scripts/automation/trex_control_plane/client/trex_adv_client.py
+++ b/scripts/automation/trex_control_plane/client/trex_adv_client.py
@@ -8,7 +8,7 @@ class CTRexAdvClient(trex_client.CTRexClient):
super(CTRexAdvClient, self).__init__(trex_host, max_history_size, trex_daemon_port, trex_zmq_port, verbose)
pass
- # T-REX KIWI advanced methods
+ # TRex KIWI advanced methods
def start_quick_trex(self, pcap_file, d, delay, dual, ipv6, times, interfaces):
try:
return self.server.start_quick_trex(pcap_file = pcap_file, duration = d, dual = dual, delay = delay, ipv6 = ipv6, times = times, interfaces = interfaces)
diff --git a/scripts/automation/trex_control_plane/client_utils/external_packages.py b/scripts/automation/trex_control_plane/client_utils/external_packages.py
index 4b10609b..e2bb37a5 100755
--- a/scripts/automation/trex_control_plane/client_utils/external_packages.py
+++ b/scripts/automation/trex_control_plane/client_utils/external_packages.py
@@ -8,7 +8,8 @@ ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir))
PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
CLIENT_UTILS_MODULES = ['zmq',
- 'dpkt-1.8.6'
+ 'dpkt-1.8.6',
+ 'PyYAML-3.01/lib'
]
def import_client_utils_modules():
diff --git a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
index 163c6923..ed14e6f8 100755
--- a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
+++ b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
@@ -6,9 +6,6 @@ import json
import general_utils
import re
from time import sleep
-from collections import namedtuple
-
-CmdResponse = namedtuple('CmdResponse', ['success', 'data'])
class bcolors:
BLUE = '\033[94m'
@@ -26,12 +23,12 @@ class BatchMessage(object):
self.rpc_client = rpc_client
self.batch_list = []
- def add (self, method_name, params={}):
+ def add (self, method_name, params = {}):
id, msg = self.rpc_client.create_jsonrpc_v2(method_name, params, encode = False)
self.batch_list.append(msg)
- def invoke(self, block = False):
+ def invoke (self, block = False):
if not self.rpc_client.connected:
return False, "Not connected to server"
@@ -39,9 +36,9 @@ class BatchMessage(object):
rc, resp_list = self.rpc_client.send_raw_msg(msg, block = False)
if len(self.batch_list) == 1:
- return CmdResponse(True, [CmdResponse(rc, resp_list)])
+ return True, [(rc, resp_list)]
else:
- return CmdResponse(rc, resp_list)
+ return rc, resp_list
# JSON RPC v2.0 client
@@ -130,7 +127,7 @@ class JsonRpcClient(object):
self.socket.send(msg, flags = zmq.NOBLOCK)
except zmq.error.ZMQError as e:
self.disconnect()
- return CmdResponse(False, "Failed To Get Send Message")
+ return False, "Failed To Get Send Message"
got_response = False
@@ -148,7 +145,7 @@ class JsonRpcClient(object):
if not got_response:
self.disconnect()
- return CmdResponse(False, "Failed To Get Server Response")
+ return False, "Failed To Get Server Response"
self.verbose_msg("Server Response:\n\n" + self.pretty_json(response) + "\n")
@@ -162,19 +159,19 @@ class JsonRpcClient(object):
for single_response in response_json:
rc, msg = self.process_single_response(single_response)
- rc_list.append( CmdResponse(rc, msg) )
+ rc_list.append( (rc, msg) )
- return CmdResponse(True, rc_list)
+ return True, rc_list
else:
rc, msg = self.process_single_response(response_json)
- return CmdResponse(rc, msg)
+ return rc, msg
def process_single_response (self, response_json):
if (response_json.get("jsonrpc") != "2.0"):
- return False, "Malformed Response ({0})".format(str(response))
+ return False, "Malfromed Response ({0})".format(str(response))
# error reported by server
if ("error" in response_json):
@@ -185,7 +182,7 @@ class JsonRpcClient(object):
# if no error there should be a result
if ("result" not in response_json):
- return False, "Malformed Response ({0})".format(str(response))
+ return False, "Malfromed Response ({0})".format(str(response))
return True, response_json["result"]
@@ -194,7 +191,7 @@ class JsonRpcClient(object):
def set_verbose(self, mode):
self.verbose = mode
- def disconnect(self):
+ def disconnect (self):
if self.connected:
self.socket.close(linger = 0)
self.context.destroy(linger = 0)
@@ -247,3 +244,270 @@ class JsonRpcClient(object):
print "Shutting down RPC client\n"
if hasattr(self, "context"):
self.context.destroy(linger=0)
+
+# MOVE THIS TO DAN'S FILE
+class TrexStatelessClient(JsonRpcClient):
+
+ def __init__ (self, server, port, user):
+
+ super(TrexStatelessClient, self).__init__(server, port)
+
+ self.user = user
+ self.port_handlers = {}
+
+ self.supported_cmds = []
+ self.system_info = None
+ self.server_version = None
+
+
+ def whoami (self):
+ return self.user
+
+ def ping_rpc_server(self):
+
+ return self.invoke_rpc_method("ping", block = False)
+
+ def get_rpc_server_version (self):
+ return self.server_version
+
+ def get_system_info (self):
+ if not self.system_info:
+ return {}
+
+ return self.system_info
+
+ def get_supported_cmds(self):
+ if not self.supported_cmds:
+ return {}
+
+ return self.supported_cmds
+
+ def get_port_count (self):
+ if not self.system_info:
+ return 0
+
+ return self.system_info["port_count"]
+
+ # sync the client with all the server required data
+ def sync (self):
+
+ # get server version
+ rc, msg = self.invoke_rpc_method("get_version")
+ if not rc:
+ self.disconnect()
+ return rc, msg
+
+ self.server_version = msg
+
+ # get supported commands
+ rc, msg = self.invoke_rpc_method("get_supported_cmds")
+ if not rc:
+ self.disconnect()
+ return rc, msg
+
+ self.supported_cmds = [str(x) for x in msg if x]
+
+ # get system info
+ rc, msg = self.invoke_rpc_method("get_system_info")
+ if not rc:
+ self.disconnect()
+ return rc, msg
+
+ self.system_info = msg
+
+ return True, ""
+
+ def connect (self):
+ rc, err = super(TrexStatelessClient, self).connect()
+ if not rc:
+ return rc, err
+
+ return self.sync()
+
+
+ # take ownership over ports
+ def take_ownership (self, port_id_array, force = False):
+ if not self.connected:
+ return False, "Not connected to server"
+
+ batch = self.create_batch()
+
+ for port_id in port_id_array:
+ batch.add("acquire", params = {"port_id":port_id, "user":self.user, "force":force})
+
+ rc, resp_list = batch.invoke()
+ if not rc:
+ return rc, resp_list
+
+ for i, rc in enumerate(resp_list):
+ if rc[0]:
+ self.port_handlers[port_id_array[i]] = rc[1]
+
+ return True, resp_list
+
+
+ def release_ports (self, port_id_array):
+ batch = self.create_batch()
+
+ for port_id in port_id_array:
+
+ # let the server handle un-acquired errors
+ if self.port_handlers.get(port_id):
+ handler = self.port_handlers[port_id]
+ else:
+ handler = ""
+
+ batch.add("release", params = {"port_id":port_id, "handler":handler})
+
+
+ rc, resp_list = batch.invoke()
+ if not rc:
+ return rc, resp_list
+
+ for i, rc in enumerate(resp_list):
+ if rc[0]:
+ self.port_handlers.pop(port_id_array[i])
+
+ return True, resp_list
+
+ def get_owned_ports (self):
+ return self.port_handlers.keys()
+
+ # fetch port stats
+ def get_port_stats (self, port_id_array):
+ if not self.connected:
+ return False, "Not connected to server"
+
+ batch = self.create_batch()
+
+ # empty list means all
+ if port_id_array == []:
+ port_id_array = list([x for x in xrange(0, self.system_info["port_count"])])
+
+ for port_id in port_id_array:
+
+ # let the server handle un-acquired errors
+ if self.port_handlers.get(port_id):
+ handler = self.port_handlers[port_id]
+ else:
+ handler = ""
+
+ batch.add("get_port_stats", params = {"port_id":port_id, "handler":handler})
+
+
+ rc, resp_list = batch.invoke()
+
+ return rc, resp_list
+
+    # snapshot grabs the current stats and stream configuration of all owned ports
+ def snapshot(self):
+
+
+ if len(self.get_owned_ports()) == 0:
+ return {}
+
+ snap = {}
+
+ batch = self.create_batch()
+
+ for port_id in self.get_owned_ports():
+
+ batch.add("get_port_stats", params = {"port_id": port_id, "handler": self.port_handlers[port_id]})
+ batch.add("get_stream_list", params = {"port_id": port_id, "handler": self.port_handlers[port_id]})
+
+ rc, resp_list = batch.invoke()
+ if not rc:
+ return rc, resp_list
+
+        # responses arrive in pairs per port: (port stats, stream list)
+ index = 0
+ for port_id in self.get_owned_ports():
+ if not resp_list[index] or not resp_list[index + 1]:
+ snap[port_id] = None
+ continue
+
+ # fetch the first two
+ stats = resp_list[index][1]
+ stream_list = resp_list[index + 1][1]
+
+ port = {}
+ port['status'] = stats['status']
+ port['stream_list'] = []
+
+ # get all the streams
+ if len(stream_list) > 0:
+ batch = self.create_batch()
+ for stream_id in stream_list:
+ batch.add("get_stream", params = {"port_id": port_id, "stream_id": stream_id, "handler": self.port_handlers[port_id]})
+
+ rc, stream_resp_list = batch.invoke()
+ if not rc:
+ port = {}
+
+ port['streams'] = {}
+ for i, resp in enumerate(stream_resp_list):
+ if resp[0]:
+ port['streams'][stream_list[i]] = resp[1]
+
+ snap[port_id] = port
+
+ # move to next one
+ index += 2
+
+
+ return snap
+
+ # add stream
+ # def add_stream (self, port_id, stream_id, isg, next_stream_id, packet, vm=[]):
+ # if not port_id in self.get_owned_ports():
+ # return False, "Port {0} is not owned... please take ownership before adding streams".format(port_id)
+ #
+ # handler = self.port_handlers[port_id]
+ #
+ # stream = {}
+ # stream['enabled'] = True
+ # stream['self_start'] = True
+ # stream['isg'] = isg
+ # stream['next_stream_id'] = next_stream_id
+ # stream['packet'] = {}
+ # stream['packet']['binary'] = packet
+ # stream['packet']['meta'] = ""
+ # stream['vm'] = vm
+ # stream['rx_stats'] = {}
+ # stream['rx_stats']['enabled'] = False
+ #
+ # stream['mode'] = {}
+ # stream['mode']['type'] = 'continuous'
+ # stream['mode']['pps'] = 10.0
+ #
+ # params = {}
+ # params['handler'] = handler
+ # params['stream'] = stream
+ # params['port_id'] = port_id
+ # params['stream_id'] = stream_id
+ #
+ # print params
+ # return self.invoke_rpc_method('add_stream', params = params)
+
+ def add_stream(self, port_id_array, stream_pack_list):
+ batch = self.create_batch()
+
+ for port_id in port_id_array:
+ for stream_pack in stream_pack_list:
+ params = {"port_id": port_id,
+ "handler": self.port_handlers[port_id],
+ "stream_id": stream_pack.stream_id,
+ "stream": stream_pack.stream}
+ batch.add("add_stream", params=params)
+ rc, resp_list = batch.invoke()
+ if not rc:
+ return rc, resp_list
+
+ for i, rc in enumerate(resp_list):
+ if rc[0]:
+ print "Stream {0} - {1}".format(i, rc[1])
+ # self.port_handlers[port_id_array[i]] = rc[1]
+
+ return True, resp_list
+
+ # return self.invoke_rpc_method('add_stream', params = params)
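
Note on the new TrexStatelessClient above: every call is funneled through a BatchMessage, and acquired port handlers are cached in port_handlers for reuse by later calls. A minimal usage sketch, assuming a stateless RPC server is reachable on the console's default port 5050; the host, user name and port ids below are placeholders:

from client_utils.jsonrpc_client import TrexStatelessClient

client = TrexStatelessClient("localhost", 5050, "user")   # placeholder host/port/user
rc, err = client.connect()        # connect() also runs sync(): version, commands, system info
if not rc:
    raise Exception(err)

rc, resp_list = client.take_ownership([0, 1], force=False)
# each entry of resp_list is a (success, data) pair; successful handlers are cached
# in client.port_handlers and reused by get_port_stats / add_stream / release_ports
rc, stats = client.get_port_stats([])     # an empty list means "all ports"
client.release_ports(client.get_owned_ports())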
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
index c687126b..3aeb6a34 100755
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py
+++ b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
@@ -33,6 +33,7 @@ class CTRexPktBuilder(object):
self._max_pkt_size = max_pkt_size
self.payload_gen = CTRexPktBuilder.CTRexPayloadGen(self._packet, self._max_pkt_size)
self.vm = CTRexPktBuilder.CTRexVM()
+ self.metadata = ""
def add_pkt_layer(self, layer_name, pkt_layer):
"""
@@ -441,8 +442,9 @@ class CTRexPktBuilder(object):
if self._packet is None:
raise CTRexPktBuilder.EmptyPacketError()
pkt_in_hex = binascii.hexlify(str(self._packet))
- return [int(pkt_in_hex[i:i+2], 16)
- for i in range(0, len(pkt_in_hex), 2)]
+ return {"binary": [int(pkt_in_hex[i:i+2], 16)
+ for i in range(0, len(pkt_in_hex), 2)],
+ "meta": self.metadata}
# return [pkt_in_hex[i:i+2] for i in range(0, len(pkt_in_hex), 2)]
def dump_pkt_to_pcap(self, file_path, ts=None):
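
With the hunk above, dump_pkt() no longer returns a bare byte list; it returns a dictionary holding the byte values together with the packet metadata. A small sketch of the new return shape, using a made-up raw packet string in place of str(self._packet):

import binascii

raw_pkt = '\xff\xff\xff\x00\x01\x02'            # hypothetical packet bytes
pkt_in_hex = binascii.hexlify(raw_pkt)
dumped = {"binary": [int(pkt_in_hex[i:i+2], 16)
                     for i in range(0, len(pkt_in_hex), 2)],
          "meta": ""}
# dumped == {'binary': [255, 255, 255, 0, 1, 2], 'meta': ''}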
@@ -887,7 +889,7 @@ class CTRexPktBuilder(object):
dictionary holds variable data of VM variable
"""
- return {"ins_name": "flow_var", # VM variable dump always refers to manipulate instruction.
+ return {"type": "flow_var", # VM variable dump always refers to manipulate instruction.
"name": self.name,
"size": self.size,
"op": self.operation,
diff --git a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
new file mode 100755
index 00000000..60630a04
--- /dev/null
+++ b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
@@ -0,0 +1,163 @@
+
+"""
+Dan Klein
+Cisco Systems, Inc.
+
+Copyright (c) 2015-2015 Cisco Systems, Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import external_packages
+import yaml
+
+
+class CTRexYAMLLoader(object):
+ TYPE_DICT = {"double":float,
+ "int":int,
+ "array":list,
+ "string":str,
+ "boolean":bool}
+
+ def __init__(self, yaml_ref_file_path):
+ self.yaml_path = yaml_ref_file_path
+ self.ref_obj = None
+
+ def check_term_param_type(self, val, val_field, ref_val, multiplier):
+ # print val, val_field, ref_val
+ tmp_type = ref_val.get('type')
+ if isinstance(tmp_type, list):
+ # item can be one of multiple types
+ # print "multiple choice!"
+ python_types = set()
+ for t in tmp_type:
+ if t in self.TYPE_DICT:
+ python_types.add(self.TYPE_DICT.get(t))
+ else:
+ return False, TypeError("Unknown resolving for type {0}".format(t))
+ # print "python legit types: ", python_types
+ if type(val) not in python_types:
+ return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
+ else:
+ # WE'RE OK!
+ return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
+ else:
+ # this is a single type field
+ python_type = self.TYPE_DICT.get(tmp_type)
+ if not isinstance(val, python_type):
+ return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
+ else:
+ # WE'RE OK!
+ return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
+
+ def get_reference_default(self, root_obj, sub_obj, key):
+ # print root_obj, sub_obj, key
+ if sub_obj:
+ ref_field = self.ref_obj.get(root_obj).get(sub_obj).get(key)
+ else:
+ ref_field = self.ref_obj.get(root_obj).get(key)
+ if 'has_default' in ref_field:
+ if ref_field.get('has_default'):
+ # WE'RE OK!
+ return True, ref_field.get('default')
+ else:
+ # This is a mandatory field!
+ return False, ValueError("The {0} field is mandatory and must be specified explicitly".format(key))
+ else:
+            return False, ValueError("The {0} field has no indication of a default value".format(key))
+
+ def validate_yaml(self, evaluated_obj, root_obj, fill_defaults=True, multiplier=1):
+ if isinstance(evaluated_obj, dict) and evaluated_obj.keys() == [root_obj]:
+ evaluated_obj = evaluated_obj.get(root_obj)
+ if not self.ref_obj:
+ self.ref_obj = load_yaml_to_obj(self.yaml_path)
+ # self.load_reference()
+ ref_item = self.ref_obj.get(root_obj)
+ if ref_item is not None:
+ try:
+                typed_obj = [False, None]  # first item stores validity (multiple object "shapes"), second stores the type
+ if "type" in evaluated_obj:
+ ref_item = ref_item[evaluated_obj.get("type")]
+ # print "lower resolution with typed object"
+ typed_obj = [True, evaluated_obj.get("type")]
+ if isinstance(ref_item, dict) and "type" not in ref_item: # this is not a terminal
+ result_obj = {}
+ if typed_obj[0]:
+ result_obj["type"] = typed_obj[1]
+ # print "processing dictionary non-terminal value"
+ for k, v in ref_item.items():
+ # print "processing element '{0}' with value '{1}'".format(k,v)
+ if k in evaluated_obj:
+ # validate with ref obj
+ # print "found in evaluated object!"
+ tmp_type = v.get('type')
+ # print tmp_type
+ # print evaluated_obj
+ if tmp_type == "object":
+ # go deeper into nesting hierarchy
+ # print "This is an object type, recursion!"
+ result_obj[k] = self.validate_yaml(evaluated_obj.get(k), k, fill_defaults, multiplier)
+ else:
+ # validation on terminal type
+ # print "Validating terminal type %s" % k
+ res_ok, data = self.check_term_param_type(evaluated_obj.get(k), k, v, multiplier)
+ if res_ok:
+ # data field contains the value to save
+ result_obj[k] = data
+ else:
+ # data var contains the exception to throw
+ raise data
+ elif fill_defaults:
+ # complete missing values with default value, if exists
+ sub_obj = typed_obj[1] if typed_obj[0] else None
+ res_ok, data = self.get_reference_default(root_obj, sub_obj, k)
+ if res_ok:
+ # data field contains the value to save
+ result_obj[k] = data
+ else:
+ # data var contains the exception to throw
+ raise data
+ return result_obj
+ elif isinstance(ref_item, list):
+ # currently not handling list objects
+                raise NotImplementedError("List objects are currently unsupported")
+ else:
+ raise TypeError("Unknown parse tree object type.")
+ except KeyError as e:
+ raise
+ else:
+            raise KeyError("The given root_key '{key}' does not exist in the reference object".format(key=root_obj))
+
+ @staticmethod
+ def _calc_final_value(val, multiplier, multiply):
+ def to_num(s):
+ try:
+ return int(s)
+ except ValueError:
+ return float(s)
+ if multiply:
+ return val * to_num(multiplier)
+ else:
+ return val
+
+
+def load_yaml_to_obj(file_path):
+ try:
+ return yaml.load(file(file_path, 'r'))
+ except yaml.YAMLError as e:
+ raise
+ except Exception as e:
+ raise
+
+def yaml_exporter(file_path):
+ pass
+
+if __name__ == "__main__":
+ pass
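
CTRexYAMLLoader validates a user-supplied object against the reference schema (rpc_defaults.yaml, added further below) and fills in missing defaults. A minimal sketch, assuming it runs from the trex_control_plane directory and that the relative path to the reference file matches your tree layout:

from client_utils.yaml_utils import CTRexYAMLLoader

loader = CTRexYAMLLoader("../common/rpc_defaults.yaml")    # path is an assumption

stream = {"packet":   {"binary": [255, 255, 255]},         # mandatory field, no default
          "mode":     {"type": "continuous", "pps": 10},   # typed object
          "rx_stats": {}}                                   # take all defaults

full_stream = loader.validate_yaml(stream, "stream", fill_defaults=True, multiplier=5)
# 'pps' carries the 'multiply' property, so it is stored as 10 * 5 = 50;
# 'enabled', 'self_start', 'isg', 'next_stream_id' and 'vm' come from their defaults.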
diff --git a/scripts/automation/trex_control_plane/common/external_packages.py b/scripts/automation/trex_control_plane/common/external_packages.py
new file mode 100755
index 00000000..62121d4f
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/external_packages.py
@@ -0,0 +1,28 @@
+#!/router/bin/python
+
+import sys
+import os
+
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory
+PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
+
+CLIENT_UTILS_MODULES = ['PyYAML-3.01/lib'
+ ]
+
+def import_common_modules():
+ # must be in a higher priority
+ sys.path.insert(0, PATH_TO_PYTHON_LIB)
+ sys.path.append(ROOT_PATH)
+ import_module_list(CLIENT_UTILS_MODULES)
+
+
+def import_module_list(modules_list):
+ assert(isinstance(modules_list, list))
+ for p in modules_list:
+ full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
+ fix_path = os.path.normcase(full_path)
+ sys.path.insert(1, full_path)
+
+import_common_modules()
+
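
Importing this helper before anything else makes the bundled PyYAML resolvable; trex_streams.py below relies on exactly this import order. A minimal sketch of the intended usage:

import external_packages   # inserts external_libs/PyYAML-3.01/lib into sys.path
import yaml                # now resolves to the bundled PyYAML

data = yaml.load("{a: 1, b: [2, 3]}")
print data                 # {'a': 1, 'b': [2, 3]}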
diff --git a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
new file mode 100755
index 00000000..32631609
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
@@ -0,0 +1,115 @@
+##############################################################
+#### TRex RPC stream list default values ####
+##############################################################
+
+# this document is based on TRex RPC server spec and its fields:
+# http://trex-tgn.cisco.com/trex/doc/trex_rpc_server_spec.html
+
+### HOW TO READ THIS FILE
+# 1. Each key represents an object type
+# 2. Each value can be either a value field or another object
+# 2.1. If a value field, read as:
+# + type: type of field
+# + has_default: if the value has any default
+# + default: the default value (Only appears if has_default field is 'YES')
+# 2.2. If an object type, jump to corresponding object key.
+# 3. If an object has more than one instance type, another layer with the type shall be added.
+# For example, 'mode' object has 3 types: 'continuous', 'single_burst', 'multi_burst'
+# So, 3 mode objects will be defined, named:
+# - mode['continuous']
+# - mode['single_burst']
+# - mode['multi_burst']
+# In this case, there's no default for the 'type' field on the object
+# 4. Some values have a 'multiply' property attached.
+# In such case, the loaded value will be multiplied by the multiplier
+# For example, if the mode's 'pps' field value is 10, and its multiplier is 5,
+# the loaded pps value will be 10*5=50
+# 5. Any object type must be listed by the user, even if all its fields are defaults.
+# The most basic option would be to declare the object with "[]", which stands for empty object in YAML syntax.
+
+
+stream:
+ enabled:
+ type: boolean
+ has_default: YES
+ default: True
+ self_start:
+ type: boolean
+ has_default: YES
+ default: True
+ isg:
+ type: [int, double, string]
+ has_default: YES
+ default: 0.0
+ next_stream_id:
+        type: string # string to allow binding by name
+ has_default: YES
+ default: -1 # no next streams
+ packet:
+ type: object
+ mode:
+ type: object
+ vm:
+ type: array
+ has_default: YES
+ default: [] # no ranging instructions
+ rx_stats:
+ type: object
+
+packet:
+ binary:
+ type: [array,string]
+ has_default: NO
+ meta:
+ type: string
+ has_default: YES
+ default: ""
+
+mode:
+ continuous:
+ pps:
+ type: [int, double]
+ has_default: NO
+ multiply: YES
+ single_burst:
+ pps:
+ type: [int, double]
+ has_default: NO
+ multiply: YES
+ total_pkts:
+ type: int
+ has_default: NO
+ multi_burst:
+ pps:
+ type: [int, double]
+ has_default: NO
+ multiply: YES
+ pkts_per_burst:
+ type: int
+ has_default: NO
+ ibg:
+ type: [int, double, string]
+ has_default: YES
+ default: 100.0
+ count:
+ type: int
+ has_default: YES
+ default: 0 # loop forever
+
+rx_stats:
+ enabled:
+ type: boolean
+ has_default: YES
+ default: False
+ stream_id:
+ type: string
+ has_default: YES
+ default: False # use related stream_id
+ seq_enabled:
+ type: boolean
+ has_default: YES
+ default: False
+ latency_enabled:
+ type: boolean
+ has_default: YES
+        default: False
\ No newline at end of file
diff --git a/scripts/automation/trex_control_plane/common/trex_status.py b/scripts/automation/trex_control_plane/common/trex_status.py
new file mode 100644
index 00000000..f132720c
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/trex_status.py
@@ -0,0 +1,8 @@
+#!/router/bin/python
+
+# define the states a TRex instance can be in during its lifetime
+# TRexStatus = Enum('TRexStatus', 'Idle Starting Running')
+
+IDLE = 1
+STARTING = 2
+RUNNING = 3
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
new file mode 100755
index 00000000..783f2769
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/trex_streams.py
@@ -0,0 +1,248 @@
+#!/router/bin/python
+
+import external_packages
+from client_utils.packet_builder import CTRexPktBuilder
+from collections import OrderedDict, namedtuple
+from client_utils.yaml_utils import *
+import dpkt
+import struct
+import copy
+import os
+
+StreamPack = namedtuple('StreamPack', ['stream_id', 'stream'])
+
+class CStreamList(object):
+
+ def __init__(self):
+ self.streams_list = {}
+ self.yaml_loader = CTRexYAMLLoader(os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ "rpc_defaults.yaml"))
+
+ def append_stream(self, name, stream_obj):
+ assert isinstance(stream_obj, CStream)
+ if name in self.streams_list:
+ raise NameError("A stream with this name already exists on this list.")
+ self.streams_list[name]=stream_obj
+ return
+
+ def remove_stream(self, name):
+ popped = self.streams_list.pop(name)
+ if popped:
+ for stream_name, stream in self.streams_list.items():
+ if stream.next_stream_id == name:
+ stream.next_stream_id = -1
+ try:
+ rx_stats_stream = getattr(stream.rx_stats, "stream_id")
+ if rx_stats_stream == name:
+                    # if the stream referenced by the rx_stats object is deleted, revert to the rx stats of the current stream
+ setattr(stream.rx_stats, "stream_id", stream_name)
+ except AttributeError as e:
+ continue #
+ return popped
+
+ def export_to_yaml(self, file_path):
+ raise NotImplementedError("export_to_yaml method is not implemented, yet")
+
+ def load_yaml(self, file_path, multiplier=1):
+ # clear all existing streams linked to this object
+ self.streams_list.clear()
+ streams_data = load_yaml_to_obj(file_path)
+ assert isinstance(streams_data, list)
+ for stream in streams_data:
+ stream_name = stream.get("name")
+ raw_stream = stream.get("stream")
+ if not stream_name or not raw_stream:
+                raise ValueError("Provided stream does not follow the convention. "
+                                 "Each stream must be provided as two keys: 'name' and 'stream'. "
+                                 "Provided item was:\n {stream}".format(stream=stream))
+ new_stream_data = self.yaml_loader.validate_yaml(raw_stream,
+ "stream",
+ multiplier= multiplier)
+ new_stream_obj = CStream()
+ new_stream_obj.load_data(**new_stream_data)
+ self.append_stream(stream_name, new_stream_obj)
+ return new_stream_data
+
+ def compile_streams(self):
+ # first, assign an id to each stream
+ stream_ids = {}
+ for idx, stream_name in enumerate(self.streams_list):
+ stream_ids[stream_name] = idx
+ # next, iterate over the streams and transform them from working with names to ids.
+ # with that build a new dict with old stream_name as the key, and StreamPack as the stored value
+ compiled_streams = {}
+ for stream_name, stream in self.streams_list.items():
+ tmp_stream = CStreamList._compile_single_stream(stream_name, stream, stream_ids)
+ compiled_streams[stream_name] = StreamPack(stream_ids.get(stream_name),
+ tmp_stream)
+ return compiled_streams
+
+ @staticmethod
+ def _compile_single_stream(stream_name, stream, id_dict):
+ # copy the old stream to temporary one, no change to class attributes
+ tmp_stream = copy.copy(stream)
+ next_stream_id = id_dict.get(getattr(tmp_stream, "next_stream_id"), -1)
+ try:
+ rx_stats_stream_id = id_dict.get(getattr(tmp_stream.rx_stats, "stream_id"),
+ id_dict.get(stream_name))
+ except AttributeError as e:
+ rx_stats_stream_id = id_dict.get(stream_name)
+ # assign resolved values to stream object
+ tmp_stream.next_stream_id = next_stream_id
+ tmp_stream.rx_stats.stream_id = rx_stats_stream_id
+ return tmp_stream
+
+
+class CRxStats(object):
+
+ FIELDS = ["seq_enabled", "latency_enabled", "stream_id"]
+ def __init__(self, enabled=False, **kwargs):
+ self.enabled = bool(enabled)
+ for field in CRxStats.FIELDS:
+ setattr(self, field, kwargs.get(field, False))
+
+ def dump(self):
+ if self.enabled:
+ dump = {"enabled": True}
+ dump.update({k: getattr(self, k)
+ for k in CRxStats.FIELDS}
+ )
+ return dump
+ else:
+ return {"enabled": False}
+
+
+
+class CTxMode(object):
+ """docstring for CTxMode"""
+ GENERAL_FIELDS = ["type", "pps"]
+ FIELDS = {"continuous": [],
+ "single_burst": ["total_pkts"],
+ "multi_burst": ["pkts_per_burst", "ibg", "count"]}
+
+ def __init__(self, type, pps=0, **kwargs):
+ self._MODES = CTxMode.FIELDS.keys()
+ self.type = type
+ self.pps = pps
+ for field in CTxMode.FIELDS.get(self.type):
+ setattr(self, field, kwargs.get(field, 0))
+
+ @property
+ def type(self):
+ return self._type
+
+ @type.setter
+ def type(self, type):
+ if type not in self._MODES:
+            raise ValueError("Unknown TX mode ('{0}') has been initialized.".format(type))
+ self._type = type
+ self._reset_fields()
+
+ def dump(self):
+ dump = ({k: getattr(self, k)
+ for k in CTxMode.GENERAL_FIELDS
+ })
+ dump.update({k: getattr(self, k)
+ for k in CTxMode.FIELDS.get(self.type)
+ })
+ return dump
+
+ def _reset_fields(self):
+ for field in CTxMode.FIELDS.get(self.type):
+ setattr(self, field, 0)
+
+
+class CStream(object):
+ """docstring for CStream"""
+
+ FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
+ # COMPILE_FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
+
+ def __init__(self):
+ self.is_loaded = False
+ self._is_compiled = False
+ for field in CStream.FIELDS:
+ setattr(self, field, None)
+
+ def load_data(self, **kwargs):
+ try:
+ for k in CStream.FIELDS:
+ if k == "rx_stats":
+ rx_stats_data = kwargs[k]
+ if isinstance(rx_stats_data, dict):
+ setattr(self, k, CRxStats(**rx_stats_data))
+ elif isinstance(rx_stats_data, CRxStats):
+ setattr(self, k, rx_stats_data)
+ elif k == "mode":
+ tx_mode = kwargs[k]
+ if isinstance(tx_mode, dict):
+ setattr(self, k, CTxMode(**tx_mode))
+ elif isinstance(tx_mode, CTxMode):
+ setattr(self, k, tx_mode)
+ elif k == "packet":
+ if isinstance(kwargs[k], CTRexPktBuilder):
+ if "vm" not in kwargs:
+ self.load_packet_obj(kwargs[k])
+ else:
+ raise ValueError("When providing packet object with a CTRexPktBuilder, vm parameter "
+ "should not be supplied")
+ else:
+ binary = kwargs[k]["binary"]
+ if isinstance(binary, list):
+ setattr(self, k, kwargs[k])
+ elif isinstance(binary, str) and binary.endswith(".pcap"):
+ self.load_packet_from_pcap(binary, kwargs[k]["meta"])
+ else:
+                        raise ValueError("Packet binary attribute has been loaded with an unsupported value. "
+ "Supported values are reference to pcap file with SINGLE packet, "
+ "or a list of unsigned-byte integers")
+ else:
+ setattr(self, k, kwargs[k])
+ self.is_loaded = True
+ except KeyError as e:
+ cause = e.args[0]
+ raise KeyError("The attribute '{0}' is missing as a field of the CStream object.\n"
+ "Loaded data must contain all of the following fields: {1}".format(cause, CStream.FIELDS))
+
+ def load_packet_obj(self, packet_obj):
+ assert isinstance(packet_obj, CTRexPktBuilder)
+ self.packet = packet_obj.dump_pkt()
+ self.vm = packet_obj.get_vm_data()
+
+ def load_packet_from_pcap(self, pcap_path, metadata=''):
+ with open(pcap_path, 'r') as f:
+ pcap = dpkt.pcap.Reader(f)
+ first_packet = True
+ for _, buf in pcap:
+                # this is an iterator, so the number of packets can't be known in advance
+ if first_packet:
+ self.packet = {"binary": [struct.unpack('B', buf[i:i+1])[0] # represent data as list of 0-255 ints
+ for i in range(0, len(buf))],
+ "meta": metadata} # meta data continues without a change.
+ first_packet = False
+ else:
+                    raise ValueError("Provided pcap file contains more than a single packet.")
+ # arrive here ONLY if pcap contained SINGLE packet
+ return
+
+
+ def dump(self, compilation=False):
+ # fields = CStream.COMPILE_FIELDS if compilation else CStream.FIELDS
+ if self.is_loaded:
+ dump = {}
+ for key in CStream.FIELDS:
+ try:
+                    dump[key] = getattr(self, key).dump()  # use the dump() method of compound objects, such as CTxMode
+ except AttributeError:
+ dump[key] = getattr(self, key)
+ return dump
+ else:
+ raise RuntimeError("CStream object isn't loaded with data. Use 'load_data' method.")
+
+ def dump_compiled(self):
+ return self.dump(compilation=True)
+
+
+
+if __name__ == "__main__":
+ pass
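
CStreamList ties the loader and the stream objects together: load_yaml() validates every entry against rpc_defaults.yaml, and compile_streams() resolves stream names into numeric ids, returning StreamPack tuples. A sketch with a made-up single-stream YAML list written to a temporary path, assuming the script runs from the trex_control_plane directory:

from common.trex_streams import CStreamList

yaml_txt = """
- name: dummy_stream
  stream:
    packet:
      binary: [255, 255, 255]
    mode:
      type: continuous
      pps: 100
    rx_stats: []
"""
with open("/tmp/example_streams.yaml", "w") as f:      # temporary path, for illustration only
    f.write(yaml_txt)

streams = CStreamList()
streams.load_yaml("/tmp/example_streams.yaml", multiplier=1)
compiled = streams.compile_streams()                   # {name: StreamPack(stream_id, CStream)}
for name, pack in compiled.items():
    print "{0} (id {1}): {2}".format(name, pack.stream_id, pack.stream.dump_compiled())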
diff --git a/scripts/automation/trex_control_plane/console/trex_console.py b/scripts/automation/trex_control_plane/console/trex_console.py
index 3aeab901..a9ac040b 100644..100755
--- a/scripts/automation/trex_control_plane/console/trex_console.py
+++ b/scripts/automation/trex_control_plane/console/trex_console.py
@@ -1,18 +1,124 @@
#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
+
+"""
+Dan Klein, Itay Marom
+Cisco Systems, Inc.
+
+Copyright (c) 2015-2015 Cisco Systems, Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
import cmd
import json
import ast
import argparse
import random
import string
-
+import os
import sys
+import tty, termios
import trex_root_path
+from common.trex_streams import *
+
from client_utils.jsonrpc_client import TrexStatelessClient
import trex_status
+from collections import namedtuple
+
+LoadedStreamList = namedtuple('LoadedStreamList', ['loaded', 'compiled'])
+
+#
+
+def readch (choices = []):
+
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+ try:
+ tty.setraw(sys.stdin.fileno())
+ while True:
+ ch = sys.stdin.read(1)
+ if (ord(ch) == 3) or (ord(ch) == 4):
+ return None
+ if ch in choices:
+ return ch
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+
+ return None
+
+class YesNoMenu(object):
+ def __init__ (self, caption):
+ self.caption = caption
+
+ def show (self):
+ print "{0}".format(self.caption)
+ sys.stdout.write("[Y/y/N/n] : ")
+ ch = readch(choices = ['y', 'Y', 'n', 'N'])
+ if ch == None:
+ return None
+
+ print "\n"
+ if ch == 'y' or ch == 'Y':
+ return True
+ else:
+ return False
+
+# multi level cmd menu
+class CmdMenu(object):
+ def __init__ (self):
+ self.menus = []
+
+
+ def add_menu (self, caption, options):
+ menu = {}
+ menu['caption'] = caption
+ menu['options'] = options
+ self.menus.append(menu)
+
+ def show (self):
+ cur_level = 0
+ print "\n"
+
+ selected_path = []
+ for menu in self.menus:
+ # show all the options
+ print "{0}\n".format(menu['caption'])
+ for i, option in enumerate(menu['options']):
+ print "{0}. {1}".format(i + 1, option)
+
+ #print "\nPlease select an option: "
+ choices = range(0, len(menu['options']))
+ choices = [ chr(x + 48) for x in choices]
+
+ print ""
+ ch = readch(choices)
+ print ""
+
+ if ch == None:
+ return None
+
+ selected_path.append(int(ch) - 1)
+
+ return selected_path
+
+
+class AddStreamMenu(CmdMenu):
+ def __init__ (self):
+ super(AddStreamMenu, self).__init__()
+ self.add_menu('Please select type of stream', ['a', 'b', 'c'])
+ self.add_menu('Please select ISG', ['d', 'e', 'f'])
+
+# main console object
class TrexConsole(cmd.Cmd):
"""Trex Console"""
@@ -29,6 +135,8 @@ class TrexConsole(cmd.Cmd):
self.verbose = False
self.postcmd(False, "")
+
+ self.user_streams = {}
# a cool hack - i stole this function and added space
@@ -108,6 +216,13 @@ class TrexConsole(cmd.Cmd):
def do_acquire (self, line, force = False):
'''Acquire ports\n'''
+ # make sure that the user wants to acquire all
+ if line == "":
+ ask = YesNoMenu('Do you want to acquire all ports ? ')
+ rc = ask.show()
+ if rc == False:
+ return
+
port_list = self.parse_ports_from_line(line)
if not port_list:
return
@@ -312,25 +427,142 @@ class TrexConsole(cmd.Cmd):
print "{:<30} {:<30}".format(cmd + " - ", help)
+ def do_load_stream_list(self, line):
+ '''Loads a YAML stream list serialization into user console \n'''
+ args = line.split()
+        if len(args) >= 2:
+ name = args[0]
+ yaml_path = args[1]
+ try:
+ multiplier = args[2]
+ except IndexError:
+ multiplier = 1
+ stream_list = CStreamList()
+ loaded_obj = stream_list.load_yaml(yaml_path, multiplier)
+ # print self.rpc_client.pretty_json(json.dumps(loaded_obj))
+ if name in self.user_streams:
+            print "The picked name already exists. Please pick another name."
+ else:
+ try:
+ compiled_streams = stream_list.compile_streams()
+ self.user_streams[name] = LoadedStreamList(loaded_obj,
+ [StreamPack(v.stream_id, v.stream.dump_compiled())
+ for k, v in compiled_streams.items()])
+
+ print "Stream '{0}' loaded successfully".format(name)
+ except Exception as e:
+ raise
+ return
+ else:
+            print "Please provide a load name and a YAML path, separated by a space.\n" \
+                  "Optionally, you may provide a third argument to specify a multiplier."
+
+ @staticmethod
+ def tree_autocomplete(text):
+ dir = os.path.dirname(text)
+ if dir:
+ path = dir
+ else:
+ path = "."
+ start_string = os.path.basename(text)
+ return [x
+ for x in os.listdir(path)
+ if x.startswith(start_string)]
+
+
+ def complete_load_stream_list(self, text, line, begidx, endidx):
+ arg_num = len(line.split()) - 1
+ if arg_num == 2:
+ return TrexConsole.tree_autocomplete(line.split()[-1])
+ else:
+ return [text]
+
+ def do_show_stream_list(self, line):
+ '''Shows the loaded stream list named [name] \n'''
+ args = line.split()
+ if args:
+ list_name = args[0]
+ try:
+ stream = self.user_streams[list_name]
+ if len(args) >= 2 and args[1] == "full":
+ print self.rpc_client.pretty_json(json.dumps(stream.compiled))
+ else:
+ print self.rpc_client.pretty_json(json.dumps(stream.loaded))
+ except KeyError as e:
+ print "Unknown stream list name provided"
+ else:
+ print "\nAvailable stream lists:\n{0}".format(', '.join([x
+ for x in self.user_streams.keys()]))
+
+ def complete_show_stream_list(self, text, line, begidx, endidx):
+ return [x
+ for x in self.user_streams.keys()
+ if x.startswith(text)]
+
+ def do_attach(self, line):
+ args = line.split()
+ if len(args) >= 1:
+ try:
+ stream_list = self.user_streams[args[0]]
+ port_list = self.parse_ports_from_line(' '.join(args[1:]))
+ owned = set(self.rpc_client.get_owned_ports())
+ if set(port_list).issubset(owned):
+ rc, resp_list = self.rpc_client.add_stream(port_list, stream_list.compiled)
+ if not rc:
+ print "\n*** " + resp_list + "\n"
+ return
+ else:
+                print "Not all desired ports are acquired.\n" \
+ "Acquired ports are: {acq}\n" \
+ "Requested ports: {req}\n" \
+ "Missing ports: {miss}".format(acq=list(owned),
+ req=port_list,
+ miss=list(set(port_list).difference(owned)))
+ except KeyError as e:
+ cause = e.args[0]
+            print "Provided stream list name '{0}' doesn't exist.".format(cause)
+ else:
+ print "Please provide list name and ports to attach to, or leave empty to attach to all ports."
+
- # do
- #def do_snapshot (self, line):
- #for key, value in self.rpc_client.snapshot()[1]['streams'].iteritems():
- #print str(key) + " " + str(value)
+
+
+
+
+
+
+ # adds a very simple stream
+ def do_add_simple_stream (self, line):
+ if line == "":
+ add_stream = AddStreamMenu()
+ add_stream.show()
+ return
+
+ params = line.split()
+ port_id = int(params[0])
+ stream_id = int(params[1])
+
+ packet = [0xFF,0xFF,0xFF]
+ rc, msg = self.rpc_client.add_stream(port_id = port_id, stream_id = stream_id, isg = 1.1, next_stream_id = -1, packet = packet)
+ if rc:
+ print "\nServer Response:\n\n" + self.rpc_client.pretty_json(json.dumps(msg)) + "\n"
+ else:
+ print "\n*** " + msg + "\n"
+
# aliasing
do_exit = do_EOF = do_q = do_quit
def setParserOptions ():
parser = argparse.ArgumentParser(prog="trex_console.py")
- parser.add_argument("-s", "--server", help = "T-Rex Server [default is localhost]",
+ parser.add_argument("-s", "--server", help = "TRex Server [default is localhost]",
default = "localhost",
type = str)
- parser.add_argument("-p", "--port", help = "T-Rex Server Port [default is 5050]\n",
+ parser.add_argument("-p", "--port", help = "TRex Server Port [default is 5050]\n",
default = 5050,
type = int)
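
The console's load_stream_list and attach commands added above are thin wrappers around the pieces shown earlier; the equivalent programmatic flow looks roughly like this, with host, user, port id and file name as placeholders:

from client_utils.jsonrpc_client import TrexStatelessClient
from common.trex_streams import CStreamList, StreamPack

client = TrexStatelessClient("localhost", 5050, "user")     # placeholder connection details
rc, err = client.connect()
assert rc, err

rc, msg = client.take_ownership([0])
assert rc, msg

streams = CStreamList()
streams.load_yaml("example_streams.yaml")                   # file name is an assumption
compiled = [StreamPack(v.stream_id, v.stream.dump_compiled())
            for v in streams.compile_streams().values()]
rc, resp_list = client.add_stream([0], compiled)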
diff --git a/scripts/automation/trex_control_plane/console/trex_root_path.py b/scripts/automation/trex_control_plane/console/trex_root_path.py
index de4ec03b..de4ec03b 100644..100755
--- a/scripts/automation/trex_control_plane/console/trex_root_path.py
+++ b/scripts/automation/trex_control_plane/console/trex_root_path.py
diff --git a/scripts/automation/trex_control_plane/console/trex_status.py b/scripts/automation/trex_control_plane/console/trex_status.py
index b881f9f5..2c5a648f 100644
--- a/scripts/automation/trex_control_plane/console/trex_status.py
+++ b/scripts/automation/trex_control_plane/console/trex_status.py
@@ -170,7 +170,7 @@ class PortsStatsPanel(TrexStatusPanel):
port_stats = self.status_obj.stats.get_port_stats(port_index)
if port_stats:
- self.getwin().addstr(5 + (i * 4), 2, "{:^15} {:^15,} {:^15,} {:^15,} {:^15,} {:^15,} {:^15,}".format(
+ self.getwin().addstr(5 + (i * 4), 2, "{:^15} {:^15,.2f} {:^15,.2f} {:^15,} {:^15,.2f} {:^15,.2f} {:^15,}".format(
"{0} ({1})".format(str(port_index), self.status_obj.server_sys_info["ports"][port_index]["speed"]),
port_stats["tx_pps"],
port_stats["tx_bps"],
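
The widened format spec above prints the per-port rate counters with thousands separators and two decimal places, e.g.:

print "{:^15,.2f}".format(1234567.891)   # ' 1,234,567.89  ' (centered, grouped, 2 decimals)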
diff --git a/scripts/automation/trex_control_plane/examples/client_interactive_example.py b/scripts/automation/trex_control_plane/examples/client_interactive_example.py
index 9ee28898..d21b2b15 100755
--- a/scripts/automation/trex_control_plane/examples/client_interactive_example.py
+++ b/scripts/automation/trex_control_plane/examples/client_interactive_example.py
@@ -74,7 +74,7 @@ class InteractiveTRexClient(cmd.Cmd):
print termstyle.green("*** End of TRex status prompt ***")
def do_show_trex_files_path (self, line):
- """Prompts the local path in which files are stored when pushed to t-rex server from client"""
+ """Prompts the local path in which files are stored when pushed to trex server from client"""
print self.trex.get_trex_files_path()
print termstyle.green("*** End of trex_files_path prompt ***")
diff --git a/scripts/automation/trex_control_plane/examples/interactive_stateless.py b/scripts/automation/trex_control_plane/examples/interactive_stateless.py
index 7c25b4ef..e64b4755 100644
--- a/scripts/automation/trex_control_plane/examples/interactive_stateless.py
+++ b/scripts/automation/trex_control_plane/examples/interactive_stateless.py
@@ -76,18 +76,18 @@ class InteractiveStatelessTRex(cmd.Cmd):
def do_push_files(self, filepaths):
- """Pushes a custom file to be stored locally on T-Rex server.\
+ """Pushes a custom file to be stored locally on TRex server.\
\nPush multiple files by specifying their path separated by ' ' (space)."""
try:
filepaths = filepaths.split(' ')
- print termstyle.green("*** Starting pushing files ({trex_files}) to T-Rex. ***".format(
+ print termstyle.green("*** Starting pushing files ({trex_files}) to TRex. ***".format(
trex_files=', '.join(filepaths))
)
ret_val = self.trex.push_files(filepaths)
if ret_val:
- print termstyle.green("*** End of T-Rex push_files method (success) ***")
+ print termstyle.green("*** End of TRex push_files method (success) ***")
else:
- print termstyle.magenta("*** End of T-Rex push_files method (failed) ***")
+ print termstyle.magenta("*** End of TRex push_files method (failed) ***")
except IOError as inst:
print termstyle.magenta(inst)
@@ -99,10 +99,10 @@ if __name__ == "__main__":
parser.add_argument('-v', '--version', action='version', version='%(prog)s 1.0 \t (C) Cisco Systems Inc.\n')
parser.add_argument("-t", "--trex-host", required = True, dest="trex_host",
- action="store", help="Specify the hostname or ip to connect with T-Rex server.",
+ action="store", help="Specify the hostname or ip to connect with TRex server.",
metavar="HOST" )
parser.add_argument("-p", "--trex-port", type=int, default = 5050, metavar="PORT", dest="trex_port",
- help="Select port on which the T-Rex server listens. Default port is 5050.", action="store")
+ help="Select port on which the TRex server listens. Default port is 5050.", action="store")
# parser.add_argument("-m", "--maxhist", type=int, default = 100, metavar="SIZE", dest="hist_size",
# help="Specify maximum history size saved at client side. Default size is 100.", action="store")
parser.add_argument("--virtual", dest="virtual",
@@ -124,5 +124,5 @@ if __name__ == "__main__":
except socket.error, e:
if e.errno == errno.ECONNREFUSED:
raise socket.error(errno.ECONNREFUSED,
- "Connection from T-Rex server was terminated. \
+ "Connection from TRex server was terminated. \
Please make sure the server is up.")
diff --git a/scripts/automation/trex_control_plane/unit_tests/control_plane_general_test.py b/scripts/automation/trex_control_plane/unit_tests/control_plane_general_test.py
index 95f259b8..32ad5243 100755
--- a/scripts/automation/trex_control_plane/unit_tests/control_plane_general_test.py
+++ b/scripts/automation/trex_control_plane/unit_tests/control_plane_general_test.py
@@ -9,7 +9,7 @@ Name:
Description:
- This script creates the functionality to test the performance of the T-Rex traffic generator control plane.
+ This script creates the functionality to test the performance of the TRex traffic generator control plane.
The scenarios assumes a WORKING server is listening and processing the requests.
::
diff --git a/scripts/automation/trex_control_plane/unit_tests/control_plane_unit_test.py b/scripts/automation/trex_control_plane/unit_tests/control_plane_unit_test.py
index 37130ee4..1120256c 100755
--- a/scripts/automation/trex_control_plane/unit_tests/control_plane_unit_test.py
+++ b/scripts/automation/trex_control_plane/unit_tests/control_plane_unit_test.py
@@ -18,7 +18,7 @@ class TRexCPConfiguringPlugin(Plugin):
super(TRexCPConfiguringPlugin, self).options(parser, env)
parser.add_option('-t', '--trex-server', action='store',
dest='trex_server', default='trex-dan',
- help='Specify T-Rex server hostname. This server will be used to test control-plane functionality.')
+ help='Specify TRex server hostname. This server will be used to test control-plane functionality.')
def configure(self, options, conf):
if options.trex_server:
diff --git a/scripts/automation/trex_control_plane/unit_tests/functional_test.py b/scripts/automation/trex_control_plane/unit_tests/functional_test.py
index f742403d..30836985 100755
--- a/scripts/automation/trex_control_plane/unit_tests/functional_test.py
+++ b/scripts/automation/trex_control_plane/unit_tests/functional_test.py
@@ -37,7 +37,7 @@ class CTRexStartStop_Test(CControlPlaneGeneral_Test):
def test_parameter_name_error(self):
ret = self.trex.start_trex( c = 4,
- wrong_key = 1.1, # <----- This key does not exists in T-Rex API
+ wrong_key = 1.1, # <----- This key does not exists in TRex API
d = 70,
f = 'avl/sfr_delay_10_1g.yaml',
nc = True,
@@ -50,7 +50,7 @@ class CTRexStartStop_Test(CControlPlaneGeneral_Test):
run_status = self.trex.get_running_status()
assert isinstance(run_status, dict)
assert_equal (run_status['state'], TRexStatus.Idle )
- assert_equal (run_status['verbose'], "T-Rex run failed due to wrong input parameters, or due to reachability issues.")
+ assert_equal (run_status['verbose'], "TRex run failed due to wrong input parameters, or due to reachability issues.")
assert_raises(TRexError, self.trex.get_running_info)
def test_too_early_sample(self):
@@ -83,33 +83,33 @@ class CTRexStartStop_Test(CControlPlaneGeneral_Test):
assert self.trex.is_running() == False
def test_start_more_than_once_same_user(self):
- assert self.trex.is_running() == False # first, make sure T-Rex is not running
- ret = self.trex.start_trex(**self.valid_start_params) # start 1st T-Rex run
+ assert self.trex.is_running() == False # first, make sure TRex is not running
+ ret = self.trex.start_trex(**self.valid_start_params) # start 1st TRex run
assert ret == True # make sure 1st run submitted successfuly
# time.sleep(1)
- assert_raises(TRexInUseError, self.trex.start_trex, **self.valid_start_params) # try to start T-Rex again
+ assert_raises(TRexInUseError, self.trex.start_trex, **self.valid_start_params) # try to start TRex again
ret = self.trex.stop_trex()
assert ret==True # make sure stop succeeded
assert self.trex.is_running() == False
def test_start_more_than_once_different_users(self):
- assert self.trex.is_running() == False # first, make sure T-Rex is not running
- ret = self.trex.start_trex(**self.valid_start_params) # start 1st T-Rex run
+ assert self.trex.is_running() == False # first, make sure TRex is not running
+ ret = self.trex.start_trex(**self.valid_start_params) # start 1st TRex run
assert ret == True # make sure 1st run submitted successfuly
# time.sleep(1)
tmp_trex = CTRexClient(self.trex_server_name) # initialize another client connecting same server
- assert_raises(TRexInUseError, tmp_trex.start_trex, **self.valid_start_params) # try to start T-Rex again
+ assert_raises(TRexInUseError, tmp_trex.start_trex, **self.valid_start_params) # try to start TRex again
ret = self.trex.stop_trex()
assert ret==True # make sure stop succeeded
assert self.trex.is_running() == False
def test_simultaneous_sampling(self):
- assert self.trex.is_running() == False # first, make sure T-Rex is not running
+ assert self.trex.is_running() == False # first, make sure TRex is not running
tmp_trex = CTRexClient(self.trex_server_name) # initialize another client connecting same server
- ret = self.trex.start_trex(**self.valid_start_params) # start T-Rex run
+ ret = self.trex.start_trex(**self.valid_start_params) # start TRex run
assert ret == True # make sure 1st run submitted successfuly
time.sleep(6)
@@ -123,7 +123,7 @@ class CTRexStartStop_Test(CControlPlaneGeneral_Test):
assert tmp_trex.get_result_obj().is_valid_hist() == True
if self.trex.get_result_obj().is_done_warmup():
assert tmp_trex.get_result_obj().is_done_warmup() == True
- # except TRexError as inst: # T-Rex might have stopped between is_running result and get_running_info() call
+ # except TRexError as inst: # TRex might have stopped between is_running result and get_running_info() call
# # hence, ingore that case
# break
@@ -132,7 +132,7 @@ class CTRexStartStop_Test(CControlPlaneGeneral_Test):
def test_fast_toggling(self):
assert self.trex.is_running() == False
for i in range(20):
- ret = self.trex.start_trex(**self.valid_start_params) # start T-Rex run
+ ret = self.trex.start_trex(**self.valid_start_params) # start TRex run
assert ret == True
assert self.trex.is_running() == False # we expect the status to be 'Starting'
ret = self.trex.stop_trex()