-rwxr-xr-x              scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py   81
-rwxr-xr-x              scripts/automation/trex_control_plane/client_utils/packet_builder.py    2
-rwxr-xr-x              scripts/automation/trex_control_plane/client_utils/yaml_utils.py        28
-rwxr-xr-x              scripts/automation/trex_control_plane/common/rpc_defaults.yaml           8
-rwxr-xr-x              scripts/automation/trex_control_plane/common/trex_streams.py            85
-rwxr-xr-x [-rw-r--r--] scripts/automation/trex_control_plane/console/trex_console.py           86
-rwxr-xr-x              scripts/stl/imix_1pkt.yaml                                              11
7 files changed, 226 insertions, 75 deletions
diff --git a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
index 8c8987b6..b2d83cff 100755
--- a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
+++ b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
@@ -458,32 +458,55 @@ class TrexStatelessClient(JsonRpcClient):
return snap
# add stream
- def add_stream (self, port_id, stream_id, isg, next_stream_id, packet):
- if not port_id in self.get_owned_ports():
- return False, "Port {0} is not owned... please take ownership before adding streams".format(port_id)
-
- handler = self.port_handlers[port_id]
-
- stream = {}
- stream['enabled'] = True
- stream['self_start'] = True
- stream['isg'] = isg
- stream['next_stream_id'] = next_stream_id
- stream['packet'] = {}
- stream['packet']['binary'] = packet
- stream['packet']['meta'] = ""
- stream['vm'] = []
- stream['rx_stats'] = {}
- stream['rx_stats']['enabled'] = False
-
- stream['mode'] = {}
- stream['mode']['type'] = 'continuous'
- stream['mode']['pps'] = 10.0
-
- params = {}
- params['handler'] = handler
- params['stream'] = stream
- params['port_id'] = port_id
- params['stream_id'] = stream_id
-
- return self.invoke_rpc_method('add_stream', params = params)
+ # def add_stream (self, port_id, stream_id, isg, next_stream_id, packet, vm=[]):
+ # if not port_id in self.get_owned_ports():
+ # return False, "Port {0} is not owned... please take ownership before adding streams".format(port_id)
+ #
+ # handler = self.port_handlers[port_id]
+ #
+ # stream = {}
+ # stream['enabled'] = True
+ # stream['self_start'] = True
+ # stream['isg'] = isg
+ # stream['next_stream_id'] = next_stream_id
+ # stream['packet'] = {}
+ # stream['packet']['binary'] = packet
+ # stream['packet']['meta'] = ""
+ # stream['vm'] = vm
+ # stream['rx_stats'] = {}
+ # stream['rx_stats']['enabled'] = False
+ #
+ # stream['mode'] = {}
+ # stream['mode']['type'] = 'continuous'
+ # stream['mode']['pps'] = 10.0
+ #
+ # params = {}
+ # params['handler'] = handler
+ # params['stream'] = stream
+ # params['port_id'] = port_id
+ # params['stream_id'] = stream_id
+ #
+ # print params
+ # return self.invoke_rpc_method('add_stream', params = params)
+
+ def add_stream(self, port_id_array, stream_pack_list):
+ batch = self.create_batch()
+
+ for port_id in port_id_array:
+ for stream_pack in stream_pack_list:
+ params = {"port_id": port_id,
+ "handler": self.port_handlers[port_id],
+ "stream_id": stream_pack.stream_id,
+ "stream": stream_pack.stream}
+ batch.add("add_stream", params=params)
+ rc, resp_list = batch.invoke()
+ if not rc:
+ return rc, resp_list
+
+ for i, rc in enumerate(resp_list):
+ if rc[0]:
+ self.port_handlers[port_id_array[i]] = rc[1]
+
+ return True, resp_list
+
+ # return self.invoke_rpc_method('add_stream', params = params)
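For context on the pattern the rewritten method relies on: add_stream() now queues one "add_stream" request per (port, stream) pair on a JSON-RPC batch and sends them all in a single invoke(). Below is a minimal, standalone sketch of that batching idea; the BatchSketch class and the handler/stream values are illustrative stand-ins, not the TRex create_batch() implementation.

```python
import json

class BatchSketch(object):
    """Illustrative stand-in for the client's create_batch() object."""
    def __init__(self):
        self.calls = []

    def add(self, method, params=None):
        # one JSON-RPC 2.0 request object per queued call
        self.calls.append({"jsonrpc": "2.0",
                           "method": method,
                           "params": params or {},
                           "id": len(self.calls) + 1})

    def dump(self):
        # the whole list is what would go out as one batched message
        return json.dumps(self.calls, indent=4)

batch = BatchSketch()
for port_id in [0, 1]:                      # made-up port ids
    batch.add("add_stream",
              params={"port_id": port_id,
                      "handler": "A1B2C3",  # made-up ownership handler
                      "stream_id": 0,
                      "stream": {"mode": {"type": "continuous", "pps": 10.0}}})
print(batch.dump())
```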
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
index caa6eab3..3aeb6a34 100755
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py
+++ b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
@@ -889,7 +889,7 @@ class CTRexPktBuilder(object):
dictionary holds variable data of VM variable
"""
- return {"ins_name": "flow_var", # VM variable dump always refers to manipulate instruction.
+ return {"type": "flow_var", # VM variable dump always refers to manipulate instruction.
"name": self.name,
"size": self.size,
"op": self.operation,
diff --git a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
index 024b73c2..414744fc 100755
--- a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
+++ b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
@@ -31,18 +31,18 @@ class CTRexYAMLLoader(object):
self.ref_obj = None
def check_term_param_type(self, val, val_field, ref_val, multiplier):
- print val, val_field, ref_val
+ # print val, val_field, ref_val
tmp_type = ref_val.get('type')
if isinstance(tmp_type, list):
# item can be one of multiple types
- print "multiple choice!"
+ # print "multiple choice!"
python_types = set()
for t in tmp_type:
if t in self.TYPE_DICT:
python_types.add(self.TYPE_DICT.get(t))
else:
return False, TypeError("Unknown resolving for type {0}".format(t))
- print "python legit types: ", python_types
+ # print "python legit types: ", python_types
if type(val) not in python_types:
return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
else:
@@ -58,7 +58,7 @@ class CTRexYAMLLoader(object):
return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
def get_reference_default(self, root_obj, sub_obj, key):
- print root_obj, sub_obj, key
+ # print root_obj, sub_obj, key
if sub_obj:
ref_field = self.ref_obj.get(root_obj).get(sub_obj).get(key)
else:
@@ -77,7 +77,7 @@ class CTRexYAMLLoader(object):
if isinstance(evaluated_obj, dict) and evaluated_obj.keys() == [root_obj]:
evaluated_obj = evaluated_obj.get(root_obj)
if not self.ref_obj:
- self.ref_obj = load_yaml_to_any_obj(self.yaml_path)
+ self.ref_obj = load_yaml_to_obj(self.yaml_path)
# self.load_reference()
ref_item = self.ref_obj.get(root_obj)
if ref_item is not None:
@@ -85,30 +85,30 @@ class CTRexYAMLLoader(object):
typed_obj = [False, None]  # first item stores validity (multiple object "shapes"), second stores the type
if "type" in evaluated_obj:
ref_item = ref_item[evaluated_obj.get("type")]
- print "lower resolution with typed object"
+ # print "lower resolution with typed object"
typed_obj = [True, evaluated_obj.get("type")]
if isinstance(ref_item, dict) and "type" not in ref_item: # this is not a terminal
result_obj = {}
if typed_obj[0]:
result_obj["type"] = typed_obj[1]
- print "processing dictionary non-terminal value"
+ # print "processing dictionary non-terminal value"
for k, v in ref_item.items():
- print "processing element '{0}' with value '{1}'".format(k,v)
+ # print "processing element '{0}' with value '{1}'".format(k,v)
if k in evaluated_obj:
# validate with ref obj
- print "found in evaluated object!"
+ # print "found in evaluated object!"
tmp_type = v.get('type')
- print tmp_type
- print evaluated_obj
+ # print tmp_type
+ # print evaluated_obj
if tmp_type == "object":
# go deeper into nesting hierarchy
- print "This is an object type, recursion!"
+ # print "This is an object type, recursion!"
result_obj[k] = self.validate_yaml(evaluated_obj.get(k), k, fill_defaults, multiplier)
else:
# validation on terminal type
- print "Validating terminal type %s" % k
+ # print "Validating terminal type %s" % k
res_ok, data = self.check_term_param_type(evaluated_obj.get(k), k, v, multiplier)
- print "Validating: ", res_ok
+ # print "Validating: ", res_ok
if res_ok:
# data field contains the value to save
result_obj[k] = data
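The yaml_utils.py changes silence the validator's debug prints (and point the loader at the existing load_yaml_to_obj). If the traces are still wanted occasionally, the standard logging module is the usual alternative to commenting prints in and out; the snippet below is a sketch of that idea, not part of this change.

```python
import logging

logger = logging.getLogger("yaml_utils")
logging.basicConfig(level=logging.WARNING)  # switch to logging.DEBUG to re-enable the traces

def trace_validation(val, val_field, ref_val):
    # level-controlled equivalent of the old "print val, val_field, ref_val"
    logger.debug("validating field %r: value=%r against reference %r",
                 val_field, val, ref_val)

trace_validation(0.0, "isg", {"type": ["int", "double", "string"]})
```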
diff --git a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
index 5816f980..32631609 100755
--- a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
+++ b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
@@ -41,8 +41,8 @@ stream:
type: [int, double, string]
has_default: YES
default: 0.0
- next_stream:
- type: [int, string] # string to allow naming binding
+ next_stream_id:
+ type: string # string to allow naming binding
has_default: YES
default: -1 # no next streams
packet:
@@ -101,6 +101,10 @@ rx_stats:
type: boolean
has_default: YES
default: False
+ stream_id:
+ type: string
+ has_default: YES
+ default: False # use related stream_id
seq_enabled:
type: boolean
has_default: YES
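With this schema change, next_stream_id is declared as a string so streams can reference each other by name in a YAML profile, and rx_stats can name the stream whose id it reports under (defaulting to the owning stream). A hypothetical pair of loaded streams showing the binding, written as the plain dicts the loader produces; the names udp_a/udp_b are invented and only the fields relevant to the binding are shown.

```python
loaded_streams = {
    "udp_a": {"next_stream_id": "udp_b",            # by-name reference, resolved at compile time
              "rx_stats": {"enabled": True,
                           "stream_id": "udp_a"}},  # default: report under the owning stream
    "udp_b": {"next_stream_id": -1},                # -1 means "no next stream"
}
```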
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
index e366001d..674a6bcc 100755
--- a/scripts/automation/trex_control_plane/common/trex_streams.py
+++ b/scripts/automation/trex_control_plane/common/trex_streams.py
@@ -2,19 +2,21 @@
import external_packages
from client_utils.packet_builder import CTRexPktBuilder
-from collections import OrderedDict
+from collections import OrderedDict, namedtuple
from client_utils.yaml_utils import *
import dpkt
import struct
+import copy
+import os
+StreamPack = namedtuple('StreamPack', ['stream_id', 'stream'])
class CStreamList(object):
def __init__(self):
- self.streams_list = {OrderedDict()}
- self.yaml_loader = CTRexYAMLLoader("rpc_defaults.yaml")
- self._stream_id = 0
- # self._stream_by_name = {}
+ self.streams_list = {}
+ self.yaml_loader = CTRexYAMLLoader(os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ "rpc_defaults.yaml"))
def append_stream(self, name, stream_obj):
assert isinstance(stream_obj, CStream)
@@ -24,21 +26,19 @@ class CStreamList(object):
return
def remove_stream(self, name):
- return self.streams_list.pop(name)
-
- def rearrange_streams(self, streams_names_list, new_streams_dict={}):
- tmp_list = OrderedDict()
- for stream in streams_names_list:
- if stream in self.streams_list:
- tmp_list[stream] = self.streams_list.get(stream)
- elif stream in new_streams_dict:
- new_stream_obj = new_streams_dict.get(stream)
- assert isinstance(new_stream_obj, CStream)
- tmp_list[stream] = new_stream_obj
- else:
- raise NameError("Given stream named '{0}' cannot be found in existing stream list or and wasn't"
- "provided with the new_stream_dict parameter.".format(stream))
- self.streams_list = tmp_list
+ popped = self.streams_list.pop(name)
+ if popped:
+ for stream_name, stream in self.streams_list.items():
+ if stream.next_stream_id == name:
+ stream.next_stream_id = -1
+ try:
+ rx_stats_stream = getattr(stream.rx_stats, "stream_id")
+ if rx_stats_stream == name:
+ # if the stream referenced by this rx_stats object is deleted, fall back to the rx_stats of the current stream
+ setattr(stream.rx_stats, "stream_id", stream_name)
+ except AttributeError as e:
+ continue
+ return popped
def export_to_yaml(self, file_path):
raise NotImplementedError("export_to_yaml method is not implemented, yet")
@@ -48,7 +48,6 @@ class CStreamList(object):
self.streams_list.clear()
streams_data = load_yaml_to_obj(file_path)
assert isinstance(streams_data, list)
- raw_streams = {}
for stream in streams_data:
stream_name = stream.get("name")
raw_stream = stream.get("stream")
@@ -62,16 +61,41 @@ class CStreamList(object):
new_stream_obj = CStream()
new_stream_obj.load_data(**new_stream_data)
self.append_stream(stream_name, new_stream_obj)
+ return streams_data
def compile_streams(self):
+ # first, assign an id to each stream
stream_ids = {}
-
- pass
+ for idx, stream_name in enumerate(self.streams_list):
+ stream_ids[stream_name] = idx
+ # next, iterate over the streams and transform them from working with names to ids.
+ # with that build a new dict with old stream_name as the key, and StreamPack as the stored value
+ compiled_streams = {}
+ for stream_name, stream in self.streams_list.items():
+ tmp_stream = CStreamList._compile_single_stream(stream_name, stream, stream_ids)
+ compiled_streams[stream_name] = StreamPack(stream_ids.get(stream_name),
+ tmp_stream)
+ return compiled_streams
+
+ @staticmethod
+ def _compile_single_stream(stream_name, stream, id_dict):
+ # copy the old stream to temporary one, no change to class attributes
+ tmp_stream = copy.copy(stream)
+ next_stream_id = id_dict.get(getattr(tmp_stream, "next_stream_id"), -1)
+ try:
+ rx_stats_stream_id = id_dict.get(getattr(tmp_stream.rx_stats, "stream_id"),
+ id_dict.get(stream_name))
+ except AttributeError as e:
+ rx_stats_stream_id = id_dict.get(stream_name)
+ # assign resolved values to stream object
+ tmp_stream.next_stream_id = next_stream_id
+ tmp_stream.rx_stats.stream_id = rx_stats_stream_id
+ return tmp_stream
class CRxStats(object):
- FIELDS = ["seq_enabled", "latency_enabled"]
+ FIELDS = ["seq_enabled", "latency_enabled", "stream_id"]
def __init__(self, enabled=False, **kwargs):
self.enabled = bool(enabled)
for field in CRxStats.FIELDS:
@@ -82,7 +106,7 @@ class CRxStats(object):
dump = {"enabled": True}
dump.update({k: getattr(self, k)
for k in CRxStats.FIELDS
- if getattr(self, k)
+ if getattr(self, k) or k == "stream_id"
})
return dump
else:
@@ -132,10 +156,12 @@ class CTxMode(object):
class CStream(object):
"""docstring for CStream"""
- FIELDS = ["enabled", "self_start", "next_stream", "isg", "mode", "rx_stats", "packet", "vm"]
+ FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
+ # COMPILE_FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
def __init__(self):
self.is_loaded = False
+ self._is_compiled = False
for field in CStream.FIELDS:
setattr(self, field, None)
@@ -201,7 +227,8 @@ class CStream(object):
return
- def dump(self):
+ def dump(self, compilation=False):
+ # fields = CStream.COMPILE_FIELDS if compilation else CStream.FIELDS
if self.is_loaded:
dump = {}
for key in CStream.FIELDS:
@@ -213,8 +240,8 @@ class CStream(object):
else:
raise RuntimeError("CStream object isn't loaded with data. Use 'load_data' method.")
-
-
+ def dump_compiled(self):
+ return self.dump(compilation=True)
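The new compile_streams() / _compile_single_stream() pair performs a two-pass compilation: first every stream name is assigned an integer id, then each stream's by-name references (next_stream_id and rx_stats.stream_id) are rewritten to those ids, falling back to -1 and to the owning stream's id respectively. A standalone sketch of the same idea, using plain dicts instead of CStream objects:

```python
import copy

def compile_streams_sketch(streams_by_name):
    # pass 1: assign an integer id per stream name
    stream_ids = {name: idx for idx, name in enumerate(streams_by_name)}
    compiled = {}
    # pass 2: rewrite name references to ids
    for name, stream in streams_by_name.items():
        tmp = copy.deepcopy(stream)
        # unknown or missing next_stream_id falls back to -1 (no next stream)
        tmp["next_stream_id"] = stream_ids.get(tmp.get("next_stream_id"), -1)
        # rx_stats.stream_id defaults to the owning stream's own id
        rx = tmp.setdefault("rx_stats", {})
        rx["stream_id"] = stream_ids.get(rx.get("stream_id"), stream_ids[name])
        compiled[name] = (stream_ids[name], tmp)   # (stream_id, compiled stream)
    return compiled

print(compile_streams_sketch({
    "udp_a": {"next_stream_id": "udp_b", "rx_stats": {"enabled": True}},
    "udp_b": {"next_stream_id": -1},
}))
```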
diff --git a/scripts/automation/trex_control_plane/console/trex_console.py b/scripts/automation/trex_control_plane/console/trex_console.py
index 9478db5a..945bb177 100644..100755
--- a/scripts/automation/trex_control_plane/console/trex_console.py
+++ b/scripts/automation/trex_control_plane/console/trex_console.py
@@ -9,9 +9,13 @@ import string
import sys
import trex_root_path
+from common.trex_streams import *
from client_utils.jsonrpc_client import TrexStatelessClient
import trex_status
+from collections import namedtuple
+
+LoadedStreamList = namedtuple('LoadedStreamList', ['loaded', 'compiled'])
class TrexConsole(cmd.Cmd):
"""Trex Console"""
@@ -29,6 +33,8 @@ class TrexConsole(cmd.Cmd):
self.verbose = False
self.postcmd(False, "")
+
+ self.user_streams = {}
# a cool hack - i stole this function and added space
@@ -312,6 +318,86 @@ class TrexConsole(cmd.Cmd):
print "{:<30} {:<30}".format(cmd + " - ", help)
+ def do_load_stream_list(self, line):
+ '''Loads a YAML stream list serialization into user console \n'''
+ args = line.split()
+ if len(args) >= 2:
+ name = args[0]
+ yaml_path = args[1]
+ stream_list = CStreamList()
+ loaded_obj = stream_list.load_yaml(yaml_path)
+ # print self.rpc_client.pretty_json(json.dumps(loaded_obj))
+ if name in self.user_streams:
+ print "Picked name already exist. Please pick another name."
+ else:
+ try:
+ compiled_streams = stream_list.compile_streams()
+ self.user_streams[name] = LoadedStreamList(loaded_obj,
+ [StreamPack(v.stream_id, v.stream.dump_compiled())
+ for k, v in compiled_streams.items()])
+
+ print "Stream '{0}' loaded successfully".format(name)
+ except Exception as e:
+ raise
+ return
+ else:
+ print "please provide load name and YAML path, separated by space."
+
+ def do_show_stream_list(self, line):
+ '''Shows the loaded stream list named [name] \n'''
+ args = line.split()
+ if args:
+ list_name = args[0]
+ try:
+ stream = self.user_streams[list_name]
+ if len(args) >= 2 and args[1] == "full":
+ print self.rpc_client.pretty_json(json.dumps(stream.compiled))
+ else:
+ print self.rpc_client.pretty_json(json.dumps(stream.loaded))
+ except KeyError as e:
+ print "Unknown stream list name provided"
+ else:
+ print "\nAvailable stream lists:\n{0}".format(', '.join([x
+ for x in self.user_streams.keys()]))
+
+ def complete_show_stream_list (self, text, line, begidx, endidx):
+ return [x
+ for x in self.user_streams.keys()
+ if x.startswith(text)]
+
+ def do_attach(self, line):
+ args = line.split()
+ if len(args) >= 1:
+ try:
+ stream_list = self.user_streams[args[0]]
+ port_list = self.parse_ports_from_line(' '.join(args[1:]))
+ owned = set(self.rpc_client.get_owned_ports())
+ if set(port_list).issubset(owned):
+ rc, resp_list = self.rpc_client.add_stream(port_list, stream_list.compiled)
+ if not rc:
+ print "\n*** " + resp_list + "\n"
+ return
+ else:
+ print "Not all desired ports are aquired.\n" \
+ "Acquired ports are: {acq}\n" \
+ "Requested ports: {req}\n" \
+ "Missing ports: {miss}".format(acq=list(owned),
+ req=port_list,
+ miss=list(set(port_list).difference(owned)))
+ except KeyError as e:
+ cause = e.args[0]
+ print "Provided stream list name '{0}' doesn't exists.".format(cause)
+ else:
+ print "Please provide list name and ports to attach to, or leave empty to attach to all ports."
+
+
+
+
+
+
+
+
+
# do
#def do_snapshot (self, line):
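Taken together, the new console commands are meant to be used roughly like the hypothetical session below. The `trex>` prompt, list name, YAML path and port arguments are all assumptions (only the "Stream '...' loaded successfully" message comes from the code above), and the exact port syntax depends on parse_ports_from_line().

```
trex> load_stream_list imix stl/imix_1pkt.yaml
Stream 'imix' loaded successfully
trex> show_stream_list imix full
<pretty-printed JSON of the compiled stream list>
trex> attach imix 0 1
```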
diff --git a/scripts/stl/imix_1pkt.yaml b/scripts/stl/imix_1pkt.yaml
new file mode 100755
index 00000000..128bd559
--- /dev/null
+++ b/scripts/stl/imix_1pkt.yaml
@@ -0,0 +1,11 @@
+### Single stream UDP packet, 64B ###
+#####################################
+- name: udp_64B
+ stream:
+ self_start: True
+ packet:
+ binary: cap2/udp_64B_1pkt.pcap
+ mode:
+ type: continuous
+ pps: 100
+ rx_stats: []
\ No newline at end of file
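Finally, a short end-to-end sketch of how the new pieces fit together for a profile like this one: load the YAML into a CStreamList, compile names to ids, and push the result to a set of already-acquired ports. It assumes an already-connected TrexStatelessClient instance named `client` and that `common`/`client_utils` are importable (as trex_root_path arranges for the console); the path and port numbers are illustrative.

```python
from common.trex_streams import CStreamList, StreamPack

def load_and_attach(client, yaml_path, ports):
    stream_list = CStreamList()
    stream_list.load_yaml(yaml_path)                       # validated against rpc_defaults.yaml
    compiled = stream_list.compile_streams()               # {name: StreamPack(stream_id, CStream)}
    packs = [StreamPack(sp.stream_id, sp.stream.dump_compiled())
             for sp in compiled.values()]
    return client.add_stream(ports, packs)                 # batched JSON-RPC, one call per port/stream pair

# e.g. rc, resp = load_and_attach(client, "stl/imix_1pkt.yaml", [0, 1])
```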