Diffstat (limited to 'scripts')
12 files changed, 791 insertions, 34 deletions
diff --git a/scripts/automation/trex_control_plane/client_utils/external_packages.py b/scripts/automation/trex_control_plane/client_utils/external_packages.py
index 4b10609b..e2bb37a5 100755
--- a/scripts/automation/trex_control_plane/client_utils/external_packages.py
+++ b/scripts/automation/trex_control_plane/client_utils/external_packages.py
@@ -8,7 +8,8 @@ ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir))
 PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
 
 CLIENT_UTILS_MODULES = ['zmq',
-                        'dpkt-1.8.6'
+                        'dpkt-1.8.6',
+                        'PyYAML-3.01/lib'
                         ]
 
 def import_client_utils_modules():
diff --git a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
index 8c8987b6..ed14e6f8 100755
--- a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
+++ b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
@@ -458,32 +458,56 @@ class TrexStatelessClient(JsonRpcClient):
         return snap
 
     # add stream
-    def add_stream (self, port_id, stream_id, isg, next_stream_id, packet):
-        if not port_id in self.get_owned_ports():
-            return False, "Port {0} is not owned... please take ownership before adding streams".format(port_id)
-
-        handler = self.port_handlers[port_id]
-
-        stream = {}
-        stream['enabled'] = True
-        stream['self_start'] = True
-        stream['isg'] = isg
-        stream['next_stream_id'] = next_stream_id
-        stream['packet'] = {}
-        stream['packet']['binary'] = packet
-        stream['packet']['meta'] = ""
-        stream['vm'] = []
-        stream['rx_stats'] = {}
-        stream['rx_stats']['enabled'] = False
-
-        stream['mode'] = {}
-        stream['mode']['type'] = 'continuous'
-        stream['mode']['pps'] = 10.0
-
-        params = {}
-        params['handler'] = handler
-        params['stream'] = stream
-        params['port_id'] = port_id
-        params['stream_id'] = stream_id
-
-        return self.invoke_rpc_method('add_stream', params = params)
+    # def add_stream (self, port_id, stream_id, isg, next_stream_id, packet, vm=[]):
+    #     if not port_id in self.get_owned_ports():
+    #         return False, "Port {0} is not owned... please take ownership before adding streams".format(port_id)
+    #
+    #     handler = self.port_handlers[port_id]
+    #
+    #     stream = {}
+    #     stream['enabled'] = True
+    #     stream['self_start'] = True
+    #     stream['isg'] = isg
+    #     stream['next_stream_id'] = next_stream_id
+    #     stream['packet'] = {}
+    #     stream['packet']['binary'] = packet
+    #     stream['packet']['meta'] = ""
+    #     stream['vm'] = vm
+    #     stream['rx_stats'] = {}
+    #     stream['rx_stats']['enabled'] = False
+    #
+    #     stream['mode'] = {}
+    #     stream['mode']['type'] = 'continuous'
+    #     stream['mode']['pps'] = 10.0
+    #
+    #     params = {}
+    #     params['handler'] = handler
+    #     params['stream'] = stream
+    #     params['port_id'] = port_id
+    #     params['stream_id'] = stream_id
+    #
+    #     print params
+    #     return self.invoke_rpc_method('add_stream', params = params)
+
+    def add_stream(self, port_id_array, stream_pack_list):
+        batch = self.create_batch()
+
+        for port_id in port_id_array:
+            for stream_pack in stream_pack_list:
+                params = {"port_id": port_id,
+                          "handler": self.port_handlers[port_id],
+                          "stream_id": stream_pack.stream_id,
+                          "stream": stream_pack.stream}
+                batch.add("add_stream", params=params)
+        rc, resp_list = batch.invoke()
+        if not rc:
+            return rc, resp_list
+
+        for i, rc in enumerate(resp_list):
+            if rc[0]:
+                print "Stream {0} - {1}".format(i, rc[1])
+            # self.port_handlers[port_id_array[i]] = rc[1]
+
+        return True, resp_list
+
+        # return self.invoke_rpc_method('add_stream', params = params)
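The reworked add_stream() above submits one batched "add_stream" RPC call per (port, stream) pair instead of a single call per stream. A minimal calling sketch, assuming a connected TrexStatelessClient named rpc_client that already owns ports 0 and 1, and a list named compiled_stream_packs of StreamPack(stream_id, stream) tuples; both names are illustrative assumptions, not part of this change:

    # sketch only: port ids and 'compiled_stream_packs' are assumed inputs
    port_ids = [0, 1]
    rc, resp_list = rpc_client.add_stream(port_ids, compiled_stream_packs)
    if not rc:
        # transport-level failure: resp_list carries the error description
        print "add_stream batch failed: {0}".format(resp_list)
    else:
        # one (ok, value) response per (port, stream) pair, in submission order
        for i, (ok, value) in enumerate(resp_list):
            print "call {0}: {1} -> {2}".format(i, "ok" if ok else "error", value)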
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
index c687126b..3aeb6a34 100755
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py
+++ b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
@@ -33,6 +33,7 @@ class CTRexPktBuilder(object):
         self._max_pkt_size = max_pkt_size
         self.payload_gen = CTRexPktBuilder.CTRexPayloadGen(self._packet, self._max_pkt_size)
         self.vm = CTRexPktBuilder.CTRexVM()
+        self.metadata = ""
 
     def add_pkt_layer(self, layer_name, pkt_layer):
         """
@@ -441,8 +442,9 @@ class CTRexPktBuilder(object):
         if self._packet is None:
             raise CTRexPktBuilder.EmptyPacketError()
         pkt_in_hex = binascii.hexlify(str(self._packet))
-        return [int(pkt_in_hex[i:i+2], 16)
-                for i in range(0, len(pkt_in_hex), 2)]
+        return {"binary": [int(pkt_in_hex[i:i+2], 16)
+                           for i in range(0, len(pkt_in_hex), 2)],
+                "meta": self.metadata}
         # return [pkt_in_hex[i:i+2] for i in range(0, len(pkt_in_hex), 2)]
 
     def dump_pkt_to_pcap(self, file_path, ts=None):
@@ -887,7 +889,7 @@ class CTRexPktBuilder(object):
             dictionary holds variable data of VM variable
 
         """
-        return {"ins_name": "flow_var",  # VM variable dump always refers to manipulate instruction.
+        return {"type": "flow_var",  # VM variable dump always refers to manipulate instruction.
                 "name": self.name,
                 "size": self.size,
                 "op": self.operation,
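With this change, dump_pkt() returns the packet together with its metadata, matching the 'packet' object shape defined in rpc_defaults.yaml further below. A self-contained sketch of the transformation (Python 2, as in this codebase; the sample bytes are illustrative):

    import binascii

    pkt = "\x00\x01\x02\xff"                      # stand-in for str(self._packet)
    pkt_in_hex = binascii.hexlify(pkt)
    dumped = {"binary": [int(pkt_in_hex[i:i+2], 16)
                         for i in range(0, len(pkt_in_hex), 2)],
              "meta": ""}                          # self.metadata defaults to ""
    assert dumped["binary"] == [0, 1, 2, 255]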
diff --git a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
new file mode 100755
index 00000000..60630a04
--- /dev/null
+++ b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
@@ -0,0 +1,163 @@
+
+"""
+Dan Klein
+Cisco Systems, Inc.
+
+Copyright (c) 2015-2015 Cisco Systems, Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+    http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import external_packages
+import yaml
+
+
+class CTRexYAMLLoader(object):
+    TYPE_DICT = {"double": float,
+                 "int": int,
+                 "array": list,
+                 "string": str,
+                 "boolean": bool}
+
+    def __init__(self, yaml_ref_file_path):
+        self.yaml_path = yaml_ref_file_path
+        self.ref_obj = None
+
+    def check_term_param_type(self, val, val_field, ref_val, multiplier):
+        # print val, val_field, ref_val
+        tmp_type = ref_val.get('type')
+        if isinstance(tmp_type, list):
+            # the item can be one of multiple types
+            # print "multiple choice!"
+            python_types = set()
+            for t in tmp_type:
+                if t in self.TYPE_DICT:
+                    python_types.add(self.TYPE_DICT.get(t))
+                else:
+                    return False, TypeError("Cannot resolve unknown type '{0}'".format(t))
+            # print "python legit types: ", python_types
+            if type(val) not in python_types:
+                return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
+            else:
+                # WE'RE OK!
+                return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
+        else:
+            # this is a single-type field
+            python_type = self.TYPE_DICT.get(tmp_type)
+            if not isinstance(val, python_type):
+                return False, TypeError("Type of object field '{0}' is not allowed".format(val_field))
+            else:
+                # WE'RE OK!
+                return True, CTRexYAMLLoader._calc_final_value(val, multiplier, ref_val.get('multiply', False))
+
+    def get_reference_default(self, root_obj, sub_obj, key):
+        # print root_obj, sub_obj, key
+        if sub_obj:
+            ref_field = self.ref_obj.get(root_obj).get(sub_obj).get(key)
+        else:
+            ref_field = self.ref_obj.get(root_obj).get(key)
+        if 'has_default' in ref_field:
+            if ref_field.get('has_default'):
+                # WE'RE OK!
+                return True, ref_field.get('default')
+            else:
+                # this is a mandatory field!
+                return False, ValueError("The {0} field is mandatory and must be specified explicitly".format(key))
+        else:
+            return False, ValueError("The {0} field has no indication about a default value".format(key))
+
+    def validate_yaml(self, evaluated_obj, root_obj, fill_defaults=True, multiplier=1):
+        if isinstance(evaluated_obj, dict) and evaluated_obj.keys() == [root_obj]:
+            evaluated_obj = evaluated_obj.get(root_obj)
+        if not self.ref_obj:
+            self.ref_obj = load_yaml_to_obj(self.yaml_path)
+            # self.load_reference()
+        ref_item = self.ref_obj.get(root_obj)
+        if ref_item is not None:
+            try:
+                typed_obj = [False, None]   # first item stores validity (multiple object "shapes"), second stores the type
+                if "type" in evaluated_obj:
+                    ref_item = ref_item[evaluated_obj.get("type")]
+                    # print "lower resolution with typed object"
+                    typed_obj = [True, evaluated_obj.get("type")]
+                if isinstance(ref_item, dict) and "type" not in ref_item:   # this is not a terminal
+                    result_obj = {}
+                    if typed_obj[0]:
+                        result_obj["type"] = typed_obj[1]
+                    # print "processing dictionary non-terminal value"
+                    for k, v in ref_item.items():
+                        # print "processing element '{0}' with value '{1}'".format(k, v)
+                        if k in evaluated_obj:
+                            # validate with ref obj
+                            # print "found in evaluated object!"
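A hypothetical happy-path use of CTRexYAMLLoader.validate_yaml: missing fields are filled from their defaults, and fields marked 'multiply: YES' in the reference file are scaled by the multiplier. The reference path and the input dict here are assumptions for illustration:

    from yaml_utils import CTRexYAMLLoader

    loader = CTRexYAMLLoader("../common/rpc_defaults.yaml")   # illustrative path
    user_stream = {"self_start": True,
                   "packet": {"binary": [0] * 64},
                   "mode": {"type": "continuous", "pps": 100},
                   "rx_stats": {}}
    validated = loader.validate_yaml(user_stream, "stream", multiplier=5)
    # 'enabled', 'isg', 'next_stream_id' and 'vm' are filled from defaults;
    # 'pps' is marked multiply: YES, so it is loaded as 100 * 5 = 500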
+                            tmp_type = v.get('type')
+                            # print tmp_type
+                            # print evaluated_obj
+                            if tmp_type == "object":
+                                # go deeper into the nesting hierarchy
+                                # print "This is an object type, recursion!"
+                                result_obj[k] = self.validate_yaml(evaluated_obj.get(k), k, fill_defaults, multiplier)
+                            else:
+                                # validation on terminal type
+                                # print "Validating terminal type %s" % k
+                                res_ok, data = self.check_term_param_type(evaluated_obj.get(k), k, v, multiplier)
+                                if res_ok:
+                                    # data field contains the value to save
+                                    result_obj[k] = data
+                                else:
+                                    # data var contains the exception to throw
+                                    raise data
+                        elif fill_defaults:
+                            # complete missing values with the default value, if one exists
+                            sub_obj = typed_obj[1] if typed_obj[0] else None
+                            res_ok, data = self.get_reference_default(root_obj, sub_obj, k)
+                            if res_ok:
+                                # data field contains the value to save
+                                result_obj[k] = data
+                            else:
+                                # data var contains the exception to throw
+                                raise data
+                    return result_obj
+                elif isinstance(ref_item, list):
+                    # currently not handling list objects
+                    raise NotImplementedError("List objects are currently unsupported")
+                else:
+                    raise TypeError("Unknown parse tree object type.")
+            except KeyError as e:
+                raise
+        else:
+            raise KeyError("The given root_key '{key}' does not exist on the reference object".format(key=root_obj))
+
+    @staticmethod
+    def _calc_final_value(val, multiplier, multiply):
+        def to_num(s):
+            try:
+                return int(s)
+            except ValueError:
+                return float(s)
+        if multiply:
+            return val * to_num(multiplier)
+        else:
+            return val
+
+
+def load_yaml_to_obj(file_path):
+    try:
+        return yaml.load(file(file_path, 'r'))
+    except yaml.YAMLError as e:
+        raise
+    except Exception as e:
+        raise
+
+
+def yaml_exporter(file_path):
+    pass
+
+
+if __name__ == "__main__":
+    pass
diff --git a/scripts/automation/trex_control_plane/common/external_packages.py b/scripts/automation/trex_control_plane/common/external_packages.py
new file mode 100755
index 00000000..62121d4f
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/external_packages.py
@@ -0,0 +1,28 @@
+#!/router/bin/python
+
+import sys
+import os
+
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir))  # path to trex_control_plane directory
+PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
+
+CLIENT_UTILS_MODULES = ['PyYAML-3.01/lib'
+                        ]
+
+def import_common_modules():
+    # must be in a higher priority
+    sys.path.insert(0, PATH_TO_PYTHON_LIB)
+    sys.path.append(ROOT_PATH)
+    import_module_list(CLIENT_UTILS_MODULES)
+
+
+def import_module_list(modules_list):
+    assert(isinstance(modules_list, list))
+    for p in modules_list:
+        full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
+        fix_path = os.path.normcase(full_path)
+        sys.path.insert(1, full_path)
+
+import_common_modules()
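The shim mirrors the client_utils one: importing it first guarantees the bundled PyYAML resolves ahead of anything on the system path. A one-line usage sketch (module names as in this commit):

    import external_packages  # prepends external_libs paths to sys.path
    import yaml               # now resolved from the bundled PyYAML-3.01/lib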
diff --git a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
new file mode 100755
index 00000000..32631609
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
@@ -0,0 +1,115 @@
+##############################################################
+####    TRex RPC stream list default values              ####
+##############################################################
+
+# this document is based on the TRex RPC server spec and its fields:
+# http://trex-tgn.cisco.com/trex/doc/trex_rpc_server_spec.html
+
+### HOW TO READ THIS FILE
+# 1. Each key represents an object type
+# 2. Each value can be either a value field or another object
+#    2.1. If it is a value field, read it as:
+#         + type: the type of the field
+#         + has_default: whether the field has a default value
+#         + default: the default value (only appears if the has_default field is 'YES')
+#    2.2. If it is an object type, jump to the corresponding object key.
+# 3. If an object has more than one instance type, another layer with the type shall be added.
+#    For example, the 'mode' object has 3 types: 'continuous', 'single_burst', 'multi_burst'
+#    So, 3 mode objects will be defined, named:
+#    - mode['continuous']
+#    - mode['single_burst']
+#    - mode['multi_burst']
+#    In this case, there's no default for the 'type' field on the object
+# 4. Some values have a 'multiply' property attached.
+#    In such a case, the loaded value will be multiplied by the multiplier
+#    For example, if the mode's 'pps' field value is 10, and its multiplier is 5,
+#    the loaded pps value will be 10*5=50
+# 5. Any object type must be listed by the user, even if all its fields are defaults.
+#    The most basic option would be to declare the object with "[]", which stands for an empty object in YAML syntax.
+
+
+stream:
+  enabled:
+    type: boolean
+    has_default: YES
+    default: True
+  self_start:
+    type: boolean
+    has_default: YES
+    default: True
+  isg:
+    type: [int, double, string]
+    has_default: YES
+    default: 0.0
+  next_stream_id:
+    type: string      # string, to allow binding by name
+    has_default: YES
+    default: -1       # no next streams
+  packet:
+    type: object
+  mode:
+    type: object
+  vm:
+    type: array
+    has_default: YES
+    default: []       # no ranging instructions
+  rx_stats:
+    type: object
+
+packet:
+  binary:
+    type: [array, string]
+    has_default: NO
+  meta:
+    type: string
+    has_default: YES
+    default: ""
+
+mode:
+  continuous:
+    pps:
+      type: [int, double]
+      has_default: NO
+      multiply: YES
+  single_burst:
+    pps:
+      type: [int, double]
+      has_default: NO
+      multiply: YES
+    total_pkts:
+      type: int
+      has_default: NO
+  multi_burst:
+    pps:
+      type: [int, double]
+      has_default: NO
+      multiply: YES
+    pkts_per_burst:
+      type: int
+      has_default: NO
+    ibg:
+      type: [int, double, string]
+      has_default: YES
+      default: 100.0
+    count:
+      type: int
+      has_default: YES
+      default: 0      # loop forever
+
+rx_stats:
+  enabled:
+    type: boolean
+    has_default: YES
+    default: False
+  stream_id:
+    type: string
+    has_default: YES
+    default: False    # use related stream_id
+  seq_enabled:
+    type: boolean
+    has_default: YES
+    default: False
+  latency_enabled:
+    type: boolean
+    has_default: YES
+    default: False
\ No newline at end of file
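To illustrate the 'has_default: NO' convention above: omitting a mandatory field such as the mode's 'pps' surfaces the ValueError built in get_reference_default. A sketch under assumed paths and inputs:

    from client_utils.yaml_utils import CTRexYAMLLoader

    loader = CTRexYAMLLoader("common/rpc_defaults.yaml")   # illustrative path
    try:
        loader.validate_yaml({"mode": {"type": "continuous"},   # no 'pps' given
                              "packet": {"binary": [0] * 60},
                              "rx_stats": {}},
                             "stream")
    except ValueError as e:
        print e   # The pps field is mandatory and must be specified explicitly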
diff --git a/scripts/automation/trex_control_plane/common/trex_status.py b/scripts/automation/trex_control_plane/common/trex_status.py
new file mode 100644
index 00000000..f132720c
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/trex_status.py
@@ -0,0 +1,8 @@
+#!/router/bin/python
+
+# define the states a T-Rex instance can be in during its lifetime
+# TRexStatus = Enum('TRexStatus', 'Idle Starting Running')
+
+IDLE = 1
+STARTING = 2
+RUNNING = 3
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
new file mode 100755
index 00000000..783f2769
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/trex_streams.py
@@ -0,0 +1,248 @@
+#!/router/bin/python
+
+import external_packages
+from client_utils.packet_builder import CTRexPktBuilder
+from collections import OrderedDict, namedtuple
+from client_utils.yaml_utils import *
+import dpkt
+import struct
+import copy
+import os
+
+StreamPack = namedtuple('StreamPack', ['stream_id', 'stream'])
+
+
+class CStreamList(object):
+
+    def __init__(self):
+        self.streams_list = {}
+        self.yaml_loader = CTRexYAMLLoader(os.path.join(os.path.dirname(os.path.realpath(__file__)),
+                                                        "rpc_defaults.yaml"))
+
+    def append_stream(self, name, stream_obj):
+        assert isinstance(stream_obj, CStream)
+        if name in self.streams_list:
+            raise NameError("A stream with this name already exists on this list.")
+        self.streams_list[name] = stream_obj
+        return
+
+    def remove_stream(self, name):
+        popped = self.streams_list.pop(name)
+        if popped:
+            for stream_name, stream in self.streams_list.items():
+                if stream.next_stream_id == name:
+                    stream.next_stream_id = -1
+                try:
+                    rx_stats_stream = getattr(stream.rx_stats, "stream_id")
+                    if rx_stats_stream == name:
+                        # if a stream referenced by an rx_stats object is deleted, revert to the rx stats of the current stream
+                        setattr(stream.rx_stats, "stream_id", stream_name)
+                except AttributeError as e:
+                    continue
+        return popped
+
+    def export_to_yaml(self, file_path):
+        raise NotImplementedError("export_to_yaml method is not implemented, yet")
+
+    def load_yaml(self, file_path, multiplier=1):
+        # clear all existing streams linked to this object
+        self.streams_list.clear()
+        streams_data = load_yaml_to_obj(file_path)
+        assert isinstance(streams_data, list)
+        for stream in streams_data:
+            stream_name = stream.get("name")
+            raw_stream = stream.get("stream")
+            if not stream_name or not raw_stream:
+                raise ValueError("Provided stream is not according to convention. "
+                                 "Each stream must be provided as two keys: 'name' and 'stream'. "
+                                 "Provided item was:\n {stream}".format(stream=stream))
+            new_stream_data = self.yaml_loader.validate_yaml(raw_stream,
+                                                             "stream",
+                                                             multiplier=multiplier)
+            new_stream_obj = CStream()
+            new_stream_obj.load_data(**new_stream_data)
+            self.append_stream(stream_name, new_stream_obj)
+        return new_stream_data
+
+    def compile_streams(self):
+        # first, assign an id to each stream
+        stream_ids = {}
+        for idx, stream_name in enumerate(self.streams_list):
+            stream_ids[stream_name] = idx
+        # next, iterate over the streams and transform them from working with names to ids.
+        # with that, build a new dict with the old stream_name as the key, and StreamPack as the stored value
+        compiled_streams = {}
+        for stream_name, stream in self.streams_list.items():
+            tmp_stream = CStreamList._compile_single_stream(stream_name, stream, stream_ids)
+            compiled_streams[stream_name] = StreamPack(stream_ids.get(stream_name),
+                                                       tmp_stream)
+        return compiled_streams
+
+    @staticmethod
+    def _compile_single_stream(stream_name, stream, id_dict):
+        # copy the old stream to a temporary one, so that the original's attributes are not changed
+        tmp_stream = copy.copy(stream)
+        next_stream_id = id_dict.get(getattr(tmp_stream, "next_stream_id"), -1)
+        try:
+            rx_stats_stream_id = id_dict.get(getattr(tmp_stream.rx_stats, "stream_id"),
+                                             id_dict.get(stream_name))
+        except AttributeError as e:
+            rx_stats_stream_id = id_dict.get(stream_name)
+        # assign resolved values to the stream object
+        tmp_stream.next_stream_id = next_stream_id
+        tmp_stream.rx_stats.stream_id = rx_stats_stream_id
+        return tmp_stream
+
+
+class CRxStats(object):
+
+    FIELDS = ["seq_enabled", "latency_enabled", "stream_id"]
+
+    def __init__(self, enabled=False, **kwargs):
+        self.enabled = bool(enabled)
+        for field in CRxStats.FIELDS:
+            setattr(self, field, kwargs.get(field, False))
+
+    def dump(self):
+        if self.enabled:
+            dump = {"enabled": True}
+            dump.update({k: getattr(self, k)
+                         for k in CRxStats.FIELDS}
+                        )
+            return dump
+        else:
+            return {"enabled": False}
+
+
+class CTxMode(object):
+    """Transmission mode of a stream (continuous / single_burst / multi_burst)."""
+
+    GENERAL_FIELDS = ["type", "pps"]
+    FIELDS = {"continuous": [],
+              "single_burst": ["total_pkts"],
+              "multi_burst": ["pkts_per_burst", "ibg", "count"]}
+
+    def __init__(self, type, pps=0, **kwargs):
+        self._MODES = CTxMode.FIELDS.keys()
+        self.type = type
+        self.pps = pps
+        for field in CTxMode.FIELDS.get(self.type):
+            setattr(self, field, kwargs.get(field, 0))
+
+    @property
+    def type(self):
+        return self._type
+
+    @type.setter
+    def type(self, type):
+        if type not in self._MODES:
+            raise ValueError("Unknown TX mode ('{0}') has been initialized.".format(type))
+        self._type = type
+        self._reset_fields()
+
+    def dump(self):
+        dump = ({k: getattr(self, k)
+                 for k in CTxMode.GENERAL_FIELDS
+                 })
+        dump.update({k: getattr(self, k)
+                     for k in CTxMode.FIELDS.get(self.type)
+                     })
+        return dump
+
+    def _reset_fields(self):
+        for field in CTxMode.FIELDS.get(self.type):
+            setattr(self, field, 0)
+
+
+class CStream(object):
+    """A single TRex stateless stream: packet data, TX mode and RX stats settings."""
+
+    FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
+    # COMPILE_FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
+
+    def __init__(self):
+        self.is_loaded = False
+        self._is_compiled = False
+        for field in CStream.FIELDS:
+            setattr(self, field, None)
+
+    def load_data(self, **kwargs):
+        try:
+            for k in CStream.FIELDS:
+                if k == "rx_stats":
+                    rx_stats_data = kwargs[k]
+                    if isinstance(rx_stats_data, dict):
+                        setattr(self, k, CRxStats(**rx_stats_data))
+                    elif isinstance(rx_stats_data, CRxStats):
+                        setattr(self, k, rx_stats_data)
+                elif k == "mode":
+                    tx_mode = kwargs[k]
+                    if isinstance(tx_mode, dict):
+                        setattr(self, k, CTxMode(**tx_mode))
+                    elif isinstance(tx_mode, CTxMode):
+                        setattr(self, k, tx_mode)
+                elif k == "packet":
+                    if isinstance(kwargs[k], CTRexPktBuilder):
+                        if "vm" not in kwargs:
+                            self.load_packet_obj(kwargs[k])
+                        else:
+                            raise ValueError("When providing the packet object with a CTRexPktBuilder, the vm parameter "
+                                             "should not be supplied")
+                    else:
+                        binary = kwargs[k]["binary"]
+                        if isinstance(binary, list):
+                            setattr(self, k, kwargs[k])
+                        elif isinstance(binary, str) and binary.endswith(".pcap"):
+                            self.load_packet_from_pcap(binary, kwargs[k]["meta"])
+                        else:
+                            raise ValueError("Packet binary attribute has been loaded with an unsupported value. "
+                                             "Supported values are a reference to a pcap file with a SINGLE packet, "
+                                             "or a list of unsigned-byte integers")
+                else:
+                    setattr(self, k, kwargs[k])
+            self.is_loaded = True
+        except KeyError as e:
+            cause = e.args[0]
+            raise KeyError("The attribute '{0}' is missing as a field of the CStream object.\n"
+                           "Loaded data must contain all of the following fields: {1}".format(cause, CStream.FIELDS))
+
+    def load_packet_obj(self, packet_obj):
+        assert isinstance(packet_obj, CTRexPktBuilder)
+        self.packet = packet_obj.dump_pkt()
+        self.vm = packet_obj.get_vm_data()
+
+    def load_packet_from_pcap(self, pcap_path, metadata=''):
+        with open(pcap_path, 'rb') as f:
+            pcap = dpkt.pcap.Reader(f)
+            first_packet = True
+            for _, buf in pcap:
+                # this is an iterator, so the number of packets cannot be evaluated in advance
+                if first_packet:
+                    self.packet = {"binary": [struct.unpack('B', buf[i:i+1])[0]  # represent data as a list of 0-255 ints
+                                              for i in range(0, len(buf))],
+                                   "meta": metadata}   # meta data continues without a change.
+                    first_packet = False
+                else:
+                    raise ValueError("Provided pcap file contains more than a single packet.")
+        # arrive here ONLY if the pcap contained a SINGLE packet
+        return
+
+    def dump(self, compilation=False):
+        # fields = CStream.COMPILE_FIELDS if compilation else CStream.FIELDS
+        if self.is_loaded:
+            dump = {}
+            for key in CStream.FIELDS:
+                try:
+                    dump[key] = getattr(self, key).dump()  # use the dump() method of compound objects, such as CTxMode
+                except AttributeError:
+                    dump[key] = getattr(self, key)
+            return dump
+        else:
+            raise RuntimeError("CStream object isn't loaded with data. Use 'load_data' method.")
+
+    def dump_compiled(self):
+        return self.dump(compilation=True)
+
+
+if __name__ == "__main__":
+    pass
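End to end, CStreamList ties the pieces together: load a YAML stream list (such as stl/imix_1pkt.yaml below), then compile the name references into numeric stream ids. A sketch under assumed paths; the pcap referenced inside the YAML must exist relative to the working directory:

    from common.trex_streams import CStreamList

    streams = CStreamList()
    streams.load_yaml("stl/imix_1pkt.yaml", multiplier=1)   # illustrative path
    compiled = streams.compile_streams()   # {name: StreamPack(stream_id, CStream)}
    for name, pack in compiled.items():
        # dump_compiled() yields the JSON-ready dict handed to add_stream
        print name, pack.stream_id, pack.stream.dump_compiled()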
+""" + import cmd import json import ast @@ -10,10 +27,14 @@ import string import sys import tty, termios import trex_root_path +from common.trex_streams import * from client_utils.jsonrpc_client import TrexStatelessClient import trex_status +from collections import namedtuple + +LoadedStreamList = namedtuple('LoadedStreamList', ['loaded', 'compiled']) # @@ -114,6 +135,8 @@ class TrexConsole(cmd.Cmd): self.verbose = False self.postcmd(False, "") + + self.user_streams = {} # a cool hack - i stole this function and added space @@ -404,6 +427,91 @@ class TrexConsole(cmd.Cmd): print "{:<30} {:<30}".format(cmd + " - ", help) + def do_load_stream_list(self, line): + '''Loads a YAML stream list serialization into user console \n''' + args = line.split() + if args >= 2: + name = args[0] + yaml_path = args[1] + try: + multiplier = args[2] + except IndexError: + multiplier = 1 + stream_list = CStreamList() + loaded_obj = stream_list.load_yaml(yaml_path, multiplier) + # print self.rpc_client.pretty_json(json.dumps(loaded_obj)) + if name in self.user_streams: + print "Picked name already exist. Please pick another name." + else: + try: + compiled_streams = stream_list.compile_streams() + self.user_streams[name] = LoadedStreamList(loaded_obj, + [StreamPack(v.stream_id, v.stream.dump_compiled()) + for k, v in compiled_streams.items()]) + + print "Stream '{0}' loaded successfully".format(name) + except Exception as e: + raise + return + else: + print "please provide load name and YAML path, separated by space.\n" \ + "Optionally, you may provide a third argument to specify multiplier." + + def do_show_stream_list(self, line): + '''Shows the loaded stream list named [name] \n''' + args = line.split() + if args: + list_name = args[0] + try: + stream = self.user_streams[list_name] + if len(args) >= 2 and args[1] == "full": + print self.rpc_client.pretty_json(json.dumps(stream.compiled)) + else: + print self.rpc_client.pretty_json(json.dumps(stream.loaded)) + except KeyError as e: + print "Unknown stream list name provided" + else: + print "\nAvailable stream lists:\n{0}".format(', '.join([x + for x in self.user_streams.keys()])) + + def complete_show_stream_list (self, text, line, begidx, endidx): + return [x + for x in self.user_streams.keys() + if x.startswith(text)] + + def do_attach(self, line): + args = line.split() + if len(args) >= 1: + try: + stream_list = self.user_streams[args[0]] + port_list = self.parse_ports_from_line(' '.join(args[1:])) + owned = set(self.rpc_client.get_owned_ports()) + if set(port_list).issubset(owned): + rc, resp_list = self.rpc_client.add_stream(port_list, stream_list.compiled) + if not rc: + print "\n*** " + resp_list + "\n" + return + else: + print "Not all desired ports are aquired.\n" \ + "Acquired ports are: {acq}\n" \ + "Requested ports: {req}\n" \ + "Missing ports: {miss}".format(acq=list(owned), + req=port_list, + miss=list(set(port_list).difference(owned))) + except KeyError as e: + cause = e.args[0] + print "Provided stream list name '{0}' doesn't exists.".format(cause) + else: + print "Please provide list name and ports to attach to, or leave empty to attach to all ports." + + + + + + + + + # adds a very simple stream diff --git a/scripts/stl/imix_1pkt.yaml b/scripts/stl/imix_1pkt.yaml new file mode 100755 index 00000000..511f8695 --- /dev/null +++ b/scripts/stl/imix_1pkt.yaml @@ -0,0 +1,11 @@ +### Single stream UDP packet, 64B ###
+#####################################
+- name: udp_64B
+  stream:
+    self_start: True
+    packet:
+      binary: cap2/udp_64B.pcap
+    mode:
+      type: continuous
+      pps: 100
+    rx_stats: []
\ No newline at end of file
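Taken together with the console additions above, a session against this file might look like the following (prompt and exact output are illustrative; load_stream_list validates and compiles the list, attach pushes it to acquired ports):

    trex > load_stream_list imix stl/imix_1pkt.yaml
    Stream 'imix' loaded successfully
    trex > show_stream_list imix
    trex > attach imix 0 1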
diff --git a/scripts/stl/imix_2pkt.yaml b/scripts/stl/imix_2pkt.yaml
new file mode 100755
index 00000000..17a7bdc1
--- /dev/null
+++ b/scripts/stl/imix_2pkt.yaml
@@ -0,0 +1,20 @@
+### Two-stream UDP packets, 64B and 594B ###
+############################################
+- name: udp_64B
+  stream:
+    self_start: True
+    packet:
+      binary: cap2/udp_64B.pcap
+    mode:
+      type: continuous
+      pps: 100
+    rx_stats: []
+- name: udp_594B
+  stream:
+    self_start: True
+    packet:
+      binary: cap2/udp_594B.pcap
+    mode:
+      type: continuous
+      pps: 100
+    rx_stats: []
\ No newline at end of file
diff --git a/scripts/stl/imix_3pkt.yaml b/scripts/stl/imix_3pkt.yaml
new file mode 100755
index 00000000..d3923fb8
--- /dev/null
+++ b/scripts/stl/imix_3pkt.yaml
@@ -0,0 +1,29 @@
+### Three-stream UDP packets, 64B, 594B and 1518B ###
+#####################################################
+- name: udp_64B
+  stream:
+    self_start: True
+    packet:
+      binary: cap2/udp_64B.pcap
+    mode:
+      type: continuous
+      pps: 100
+    rx_stats: []
+- name: udp_594B
+  stream:
+    self_start: True
+    packet:
+      binary: cap2/udp_594B.pcap
+    mode:
+      type: continuous
+      pps: 100
+    rx_stats: []
+- name: udp_1518B
+  stream:
+    self_start: True
+    packet:
+      binary: cap2/udp_1518B.pcap
+    mode:
+      type: continuous
+      pps: 100
+    rx_stats: []
\ No newline at end of file