summaryrefslogtreecommitdiffstats
path: root/scripts/automation/trex_control_plane/common
diff options
context:
space:
mode:
Diffstat (limited to 'scripts/automation/trex_control_plane/common')
-rwxr-xr-xscripts/automation/trex_control_plane/common/external_packages.py28
-rwxr-xr-xscripts/automation/trex_control_plane/common/rpc_defaults.yaml115
-rw-r--r--scripts/automation/trex_control_plane/common/trex_status.py8
-rwxr-xr-xscripts/automation/trex_control_plane/common/trex_streams.py248
4 files changed, 399 insertions, 0 deletions
diff --git a/scripts/automation/trex_control_plane/common/external_packages.py b/scripts/automation/trex_control_plane/common/external_packages.py
new file mode 100755
index 00000000..62121d4f
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/external_packages.py
@@ -0,0 +1,28 @@
#!/router/bin/python

import sys
import os

CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir))   # path to trex_control_plane directory
PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))

# bundled third-party packages, given as paths relative to PATH_TO_PYTHON_LIB
CLIENT_UTILS_MODULES = ['PyYAML-3.01/lib'
                        ]


def import_common_modules():
    """Make the bundled external libs and the control-plane root importable.

    Prepends PATH_TO_PYTHON_LIB so bundled packages win over system ones,
    appends ROOT_PATH, then registers each package dir listed in
    CLIENT_UTILS_MODULES.
    """
    # must be in a higher priority
    sys.path.insert(0, PATH_TO_PYTHON_LIB)
    sys.path.append(ROOT_PATH)
    import_module_list(CLIENT_UTILS_MODULES)


def import_module_list(modules_list):
    """Insert each package dir (relative to PATH_TO_PYTHON_LIB) into sys.path.

    :param modules_list: list of relative path strings.
    """
    assert isinstance(modules_list, list)
    for p in modules_list:
        full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
        # note: inserted at index 1 so position 0 (PATH_TO_PYTHON_LIB) keeps top priority
        sys.path.insert(1, full_path)

import_common_modules()
+
diff --git a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
new file mode 100755
index 00000000..32631609
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
@@ -0,0 +1,115 @@
+##############################################################
+#### TRex RPC stream list default values ####
+##############################################################
+
+# this document is based on TRex RPC server spec and its fields:
+# http://trex-tgn.cisco.com/trex/doc/trex_rpc_server_spec.html
+
+### HOW TO READ THIS FILE
+# 1. Each key represents an object type
+# 2. Each value can be either a value field or another object
+# 2.1. If a value field, read as:
+# + type: type of field
+# + has_default: if the value has any default
+# + default: the default value (Only appears if has_default field is 'YES')
+# 2.2. If an object type, jump to corresponding object key.
+# 3. If an object has more than one instance type, another layer with the type shall be added.
+# For example, 'mode' object has 3 types: 'continuous', 'single_burst', 'multi_burst'
+# So, 3 mode objects will be defined, named:
+# - mode['continuous']
+# - mode['single_burst']
+# - mode['multi_burst']
+# In this case, there's no default for the 'type' field on the object
+# 4. Some values have a 'multiply' property attached.
+# In such case, the loaded value will be multiplied by the multiplier
+# For example, if the mode's 'pps' field value is 10, and its multiplier is 5,
+# the loaded pps value will be 10*5=50
+# 5. Any object type must be listed by the user, even if all its fields are defaults.
+# The most basic option would be to declare the object with "[]", which stands for empty object in YAML syntax.
+
+
+stream:
+ enabled:
+ type: boolean
+ has_default: YES
+ default: True
+ self_start:
+ type: boolean
+ has_default: YES
+ default: True
+ isg:
+ type: [int, double, string]
+ has_default: YES
+ default: 0.0
+ next_stream_id:
+        type: string           # string to allow name binding
+ has_default: YES
+ default: -1 # no next streams
+ packet:
+ type: object
+ mode:
+ type: object
+ vm:
+ type: array
+ has_default: YES
+ default: [] # no ranging instructions
+ rx_stats:
+ type: object
+
+packet:
+ binary:
+ type: [array,string]
+ has_default: NO
+ meta:
+ type: string
+ has_default: YES
+ default: ""
+
+mode:
+ continuous:
+ pps:
+ type: [int, double]
+ has_default: NO
+ multiply: YES
+ single_burst:
+ pps:
+ type: [int, double]
+ has_default: NO
+ multiply: YES
+ total_pkts:
+ type: int
+ has_default: NO
+ multi_burst:
+ pps:
+ type: [int, double]
+ has_default: NO
+ multiply: YES
+ pkts_per_burst:
+ type: int
+ has_default: NO
+ ibg:
+ type: [int, double, string]
+ has_default: YES
+ default: 100.0
+ count:
+ type: int
+ has_default: YES
+ default: 0 # loop forever
+
+rx_stats:
+ enabled:
+ type: boolean
+ has_default: YES
+ default: False
+ stream_id:
+ type: string
+ has_default: YES
+ default: False # use related stream_id
+ seq_enabled:
+ type: boolean
+ has_default: YES
+ default: False
+ latency_enabled:
+ type: boolean
+ has_default: YES
+ default: False \ No newline at end of file
diff --git a/scripts/automation/trex_control_plane/common/trex_status.py b/scripts/automation/trex_control_plane/common/trex_status.py
new file mode 100644
index 00000000..f132720c
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/trex_status.py
@@ -0,0 +1,8 @@
+#!/router/bin/python
+
+# define the states in which a T-Rex can hold during its lifetime
+# TRexStatus = Enum('TRexStatus', 'Idle Starting Running')
+
# lifetime states (see module comment above); presumably:
IDLE = 1      # no T-Rex run active — TODO confirm against server usage
STARTING = 2  # run launch in progress
RUNNING = 3   # run active
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
new file mode 100755
index 00000000..783f2769
--- /dev/null
+++ b/scripts/automation/trex_control_plane/common/trex_streams.py
@@ -0,0 +1,248 @@
+#!/router/bin/python
+
+import external_packages
+from client_utils.packet_builder import CTRexPktBuilder
+from collections import OrderedDict, namedtuple
+from client_utils.yaml_utils import *
+import dpkt
+import struct
+import copy
+import os
+
+StreamPack = namedtuple('StreamPack', ['stream_id', 'stream'])
+
class CStreamList(object):
    """A named collection of CStream objects with name->id compilation support."""

    def __init__(self):
        # maps stream name -> CStream object
        self.streams_list = {}
        # validator backed by the RPC defaults spec shipped next to this module
        self.yaml_loader = CTRexYAMLLoader(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                        "rpc_defaults.yaml"))

    def append_stream(self, name, stream_obj):
        """Add *stream_obj* under *name*; raises NameError on a duplicate name."""
        assert isinstance(stream_obj, CStream)
        if name in self.streams_list:
            raise NameError("A stream with this name already exists on this list.")
        self.streams_list[name] = stream_obj
        return

    def remove_stream(self, name):
        """Remove the stream named *name* and patch references to it.

        Any remaining stream whose next_stream_id pointed at *name* is reset
        to -1 (no next stream); any rx_stats referencing *name* reverts to its
        own stream. Raises KeyError if *name* is not present.
        """
        popped = self.streams_list.pop(name)
        if popped:
            for stream_name, stream in self.streams_list.items():
                if stream.next_stream_id == name:
                    stream.next_stream_id = -1
                try:
                    rx_stats_stream = getattr(stream.rx_stats, "stream_id")
                    if rx_stats_stream == name:
                        # if a referenced stream of rx_stats object deleted, revert to rx stats of current stream
                        setattr(stream.rx_stats, "stream_id", stream_name)
                except AttributeError as e:
                    continue
        return popped

    def export_to_yaml(self, file_path):
        raise NotImplementedError("export_to_yaml method is not implemented, yet")

    def load_yaml(self, file_path, multiplier=1):
        """Replace the current streams with those loaded from a YAML file.

        Each YAML item must carry 'name' and 'stream' keys; each stream is
        validated against rpc_defaults.yaml with *multiplier* applied.
        """
        # clear all existing streams linked to this object
        self.streams_list.clear()
        streams_data = load_yaml_to_obj(file_path)
        assert isinstance(streams_data, list)
        for stream in streams_data:
            stream_name = stream.get("name")
            raw_stream = stream.get("stream")
            if not stream_name or not raw_stream:
                # BUGFIX: format() was called positionally against the named
                # '{stream}' field, which raised KeyError instead of ValueError
                raise ValueError("Provided stream is not according to convention. "
                                 "Each stream must be provided as two keys: 'name' and 'stream'. "
                                 "Provided item was:\n {stream}".format(stream=stream))
            new_stream_data = self.yaml_loader.validate_yaml(raw_stream,
                                                            "stream",
                                                            multiplier=multiplier)
            new_stream_obj = CStream()
            new_stream_obj.load_data(**new_stream_data)
            self.append_stream(stream_name, new_stream_obj)
        return new_stream_data

    def compile_streams(self):
        """Resolve stream names to numeric ids; return {name: StreamPack}."""
        # first, assign an id to each stream
        stream_ids = {}
        for idx, stream_name in enumerate(self.streams_list):
            stream_ids[stream_name] = idx
        # next, iterate over the streams and transform them from working with names to ids.
        # with that build a new dict with old stream_name as the key, and StreamPack as the stored value
        compiled_streams = {}
        for stream_name, stream in self.streams_list.items():
            tmp_stream = CStreamList._compile_single_stream(stream_name, stream, stream_ids)
            compiled_streams[stream_name] = StreamPack(stream_ids.get(stream_name),
                                                      tmp_stream)
        return compiled_streams

    @staticmethod
    def _compile_single_stream(stream_name, stream, id_dict):
        """Return a shallow copy of *stream* with name references turned into ids."""
        # copy the old stream to temporary one, no change to class attributes
        tmp_stream = copy.copy(stream)
        next_stream_id = id_dict.get(getattr(tmp_stream, "next_stream_id"), -1)
        try:
            rx_stats_stream_id = id_dict.get(getattr(tmp_stream.rx_stats, "stream_id"),
                                             id_dict.get(stream_name))
        except AttributeError as e:
            # rx_stats missing/not an object: fall back to this stream's own id
            rx_stats_stream_id = id_dict.get(stream_name)
        # assign resolved values to stream object
        tmp_stream.next_stream_id = next_stream_id
        tmp_stream.rx_stats.stream_id = rx_stats_stream_id
        return tmp_stream
+
+
class CRxStats(object):
    """Per-stream RX statistics configuration (enable flag plus sub-options)."""

    FIELDS = ["seq_enabled", "latency_enabled", "stream_id"]

    def __init__(self, enabled=False, **kwargs):
        self.enabled = bool(enabled)
        # every optional field defaults to False when not supplied
        for field_name in CRxStats.FIELDS:
            setattr(self, field_name, kwargs.get(field_name, False))

    def dump(self):
        """Serialize to a plain dict; a disabled object dumps only the flag."""
        if not self.enabled:
            return {"enabled": False}
        result = {"enabled": True}
        for field_name in CRxStats.FIELDS:
            result[field_name] = getattr(self, field_name)
        return result
+
+
+
class CTxMode(object):
    """Transmission mode of a stream: continuous, single_burst or multi_burst."""

    GENERAL_FIELDS = ["type", "pps"]
    FIELDS = {"continuous": [],
              "single_burst": ["total_pkts"],
              "multi_burst": ["pkts_per_burst", "ibg", "count"]}

    def __init__(self, type, pps=0, **kwargs):
        self._MODES = CTxMode.FIELDS.keys()
        self.type = type        # property setter validates and zeroes mode-specific fields
        self.pps = pps
        # then override the mode-specific fields from the supplied kwargs
        for field_name in CTxMode.FIELDS.get(self.type):
            setattr(self, field_name, kwargs.get(field_name, 0))

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, type):
        if type not in self._MODES:
            raise ValueError("Unknown TX mode ('{0}')has been initialized.".format(type))
        self._type = type
        self._reset_fields()

    def dump(self):
        """Serialize to a plain dict: general fields plus the active mode's fields."""
        data = {}
        for field_name in CTxMode.GENERAL_FIELDS:
            data[field_name] = getattr(self, field_name)
        for field_name in CTxMode.FIELDS.get(self.type):
            data[field_name] = getattr(self, field_name)
        return data

    def _reset_fields(self):
        # zero out every field belonging to the currently selected mode
        for field_name in CTxMode.FIELDS.get(self.type):
            setattr(self, field_name, 0)
+
+
class CStream(object):
    """A single TRex stream: loads, validates and serializes stream attributes."""

    FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]

    def __init__(self):
        self.is_loaded = False
        self._is_compiled = False
        # all public fields start unset; load_data() populates them
        for field in CStream.FIELDS:
            setattr(self, field, None)

    def load_data(self, **kwargs):
        """Populate the stream from keyword arguments.

        Every name in CStream.FIELDS must appear in kwargs (KeyError otherwise).
        'rx_stats' and 'mode' accept either ready objects or plain dicts;
        'packet' accepts a CTRexPktBuilder (vm must then be omitted), a dict
        with a byte-list 'binary', or a dict referencing a single-packet pcap.
        """
        try:
            for k in CStream.FIELDS:
                if k == "rx_stats":
                    rx_stats_data = kwargs[k]
                    if isinstance(rx_stats_data, dict):
                        setattr(self, k, CRxStats(**rx_stats_data))
                    elif isinstance(rx_stats_data, CRxStats):
                        setattr(self, k, rx_stats_data)
                elif k == "mode":
                    tx_mode = kwargs[k]
                    if isinstance(tx_mode, dict):
                        setattr(self, k, CTxMode(**tx_mode))
                    elif isinstance(tx_mode, CTxMode):
                        setattr(self, k, tx_mode)
                elif k == "packet":
                    if isinstance(kwargs[k], CTRexPktBuilder):
                        if "vm" not in kwargs:
                            self.load_packet_obj(kwargs[k])
                        else:
                            raise ValueError("When providing packet object with a CTRexPktBuilder, vm parameter "
                                             "should not be supplied")
                    else:
                        binary = kwargs[k]["binary"]
                        if isinstance(binary, list):
                            setattr(self, k, kwargs[k])
                        elif isinstance(binary, str) and binary.endswith(".pcap"):
                            self.load_packet_from_pcap(binary, kwargs[k]["meta"])
                        else:
                            raise ValueError("Packet binary attribute has been loaded with unsupported value."
                                             "Supported values are reference to pcap file with SINGLE packet, "
                                             "or a list of unsigned-byte integers")
                else:
                    setattr(self, k, kwargs[k])
            self.is_loaded = True
        except KeyError as e:
            cause = e.args[0]
            raise KeyError("The attribute '{0}' is missing as a field of the CStream object.\n"
                           "Loaded data must contain all of the following fields: {1}".format(cause, CStream.FIELDS))

    def load_packet_obj(self, packet_obj):
        """Take packet binary and ranging-VM data from a CTRexPktBuilder."""
        assert isinstance(packet_obj, CTRexPktBuilder)
        self.packet = packet_obj.dump_pkt()
        self.vm = packet_obj.get_vm_data()

    def load_packet_from_pcap(self, pcap_path, metadata=''):
        """Load self.packet from a pcap file that must hold exactly one packet.

        BUGFIX: pcap is a binary format -- open with 'rb' so dpkt parsing
        does not break where text mode translates bytes.
        """
        with open(pcap_path, 'rb') as f:
            pcap = dpkt.pcap.Reader(f)
            first_packet = True
            for _, buf in pcap:
                # this is an iterator, can't evaluate the number of packets in advance
                if first_packet:
                    self.packet = {"binary": [struct.unpack('B', buf[i:i+1])[0]  # represent data as list of 0-255 ints
                                              for i in range(0, len(buf))],
                                   "meta": metadata}  # meta data continues without a change
                    first_packet = False
                else:
                    raise ValueError("Provided pcap file contains more than single packet.")
        # arrive here ONLY if pcap contained SINGLE packet
        return

    def dump(self, compilation=False):
        """Serialize the stream to a plain dict.

        Compound fields exposing a dump() method (e.g. CTxMode, CRxStats) are
        serialized through it; plain fields are passed through as-is.
        Raises RuntimeError if load_data() was never called.
        """
        if not self.is_loaded:
            raise RuntimeError("CStream object isn't loaded with data. Use 'load_data' method.")
        dump = {}
        for key in CStream.FIELDS:
            try:
                dump[key] = getattr(self, key).dump()  # use dump() method of compound object, such TxMode
            except AttributeError:
                dump[key] = getattr(self, key)
        return dump

    def dump_compiled(self):
        return self.dump(compilation=True)
+
+
+if __name__ == "__main__":
+ pass