summaryrefslogtreecommitdiffstats
path: root/scripts/automation/trex_control_plane
diff options
context:
space:
mode:
authorimarom <imarom@cisco.com>2016-02-09 11:18:47 -0500
committerimarom <imarom@cisco.com>2016-02-09 11:18:47 -0500
commitede68c669fde984d6095e9313d49a8af295ae885 (patch)
tree3c3d52457bc94475f413a04b82f6e4e80b48b64f /scripts/automation/trex_control_plane
parent1ab9a175ca7d49f7ae843d46a76c36baa16ff39d (diff)
parent59d48a12d2c2f1e7a42e44265c4a3a4c1c8651fd (diff)
Merge branch 'refactor'
Diffstat (limited to 'scripts/automation/trex_control_plane')
-rwxr-xr-xscripts/automation/trex_control_plane/client/trex_hltapi.py23
-rwxr-xr-xscripts/automation/trex_control_plane/client_utils/packet_builder.py1209
-rwxr-xr-xscripts/automation/trex_control_plane/client_utils/yaml_utils.py1
-rwxr-xr-xscripts/automation/trex_control_plane/common/rpc_defaults.yaml124
-rwxr-xr-xscripts/automation/trex_control_plane/common/trex_streams.py526
-rw-r--r--scripts/automation/trex_control_plane/stl/console/__init__.py0
-rwxr-xr-xscripts/automation/trex_control_plane/stl/console/trex_console.py (renamed from scripts/automation/trex_control_plane/console/trex_console.py)16
-rwxr-xr-xscripts/automation/trex_control_plane/stl/console/trex_root_path.py (renamed from scripts/automation/trex_control_plane/console/trex_root_path.py)0
-rw-r--r--scripts/automation/trex_control_plane/stl/console/trex_tui.py (renamed from scripts/automation/trex_control_plane/console/trex_tui.py)14
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_bi_dir_flows.py114
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_imix.py103
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_path.py4
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_run_udp_simple.py219
-rw-r--r--scripts/automation/trex_control_plane/stl/examples/stl_simple_burst.py64
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/__init__.py1
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/api.py28
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py (renamed from scripts/automation/trex_control_plane/client/trex_async_client.py)18
-rw-r--r--[-rwxr-xr-x]scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py (renamed from scripts/automation/trex_control_plane/client/trex_stateless_client.py)128
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_exceptions.py (renamed from scripts/automation/trex_control_plane/common/trex_stl_exceptions.py)3
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py74
-rw-r--r--[-rwxr-xr-x]scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_jsonrpc_client.py (renamed from scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py)8
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_interface.py (renamed from scripts/automation/trex_control_plane/client_utils/packet_builder_interface.py)0
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py (renamed from scripts/automation/trex_control_plane/client_utils/scapy_packet_builder.py)87
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py (renamed from scripts/automation/trex_control_plane/client/trex_port.py)67
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py (renamed from scripts/automation/trex_control_plane/client/trex_stateless_sim.py)75
-rw-r--r--[-rwxr-xr-x]scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py (renamed from scripts/automation/trex_control_plane/common/trex_stats.py)9
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py67
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py386
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_types.py (renamed from scripts/automation/trex_control_plane/common/trex_types.py)2
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/__init__.py0
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/common.py47
-rwxr-xr-xscripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py (renamed from scripts/automation/trex_control_plane/client_utils/parsing_opts.py)0
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_opts.py192
-rw-r--r--scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_tables.py (renamed from scripts/automation/trex_control_plane/client_utils/text_tables.py)4
34 files changed, 1548 insertions, 2065 deletions
diff --git a/scripts/automation/trex_control_plane/client/trex_hltapi.py b/scripts/automation/trex_control_plane/client/trex_hltapi.py
index 5d3f506c..c2a08306 100755
--- a/scripts/automation/trex_control_plane/client/trex_hltapi.py
+++ b/scripts/automation/trex_control_plane/client/trex_hltapi.py
@@ -108,11 +108,22 @@ traffic_stats_kwargs = {
}
-#import trex_root_path
-import client_utils.scapy_packet_builder as pkt_bld
-from client_utils.scapy_packet_builder import CTRexVmDescFlowVar, CTRexVmDescWrFlowVar
-from trex_stateless_client import STLClient
-from common.trex_streams import *
+#HACK FIX ME START
+import sys
+import os
+
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+sys.path.append(os.path.join(CURRENT_PATH, '../stl/'))
+#HACK FIX ME END
+
+from trex_stl_lib.api import *
+import trex_stl_lib.api as pkt_bld
+#import trex_stl_lib.trex_stl_packet_builder_scapy as pkt_bld
+#from trex_stl_lib.trex_stl_packet_builder_scapy import import CTRexVmDescFlowVar, CTRexVmDescWrFlowVar
+#from trex_stl_lib.trex_stl_client import STLClient
+#from common.trex_streams import *
+
+import trex_root_path
from client_utils.general_utils import get_integer
import socket
import copy
@@ -523,7 +534,7 @@ class CTRexHltApiPktBuilder:
debug_filename = kwargs.get('save_to_yaml')
if type(debug_filename) is str:
- stream_obj.dump_to_yaml(debug_filename, stream_obj)
+ STLProfile(stream_obj).dump_to_yaml(debug_filename)
return stream_obj
@staticmethod
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
deleted file mode 100755
index f9031436..00000000
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py
+++ /dev/null
@@ -1,1209 +0,0 @@
-#!/router/bin/python
-
-import external_packages
-import dpkt
-import socket
-import binascii
-import copy
-import random
-import string
-import struct
-import re
-import itertools
-from abc import ABCMeta, abstractmethod
-from collections import namedtuple
-import base64
-
-from packet_builder_interface import CTrexPktBuilderInterface
-
-class CTRexPktBuilder(CTrexPktBuilderInterface):
- """
- This class defines the TRex API of building a packet using dpkt package.
- Using this class the user can also define how TRex will handle the packet by specifying the VM setting.
- """
- def __init__(self, max_pkt_size=dpkt.ethernet.ETH_LEN_MAX):
- """
- Instantiate a CTRexPktBuilder object
-
- :parameters:
- None
-
- """
- super(CTRexPktBuilder, self).__init__()
- self._packet = None
- self._pkt_by_hdr = {}
- self._pkt_top_layer = None
- self._max_pkt_size = max_pkt_size
- self.vm = CTRexPktBuilder.CTRexVM()
- self.metadata = ""
-
- def clone (self):
- return copy.deepcopy(self)
-
- def add_pkt_layer(self, layer_name, pkt_layer):
- """
- This method adds additional header to the already existing packet
-
- :parameters:
- layer_name: str
- a string representing the name of the layer.
- Example: "l2", "l4_tcp", etc.
-
- pkt_layer : dpkt.Packet obj
- a dpkt object, generally from higher layer, that will be added on top of existing layer.
-
- :raises:
- + :exc:`ValueError`, in case the desired layer_name already exists.
-
- """
- assert isinstance(pkt_layer, dpkt.Packet)
- if layer_name in self._pkt_by_hdr:
- raise ValueError("Given layer name '{0}' already exists.".format(layer_name))
- else:
- dup_pkt = copy.copy(pkt_layer) # using copy of layer to avoid cyclic packets that may lead to infinite loop
- if not self._pkt_top_layer: # this is the first header added
- self._packet = dup_pkt
- else:
- self._pkt_top_layer.data = dup_pkt
- self._pkt_top_layer = dup_pkt
- self._pkt_by_hdr[layer_name] = dup_pkt
- return
-
- def set_ip_layer_addr(self, layer_name, attr, ip_addr, ip_type="ipv4"):
- """
- This method sets the IP address fields of an IP header (source or destination, for both IPv4 and IPv6)
- using a human readable addressing representation.
-
- :parameters:
- layer_name: str
- a string representing the name of the layer.
- Example: "l3_ip", etc.
-
- attr: str
- a string representation of the sub-field to be set:
-
- + "src" for source
- + "dst" for destination
-
- ip_addr: str
- a string representation of the IP address to be set.
- Example: "10.0.0.1" for IPv4, or "5001::DB8:1:3333:1:1" for IPv6
-
- ip_type : str
- a string representation of the IP version to be set:
-
- + "ipv4" for IPv4
- + "ipv6" for IPv6
-
- Default: **ipv4**
-
- :raises:
- + :exc:`ValueError`, in case the desired layer_name is not an IP layer
- + :exc:`KeyError`, in case the desired layer_name does not exists.
-
- """
- try:
- layer = self._pkt_by_hdr[layer_name.lower()]
- if not (isinstance(layer, dpkt.ip.IP) or isinstance(layer, dpkt.ip6.IP6)):
- raise ValueError("The specified layer '{0}' is not of IPv4/IPv6 type.".format(layer_name))
- else:
- decoded_ip = CTRexPktBuilder._decode_ip_addr(ip_addr, ip_type)
- setattr(layer, attr, decoded_ip)
- except KeyError:
- raise KeyError("Specified layer '{0}' doesn't exist on packet.".format(layer_name))
-
- def set_ipv6_layer_addr(self, layer_name, attr, ip_addr):
- """
- This method sets the IPv6 address fields of an IP header (source or destination)
-
- :parameters:
- layer_name: str
- a string representing the name of the layer.
- Example: "l3_ip", etc.
-
- attr: str
- a string representation of the sub-field to be set:
-
- + "src" for source
- + "dst" for destination
-
- ip_addr: str
- a string representation of the IP address to be set.
- Example: "5001::DB8:1:3333:1:1"
-
- :raises:
- + :exc:`ValueError`, in case the desired layer_name is not an IPv6 layer
- + :exc:`KeyError`, in case the desired layer_name does not exists.
-
- """
- self.set_ip_layer_addr(layer_name, attr, ip_addr, ip_type="ipv6")
-
- def set_eth_layer_addr(self, layer_name, attr, mac_addr):
- """
- This method sets the ethernet address fields of an Ethernet header (source or destination)
- using a human readable addressing representation.
-
- :parameters:
- layer_name: str
- a string representing the name of the layer.
- Example: "l2", etc.
-
- attr: str
- a string representation of the sub-field to be set:
- + "src" for source
- + "dst" for destination
-
- mac_addr: str
- a string representation of the MAC address to be set.
- Example: "00:de:34:ef:2e:f4".
-
- :raises:
- + :exc:`ValueError`, in case the desired layer_name is not an Ethernet layer
- + :exc:`KeyError`, in case the desired layer_name does not exists.
-
- """
- try:
- layer = self._pkt_by_hdr[layer_name.lower()]
- if not isinstance(layer, dpkt.ethernet.Ethernet):
- raise ValueError("The specified layer '{0}' is not of Ethernet type.".format(layer_name))
- else:
- decoded_mac = CTRexPktBuilder._decode_mac_addr(mac_addr)
- setattr(layer, attr, decoded_mac)
- except KeyError:
- raise KeyError("Specified layer '{0}' doesn't exist on packet.".format(layer_name))
-
- def set_layer_attr(self, layer_name, attr, val, toggle_bit=False):
- """
- This method enables the user to change a value of a previously defined packet layer.
- This method isn't to be used to set the data attribute of a packet with payload.
- Use :func:`packet_builder.CTRexPktBuilder.set_payload` instead.
-
- :parameters:
- layer_name: str
- a string representing the name of the layer.
- Example: "l2", "l4_tcp", etc.
-
- attr : str
- a string representing the attribute to be changed on desired layer
-
- val :
- value of attribute.
-
- toggle_bit : bool
- Indicating if trying to set a specific bit of a field, such as "do not fragment" bit of IP layer.
-
- Default: **False**
-
- :raises:
- + :exc:`KeyError`, in case of missing layer (the desired layer isn't part of packet)
- + :exc:`ValueError`, in case invalid attribute to the specified layer.
-
- """
- try:
- layer = self._pkt_by_hdr[layer_name.lower()]
- if attr == 'data' and not isinstance(val, dpkt.Packet):
- # Don't allow setting 'data' attribute
- raise ValueError("Set a data attribute with object that is not dpkt.Packet is not allowed using "
- "set_layer_attr method.\nUse set_payload method instead.")
- if hasattr(layer, attr):
- if toggle_bit:
- setattr(layer, attr, val | getattr(layer, attr, 0))
- else:
- setattr(layer, attr, val)
- if attr == 'data':
- # re-evaluate packet from the start, possible broken link between layers
- self._reevaluate_packet(layer_name.lower())
- else:
- raise ValueError("Given attr name '{0}' doesn't exists on specified layer ({1}).".format(layer_name,
- attr))
- except KeyError:
- raise KeyError("Specified layer '{0}' doesn't exist on packet.".format(layer_name))
-
- def set_layer_bit_attr(self, layer_name, attr, val):
- """
- This method enables the user to set the value of a field smaller that 1 Byte in size.
- This method isn't used to set full-sized fields value (>= 1 byte).
- Use :func:`packet_builder.CTRexPktBuilder.set_layer_attr` instead.
-
- :parameters:
- layer_name: str
- a string representing the name of the layer.
- Example: "l2", "l4_tcp", etc.
-
- attr : str
- a string representing the attribute to be set on desired layer
-
- val : int
- value of attribute.
- This value will be set "ontop" of the existing value using bitwise "OR" operation.
-
- .. tip:: It is very useful to use dpkt constants to define the values of these fields.
-
- :raises:
- + :exc:`KeyError`, in case of missing layer (the desired layer isn't part of packet)
- + :exc:`ValueError`, in case invalid attribute to the specified layer.
-
- """
- return self.set_layer_attr(layer_name, attr, val, True)
-
- def set_pkt_payload(self, payload):
- """
- This method sets a payload to the topmost layer of the generated packet.
- This method isn't to be used to set another networking layer to the packet.
- Use :func:`packet_builder.CTRexPktBuilder.set_layer_attr` instead.
-
-
- :parameters:
- payload:
- a payload to be added to the packet at the topmost layer.
- this object cannot be of type dpkt.Packet.
-
- :raises:
- + :exc:`AttributeError`, in case no underlying header to host the payload.
-
- """
- assert isinstance(payload, str)
- try:
- self._pkt_top_layer.data = payload
- except AttributeError:
- raise AttributeError("The so far built packet doesn't contain an option for payload attachment.\n"
- "Make sure to set appropriate underlying header before adding payload")
-
- def load_packet(self, packet):
- """
- This method enables the user to change a value of a previously defined packet layer.
-
- :parameters:
- packet: dpkt.Packet obj
- a dpkt object that represents a packet.
-
-
- :raises:
- + :exc:`CTRexPktBuilder.IPAddressError`, in case invalid ip type option specified.
-
- """
- assert isinstance(packet, dpkt.Packet)
- self._packet = copy.copy(packet)
-
- self._pkt_by_hdr.clear()
- self._pkt_top_layer = self._packet
- # analyze packet to layers
- tmp_layer = self._packet
- while True:
- if isinstance(tmp_layer, dpkt.Packet):
- layer_name = self._gen_layer_name(type(tmp_layer).__name__)
- self._pkt_by_hdr[layer_name] = tmp_layer
- self._pkt_top_layer = tmp_layer
- try:
- # check existence of upper layer
- tmp_layer = tmp_layer.data
- except AttributeError:
- # this is the most upper header
- self._pkt_by_hdr['pkt_final_payload'] = tmp_layer.data
- break
- else:
- self._pkt_by_hdr['pkt_final_payload'] = tmp_layer
- break
- return
-
- def load_packet_from_pcap(self, pcap_path):
- """
- This method loads a pcap file into a parsed packet builder object.
-
- :parameters:
- pcap_path: str
- a path to a pcap file, containing a SINGLE packet.
-
- :raises:
- + :exc:`IOError`, in case provided path doesn't exists.
-
- """
- with open(pcap_path, 'r') as f:
- pcap = dpkt.pcap.Reader(f)
- first_packet = True
- for _, buf in pcap:
- # this is an iterator, can't evaluate the number of files in advance
- if first_packet:
- self.load_packet(dpkt.ethernet.Ethernet(buf))
- else:
- raise ValueError("Provided pcap file contains more than single packet.")
- # arrive here ONLY if pcap contained SINGLE packet
- return
-
- def load_from_stream_obj(self, stream_obj):
- self.load_packet_from_byte_list(stream_obj['packet']['binary'])
-
-
- def load_packet_from_byte_list(self, byte_list):
-
- buf = base64.b64decode(byte_list)
- # thn, load it based on dpkt parsing
- self.load_packet(dpkt.ethernet.Ethernet(buf))
-
- def get_packet(self, get_ptr=False):
- """
- This method provides access to the built packet, as an instance or as a pointer to packet itself.
-
- :parameters:
- get_ptr : bool
- indicate whether to get a reference to packet or a copy.
- Use only in advanced modes
- if set to true, metadata for packet is cleared, and any further modification is not guaranteed.
-
- default value : False
-
- :return:
- + the current packet built by CTRexPktBuilder object.
- + None if packet is empty
-
- """
- if get_ptr:
- self._pkt_by_hdr = {}
- self._pkt_top_layer = None
- return self._packet
- else:
- return copy.copy(self._packet)
-
- def get_packet_length(self):
- return len(self._packet)
-
- def get_layer(self, layer_name):
- """
- This method provides access to a specific layer of the packet, as a **copy of the layer instance**.
-
- :parameters:
- layer_name : str
- the name given to desired layer
-
- :return:
- + a copy of the desired layer of the current packet if exists.
- + None if no such layer
-
- """
- layer = self._pkt_by_hdr.get(layer_name)
- return copy.copy(layer) if layer else None
-
-
- # VM access methods
- def set_vm_ip_range(self, ip_layer_name, ip_field,
- ip_start, ip_end, operation,
- ip_init = None, add_value = 0,
- is_big_endian=True, val_size=4,
- ip_type="ipv4", add_checksum_inst=True,
- split = False):
-
- if ip_field not in ["src", "dst"]:
- raise ValueError("set_vm_ip_range only available for source ('src') or destination ('dst') ip addresses")
- # set differences between IPv4 and IPv6
- if ip_type == "ipv4":
- ip_class = dpkt.ip.IP
- ip_addr_size = val_size if val_size <= 4 else 4
- elif ip_type == "ipv6":
- ip_class = dpkt.ip6.IP6
- ip_addr_size = val_size if val_size <= 8 else 4
- else:
- raise CTRexPktBuilder.IPAddressError()
-
- self._verify_layer_prop(ip_layer_name, ip_class)
- trim_size = ip_addr_size*2
- start_val = int(binascii.hexlify(CTRexPktBuilder._decode_ip_addr(ip_start, ip_type))[-trim_size:], 16)
- end_val = int(binascii.hexlify(CTRexPktBuilder._decode_ip_addr(ip_end, ip_type))[-trim_size:], 16)
-
- if ip_init == None:
- init_val = start_val
- else:
- init_val = int(binascii.hexlify(CTRexPktBuilder._decode_ip_addr(ip_init, ip_type))[-trim_size:], 16)
-
-
- # All validations are done, start adding VM instructions
- flow_var_name = "{layer}__{field}".format(layer=ip_layer_name, field=ip_field)
-
- hdr_offset, field_abs_offset = self._calc_offset(ip_layer_name, ip_field, ip_addr_size)
- self.vm.add_flow_man_inst(flow_var_name, size=ip_addr_size, operation=operation,
- init_value=init_val,
- min_value=start_val,
- max_value=end_val)
- self.vm.add_write_flow_inst(flow_var_name, field_abs_offset)
- self.vm.set_vm_off_inst_field(flow_var_name, "add_value", add_value)
- self.vm.set_vm_off_inst_field(flow_var_name, "is_big_endian", is_big_endian)
- if ip_type == "ipv4" and add_checksum_inst:
- self.vm.add_fix_checksum_inst(self._pkt_by_hdr.get(ip_layer_name), hdr_offset)
-
- if split:
- self.vm.set_split_by_var(flow_var_name)
-
-
- def set_vm_eth_range(self, eth_layer_name, eth_field,
- mac_init, mac_start, mac_end, add_value,
- operation, val_size=4, is_big_endian=False):
- if eth_field not in ["src", "dst"]:
- raise ValueError("set_vm_eth_range only available for source ('src') or destination ('dst') eth addresses")
- self._verify_layer_prop(eth_layer_name, dpkt.ethernet.Ethernet)
- eth_addr_size = val_size if val_size <= 4 else 4
- trim_size = eth_addr_size*2
- init_val = int(binascii.hexlify(CTRexPktBuilder._decode_mac_addr(mac_init))[-trim_size:], 16)
- start_val = int(binascii.hexlify(CTRexPktBuilder._decode_mac_addr(mac_start))[-trim_size:], 16)
- end_val = int(binascii.hexlify(CTRexPktBuilder._decode_mac_addr(mac_end))[-trim_size:], 16)
- # All validations are done, start adding VM instructions
- flow_var_name = "{layer}__{field}".format(layer=eth_layer_name, field=eth_field)
- hdr_offset, field_abs_offset = self._calc_offset(eth_layer_name, eth_field, eth_addr_size)
- self.vm.add_flow_man_inst(flow_var_name, size=8, operation=operation,
- init_value=init_val,
- min_value=start_val,
- max_value=end_val)
- self.vm.add_write_flow_inst(flow_var_name, field_abs_offset)
- self.vm.set_vm_off_inst_field(flow_var_name, "add_value", add_value)
- self.vm.set_vm_off_inst_field(flow_var_name, "is_big_endian", is_big_endian)
-
- def set_vm_custom_range(self, layer_name, hdr_field,
- init_val, start_val, end_val, add_val, val_size,
- operation, is_big_endian=True, range_name="",
- add_checksum_inst=True):
- # verify input validity for init/start/end values
- for val in [init_val, start_val, end_val]:
- if not isinstance(val, int):
- raise ValueError("init/start/end values are expected integers, but received type '{0}'".
- format(type(val)))
- self._verify_layer_prop(layer_name=layer_name, field_name=hdr_field)
- if not range_name:
- range_name = "{layer}__{field}".format(layer=layer_name, field=hdr_field)
- trim_size = val_size*2
- hdr_offset, field_abs_offset = self._calc_offset(layer_name, hdr_field, val_size)
- self.vm.add_flow_man_inst(range_name, size=val_size, operation=operation,
- init_value=init_val,
- min_value=start_val,
- max_value=end_val)
- self.vm.add_write_flow_inst(range_name, field_abs_offset)
- self.vm.set_vm_off_inst_field(range_name, "add_value", add_val)
- self.vm.set_vm_off_inst_field(range_name, "is_big_endian", is_big_endian)
- if isinstance(self._pkt_by_hdr.get(layer_name), dpkt.ip.IP) and add_checksum_inst:
- self.vm.add_fix_checksum_inst(self._pkt_by_hdr.get(layer_name), hdr_offset)
-
- def get_vm_data(self):
- return self.vm.dump()
-
- def compile (self):
- pass
-
- def dump_pkt(self, encode = True):
- """
- Dumps the packet as a decimal array of bytes (each item x gets value between 0-255)
-
- :parameters:
- encode : bool
- Encode using base64. (disable for debug)
-
- Default: **True**
-
- :return:
- + packet representation as array of bytes
-
- :raises:
- + :exc:`CTRexPktBuilder.EmptyPacketError`, in case packet is empty.
-
- """
- if self._packet is None:
- raise CTRexPktBuilder.EmptyPacketError()
-
- if encode:
- return {"binary": base64.b64encode(str(self._packet)),
- "meta": self.metadata}
- return {"binary": str(self._packet),
- "meta": self.metadata}
-
-
- def dump_pkt_to_pcap(self, file_path, ts=None):
- """
- Dumps the packet as a decimal array of bytes (each item x gets value between 0-255)
-
- :parameters:
- file_path : str
- a path (including filename) to which to write to pcap file to.
-
- ts : int
- a timestamp to attach to the packet when dumped to pcap file.
- if ts in None, then time.time() is used to set the timestamp.
-
- Default: **None**
-
- :return:
- None
-
- :raises:
- + :exc:`CTRexPktBuilder.EmptyPacketError`, in case packet is empty.
-
- """
- if self._packet is None:
- raise CTRexPktBuilder.EmptyPacketError()
- try:
- with open(file_path, 'wb') as f:
- pcap_wr = dpkt.pcap.Writer(f)
- pcap_wr.writepkt(self._packet, ts)
- return
- except IOError:
- raise IOError(2, "The provided path could not be accessed")
-
- def get_packet_layers(self, depth_limit=Ellipsis):
- if self._packet is None:
- raise CTRexPktBuilder.EmptyPacketError()
- cur_layer = self._packet
- layer_types = []
- if depth_limit == Ellipsis:
- iterator = itertools.count(1)
- else:
- iterator = xrange(depth_limit)
- for _ in iterator:
- # append current layer type
- if isinstance(cur_layer, dpkt.Packet):
- layer_types.append(type(cur_layer).__name__)
- else:
- # if not dpkt layer, refer as payload
- layer_types.append("PLD")
- # advance to next layer
- if not hasattr(cur_layer, "data"):
- break
- else:
- cur_layer = cur_layer.data
- return layer_types
-
- def export_pkt(self, file_path, link_pcap=False, pcap_name=None, pcap_ts=None):
- pass
-
- # ----- internal methods ----- #
- def _reevaluate_packet(self, layer_name):
- cur_layer = self._packet
- known_layers = set(self._pkt_by_hdr.keys())
- found_layers = set()
- while True:
- pointing_layer_name = self._find_pointing_layer(known_layers, cur_layer)
- found_layers.add(pointing_layer_name)
- if self._pkt_by_hdr[layer_name] is cur_layer:
- self._pkt_top_layer = cur_layer
- disconnected_layers = known_layers.difference(found_layers)
- # remove disconnected layers
- for layer in disconnected_layers:
- self._pkt_by_hdr.pop(layer)
- break
- else:
- cur_layer = cur_layer.data
-
- def _gen_layer_name(self, layer_class_name):
- assert isinstance(layer_class_name, str)
- layer_name = layer_class_name.lower()
- idx = 1
- while True:
- tmp_name = "{name}_{id}".format(name=layer_name, id=idx)
- if tmp_name not in self._pkt_by_hdr:
- return tmp_name
- else:
- idx += 1
-
- def _find_pointing_layer(self, known_layers, layer_obj):
- assert isinstance(known_layers, set)
- for layer in known_layers:
- if self._pkt_by_hdr[layer] is layer_obj:
- return layer
-
- def _calc_offset(self, layer_name, hdr_field, hdr_field_size):
- pkt_header = self._pkt_by_hdr.get(layer_name)
- hdr_offset = len(self._packet) - len(pkt_header)
- inner_hdr_offsets = []
- for field in pkt_header.__hdr__:
- if field[0] == hdr_field:
- field_size = struct.calcsize(field[1])
- if field_size == hdr_field_size:
- break
- elif field_size < hdr_field_size:
- raise CTRexPktBuilder.PacketLayerError(layer_name,
- "The specified field '{0}' size is smaller than given range"
- " size ('{1}')".format(hdr_field, hdr_field_size))
- else:
- inner_hdr_offsets.append(field_size - hdr_field_size)
- break
- else:
- inner_hdr_offsets.append(struct.calcsize(field[1]))
- return hdr_offset, hdr_offset + sum(inner_hdr_offsets)
-
- def _verify_layer_prop(self, layer_name, layer_type=None, field_name=None):
- if layer_name not in self._pkt_by_hdr:
- raise CTRexPktBuilder.PacketLayerError(layer_name)
- pkt_layer = self._pkt_by_hdr.get(layer_name)
- if layer_type:
- # check for layer type
- if not isinstance(pkt_layer, layer_type):
- raise CTRexPktBuilder.PacketLayerTypeError(layer_name, type(pkt_layer), layer_type)
- if field_name and not hasattr(pkt_layer, field_name):
- # check if field exists on certain header
- raise CTRexPktBuilder.PacketLayerError(layer_name, "The specified field '{0}' does not exists on "
- "given packet layer ('{1}')".format(field_name,
- layer_name))
- return
-
- @property
- def payload_gen(self):
- return CTRexPktBuilder.CTRexPayloadGen(self._packet, self._max_pkt_size)
-
- @staticmethod
- def _decode_mac_addr(mac_addr):
- """
- Static method to test for MAC address validity.
-
- :parameters:
- mac_addr : str
- a string representing an MAC address, separated by ':' or '-'.
-
- examples: '00:de:34:ef:2e:f4', '00-de-34-ef-2e-f4
-
- :return:
- + an hex-string representation of the MAC address.
- for example, ip 00:de:34:ef:2e:f4 will return '\x00\xdeU\xef.\xf4'
-
- :raises:
- + :exc:`CTRexPktBuilder.MACAddressError`, in case invalid ip type option specified.
-
- """
- tmp_mac = mac_addr.lower().replace('-', ':')
- if re.match("[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", tmp_mac):
- return binascii.unhexlify(tmp_mac.replace(':', ''))
- # another option for both Python 2 and 3:
- # codecs.decode(tmp_mac.replace(':', ''), 'hex')
- else:
- raise CTRexPktBuilder.MACAddressError()
-
- @staticmethod
- def _decode_ip_addr(ip_addr, ip_type):
- """
- Static method to test for IPv4/IPv6 address validity.
-
- :parameters:
- ip_addr : str
- a string representing an IP address (IPv4/IPv6)
-
- ip_type : str
- The type of IP to be checked.
- Valid types: "ipv4", "ipv6".
-
- :return:
- + an hex-string representation of the ip address.
- for example, ip 1.2.3.4 will return '\x01\x02\x03\x04'
-
- :raises:
- + :exc:`CTRexPktBuilder.IPAddressError`, in case invalid ip type option specified.
-
- """
- if ip_type == "ipv4":
- try:
- return socket.inet_pton(socket.AF_INET, ip_addr)
- except AttributeError: # no inet_pton here, sorry
- # try:
- return socket.inet_aton(ip_addr)
- # except socket.error:
- # return False
- # return ip_addr.count('.') == 3
- except socket.error: # not a valid address
- raise CTRexPktBuilder.IPAddressError()
- elif ip_type == "ipv6":
- try:
- return socket.inet_pton(socket.AF_INET6, ip_addr)
- except socket.error: # not a valid address
- raise CTRexPktBuilder.IPAddressError()
- else:
- raise CTRexPktBuilder.IPAddressError()
-
- # ------ private classes ------ #
- class CTRexPayloadGen(object):
-
- def __init__(self, packet_ref, max_pkt_size):
- self._pkt_ref = packet_ref
- self._max_pkt_size = max_pkt_size
-
- def gen_random_str(self):
- gen_length = self._calc_gen_length()
- # return a string of size gen_length bytes, to pad the packet to its max_size
- return ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits)
- for _ in range(gen_length))
-
- def gen_repeat_ptrn(self, ptrn_to_repeat):
- gen_length = self._calc_gen_length()
- if isinstance(ptrn_to_repeat, str):
- # generate repeated string
- return (ptrn_to_repeat * (gen_length/len(ptrn_to_repeat) + 1))[:gen_length]
- elif isinstance(ptrn_to_repeat, int):
- ptrn = binascii.unhexlify(hex(ptrn_to_repeat)[2:])
- return (ptrn * (gen_length/len(ptrn) + 1))[:gen_length]
- elif isinstance(ptrn_to_repeat, tuple):
- if not all((isinstance(x, int) and (x < 255) and (x >= 0))
- for x in ptrn_to_repeat):
- raise ValueError("All numbers in tuple must be in range 0 <= number <= 255 ")
- # generate repeated sequence
- to_pack = (ptrn_to_repeat * (gen_length/len(ptrn_to_repeat) + 1))[:gen_length]
- return struct.pack('B'*gen_length, *to_pack)
- else:
- raise ValueError("Given ptrn_to_repeat argument type ({0}) is illegal.".
- format(type(ptrn_to_repeat)))
-
- def _calc_gen_length(self):
- return self._max_pkt_size - len(self._pkt_ref)
-
- class CTRexVM(object):
- """
- This class defines the TRex VM which represents how TRex will regenerate packets.
- The packets will be regenerated based on the built packet containing this class.
- """
- InstStore = namedtuple('InstStore', ['type', 'inst'])
-
- def __init__(self):
- """
- Instantiate a CTRexVM object
-
- :parameters:
- None
- """
- super(CTRexPktBuilder.CTRexVM, self).__init__()
- self.vm_variables = {}
- self._inst_by_offset = {} # this data structure holds only offset-related instructions, ordered in tuples
- self._off_inst_by_name = {}
- self.split_by_var = ''
-
- def set_vm_var_field(self, var_name, field_name, val, offset_inst=False):
- """
- Set VM variable field. Only existing variables are allowed to be changed.
-
- :parameters:
- var_name : str
- a string representing the name of the VM variable to be changed.
- field_name : str
- a string representing the field name of the VM variable to be changed.
- val :
- a value to be applied to field_name field of the var_name VM variable.
-
- :raises:
- + :exc:`KeyError`, in case invalid var_name has been specified.
- + :exc:`CTRexPktBuilder.VMVarFieldTypeError`, in case mismatch between `val` and allowed type.
- + :exc:`CTRexPktBuilder.VMVarValueError`, in case val isn't one of allowed options of field_name.
-
- """
- if offset_inst:
- return self._off_inst_by_name[var_name].inst.set_field(field_name, val)
- else:
- return self.vm_variables[var_name].set_field(field_name, val)
-
- def set_vm_off_inst_field(self, var_name, field_name, val):
- return self.set_vm_var_field(var_name, field_name, val, True)
-
- def add_flow_man_inst(self, name, **kwargs):
- """
- Adds a new flow manipulation object to the VM instance.
-
- :parameters:
- name : str
- name of the manipulation, must be distinct.
- Example: 'source_ip_change'
-
- **kwargs** : dict
- optional, set flow_man fields on initialization (key = field_name, val = field_val).
- Must be used with legit fields, see :func:`CTRexPktBuilder.CTRexVM.CTRexVMVariable.set_field`.
-
- :return:
- None
-
- :raises:
- + :exc:`CTRexPktBuilder.VMVarNameExistsError`, in case of desired flow_man name already taken.
- + Exceptions from :func:`CTRexPktBuilder.CTRexVM.CTRexVMVariable.set_field` method.
- Will rise when VM variables were misconfiguration.
- """
- if name not in self.vm_variables:
- self.vm_variables[name] = self.CTRexVMFlowVariable(name)
- # try configuring VM instruction attributes
- for (field, value) in kwargs.items():
- self.vm_variables[name].set_field(field, value)
- else:
- raise CTRexPktBuilder.VMVarNameExistsError(name)
-
- def add_fix_checksum_inst(self, linked_ipv4_obj, offset_to_obj=14, name=None):
- # check if specified linked_ipv4_obj is indeed an ipv4 object
- if not (isinstance(linked_ipv4_obj, dpkt.ip.IP)):
- raise ValueError("The provided layer object is not of IPv4.")
- if not name:
- name = "checksum_{off}".format(off=offset_to_obj) # name will override previous checksum inst, OK
- new_checksum_inst = self.CTRexVMChecksumInst(name, offset_to_obj)
- # store the checksum inst in the end of the IP header (20 Bytes long)
- inst = self.InstStore('checksum', new_checksum_inst)
- self._inst_by_offset[offset_to_obj + 20] = inst
- self._off_inst_by_name[name] = inst
-
- def add_write_flow_inst(self, name, pkt_offset, **kwargs):
- if name not in self.vm_variables:
- raise KeyError("Trying to add write_flow_var instruction to a not-exists VM flow variable ('{0}')".
- format(name))
- else:
- new_write_inst = self.CTRexVMWrtFlowVarInst(name, pkt_offset)
- # try configuring VM instruction attributes
- for (field, value) in kwargs.items():
- new_write_inst.set_field(field, value)
- # add the instruction to the date-structure
- inst = self.InstStore('write', new_write_inst)
- self._inst_by_offset[pkt_offset] = inst
- self._off_inst_by_name[name] = inst
-
- def load_flow_man(self, flow_obj):
- """
- Loads an outer VM variable (instruction) into current VM.
- The outer VM variable must contain different name than existing VM variables currently registered on VM.
-
- :parameters:
- flow_obj : CTRexVMVariable
- a CTRexVMVariable to be loaded into VM variable sets.
-
- :return:
- list holds variables data of VM
-
- """
- assert isinstance(flow_obj, CTRexPktBuilder.CTRexVM.CTRexVMFlowVariable)
- if flow_obj.name not in self.vm_variables.keys():
- self.vm_variables[flow_obj.name] = flow_obj
- else:
- raise CTRexPktBuilder.VMVarNameExistsError(flow_obj.name)
-
- def set_split_by_var (self, var_name):
- if var_name not in self.vm_variables:
- raise KeyError("cannot set split by var to an unknown VM var ('{0}')".
- format(var_name))
-
- self.split_by_var = var_name
-
- def dump(self):
- """
- dumps a VM variables (instructions) and split_by_var into a dict data structure.
-
- :parameters:
- None
-
- :return:
- dict with VM instructions as list and split_by_var as str
-
- """
-
- # at first, dump all CTRexVMFlowVariable instructions
- inst_array = [var.dump() if hasattr(var, 'dump') else var
- for key, var in self.vm_variables.items()]
- # then, dump all the CTRexVMWrtFlowVarInst and CTRexVMChecksumInst instructions
- inst_array += [self._inst_by_offset.get(key).inst.dump()
- for key in sorted(self._inst_by_offset)]
- return {'instructions': inst_array, 'split_by_var': self.split_by_var}
-
- class CVMAbstractInstruction(object):
- __metaclass__ = ABCMeta
-
- def __init__(self, name):
- """
- Instantiate a CTRexVMVariable object
-
- :parameters:
- name : str
- a string representing the name of the VM variable.
- """
- super(CTRexPktBuilder.CTRexVM.CVMAbstractInstruction, self).__init__()
- self.name = name
-
- def set_field(self, field_name, val):
- if not hasattr(self, field_name):
- raise CTRexPktBuilder.VMFieldNameError(field_name)
- setattr(self, field_name, val)
-
- @abstractmethod
- def dump(self):
- pass
-
- class CTRexVMFlowVariable(CVMAbstractInstruction):
- """
- This class defines a single VM variable to be used as part of CTRexVar object.
- """
- VALID_SIZE = [1, 2, 4, 8] # size in Bytes
- VALID_OPERATION = ["inc", "dec", "random"]
-
- def __init__(self, name):
- """
- Instantiate a CTRexVMVariable object
-
- :parameters:
- name : str
- a string representing the name of the VM variable.
- """
- super(CTRexPktBuilder.CTRexVM.CTRexVMFlowVariable, self).__init__(name)
- # self.name = name
- self.size = 4
- self.big_endian = True
- self.operation = "inc"
- # self.split_by_core = False
- self.init_value = 1
- self.min_value = self.init_value
- self.max_value = self.init_value
-
- def set_field(self, field_name, val):
- """
- Set VM variable field. Only existing variables are allowed to be changed.
-
- :parameters:
- field_name : str
- a string representing the field name of the VM variable to be changed.
- val :
- a value to be applied to field_name field of the var_name VM variable.
-
- :return:
- None
-
- :raises:
- + :exc:`CTRexPktBuilder.VMVarNameError`, in case of illegal field name.
- + :exc:`CTRexPktBuilder.VMVarFieldTypeError`, in case mismatch between `val` and allowed type.
- + :exc:`CTRexPktBuilder.VMVarValueError`, in case val isn't one of allowed options of field_name.
-
- """
- if not hasattr(self, field_name):
- raise CTRexPktBuilder.VMFieldNameError(field_name)
- elif field_name == "size":
- if type(val) != int:
- raise CTRexPktBuilder.VMFieldTypeError("size", int)
- elif val not in self.VALID_SIZE:
- raise CTRexPktBuilder.VMFieldValueError("size", self.VALID_SIZE)
- elif field_name in ["init_value", "min_value", "max_value"]:
- if type(val) != int:
- raise CTRexPktBuilder.VMFieldTypeError(field_name, int)
- elif field_name == "operation":
- if type(val) != str:
- raise CTRexPktBuilder.VMFieldTypeError("operation", str)
- elif val not in self.VALID_OPERATION:
- raise CTRexPktBuilder.VMFieldValueError("operation", self.VALID_OPERATION)
- # elif field_name == "split_by_core":
- # val = bool(val)
- # update field value on success
- setattr(self, field_name, val)
-
- def dump(self):
- """
- dumps a variable fields in a dictionary data structure.
-
- :parameters:
- None
-
- :return:
- dictionary holds variable data of VM variable
-
- """
- return {"type": "flow_var", # VM variable dump always refers to manipulate instruction.
- "name": self.name,
- "size": self.size,
- "op": self.operation,
- # "split_by_core": self.split_by_core,
- "init_value": self.init_value,
- "min_value": self.min_value,
- "max_value": self.max_value}
-
- class CTRexVMChecksumInst(CVMAbstractInstruction):
-
- def __init__(self, name, offset):
- """
- Instantiate a CTRexVMChecksumInst object
-
- :parameters:
- name : str
- a string representing the name of the VM variable.
- """
- super(CTRexPktBuilder.CTRexVM.CTRexVMChecksumInst, self).__init__(name)
- self.pkt_offset = offset
-
- def dump(self):
- return {"type": "fix_checksum_ipv4",
- "pkt_offset": int(self.pkt_offset)}
-
- class CTRexVMWrtFlowVarInst(CVMAbstractInstruction):
-
- def __init__(self, name, pkt_offset):
- """
- Instantiate a CTRexVMWrtFlowVarInst object
-
- :parameters:
- name : str
- a string representing the name of the VM variable.
- """
- super(CTRexPktBuilder.CTRexVM.CTRexVMWrtFlowVarInst, self).__init__(name)
- self.pkt_offset = int(pkt_offset)
- self.add_value = 0
- self.is_big_endian = False
-
- def set_field(self, field_name, val):
- if not hasattr(self, field_name):
- raise CTRexPktBuilder.VMFieldNameError(field_name)
- elif field_name == 'pkt_offset':
- raise ValueError("pkt_offset value cannot be changed")
- cur_attr_type = type(getattr(self, field_name))
- if cur_attr_type == type(val):
- setattr(self, field_name, val)
- else:
- CTRexPktBuilder.VMFieldTypeError(field_name, cur_attr_type)
-
- def dump(self):
- return {"type": "write_flow_var",
- "name": self.name,
- "pkt_offset": self.pkt_offset,
- "add_value": int(self.add_value),
- "is_big_endian": bool(self.is_big_endian)
- }
-
- class CTRexVMChecksumInst(CVMAbstractInstruction):
-
- def __init__(self, name, offset):
- """
- Instantiate a CTRexVMChecksumInst object
-
- :parameters:
- name : str
- a string representing the name of the VM variable.
- """
- super(CTRexPktBuilder.CTRexVM.CTRexVMChecksumInst, self).__init__(name)
- self.pkt_offset = offset
-
- def dump(self):
- return {"type": "fix_checksum_ipv4",
- "pkt_offset": int(self.pkt_offset)}
-
- class CTRexVMWrtFlowVarInst(CVMAbstractInstruction):
-
- def __init__(self, name, pkt_offset):
- """
- Instantiate a CTRexVMWrtFlowVarInst object
-
- :parameters:
- name : str
- a string representing the name of the VM variable.
- """
- super(CTRexPktBuilder.CTRexVM.CTRexVMWrtFlowVarInst, self).__init__(name)
- self.pkt_offset = int(pkt_offset)
- self.add_value = 0
- self.is_big_endian = False
-
- def set_field(self, field_name, val):
- if not hasattr(self, field_name):
- raise CTRexPktBuilder.VMFieldNameError(field_name)
- elif field_name == 'pkt_offset':
- raise ValueError("pkt_offset value cannot be changed")
- cur_attr_type = type(getattr(self, field_name))
- if cur_attr_type == type(val):
- setattr(self, field_name, val)
- else:
- CTRexPktBuilder.VMFieldTypeError(field_name, cur_attr_type)
-
- def dump(self):
- return {"type": "write_flow_var",
- "name": self.name,
- "pkt_offset": self.pkt_offset,
- "add_value": int(self.add_value),
- "is_big_endian": bool(self.is_big_endian)
- }
-
- class CPacketBuildException(Exception):
- """
- This is the general Packet Building error exception class.
- """
- def __init__(self, code, message):
- self.code = code
- self.message = message
-
- def __str__(self):
- return self.__repr__()
-
- def __repr__(self):
- return u"[errcode:%r] %r" % (self.code, self.message)
-
- class EmptyPacketError(CPacketBuildException):
- """
- This exception is used to indicate an error caused by operation performed on an empty packet.
- """
- def __init__(self, message=''):
- self._default_message = 'Illegal operation on empty packet.'
- self.message = message or self._default_message
- super(CTRexPktBuilder.EmptyPacketError, self).__init__(-10, self.message)
-
- class IPAddressError(CPacketBuildException):
- """
- This exception is used to indicate an error on the IP addressing part of the packet.
- """
- def __init__(self, message=''):
- self._default_message = 'Illegal type or value of IP address has been provided.'
- self.message = message or self._default_message
- super(CTRexPktBuilder.IPAddressError, self).__init__(-11, self.message)
-
- class MACAddressError(CPacketBuildException):
- """
- This exception is used to indicate an error on the MAC addressing part of the packet.
- """
- def __init__(self, message=''):
- self._default_message = 'Illegal MAC address has been provided.'
- self.message = message or self._default_message
- super(CTRexPktBuilder.MACAddressError, self).__init__(-12, self.message)
-
- class PacketLayerError(CPacketBuildException):
- """
- This exception is used to indicate an error caused by operation performed on an non-exists layer of the packet.
- """
- def __init__(self, name, message=''):
- self._default_message = "The given packet layer name ({0}) does not exists.".format(name)
- self.message = message or self._default_message
- super(CTRexPktBuilder.PacketLayerError, self).__init__(-13, self.message)
-
- class PacketLayerTypeError(CPacketBuildException):
- """
- This exception is used to indicate an error caused by operation performed on an non-exists layer of the packet.
- """
- def __init__(self, name, layer_type, ok_type, message=''):
- self._default_message = "The type of packet layer {layer_name} is of type {layer_type}, " \
- "and not of the expected {allowed_type}.".format(layer_name=name,
- layer_type=layer_type,
- allowed_type=ok_type.__name__)
- self.message = message or self._default_message
- super(CTRexPktBuilder.PacketLayerTypeError, self).__init__(-13, self.message)
-
- class VMVarNameExistsError(CPacketBuildException):
- """
- This exception is used to indicate a duplicate usage of VM variable.
- """
- def __init__(self, name, message=''):
- self._default_message = 'The given VM name ({0}) already exists as part of the stream.'.format(name)
- self.message = message or self._default_message
- super(CTRexPktBuilder.VMVarNameExistsError, self).__init__(-21, self.message)
-
- class VMFieldNameError(CPacketBuildException):
- """
- This exception is used to indicate that an undefined VM var field name has been accessed.
- """
- def __init__(self, name, message=''):
- self._default_message = "The given VM field name ({0}) is not defined and isn't legal.".format(name)
- self.message = message or self._default_message
- super(CTRexPktBuilder.VMFieldNameError, self).__init__(-22, self.message)
-
- class VMFieldTypeError(CPacketBuildException):
- """
- This exception is used to indicate an illegal value has type has been given to VM variable field.
- """
- def __init__(self, name, ok_type, message=''):
- self._default_message = "The desired value of field {field_name} is of type {field_type}, " \
- "and not of the allowed {allowed_type}.".format(field_name=name,
- field_type=type(name).__name__,
- allowed_type=ok_type.__name__)
- self.message = message or self._default_message
- super(CTRexPktBuilder.VMFieldTypeError, self).__init__(-31, self.message)
-
- class VMFieldValueError(CPacketBuildException):
- """
- This exception is used to indicate an error an illegal value has been assigned to VM variable field.
- """
- def __init__(self, name, ok_opts, message=''):
- self._default_message = "The desired value of field {field_name} is illegal.\n" \
- "The only allowed options are: {allowed_opts}.".format(field_name=name,
- allowed_opts=ok_opts)
- self.message = message or self._default_message
- super(CTRexPktBuilder.VMFieldValueError, self).__init__(-32, self.message)
-
-
-if __name__ == "__main__":
- pass
diff --git a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
index 825d6fc9..776a51a7 100755
--- a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
+++ b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py
@@ -16,7 +16,6 @@ limitations under the License.
"""
import traceback
import sys
-import external_packages
import yaml
diff --git a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml b/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
deleted file mode 100755
index 9325a0e4..00000000
--- a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml
+++ /dev/null
@@ -1,124 +0,0 @@
-##############################################################
-#### TRex RPC stream list default values ####
-##############################################################
-
-# this document is based on TRex RPC server spec and its fields:
-# http://trex-tgn.cisco.com/trex/doc/trex_rpc_server_spec.html
-
-### HOW TO READ THIS FILE
-# 1. Each key represents an object type
-# 2. Each value can be either a value field or another object
-# 2.1. If a value field, read as:
-# + type: type of field
-# + has_default: if the value has any default
-# + default: the default value (Only appears if has_default field is 'YES')
-# 2.2. If an object type, jump to corresponding object key.
-# 3. If an object has more than one instance type, another layer with the type shall be added.
-# For example, 'mode' object has 3 types: 'continuous', 'single_burst', 'multi_burst'
-# So, 3 mode objects will be defined, named:
-# - mode['continuous']
-# - mode['single_burst']
-# - mode['multi_burst']
-# In this case, there's no default for the 'type' field on the object
-# 4. Some values has 'multiply' property attached.
-# In such case, the loaded value will be multiplied by the multiplier
-# For example, if the mode's 'pps' field value is 10, and its multiplier is 5,
-# the loaded pps value will be 10*5=50
-# 5. Any object type must be listed by the user, even if all its field are defaults.
-# The most basic option would be to declare the object with "[]", which stands for empty object in YAML syntax.
-
-
-stream:
- enabled:
- type: boolean
- has_default: YES
- default: True
- self_start:
- type: boolean
- has_default: YES
- default: True
- isg:
- type: [int, double, string]
- has_default: YES
- default: 0.0
- next_stream_id:
- type: string # string to allow naming binding
- has_default: YES
- default: -1 # no next streams
- packet:
- type: object
- mode:
- type: object
- vm:
- type: object
- rx_stats:
- type: object
-
-packet:
- binary:
- type: [array,string]
- has_default: NO
- meta:
- type: string
- has_default: YES
- default: ""
-
-mode:
- continuous:
- pps:
- type: [int, double]
- has_default: NO
- multiply: YES
- single_burst:
- pps:
- type: [int, double]
- has_default: NO
- multiply: YES
- total_pkts:
- type: int
- has_default: NO
- multi_burst:
- pps:
- type: [int, double]
- has_default: NO
- multiply: YES
- pkts_per_burst:
- type: int
- has_default: NO
- ibg:
- type: [int, double, string]
- has_default: YES
- default: 100.0
- count:
- type: int
- has_default: YES
- default: 0 # loop forever
-
-rx_stats:
- enabled:
- type: boolean
- has_default: YES
- default: False
- stream_id:
- type: string
- has_default: YES
- default: False # use related stream_id
- seq_enabled:
- type: boolean
- has_default: YES
- default: False
- latency_enabled:
- type: boolean
- has_default: YES
- default: False
-
-vm:
- instructions:
- type: array
- has_default: YES
- default: []
- split_by_var:
- type: string
- has_default: YES
- default: ""
-
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
deleted file mode 100755
index c1f1bfa6..00000000
--- a/scripts/automation/trex_control_plane/common/trex_streams.py
+++ /dev/null
@@ -1,526 +0,0 @@
-#!/router/bin/python
-
-import external_packages
-from client_utils.packet_builder_interface import CTrexPktBuilderInterface
-from client_utils.packet_builder import CTRexPktBuilder
-from collections import OrderedDict, namedtuple
-from client_utils.yaml_utils import *
-import trex_stl_exceptions
-import dpkt
-import struct
-import copy
-import os
-import random
-import yaml
-import base64
-
-StreamPack = namedtuple('StreamPack', ['stream_id', 'stream'])
-LoadedStreamList = namedtuple('LoadedStreamList', ['name', 'loaded', 'compiled'])
-
-class CStreamList(object):
-
- def __init__(self):
- self.streams_list = OrderedDict()
- self.yaml_loader = CTRexYAMLLoader(os.path.join(os.path.dirname(os.path.realpath(__file__)),
- "rpc_defaults.yaml"))
-
- def generate_numbered_name (self, name):
- prefix = name.rstrip('01234567890')
- suffix = name[len(prefix):]
- if suffix == "":
- n = "_1"
- else:
- n = int(suffix) + 1
- return prefix + str(n)
-
- def append_stream(self, name, stream_obj):
- assert isinstance(stream_obj, CStream)
-
- # if name exists simply add numbered suffix to it
- while name in self.streams_list:
- name = self.generate_numbered_name(name)
-
- self.streams_list[name]=stream_obj
- return name
-
- def remove_stream(self, name):
- popped = self.streams_list.pop(name)
- if popped:
- for stream_name, stream in self.streams_list.items():
- if stream.next_stream_id == name:
- stream.next_stream_id = -1
- try:
- rx_stats_stream = getattr(stream.rx_stats, "stream_id")
- if rx_stats_stream == name:
- # if a referenced stream of rx_stats object deleted, revert to rx stats of current stream
- setattr(stream.rx_stats, "stream_id", stream_name)
- except AttributeError as e:
- continue #
- return popped
-
- def export_to_yaml(self, file_path):
- raise NotImplementedError("export_to_yaml method is not implemented, yet")
-
- def load_yaml(self, file_path, multiplier=1):
- # clear all existing streams linked to this object
- self.streams_list.clear()
- streams_data = load_yaml_to_obj(file_path)
- assert isinstance(streams_data, list)
- new_streams_data = []
- for stream in streams_data:
- stream_name = stream.get("name")
- raw_stream = stream.get("stream")
- if not stream_name or not raw_stream:
- raise ValueError("Provided stream is not according to convention."
- "Each stream must be provided as two keys: 'name' and 'stream'. "
- "Provided item was:\n {stream}".format(stream))
- new_stream_data = self.yaml_loader.validate_yaml(raw_stream,
- "stream",
- multiplier= multiplier)
- new_streams_data.append(new_stream_data)
- new_stream_obj = CStream()
- new_stream_obj.load_data(**new_stream_data)
- self.append_stream(stream_name, new_stream_obj)
- return new_streams_data
-
- def compile_streams(self):
- # first, assign an id to each stream
- stream_ids = {}
- for idx, stream_name in enumerate(self.streams_list):
- stream_ids[stream_name] = idx
-
- # next, iterate over the streams and transform them from working with names to ids.
- # with that build a new dict with old stream_name as the key, and StreamPack as the stored value
- compiled_streams = {}
- for stream_name, stream in self.streams_list.items():
- tmp_stream = CStreamList._compile_single_stream(stream_name, stream, stream_ids)
- compiled_streams[stream_name] = StreamPack(stream_ids.get(stream_name),
- tmp_stream)
- return compiled_streams
-
- @staticmethod
- def _compile_single_stream(stream_name, stream, id_dict):
- # copy the old stream to temporary one, no change to class attributes
- tmp_stream = copy.copy(stream)
- next_stream_id = id_dict.get(getattr(tmp_stream, "next_stream_id"), -1)
- try:
- rx_stats_stream_id = id_dict.get(getattr(tmp_stream.rx_stats, "stream_id"),
- id_dict.get(stream_name))
- except AttributeError as e:
- rx_stats_stream_id = id_dict.get(stream_name)
- # assign resolved values to stream object
- tmp_stream.next_stream_id = next_stream_id
- tmp_stream.rx_stats.stream_id = rx_stats_stream_id
- return tmp_stream
-
-
-class CRxStats(object):
-
- FIELDS = ["seq_enabled", "latency_enabled", "stream_id"]
- def __init__(self, enabled=False, **kwargs):
- self.enabled = bool(enabled)
- for field in CRxStats.FIELDS:
- setattr(self, field, kwargs.get(field, False))
-
- def dump(self):
- if self.enabled:
- dump = {"enabled": True}
- dump.update({k: getattr(self, k)
- for k in CRxStats.FIELDS}
- )
- return dump
- else:
- return {"enabled": False}
-
-
-
-class CTxMode(object):
- """docstring for CTxMode"""
- GENERAL_FIELDS = ["type", "pps"]
- FIELDS = {"continuous": [],
- "single_burst": ["total_pkts"],
- "multi_burst": ["pkts_per_burst", "ibg", "count"]}
-
- def __init__(self, type, pps=0, **kwargs):
- self._MODES = CTxMode.FIELDS.keys()
- self.type = type
- self.pps = pps
- for field in CTxMode.FIELDS.get(self.type):
- setattr(self, field, kwargs.get(field, 0))
-
- @property
- def type(self):
- return self._type
-
- @type.setter
- def type(self, type):
- if type not in self._MODES:
- raise ValueError("Unknown TX mode ('{0}')has been initialized.".format(type))
- self._type = type
- self._reset_fields()
-
- def dump(self):
- dump = ({k: getattr(self, k)
- for k in CTxMode.GENERAL_FIELDS
- })
- dump.update({k: getattr(self, k)
- for k in CTxMode.FIELDS.get(self.type)
- })
- return dump
-
- def _reset_fields(self):
- for field in CTxMode.FIELDS.get(self.type):
- setattr(self, field, 0)
-
-
-class CStream(object):
- """docstring for CStream"""
-
- FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"]
-
- def __init__(self):
- self.is_loaded = False
- self._is_compiled = False
- self._pkt_bld_obj = CTRexPktBuilder()
- for field in CStream.FIELDS:
- setattr(self, field, None)
-
-
- def load_data(self, **kwargs):
- try:
- for k in CStream.FIELDS:
- if k == "rx_stats":
- rx_stats_data = kwargs[k]
- if isinstance(rx_stats_data, dict):
- setattr(self, k, CRxStats(**rx_stats_data))
- elif isinstance(rx_stats_data, CRxStats):
- setattr(self, k, rx_stats_data)
- elif k == "mode":
- tx_mode = kwargs[k]
- if isinstance(tx_mode, dict):
- setattr(self, k, CTxMode(**tx_mode))
- elif isinstance(tx_mode, CTxMode):
- setattr(self, k, tx_mode)
- elif k == "packet":
- if isinstance(kwargs[k], CTRexPktBuilder):
- if "vm" not in kwargs:
- self.load_packet_obj(kwargs[k])
- break # vm field check is skipped
- else:
- raise ValueError("When providing packet object with a CTRexPktBuilder, vm parameter "
- "should not be supplied")
- else:
- binary = kwargs[k]["binary"]
- if isinstance(binary, str):
-
- # TODO: load to _pkt_bld_obj also when passed as byte array!
- if binary.endswith(".pcap"):
- self._pkt_bld_obj.load_packet_from_pcap(binary)
- self._pkt_bld_obj.metadata = kwargs[k]["meta"]
- self.packet = self._pkt_bld_obj.dump_pkt()
- else:
- self.packet = {}
- self.packet['binary'] = binary
- self.packet['meta'] = ""
-
- else:
- raise ValueError("Packet binary attribute has been loaded with unsupported value."
- "Supported values are reference to pcap file with SINGLE packet, "
- "or a list of unsigned-byte integers")
- else:
- setattr(self, k, kwargs[k])
- self.is_loaded = True
- except KeyError as e:
- cause = e.args[0]
- raise KeyError("The attribute '{0}' is missing as a field of the CStream object.\n"
- "Loaded data must contain all of the following fields: {1}".format(cause, CStream.FIELDS))
-
- def load_packet_obj(self, packet_obj):
- assert isinstance(packet_obj, CTRexPktBuilder)
- self.packet = packet_obj.dump_pkt()
- self.vm = packet_obj.get_vm_data()
-
- def load_packet_from_pcap(self, pcap_path, metadata=''):
- with open(pcap_path, 'r') as f:
- pcap = dpkt.pcap.Reader(f)
- first_packet = True
- for _, buf in pcap:
- # this is an iterator, can't evaluate the number of files in advance
- if first_packet:
- self.packet = {"binary": [struct.unpack('B', buf[i:i+1])[0] # represent data as list of 0-255 ints
- for i in range(0, len(buf))],
- "meta": metadata} # meta data continues without a change.
- first_packet = False
- else:
- raise ValueError("Provided pcap file contains more than single packet.")
- # arrive here ONLY if pcap contained SINGLE packet
- return
-
-
- def dump(self):
- if self.is_loaded:
- dump = {}
- for key in CStream.FIELDS:
- try:
- dump[key] = getattr(self, key).dump() # use dump() method of compound object, such TxMode
- except AttributeError:
- dump[key] = getattr(self, key)
- return dump
- else:
- raise RuntimeError("CStream object isn't loaded with data. Use 'load_data' method.")
-
- def get_stream_layers(self, depth_limit=Ellipsis):
- stream_layers = self._pkt_bld_obj.get_packet_layers(depth_limit)
- return "/".join(stream_layers)
-
-
-
-# describes a stream DB
-class CStreamsDB(object):
-
- def __init__(self):
- self.stream_packs = {}
-
- def load_yaml_file(self, filename):
-
- stream_pack_name = filename
- if stream_pack_name in self.get_loaded_streams_names():
- self.remove_stream_packs(stream_pack_name)
-
- stream_list = CStreamList()
- loaded_obj = stream_list.load_yaml(filename)
-
- try:
- compiled_streams = stream_list.compile_streams()
- rc = self.load_streams(LoadedStreamList(stream_pack_name,
- loaded_obj,
- [StreamPack(v.stream_id, v.stream.dump())
- for k, v in compiled_streams.items()]))
- except Exception as e:
- return None
-
-
- return self.get_stream_pack(stream_pack_name)
-
- def load_streams(self, LoadedStreamList_obj):
- if LoadedStreamList_obj.name in self.stream_packs:
- return False
- else:
- self.stream_packs[LoadedStreamList_obj.name] = LoadedStreamList_obj
- return True
-
- def remove_stream_packs(self, *names):
- removed_streams = []
- for name in names:
- removed = self.stream_packs.pop(name)
- if removed:
- removed_streams.append(name)
- return removed_streams
-
- def clear(self):
- self.stream_packs.clear()
-
- def get_loaded_streams_names(self):
- return self.stream_packs.keys()
-
- def stream_pack_exists (self, name):
- return name in self.get_loaded_streams_names()
-
- def get_stream_pack(self, name):
- if not self.stream_pack_exists(name):
- return None
- else:
- return self.stream_packs.get(name)
-
-
-########################### Simple Streams ###########################
-from trex_stl_exceptions import *
-
-# base class for TX mode
-class STLTXMode(object):
- def __init__ (self):
- self.fields = {}
-
- def to_json (self):
- return self.fields
-
-
-# continuous mode
-class STLTXCont(STLTXMode):
-
- def __init__ (self, pps = 1):
-
- if not isinstance(pps, (int, float)):
- raise STLArgumentError('pps', pps)
-
- super(STLTXCont, self).__init__()
-
- self.fields['type'] = 'continuous'
- self.fields['pps'] = pps
-
-
-# single burst mode
-class STLTXSingleBurst(STLTXMode):
-
- def __init__ (self, pps = 1, total_pkts = 1):
-
- if not isinstance(pps, (int, float)):
- raise STLArgumentError('pps', pps)
-
- if not isinstance(total_pkts, int):
- raise STLArgumentError('total_pkts', total_pkts)
-
- super(STLTXSingleBurst, self).__init__()
-
- self.fields['type'] = 'single_burst'
- self.fields['pps'] = pps
- self.fields['total_pkts'] = total_pkts
-
-
-# multi burst mode
-class STLTXMultiBurst(STLTXMode):
-
- def __init__ (self,
- pps = 1,
- pkts_per_burst = 1,
- ibg = 0.0,
- count = 1):
-
- if not isinstance(pps, (int, float)):
- raise STLArgumentError('pps', pps)
-
- if not isinstance(pkts_per_burst, int):
- raise STLArgumentError('pkts_per_burst', pkts_per_burst)
-
- if not isinstance(ibg, (int, float)):
- raise STLArgumentError('ibg', ibg)
-
- if not isinstance(count, int):
- raise STLArgumentError('count', count)
-
- super(STLTXMultiBurst, self).__init__()
-
- self.fields['type'] = 'multi_burst'
- self.fields['pps'] = pps
- self.fields['pkts_per_burst'] = pkts_per_burst
- self.fields['ibg'] = ibg
- self.fields['count'] = count
-
-
-class STLStream(object):
-
- def __init__ (self,
- packet,
- mode = STLTXCont(1),
- enabled = True,
- self_start = True,
- isg = 0.0,
- rx_stats = None,
- next_stream_id = -1,
- stream_id = None):
-
- # type checking
- if not isinstance(mode, STLTXMode):
- raise STLArgumentError('mode', mode)
-
- if not isinstance(packet, CTrexPktBuilderInterface):
- raise STLArgumentError('packet', packet)
-
- if not isinstance(enabled, bool):
- raise STLArgumentError('enabled', enabled)
-
- if not isinstance(self_start, bool):
- raise STLArgumentError('self_start', self_start)
-
- if not isinstance(isg, (int, float)):
- raise STLArgumentError('isg', isg)
-
- if (type(mode) == STLTXCont) and (next_stream_id != -1):
- raise STLError("continuous stream cannot have a next stream ID")
-
- # use a random 31 bit for ID
- self.stream_id = stream_id if stream_id is not None else random.getrandbits(31)
-
- self.fields = {}
-
- # basic fields
- self.fields['enabled'] = enabled
- self.fields['self_start'] = self_start
- self.fields['isg'] = isg
-
- self.fields['next_stream_id'] = next_stream_id
-
- # mode
- self.fields['mode'] = mode.to_json()
-
- packet.compile()
-
- # packet and VM
- self.fields['packet'] = packet.dump_pkt()
- self.fields['vm'] = packet.get_vm_data()
-
- self.fields['rx_stats'] = {}
- if not rx_stats:
- self.fields['rx_stats']['enabled'] = False
-
-
- def __str__ (self):
- return json.dumps(self.fields, indent = 4, separators=(',', ': '), sort_keys = True)
-
- def to_json (self):
- return self.fields
-
- def get_id (self):
- return self.stream_id
-
- @staticmethod
- def dump_to_yaml (yaml_file, stream_list):
-
- # type check
- if isinstance(stream_list, STLStream):
- stream_list = [stream_list]
-
- if not all([isinstance(stream, STLStream) for stream in stream_list]):
- raise STLArgumentError('stream_list', stream_list)
-
-
- names = {}
- for i, stream in enumerate(stream_list):
- names[stream.get_id()] = "stream-{0}".format(i)
-
- yaml_lst = []
- for stream in stream_list:
-
- fields = dict(stream.fields)
-
- # handle the next stream id
- if fields['next_stream_id'] == -1:
- del fields['next_stream_id']
-
- else:
- if not stream.get_id() in names:
- raise STLError('broken dependencies in stream list')
-
- fields['next_stream'] = names[stream.get_id()]
-
- # add to list
- yaml_lst.append({'name': names[stream.get_id()], 'stream': fields})
-
- # write to file
- x = yaml.dump(yaml_lst, default_flow_style=False)
- with open(yaml_file, 'w') as f:
- f.write(x)
- return x
-
-
-# REMOVE ME when can - convert from stream pack to a simple stream
-class HACKSTLStream(STLStream):
- def __init__ (self, stream_pack):
- if not isinstance(stream_pack, StreamPack):
- raise Exception("internal error")
-
- packet = CTRexPktBuilder()
- packet.load_from_stream_obj(stream_pack.stream)
- super(HACKSTLStream, self).__init__(packet, stream_id = stream_pack.stream_id)
-
- self.fields = stream_pack.stream
diff --git a/scripts/automation/trex_control_plane/stl/console/__init__.py b/scripts/automation/trex_control_plane/stl/console/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/console/__init__.py
diff --git a/scripts/automation/trex_control_plane/console/trex_console.py b/scripts/automation/trex_control_plane/stl/console/trex_console.py
index c8624626..789ad4ab 100755
--- a/scripts/automation/trex_control_plane/console/trex_console.py
+++ b/scripts/automation/trex_control_plane/stl/console/trex_console.py
@@ -27,14 +27,16 @@ import string
import os
import sys
import tty, termios
-import trex_root_path
-from common.trex_streams import *
-from client.trex_stateless_client import STLClient, LoggerApi
-from common.text_opts import *
-from client_utils.general_utils import user_input, get_current_user
-from client_utils import parsing_opts
+
+from trex_stl_lib.api import *
+
+from trex_stl_lib.utils.text_opts import *
+from trex_stl_lib.utils.common import user_input, get_current_user
+from trex_stl_lib.utils import parsing_opts
+
+
import trex_tui
-from common.trex_stl_exceptions import *
+
from functools import wraps
__version__ = "1.1"
diff --git a/scripts/automation/trex_control_plane/console/trex_root_path.py b/scripts/automation/trex_control_plane/stl/console/trex_root_path.py
index de4ec03b..de4ec03b 100755
--- a/scripts/automation/trex_control_plane/console/trex_root_path.py
+++ b/scripts/automation/trex_control_plane/stl/console/trex_root_path.py
diff --git a/scripts/automation/trex_control_plane/console/trex_tui.py b/scripts/automation/trex_control_plane/stl/console/trex_tui.py
index 1e22b005..f972b905 100644
--- a/scripts/automation/trex_control_plane/console/trex_tui.py
+++ b/scripts/automation/trex_control_plane/stl/console/trex_tui.py
@@ -2,13 +2,15 @@ import termios
import sys
import os
import time
-from common.text_opts import *
-from common import trex_stats
-from client_utils import text_tables
from collections import OrderedDict
import datetime
from cStringIO import StringIO
-from client.trex_stateless_client import STLError
+
+from trex_stl_lib.utils.text_opts import *
+from trex_stl_lib.utils import text_tables
+
+# for STL exceptions
+from trex_stl_lib.api import *
class SimpleBar(object):
def __init__ (self, desc, pattern):
@@ -61,7 +63,7 @@ class TrexTUIDashBoard(TrexTUIPanel):
def show (self):
- stats = self.stateless_client._get_formatted_stats(self.ports, trex_stats.COMPACT)
+ stats = self.stateless_client._get_formatted_stats(self.ports)
# print stats to screen
for stat_type, stat_data in stats.iteritems():
text_tables.print_table_with_header(stat_data.text_table, stat_type)
@@ -148,7 +150,7 @@ class TrexTUIPort(TrexTUIPanel):
def show (self):
- stats = self.stateless_client._get_formatted_stats([self.port_id], trex_stats.COMPACT)
+ stats = self.stateless_client._get_formatted_stats([self.port_id])
# print stats to screen
for stat_type, stat_data in stats.iteritems():
text_tables.print_table_with_header(stat_data.text_table, stat_type)
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_bi_dir_flows.py b/scripts/automation/trex_control_plane/stl/examples/stl_bi_dir_flows.py
new file mode 100644
index 00000000..2382f2f4
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_bi_dir_flows.py
@@ -0,0 +1,114 @@
+import stl_path
+from trex_stl_lib.api import *
+
+import time
+import json
+
+# simple packet creation
+def create_pkt (size, direction):
+
+ ip_range = {'src': {'start': "10.0.0.1", 'end': "10.0.0.254"},
+ 'dst': {'start': "8.0.0.1", 'end': "8.0.0.254"}}
+
+ if (direction == 0):
+ src = ip_range['src']
+ dst = ip_range['dst']
+ else:
+ src = ip_range['dst']
+ dst = ip_range['src']
+
+ vm = [
+ # src
+ STLVmFlowVar(name="src",min_value=src['start'],max_value=src['end'],size=4,op="inc"),
+ STLVmWriteFlowVar(fv_name="src",pkt_offset= "IP.src"),
+
+ # dst
+ STLVmFlowVar(name="dst",min_value=dst['start'],max_value=dst['end'],size=4,op="inc"),
+ STLVmWriteFlowVar(fv_name="dst",pkt_offset= "IP.dst"),
+
+ # checksum
+ STLVmFixIpv4(offset = "IP")
+ ]
+
+
+ base = Ether()/IP()/UDP()
+    pad = max(0, size - len(base)) * 'x'
+
+ return STLPktBuilder(pkt = base/pad,
+ vm = vm)
+
+
+def simple_burst ():
+
+
+ # create client
+ c = STLClient()
+ passed = True
+
+ try:
+ # turn this on for some information
+ #c.set_verbose("high")
+
+ # create two streams
+ s1 = STLStream(packet = create_pkt(200, 0),
+ mode = STLTXCont(pps = 100))
+
+ # second stream with a phase of 1ms (inter stream gap)
+ s2 = STLStream(packet = create_pkt(200, 1),
+ isg = 1000,
+ mode = STLTXCont(pps = 100))
+
+
+ # connect to server
+ c.connect()
+
+ # prepare our ports (my machine has 0 <--> 1 with static route)
+ c.reset(ports = [0, 1])
+
+ # add both streams to ports
+ c.add_streams(s1, ports = [0])
+ c.add_streams(s2, ports = [1])
+
+ # clear the stats before injecting
+ c.clear_stats()
+
+ # choose rate and start traffic for 10 seconds on 5 mpps
+ print "Running 5 Mpps on ports 0, 1 for 10 seconds..."
+ c.start(ports = [0, 1], mult = "5mpps", duration = 10)
+
+ # block until done
+ c.wait_on_traffic(ports = [0, 1])
+
+ # read the stats after the test
+ stats = c.get_stats()
+
+ print json.dumps(stats[0], indent = 4, separators=(',', ': '), sort_keys = True)
+ print json.dumps(stats[1], indent = 4, separators=(',', ': '), sort_keys = True)
+
+ lost_a = stats[0]["opackets"] - stats[1]["ipackets"]
+ lost_b = stats[1]["opackets"] - stats[0]["ipackets"]
+
+ print "\npackets lost from 0 --> 1: {0} pkts".format(lost_a)
+ print "packets lost from 1 --> 0: {0} pkts".format(lost_b)
+
+ if (lost_a == 0) and (lost_b == 0):
+ passed = True
+ else:
+ passed = False
+
+ except STLError as e:
+ passed = False
+ print e
+
+ finally:
+ c.disconnect()
+
+ if passed:
+ print "\nTest has passed :-)\n"
+ else:
+ print "\nTest has failed :-(\n"
+
+
+# run the tests
+simple_burst()
+
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_imix.py b/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
new file mode 100644
index 00000000..b9fbbbb6
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_imix.py
@@ -0,0 +1,103 @@
+import stl_path
+from trex_stl_lib.api import *
+
+import time
+import json
+from pprint import pprint
+
+# IMIX test
+# it maps the ports to sides
+# then it loads a predefined profile 'IMIX'
+# and attaches it to both sides and injects
+# at a certain rate for some time
+# finally it checks that all packets arrived
+def imix_test ():
+
+
+ # create client
+ c = STLClient()
+ passed = True
+
+
+ try:
+
+ # connect to server
+ c.connect()
+
+ # take all the ports
+ c.reset()
+
+ # map ports - identify the routes
+ table = stl_map_ports(c)
+
+ print "Mapped ports to sides {0} <--> {1}".format(table['dir'][0], table['dir'][1])
+ dir_0 = table['dir'][0]
+ dir_1 = table['dir'][1]
+
+ # load IMIX profile
+ profile = STLProfile.load_py('../../../../stl/profiles/imix.py')
+ streams = profile.get_streams()
+
+ # add both streams to ports
+ c.add_streams(streams, ports = dir_0)
+ c.add_streams(streams, ports = dir_1)
+
+ # clear the stats before injecting
+ c.clear_stats()
+
+ # choose rate and start traffic for 10 seconds on 5 mpps
+ duration = 10
+ mult = "5mpps"
+ print "Injecting {0} <--> {1} on total rate of '{2}' for {3} seconds".format(dir_0, dir_1, mult, duration)
+
+ c.start(ports = (dir_0 + dir_1), mult = mult, duration = duration, total = True)
+
+ # block until done
+ c.wait_on_traffic(ports = (dir_0 + dir_1))
+
+ # read the stats after the test
+ stats = c.get_stats()
+
+ # use this for debug info on all the stats
+ #pprint(stats)
+
+ # sum dir 0
+ dir_0_opackets = sum([stats[i]["opackets"] for i in dir_0])
+ dir_0_ipackets = sum([stats[i]["ipackets"] for i in dir_0])
+
+ # sum dir 1
+ dir_1_opackets = sum([stats[i]["opackets"] for i in dir_1])
+ dir_1_ipackets = sum([stats[i]["ipackets"] for i in dir_1])
+
+
+ lost_0 = dir_0_opackets - dir_1_ipackets
+ lost_1 = dir_1_opackets - dir_0_ipackets
+
+ print "\nPackets injected from {0}: {1:,}".format(dir_0, dir_0_opackets)
+ print "Packets injected from {0}: {1:,}".format(dir_1, dir_1_opackets)
+
+    print "\npackets lost from {0} --> {1}: {2:,} pkts".format(dir_0, dir_1, lost_0)
+    print "packets lost from {0} --> {1}: {2:,} pkts".format(dir_1, dir_0, lost_1)
+
+    if (lost_0 == 0) and (lost_1 == 0):
+ passed = True
+ else:
+ passed = False
+
+
+ except STLError as e:
+ passed = False
+ print e
+
+ finally:
+ c.disconnect()
+
+ if passed:
+ print "\nTest has passed :-)\n"
+ else:
+ print "\nTest has failed :-(\n"
+
+
+# run the tests
+imix_test()
+
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_path.py b/scripts/automation/trex_control_plane/stl/examples/stl_path.py
new file mode 100644
index 00000000..8f400d23
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_path.py
@@ -0,0 +1,4 @@
+import sys
+
+# FIXME to the right path for trex_stl_lib
+sys.path.insert(0, "../")
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_run_udp_simple.py b/scripts/automation/trex_control_plane/stl/examples/stl_run_udp_simple.py
new file mode 100644
index 00000000..388e42e7
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_run_udp_simple.py
@@ -0,0 +1,219 @@
+#!/usr/bin/python
+import sys, getopt
+import argparse;
+"""
+Sample API application,
+Connect to TRex
+Send UDP packets of a specific length
+Each direction has its own IP range
+Compare Rx-pkts to TX-pkts assuming ports are loopback
+
+"""
+
+import stl_path
+from trex_stl_lib.api import *
+
+H_VER = "trex-x v0.1 "
+
+class t_global(object):
+ args=None;
+
+
+import dpkt
+import time
+import json
+import string
+
+def generate_payload(length):
+ word = ''
+ alphabet_size = len(string.letters)
+ for i in range(length):
+ word += string.letters[(i % alphabet_size)]
+ return word
+
+# simple packet creation
+def create_pkt (frame_size = 9000, direction=0):
+
+ ip_range = {'src': {'start': "10.0.0.1", 'end': "10.0.0.254"},
+ 'dst': {'start': "8.0.0.1", 'end': "8.0.0.254"}}
+
+ if (direction == 0):
+ src = ip_range['src']
+ dst = ip_range['dst']
+ else:
+ src = ip_range['dst']
+ dst = ip_range['src']
+
+ vm = [
+ # src
+ STLVmFlowVar(name="src",min_value=src['start'],max_value=src['end'],size=4,op="inc"),
+ STLVmWriteFlowVar(fv_name="src",pkt_offset= "IP.src"),
+
+ # dst
+ STLVmFlowVar(name="dst",min_value=dst['start'],max_value=dst['end'],size=4,op="inc"),
+ STLVmWriteFlowVar(fv_name="dst",pkt_offset= "IP.dst"),
+
+ # checksum
+ STLVmFixIpv4(offset = "IP")
+ ]
+
+ pkt_base = Ether(src="00:00:00:00:00:01",dst="00:00:00:00:00:02")/IP()/UDP(dport=12,sport=1025)
+ pyld_size = frame_size - len(pkt_base);
+ pkt_pyld = generate_payload(pyld_size)
+
+ return STLPktBuilder(pkt = pkt_base/pkt_pyld,
+ vm = vm)
+
+
+def simple_burst (duration = 10, frame_size = 9000, speed = '1gbps'):
+
+ if (frame_size < 60):
+ frame_size = 60
+
+ pkt_dir_0 = create_pkt (frame_size, 0)
+
+ pkt_dir_1 = create_pkt (frame_size, 1)
+
+ # create client
+ c = STLClient(server = t_global.args.ip)
+
+ passed = True
+
+ try:
+ # turn this on for some information
+ #c.set_verbose("high")
+
+ # create two streams
+ s1 = STLStream(packet = pkt_dir_0,
+ mode = STLTXCont(pps = 100))
+
+ # second stream with a phase of 1ms (inter stream gap)
+ s2 = STLStream(packet = pkt_dir_1,
+ isg = 1000,
+ mode = STLTXCont(pps = 100))
+
+ if t_global.args.debug:
+ STLStream.dump_to_yaml ("example.yaml", [s1,s2]) # export to YAML so you can run it on simulator ./stl-sim -f example.yaml -o o.pcap
+
+ # connect to server
+ c.connect()
+
+ # prepare our ports (my machine has 0 <--> 1 with static route)
+ c.reset(ports = [0, 1])
+
+ # add both streams to ports
+ c.add_streams(s1, ports = [0])
+ c.add_streams(s2, ports = [1])
+
+ # clear the stats before injecting
+ c.clear_stats()
+
+ # choose rate and start traffic for 10 seconds on 5 mpps
+ print "Running {0} on ports 0, 1 for 10 seconds, UDP {1}...".format(speed,frame_size+4)
+ c.start(ports = [0, 1], mult = speed, duration = duration)
+
+ # block until done
+ c.wait_on_traffic(ports = [0, 1])
+
+ # read the stats after the test
+ stats = c.get_stats()
+
+ #print stats
+ print json.dumps(stats[0], indent = 4, separators=(',', ': '), sort_keys = True)
+ print json.dumps(stats[1], indent = 4, separators=(',', ': '), sort_keys = True)
+
+ lost_a = stats[0]["opackets"] - stats[1]["ipackets"]
+ lost_b = stats[1]["opackets"] - stats[0]["ipackets"]
+
+ print "\npackets lost from 0 --> 1: {0} pkts".format(lost_a)
+ print "packets lost from 1 --> 0: {0} pkts".format(lost_b)
+
+ if (lost_a == 0) and (lost_b == 0):
+ passed = True
+ else:
+ passed = False
+
+ except STLError as e:
+ passed = False
+ print e
+
+ finally:
+ c.disconnect()
+
+ if passed:
+ print "\nPASSED\n"
+ else:
+ print "\nFAILED\n"
+
+def process_options ():
+ parser = argparse.ArgumentParser(usage="""
+ connect to TRex and send burst of packets
+
+ examples
+
+ stl_run_udp_simple.py -s 9001
+
+ stl_run_udp_simple.py -s 9000 -d 2
+
+ stl_run_udp_simple.py -s 3000 -d 3 -m 10mbps
+
+ stl_run_udp_simple.py -s 3000 -d 3 -m 10mbps --debug
+
+ then run the simulator on the output
+ ./stl-sim -f example.yaml -o a.pcap ==> a.pcap include the packet
+
+ """,
+ description="example for TRex api",
+ epilog=" written by hhaim");
+
+ parser.add_argument("-s", "--frame-size",
+ dest="frame_size",
+ help='L2 frame size in bytes without FCS',
+ default=60,
+ type = int,
+ )
+
+ parser.add_argument("--ip",
+ dest="ip",
+ help='remote trex ip default local',
+ default="127.0.0.1",
+ type = str
+ )
+
+
+ parser.add_argument('-d','--duration',
+ dest='duration',
+ help='duration in second ',
+ default=10,
+ type = int,
+ )
+
+
+ parser.add_argument('-m','--multiplier',
+ dest='mul',
+ help='speed in gbps/pps for example 1gbps, 1mbps, 1mpps ',
+ default="1mbps"
+ )
+
+ parser.add_argument('--debug',
+ action='store_true',
+                        help='see debug info ')
+
+ parser.add_argument('--version', action='version',
+ version=H_VER )
+
+ t_global.args = parser.parse_args();
+ print t_global.args
+
+
+
+def main():
+ process_options ()
+ simple_burst(duration = t_global.args.duration,
+ frame_size = t_global.args.frame_size,
+ speed = t_global.args.mul
+ )
+
+if __name__ == "__main__":
+ main()
+
diff --git a/scripts/automation/trex_control_plane/stl/examples/stl_simple_burst.py b/scripts/automation/trex_control_plane/stl/examples/stl_simple_burst.py
new file mode 100644
index 00000000..ed0cb93a
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/examples/stl_simple_burst.py
@@ -0,0 +1,64 @@
+import stl_path
+from trex_stl_lib.api import *
+
+import time
+
+def simple_burst ():
+
+ # create client
+ c = STLClient()
+ passed = True
+
+ try:
+ pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/IP()/'a_payload_example')
+
+ # create two bursts and link them
+ s1 = STLStream(name = 'A',
+ packet = pkt,
+ mode = STLTXSingleBurst(total_pkts = 5000),
+ next = 'B')
+
+ s2 = STLStream(name = 'B',
+ self_start = False,
+ packet = pkt,
+ mode = STLTXSingleBurst(total_pkts = 3000))
+
+ # connect to server
+ c.connect()
+
+ # prepare our ports
+ c.reset(ports = [0, 3])
+
+ # add both streams to ports
+ stream_ids = c.add_streams([s1, s2], ports = [0, 3])
+
+ # run 5 times
+ for i in xrange(1, 6):
+ c.clear_stats()
+ c.start(ports = [0, 3], mult = "1gbps")
+ c.wait_on_traffic(ports = [0, 3])
+
+ stats = c.get_stats()
+ ipackets = stats['total']['ipackets']
+
+ print "Test iteration {0} - Packets Received: {1} ".format(i, ipackets)
+ # (5000 + 3000) * 2 ports = 16,000
+ if (ipackets != (16000)):
+ passed = False
+
+ except STLError as e:
+ passed = False
+ print e
+
+ finally:
+ c.disconnect()
+
+ if passed:
+ print "\nTest has passed :-)\n"
+ else:
+ print "\nTest has failed :-(\n"
+
+
+# run the tests
+simple_burst()
+
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/__init__.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/__init__.py
new file mode 100644
index 00000000..60bf7be8
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/__init__.py
@@ -0,0 +1 @@
+import trex_stl_ext
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/api.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/api.py
new file mode 100644
index 00000000..a9e99178
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/api.py
@@ -0,0 +1,28 @@
+
+# client and exceptions
+from trex_stl_exceptions import *
+from trex_stl_client import STLClient, LoggerApi
+
+# streams
+from trex_stl_streams import *
+
+# packet builder
+from trex_stl_packet_builder_scapy import *
+from scapy.all import *
+
+# packet builder
+STLPktBuilder = CScapyTRexPktBuilder
+
+# VM
+STLVmFlowVar = CTRexVmDescFlowVar
+STLVmWriteFlowVar = CTRexVmDescWrFlowVar
+STLVmFixIpv4 = CTRexVmDescFixIpv4
+STLVmTrimPktSize = CTRexVmDescTrimPktSize
+STLVmTupleGen = CTRexVmDescTupleGen
+
+
+# simulator
+from trex_stl_sim import STLSim
+
+# std lib (various lib functions)
+from trex_stl_std import *
diff --git a/scripts/automation/trex_control_plane/client/trex_async_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
index ef4c48f9..410482b9 100644
--- a/scripts/automation/trex_control_plane/client/trex_async_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_async_client.py
@@ -1,15 +1,5 @@
#!/router/bin/python
-try:
- # support import for Python 2
- import outer_packages
-except ImportError:
- # support import for Python 3
- import client.outer_packages
-from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage
-
-from common.text_opts import *
-
import json
import threading
import time
@@ -18,9 +8,11 @@ import zmq
import re
import random
-from common.trex_stats import *
-from common.trex_streams import *
-from common.trex_types import *
+from trex_stl_jsonrpc_client import JsonRpcClient, BatchMessage
+
+from utils.text_opts import *
+from trex_stl_stats import *
+from trex_stl_types import *
# basic async stats class
class CTRexAsyncStats(object):
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
index 7bbfc125..c8049bf9 100755..100644
--- a/scripts/automation/trex_control_plane/client/trex_stateless_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
@@ -1,33 +1,28 @@
#!/router/bin/python
-try:
- # support import for Python 2
- import outer_packages
-except ImportError:
- # support import for Python 3
- import client.outer_packages
-
-from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage
-from client_utils import general_utils
-from client_utils.packet_builder import CTRexPktBuilder
-import json
+# for API usage the path name must be full
+from trex_stl_lib.trex_stl_exceptions import *
+from trex_stl_lib.trex_stl_streams import *
+
+from trex_stl_jsonrpc_client import JsonRpcClient, BatchMessage
+import trex_stl_stats
+
+from trex_stl_port import Port
+from trex_stl_types import *
+from trex_stl_async_client import CTRexAsyncClient
+
+from utils import parsing_opts, text_tables, common
+from utils.text_opts import *
+
-from common.trex_streams import *
from collections import namedtuple
-from common.text_opts import *
-from common import trex_stats
-from client_utils import parsing_opts, text_tables
+from yaml import YAMLError
import time
import datetime
import re
import random
-from trex_port import Port
-from common.trex_types import *
-from common.trex_stl_exceptions import *
-from trex_async_client import CTRexAsyncClient
-from yaml import YAMLError
-
-
+import json
+import traceback
############################ logger #############################
############################ #############################
@@ -193,7 +188,6 @@ class AsyncEventHandler(object):
# dispatcher for server async events (port started, port stopped and etc.)
def handle_async_event (self, type, data):
# DP stopped
-
show_event = False
# port started
@@ -382,7 +376,7 @@ class STLClient(object):
"""docstring for STLClient"""
def __init__(self,
- username = general_utils.get_current_user(),
+ username = common.get_current_user(),
server = "localhost",
sync_port = 4501,
async_port = 4500,
@@ -431,11 +425,11 @@ class STLClient(object):
"virtual": virtual}
- self.global_stats = trex_stats.CGlobalStats(self.connection_info,
+ self.global_stats = trex_stl_stats.CGlobalStats(self.connection_info,
self.server_version,
self.ports)
- self.stats_generator = trex_stats.CTRexInfoGenerator(self.global_stats,
+ self.stats_generator = trex_stl_stats.CTRexInfoGenerator(self.global_stats,
self.ports)
@@ -770,8 +764,8 @@ class STLClient(object):
return self.comm_link.transmit_batch(batch_list)
# stats
- def _get_formatted_stats(self, port_id_list, stats_mask=set()):
- stats_opts = trex_stats.ALL_STATS_OPTS.intersection(stats_mask)
+ def _get_formatted_stats(self, port_id_list, stats_mask = trex_stl_stats.COMPACT):
+ stats_opts = trex_stl_stats.ALL_STATS_OPTS.intersection(stats_mask)
stats_obj = {}
for stats_type in stats_opts:
@@ -1270,67 +1264,7 @@ class STLClient(object):
raise STLError(rc)
- """
- load a profile file to port(s)
-
- :parameters:
- filename : str
- filename to load
- ports : list
- ports to execute the command
-
-
- :raises:
- + :exc:`STLError`
-
- """
- @__api_check(True)
- def load_profile (self, filename, ports = None):
-
- # check filename
- if not os.path.isfile(filename):
- raise STLError("file '{0}' does not exists".format(filename))
-
- # by default use all ports
- if ports == None:
- ports = self.get_acquired_ports()
-
- # verify valid port id list
- rc = self._validate_port_list(ports)
- if not rc:
- raise STLArgumentError('ports', ports, valid_values = self.get_all_ports())
-
-
- streams = None
-
- # try YAML
- try:
- streams_db = CStreamsDB()
- stream_list = streams_db.load_yaml_file(filename)
- # convert to new style stream object
- streams = [HACKSTLStream(stream) for stream in stream_list.compiled]
- except YAMLError:
- # try python loader
- try:
- basedir = os.path.dirname(filename)
-
- sys.path.append(basedir)
- file = os.path.basename(filename).split('.')[0]
- module = __import__(file, globals(), locals(), [], -1)
- reload(module) # reload the update
-
- streams = module.register().get_streams()
-
- except Exception as e :
- print str(e);
- traceback.print_exc(file=sys.stdout)
- raise STLError("Unexpected error: '{0}'".format(filename))
-
-
- self.add_streams(streams, ports)
-
-
-
+
"""
start traffic on port(s)
@@ -1817,7 +1751,15 @@ class STLClient(object):
self.remove_all_streams(opts.ports)
# pack the profile
- self.load_profile(opts.file[0], opts.ports)
+ try:
+ profile = STLProfile.load(opts.file[0])
+ except STLError as e:
+ print format_text("\nError while loading profile '{0}'\n".format(opts.file[0]), 'bold')
+ print e.brief() + "\n"
+ return
+
+
+ self.add_streams(profile.get_streams(), ports = opts.ports)
if opts.dry:
self.validate(opts.ports, opts.mult, opts.duration, opts.total)
@@ -1971,12 +1913,12 @@ class STLClient(object):
return
# determine stats mask
- mask = self.__get_mask_keys(**self.__filter_namespace_args(opts, trex_stats.ALL_STATS_OPTS))
+ mask = self.__get_mask_keys(**self.__filter_namespace_args(opts, trex_stl_stats.ALL_STATS_OPTS))
if not mask:
# set to show all stats if no filter was given
- mask = trex_stats.ALL_STATS_OPTS
+ mask = trex_stl_stats.ALL_STATS_OPTS
- stats_opts = trex_stats.ALL_STATS_OPTS.intersection(mask)
+ stats_opts = trex_stl_stats.ALL_STATS_OPTS.intersection(mask)
stats = self._get_formatted_stats(opts.ports, mask)
diff --git a/scripts/automation/trex_control_plane/common/trex_stl_exceptions.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_exceptions.py
index 9be20db9..45acc72e 100644
--- a/scripts/automation/trex_control_plane/common/trex_stl_exceptions.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_exceptions.py
@@ -1,6 +1,7 @@
import os
import sys
-from common.text_opts import *
+
+from utils.text_opts import *
# basic error for API
class STLError(Exception):
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py
new file mode 100644
index 00000000..835918d9
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_ext.py
@@ -0,0 +1,74 @@
+import sys
+import os
+import warnings
+
+# if not set - set it to default
+TREX_STL_EXT_PATH = os.environ.get('TREX_STL_EXT_PATH')
+
+# take default
+if not TREX_STL_EXT_PATH:
+ CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+ # ../../../../external_libs
+ TREX_STL_EXT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir, os.pardir, os.pardir, os.pardir, 'external_libs'))
+
+
+# the modules required
+CLIENT_UTILS_MODULES = ['dpkt-1.8.6',
+ 'yaml-3.11',
+ 'texttable-0.8.4',
+ 'scapy-2.3.1'
+ ]
+
+
+def import_module_list(modules_list):
+ assert(isinstance(modules_list, list))
+
+ for p in modules_list:
+ full_path = os.path.join(TREX_STL_EXT_PATH, p)
+ fix_path = os.path.normcase(full_path)
+
+ if not os.path.exists(fix_path):
+ print "Unable to find required module library: '{0}'".format(p)
+ print "Please provide the correct path using TREX_STL_EXT_PATH variable"
+ print "current path used: '{0}'".format(TREX_STL_EXT_PATH)
+ exit(0)
+
+ sys.path.insert(1, full_path)
+
+
+
+def import_platform_dirs ():
+ # handle platform dirs
+
+ # try fedora 18 first and then cel5.9
+ # we are using the ZMQ module to determine the right platform
+
+ full_path = os.path.join(TREX_STL_EXT_PATH, 'platform/fedora18')
+ fix_path = os.path.normcase(full_path)
+ sys.path.insert(0, full_path)
+ try:
+ # try to import and delete it from the namespace
+ import zmq
+ del zmq
+ return
+ except:
+ sys.path.pop(0)
+ pass
+
+ full_path = os.path.join(TREX_STL_EXT_PATH, 'platform/cel59')
+ fix_path = os.path.normcase(full_path)
+ sys.path.insert(0, full_path)
+ try:
+ # try to import and delete it from the namespace
+ import zmq
+ del zmq
+ return
+
+ except:
+ sys.path.pop(0)
+ sys.modules['zmq'] = None
+ warnings.warn("unable to determine platform type for ZMQ import")
+
+
+import_module_list(CLIENT_UTILS_MODULES)
+import_platform_dirs()
diff --git a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_jsonrpc_client.py
index 9c351175..ab3c7282 100755..100644
--- a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_jsonrpc_client.py
@@ -1,13 +1,12 @@
#!/router/bin/python
-import external_packages
import zmq
import json
-import general_utils
import re
from time import sleep
from collections import namedtuple
-from common.trex_types import *
+from trex_stl_types import *
+from utils.common import random_id_gen
class bcolors:
BLUE = '\033[94m'
@@ -49,7 +48,8 @@ class JsonRpcClient(object):
# default values
self.port = default_port
self.server = default_server
- self.id_gen = general_utils.random_id_gen()
+
+ self.id_gen = random_id_gen()
def get_connection_details (self):
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder_interface.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_interface.py
index b6e7c026..b6e7c026 100644
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder_interface.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_interface.py
diff --git a/scripts/automation/trex_control_plane/client_utils/scapy_packet_builder.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
index b1b181c6..0828fbd9 100644
--- a/scripts/automation/trex_control_plane/client_utils/scapy_packet_builder.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
@@ -1,4 +1,3 @@
-import external_packages
import random
import string
import struct
@@ -8,7 +7,7 @@ import yaml
import binascii
import base64
-from packet_builder_interface import CTrexPktBuilderInterface
+from trex_stl_packet_builder_interface import CTrexPktBuilderInterface
from scapy.all import *
@@ -586,12 +585,12 @@ class CTRexVmDescTupleGen(CTRexVmDescBase):
################################################################################################
-
class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
"""
This class defines the TRex API of building a packet using dpkt package.
Using this class the user can also define how TRex will handle the packet by specifying the VM setting.
+ pkt could be Scapy pkt or pcap file name
"""
def __init__(self, pkt = None, vm = None):
"""
@@ -603,17 +602,18 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
"""
super(CScapyTRexPktBuilder, self).__init__()
- self.pkt = None
+ self.pkt = None # as input
+ self.pkt_raw = None # from raw pcap file
self.vm_scripts = [] # list of high level instructions
self.vm_low_level = None
self.metadata=""
+ was_set=False
# process packet
if pkt != None:
- if not isinstance(pkt, Packet):
- raise CTRexPacketBuildException(-14, "bad value for variable pkt")
self.set_packet(pkt)
+ was_set=True
# process VM
if vm != None:
@@ -621,6 +621,10 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
raise CTRexPacketBuildException(-14, "bad value for variable vm")
self.add_command(vm if isinstance(vm, CTRexScRaw) else CTRexScRaw(vm))
+ was_set=True
+
+ if was_set:
+ self.compile ()
def dump_vm_data_as_yaml(self):
@@ -661,14 +665,14 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
+ :exc:`AssertionError`, in case packet is empty.
"""
+ pkt_buf = self._get_pkt_as_str()
- assert self.pkt, 'empty packet'
-
- return {'binary': base64.b64encode(str(self.pkt)) if encode else str(self.pkt),
+ return {'binary': base64.b64encode(pkt_buf) if encode else pkt_buf,
'meta': self.metadata}
+
def dump_pkt_to_pcap(self, file_path):
- wrpcap(file_path, self.pkt)
+ wrpcap(file_path, self._get_pkt_as_str())
def add_command (self, script):
self.vm_scripts.append(script.clone());
@@ -676,18 +680,67 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
def dump_scripts (self):
self.vm_low_level.dump_as_yaml()
+ def dump_as_hex (self):
+ pkt_buf = self._get_pkt_as_str()
+ print hexdump(pkt_buf)
+
+ def pkt_layers_desc (self):
+ """
+ return layer description like this IP:TCP:Pyload
+
+ """
+ pkt_buf = self._get_pkt_as_str()
+ scapy_pkt = Ether(pkt_buf);
+ pkt_utl = CTRexScapyPktUtl(scapy_pkt);
+ return pkt_utl.get_pkt_layers()
+
+ def set_pkt_as_str (self, pkt_buffer):
+ assert type(pkt_buffer)==str, "pkt_buffer should be string"
+ self.pkt_raw = pkt_buffer
+
+ def set_pcap_file (self, pcap_file):
+ """
+ load raw pcap file into a buffer. load only the first packet
+
+ :parameters:
+ pcap_file : file_name
+
+ :raises:
+ + :exc:`AssertionError`, in case packet is empty.
+
+ """
+
+ p=RawPcapReader(pcap_file)
+ was_set = False
+
+ for pkt in p:
+ was_set=True;
+ self.pkt_raw = str(pkt[0])
+ break
+ if not was_set :
+ raise CTRexPacketBuildException(-14, "no buffer inside the pcap file")
+
def set_packet (self, pkt):
"""
Scapy packet Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/IP()/"A"*10
"""
- self.pkt = pkt;
+ if isinstance(pkt, Packet):
+ self.pkt = pkt;
+ else:
+ if isinstance(pkt, str):
+ self.set_pcap_file(pkt)
+ else:
+ raise CTRexPacketBuildException(-14, "bad packet" )
+
def compile (self):
self.vm_low_level=CTRexVmEngine()
- assert self.pkt, 'empty packet'
- self.pkt.build();
+ if self.pkt == None and self.pkt_raw == None:
+ raise CTRexPacketBuildException(-14, "Packet is empty")
+ if self.pkt:
+ self.pkt.build();
for sc in self.vm_scripts:
if isinstance(sc, CTRexScRaw):
@@ -739,10 +792,16 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
p_utl=CTRexScapyPktUtl(self.pkt);
return p_utl.get_field_offet_by_str(field_name)
+ def _get_pkt_as_str(self):
+ if self.pkt:
+ return str(self.pkt)
+ if self.pkt_raw:
+ return self.pkt_raw
+ raise CTRexPacketBuildException(-11, 'empty packet');
+
def _add_tuple_gen(self,tuple_gen):
pass;
-
diff --git a/scripts/automation/trex_control_plane/client/trex_port.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py
index eaf64ac2..732cfc1e 100644
--- a/scripts/automation/trex_control_plane/client/trex_port.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py
@@ -1,8 +1,8 @@
from collections import namedtuple, OrderedDict
-from common.trex_types import *
-from common import trex_stats
-from client_utils import packet_builder
+
+import trex_stl_stats
+from trex_stl_types import *
StreamOnPort = namedtuple('StreamOnPort', ['compiled_stream', 'metadata'])
@@ -50,7 +50,9 @@ class Port(object):
self.profile = None
self.session_id = session_id
- self.port_stats = trex_stats.CPortStats(self)
+ self.port_stats = trex_stl_stats.CPortStats(self)
+
+ self.next_available_id = 1
def err(self, msg):
@@ -130,6 +132,8 @@ class Port(object):
# TODO: handle syncing the streams into stream_db
+ self.next_available_id = rc.data()['max_stream_id']
+
return self.ok()
@@ -139,6 +143,12 @@ class Port(object):
return ((self.state == self.STATE_IDLE) or (self.state == self.STATE_STREAMS))
+ def __allocate_stream_id (self):
+ id = self.next_available_id
+ self.next_available_id += 1
+ return id
+
+
# add streams
def add_streams (self, streams_list):
@@ -148,8 +158,33 @@ class Port(object):
if not self.is_port_writable():
return self.err("Please stop port before attempting to add streams")
+ # listify
+ streams_list = streams_list if isinstance(streams_list, list) else [streams_list]
+
+ lookup = {}
+
+ # allocate IDs
+ for stream in streams_list:
+ if stream.get_id() == None:
+ stream.set_id(self.__allocate_stream_id())
+
+ lookup[stream.get_name()] = stream.get_id()
+
+ # resolve names
+ for stream in streams_list:
+ next_id = -1
+
+ next = stream.get_next()
+ if next:
+ if not next in lookup:
+ return self.err("stream dependency error - unable to find '{0}'".format(next))
+ next_id = lookup[next]
+
+ stream.fields['next_stream_id'] = next_id
+
+
batch = []
- for stream in (streams_list if isinstance(streams_list, list) else [streams_list]):
+ for stream in streams_list:
params = {"handler": self.handler,
"port_id": self.port_id,
@@ -161,7 +196,7 @@ class Port(object):
# meta data for show streams
self.streams[stream.get_id()] = StreamOnPort(stream.to_json(),
- Port._generate_stream_metadata(stream.get_id(), stream.to_json()))
+ Port._generate_stream_metadata(stream))
rc = self.transmit_batch(batch)
if not rc:
@@ -473,21 +508,21 @@ class Port(object):
"streams" : streams_data}
@staticmethod
- def _generate_stream_metadata(stream_id, compiled_stream_obj):
+ def _generate_stream_metadata(stream):
meta_dict = {}
# create packet stream description
- pkt_bld_obj = packet_builder.CTRexPktBuilder()
- pkt_bld_obj.load_from_stream_obj(compiled_stream_obj)
+ #pkt_bld_obj = packet_builder.CTRexPktBuilder()
+ #pkt_bld_obj.load_from_stream_obj(compiled_stream_obj)
# generate stream summary based on that
- next_stream = "None" if compiled_stream_obj['next_stream_id']==-1 else compiled_stream_obj['next_stream_id']
+ #next_stream = "None" if stream['next_stream_id']==-1 else stream['next_stream_id']
- meta_dict['stream_sum'] = OrderedDict([("id", stream_id),
- ("packet_type", "/".join(pkt_bld_obj.get_packet_layers())),
- ("length", pkt_bld_obj.get_packet_length()),
- ("mode", compiled_stream_obj['mode']['type']),
- ("rate_pps", compiled_stream_obj['mode']['pps']),
- ("next_stream", next_stream)
+ meta_dict['stream_sum'] = OrderedDict([("id", stream.get_id()),
+ ("packet_type", "FIXME!!!"),
+ ("L2 len", "FIXME!!! +++4"),
+ ("mode", "FIXME!!!"),
+ ("rate_pps", "FIXME!!!"),
+ ("next_stream", "FIXME!!!")
])
return meta_dict
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py
index 1452cdd1..9cea3ea8 100644
--- a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py
@@ -16,18 +16,14 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
+# simulator can be run as a standalone
+import trex_stl_ext
-try:
- # support import for Python 2
- import outer_packages
-except ImportError:
- # support import for Python 3
- import client.outer_packages
-
-from common.trex_stl_exceptions import STLError
+from trex_stl_exceptions import *
from yaml import YAMLError
-from common.trex_streams import *
-from client_utils import parsing_opts
+from trex_stl_streams import *
+from utils import parsing_opts
+from trex_stl_client import STLClient
import re
import json
@@ -95,33 +91,6 @@ class STLSim(object):
self.port_id = port_id
- def load_input_file (self, input_file):
- # try YAML
- try:
- streams_db = CStreamsDB()
- stream_list = streams_db.load_yaml_file(input_file)
-
- # convert to new style stream object
- return [HACKSTLStream(stream) for stream in stream_list.compiled]
- except YAMLError:
- pass
-
- # try python
- try:
- basedir = os.path.dirname(input_file)
- sys.path.append(basedir)
-
- file = os.path.basename(input_file).split('.')[0]
- module = __import__(file, globals(), locals(), [], -1)
-
- return module.register().get_streams()
-
- except (AttributeError, ImportError) as e:
- print "specific error: {0}".format(e)
-
- raise STLError("bad format input file '{0}'".format(input_file))
-
-
def generate_start_cmd (self, mult = "1", force = True, duration = -1):
return {"id":1,
"jsonrpc": "2.0",
@@ -171,11 +140,41 @@ class STLSim(object):
# handle YAMLs
for input_file in input_files:
- stream_list += self.load_input_file(input_file)
+ try:
+ profile = STLProfile.load(input_file)
+ except STLError as e:
+ print format_text("\nError while loading profile '{0}'\n".format(input_file), 'bold')
+ print e.brief() + "\n"
+ return
+
+ stream_list += profile.get_streams()
# load streams
cmds_json = []
+
+ id = 1
+
+ lookup = {}
+ # allocate IDs
+ for stream in stream_list:
+ if stream.get_id() == None:
+ stream.set_id(id)
+ id += 1
+
+ lookup[stream.get_name()] = stream.get_id()
+
+ # resolve names
+ for stream in stream_list:
+ next_id = -1
+ next = stream.get_next()
+ if next:
+ if not next in lookup:
+ raise STLError("stream dependency error - unable to find '{0}'".format(next))
+ next_id = lookup[next]
+
+ stream.fields['next_stream_id'] = next_id
+
for stream in stream_list:
cmd = {"id":1,
"jsonrpc": "2.0",
diff --git a/scripts/automation/trex_control_plane/common/trex_stats.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
index 3bd6e0cd..3f09e47c 100755..100644
--- a/scripts/automation/trex_control_plane/common/trex_stats.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
@@ -1,8 +1,11 @@
#!/router/bin/python
+
+from utils import text_tables
+from utils.text_opts import format_text, format_threshold, format_num
+
+from trex_stl_async_client import CTRexAsyncStats
+
from collections import namedtuple, OrderedDict, deque
-from client_utils import text_tables
-from common.text_opts import format_text, format_threshold, format_num
-from client.trex_async_client import CTRexAsyncStats
import copy
import datetime
import time
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
new file mode 100644
index 00000000..72a5ea52
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_std.py
@@ -0,0 +1,67 @@
+from trex_stl_streams import *
+from trex_stl_packet_builder_scapy import *
+
+# map ports
+# will destroy all streams/data on the ports
+def stl_map_ports (client, ports = None):
+
+ # by default use all ports
+ if ports == None:
+ ports = client.get_all_ports()
+
+ # reset the ports
+ client.reset(ports)
+
+ # generate streams
+ base_pkt = CScapyTRexPktBuilder(pkt = Ether()/IP())
+
+ pkts = 1
+ for port in ports:
+ stream = STLStream(packet = base_pkt,
+ mode = STLTXSingleBurst(pps = 100000, total_pkts = pkts))
+
+ client.add_streams(stream, [port])
+ pkts = pkts * 2
+
+ # inject
+ client.clear_stats()
+ client.start(ports, mult = "1mpps")
+ client.wait_on_traffic(ports)
+
+ stats = client.get_stats()
+
+ # cleanup
+ client.reset(ports = ports)
+
+ table = {}
+ for port in ports:
+ table[port] = None
+
+ for port in ports:
+ ipackets = stats[port]["ipackets"]
+
+ exp = 1
+ while ipackets >= exp:
+ if ((ipackets & exp) == (exp)):
+ source = int(math.log(exp, 2))
+ table[source] = port
+
+ exp *= 2
+
+ if not all(x != None for x in table.values()):
+ raise STLError('unable to map ports')
+
+ dir_a = set()
+ dir_b = set()
+ for src, dst in table.iteritems():
+ # src is not in
+ if src not in (dir_a, dir_b):
+ if dst in dir_a:
+ dir_b.add(src)
+ else:
+ dir_a.add(src)
+
+ table['dir'] = [list(dir_a), list(dir_b)]
+
+ return table
+
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
new file mode 100644
index 00000000..abfa32cd
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_streams.py
@@ -0,0 +1,386 @@
+#!/router/bin/python
+
+from trex_stl_exceptions import *
+from trex_stl_packet_builder_interface import CTrexPktBuilderInterface
+from trex_stl_packet_builder_scapy import CScapyTRexPktBuilder, Ether, IP
+from collections import OrderedDict, namedtuple
+
+from dpkt import pcap
+import random
+import yaml
+import base64
+import string
+import traceback
+
+def random_name (l):
+ return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(l))
+
+
+# base class for TX mode
+class STLTXMode(object):
+ def __init__ (self):
+ self.fields = {}
+
+ def to_json (self):
+ return self.fields
+
+
+# continuous mode
+class STLTXCont(STLTXMode):
+
+ def __init__ (self, pps = 1):
+
+ if not isinstance(pps, (int, float)):
+ raise STLArgumentError('pps', pps)
+
+ super(STLTXCont, self).__init__()
+
+ self.fields['type'] = 'continuous'
+ self.fields['pps'] = pps
+
+
+# single burst mode
+class STLTXSingleBurst(STLTXMode):
+
+ def __init__ (self, pps = 1, total_pkts = 1):
+
+ if not isinstance(pps, (int, float)):
+ raise STLArgumentError('pps', pps)
+
+ if not isinstance(total_pkts, int):
+ raise STLArgumentError('total_pkts', total_pkts)
+
+ super(STLTXSingleBurst, self).__init__()
+
+ self.fields['type'] = 'single_burst'
+ self.fields['pps'] = pps
+ self.fields['total_pkts'] = total_pkts
+
+
+# multi burst mode
+class STLTXMultiBurst(STLTXMode):
+
+ def __init__ (self,
+ pps = 1,
+ pkts_per_burst = 1,
+ ibg = 0.0,
+ count = 1):
+
+ if not isinstance(pps, (int, float)):
+ raise STLArgumentError('pps', pps)
+
+ if not isinstance(pkts_per_burst, int):
+ raise STLArgumentError('pkts_per_burst', pkts_per_burst)
+
+ if not isinstance(ibg, (int, float)):
+ raise STLArgumentError('ibg', ibg)
+
+ if not isinstance(count, int):
+ raise STLArgumentError('count', count)
+
+ super(STLTXMultiBurst, self).__init__()
+
+ self.fields['type'] = 'multi_burst'
+ self.fields['pps'] = pps
+ self.fields['pkts_per_burst'] = pkts_per_burst
+ self.fields['ibg'] = ibg
+ self.fields['count'] = count
+
+
+class STLStream(object):
+
+ def __init__ (self,
+ name = random_name(8),
+ packet = None,
+ mode = STLTXCont(1),
+ enabled = True,
+ self_start = True,
+ isg = 0.0,
+ rx_stats = None,
+ next = None,
+ stream_id = None):
+
+ # type checking
+ if not isinstance(mode, STLTXMode):
+ raise STLArgumentError('mode', mode)
+
+ if packet and not isinstance(packet, CTrexPktBuilderInterface):
+ raise STLArgumentError('packet', packet)
+
+ if not isinstance(enabled, bool):
+ raise STLArgumentError('enabled', enabled)
+
+ if not isinstance(self_start, bool):
+ raise STLArgumentError('self_start', self_start)
+
+ if not isinstance(isg, (int, float)):
+ raise STLArgumentError('isg', isg)
+
+ if (type(mode) == STLTXCont) and (next != None):
+ raise STLError("continuous stream cannot have a next stream ID")
+
+ # tag for the stream and next - can be anything
+ self.name = name
+ self.next = next
+ self.set_id(stream_id)
+
+ self.fields = {}
+
+ # basic fields
+ self.fields['enabled'] = enabled
+ self.fields['self_start'] = self_start
+ self.fields['isg'] = isg
+
+ # mode
+ self.fields['mode'] = mode.to_json()
+
+ self.fields['packet'] = {}
+ self.fields['vm'] = {}
+
+ if not packet:
+ packet = CScapyTRexPktBuilder(pkt = Ether()/IP())
+
+ # packet builder
+ packet.compile()
+
+ # packet and VM
+ self.fields['packet'] = packet.dump_pkt()
+ self.fields['vm'] = packet.get_vm_data()
+
+ if not rx_stats:
+ self.fields['rx_stats'] = {}
+ self.fields['rx_stats']['enabled'] = False
+ else:
+ self.fields['rx_stats'] = rx_stats
+
+
+ def __str__ (self):
+ return json.dumps(self.fields, indent = 4, separators=(',', ': '), sort_keys = True)
+
+ def to_json (self):
+ return self.fields
+
+ def get_id (self):
+ return self.id
+
+ def set_id (self, id):
+ self.id = id
+
+ def get_name (self):
+ return self.name
+
+ def get_next (self):
+ return self.next
+
+
+ def to_yaml (self):
+ return {'name': self.name, 'stream': self.fields}
+
+
+
+class YAMLLoader(object):
+
+ def __init__ (self, yaml_file):
+ self.yaml_path = os.path.dirname(yaml_file)
+ self.yaml_file = yaml_file
+
+
+ def __parse_packet (self, packet_dict):
+ builder = CScapyTRexPktBuilder()
+
+ packet_type = set(packet_dict).intersection(['binary', 'pcap'])
+ if len(packet_type) != 1:
+ raise STLError("packet section must contain either 'binary' or 'pcap'")
+
+ if 'binary' in packet_type:
+ try:
+ pkt_str = base64.b64decode(packet_dict['binary'])
+ except TypeError:
+ raise STLError("'binary' field is not a valid packet format")
+
+ builder.set_pkt_as_str(pkt_str)
+
+ elif 'pcap' in packet_type:
+ pcap = os.path.join(self.yaml_path, packet_dict['pcap'])
+
+ if not os.path.exists(pcap):
+ raise STLError("'pcap' - cannot find '{0}'".format(pcap))
+
+ builder.set_packet(pcap)
+
+ return builder
+
+
+ def __parse_mode (self, mode_obj):
+
+ mode_type = mode_obj.get('type')
+
+ if mode_type == 'continuous':
+ defaults = STLTXCont()
+ mode = STLTXCont(pps = mode_obj.get('pps', defaults.fields['pps']))
+
+ elif mode_type == 'single_burst':
+ defaults = STLTXSingleBurst()
+ mode = STLTXSingleBurst(pps = mode_obj.get('pps', defaults.fields['pps']),
+ total_pkts = mode_obj.get('total_pkts', defaults.fields['total_pkts']))
+
+ elif mode_type == 'multi_burst':
+ defaults = STLTXMultiBurst()
+ mode = STLTXMultiBurst(pps = mode_obj.get('pps', defaults.fields['pps']),
+ pkts_per_burst = mode_obj.get('pkts_per_burst', defaults.fields['pkts_per_burst']),
+ ibg = mode_obj.get('ibg', defaults.fields['ibg']),
+ count = mode_obj.get('count', defaults.fields['count']))
+
+ else:
+ raise STLError("mode type can be 'continuous', 'single_burst' or 'multi_burst")
+
+
+ return mode
+
+
+ def __parse_stream (self, yaml_object):
+ s_obj = yaml_object['stream']
+
+ # parse packet
+ packet = s_obj.get('packet')
+ if not packet:
+ raise STLError("YAML file must contain 'packet' field")
+
+ builder = self.__parse_packet(packet)
+
+
+ # mode
+ mode_obj = s_obj.get('mode')
+ if not mode_obj:
+ raise STLError("YAML file must contain 'mode' field")
+
+ mode = self.__parse_mode(mode_obj)
+
+
+ defaults = STLStream()
+
+ # create the stream
+ stream = STLStream(name = yaml_object.get('name'),
+ packet = builder,
+ mode = mode,
+ enabled = s_obj.get('enabled', defaults.fields['enabled']),
+ self_start = s_obj.get('self_start', defaults.fields['self_start']),
+ isg = s_obj.get('isg', defaults.fields['isg']),
+ rx_stats = s_obj.get('rx_stats', defaults.fields['rx_stats']),
+ next = yaml_object.get('next'))
+
+ # hack the VM fields for now
+ if 'vm' in s_obj:
+ stream.fields['vm'].update(s_obj['vm'])
+
+ return stream
+
+
+ def parse (self):
+ with open(self.yaml_file, 'r') as f:
+ # read YAML and pass it down to stream object
+ yaml_str = f.read()
+
+ try:
+ objects = yaml.load(yaml_str)
+ except yaml.parser.ParserError as e:
+ raise STLError(str(e))
+
+ streams = [self.__parse_stream(object) for object in objects]
+
+ return streams
+
+
+# profile class
+class STLProfile(object):
+ def __init__ (self, streams = None):
+ if streams == None:
+ streams = []
+
+ if not type(streams) == list:
+ streams = [streams]
+
+ if not all([isinstance(stream, STLStream) for stream in streams]):
+ raise STLArgumentError('streams', streams)
+
+ self.streams = streams
+
+
+ def get_streams (self):
+ return self.streams
+
+ def __str__ (self):
+ return '\n'.join([str(stream) for stream in self.streams])
+
+
+ @staticmethod
+ def load_yaml (yaml_file):
+ # check filename
+ if not os.path.isfile(yaml_file):
+ raise STLError("file '{0}' does not exists".format(yaml_file))
+
+ yaml_loader = YAMLLoader(yaml_file)
+ streams = yaml_loader.parse()
+
+ return STLProfile(streams)
+
+
+ @staticmethod
+ def load_py (python_file):
+ # check filename
+ if not os.path.isfile(python_file):
+ raise STLError("file '{0}' does not exists".format(python_file))
+
+ basedir = os.path.dirname(python_file)
+ sys.path.append(basedir)
+
+ try:
+ file = os.path.basename(python_file).split('.')[0]
+ module = __import__(file, globals(), locals(), [], -1)
+ reload(module) # reload the update
+
+ streams = module.register().get_streams()
+
+ return STLProfile(streams)
+
+ except Exception as e:
+ a, b, tb = sys.exc_info()
+ x =''.join(traceback.format_list(traceback.extract_tb(tb)[1:])) + a.__name__ + ": " + str(b) + "\n"
+
+ summary = "\nPython Traceback follows:\n\n" + x
+ raise STLError(summary)
+
+
+ finally:
+ sys.path.remove(basedir)
+
+
+ @staticmethod
+ def load (filename):
+ x = os.path.basename(filename).split('.')
+ suffix = x[1] if (len(x) == 2) else None
+
+ if suffix == 'py':
+ profile = STLProfile.load_py(filename)
+
+ elif suffix == 'yaml':
+ profile = STLProfile.load_yaml(filename)
+
+ else:
+ raise STLError("unknown profile file type: '{0}'".format(suffix))
+
+ return profile
+
+
+ def dump_to_yaml (self, yaml_file = None):
+ yaml_list = [stream.to_yaml() for stream in self.streams]
+ yaml_str = yaml.dump(yaml_list, default_flow_style = False)
+
+ # write to file if provided
+ if yaml_file:
+ with open(yaml_file, 'w') as f:
+ f.write(yaml_str)
+
+ return yaml_str
+
+
diff --git a/scripts/automation/trex_control_plane/common/trex_types.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_types.py
index a7ddacea..1164076b 100644
--- a/scripts/automation/trex_control_plane/common/trex_types.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_types.py
@@ -1,6 +1,6 @@
from collections import namedtuple
-from common.text_opts import *
+from utils.text_opts import *
RpcCmdData = namedtuple('RpcCmdData', ['method', 'params'])
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/__init__.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/__init__.py
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/common.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/common.py
new file mode 100644
index 00000000..117017c3
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/common.py
@@ -0,0 +1,47 @@
+import os
+import sys
+import string
+import random
+
+try:
+ import pwd
+except ImportError:
+ import getpass
+ pwd = None
+
+using_python_3 = True if sys.version_info.major == 3 else False
+
+def get_current_user():
+ if pwd:
+ return pwd.getpwuid(os.geteuid()).pw_name
+ else:
+ return getpass.getuser()
+
+
+def user_input():
+ if using_python_3:
+ return input()
+ else:
+ # using python version 2
+ return raw_input()
+
+
+def random_id_gen(length=8):
+ """
+ A generator for creating a random chars id of specific length
+
+ :parameters:
+ length : int
+ the desired length of the generated id
+
+ default: 8
+
+ :return:
+ a random id with each next() request.
+ """
+ id_chars = string.ascii_lowercase + string.digits
+ while True:
+ return_id = ''
+ for i in range(length):
+ return_id += random.choice(id_chars)
+ yield return_id
diff --git a/scripts/automation/trex_control_plane/client_utils/parsing_opts.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
index 968bbb7e..968bbb7e 100755
--- a/scripts/automation/trex_control_plane/client_utils/parsing_opts.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_opts.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_opts.py
new file mode 100644
index 00000000..78a0ab1f
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_opts.py
@@ -0,0 +1,192 @@
+import json
+import re
+
+TEXT_CODES = {'bold': {'start': '\x1b[1m',
+ 'end': '\x1b[22m'},
+ 'cyan': {'start': '\x1b[36m',
+ 'end': '\x1b[39m'},
+ 'blue': {'start': '\x1b[34m',
+ 'end': '\x1b[39m'},
+ 'red': {'start': '\x1b[31m',
+ 'end': '\x1b[39m'},
+ 'magenta': {'start': '\x1b[35m',
+ 'end': '\x1b[39m'},
+ 'green': {'start': '\x1b[32m',
+ 'end': '\x1b[39m'},
+ 'yellow': {'start': '\x1b[33m',
+ 'end': '\x1b[39m'},
+ 'underline': {'start': '\x1b[4m',
+ 'end': '\x1b[24m'}}
+
+class TextCodesStripper:
+ keys = [re.escape(v['start']) for k,v in TEXT_CODES.iteritems()]
+ keys += [re.escape(v['end']) for k,v in TEXT_CODES.iteritems()]
+ pattern = re.compile("|".join(keys))
+
+ @staticmethod
+ def strip (s):
+ return re.sub(TextCodesStripper.pattern, '', s)
+
+def format_num (size, suffix = "", compact = True, opts = ()):
+ txt = "NaN"
+
+ if type(size) == str:
+ return "N/A"
+
+ u = ''
+
+ if compact:
+ for unit in ['','K','M','G','T','P']:
+ if abs(size) < 1000.0:
+ u = unit
+ break
+ size /= 1000.0
+
+ if isinstance(size, float):
+ txt = "%3.2f" % (size)
+ else:
+ txt = "{:,}".format(size)
+
+ if u or suffix:
+ txt += " {:}{:}".format(u, suffix)
+
+ if isinstance(opts, tuple):
+ return format_text(txt, *opts)
+ else:
+ return format_text(txt, (opts))
+
+
+
+def format_time (t_sec):
+ if t_sec < 0:
+ return "infinite"
+
+ if t_sec < 1:
+ # low numbers
+ for unit in ['ms', 'usec', 'ns']:
+ t_sec *= 1000.0
+ if t_sec >= 1.0:
+ return '{:,.2f} [{:}]'.format(t_sec, unit)
+
+ return "NaN"
+
+ else:
+ # seconds
+ if t_sec < 60.0:
+ return '{:,.2f} [{:}]'.format(t_sec, 'sec')
+
+ # minutes
+ t_sec /= 60.0
+ if t_sec < 60.0:
+ return '{:,.2f} [{:}]'.format(t_sec, 'minutes')
+
+ # hours
+ t_sec /= 60.0
+ if t_sec < 24.0:
+ return '{:,.2f} [{:}]'.format(t_sec, 'hours')
+
+ # days
+ t_sec /= 24.0
+ return '{:,.2f} [{:}]'.format(t_sec, 'days')
+
+
+def format_percentage (size):
+ return "%0.2f %%" % (size)
+
+def bold(text):
+ return text_attribute(text, 'bold')
+
+
+def cyan(text):
+ return text_attribute(text, 'cyan')
+
+
+def blue(text):
+ return text_attribute(text, 'blue')
+
+
+def red(text):
+ return text_attribute(text, 'red')
+
+
+def magenta(text):
+ return text_attribute(text, 'magenta')
+
+
+def green(text):
+ return text_attribute(text, 'green')
+
+def yellow(text):
+ return text_attribute(text, 'yellow')
+
+def underline(text):
+ return text_attribute(text, 'underline')
+
+
+def text_attribute(text, attribute):
+ if isinstance(text, str):
+ return "{start}{txt}{stop}".format(start=TEXT_CODES[attribute]['start'],
+ txt=text,
+ stop=TEXT_CODES[attribute]['end'])
+ elif isinstance(text, unicode):
+ return u"{start}{txt}{stop}".format(start=TEXT_CODES[attribute]['start'],
+ txt=text,
+ stop=TEXT_CODES[attribute]['end'])
+ else:
+ raise Exception("not a string")
+
+
+FUNC_DICT = {'blue': blue,
+ 'bold': bold,
+ 'green': green,
+ 'yellow': yellow,
+ 'cyan': cyan,
+ 'magenta': magenta,
+ 'underline': underline,
+ 'red': red}
+
+
+def format_text(text, *args):
+ return_string = text
+ for i in args:
+ func = FUNC_DICT.get(i)
+ if func:
+ return_string = func(return_string)
+
+ return return_string
+
+
+def format_threshold (value, red_zone, green_zone):
+ if value >= red_zone[0] and value <= red_zone[1]:
+ return format_text("{0}".format(value), 'red')
+
+ if value >= green_zone[0] and value <= green_zone[1]:
+ return format_text("{0}".format(value), 'green')
+
+ return "{0}".format(value)
+
+# pretty print for JSON
+def pretty_json (json_str, use_colors = True):
+ pretty_str = json.dumps(json.loads(json_str), indent = 4, separators=(',', ': '), sort_keys = True)
+
+ if not use_colors:
+ return pretty_str
+
+ try:
+ # int numbers
+ pretty_str = re.sub(r'([ ]*:[ ]+)(\-?[1-9][0-9]*[^.])',r'\1{0}'.format(blue(r'\2')), pretty_str)
+ # float
+ pretty_str = re.sub(r'([ ]*:[ ]+)(\-?[1-9][0-9]*\.[0-9]+)',r'\1{0}'.format(magenta(r'\2')), pretty_str)
+ # # strings
+ #
+ pretty_str = re.sub(r'([ ]*:[ ]+)("[^"]*")',r'\1{0}'.format(red(r'\2')), pretty_str)
+ pretty_str = re.sub(r"('[^']*')", r'{0}\1{1}'.format(TEXT_CODES['magenta']['start'],
+ TEXT_CODES['red']['start']), pretty_str)
+ except :
+ pass
+
+ return pretty_str
+
+
+if __name__ == "__main__":
+ pass
diff --git a/scripts/automation/trex_control_plane/client_utils/text_tables.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_tables.py
index d8928da8..07753fda 100644
--- a/scripts/automation/trex_control_plane/client_utils/text_tables.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/text_tables.py
@@ -1,7 +1,5 @@
-
-import external_packages
from texttable import Texttable
-from common.text_opts import format_text
+from text_opts import format_text
class TRexTextTable(Texttable):