| author    | imarom <imarom@cisco.com> | 2016-02-08 06:08:14 -0500 |
|-----------|---------------------------|---------------------------|
| committer | imarom <imarom@cisco.com> | 2016-02-08 06:08:14 -0500 |
| commit    | 995267db77f5554d5228697b8b2a862b51859fe6 (patch) | |
| tree      | 1a44007a59d8cabacab0690da515a68c3c25e7ac /scripts | |
| parent    | 69e5a5c6b94175ece07b247af1b5ca6c0cfdf0e9 (diff) | |
first refactor
Diffstat (limited to 'scripts')
| Mode | File | Lines changed |
|------|------|---------------|
| -rw-r--r-- | scripts/api/stl/examples/stl_bi_dir_flows.py | 7 |
| -rw-r--r-- | scripts/api/stl/examples/stl_imix.py | 19 |
| -rw-r--r-- | scripts/api/stl/examples/stl_path.py | 2 |
| -rw-r--r-- | scripts/api/stl/examples/stl_run_udp_simple.py | 5 |
| -rw-r--r-- | scripts/api/stl/examples/stl_simple_burst.py | 13 |
| -rw-r--r-- | scripts/api/stl/trex_stl_api.py | 44 |
| -rwxr-xr-x | scripts/automation/trex_control_plane/client_utils/packet_builder.py | 1209 |
| -rw-r--r-- | scripts/automation/trex_control_plane/client_utils/text_tables.py | 4 |
| -rwxr-xr-x | scripts/automation/trex_control_plane/client_utils/yaml_utils.py | 1 |
| -rwxr-xr-x | scripts/automation/trex_control_plane/common/trex_streams.py | 526 |
| -rw-r--r-- | scripts/automation/trex_control_plane/console/__init__.py | 0 |
| -rwxr-xr-x | scripts/automation/trex_control_plane/console/trex_console.py | 16 |
| -rw-r--r-- | scripts/automation/trex_control_plane/console/trex_tui.py | 14 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/__init__.py | 0 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/api.py | 31 |
| -rw-r--r--[-rwxr-xr-x] | scripts/automation/trex_control_plane/stl/rpc_defaults.yaml (renamed from scripts/automation/trex_control_plane/common/rpc_defaults.yaml) | 248 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_async_client.py (renamed from scripts/automation/trex_control_plane/client/trex_async_client.py) | 18 |
| -rw-r--r--[-rwxr-xr-x] | scripts/automation/trex_control_plane/stl/trex_stl_client.py (renamed from scripts/automation/trex_control_plane/client/trex_stateless_client.py) | 87 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_exceptions.py (renamed from scripts/automation/trex_control_plane/common/trex_stl_exceptions.py) | 3 |
| -rw-r--r--[-rwxr-xr-x] | scripts/automation/trex_control_plane/stl/trex_stl_jsonrpc_client.py (renamed from scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py) | 7 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_packet_builder_interface.py (renamed from scripts/automation/trex_control_plane/client_utils/packet_builder_interface.py) | 0 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_packet_builder_scapy.py (renamed from scripts/automation/trex_control_plane/client_utils/scapy_packet_builder.py) | 3 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_port.py (renamed from scripts/automation/trex_control_plane/client/trex_port.py) | 30 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_sim.py (renamed from scripts/automation/trex_control_plane/client/trex_stateless_sim.py) | 42 |
| -rw-r--r--[-rwxr-xr-x] | scripts/automation/trex_control_plane/stl/trex_stl_stats.py (renamed from scripts/automation/trex_control_plane/common/trex_stats.py) | 7 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_std.py (renamed from scripts/api/stl/trex_stl_lib.py) | 10 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_streams.py | 230 |
| -rw-r--r-- | scripts/automation/trex_control_plane/stl/trex_stl_types.py (renamed from scripts/automation/trex_control_plane/common/trex_types.py) | 0 |
| -rw-r--r-- | scripts/automation/wrap_stl_console.py | 3 |
| -rw-r--r-- | scripts/automation/wrap_stl_sim.py | 4 |
| -rwxr-xr-x | scripts/stl-sim | 2 |
| -rw-r--r-- | scripts/stl/profiles/imix.py | 10 |
| -rw-r--r-- | scripts/stl/profiles/syn_attack.py | 13 |
| -rw-r--r-- | scripts/stl/profiles/udp_1pkt.py | 11 |
| -rw-r--r-- | scripts/stl/profiles/udp_1pkt_tuple_gen.py | 13 |
| -rw-r--r-- | scripts/stl/profiles/udp_rand_len_9k.py | 11 |
| -rwxr-xr-x | scripts/trex-console | 2 |
| -rw-r--r-- | scripts/yaml/burst_1000_pkt.yaml (renamed from scripts/stl/burst_1000_pkt.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/burst_1pkt.yaml (renamed from scripts/stl/burst_1pkt.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/burst_1pkt_1burst.yaml (renamed from scripts/stl/burst_1pkt_1burst.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/burst_1pkt_vm.yaml (renamed from scripts/stl/burst_1pkt_vm.yaml) | 0 |
| -rw-r--r--[-rwxr-xr-x] | scripts/yaml/imix_1pkt.yaml (renamed from scripts/stl/imix_1pkt.yaml) | 2 |
| -rw-r--r-- | scripts/yaml/imix_1pkt_2.yaml (renamed from scripts/stl/imix_1pkt_2.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/imix_1pkt_tuple_gen.yaml (renamed from scripts/stl/imix_1pkt_tuple_gen.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/imix_1pkt_vm.yaml (renamed from scripts/stl/imix_1pkt_vm.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/imix_1pkt_vm2.yaml (renamed from scripts/stl/imix_1pkt_vm2.yaml) | 0 |
| -rw-r--r--[-rwxr-xr-x] | scripts/yaml/imix_2pkt.yaml (renamed from scripts/stl/imix_2pkt.yaml) | 0 |
| -rw-r--r--[-rwxr-xr-x] | scripts/yaml/imix_3pkt.yaml (renamed from scripts/stl/imix_3pkt.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/imix_3pkt_vm.yaml (renamed from scripts/stl/imix_3pkt_vm.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/imix_scale_1000.yaml (renamed from scripts/stl/imix_scale_1000.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/imix_scale_300.yaml (renamed from scripts/stl/imix_scale_300.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/ipv4_udp_9000.pcap (renamed from scripts/stl/ipv4_udp_9000.pcap) | bin 9082 -> 9082 bytes |
| -rw-r--r-- | scripts/yaml/ipv4_udp_9k.pcap (renamed from scripts/stl/ipv4_udp_9k.pcap) | bin 9270 -> 9270 bytes |
| -rw-r--r--[-rwxr-xr-x] | scripts/yaml/ipv4_udp_9k_burst_10.pcap (renamed from scripts/stl/ipv4_udp_9k_burst_10.pcap) | bin 92484 -> 92484 bytes |
| -rw-r--r-- | scripts/yaml/syn_attack_sample.yaml (renamed from scripts/stl/syn_attack_sample.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/syn_packet.pcap (renamed from scripts/stl/syn_packet.pcap) | bin 100 -> 100 bytes |
| -rw-r--r-- | scripts/yaml/udp_1518B_no_crc.pcap (renamed from scripts/stl/udp_1518B_no_crc.pcap) | bin 1554 -> 1554 bytes |
| -rw-r--r-- | scripts/yaml/udp_594B_no_crc.pcap (renamed from scripts/stl/udp_594B_no_crc.pcap) | bin 630 -> 630 bytes |
| -rw-r--r-- | scripts/yaml/udp_64B_no_crc.pcap (renamed from scripts/stl/udp_64B_no_crc.pcap) | bin 100 -> 100 bytes |
| -rw-r--r-- | scripts/yaml/udp_rand_size.yaml (renamed from scripts/stl/udp_rand_size.yaml) | 0 |
| -rw-r--r-- | scripts/yaml/udp_rand_size_9k.yaml (renamed from scripts/stl/udp_rand_size_9k.yaml) | 0 |
61 files changed, 512 insertions, 2135 deletions
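The core of the refactor, visible in the example diffs below, is that each example's `sys.path` boilerplate and the old `trex_stl_api.py` shim are replaced by a tiny `stl_path.py` helper plus a single public package, `trex_control_plane.stl.api`, and profiles are now loaded from a file via `load_profile()` instead of importing classes such as `STLImix`. A minimal sketch of the new usage pattern, assembled from those diffs; the port number, the bare `STLClient()` constructor, and the error/cleanup handling are assumptions mirroring the original examples rather than lines shown in this commit:

```python
import stl_path                               # new 2-line helper: prepends ../../../automation to sys.path
from trex_control_plane.stl.api import *      # replaces 'from trex_stl_api import *'

c = STLClient()                               # assumption: default constructor, local server
try:
    c.connect()

    # profiles are now loaded from a profile file instead of importing STLImix
    streams = c.load_profile('../../../stl/profiles/imix.py')
    c.add_streams(streams, ports = [0])       # port 0 is a placeholder

    c.clear_stats()
except STLError as e:                         # STLError is re-exported by the new api module
    print(e)
finally:
    c.disconnect()
```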
diff --git a/scripts/api/stl/examples/stl_bi_dir_flows.py b/scripts/api/stl/examples/stl_bi_dir_flows.py index 7d090345..46b84c6e 100644 --- a/scripts/api/stl/examples/stl_bi_dir_flows.py +++ b/scripts/api/stl/examples/stl_bi_dir_flows.py @@ -1,8 +1,5 @@ -# include the path of trex_stl_api.py -import sys -sys.path.insert(0, "../") - -from trex_stl_api import * +import stl_path +from trex_control_plane.stl.api import * import time import json diff --git a/scripts/api/stl/examples/stl_imix.py b/scripts/api/stl/examples/stl_imix.py index 01eec9b4..c083a207 100644 --- a/scripts/api/stl/examples/stl_imix.py +++ b/scripts/api/stl/examples/stl_imix.py @@ -1,10 +1,5 @@ -# include the path of trex_stl_api.py -import sys -sys.path.insert(0, "../") - -from trex_stl_api import * -from trex_stl_lib import * -from profiles.imix import STLImix +import stl_path +from trex_control_plane.stl.api import * import time import json @@ -26,9 +21,6 @@ def imix_test (): try: - # base profile - imix - profile = STLImix() - # connect to server c.connect() @@ -42,9 +34,12 @@ def imix_test (): dir_0 = table['dir'][0] dir_1 = table['dir'][1] + # load IMIX profile + streams = c.load_profile('../../../stl/profiles/imix.py') + # add both streams to ports - c.add_streams(profile.get_streams(direction = 0), ports = dir_0) - c.add_streams(profile.get_streams(direction = 1), ports = dir_1) + c.add_streams(streams, ports = dir_0) + c.add_streams(streams, ports = dir_1) # clear the stats before injecting c.clear_stats() diff --git a/scripts/api/stl/examples/stl_path.py b/scripts/api/stl/examples/stl_path.py new file mode 100644 index 00000000..e0056585 --- /dev/null +++ b/scripts/api/stl/examples/stl_path.py @@ -0,0 +1,2 @@ +import sys +sys.path.insert(0, "../../../automation") diff --git a/scripts/api/stl/examples/stl_run_udp_simple.py b/scripts/api/stl/examples/stl_run_udp_simple.py index 88dba5ac..47db1b5a 100644 --- a/scripts/api/stl/examples/stl_run_udp_simple.py +++ b/scripts/api/stl/examples/stl_run_udp_simple.py @@ -10,9 +10,8 @@ Compare Rx-pkts to TX-pkts assuming ports are loopback """ -sys.path.insert(0, "../") - -from trex_stl_api import * +import stl_path +from trex_control_plane.stl.api import * H_VER = "trex-x v0.1 " diff --git a/scripts/api/stl/examples/stl_simple_burst.py b/scripts/api/stl/examples/stl_simple_burst.py index 0de4df89..2ccd01c4 100644 --- a/scripts/api/stl/examples/stl_simple_burst.py +++ b/scripts/api/stl/examples/stl_simple_burst.py @@ -1,8 +1,6 @@ -import sys -sys.path.insert(0, "../") +import stl_path +from trex_control_plane.stl.api import * -from trex_stl_api import * -from scapy.all import * import time def simple_burst (): @@ -13,7 +11,6 @@ def simple_burst (): passed = True try: - pkt = STLPktBuilder(pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)/IP()/'a_payload_example') # create two bursts and link them @@ -24,7 +21,11 @@ def simple_burst (): mode = STLTXSingleBurst(total_pkts = 3000), next_stream_id = s1.get_id()) - + STLStream.dump_to_yaml([s1, s2], '1.yaml') + stream_list = STLStream.load_from_yaml('1.yaml') + print s2 + print stream_list[1] + exit(0) # connect to server c.connect() diff --git a/scripts/api/stl/trex_stl_api.py b/scripts/api/stl/trex_stl_api.py deleted file mode 100644 index 4dae4dce..00000000 --- a/scripts/api/stl/trex_stl_api.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import sys -import time -import json -import math - -# update the import path to include the stateless client -root_path = os.path.dirname(os.path.abspath(__file__)) - 
-sys.path.insert(0, os.path.join(root_path, '../../automation/trex_control_plane/')) -sys.path.insert(0, os.path.join(root_path, '../../stl/')) - -# aliasing -import common.trex_streams -from client_utils.scapy_packet_builder import * -import common.trex_stl_exceptions -import client.trex_stateless_client -import client.trex_stateless_sim - -# client and errors -STLClient = client.trex_stateless_client.STLClient -STLError = common.trex_stl_exceptions.STLError - -# streams -STLStream = common.trex_streams.STLStream -STLTXCont = common.trex_streams.STLTXCont -STLTXSingleBurst = common.trex_streams.STLTXSingleBurst -STLTXMultiBurst = common.trex_streams.STLTXMultiBurst - -# packet builder -STLPktBuilder = CScapyTRexPktBuilder - -# VM -STLVmFlowVar = CTRexVmDescFlowVar -STLVmWriteFlowVar = CTRexVmDescWrFlowVar -STLVmFixIpv4 = CTRexVmDescFixIpv4 -STLVmTrimPktSize = CTRexVmDescTrimPktSize -STLVmTupleGen = CTRexVmDescTupleGen - - -# simulator -STLSim = client.trex_stateless_sim.STLSim - - diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py deleted file mode 100755 index f9031436..00000000 --- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py +++ /dev/null @@ -1,1209 +0,0 @@ -#!/router/bin/python - -import external_packages -import dpkt -import socket -import binascii -import copy -import random -import string -import struct -import re -import itertools -from abc import ABCMeta, abstractmethod -from collections import namedtuple -import base64 - -from packet_builder_interface import CTrexPktBuilderInterface - -class CTRexPktBuilder(CTrexPktBuilderInterface): - """ - This class defines the TRex API of building a packet using dpkt package. - Using this class the user can also define how TRex will handle the packet by specifying the VM setting. - """ - def __init__(self, max_pkt_size=dpkt.ethernet.ETH_LEN_MAX): - """ - Instantiate a CTRexPktBuilder object - - :parameters: - None - - """ - super(CTRexPktBuilder, self).__init__() - self._packet = None - self._pkt_by_hdr = {} - self._pkt_top_layer = None - self._max_pkt_size = max_pkt_size - self.vm = CTRexPktBuilder.CTRexVM() - self.metadata = "" - - def clone (self): - return copy.deepcopy(self) - - def add_pkt_layer(self, layer_name, pkt_layer): - """ - This method adds additional header to the already existing packet - - :parameters: - layer_name: str - a string representing the name of the layer. - Example: "l2", "l4_tcp", etc. - - pkt_layer : dpkt.Packet obj - a dpkt object, generally from higher layer, that will be added on top of existing layer. - - :raises: - + :exc:`ValueError`, in case the desired layer_name already exists. - - """ - assert isinstance(pkt_layer, dpkt.Packet) - if layer_name in self._pkt_by_hdr: - raise ValueError("Given layer name '{0}' already exists.".format(layer_name)) - else: - dup_pkt = copy.copy(pkt_layer) # using copy of layer to avoid cyclic packets that may lead to infinite loop - if not self._pkt_top_layer: # this is the first header added - self._packet = dup_pkt - else: - self._pkt_top_layer.data = dup_pkt - self._pkt_top_layer = dup_pkt - self._pkt_by_hdr[layer_name] = dup_pkt - return - - def set_ip_layer_addr(self, layer_name, attr, ip_addr, ip_type="ipv4"): - """ - This method sets the IP address fields of an IP header (source or destination, for both IPv4 and IPv6) - using a human readable addressing representation. 
- - :parameters: - layer_name: str - a string representing the name of the layer. - Example: "l3_ip", etc. - - attr: str - a string representation of the sub-field to be set: - - + "src" for source - + "dst" for destination - - ip_addr: str - a string representation of the IP address to be set. - Example: "10.0.0.1" for IPv4, or "5001::DB8:1:3333:1:1" for IPv6 - - ip_type : str - a string representation of the IP version to be set: - - + "ipv4" for IPv4 - + "ipv6" for IPv6 - - Default: **ipv4** - - :raises: - + :exc:`ValueError`, in case the desired layer_name is not an IP layer - + :exc:`KeyError`, in case the desired layer_name does not exists. - - """ - try: - layer = self._pkt_by_hdr[layer_name.lower()] - if not (isinstance(layer, dpkt.ip.IP) or isinstance(layer, dpkt.ip6.IP6)): - raise ValueError("The specified layer '{0}' is not of IPv4/IPv6 type.".format(layer_name)) - else: - decoded_ip = CTRexPktBuilder._decode_ip_addr(ip_addr, ip_type) - setattr(layer, attr, decoded_ip) - except KeyError: - raise KeyError("Specified layer '{0}' doesn't exist on packet.".format(layer_name)) - - def set_ipv6_layer_addr(self, layer_name, attr, ip_addr): - """ - This method sets the IPv6 address fields of an IP header (source or destination) - - :parameters: - layer_name: str - a string representing the name of the layer. - Example: "l3_ip", etc. - - attr: str - a string representation of the sub-field to be set: - - + "src" for source - + "dst" for destination - - ip_addr: str - a string representation of the IP address to be set. - Example: "5001::DB8:1:3333:1:1" - - :raises: - + :exc:`ValueError`, in case the desired layer_name is not an IPv6 layer - + :exc:`KeyError`, in case the desired layer_name does not exists. - - """ - self.set_ip_layer_addr(layer_name, attr, ip_addr, ip_type="ipv6") - - def set_eth_layer_addr(self, layer_name, attr, mac_addr): - """ - This method sets the ethernet address fields of an Ethernet header (source or destination) - using a human readable addressing representation. - - :parameters: - layer_name: str - a string representing the name of the layer. - Example: "l2", etc. - - attr: str - a string representation of the sub-field to be set: - + "src" for source - + "dst" for destination - - mac_addr: str - a string representation of the MAC address to be set. - Example: "00:de:34:ef:2e:f4". - - :raises: - + :exc:`ValueError`, in case the desired layer_name is not an Ethernet layer - + :exc:`KeyError`, in case the desired layer_name does not exists. - - """ - try: - layer = self._pkt_by_hdr[layer_name.lower()] - if not isinstance(layer, dpkt.ethernet.Ethernet): - raise ValueError("The specified layer '{0}' is not of Ethernet type.".format(layer_name)) - else: - decoded_mac = CTRexPktBuilder._decode_mac_addr(mac_addr) - setattr(layer, attr, decoded_mac) - except KeyError: - raise KeyError("Specified layer '{0}' doesn't exist on packet.".format(layer_name)) - - def set_layer_attr(self, layer_name, attr, val, toggle_bit=False): - """ - This method enables the user to change a value of a previously defined packet layer. - This method isn't to be used to set the data attribute of a packet with payload. - Use :func:`packet_builder.CTRexPktBuilder.set_payload` instead. - - :parameters: - layer_name: str - a string representing the name of the layer. - Example: "l2", "l4_tcp", etc. - - attr : str - a string representing the attribute to be changed on desired layer - - val : - value of attribute. 
- - toggle_bit : bool - Indicating if trying to set a specific bit of a field, such as "do not fragment" bit of IP layer. - - Default: **False** - - :raises: - + :exc:`KeyError`, in case of missing layer (the desired layer isn't part of packet) - + :exc:`ValueError`, in case invalid attribute to the specified layer. - - """ - try: - layer = self._pkt_by_hdr[layer_name.lower()] - if attr == 'data' and not isinstance(val, dpkt.Packet): - # Don't allow setting 'data' attribute - raise ValueError("Set a data attribute with object that is not dpkt.Packet is not allowed using " - "set_layer_attr method.\nUse set_payload method instead.") - if hasattr(layer, attr): - if toggle_bit: - setattr(layer, attr, val | getattr(layer, attr, 0)) - else: - setattr(layer, attr, val) - if attr == 'data': - # re-evaluate packet from the start, possible broken link between layers - self._reevaluate_packet(layer_name.lower()) - else: - raise ValueError("Given attr name '{0}' doesn't exists on specified layer ({1}).".format(layer_name, - attr)) - except KeyError: - raise KeyError("Specified layer '{0}' doesn't exist on packet.".format(layer_name)) - - def set_layer_bit_attr(self, layer_name, attr, val): - """ - This method enables the user to set the value of a field smaller that 1 Byte in size. - This method isn't used to set full-sized fields value (>= 1 byte). - Use :func:`packet_builder.CTRexPktBuilder.set_layer_attr` instead. - - :parameters: - layer_name: str - a string representing the name of the layer. - Example: "l2", "l4_tcp", etc. - - attr : str - a string representing the attribute to be set on desired layer - - val : int - value of attribute. - This value will be set "ontop" of the existing value using bitwise "OR" operation. - - .. tip:: It is very useful to use dpkt constants to define the values of these fields. - - :raises: - + :exc:`KeyError`, in case of missing layer (the desired layer isn't part of packet) - + :exc:`ValueError`, in case invalid attribute to the specified layer. - - """ - return self.set_layer_attr(layer_name, attr, val, True) - - def set_pkt_payload(self, payload): - """ - This method sets a payload to the topmost layer of the generated packet. - This method isn't to be used to set another networking layer to the packet. - Use :func:`packet_builder.CTRexPktBuilder.set_layer_attr` instead. - - - :parameters: - payload: - a payload to be added to the packet at the topmost layer. - this object cannot be of type dpkt.Packet. - - :raises: - + :exc:`AttributeError`, in case no underlying header to host the payload. - - """ - assert isinstance(payload, str) - try: - self._pkt_top_layer.data = payload - except AttributeError: - raise AttributeError("The so far built packet doesn't contain an option for payload attachment.\n" - "Make sure to set appropriate underlying header before adding payload") - - def load_packet(self, packet): - """ - This method enables the user to change a value of a previously defined packet layer. - - :parameters: - packet: dpkt.Packet obj - a dpkt object that represents a packet. - - - :raises: - + :exc:`CTRexPktBuilder.IPAddressError`, in case invalid ip type option specified. 
- - """ - assert isinstance(packet, dpkt.Packet) - self._packet = copy.copy(packet) - - self._pkt_by_hdr.clear() - self._pkt_top_layer = self._packet - # analyze packet to layers - tmp_layer = self._packet - while True: - if isinstance(tmp_layer, dpkt.Packet): - layer_name = self._gen_layer_name(type(tmp_layer).__name__) - self._pkt_by_hdr[layer_name] = tmp_layer - self._pkt_top_layer = tmp_layer - try: - # check existence of upper layer - tmp_layer = tmp_layer.data - except AttributeError: - # this is the most upper header - self._pkt_by_hdr['pkt_final_payload'] = tmp_layer.data - break - else: - self._pkt_by_hdr['pkt_final_payload'] = tmp_layer - break - return - - def load_packet_from_pcap(self, pcap_path): - """ - This method loads a pcap file into a parsed packet builder object. - - :parameters: - pcap_path: str - a path to a pcap file, containing a SINGLE packet. - - :raises: - + :exc:`IOError`, in case provided path doesn't exists. - - """ - with open(pcap_path, 'r') as f: - pcap = dpkt.pcap.Reader(f) - first_packet = True - for _, buf in pcap: - # this is an iterator, can't evaluate the number of files in advance - if first_packet: - self.load_packet(dpkt.ethernet.Ethernet(buf)) - else: - raise ValueError("Provided pcap file contains more than single packet.") - # arrive here ONLY if pcap contained SINGLE packet - return - - def load_from_stream_obj(self, stream_obj): - self.load_packet_from_byte_list(stream_obj['packet']['binary']) - - - def load_packet_from_byte_list(self, byte_list): - - buf = base64.b64decode(byte_list) - # thn, load it based on dpkt parsing - self.load_packet(dpkt.ethernet.Ethernet(buf)) - - def get_packet(self, get_ptr=False): - """ - This method provides access to the built packet, as an instance or as a pointer to packet itself. - - :parameters: - get_ptr : bool - indicate whether to get a reference to packet or a copy. - Use only in advanced modes - if set to true, metadata for packet is cleared, and any further modification is not guaranteed. - - default value : False - - :return: - + the current packet built by CTRexPktBuilder object. - + None if packet is empty - - """ - if get_ptr: - self._pkt_by_hdr = {} - self._pkt_top_layer = None - return self._packet - else: - return copy.copy(self._packet) - - def get_packet_length(self): - return len(self._packet) - - def get_layer(self, layer_name): - """ - This method provides access to a specific layer of the packet, as a **copy of the layer instance**. - - :parameters: - layer_name : str - the name given to desired layer - - :return: - + a copy of the desired layer of the current packet if exists. 
- + None if no such layer - - """ - layer = self._pkt_by_hdr.get(layer_name) - return copy.copy(layer) if layer else None - - - # VM access methods - def set_vm_ip_range(self, ip_layer_name, ip_field, - ip_start, ip_end, operation, - ip_init = None, add_value = 0, - is_big_endian=True, val_size=4, - ip_type="ipv4", add_checksum_inst=True, - split = False): - - if ip_field not in ["src", "dst"]: - raise ValueError("set_vm_ip_range only available for source ('src') or destination ('dst') ip addresses") - # set differences between IPv4 and IPv6 - if ip_type == "ipv4": - ip_class = dpkt.ip.IP - ip_addr_size = val_size if val_size <= 4 else 4 - elif ip_type == "ipv6": - ip_class = dpkt.ip6.IP6 - ip_addr_size = val_size if val_size <= 8 else 4 - else: - raise CTRexPktBuilder.IPAddressError() - - self._verify_layer_prop(ip_layer_name, ip_class) - trim_size = ip_addr_size*2 - start_val = int(binascii.hexlify(CTRexPktBuilder._decode_ip_addr(ip_start, ip_type))[-trim_size:], 16) - end_val = int(binascii.hexlify(CTRexPktBuilder._decode_ip_addr(ip_end, ip_type))[-trim_size:], 16) - - if ip_init == None: - init_val = start_val - else: - init_val = int(binascii.hexlify(CTRexPktBuilder._decode_ip_addr(ip_init, ip_type))[-trim_size:], 16) - - - # All validations are done, start adding VM instructions - flow_var_name = "{layer}__{field}".format(layer=ip_layer_name, field=ip_field) - - hdr_offset, field_abs_offset = self._calc_offset(ip_layer_name, ip_field, ip_addr_size) - self.vm.add_flow_man_inst(flow_var_name, size=ip_addr_size, operation=operation, - init_value=init_val, - min_value=start_val, - max_value=end_val) - self.vm.add_write_flow_inst(flow_var_name, field_abs_offset) - self.vm.set_vm_off_inst_field(flow_var_name, "add_value", add_value) - self.vm.set_vm_off_inst_field(flow_var_name, "is_big_endian", is_big_endian) - if ip_type == "ipv4" and add_checksum_inst: - self.vm.add_fix_checksum_inst(self._pkt_by_hdr.get(ip_layer_name), hdr_offset) - - if split: - self.vm.set_split_by_var(flow_var_name) - - - def set_vm_eth_range(self, eth_layer_name, eth_field, - mac_init, mac_start, mac_end, add_value, - operation, val_size=4, is_big_endian=False): - if eth_field not in ["src", "dst"]: - raise ValueError("set_vm_eth_range only available for source ('src') or destination ('dst') eth addresses") - self._verify_layer_prop(eth_layer_name, dpkt.ethernet.Ethernet) - eth_addr_size = val_size if val_size <= 4 else 4 - trim_size = eth_addr_size*2 - init_val = int(binascii.hexlify(CTRexPktBuilder._decode_mac_addr(mac_init))[-trim_size:], 16) - start_val = int(binascii.hexlify(CTRexPktBuilder._decode_mac_addr(mac_start))[-trim_size:], 16) - end_val = int(binascii.hexlify(CTRexPktBuilder._decode_mac_addr(mac_end))[-trim_size:], 16) - # All validations are done, start adding VM instructions - flow_var_name = "{layer}__{field}".format(layer=eth_layer_name, field=eth_field) - hdr_offset, field_abs_offset = self._calc_offset(eth_layer_name, eth_field, eth_addr_size) - self.vm.add_flow_man_inst(flow_var_name, size=8, operation=operation, - init_value=init_val, - min_value=start_val, - max_value=end_val) - self.vm.add_write_flow_inst(flow_var_name, field_abs_offset) - self.vm.set_vm_off_inst_field(flow_var_name, "add_value", add_value) - self.vm.set_vm_off_inst_field(flow_var_name, "is_big_endian", is_big_endian) - - def set_vm_custom_range(self, layer_name, hdr_field, - init_val, start_val, end_val, add_val, val_size, - operation, is_big_endian=True, range_name="", - add_checksum_inst=True): - # verify input validity 
for init/start/end values - for val in [init_val, start_val, end_val]: - if not isinstance(val, int): - raise ValueError("init/start/end values are expected integers, but received type '{0}'". - format(type(val))) - self._verify_layer_prop(layer_name=layer_name, field_name=hdr_field) - if not range_name: - range_name = "{layer}__{field}".format(layer=layer_name, field=hdr_field) - trim_size = val_size*2 - hdr_offset, field_abs_offset = self._calc_offset(layer_name, hdr_field, val_size) - self.vm.add_flow_man_inst(range_name, size=val_size, operation=operation, - init_value=init_val, - min_value=start_val, - max_value=end_val) - self.vm.add_write_flow_inst(range_name, field_abs_offset) - self.vm.set_vm_off_inst_field(range_name, "add_value", add_val) - self.vm.set_vm_off_inst_field(range_name, "is_big_endian", is_big_endian) - if isinstance(self._pkt_by_hdr.get(layer_name), dpkt.ip.IP) and add_checksum_inst: - self.vm.add_fix_checksum_inst(self._pkt_by_hdr.get(layer_name), hdr_offset) - - def get_vm_data(self): - return self.vm.dump() - - def compile (self): - pass - - def dump_pkt(self, encode = True): - """ - Dumps the packet as a decimal array of bytes (each item x gets value between 0-255) - - :parameters: - encode : bool - Encode using base64. (disable for debug) - - Default: **True** - - :return: - + packet representation as array of bytes - - :raises: - + :exc:`CTRexPktBuilder.EmptyPacketError`, in case packet is empty. - - """ - if self._packet is None: - raise CTRexPktBuilder.EmptyPacketError() - - if encode: - return {"binary": base64.b64encode(str(self._packet)), - "meta": self.metadata} - return {"binary": str(self._packet), - "meta": self.metadata} - - - def dump_pkt_to_pcap(self, file_path, ts=None): - """ - Dumps the packet as a decimal array of bytes (each item x gets value between 0-255) - - :parameters: - file_path : str - a path (including filename) to which to write to pcap file to. - - ts : int - a timestamp to attach to the packet when dumped to pcap file. - if ts in None, then time.time() is used to set the timestamp. - - Default: **None** - - :return: - None - - :raises: - + :exc:`CTRexPktBuilder.EmptyPacketError`, in case packet is empty. 
- - """ - if self._packet is None: - raise CTRexPktBuilder.EmptyPacketError() - try: - with open(file_path, 'wb') as f: - pcap_wr = dpkt.pcap.Writer(f) - pcap_wr.writepkt(self._packet, ts) - return - except IOError: - raise IOError(2, "The provided path could not be accessed") - - def get_packet_layers(self, depth_limit=Ellipsis): - if self._packet is None: - raise CTRexPktBuilder.EmptyPacketError() - cur_layer = self._packet - layer_types = [] - if depth_limit == Ellipsis: - iterator = itertools.count(1) - else: - iterator = xrange(depth_limit) - for _ in iterator: - # append current layer type - if isinstance(cur_layer, dpkt.Packet): - layer_types.append(type(cur_layer).__name__) - else: - # if not dpkt layer, refer as payload - layer_types.append("PLD") - # advance to next layer - if not hasattr(cur_layer, "data"): - break - else: - cur_layer = cur_layer.data - return layer_types - - def export_pkt(self, file_path, link_pcap=False, pcap_name=None, pcap_ts=None): - pass - - # ----- internal methods ----- # - def _reevaluate_packet(self, layer_name): - cur_layer = self._packet - known_layers = set(self._pkt_by_hdr.keys()) - found_layers = set() - while True: - pointing_layer_name = self._find_pointing_layer(known_layers, cur_layer) - found_layers.add(pointing_layer_name) - if self._pkt_by_hdr[layer_name] is cur_layer: - self._pkt_top_layer = cur_layer - disconnected_layers = known_layers.difference(found_layers) - # remove disconnected layers - for layer in disconnected_layers: - self._pkt_by_hdr.pop(layer) - break - else: - cur_layer = cur_layer.data - - def _gen_layer_name(self, layer_class_name): - assert isinstance(layer_class_name, str) - layer_name = layer_class_name.lower() - idx = 1 - while True: - tmp_name = "{name}_{id}".format(name=layer_name, id=idx) - if tmp_name not in self._pkt_by_hdr: - return tmp_name - else: - idx += 1 - - def _find_pointing_layer(self, known_layers, layer_obj): - assert isinstance(known_layers, set) - for layer in known_layers: - if self._pkt_by_hdr[layer] is layer_obj: - return layer - - def _calc_offset(self, layer_name, hdr_field, hdr_field_size): - pkt_header = self._pkt_by_hdr.get(layer_name) - hdr_offset = len(self._packet) - len(pkt_header) - inner_hdr_offsets = [] - for field in pkt_header.__hdr__: - if field[0] == hdr_field: - field_size = struct.calcsize(field[1]) - if field_size == hdr_field_size: - break - elif field_size < hdr_field_size: - raise CTRexPktBuilder.PacketLayerError(layer_name, - "The specified field '{0}' size is smaller than given range" - " size ('{1}')".format(hdr_field, hdr_field_size)) - else: - inner_hdr_offsets.append(field_size - hdr_field_size) - break - else: - inner_hdr_offsets.append(struct.calcsize(field[1])) - return hdr_offset, hdr_offset + sum(inner_hdr_offsets) - - def _verify_layer_prop(self, layer_name, layer_type=None, field_name=None): - if layer_name not in self._pkt_by_hdr: - raise CTRexPktBuilder.PacketLayerError(layer_name) - pkt_layer = self._pkt_by_hdr.get(layer_name) - if layer_type: - # check for layer type - if not isinstance(pkt_layer, layer_type): - raise CTRexPktBuilder.PacketLayerTypeError(layer_name, type(pkt_layer), layer_type) - if field_name and not hasattr(pkt_layer, field_name): - # check if field exists on certain header - raise CTRexPktBuilder.PacketLayerError(layer_name, "The specified field '{0}' does not exists on " - "given packet layer ('{1}')".format(field_name, - layer_name)) - return - - @property - def payload_gen(self): - return CTRexPktBuilder.CTRexPayloadGen(self._packet, 
self._max_pkt_size) - - @staticmethod - def _decode_mac_addr(mac_addr): - """ - Static method to test for MAC address validity. - - :parameters: - mac_addr : str - a string representing an MAC address, separated by ':' or '-'. - - examples: '00:de:34:ef:2e:f4', '00-de-34-ef-2e-f4 - - :return: - + an hex-string representation of the MAC address. - for example, ip 00:de:34:ef:2e:f4 will return '\x00\xdeU\xef.\xf4' - - :raises: - + :exc:`CTRexPktBuilder.MACAddressError`, in case invalid ip type option specified. - - """ - tmp_mac = mac_addr.lower().replace('-', ':') - if re.match("[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$", tmp_mac): - return binascii.unhexlify(tmp_mac.replace(':', '')) - # another option for both Python 2 and 3: - # codecs.decode(tmp_mac.replace(':', ''), 'hex') - else: - raise CTRexPktBuilder.MACAddressError() - - @staticmethod - def _decode_ip_addr(ip_addr, ip_type): - """ - Static method to test for IPv4/IPv6 address validity. - - :parameters: - ip_addr : str - a string representing an IP address (IPv4/IPv6) - - ip_type : str - The type of IP to be checked. - Valid types: "ipv4", "ipv6". - - :return: - + an hex-string representation of the ip address. - for example, ip 1.2.3.4 will return '\x01\x02\x03\x04' - - :raises: - + :exc:`CTRexPktBuilder.IPAddressError`, in case invalid ip type option specified. - - """ - if ip_type == "ipv4": - try: - return socket.inet_pton(socket.AF_INET, ip_addr) - except AttributeError: # no inet_pton here, sorry - # try: - return socket.inet_aton(ip_addr) - # except socket.error: - # return False - # return ip_addr.count('.') == 3 - except socket.error: # not a valid address - raise CTRexPktBuilder.IPAddressError() - elif ip_type == "ipv6": - try: - return socket.inet_pton(socket.AF_INET6, ip_addr) - except socket.error: # not a valid address - raise CTRexPktBuilder.IPAddressError() - else: - raise CTRexPktBuilder.IPAddressError() - - # ------ private classes ------ # - class CTRexPayloadGen(object): - - def __init__(self, packet_ref, max_pkt_size): - self._pkt_ref = packet_ref - self._max_pkt_size = max_pkt_size - - def gen_random_str(self): - gen_length = self._calc_gen_length() - # return a string of size gen_length bytes, to pad the packet to its max_size - return ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) - for _ in range(gen_length)) - - def gen_repeat_ptrn(self, ptrn_to_repeat): - gen_length = self._calc_gen_length() - if isinstance(ptrn_to_repeat, str): - # generate repeated string - return (ptrn_to_repeat * (gen_length/len(ptrn_to_repeat) + 1))[:gen_length] - elif isinstance(ptrn_to_repeat, int): - ptrn = binascii.unhexlify(hex(ptrn_to_repeat)[2:]) - return (ptrn * (gen_length/len(ptrn) + 1))[:gen_length] - elif isinstance(ptrn_to_repeat, tuple): - if not all((isinstance(x, int) and (x < 255) and (x >= 0)) - for x in ptrn_to_repeat): - raise ValueError("All numbers in tuple must be in range 0 <= number <= 255 ") - # generate repeated sequence - to_pack = (ptrn_to_repeat * (gen_length/len(ptrn_to_repeat) + 1))[:gen_length] - return struct.pack('B'*gen_length, *to_pack) - else: - raise ValueError("Given ptrn_to_repeat argument type ({0}) is illegal.". - format(type(ptrn_to_repeat))) - - def _calc_gen_length(self): - return self._max_pkt_size - len(self._pkt_ref) - - class CTRexVM(object): - """ - This class defines the TRex VM which represents how TRex will regenerate packets. - The packets will be regenerated based on the built packet containing this class. 
- """ - InstStore = namedtuple('InstStore', ['type', 'inst']) - - def __init__(self): - """ - Instantiate a CTRexVM object - - :parameters: - None - """ - super(CTRexPktBuilder.CTRexVM, self).__init__() - self.vm_variables = {} - self._inst_by_offset = {} # this data structure holds only offset-related instructions, ordered in tuples - self._off_inst_by_name = {} - self.split_by_var = '' - - def set_vm_var_field(self, var_name, field_name, val, offset_inst=False): - """ - Set VM variable field. Only existing variables are allowed to be changed. - - :parameters: - var_name : str - a string representing the name of the VM variable to be changed. - field_name : str - a string representing the field name of the VM variable to be changed. - val : - a value to be applied to field_name field of the var_name VM variable. - - :raises: - + :exc:`KeyError`, in case invalid var_name has been specified. - + :exc:`CTRexPktBuilder.VMVarFieldTypeError`, in case mismatch between `val` and allowed type. - + :exc:`CTRexPktBuilder.VMVarValueError`, in case val isn't one of allowed options of field_name. - - """ - if offset_inst: - return self._off_inst_by_name[var_name].inst.set_field(field_name, val) - else: - return self.vm_variables[var_name].set_field(field_name, val) - - def set_vm_off_inst_field(self, var_name, field_name, val): - return self.set_vm_var_field(var_name, field_name, val, True) - - def add_flow_man_inst(self, name, **kwargs): - """ - Adds a new flow manipulation object to the VM instance. - - :parameters: - name : str - name of the manipulation, must be distinct. - Example: 'source_ip_change' - - **kwargs** : dict - optional, set flow_man fields on initialization (key = field_name, val = field_val). - Must be used with legit fields, see :func:`CTRexPktBuilder.CTRexVM.CTRexVMVariable.set_field`. - - :return: - None - - :raises: - + :exc:`CTRexPktBuilder.VMVarNameExistsError`, in case of desired flow_man name already taken. - + Exceptions from :func:`CTRexPktBuilder.CTRexVM.CTRexVMVariable.set_field` method. - Will rise when VM variables were misconfiguration. - """ - if name not in self.vm_variables: - self.vm_variables[name] = self.CTRexVMFlowVariable(name) - # try configuring VM instruction attributes - for (field, value) in kwargs.items(): - self.vm_variables[name].set_field(field, value) - else: - raise CTRexPktBuilder.VMVarNameExistsError(name) - - def add_fix_checksum_inst(self, linked_ipv4_obj, offset_to_obj=14, name=None): - # check if specified linked_ipv4_obj is indeed an ipv4 object - if not (isinstance(linked_ipv4_obj, dpkt.ip.IP)): - raise ValueError("The provided layer object is not of IPv4.") - if not name: - name = "checksum_{off}".format(off=offset_to_obj) # name will override previous checksum inst, OK - new_checksum_inst = self.CTRexVMChecksumInst(name, offset_to_obj) - # store the checksum inst in the end of the IP header (20 Bytes long) - inst = self.InstStore('checksum', new_checksum_inst) - self._inst_by_offset[offset_to_obj + 20] = inst - self._off_inst_by_name[name] = inst - - def add_write_flow_inst(self, name, pkt_offset, **kwargs): - if name not in self.vm_variables: - raise KeyError("Trying to add write_flow_var instruction to a not-exists VM flow variable ('{0}')". 
- format(name)) - else: - new_write_inst = self.CTRexVMWrtFlowVarInst(name, pkt_offset) - # try configuring VM instruction attributes - for (field, value) in kwargs.items(): - new_write_inst.set_field(field, value) - # add the instruction to the date-structure - inst = self.InstStore('write', new_write_inst) - self._inst_by_offset[pkt_offset] = inst - self._off_inst_by_name[name] = inst - - def load_flow_man(self, flow_obj): - """ - Loads an outer VM variable (instruction) into current VM. - The outer VM variable must contain different name than existing VM variables currently registered on VM. - - :parameters: - flow_obj : CTRexVMVariable - a CTRexVMVariable to be loaded into VM variable sets. - - :return: - list holds variables data of VM - - """ - assert isinstance(flow_obj, CTRexPktBuilder.CTRexVM.CTRexVMFlowVariable) - if flow_obj.name not in self.vm_variables.keys(): - self.vm_variables[flow_obj.name] = flow_obj - else: - raise CTRexPktBuilder.VMVarNameExistsError(flow_obj.name) - - def set_split_by_var (self, var_name): - if var_name not in self.vm_variables: - raise KeyError("cannot set split by var to an unknown VM var ('{0}')". - format(var_name)) - - self.split_by_var = var_name - - def dump(self): - """ - dumps a VM variables (instructions) and split_by_var into a dict data structure. - - :parameters: - None - - :return: - dict with VM instructions as list and split_by_var as str - - """ - - # at first, dump all CTRexVMFlowVariable instructions - inst_array = [var.dump() if hasattr(var, 'dump') else var - for key, var in self.vm_variables.items()] - # then, dump all the CTRexVMWrtFlowVarInst and CTRexVMChecksumInst instructions - inst_array += [self._inst_by_offset.get(key).inst.dump() - for key in sorted(self._inst_by_offset)] - return {'instructions': inst_array, 'split_by_var': self.split_by_var} - - class CVMAbstractInstruction(object): - __metaclass__ = ABCMeta - - def __init__(self, name): - """ - Instantiate a CTRexVMVariable object - - :parameters: - name : str - a string representing the name of the VM variable. - """ - super(CTRexPktBuilder.CTRexVM.CVMAbstractInstruction, self).__init__() - self.name = name - - def set_field(self, field_name, val): - if not hasattr(self, field_name): - raise CTRexPktBuilder.VMFieldNameError(field_name) - setattr(self, field_name, val) - - @abstractmethod - def dump(self): - pass - - class CTRexVMFlowVariable(CVMAbstractInstruction): - """ - This class defines a single VM variable to be used as part of CTRexVar object. - """ - VALID_SIZE = [1, 2, 4, 8] # size in Bytes - VALID_OPERATION = ["inc", "dec", "random"] - - def __init__(self, name): - """ - Instantiate a CTRexVMVariable object - - :parameters: - name : str - a string representing the name of the VM variable. - """ - super(CTRexPktBuilder.CTRexVM.CTRexVMFlowVariable, self).__init__(name) - # self.name = name - self.size = 4 - self.big_endian = True - self.operation = "inc" - # self.split_by_core = False - self.init_value = 1 - self.min_value = self.init_value - self.max_value = self.init_value - - def set_field(self, field_name, val): - """ - Set VM variable field. Only existing variables are allowed to be changed. - - :parameters: - field_name : str - a string representing the field name of the VM variable to be changed. - val : - a value to be applied to field_name field of the var_name VM variable. - - :return: - None - - :raises: - + :exc:`CTRexPktBuilder.VMVarNameError`, in case of illegal field name. 
- + :exc:`CTRexPktBuilder.VMVarFieldTypeError`, in case mismatch between `val` and allowed type. - + :exc:`CTRexPktBuilder.VMVarValueError`, in case val isn't one of allowed options of field_name. - - """ - if not hasattr(self, field_name): - raise CTRexPktBuilder.VMFieldNameError(field_name) - elif field_name == "size": - if type(val) != int: - raise CTRexPktBuilder.VMFieldTypeError("size", int) - elif val not in self.VALID_SIZE: - raise CTRexPktBuilder.VMFieldValueError("size", self.VALID_SIZE) - elif field_name in ["init_value", "min_value", "max_value"]: - if type(val) != int: - raise CTRexPktBuilder.VMFieldTypeError(field_name, int) - elif field_name == "operation": - if type(val) != str: - raise CTRexPktBuilder.VMFieldTypeError("operation", str) - elif val not in self.VALID_OPERATION: - raise CTRexPktBuilder.VMFieldValueError("operation", self.VALID_OPERATION) - # elif field_name == "split_by_core": - # val = bool(val) - # update field value on success - setattr(self, field_name, val) - - def dump(self): - """ - dumps a variable fields in a dictionary data structure. - - :parameters: - None - - :return: - dictionary holds variable data of VM variable - - """ - return {"type": "flow_var", # VM variable dump always refers to manipulate instruction. - "name": self.name, - "size": self.size, - "op": self.operation, - # "split_by_core": self.split_by_core, - "init_value": self.init_value, - "min_value": self.min_value, - "max_value": self.max_value} - - class CTRexVMChecksumInst(CVMAbstractInstruction): - - def __init__(self, name, offset): - """ - Instantiate a CTRexVMChecksumInst object - - :parameters: - name : str - a string representing the name of the VM variable. - """ - super(CTRexPktBuilder.CTRexVM.CTRexVMChecksumInst, self).__init__(name) - self.pkt_offset = offset - - def dump(self): - return {"type": "fix_checksum_ipv4", - "pkt_offset": int(self.pkt_offset)} - - class CTRexVMWrtFlowVarInst(CVMAbstractInstruction): - - def __init__(self, name, pkt_offset): - """ - Instantiate a CTRexVMWrtFlowVarInst object - - :parameters: - name : str - a string representing the name of the VM variable. - """ - super(CTRexPktBuilder.CTRexVM.CTRexVMWrtFlowVarInst, self).__init__(name) - self.pkt_offset = int(pkt_offset) - self.add_value = 0 - self.is_big_endian = False - - def set_field(self, field_name, val): - if not hasattr(self, field_name): - raise CTRexPktBuilder.VMFieldNameError(field_name) - elif field_name == 'pkt_offset': - raise ValueError("pkt_offset value cannot be changed") - cur_attr_type = type(getattr(self, field_name)) - if cur_attr_type == type(val): - setattr(self, field_name, val) - else: - CTRexPktBuilder.VMFieldTypeError(field_name, cur_attr_type) - - def dump(self): - return {"type": "write_flow_var", - "name": self.name, - "pkt_offset": self.pkt_offset, - "add_value": int(self.add_value), - "is_big_endian": bool(self.is_big_endian) - } - - class CTRexVMChecksumInst(CVMAbstractInstruction): - - def __init__(self, name, offset): - """ - Instantiate a CTRexVMChecksumInst object - - :parameters: - name : str - a string representing the name of the VM variable. 
- """ - super(CTRexPktBuilder.CTRexVM.CTRexVMChecksumInst, self).__init__(name) - self.pkt_offset = offset - - def dump(self): - return {"type": "fix_checksum_ipv4", - "pkt_offset": int(self.pkt_offset)} - - class CTRexVMWrtFlowVarInst(CVMAbstractInstruction): - - def __init__(self, name, pkt_offset): - """ - Instantiate a CTRexVMWrtFlowVarInst object - - :parameters: - name : str - a string representing the name of the VM variable. - """ - super(CTRexPktBuilder.CTRexVM.CTRexVMWrtFlowVarInst, self).__init__(name) - self.pkt_offset = int(pkt_offset) - self.add_value = 0 - self.is_big_endian = False - - def set_field(self, field_name, val): - if not hasattr(self, field_name): - raise CTRexPktBuilder.VMFieldNameError(field_name) - elif field_name == 'pkt_offset': - raise ValueError("pkt_offset value cannot be changed") - cur_attr_type = type(getattr(self, field_name)) - if cur_attr_type == type(val): - setattr(self, field_name, val) - else: - CTRexPktBuilder.VMFieldTypeError(field_name, cur_attr_type) - - def dump(self): - return {"type": "write_flow_var", - "name": self.name, - "pkt_offset": self.pkt_offset, - "add_value": int(self.add_value), - "is_big_endian": bool(self.is_big_endian) - } - - class CPacketBuildException(Exception): - """ - This is the general Packet Building error exception class. - """ - def __init__(self, code, message): - self.code = code - self.message = message - - def __str__(self): - return self.__repr__() - - def __repr__(self): - return u"[errcode:%r] %r" % (self.code, self.message) - - class EmptyPacketError(CPacketBuildException): - """ - This exception is used to indicate an error caused by operation performed on an empty packet. - """ - def __init__(self, message=''): - self._default_message = 'Illegal operation on empty packet.' - self.message = message or self._default_message - super(CTRexPktBuilder.EmptyPacketError, self).__init__(-10, self.message) - - class IPAddressError(CPacketBuildException): - """ - This exception is used to indicate an error on the IP addressing part of the packet. - """ - def __init__(self, message=''): - self._default_message = 'Illegal type or value of IP address has been provided.' - self.message = message or self._default_message - super(CTRexPktBuilder.IPAddressError, self).__init__(-11, self.message) - - class MACAddressError(CPacketBuildException): - """ - This exception is used to indicate an error on the MAC addressing part of the packet. - """ - def __init__(self, message=''): - self._default_message = 'Illegal MAC address has been provided.' - self.message = message or self._default_message - super(CTRexPktBuilder.MACAddressError, self).__init__(-12, self.message) - - class PacketLayerError(CPacketBuildException): - """ - This exception is used to indicate an error caused by operation performed on an non-exists layer of the packet. - """ - def __init__(self, name, message=''): - self._default_message = "The given packet layer name ({0}) does not exists.".format(name) - self.message = message or self._default_message - super(CTRexPktBuilder.PacketLayerError, self).__init__(-13, self.message) - - class PacketLayerTypeError(CPacketBuildException): - """ - This exception is used to indicate an error caused by operation performed on an non-exists layer of the packet. 
- """ - def __init__(self, name, layer_type, ok_type, message=''): - self._default_message = "The type of packet layer {layer_name} is of type {layer_type}, " \ - "and not of the expected {allowed_type}.".format(layer_name=name, - layer_type=layer_type, - allowed_type=ok_type.__name__) - self.message = message or self._default_message - super(CTRexPktBuilder.PacketLayerTypeError, self).__init__(-13, self.message) - - class VMVarNameExistsError(CPacketBuildException): - """ - This exception is used to indicate a duplicate usage of VM variable. - """ - def __init__(self, name, message=''): - self._default_message = 'The given VM name ({0}) already exists as part of the stream.'.format(name) - self.message = message or self._default_message - super(CTRexPktBuilder.VMVarNameExistsError, self).__init__(-21, self.message) - - class VMFieldNameError(CPacketBuildException): - """ - This exception is used to indicate that an undefined VM var field name has been accessed. - """ - def __init__(self, name, message=''): - self._default_message = "The given VM field name ({0}) is not defined and isn't legal.".format(name) - self.message = message or self._default_message - super(CTRexPktBuilder.VMFieldNameError, self).__init__(-22, self.message) - - class VMFieldTypeError(CPacketBuildException): - """ - This exception is used to indicate an illegal value has type has been given to VM variable field. - """ - def __init__(self, name, ok_type, message=''): - self._default_message = "The desired value of field {field_name} is of type {field_type}, " \ - "and not of the allowed {allowed_type}.".format(field_name=name, - field_type=type(name).__name__, - allowed_type=ok_type.__name__) - self.message = message or self._default_message - super(CTRexPktBuilder.VMFieldTypeError, self).__init__(-31, self.message) - - class VMFieldValueError(CPacketBuildException): - """ - This exception is used to indicate an error an illegal value has been assigned to VM variable field. - """ - def __init__(self, name, ok_opts, message=''): - self._default_message = "The desired value of field {field_name} is illegal.\n" \ - "The only allowed options are: {allowed_opts}.".format(field_name=name, - allowed_opts=ok_opts) - self.message = message or self._default_message - super(CTRexPktBuilder.VMFieldValueError, self).__init__(-32, self.message) - - -if __name__ == "__main__": - pass diff --git a/scripts/automation/trex_control_plane/client_utils/text_tables.py b/scripts/automation/trex_control_plane/client_utils/text_tables.py index d8928da8..6b52a4a9 100644 --- a/scripts/automation/trex_control_plane/client_utils/text_tables.py +++ b/scripts/automation/trex_control_plane/client_utils/text_tables.py @@ -1,7 +1,5 @@ - -import external_packages from texttable import Texttable -from common.text_opts import format_text +from trex_control_plane.common.text_opts import format_text class TRexTextTable(Texttable): diff --git a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py index 825d6fc9..776a51a7 100755 --- a/scripts/automation/trex_control_plane/client_utils/yaml_utils.py +++ b/scripts/automation/trex_control_plane/client_utils/yaml_utils.py @@ -16,7 +16,6 @@ limitations under the License. 
""" import traceback import sys -import external_packages import yaml diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py deleted file mode 100755 index c1f1bfa6..00000000 --- a/scripts/automation/trex_control_plane/common/trex_streams.py +++ /dev/null @@ -1,526 +0,0 @@ -#!/router/bin/python - -import external_packages -from client_utils.packet_builder_interface import CTrexPktBuilderInterface -from client_utils.packet_builder import CTRexPktBuilder -from collections import OrderedDict, namedtuple -from client_utils.yaml_utils import * -import trex_stl_exceptions -import dpkt -import struct -import copy -import os -import random -import yaml -import base64 - -StreamPack = namedtuple('StreamPack', ['stream_id', 'stream']) -LoadedStreamList = namedtuple('LoadedStreamList', ['name', 'loaded', 'compiled']) - -class CStreamList(object): - - def __init__(self): - self.streams_list = OrderedDict() - self.yaml_loader = CTRexYAMLLoader(os.path.join(os.path.dirname(os.path.realpath(__file__)), - "rpc_defaults.yaml")) - - def generate_numbered_name (self, name): - prefix = name.rstrip('01234567890') - suffix = name[len(prefix):] - if suffix == "": - n = "_1" - else: - n = int(suffix) + 1 - return prefix + str(n) - - def append_stream(self, name, stream_obj): - assert isinstance(stream_obj, CStream) - - # if name exists simply add numbered suffix to it - while name in self.streams_list: - name = self.generate_numbered_name(name) - - self.streams_list[name]=stream_obj - return name - - def remove_stream(self, name): - popped = self.streams_list.pop(name) - if popped: - for stream_name, stream in self.streams_list.items(): - if stream.next_stream_id == name: - stream.next_stream_id = -1 - try: - rx_stats_stream = getattr(stream.rx_stats, "stream_id") - if rx_stats_stream == name: - # if a referenced stream of rx_stats object deleted, revert to rx stats of current stream - setattr(stream.rx_stats, "stream_id", stream_name) - except AttributeError as e: - continue # - return popped - - def export_to_yaml(self, file_path): - raise NotImplementedError("export_to_yaml method is not implemented, yet") - - def load_yaml(self, file_path, multiplier=1): - # clear all existing streams linked to this object - self.streams_list.clear() - streams_data = load_yaml_to_obj(file_path) - assert isinstance(streams_data, list) - new_streams_data = [] - for stream in streams_data: - stream_name = stream.get("name") - raw_stream = stream.get("stream") - if not stream_name or not raw_stream: - raise ValueError("Provided stream is not according to convention." - "Each stream must be provided as two keys: 'name' and 'stream'. " - "Provided item was:\n {stream}".format(stream)) - new_stream_data = self.yaml_loader.validate_yaml(raw_stream, - "stream", - multiplier= multiplier) - new_streams_data.append(new_stream_data) - new_stream_obj = CStream() - new_stream_obj.load_data(**new_stream_data) - self.append_stream(stream_name, new_stream_obj) - return new_streams_data - - def compile_streams(self): - # first, assign an id to each stream - stream_ids = {} - for idx, stream_name in enumerate(self.streams_list): - stream_ids[stream_name] = idx - - # next, iterate over the streams and transform them from working with names to ids. 
- # with that build a new dict with old stream_name as the key, and StreamPack as the stored value - compiled_streams = {} - for stream_name, stream in self.streams_list.items(): - tmp_stream = CStreamList._compile_single_stream(stream_name, stream, stream_ids) - compiled_streams[stream_name] = StreamPack(stream_ids.get(stream_name), - tmp_stream) - return compiled_streams - - @staticmethod - def _compile_single_stream(stream_name, stream, id_dict): - # copy the old stream to temporary one, no change to class attributes - tmp_stream = copy.copy(stream) - next_stream_id = id_dict.get(getattr(tmp_stream, "next_stream_id"), -1) - try: - rx_stats_stream_id = id_dict.get(getattr(tmp_stream.rx_stats, "stream_id"), - id_dict.get(stream_name)) - except AttributeError as e: - rx_stats_stream_id = id_dict.get(stream_name) - # assign resolved values to stream object - tmp_stream.next_stream_id = next_stream_id - tmp_stream.rx_stats.stream_id = rx_stats_stream_id - return tmp_stream - - -class CRxStats(object): - - FIELDS = ["seq_enabled", "latency_enabled", "stream_id"] - def __init__(self, enabled=False, **kwargs): - self.enabled = bool(enabled) - for field in CRxStats.FIELDS: - setattr(self, field, kwargs.get(field, False)) - - def dump(self): - if self.enabled: - dump = {"enabled": True} - dump.update({k: getattr(self, k) - for k in CRxStats.FIELDS} - ) - return dump - else: - return {"enabled": False} - - - -class CTxMode(object): - """docstring for CTxMode""" - GENERAL_FIELDS = ["type", "pps"] - FIELDS = {"continuous": [], - "single_burst": ["total_pkts"], - "multi_burst": ["pkts_per_burst", "ibg", "count"]} - - def __init__(self, type, pps=0, **kwargs): - self._MODES = CTxMode.FIELDS.keys() - self.type = type - self.pps = pps - for field in CTxMode.FIELDS.get(self.type): - setattr(self, field, kwargs.get(field, 0)) - - @property - def type(self): - return self._type - - @type.setter - def type(self, type): - if type not in self._MODES: - raise ValueError("Unknown TX mode ('{0}')has been initialized.".format(type)) - self._type = type - self._reset_fields() - - def dump(self): - dump = ({k: getattr(self, k) - for k in CTxMode.GENERAL_FIELDS - }) - dump.update({k: getattr(self, k) - for k in CTxMode.FIELDS.get(self.type) - }) - return dump - - def _reset_fields(self): - for field in CTxMode.FIELDS.get(self.type): - setattr(self, field, 0) - - -class CStream(object): - """docstring for CStream""" - - FIELDS = ["enabled", "self_start", "next_stream_id", "isg", "mode", "rx_stats", "packet", "vm"] - - def __init__(self): - self.is_loaded = False - self._is_compiled = False - self._pkt_bld_obj = CTRexPktBuilder() - for field in CStream.FIELDS: - setattr(self, field, None) - - - def load_data(self, **kwargs): - try: - for k in CStream.FIELDS: - if k == "rx_stats": - rx_stats_data = kwargs[k] - if isinstance(rx_stats_data, dict): - setattr(self, k, CRxStats(**rx_stats_data)) - elif isinstance(rx_stats_data, CRxStats): - setattr(self, k, rx_stats_data) - elif k == "mode": - tx_mode = kwargs[k] - if isinstance(tx_mode, dict): - setattr(self, k, CTxMode(**tx_mode)) - elif isinstance(tx_mode, CTxMode): - setattr(self, k, tx_mode) - elif k == "packet": - if isinstance(kwargs[k], CTRexPktBuilder): - if "vm" not in kwargs: - self.load_packet_obj(kwargs[k]) - break # vm field check is skipped - else: - raise ValueError("When providing packet object with a CTRexPktBuilder, vm parameter " - "should not be supplied") - else: - binary = kwargs[k]["binary"] - if isinstance(binary, str): - - # TODO: load to 
_pkt_bld_obj also when passed as byte array! - if binary.endswith(".pcap"): - self._pkt_bld_obj.load_packet_from_pcap(binary) - self._pkt_bld_obj.metadata = kwargs[k]["meta"] - self.packet = self._pkt_bld_obj.dump_pkt() - else: - self.packet = {} - self.packet['binary'] = binary - self.packet['meta'] = "" - - else: - raise ValueError("Packet binary attribute has been loaded with unsupported value." - "Supported values are reference to pcap file with SINGLE packet, " - "or a list of unsigned-byte integers") - else: - setattr(self, k, kwargs[k]) - self.is_loaded = True - except KeyError as e: - cause = e.args[0] - raise KeyError("The attribute '{0}' is missing as a field of the CStream object.\n" - "Loaded data must contain all of the following fields: {1}".format(cause, CStream.FIELDS)) - - def load_packet_obj(self, packet_obj): - assert isinstance(packet_obj, CTRexPktBuilder) - self.packet = packet_obj.dump_pkt() - self.vm = packet_obj.get_vm_data() - - def load_packet_from_pcap(self, pcap_path, metadata=''): - with open(pcap_path, 'r') as f: - pcap = dpkt.pcap.Reader(f) - first_packet = True - for _, buf in pcap: - # this is an iterator, can't evaluate the number of files in advance - if first_packet: - self.packet = {"binary": [struct.unpack('B', buf[i:i+1])[0] # represent data as list of 0-255 ints - for i in range(0, len(buf))], - "meta": metadata} # meta data continues without a change. - first_packet = False - else: - raise ValueError("Provided pcap file contains more than single packet.") - # arrive here ONLY if pcap contained SINGLE packet - return - - - def dump(self): - if self.is_loaded: - dump = {} - for key in CStream.FIELDS: - try: - dump[key] = getattr(self, key).dump() # use dump() method of compound object, such TxMode - except AttributeError: - dump[key] = getattr(self, key) - return dump - else: - raise RuntimeError("CStream object isn't loaded with data. 
Use 'load_data' method.") - - def get_stream_layers(self, depth_limit=Ellipsis): - stream_layers = self._pkt_bld_obj.get_packet_layers(depth_limit) - return "/".join(stream_layers) - - - -# describes a stream DB -class CStreamsDB(object): - - def __init__(self): - self.stream_packs = {} - - def load_yaml_file(self, filename): - - stream_pack_name = filename - if stream_pack_name in self.get_loaded_streams_names(): - self.remove_stream_packs(stream_pack_name) - - stream_list = CStreamList() - loaded_obj = stream_list.load_yaml(filename) - - try: - compiled_streams = stream_list.compile_streams() - rc = self.load_streams(LoadedStreamList(stream_pack_name, - loaded_obj, - [StreamPack(v.stream_id, v.stream.dump()) - for k, v in compiled_streams.items()])) - except Exception as e: - return None - - - return self.get_stream_pack(stream_pack_name) - - def load_streams(self, LoadedStreamList_obj): - if LoadedStreamList_obj.name in self.stream_packs: - return False - else: - self.stream_packs[LoadedStreamList_obj.name] = LoadedStreamList_obj - return True - - def remove_stream_packs(self, *names): - removed_streams = [] - for name in names: - removed = self.stream_packs.pop(name) - if removed: - removed_streams.append(name) - return removed_streams - - def clear(self): - self.stream_packs.clear() - - def get_loaded_streams_names(self): - return self.stream_packs.keys() - - def stream_pack_exists (self, name): - return name in self.get_loaded_streams_names() - - def get_stream_pack(self, name): - if not self.stream_pack_exists(name): - return None - else: - return self.stream_packs.get(name) - - -########################### Simple Streams ########################### -from trex_stl_exceptions import * - -# base class for TX mode -class STLTXMode(object): - def __init__ (self): - self.fields = {} - - def to_json (self): - return self.fields - - -# continuous mode -class STLTXCont(STLTXMode): - - def __init__ (self, pps = 1): - - if not isinstance(pps, (int, float)): - raise STLArgumentError('pps', pps) - - super(STLTXCont, self).__init__() - - self.fields['type'] = 'continuous' - self.fields['pps'] = pps - - -# single burst mode -class STLTXSingleBurst(STLTXMode): - - def __init__ (self, pps = 1, total_pkts = 1): - - if not isinstance(pps, (int, float)): - raise STLArgumentError('pps', pps) - - if not isinstance(total_pkts, int): - raise STLArgumentError('total_pkts', total_pkts) - - super(STLTXSingleBurst, self).__init__() - - self.fields['type'] = 'single_burst' - self.fields['pps'] = pps - self.fields['total_pkts'] = total_pkts - - -# multi burst mode -class STLTXMultiBurst(STLTXMode): - - def __init__ (self, - pps = 1, - pkts_per_burst = 1, - ibg = 0.0, - count = 1): - - if not isinstance(pps, (int, float)): - raise STLArgumentError('pps', pps) - - if not isinstance(pkts_per_burst, int): - raise STLArgumentError('pkts_per_burst', pkts_per_burst) - - if not isinstance(ibg, (int, float)): - raise STLArgumentError('ibg', ibg) - - if not isinstance(count, int): - raise STLArgumentError('count', count) - - super(STLTXMultiBurst, self).__init__() - - self.fields['type'] = 'multi_burst' - self.fields['pps'] = pps - self.fields['pkts_per_burst'] = pkts_per_burst - self.fields['ibg'] = ibg - self.fields['count'] = count - - -class STLStream(object): - - def __init__ (self, - packet, - mode = STLTXCont(1), - enabled = True, - self_start = True, - isg = 0.0, - rx_stats = None, - next_stream_id = -1, - stream_id = None): - - # type checking - if not isinstance(mode, STLTXMode): - raise 
STLArgumentError('mode', mode) - - if not isinstance(packet, CTrexPktBuilderInterface): - raise STLArgumentError('packet', packet) - - if not isinstance(enabled, bool): - raise STLArgumentError('enabled', enabled) - - if not isinstance(self_start, bool): - raise STLArgumentError('self_start', self_start) - - if not isinstance(isg, (int, float)): - raise STLArgumentError('isg', isg) - - if (type(mode) == STLTXCont) and (next_stream_id != -1): - raise STLError("continuous stream cannot have a next stream ID") - - # use a random 31 bit for ID - self.stream_id = stream_id if stream_id is not None else random.getrandbits(31) - - self.fields = {} - - # basic fields - self.fields['enabled'] = enabled - self.fields['self_start'] = self_start - self.fields['isg'] = isg - - self.fields['next_stream_id'] = next_stream_id - - # mode - self.fields['mode'] = mode.to_json() - - packet.compile() - - # packet and VM - self.fields['packet'] = packet.dump_pkt() - self.fields['vm'] = packet.get_vm_data() - - self.fields['rx_stats'] = {} - if not rx_stats: - self.fields['rx_stats']['enabled'] = False - - - def __str__ (self): - return json.dumps(self.fields, indent = 4, separators=(',', ': '), sort_keys = True) - - def to_json (self): - return self.fields - - def get_id (self): - return self.stream_id - - @staticmethod - def dump_to_yaml (yaml_file, stream_list): - - # type check - if isinstance(stream_list, STLStream): - stream_list = [stream_list] - - if not all([isinstance(stream, STLStream) for stream in stream_list]): - raise STLArgumentError('stream_list', stream_list) - - - names = {} - for i, stream in enumerate(stream_list): - names[stream.get_id()] = "stream-{0}".format(i) - - yaml_lst = [] - for stream in stream_list: - - fields = dict(stream.fields) - - # handle the next stream id - if fields['next_stream_id'] == -1: - del fields['next_stream_id'] - - else: - if not stream.get_id() in names: - raise STLError('broken dependencies in stream list') - - fields['next_stream'] = names[stream.get_id()] - - # add to list - yaml_lst.append({'name': names[stream.get_id()], 'stream': fields}) - - # write to file - x = yaml.dump(yaml_lst, default_flow_style=False) - with open(yaml_file, 'w') as f: - f.write(x) - return x - - -# REMOVE ME when can - convert from stream pack to a simple stream -class HACKSTLStream(STLStream): - def __init__ (self, stream_pack): - if not isinstance(stream_pack, StreamPack): - raise Exception("internal error") - - packet = CTRexPktBuilder() - packet.load_from_stream_obj(stream_pack.stream) - super(HACKSTLStream, self).__init__(packet, stream_id = stream_pack.stream_id) - - self.fields = stream_pack.stream diff --git a/scripts/automation/trex_control_plane/console/__init__.py b/scripts/automation/trex_control_plane/console/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/scripts/automation/trex_control_plane/console/__init__.py diff --git a/scripts/automation/trex_control_plane/console/trex_console.py b/scripts/automation/trex_control_plane/console/trex_console.py index c8624626..45428b89 100755 --- a/scripts/automation/trex_control_plane/console/trex_console.py +++ b/scripts/automation/trex_control_plane/console/trex_console.py @@ -27,14 +27,16 @@ import string import os import sys import tty, termios -import trex_root_path -from common.trex_streams import * -from client.trex_stateless_client import STLClient, LoggerApi -from common.text_opts import * -from client_utils.general_utils import user_input, get_current_user -from client_utils import 
parsing_opts + +from trex_control_plane.stl.api import * + +from trex_control_plane.common.text_opts import * +from trex_control_plane.client_utils.general_utils import user_input, get_current_user +from trex_control_plane.client_utils import parsing_opts + + import trex_tui -from common.trex_stl_exceptions import * + from functools import wraps __version__ = "1.1" diff --git a/scripts/automation/trex_control_plane/console/trex_tui.py b/scripts/automation/trex_control_plane/console/trex_tui.py index 1e22b005..1ecf0868 100644 --- a/scripts/automation/trex_control_plane/console/trex_tui.py +++ b/scripts/automation/trex_control_plane/console/trex_tui.py @@ -2,13 +2,15 @@ import termios import sys import os import time -from common.text_opts import * -from common import trex_stats -from client_utils import text_tables from collections import OrderedDict import datetime from cStringIO import StringIO -from client.trex_stateless_client import STLError + +from common.text_opts import * +from client_utils import text_tables + +# for STL exceptions +from trex_control_plane.stl.api import * class SimpleBar(object): def __init__ (self, desc, pattern): @@ -61,7 +63,7 @@ class TrexTUIDashBoard(TrexTUIPanel): def show (self): - stats = self.stateless_client._get_formatted_stats(self.ports, trex_stats.COMPACT) + stats = self.stateless_client._get_formatted_stats(self.ports) # print stats to screen for stat_type, stat_data in stats.iteritems(): text_tables.print_table_with_header(stat_data.text_table, stat_type) @@ -148,7 +150,7 @@ class TrexTUIPort(TrexTUIPanel): def show (self): - stats = self.stateless_client._get_formatted_stats([self.port_id], trex_stats.COMPACT) + stats = self.stateless_client._get_formatted_stats([self.port_id]) # print stats to screen for stat_type, stat_data in stats.iteritems(): text_tables.print_table_with_header(stat_data.text_table, stat_type) diff --git a/scripts/automation/trex_control_plane/stl/__init__.py b/scripts/automation/trex_control_plane/stl/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/scripts/automation/trex_control_plane/stl/__init__.py diff --git a/scripts/automation/trex_control_plane/stl/api.py b/scripts/automation/trex_control_plane/stl/api.py new file mode 100644 index 00000000..c12628b5 --- /dev/null +++ b/scripts/automation/trex_control_plane/stl/api.py @@ -0,0 +1,31 @@ + +# get external libs +import trex_control_plane.client_utils.external_packages + +# client and exceptions +from trex_stl_exceptions import * +from trex_stl_client import STLClient, LoggerApi + +# streams +from trex_stl_streams import * + +# packet builder +from trex_stl_packet_builder_scapy import * +from scapy.all import * + +# packet builder +STLPktBuilder = CScapyTRexPktBuilder + +# VM +STLVmFlowVar = CTRexVmDescFlowVar +STLVmWriteFlowVar = CTRexVmDescWrFlowVar +STLVmFixIpv4 = CTRexVmDescFixIpv4 +STLVmTrimPktSize = CTRexVmDescTrimPktSize +STLVmTupleGen = CTRexVmDescTupleGen + + +# simulator +from trex_stl_sim import STLSim + +# std lib (various lib functions) +from trex_stl_std import * diff --git a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml b/scripts/automation/trex_control_plane/stl/rpc_defaults.yaml index 9325a0e4..ad814b3e 100755..100644 --- a/scripts/automation/trex_control_plane/common/rpc_defaults.yaml +++ b/scripts/automation/trex_control_plane/stl/rpc_defaults.yaml @@ -1,124 +1,124 @@ -##############################################################
-#### TRex RPC stream list default values ####
-##############################################################
-
-# this document is based on TRex RPC server spec and its fields:
-# http://trex-tgn.cisco.com/trex/doc/trex_rpc_server_spec.html
-
-### HOW TO READ THIS FILE
-# 1. Each key represents an object type
-# 2. Each value can be either a value field or another object
-# 2.1. If a value field, read as:
-# + type: type of field
-# + has_default: if the value has any default
-# + default: the default value (only appears if the has_default field is 'YES')
-# 2.2. If an object type, jump to the corresponding object key.
-# 3. If an object has more than one instance type, another layer with the type shall be added.
-# For example, 'mode' object has 3 types: 'continuous', 'single_burst', 'multi_burst'
-# So, 3 mode objects will be defined, named:
-# - mode['continuous']
-# - mode['single_burst']
-# - mode['multi_burst']
-# In this case, there's no default for the 'type' field on the object
-# 4. Some values have a 'multiply' property attached.
-# In such a case, the loaded value will be multiplied by the multiplier.
-# For example, if the mode's 'pps' field value is 10, and its multiplier is 5,
-# the loaded pps value will be 10*5=50.
-# 5. Any object type must be listed by the user, even if all its fields are defaults.
-# The most basic option is to declare the object with "[]", which stands for an empty object in YAML syntax.
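As a rough illustration of these rules, the sketch below shows how a profile written against this schema is loaded and compiled by the CStreamList class removed above. It assumes the pre-refactor package layout (common/trex_streams.py) and a hypothetical profile path; fields flagged 'multiply: YES' (e.g. pps) are scaled by the multiplier argument, per rule 4.

    # sketch only -- pre-refactor loader removed in this commit
    from common.trex_streams import CStreamList

    streams = CStreamList()
    # 'multiply: YES' fields are scaled by this multiplier
    streams.load_yaml("stl/imix_1pkt.yaml", multiplier = 2)   # hypothetical profile path

    # resolve stream names to numeric ids: {name: StreamPack(stream_id, stream)}
    for name, pack in streams.compile_streams().items():
        print name, pack.stream_id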
-
-
-stream:
- enabled:
- type: boolean
- has_default: YES
- default: True
- self_start:
- type: boolean
- has_default: YES
- default: True
- isg:
- type: [int, double, string]
- has_default: YES
- default: 0.0
- next_stream_id:
- type: string # string to allow naming binding
- has_default: YES
- default: -1 # no next streams
- packet:
- type: object
- mode:
- type: object
- vm:
- type: object
- rx_stats:
- type: object
-
-packet:
- binary:
- type: [array,string]
- has_default: NO
- meta:
- type: string
- has_default: YES
- default: ""
-
-mode:
- continuous:
- pps:
- type: [int, double]
- has_default: NO
- multiply: YES
- single_burst:
- pps:
- type: [int, double]
- has_default: NO
- multiply: YES
- total_pkts:
- type: int
- has_default: NO
- multi_burst:
- pps:
- type: [int, double]
- has_default: NO
- multiply: YES
- pkts_per_burst:
- type: int
- has_default: NO
- ibg:
- type: [int, double, string]
- has_default: YES
- default: 100.0
- count:
- type: int
- has_default: YES
- default: 0 # loop forever
-
-rx_stats:
- enabled:
- type: boolean
- has_default: YES
- default: False
- stream_id:
- type: string
- has_default: YES
- default: False # use related stream_id
- seq_enabled:
- type: boolean
- has_default: YES
- default: False
- latency_enabled:
- type: boolean
- has_default: YES
- default: False
-
-vm:
- instructions:
- type: array
- has_default: YES
- default: []
- split_by_var:
- type: string
- has_default: YES
- default: ""
-
+############################################################## +#### TRex RPC stream list default values #### +############################################################## + +# this document is based on TRex RPC server spec and its fields: +# http://trex-tgn.cisco.com/trex/doc/trex_rpc_server_spec.html + +### HOW TO READ THIS FILE +# 1. Each key represents an object type +# 2. Each value can be either a value field or another object +# 2.1. If a value field, read as: +# + type: type of field +# + has_default: if the value has any default +# + default: the default value (Only appears if has_default field is 'YES') +# 2.2. If an object type, jump to corresponding object key. +# 3. If an object has more than one instance type, another layer with the type shall be added. +# For example, 'mode' object has 3 types: 'continuous', 'single_burst', 'multi_burst' +# So, 3 mode objects will be defined, named: +# - mode['continuous'] +# - mode['single_burst'] +# - mode['multi_burst'] +# In this case, there's no default for the 'type' field on the object +# 4. Some values has 'multiply' property attached. +# In such case, the loaded value will be multiplied by the multiplier +# For example, if the mode's 'pps' field value is 10, and its multiplier is 5, +# the loaded pps value will be 10*5=50 +# 5. Any object type must be listed by the user, even if all its field are defaults. +# The most basic option would be to declare the object with "[]", which stands for empty object in YAML syntax. + + +stream: + enabled: + type: boolean + has_default: YES + default: True + self_start: + type: boolean + has_default: YES + default: True + isg: + type: [int, double, string] + has_default: YES + default: 0.0 + next_stream_id: + type: string # string to allow naming binding + has_default: YES + default: -1 # no next streams + packet: + type: object + mode: + type: object + vm: + type: object + rx_stats: + type: object + +packet: + binary: + type: [array,string] + has_default: NO + meta: + type: string + has_default: YES + default: "" + +mode: + continuous: + pps: + type: [int, double] + has_default: NO + multiply: YES + single_burst: + pps: + type: [int, double] + has_default: NO + multiply: YES + total_pkts: + type: int + has_default: NO + multi_burst: + pps: + type: [int, double] + has_default: NO + multiply: YES + pkts_per_burst: + type: int + has_default: NO + ibg: + type: [int, double, string] + has_default: YES + default: 100.0 + count: + type: int + has_default: YES + default: 0 # loop forever + +rx_stats: + enabled: + type: boolean + has_default: YES + default: False + stream_id: + type: string + has_default: YES + default: False # use related stream_id + seq_enabled: + type: boolean + has_default: YES + default: False + latency_enabled: + type: boolean + has_default: YES + default: False + +vm: + instructions: + type: array + has_default: YES + default: [] + split_by_var: + type: string + has_default: YES + default: "" + diff --git a/scripts/automation/trex_control_plane/client/trex_async_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_async_client.py index ef4c48f9..9b3b9577 100644 --- a/scripts/automation/trex_control_plane/client/trex_async_client.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_async_client.py @@ -1,15 +1,5 @@ #!/router/bin/python -try: - # support import for Python 2 - import outer_packages -except ImportError: - # support import for Python 3 - import client.outer_packages -from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage - -from 
common.text_opts import * - import json import threading import time @@ -18,9 +8,11 @@ import zmq import re import random -from common.trex_stats import * -from common.trex_streams import * -from common.trex_types import * +from trex_stl_jsonrpc_client import JsonRpcClient, BatchMessage + +from common.text_opts import * +from trex_stl_stats import * +from trex_stl_types import * # basic async stats class class CTRexAsyncStats(object): diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_client.py index 95fd2a69..08f640b5 100755..100644 --- a/scripts/automation/trex_control_plane/client/trex_stateless_client.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_client.py @@ -1,33 +1,28 @@ #!/router/bin/python -try: - # support import for Python 2 - import outer_packages -except ImportError: - # support import for Python 3 - import client.outer_packages - -from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage -from client_utils import general_utils -from client_utils.packet_builder import CTRexPktBuilder -import json +# for API usage the path name must be full +from trex_control_plane.stl.trex_stl_exceptions import * +#from trex_control_plane.stl.trex_stl_streams import * +from trex_stl_streams import * + +from trex_stl_jsonrpc_client import JsonRpcClient, BatchMessage +import trex_stl_stats + +from trex_stl_port import Port +from trex_stl_types import * +from trex_stl_async_client import CTRexAsyncClient + +from trex_control_plane.client_utils import parsing_opts, text_tables, general_utils +from trex_control_plane.common.text_opts import * + -from common.trex_streams import * from collections import namedtuple -from common.text_opts import * -from common import trex_stats -from client_utils import parsing_opts, text_tables +from yaml import YAMLError import time import datetime import re import random -from trex_port import Port -from common.trex_types import * -from common.trex_stl_exceptions import * -from trex_async_client import CTRexAsyncClient -from yaml import YAMLError - - +import json ############################ logger ############################# ############################ ############################# @@ -193,7 +188,6 @@ class AsyncEventHandler(object): # dispatcher for server async events (port started, port stopped and etc.) 
def handle_async_event (self, type, data): # DP stopped - show_event = False # port started @@ -431,11 +425,11 @@ class STLClient(object): "virtual": virtual} - self.global_stats = trex_stats.CGlobalStats(self.connection_info, + self.global_stats = trex_stl_stats.CGlobalStats(self.connection_info, self.server_version, self.ports) - self.stats_generator = trex_stats.CTRexInfoGenerator(self.global_stats, + self.stats_generator = trex_stl_stats.CTRexInfoGenerator(self.global_stats, self.ports) @@ -770,8 +764,8 @@ class STLClient(object): return self.comm_link.transmit_batch(batch_list) # stats - def _get_formatted_stats(self, port_id_list, stats_mask=set()): - stats_opts = trex_stats.ALL_STATS_OPTS.intersection(stats_mask) + def _get_formatted_stats(self, port_id_list, stats_mask = trex_stl_stats.COMPACT): + stats_opts = trex_stl_stats.ALL_STATS_OPTS.intersection(stats_mask) stats_obj = {} for stats_type in stats_opts: @@ -1271,44 +1265,34 @@ class STLClient(object): """ - load a profile file to port(s) + load a profile from file :parameters: filename : str filename to load - ports : list - ports to execute the command + :returns: + list of streams from the profile :raises: + :exc:`STLError` """ - @__api_check(True) - def load_profile (self, filename, ports = None): + @staticmethod + def load_profile (filename): # check filename if not os.path.isfile(filename): raise STLError("file '{0}' does not exists".format(filename)) - # by default use all ports - if ports == None: - ports = self.get_acquired_ports() - - # verify valid port id list - rc = self._validate_port_list(ports) - if not rc: - raise STLArgumentError('ports', ports, valid_values = self.get_all_ports()) - - streams = None # try YAML try: - streams_db = CStreamsDB() - stream_list = streams_db.load_yaml_file(filename) - # convert to new style stream object - streams = [HACKSTLStream(stream) for stream in stream_list.compiled] + streams = STLStream.load_from_yaml(filename) + print "***** YAML IS NOT WORKING !!! 
*********" + + except YAMLError: # try python loader try: @@ -1326,8 +1310,8 @@ class STLClient(object): traceback.print_exc(file=sys.stdout) raise STLError("Unexpected error: '{0}'".format(filename)) + return streams - self.add_streams(streams, ports) @@ -1817,7 +1801,8 @@ class STLClient(object): self.remove_all_streams(opts.ports) # pack the profile - self.load_profile(opts.file[0], opts.ports) + streams = self.load_profile(opts.file[0]) + self.add_streams(streams, ports = opts.ports) if opts.dry: self.validate(opts.ports, opts.mult, opts.duration, opts.total) @@ -1971,12 +1956,12 @@ class STLClient(object): return # determine stats mask - mask = self.__get_mask_keys(**self.__filter_namespace_args(opts, trex_stats.ALL_STATS_OPTS)) + mask = self.__get_mask_keys(**self.__filter_namespace_args(opts, trex_stl_stats.ALL_STATS_OPTS)) if not mask: # set to show all stats if no filter was given - mask = trex_stats.ALL_STATS_OPTS + mask = trex_stl_stats.ALL_STATS_OPTS - stats_opts = trex_stats.ALL_STATS_OPTS.intersection(mask) + stats_opts = trex_stl_stats.ALL_STATS_OPTS.intersection(mask) stats = self._get_formatted_stats(opts.ports, mask) diff --git a/scripts/automation/trex_control_plane/common/trex_stl_exceptions.py b/scripts/automation/trex_control_plane/stl/trex_stl_exceptions.py index 9be20db9..d5b3885d 100644 --- a/scripts/automation/trex_control_plane/common/trex_stl_exceptions.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_exceptions.py @@ -1,6 +1,7 @@ import os import sys -from common.text_opts import * + +from trex_control_plane.common.text_opts import * # basic error for API class STLError(Exception): diff --git a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_jsonrpc_client.py index 9c351175..887681a7 100755..100644 --- a/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_jsonrpc_client.py @@ -1,13 +1,12 @@ #!/router/bin/python -import external_packages import zmq import json -import general_utils +import client_utils.general_utils import re from time import sleep from collections import namedtuple -from common.trex_types import * +from trex_stl_types import * class bcolors: BLUE = '\033[94m' @@ -49,7 +48,7 @@ class JsonRpcClient(object): # default values self.port = default_port self.server = default_server - self.id_gen = general_utils.random_id_gen() + self.id_gen = client_utils.general_utils.random_id_gen() def get_connection_details (self): diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder_interface.py b/scripts/automation/trex_control_plane/stl/trex_stl_packet_builder_interface.py index b6e7c026..b6e7c026 100644 --- a/scripts/automation/trex_control_plane/client_utils/packet_builder_interface.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_packet_builder_interface.py diff --git a/scripts/automation/trex_control_plane/client_utils/scapy_packet_builder.py b/scripts/automation/trex_control_plane/stl/trex_stl_packet_builder_scapy.py index b1b181c6..8d2d6b8f 100644 --- a/scripts/automation/trex_control_plane/client_utils/scapy_packet_builder.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_packet_builder_scapy.py @@ -1,4 +1,3 @@ -import external_packages import random import string import struct @@ -8,7 +7,7 @@ import yaml import binascii import base64 -from packet_builder_interface import CTrexPktBuilderInterface +from trex_stl_packet_builder_interface import 
CTrexPktBuilderInterface from scapy.all import * diff --git a/scripts/automation/trex_control_plane/client/trex_port.py b/scripts/automation/trex_control_plane/stl/trex_stl_port.py index eaf64ac2..8923d3d6 100644 --- a/scripts/automation/trex_control_plane/client/trex_port.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_port.py @@ -1,8 +1,8 @@ from collections import namedtuple, OrderedDict -from common.trex_types import * -from common import trex_stats -from client_utils import packet_builder + +import trex_stl_stats +from trex_stl_types import * StreamOnPort = namedtuple('StreamOnPort', ['compiled_stream', 'metadata']) @@ -50,7 +50,7 @@ class Port(object): self.profile = None self.session_id = session_id - self.port_stats = trex_stats.CPortStats(self) + self.port_stats = trex_stl_stats.CPortStats(self) def err(self, msg): @@ -161,7 +161,7 @@ class Port(object): # meta data for show streams self.streams[stream.get_id()] = StreamOnPort(stream.to_json(), - Port._generate_stream_metadata(stream.get_id(), stream.to_json())) + Port._generate_stream_metadata(stream)) rc = self.transmit_batch(batch) if not rc: @@ -473,21 +473,21 @@ class Port(object): "streams" : streams_data} @staticmethod - def _generate_stream_metadata(stream_id, compiled_stream_obj): + def _generate_stream_metadata(stream): meta_dict = {} # create packet stream description - pkt_bld_obj = packet_builder.CTRexPktBuilder() - pkt_bld_obj.load_from_stream_obj(compiled_stream_obj) + #pkt_bld_obj = packet_builder.CTRexPktBuilder() + #pkt_bld_obj.load_from_stream_obj(compiled_stream_obj) # generate stream summary based on that - next_stream = "None" if compiled_stream_obj['next_stream_id']==-1 else compiled_stream_obj['next_stream_id'] + #next_stream = "None" if stream['next_stream_id']==-1 else stream['next_stream_id'] - meta_dict['stream_sum'] = OrderedDict([("id", stream_id), - ("packet_type", "/".join(pkt_bld_obj.get_packet_layers())), - ("length", pkt_bld_obj.get_packet_length()), - ("mode", compiled_stream_obj['mode']['type']), - ("rate_pps", compiled_stream_obj['mode']['pps']), - ("next_stream", next_stream) + meta_dict['stream_sum'] = OrderedDict([("id", stream.get_id()), + ("packet_type", "FIXME!!!"), + ("length", "FIXME!!!"), + ("mode", "FIXME!!!"), + ("rate_pps", "FIXME!!!"), + ("next_stream", "FIXME!!!") ]) return meta_dict diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/stl/trex_stl_sim.py index 1452cdd1..d61e04bf 100644 --- a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_sim.py @@ -16,18 +16,11 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" - -try: - # support import for Python 2 - import outer_packages -except ImportError: - # support import for Python 3 - import client.outer_packages - -from common.trex_stl_exceptions import STLError +from trex_stl_exceptions import * from yaml import YAMLError -from common.trex_streams import * +from trex_stl_streams import * from client_utils import parsing_opts +from trex_stl_client import STLClient import re import json @@ -95,33 +88,6 @@ class STLSim(object): self.port_id = port_id - def load_input_file (self, input_file): - # try YAML - try: - streams_db = CStreamsDB() - stream_list = streams_db.load_yaml_file(input_file) - - # convert to new style stream object - return [HACKSTLStream(stream) for stream in stream_list.compiled] - except YAMLError: - pass - - # try python - try: - basedir = os.path.dirname(input_file) - sys.path.append(basedir) - - file = os.path.basename(input_file).split('.')[0] - module = __import__(file, globals(), locals(), [], -1) - - return module.register().get_streams() - - except (AttributeError, ImportError) as e: - print "specific error: {0}".format(e) - - raise STLError("bad format input file '{0}'".format(input_file)) - - def generate_start_cmd (self, mult = "1", force = True, duration = -1): return {"id":1, "jsonrpc": "2.0", @@ -171,7 +137,7 @@ class STLSim(object): # handle YAMLs for input_file in input_files: - stream_list += self.load_input_file(input_file) + stream_list += STLClient.load_profile(input_file) # load streams diff --git a/scripts/automation/trex_control_plane/common/trex_stats.py b/scripts/automation/trex_control_plane/stl/trex_stl_stats.py index 3bd6e0cd..f880a914 100755..100644 --- a/scripts/automation/trex_control_plane/common/trex_stats.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_stats.py @@ -1,8 +1,11 @@ #!/router/bin/python -from collections import namedtuple, OrderedDict, deque + from client_utils import text_tables from common.text_opts import format_text, format_threshold, format_num -from client.trex_async_client import CTRexAsyncStats + +from trex_stl_async_client import CTRexAsyncStats + +from collections import namedtuple, OrderedDict, deque import copy import datetime import time diff --git a/scripts/api/stl/trex_stl_lib.py b/scripts/automation/trex_control_plane/stl/trex_stl_std.py index a8574e82..72a5ea52 100644 --- a/scripts/api/stl/trex_stl_lib.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_std.py @@ -1,9 +1,5 @@ - -from trex_stl_api import * -from scapy.all import * - -# stl library for various utilities - +from trex_stl_streams import * +from trex_stl_packet_builder_scapy import * # map ports # will destroy all streams/data on the ports @@ -17,7 +13,7 @@ def stl_map_ports (client, ports = None): client.reset(ports) # generate streams - base_pkt = STLPktBuilder(pkt = Ether()/IP()) + base_pkt = CScapyTRexPktBuilder(pkt = Ether()/IP()) pkts = 1 for port in ports: diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_streams.py b/scripts/automation/trex_control_plane/stl/trex_stl_streams.py new file mode 100644 index 00000000..d5cba9e2 --- /dev/null +++ b/scripts/automation/trex_control_plane/stl/trex_stl_streams.py @@ -0,0 +1,230 @@ +#!/router/bin/python + +from trex_stl_exceptions import * +from trex_stl_packet_builder_interface import CTrexPktBuilderInterface +from trex_stl_packet_builder_scapy import CScapyTRexPktBuilder, Ether, IP +from collections import OrderedDict, namedtuple + +from trex_control_plane.client_utils.yaml_utils import * + +from dpkt import pcap +import 
random +import yaml +import base64 + +# base class for TX mode +class STLTXMode(object): + def __init__ (self): + self.fields = {} + + def to_json (self): + return self.fields + + +# continuous mode +class STLTXCont(STLTXMode): + + def __init__ (self, pps = 1): + + if not isinstance(pps, (int, float)): + raise STLArgumentError('pps', pps) + + super(STLTXCont, self).__init__() + + self.fields['type'] = 'continuous' + self.fields['pps'] = pps + + +# single burst mode +class STLTXSingleBurst(STLTXMode): + + def __init__ (self, pps = 1, total_pkts = 1): + + if not isinstance(pps, (int, float)): + raise STLArgumentError('pps', pps) + + if not isinstance(total_pkts, int): + raise STLArgumentError('total_pkts', total_pkts) + + super(STLTXSingleBurst, self).__init__() + + self.fields['type'] = 'single_burst' + self.fields['pps'] = pps + self.fields['total_pkts'] = total_pkts + + +# multi burst mode +class STLTXMultiBurst(STLTXMode): + + def __init__ (self, + pps = 1, + pkts_per_burst = 1, + ibg = 0.0, + count = 1): + + if not isinstance(pps, (int, float)): + raise STLArgumentError('pps', pps) + + if not isinstance(pkts_per_burst, int): + raise STLArgumentError('pkts_per_burst', pkts_per_burst) + + if not isinstance(ibg, (int, float)): + raise STLArgumentError('ibg', ibg) + + if not isinstance(count, int): + raise STLArgumentError('count', count) + + super(STLTXMultiBurst, self).__init__() + + self.fields['type'] = 'multi_burst' + self.fields['pps'] = pps + self.fields['pkts_per_burst'] = pkts_per_burst + self.fields['ibg'] = ibg + self.fields['count'] = count + + +class STLStream(object): + + def __init__ (self, + packet = None, + mode = STLTXCont(1), + enabled = True, + self_start = True, + isg = 0.0, + rx_stats = None, + next_stream_id = -1, + stream_id = None): + + # type checking + if not isinstance(mode, STLTXMode): + raise STLArgumentError('mode', mode) + + if packet and not isinstance(packet, CTrexPktBuilderInterface): + raise STLArgumentError('packet', packet) + + if not isinstance(enabled, bool): + raise STLArgumentError('enabled', enabled) + + if not isinstance(self_start, bool): + raise STLArgumentError('self_start', self_start) + + if not isinstance(isg, (int, float)): + raise STLArgumentError('isg', isg) + + if (type(mode) == STLTXCont) and (next_stream_id != -1): + raise STLError("continuous stream cannot have a next stream ID") + + self.fields = {} + + # use a random 31 bit for ID + self.fields['stream_id'] = stream_id if stream_id is not None else random.getrandbits(31) + + # basic fields + self.fields['enabled'] = enabled + self.fields['self_start'] = self_start + self.fields['isg'] = isg + + self.fields['next_stream_id'] = next_stream_id + + # mode + self.fields['mode'] = mode.to_json() + + self.fields['packet'] = {} + self.fields['vm'] = {} + + if not packet: + packet = CScapyTRexPktBuilder(pkt = Ether()/IP()) + + # packet builder + packet.compile() + # packet and VM + self.fields['packet'] = packet.dump_pkt() + self.fields['vm'] = packet.get_vm_data() + + self.fields['rx_stats'] = {} + if not rx_stats: + self.fields['rx_stats']['enabled'] = False + + + def __str__ (self): + return json.dumps(self.fields, indent = 4, separators=(',', ': '), sort_keys = True) + + def to_json (self): + return self.fields + + def get_id (self): + return self.fields['stream_id'] + + + + @staticmethod + def dump_to_yaml (stream_list, yaml_file = None): + + # type check + if isinstance(stream_list, STLStream): + stream_list = [stream_list] + + if not all([isinstance(stream, STLStream) for stream in 
stream_list]): + raise STLArgumentError('stream_list', stream_list) + + + names = {} + for i, stream in enumerate(stream_list): + names[stream.get_id()] = "stream-{0}".format(i) + + yaml_lst = [] + for stream in stream_list: + + fields = dict(stream.fields) + + # handle the next stream id + if fields['next_stream_id'] == -1: + del fields['next_stream_id'] + + else: + if not stream.get_id() in names: + raise STLError('broken dependencies in stream list') + + fields['next_stream'] = names[stream.get_id()] + + # add to list + yaml_lst.append({'name': names[stream.get_id()], 'stream': fields}) + + # write to file + x = yaml.dump(yaml_lst, default_flow_style=False) + if yaml_file: + with open(yaml_file, 'w') as f: + f.write(x) + + return x + + + @staticmethod + def load_from_yaml (yaml_file): + + with open(yaml_file, 'r') as f: + yaml_str = f.read() + + + # load YAML + lst = yaml.load(yaml_str) + + # decode to streams + streams = [] + for stream in lst: + # for defaults + defaults = STLStream() + s = STLStream(packet = None, + mode = STLTXCont(1), + enabled = True, + self_start = True, + isg = 0.0, + rx_stats = None, + next_stream_id = -1, + stream_id = None + ) + + streams.append(s) + + return streams + diff --git a/scripts/automation/trex_control_plane/common/trex_types.py b/scripts/automation/trex_control_plane/stl/trex_stl_types.py index a7ddacea..a7ddacea 100644 --- a/scripts/automation/trex_control_plane/common/trex_types.py +++ b/scripts/automation/trex_control_plane/stl/trex_stl_types.py diff --git a/scripts/automation/wrap_stl_console.py b/scripts/automation/wrap_stl_console.py new file mode 100644 index 00000000..cb3696ea --- /dev/null +++ b/scripts/automation/wrap_stl_console.py @@ -0,0 +1,3 @@ +import trex_control_plane.console.trex_console + +trex_control_plane.console.trex_console.main() diff --git a/scripts/automation/wrap_stl_sim.py b/scripts/automation/wrap_stl_sim.py new file mode 100644 index 00000000..97ff1145 --- /dev/null +++ b/scripts/automation/wrap_stl_sim.py @@ -0,0 +1,4 @@ +import trex_control_plane.stl.api +import trex_control_plane.stl.trex_stl_sim + +trex_control_plane.stl.trex_stl_sim.main() diff --git a/scripts/stl-sim b/scripts/stl-sim index f778de2c..bb937175 100755 --- a/scripts/stl-sim +++ b/scripts/stl-sim @@ -1,5 +1,5 @@ #!/bin/bash source find_python.sh -$PYTHON automation/trex_control_plane/client/trex_stateless_sim.py $@ +$PYTHON automation/wrap_stl_sim.py $@ diff --git a/scripts/stl/profiles/imix.py b/scripts/stl/profiles/imix.py index c453222d..ddcd1904 100644 --- a/scripts/stl/profiles/imix.py +++ b/scripts/stl/profiles/imix.py @@ -1,13 +1,5 @@ -import sys -import os -# we need the API path -CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) -API_PATH = os.path.join(CURRENT_PATH, "../../api/stl") -sys.path.insert(0, API_PATH) - -from trex_stl_api import * -from scapy.all import * +from trex_control_plane.stl.api import * # IMIX profile - involves 3 streams of UDP packets # 1 - 60 bytes diff --git a/scripts/stl/profiles/syn_attack.py b/scripts/stl/profiles/syn_attack.py index 0df7a740..e3ed3d0e 100644 --- a/scripts/stl/profiles/syn_attack.py +++ b/scripts/stl/profiles/syn_attack.py @@ -1,15 +1,4 @@ -import sys -import os - -# Should be removed -# TBD fix this -CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) -API_PATH = os.path.join(CURRENT_PATH, "../../api/stl") -sys.path.insert(0, API_PATH) - -from scapy.all import * -from trex_stl_api import * - +from trex_control_plane.stl.api import * class STLS1(object): """ attack 48.0.0.1 at 
port 80 diff --git a/scripts/stl/profiles/udp_1pkt.py b/scripts/stl/profiles/udp_1pkt.py index d195b22c..6b49e592 100644 --- a/scripts/stl/profiles/udp_1pkt.py +++ b/scripts/stl/profiles/udp_1pkt.py @@ -1,14 +1,5 @@ -import sys -import os -# Should be removed -# TBD fix this -CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) -API_PATH = os.path.join(CURRENT_PATH, "../../api/stl") -sys.path.insert(0, API_PATH) - -from scapy.all import * -from trex_stl_api import * +from trex_control_plane.stl.api import * class STLS1(object): diff --git a/scripts/stl/profiles/udp_1pkt_tuple_gen.py b/scripts/stl/profiles/udp_1pkt_tuple_gen.py index f556cc31..3fb15597 100644 --- a/scripts/stl/profiles/udp_1pkt_tuple_gen.py +++ b/scripts/stl/profiles/udp_1pkt_tuple_gen.py @@ -1,16 +1,5 @@ -import sys -import os -#./stl-sim -f stl/profiles/udp_1pkt_tuple_gen.py -l 10 -o a.pcap - -# Should be removed -# TBD fix this -CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) -API_PATH = os.path.join(CURRENT_PATH, "../../api/stl") -sys.path.insert(0, API_PATH) - -from scapy.all import * -from trex_stl_api import * +from trex_control_plane.stl.api import * class STLS1(object): diff --git a/scripts/stl/profiles/udp_rand_len_9k.py b/scripts/stl/profiles/udp_rand_len_9k.py index 56d84535..b975c954 100644 --- a/scripts/stl/profiles/udp_rand_len_9k.py +++ b/scripts/stl/profiles/udp_rand_len_9k.py @@ -1,14 +1,5 @@ -import sys -import os -# Should be removed -# TBD fix this -CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) -API_PATH = os.path.join(CURRENT_PATH, "../../api/stl") -sys.path.insert(0, API_PATH) - -from scapy.all import * -from trex_stl_api import * +from trex_control_plane.stl.api import * class STLS1(object): diff --git a/scripts/trex-console b/scripts/trex-console index 53dbb0c1..6e278077 100755 --- a/scripts/trex-console +++ b/scripts/trex-console @@ -1,5 +1,5 @@ #!/bin/bash source find_python.sh -$PYTHON automation/trex_control_plane/console/trex_console.py $@ +$PYTHON automation/wrap_stl_console.py $@ diff --git a/scripts/stl/burst_1000_pkt.yaml b/scripts/yaml/burst_1000_pkt.yaml index f982fa4f..f982fa4f 100644 --- a/scripts/stl/burst_1000_pkt.yaml +++ b/scripts/yaml/burst_1000_pkt.yaml diff --git a/scripts/stl/burst_1pkt.yaml b/scripts/yaml/burst_1pkt.yaml index 48a62e68..48a62e68 100644 --- a/scripts/stl/burst_1pkt.yaml +++ b/scripts/yaml/burst_1pkt.yaml diff --git a/scripts/stl/burst_1pkt_1burst.yaml b/scripts/yaml/burst_1pkt_1burst.yaml index defa776f..defa776f 100644 --- a/scripts/stl/burst_1pkt_1burst.yaml +++ b/scripts/yaml/burst_1pkt_1burst.yaml diff --git a/scripts/stl/burst_1pkt_vm.yaml b/scripts/yaml/burst_1pkt_vm.yaml index e202b42d..e202b42d 100644 --- a/scripts/stl/burst_1pkt_vm.yaml +++ b/scripts/yaml/burst_1pkt_vm.yaml diff --git a/scripts/stl/imix_1pkt.yaml b/scripts/yaml/imix_1pkt.yaml index f2476024..6a198da6 100755..100644 --- a/scripts/stl/imix_1pkt.yaml +++ b/scripts/yaml/imix_1pkt.yaml @@ -4,7 +4,7 @@ stream: self_start: True packet: - binary: stl/udp_64B_no_crc.pcap # pcap should not include CRC + binary: yaml/udp_64B_no_crc.pcap # pcap should not include CRC mode: type: continuous pps: 100 diff --git a/scripts/stl/imix_1pkt_2.yaml b/scripts/yaml/imix_1pkt_2.yaml index bffc72eb..bffc72eb 100644 --- a/scripts/stl/imix_1pkt_2.yaml +++ b/scripts/yaml/imix_1pkt_2.yaml diff --git a/scripts/stl/imix_1pkt_tuple_gen.yaml b/scripts/yaml/imix_1pkt_tuple_gen.yaml index 78156e5c..78156e5c 100644 --- a/scripts/stl/imix_1pkt_tuple_gen.yaml +++ 
b/scripts/yaml/imix_1pkt_tuple_gen.yaml diff --git a/scripts/stl/imix_1pkt_vm.yaml b/scripts/yaml/imix_1pkt_vm.yaml index 21075fa2..21075fa2 100644 --- a/scripts/stl/imix_1pkt_vm.yaml +++ b/scripts/yaml/imix_1pkt_vm.yaml diff --git a/scripts/stl/imix_1pkt_vm2.yaml b/scripts/yaml/imix_1pkt_vm2.yaml index e0a6b771..e0a6b771 100644 --- a/scripts/stl/imix_1pkt_vm2.yaml +++ b/scripts/yaml/imix_1pkt_vm2.yaml diff --git a/scripts/stl/imix_2pkt.yaml b/scripts/yaml/imix_2pkt.yaml index 45f2303d..45f2303d 100755..100644 --- a/scripts/stl/imix_2pkt.yaml +++ b/scripts/yaml/imix_2pkt.yaml diff --git a/scripts/stl/imix_3pkt.yaml b/scripts/yaml/imix_3pkt.yaml index bc28549d..bc28549d 100755..100644 --- a/scripts/stl/imix_3pkt.yaml +++ b/scripts/yaml/imix_3pkt.yaml diff --git a/scripts/stl/imix_3pkt_vm.yaml b/scripts/yaml/imix_3pkt_vm.yaml index d812634c..d812634c 100644 --- a/scripts/stl/imix_3pkt_vm.yaml +++ b/scripts/yaml/imix_3pkt_vm.yaml diff --git a/scripts/stl/imix_scale_1000.yaml b/scripts/yaml/imix_scale_1000.yaml index bba3df5d..bba3df5d 100644 --- a/scripts/stl/imix_scale_1000.yaml +++ b/scripts/yaml/imix_scale_1000.yaml diff --git a/scripts/stl/imix_scale_300.yaml b/scripts/yaml/imix_scale_300.yaml index 8b04979c..8b04979c 100644 --- a/scripts/stl/imix_scale_300.yaml +++ b/scripts/yaml/imix_scale_300.yaml diff --git a/scripts/stl/ipv4_udp_9000.pcap b/scripts/yaml/ipv4_udp_9000.pcap Binary files differindex 86385997..86385997 100644 --- a/scripts/stl/ipv4_udp_9000.pcap +++ b/scripts/yaml/ipv4_udp_9000.pcap diff --git a/scripts/stl/ipv4_udp_9k.pcap b/scripts/yaml/ipv4_udp_9k.pcap Binary files differindex c5466f78..c5466f78 100644 --- a/scripts/stl/ipv4_udp_9k.pcap +++ b/scripts/yaml/ipv4_udp_9k.pcap diff --git a/scripts/stl/ipv4_udp_9k_burst_10.pcap b/scripts/yaml/ipv4_udp_9k_burst_10.pcap Binary files differindex bb71ca79..bb71ca79 100755..100644 --- a/scripts/stl/ipv4_udp_9k_burst_10.pcap +++ b/scripts/yaml/ipv4_udp_9k_burst_10.pcap diff --git a/scripts/stl/syn_attack_sample.yaml b/scripts/yaml/syn_attack_sample.yaml index c5734e43..c5734e43 100644 --- a/scripts/stl/syn_attack_sample.yaml +++ b/scripts/yaml/syn_attack_sample.yaml diff --git a/scripts/stl/syn_packet.pcap b/scripts/yaml/syn_packet.pcap Binary files differindex 93325547..93325547 100644 --- a/scripts/stl/syn_packet.pcap +++ b/scripts/yaml/syn_packet.pcap diff --git a/scripts/stl/udp_1518B_no_crc.pcap b/scripts/yaml/udp_1518B_no_crc.pcap Binary files differindex 145a44ad..145a44ad 100644 --- a/scripts/stl/udp_1518B_no_crc.pcap +++ b/scripts/yaml/udp_1518B_no_crc.pcap diff --git a/scripts/stl/udp_594B_no_crc.pcap b/scripts/yaml/udp_594B_no_crc.pcap Binary files differindex 5cde2f7c..5cde2f7c 100644 --- a/scripts/stl/udp_594B_no_crc.pcap +++ b/scripts/yaml/udp_594B_no_crc.pcap diff --git a/scripts/stl/udp_64B_no_crc.pcap b/scripts/yaml/udp_64B_no_crc.pcap Binary files differindex ab3f985b..ab3f985b 100644 --- a/scripts/stl/udp_64B_no_crc.pcap +++ b/scripts/yaml/udp_64B_no_crc.pcap diff --git a/scripts/stl/udp_rand_size.yaml b/scripts/yaml/udp_rand_size.yaml index 2bbad4a0..2bbad4a0 100644 --- a/scripts/stl/udp_rand_size.yaml +++ b/scripts/yaml/udp_rand_size.yaml diff --git a/scripts/stl/udp_rand_size_9k.yaml b/scripts/yaml/udp_rand_size_9k.yaml index ecdd7ae0..ecdd7ae0 100644 --- a/scripts/stl/udp_rand_size_9k.yaml +++ b/scripts/yaml/udp_rand_size_9k.yaml |
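After this refactor the same workflow goes through the new trex_control_plane.stl package. Below is a minimal sketch, assuming the post-refactor layout from stl/api.py and trex_stl_streams.py shown in this diff; the output file name is illustrative, not taken from the commit.

    # sketch only -- post-refactor API (trex_control_plane/stl/api.py)
    from trex_control_plane.stl.api import *

    # STLPktBuilder is the api.py alias for CScapyTRexPktBuilder
    pkt = STLPktBuilder(pkt = Ether()/IP()/UDP())

    s1 = STLStream(packet = pkt,
                   mode = STLTXSingleBurst(pps = 100, total_pkts = 1000))

    # serialize to the YAML profile format kept under scripts/yaml
    print STLStream.dump_to_yaml([s1], "example_profile.yaml")

This matches how trex_stl_sim.py now delegates profile loading to the static STLClient.load_profile() instead of keeping its own loader.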