Diffstat (limited to 'scripts/automation/trex_control_plane')
-rwxr-xr-x  scripts/automation/trex_control_plane/client/trex_hltapi.py              |  91
-rw-r--r--  scripts/automation/trex_control_plane/client/trex_stateless_sim.py       | 181
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/external_packages.py  |   2
-rwxr-xr-x  scripts/automation/trex_control_plane/client_utils/packet_builder.py     |   6
-rwxr-xr-x  scripts/automation/trex_control_plane/common/external_packages.py        |   2
-rwxr-xr-x  scripts/automation/trex_control_plane/common/trex_streams.py             |   2
6 files changed, 202 insertions, 82 deletions
diff --git a/scripts/automation/trex_control_plane/client/trex_hltapi.py b/scripts/automation/trex_control_plane/client/trex_hltapi.py
index 92768ca4..848d5a9e 100755
--- a/scripts/automation/trex_control_plane/client/trex_hltapi.py
+++ b/scripts/automation/trex_control_plane/client/trex_hltapi.py
@@ -17,14 +17,16 @@ class CTRexHltApi(object):
self._port_data = {}
# ----- session functions ----- #
-
- def connect(self, device, port_list, username, port=5050, reset=False, break_locks=False):
+ # sync = RPC, async = ZMQ
+ def connect(self, device, port_list, username, sync_port = 4501, async_port = 4500, reset=False, break_locks=False):
ret_dict = {"status": 0}
- self.trex_client = CTRexStatelessClient(username, device, port)
- res_ok, msg = self.trex_client.connect()
- if not res_ok:
+ self.trex_client = CTRexStatelessClient(username, device, sync_port, async_port)
+
+ rc = self.trex_client.connect()
+ if rc.bad():
+
self.trex_client = None
- ret_dict.update({"log": msg})
+ ret_dict.update({"log": rc.err()})
return ret_dict
# arrived here, connection successfully created with server
# next, try acquiring ports of TRex
@@ -70,7 +72,6 @@ class CTRexHltApi(object):
port_list = self.parse_port_list(port_list)
response = self.trex_client.release(port_list)
res_ok, log = CTRexHltApi.process_response(port_list, response)
- print log
if not res_ok:
ret_dict.update({"log": log})
return ret_dict
@@ -89,11 +90,13 @@ class CTRexHltApi(object):
return {"status": 1, "log": None}
# ----- traffic functions ----- #
- def traffic_config(self, mode, port_handle,
+ def traffic_config(self, mode, port_list,
l2_encap="ethernet_ii", mac_src="00:00:01:00:00:01", mac_dst="00:00:00:00:00:00",
l3_protocol="ipv4", ip_src_addr="0.0.0.0", ip_dst_addr="192.0.0.1", l3_length=110,
transmit_mode="continuous", rate_pps=100,
**kwargs):
+ if not isinstance(port_list, list):
+ port_list = [port_list]
ALLOWED_MODES = ["create", "modify", "remove", "enable", "disable", "reset"]
if mode not in ALLOWED_MODES:
raise ValueError("mode must be one of the following values: {modes}".format(modes=ALLOWED_MODES))
@@ -119,45 +122,55 @@ class CTRexHltApi(object):
except Exception as e:
# some exception happened during the stream creation
return {"status": 0, "log": str(e)}
- # try adding the stream, until free stream_id is found
- port_data = self._port_data.get(port_handle)
- id_candidate = None
- # TODO: change this to better implementation
- while True:
- id_candidate = port_data["stream_id_gen"].next()
- response = self.trex_client.add_stream(stream_id=id_candidate,
- stream_obj=stream_obj,
- port_id=port_handle)
- res_ok, log = CTRexHltApi.process_response(port_handle, response)
- if res_ok:
- # found non-taken stream_id on server
- # save it for modifying needs
- port_data["streams"].update({id_candidate: stream_obj})
- break
- else:
- # proceed to another iteration to use another id
- continue
- return {"status": 1,
- "stream_id": id_candidate,
- "log": None}
+ # try adding the stream on each port, until a free stream_id is found
+ for port_id in port_list:
+ port_data = self._port_data.get(port_id)
+ id_candidate = None
+ # TODO: change this to better implementation
+ while True:
+ id_candidate = port_data["stream_id_gen"].next()
+ response = self.trex_client.add_stream(stream_id=id_candidate,
+ stream_obj=stream_obj.dump(),
+ port_id_list=port_id)
+ res_ok, log = CTRexHltApi.process_response(port_id, response)
+ if res_ok:
+ # found non-taken stream_id on server
+ # save it for modifying needs
+ port_data["streams"].update({id_candidate: stream_obj})
+ break
+ else:
+ print log
+ # proceed to another iteration to use another id
+ print 'need another iteration?'
+ continue
+ return {"status": 1,
+ "stream_id": id_candidate,
+ "log": None}
+
else:
raise NotImplementedError("mode '{0}' is not supported yet on TRex".format(mode))
- def traffic_control(self, action, port_handle):
+ def traffic_control(self, action, port_handle, **kwargs):
ALLOWED_ACTIONS = ["clear_stats", "run", "stop", "sync_run"]
if action not in ALLOWED_ACTIONS:
raise ValueError("action must be one of the following values: {actions}".format(actions=ALLOWED_ACTIONS))
# ret_dict = {"status": 0, "stopped": 1}
port_list = self.parse_port_list(port_handle)
+ if not isinstance(port_list, list):
+ port_list = [port_list]
if action == "run":
- response = self.trex_client.start_traffic(port_id=port_list)
+ if not set(kwargs.keys()) >= set(['mul', 'duration']):
+ raise ValueError("For 'run' action should be specified mul and duration arguments")
+ response = self.trex_client.start_traffic(kwargs['mul'], kwargs['duration'], port_id_list=port_list)
res_ok, log = CTRexHltApi.process_response(port_list, response)
if res_ok:
return {"status": 1,
"stopped": 0,
"log": None}
+ else:
+ print log
elif action == "stop":
- response = self.trex_client.stop_traffic(port_id=port_list)
+ response = self.trex_client.stop_traffic(port_id_list=port_list)
res_ok, log = CTRexHltApi.process_response(port_list, response)
if res_ok:
return {"status": 1,
@@ -236,13 +249,10 @@ class CTRexHltApi(object):
@staticmethod
def process_response(port_list, response):
+ log = response.data() if response.good() else response.err()
if isinstance(port_list, list):
- res_ok, response = response
- log = CTRexHltApi.join_batch_response(response)
- else:
- res_ok = response.success
- log = str(response)
- return res_ok, log
+ log = CTRexHltApi.join_batch_response(log)
+ return response.good(), log
@staticmethod
def parse_port_list(port_list):
@@ -257,8 +267,9 @@ class CTRexHltApi(object):
@staticmethod
def join_batch_response(responses):
- return "\n".join([str(response)
- for response in responses])
+ if isinstance(responses, list):
+ return "\n".join([str(response) for response in responses])
+ return responses
@staticmethod
def generate_stream(l2_encap, mac_src, mac_dst,
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
index 7655b27c..d38411a3 100644
--- a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
+++ b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
@@ -26,6 +26,8 @@ except ImportError:
from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage
from client_utils.packet_builder import CTRexPktBuilder
+from client_utils import parsing_opts
+
import json
from common.trex_streams import *
@@ -34,32 +36,57 @@ import argparse
import tempfile
import subprocess
import os
+from dpkt import pcap
+from operator import itemgetter
+
+class BpSimException(Exception):
+ pass
+
+def merge_cap_files (pcap_file_list, out_filename, delete_src = False):
+
+ out_pkts = []
+
+ # read all packets to a list
+ for src in pcap_file_list:
+ f = open(src, 'rb')
+ reader = pcap.Reader(f)
+ pkts = reader.readpkts()
+ out_pkts += pkts
+ f.close()
+ if delete_src:
+ os.unlink(src)
+
+ # sort by the timestamp
+ out_pkts = sorted(out_pkts, key=itemgetter(0))
+
+
+ out = open(out_filename, 'wb')
+ out_writer = pcap.Writer(out)
+
+ for ts, pkt in out_pkts:
+ out_writer.writepkt(pkt, ts)
+
+ out.close()
+
class SimRun(object):
- def __init__ (self, yaml_file, dp_core_count, core_index, packet_limit, output_filename, is_valgrind, is_gdb):
+ def __init__ (self, options):
- self.yaml_file = yaml_file
- self.output_filename = output_filename
- self.dp_core_count = dp_core_count
- self.core_index = core_index
- self.packet_limit = packet_limit
- self.is_valgrind = is_valgrind
- self.is_gdb = is_gdb
+ self.options = options
# dummies
self.handler = 0
self.port_id = 0
- self.mul = {"op": "abs",
- "type": "raw",
- "value": 1}
+
+ self.mul = options.mult
self.duration = -1
def load_yaml_file (self):
streams_db = CStreamsDB()
- stream_list = streams_db.load_yaml_file(self.yaml_file)
+ stream_list = streams_db.load_yaml_file(self.options.input_file)
streams_json = []
for stream in stream_list.compiled:
@@ -99,18 +126,73 @@ class SimRun(object):
f.close()
try:
- cmd = ['bp-sim-64-debug', '--sl', '--cores', str(self.dp_core_count), '--core_index', str(self.core_index), '-f', f.name, '-o', self.output_filename]
- if self.is_valgrind:
- cmd = ['valgrind', '--leak-check=full'] + cmd
- elif self.is_gdb:
- cmd = ['gdb', '--args'] + cmd
-
- subprocess.call(cmd)
-
+ if self.options.json:
+ with open(f.name) as file:
+ data = "\n".join(file.readlines())
+ print json.dumps(json.loads(data), indent = 4, separators=(',', ': '), sort_keys = True)
+ else:
+ self.execute_bp_sim(f.name)
finally:
os.unlink(f.name)
+
+ def execute_bp_sim (self, json_filename):
+ exe = 'bp-sim-64' if self.options.release else 'bp-sim-64-debug'
+ if not os.path.exists(exe):
+ print "cannot find executable '{0}'".format(exe)
+ exit(-1)
+
+ cmd = [exe,
+ '--pcap',
+ '--sl',
+ '--cores',
+ str(self.options.cores),
+ '--limit',
+ str(self.options.limit),
+ '-f',
+ json_filename,
+ '-o',
+ self.options.output_file]
+
+ if self.options.dry:
+ cmd += ['--dry']
+
+ if self.options.core_index != None:
+ cmd += ['--core_index', str(self.options.core_index)]
+
+ if self.options.valgrind:
+ cmd = ['valgrind', '--leak-check=full', '--error-exitcode=1'] + cmd
+
+ elif self.options.gdb:
+ cmd = ['gdb', '--args'] + cmd
+
+ print "executing command: '{0}'".format(" ".join(cmd))
+ rc = subprocess.call(cmd)
+ if rc != 0:
+ raise BpSimException()
+
+ self.merge_results()
+
+
+ def merge_results (self):
+ if self.options.dry:
+ return
+
+ if self.options.cores == 1:
+ return
+
+ if self.options.core_index != None:
+ return
+
+
+ print "Mering cores output to a single pcap file...\n"
+ inputs = ["{0}-{1}".format(self.options.output_file, index) for index in xrange(0, self.options.cores)]
+ merge_cap_files(inputs, self.options.output_file, delete_src = True)
+
+
+
+
def is_valid_file(filename):
if not os.path.isfile(filename):
raise argparse.ArgumentTypeError("The file '%s' does not exist" % filename)
@@ -120,8 +202,8 @@ def is_valid_file(filename):
def unsigned_int (x):
x = int(x)
- if x <= 0:
- raise argparse.ArgumentTypeError("argument must be >= 1")
+ if x < 0:
+ raise argparse.ArgumentTypeError("argument must be >= 0")
return x
@@ -143,20 +225,30 @@ def setParserOptions():
choices = xrange(1, 9))
parser.add_argument("-n", "--core_index",
- help = "DP core index to examine [default is 0]",
- default = 0,
+ help = "Record only a specific core",
+ default = None,
type = int)
- parser.add_argument("-j", "--join",
- help = "run and join output from 0..core_count [default is False]",
- default = False,
- type = bool)
+ parser.add_argument("-r", "--release",
+ help = "runs on release image instead of debug [default is False]",
+ action = "store_true",
+ default = False)
+
+ parser.add_argument("-s", "--dry",
+ help = "dry run only (nothing will be written to the file) [default is False]",
+ action = "store_true",
+ default = False)
parser.add_argument("-l", "--limit",
help = "limit test total packet count [default is 5000]",
default = 5000,
type = unsigned_int)
+ parser.add_argument('-m', '--multiplier',
+ help = parsing_opts.match_multiplier_help,
+ dest = 'mult',
+ default = {'type':'raw', 'value':1, 'op': 'abs'},
+ type = parsing_opts.match_multiplier_strict)
group = parser.add_mutually_exclusive_group()
@@ -170,13 +262,23 @@ def setParserOptions():
action = "store_true",
default = False)
+ group.add_argument("--json",
+ help = "generate JSON output only to stdout [default is False]",
+ action = "store_true",
+ default = False)
+
return parser
def validate_args (parser, options):
- if options.core_index < 0 or options.core_index >= options.cores:
- parser.error("DP core index valid range is 0 to {0}".format(options.cores - 1))
+ if options.core_index is not None:
+ if options.core_index not in xrange(0, options.cores):
+ parser.error("DP core index valid range is 0 to {0}".format(options.cores - 1))
+
+ # zero is ok - no limit, but any other value must be at least the number of cores
+ if (options.limit != 0) and options.limit < options.cores:
+ parser.error("limit cannot be lower than number of DP cores")
def main ():
@@ -185,16 +287,19 @@ def main ():
validate_args(parser, options)
- r = SimRun(options.input_file,
- options.cores,
- options.core_index,
- options.limit,
- options.output_file,
- options.valgrind,
- options.gdb)
+ r = SimRun(options)
+
+ try:
+ r.run()
+ except KeyboardInterrupt as e:
+ print "\n\n*** Caught Ctrl + C... Exiting...\n\n"
+ exit(1)
- r.run()
+ except BpSimException as e:
+ print "\n\n*** BP sim exit code was non zero\n\n"
+ exit(1)
+ exit(0)
if __name__ == '__main__':
main()
diff --git a/scripts/automation/trex_control_plane/client_utils/external_packages.py b/scripts/automation/trex_control_plane/client_utils/external_packages.py
index 3982a1b2..9d8c4dcf 100755
--- a/scripts/automation/trex_control_plane/client_utils/external_packages.py
+++ b/scripts/automation/trex_control_plane/client_utils/external_packages.py
@@ -8,7 +8,7 @@ ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir))
PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
CLIENT_UTILS_MODULES = ['dpkt-1.8.6',
- 'PyYAML-3.01/lib',
+ 'yaml-3.11',
'texttable-0.8.4'
]
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
index 19ac7f32..e7fdb5d9 100755
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py
+++ b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
@@ -730,7 +730,7 @@ class CTRexPktBuilder(object):
None
"""
super(CTRexPktBuilder.CTRexVM, self).__init__()
- self.vm_variables = {}
+ self.vm_variables = {'instructions': [], 'split_by_var': ""}
self._inst_by_offset = {} # this data structure holds only offset-related instructions, ordered in tuples
self._off_inst_by_name = {}
@@ -845,6 +845,10 @@ class CTRexPktBuilder(object):
list holds variables data of VM
"""
+
+ return self.vm_variables
+ # !!! TODO: review code below !!!
+
# at first, dump all CTRexVMFlowVariable instructions
ret_val = [var.dump()
for key, var in self.vm_variables.items()]
diff --git a/scripts/automation/trex_control_plane/common/external_packages.py b/scripts/automation/trex_control_plane/common/external_packages.py
index 62121d4f..7353c397 100755
--- a/scripts/automation/trex_control_plane/common/external_packages.py
+++ b/scripts/automation/trex_control_plane/common/external_packages.py
@@ -7,7 +7,7 @@ CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory
PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
-CLIENT_UTILS_MODULES = ['PyYAML-3.01/lib'
+CLIENT_UTILS_MODULES = ['yaml-3.11'
]
def import_common_modules():
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
index 44731088..800b6d49 100755
--- a/scripts/automation/trex_control_plane/common/trex_streams.py
+++ b/scripts/automation/trex_control_plane/common/trex_streams.py
@@ -288,10 +288,10 @@ class CStreamsDB(object):
loaded_obj,
[StreamPack(v.stream_id, v.stream.dump())
for k, v in compiled_streams.items()]))
-
except Exception as e:
return None
+
return self.get_stream_pack(stream_pack_name)
def load_streams(self, LoadedStreamList_obj):