author     imarom <imarom@cisco.com>   2016-02-01 03:45:44 -0500
committer  imarom <imarom@cisco.com>   2016-02-01 03:47:29 -0500
commit     f9dcbd38e90601f5571bb5bcfb67170876ecd3c6 (patch)
tree       4a6eb47e4a1a528e3df2c866c717b6cfad5d41c3
parent     696cbfb3616a506cca7e7c68c1b92ff037f54f87 (diff)
simulator support (API and profiles)
-rw-r--r--   scripts/api/stl/examples/udp_64B.pcap                                         | Bin 104 -> 0 bytes
-rw-r--r--   scripts/api/stl/trex_stl_api.py                                               |  12
-rw-r--r--   scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py  |  18
-rw-r--r--   scripts/automation/trex_control_plane/client/trex_stateless_sim.py            | 257
-rwxr-xr-x   scripts/automation/trex_control_plane/common/trex_stats.py                    |  10
-rw-r--r--   scripts/stl/imix_1pkt_vm.yaml                                                 |   2
-rw-r--r--   scripts/stl/profiles/__init__.py                                              |   0
-rw-r--r--   scripts/stl/profiles/imix.py                                                  |  87
-rw-r--r--   src/rpc-server/commands/trex_rpc_cmd_general.cpp                              |   6
-rw-r--r--   src/rpc-server/commands/trex_rpc_cmd_stream.cpp                               |  12
-rw-r--r--   src/sim/trex_sim_stateless.cpp                                                |   8
-rw-r--r--   src/stateless/cp/trex_stateless_port.cpp                                      |  14
12 files changed, 332 insertions(+), 94 deletions(-)
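
This commit replaces the old SimRun flow with a reusable STLSim class and exports it through trex_stl_api.py next to the existing client objects. A minimal sketch of driving the new simulator API programmatically; the checkout path and profile path below are assumptions for illustration, not part of the commit:

    import sys
    sys.path.insert(0, '/path/to/trex-core/scripts/api/stl')    # hypothetical checkout location

    from trex_stl_api import STLSim, STLError

    try:
        # without bp_sim_path, STLSim() finds the 'scripts' dir only when the
        # current working directory is inside a trex-core checkout
        sim = STLSim()
        sim.run(input_list    = 'stl/profiles/imix.py',   # YAML files, Python profiles or STLStream objects
                outfile       = '/tmp/imix_sim.pcap',     # None would make this a dry run
                dp_core_count = 1,
                pkt_limit     = 5000,
                mult          = "1",
                mode          = 'none')                   # 'valgrind', 'gdb' and 'json' are also accepted
    except STLError as e:
        print e
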
diff --git a/scripts/api/stl/examples/udp_64B.pcap b/scripts/api/stl/examples/udp_64B.pcap
deleted file mode 100644
index 699b9c80..00000000
--- a/scripts/api/stl/examples/udp_64B.pcap
+++ /dev/null
Binary files differ
diff --git a/scripts/api/stl/trex_stl_api.py b/scripts/api/stl/trex_stl_api.py
index 09a0af18..0bfd1181 100644
--- a/scripts/api/stl/trex_stl_api.py
+++ b/scripts/api/stl/trex_stl_api.py
@@ -7,19 +7,29 @@ import time
root_path = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.join(root_path, '../../automation/trex_control_plane/'))
+sys.path.insert(0, os.path.join(root_path, '../../stl/'))
# aliasing
import common.trex_streams
from client_utils.packet_builder import CTRexPktBuilder
import common.trex_stl_exceptions
import client.trex_stateless_client
+import client.trex_stateless_sim
+# client and errors
STLClient = client.trex_stateless_client.STLClient
STLError = common.trex_stl_exceptions.STLError
-STLStream = common.trex_streams.STLStream
+# streams
+STLStream = common.trex_streams.STLStream
STLTXCont = common.trex_streams.STLTXCont
STLTXSingleBurst = common.trex_streams.STLTXSingleBurst
STLTXMultiBurst = common.trex_streams.STLTXMultiBurst
+# packet builder
STLPktBuilder = CTRexPktBuilder
+
+# simulator
+STLSim = client.trex_stateless_sim.STLSim
+
+
diff --git a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py b/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
index d19d76b4..6e83c3ac 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
+++ b/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
@@ -83,7 +83,10 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
def run_sim (self, yaml, output, options = "", silent = False):
- user_cmd = "{0} {1} {2}".format(yaml, output, options)
+ if output:
+ user_cmd = "{0} -o {1} {2}".format(yaml, output, options)
+ else:
+ user_cmd = "{0} {1}".format(yaml, options)
cmd = "{0} {1} {2}".format(sys.executable,
self.stl_sim,
@@ -100,13 +103,18 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
def golden_run (self, testname, profile, options, silent = False):
+
output_cap = os.path.join("/tmp/", "{0}_test.cap".format(testname))
golden_cap = os.path.join(self.test_path, "stl/golden/{0}_golden.cap".format(testname))
- rc = self.run_sim(self.profiles[profile], output_cap, options, silent)
- assert_equal(rc, True)
+ try:
+ rc = self.run_sim(self.profiles[profile], output_cap, options, silent)
+ assert_equal(rc, True)
+
+ self.compare_caps(output_cap, golden_cap)
- self.compare_caps(output_cap, golden_cap)
+ finally:
+ os.unlink(output_cap)
@@ -129,7 +137,7 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
print "\n"
for profile in self.valgrind_profiles:
print "\n*** testing profile '{0}' ***\n".format(profile)
- rc = self.run_sim(profile, output = "dummy.cap", options = "--dry --cores 8 --limit 500 --valgrind", silent = False)
+ rc = self.run_sim(profile, output = None, options = "--cores 8 --limit 500 --valgrind", silent = False)
assert_equal(rc, True)
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
index a8f24d65..7f65996d 100644
--- a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
+++ b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
@@ -24,9 +24,12 @@ except ImportError:
# support import for Python 3
import client.outer_packages
+from common.trex_stl_exceptions import STLError, STLArgumentError
+from yaml.scanner import ScannerError
from common.trex_streams import *
from client_utils import parsing_opts
+import re
import json
@@ -44,6 +47,9 @@ class BpSimException(Exception):
def merge_cap_files (pcap_file_list, out_filename, delete_src = False):
out_pkts = []
+ if not all([os.path.exists(f) for f in pcap_file_list]):
+ print "failed to merge cap file list...\nnot all files exist\n"
+ return
# read all packets to a list
for src in pcap_file_list:
@@ -69,126 +75,215 @@ def merge_cap_files (pcap_file_list, out_filename, delete_src = False):
+# stateless simulation
+class STLSim(object):
+ def __init__ (self, bp_sim_path = None, handler = 0, port_id = 0):
-class SimRun(object):
- def __init__ (self, options):
+ if not bp_sim_path:
+ # auto find scripts
+ m = re.match(".*/trex-core", os.getcwd())
+ if not m:
+ raise STLError('cannot find BP sim path, please provide it')
- self.options = options
+ self.bp_sim_path = os.path.join(m.group(0), 'scripts')
+
+ else:
+ self.bp_sim_path = bp_sim_path
# dummies
- self.handler = 0
- self.port_id = 0
+ self.handler = handler
+ self.port_id = port_id
+
- self.mul = options.mult
+ def load_input_file (self, input_file):
+ # try YAML
+ try:
+ streams_db = CStreamsDB()
+ stream_list = streams_db.load_yaml_file(input_file)
- self.duration = -1
+ # convert to new style stream object
+ return [HACKSTLStream(stream) for stream in stream_list.compiled]
+ except ScannerError:
+ pass
+
+ # try python
+ try:
+ basedir = os.path.dirname(input_file)
+ sys.path.append(basedir)
- def load_yaml_file (self):
- streams_db = CStreamsDB()
- stream_list = streams_db.load_yaml_file(self.options.input_file)
+ file = os.path.basename(input_file).split('.py')[0]
+ module = __import__(file, globals(), locals(), [], -1)
- streams_json = []
- for stream in stream_list.compiled:
- stream_json = {"id":1,
- "jsonrpc": "2.0",
- "method": "add_stream",
- "params": {"handler": self.handler,
- "port_id": self.port_id,
- "stream_id": stream.stream_id,
- "stream": stream.stream}
- }
+ return module.register().get_streams()
- streams_json.append(stream_json)
+ except AttributeError:
+ pass
- return streams_json
+ raise STLError("unrecognized input file format: '{0}'".format(input_file))
- def generate_start_cmd (self):
+ def generate_start_cmd (self, mult = "1", force = True, duration = -1):
return {"id":1,
"jsonrpc": "2.0",
"method": "start_traffic",
"params": {"handler": self.handler,
- "force": False,
+ "force": force,
"port_id": self.port_id,
- "mul": parsing_opts.decode_multiplier(self.mul),
- "duration": self.duration}
+ "mul": parsing_opts.decode_multiplier(mult),
+ "duration": duration}
}
- def run (self):
- # load the streams
- cmds_json = (self.load_yaml_file())
- cmds_json.append(self.generate_start_cmd())
+ # run command
+ # input_list    - a list of streams or YAML files
+ # outfile       - pcap file to save the output; if None it is a dry run
+ # dp_core_count - how many DP cores to use
+ # dp_core_index - simulate only a specific DP core without merging
+ # is_debug      - debug or release image
+ # pkt_limit     - how many packets to simulate
+ # mult          - multiplier
+ # mode          - can be 'valgrind', 'gdb', 'json' or 'none'
+ def run (self,
+ input_list,
+ outfile = None,
+ dp_core_count = 1,
+ dp_core_index = None,
+ is_debug = True,
+ pkt_limit = 5000,
+ mult = "1",
+ duration = -1,
+ mode = 'none'):
+
+ if not mode in ['none', 'gdb', 'valgrind', 'json']:
+ raise STLArgumentError('mode', mode)
+
+ # listify
+ input_list = input_list if isinstance(input_list, list) else [input_list]
+
+ # check streams arguments
+ if not all([isinstance(i, (STLStream, str)) for i in input_list]):
+ raise STLArgumentError('input_list', input_list)
+
+ # split into two types
+ input_files = [x for x in input_list if isinstance(x, str)]
+ stream_list = [x for x in input_list if isinstance(x, STLStream)]
+
+ # handle YAMLs
+ for input_file in input_files:
+ stream_list += self.load_input_file(input_file)
+
+
+ # load streams
+ cmds_json = []
+ for stream in stream_list:
+ cmd = {"id":1,
+ "jsonrpc": "2.0",
+ "method": "add_stream",
+ "params": {"handler": self.handler,
+ "port_id": self.port_id,
+ "stream_id": stream.get_id(),
+ "stream": stream.to_json()}
+ }
+
+ cmds_json.append(cmd)
+
+ # generate start command
+ cmds_json.append(self.generate_start_cmd(mult = mult,
+ force = True,
+ duration = duration))
+
+ if mode == 'json':
+ print json.dumps(cmds_json, indent = 4, separators=(',', ': '), sort_keys = True)
+ return
+
+ # start simulation
+ self.outfile = outfile
+ self.dp_core_count = dp_core_count
+ self.dp_core_index = dp_core_index
+ self.is_debug = is_debug
+ self.pkt_limit = pkt_limit
+ self.mult = mult
+ self.duration = duration
+ self.mode = mode
+
+ self.__run(cmds_json)
+
+ # internal run
+ def __run (self, cmds_json):
+
+ # write to temp file
f = tempfile.NamedTemporaryFile(delete = False)
f.write(json.dumps(cmds_json))
f.close()
+ # launch bp-sim
try:
- if self.options.json:
- with open(f.name) as file:
- data = "\n".join(file.readlines())
- print json.dumps(json.loads(data), indent = 4, separators=(',', ': '), sort_keys = True)
- else:
- self.execute_bp_sim(f.name)
+ self.execute_bp_sim(f.name)
finally:
os.unlink(f.name)
def execute_bp_sim (self, json_filename):
- exe = './bp-sim-64' if self.options.release else './bp-sim-64-debug'
+ if self.is_debug:
+ exe = os.path.join(self.bp_sim_path, 'bp-sim-64-debug')
+ else:
+ exe = os.path.join(self.bp_sim_path, 'bp-sim-64')
+
if not os.path.exists(exe):
- print "cannot find executable '{0}'".format(exe)
- exit(-1)
+ raise STLError("'{0}' does not exist, please build it before calling the simulation".format(exe))
+
cmd = [exe,
'--pcap',
'--sl',
'--cores',
- str(self.options.cores),
+ str(self.dp_core_count),
'--limit',
- str(self.options.limit),
+ str(self.pkt_limit),
'-f',
- json_filename,
- '-o',
- self.options.output_file]
+ json_filename]
- if self.options.dry:
+ # out or dry
+ if not self.outfile:
cmd += ['--dry']
+ cmd += ['-o', '/dev/null']
+ else:
+ cmd += ['-o', self.outfile]
- if self.options.core_index != None:
- cmd += ['--core_index', str(self.options.core_index)]
+ if self.dp_core_index != None:
+ cmd += ['--core_index', str(self.dp_core_index)]
- if self.options.valgrind:
+ if self.mode == 'valgrind':
cmd = ['valgrind', '--leak-check=full', '--error-exitcode=1'] + cmd
- elif self.options.gdb:
+ elif self.mode == 'gdb':
cmd = ['gdb', '--args'] + cmd
print "executing command: '{0}'".format(" ".join(cmd))
rc = subprocess.call(cmd)
if rc != 0:
- raise BpSimException()
+ raise STLError('simulation has failed with error code {0}'.format(rc))
self.merge_results()
def merge_results (self):
- if self.options.dry:
+ if not self.outfile:
return
- if self.options.cores == 1:
+ if self.dp_core_count == 1:
return
- if self.options.core_index != None:
+ if self.dp_core_index != None:
return
print "Merging cores output to a single pcap file...\n"
- inputs = ["{0}-{1}".format(self.options.output_file, index) for index in xrange(0, self.options.cores)]
- merge_cap_files(inputs, self.options.output_file, delete_src = True)
+ inputs = ["{0}-{1}".format(self.outfile, index) for index in xrange(0, self.dp_core_count)]
+ merge_cap_files(inputs, self.outfile, delete_src = True)
@@ -214,18 +309,22 @@ def setParserOptions():
help = "input file in YAML or Python format",
type = is_valid_file)
- parser.add_argument("output_file",
+ parser.add_argument("-o",
+ dest = "output_file",
+ default = None,
help = "output file in ERF format")
parser.add_argument("-c", "--cores",
help = "DP core count [default is 1]",
+ dest = "dp_core_count",
default = 1,
type = int,
choices = xrange(1, 9))
parser.add_argument("-n", "--core_index",
help = "Record only a specific core",
+ dest = "dp_core_index",
default = None,
type = int)
@@ -234,10 +333,6 @@ def setParserOptions():
action = "store_true",
default = False)
- parser.add_argument("-s", "--dry",
- help = "dry run only (nothing will be written to the file) [default is False]",
- action = "store_true",
- default = False)
parser.add_argument("-l", "--limit",
help = "limit test total packet count [default is 5000]",
@@ -250,6 +345,13 @@ def setParserOptions():
default = "1",
type = parsing_opts.match_multiplier_strict)
+ parser.add_argument('-d', '--duration',
+ help = "run duration",
+ dest = 'duration',
+ default = -1,
+ type = float)
+
+
group = parser.add_mutually_exclusive_group()
group.add_argument("-x", "--valgrind",
@@ -272,12 +374,12 @@ def setParserOptions():
def validate_args (parser, options):
- if options.core_index:
- if not options.core_index in xrange(0, options.cores):
- parser.error("DP core index valid range is 0 to {0}".format(options.cores - 1))
+ if options.dp_core_index:
+ if not options.dp_core_index in xrange(0, options.dp_core_count):
+ parser.error("DP core index valid range is 0 to {0}".format(options.dp_core_count - 1))
# zero is ok - no limit, but other values must be at least as the number of cores
- if (options.limit != 0) and options.limit < options.cores:
+ if (options.limit != 0) and options.limit < options.dp_core_count:
parser.error("limit cannot be lower than number of DP cores")
@@ -287,16 +389,35 @@ def main ():
validate_args(parser, options)
- r = SimRun(options)
+
+
+ if options.valgrind:
+ mode = 'valgrind'
+ elif options.gdb:
+ mode = 'gdb'
+ elif options.json:
+ mode = 'json'
+ else:
+ mode = 'none'
try:
- r.run()
+ r = STLSim()
+ r.run(input_list = options.input_file,
+ outfile = options.output_file,
+ dp_core_count = options.dp_core_count,
+ dp_core_index = options.dp_core_index,
+ is_debug = (not options.release),
+ pkt_limit = options.limit,
+ mult = options.mult,
+ duration = options.duration,
+ mode = mode)
+
except KeyboardInterrupt as e:
print "\n\n*** Caught Ctrl + C... Exiting...\n\n"
exit(1)
- except BpSimException as e:
- print "\n\n*** BP sim exit code was non zero\n\n"
+ except STLError as e:
+ print e
exit(1)
exit(0)
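
Because mode='json' prints the generated commands and returns before bp-sim is launched, it doubles as a quick way to inspect the add_stream/start_traffic RPC payloads. A rough sketch, assuming an explicit bp_sim_path and one of the bundled YAML profiles:

    from trex_stl_api import STLSim

    sim = STLSim(bp_sim_path = '/path/to/trex-core/scripts')   # hypothetical path
    sim.run(input_list = ['stl/imix_1pkt_vm.yaml'],
            mode       = 'json')   # dumps the JSON-RPC command list and returns without simulating
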
diff --git a/scripts/automation/trex_control_plane/common/trex_stats.py b/scripts/automation/trex_control_plane/common/trex_stats.py
index 464ee56a..3bd6e0cd 100755
--- a/scripts/automation/trex_control_plane/common/trex_stats.py
+++ b/scripts/automation/trex_control_plane/common/trex_stats.py
@@ -524,8 +524,16 @@ class CPortStats(CTRexStats):
def generate_stats(self):
+ state = self._port_obj.get_port_state_name() if self._port_obj else ""
+ if state == "ACTIVE":
+ state = format_text(state, 'green', 'bold')
+ elif state == "PAUSE":
+ state = format_text(state, 'magenta', 'bold')
+ else:
+ state = format_text(state, 'bold')
+
return {"owner": self._port_obj.user if self._port_obj else "",
- "state": self._port_obj.get_port_state_name() if self._port_obj else "",
+ "state": "{0}".format(state),
"--": " ",
"---": " ",
diff --git a/scripts/stl/imix_1pkt_vm.yaml b/scripts/stl/imix_1pkt_vm.yaml
index bdb0b438..21075fa2 100644
--- a/scripts/stl/imix_1pkt_vm.yaml
+++ b/scripts/stl/imix_1pkt_vm.yaml
@@ -25,7 +25,7 @@
"add_value" : 1,
"is_big_endian" : false,
"name" : "l3_src",
- "pkt_offset" : 34,
+ "pkt_offset" : 30,
"type" : "write_flow_var"
}
]
diff --git a/scripts/stl/profiles/__init__.py b/scripts/stl/profiles/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/scripts/stl/profiles/__init__.py
diff --git a/scripts/stl/profiles/imix.py b/scripts/stl/profiles/imix.py
new file mode 100644
index 00000000..c0305cc3
--- /dev/null
+++ b/scripts/stl/profiles/imix.py
@@ -0,0 +1,87 @@
+
+import dpkt
+
+from common.trex_streams import *
+from client_utils.packet_builder import CTRexPktBuilder
+
+
+class STLImix(object):
+
+ def __init__ (self):
+ ip_range = {'src' : {}, 'dst': {}}
+
+ ip_range['src']['start'] = "10.0.0.1"
+ ip_range['src']['end'] = "10.0.0.254"
+ ip_range['dst']['start'] = "8.0.0.1"
+ ip_range['dst']['end'] = "8.0.0.254"
+
+ self.ip_range = ip_range
+
+ def get_streams (self, flip = False):
+
+ # construct the base packet for the profile
+ base_pkt = CTRexPktBuilder()
+
+ base_pkt.add_pkt_layer("l2", dpkt.ethernet.Ethernet())
+ base_pkt.set_layer_attr("l2", "type", dpkt.ethernet.ETH_TYPE_IP)
+ base_pkt.add_pkt_layer("l3_ip", dpkt.ip.IP())
+ base_pkt.add_pkt_layer("l4_udp", dpkt.udp.UDP())
+
+
+ if not flip:
+ src = self.ip_range['src']
+ dst = self.ip_range['dst']
+ else:
+ src = self.ip_range['dst']
+ dst = self.ip_range['src']
+
+ base_pkt.set_vm_ip_range(ip_layer_name = "l3_ip",
+ ip_field = "src",
+ ip_start = src['start'],
+ ip_end = src['end'],
+ operation = "inc",
+ split = True)
+
+ base_pkt.set_vm_ip_range(ip_layer_name = "l3_ip",
+ ip_field = "dst",
+ ip_start = dst['start'],
+ ip_end = dst['end'],
+ operation = "inc")
+
+
+
+ # pad to 60 bytes
+ pkt_1 = base_pkt.clone()
+ payload_size = 60 - len(pkt_1.get_layer('l2'))
+ pkt_1.set_pkt_payload("a" * payload_size)
+
+ pkt_1.set_layer_attr("l3_ip", "len", len(pkt_1.get_layer('l3_ip')))
+
+
+ s1 = STLStream(packet = pkt_1,
+ mode = STLTXCont())
+
+ # stream 2
+ pkt_2 = base_pkt.clone()
+ payload_size = 590 - len(pkt_2.get_layer('l2'))
+ pkt_2.set_pkt_payload("a" * payload_size)
+
+ pkt_2.set_layer_attr("l3_ip", "len", len(pkt_2.get_layer('l3_ip')))
+
+ s2 = STLStream(packet = pkt_2,
+ mode = STLTXCont())
+
+
+ # stream 3
+ pkt_3 = base_pkt.clone()
+ payload_size = 1514 - len(pkt_3.get_layer('l2'))
+ pkt_3.set_pkt_payload("a" * payload_size)
+
+ pkt_3.set_layer_attr("l3_ip", "len", len(pkt_3.get_layer('l3_ip')))
+
+ s3 = STLStream(packet = pkt_3,
+ mode = STLTXCont())
+
+ return [s1, s2, s3]
+
+# dynamic load
+def register():
+ return STLImix()
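
load_input_file() consumes Python profiles through this register() hook: the module is imported by file name and get_streams() is called on the object register() returns. A hand-rolled equivalent of that lookup, assuming the trex_control_plane packages are already importable (as arranged by trex_stl_api.py) and that the script runs from the repository root:

    import sys
    sys.path.append('scripts/stl/profiles')                    # assumption: repo-root working directory

    # same Python 2 style dynamic import used by STLSim.load_input_file()
    module  = __import__('imix', globals(), locals(), [], -1)
    streams = module.register().get_streams(flip = False)      # three continuous IMIX streams (60/590/1514 bytes)
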
diff --git a/src/rpc-server/commands/trex_rpc_cmd_general.cpp b/src/rpc-server/commands/trex_rpc_cmd_general.cpp
index 66999144..084e154a 100644
--- a/src/rpc-server/commands/trex_rpc_cmd_general.cpp
+++ b/src/rpc-server/commands/trex_rpc_cmd_general.cpp
@@ -243,7 +243,7 @@ TrexRpcCmdAcquire::_run(const Json::Value &params, Json::Value &result) {
try {
port->acquire(new_owner, session_id, force);
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -265,7 +265,7 @@ TrexRpcCmdRelease::_run(const Json::Value &params, Json::Value &result) {
try {
port->release();
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -287,7 +287,7 @@ TrexRpcCmdGetPortStats::_run(const Json::Value &params, Json::Value &result) {
try {
port->encode_stats(result["result"]);
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
diff --git a/src/rpc-server/commands/trex_rpc_cmd_stream.cpp b/src/rpc-server/commands/trex_rpc_cmd_stream.cpp
index 69e5dd18..8c07bc46 100644
--- a/src/rpc-server/commands/trex_rpc_cmd_stream.cpp
+++ b/src/rpc-server/commands/trex_rpc_cmd_stream.cpp
@@ -116,7 +116,7 @@ TrexRpcCmdAddStream::_run(const Json::Value &params, Json::Value &result) {
try {
port->add_stream(stream);
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -350,7 +350,7 @@ TrexRpcCmdRemoveStream::_run(const Json::Value &params, Json::Value &result) {
try {
port->remove_stream(stream);
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -374,7 +374,7 @@ TrexRpcCmdRemoveAllStreams::_run(const Json::Value &params, Json::Value &result)
try {
port->remove_and_delete_all_streams();
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -493,7 +493,7 @@ TrexRpcCmdStopTraffic::_run(const Json::Value &params, Json::Value &result) {
try {
port->stop_traffic();
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -550,7 +550,7 @@ TrexRpcCmdPauseTraffic::_run(const Json::Value &params, Json::Value &result) {
try {
port->pause_traffic();
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
@@ -571,7 +571,7 @@ TrexRpcCmdResumeTraffic::_run(const Json::Value &params, Json::Value &result) {
try {
port->resume_traffic();
- } catch (const TrexRpcException &ex) {
+ } catch (const TrexException &ex) {
generate_execute_err(result, ex.what());
}
diff --git a/src/sim/trex_sim_stateless.cpp b/src/sim/trex_sim_stateless.cpp
index 13f264cf..46b9916f 100644
--- a/src/sim/trex_sim_stateless.cpp
+++ b/src/sim/trex_sim_stateless.cpp
@@ -268,12 +268,16 @@ SimStateless::prepare_dataplane() {
void
SimStateless::execute_json(const std::string &json_filename) {
-
+ std::string rep;
std::ifstream test(json_filename);
std::stringstream buffer;
buffer << test.rdbuf();
- std::string rep = m_trex_stateless->get_rpc_server()->test_inject_request(buffer.str());
+ try {
+ rep = m_trex_stateless->get_rpc_server()->test_inject_request(buffer.str());
+ } catch (TrexRpcException &e) {
+ throw SimRunException(e.what());
+ }
Json::Value root;
Json::Reader reader;
diff --git a/src/stateless/cp/trex_stateless_port.cpp b/src/stateless/cp/trex_stateless_port.cpp
index d47802ec..2286ef76 100644
--- a/src/stateless/cp/trex_stateless_port.cpp
+++ b/src/stateless/cp/trex_stateless_port.cpp
@@ -114,9 +114,9 @@ TrexStatelessPort::acquire(const std::string &user, uint32_t session_id, bool fo
} else {
/* not same user or session id and not force - report error */
if (get_owner().get_name() == user) {
- throw TrexRpcException("port is already owned by another session of '" + user + "'");
+ throw TrexException("port is already owned by another session of '" + user + "'");
} else {
- throw TrexRpcException("port is already taken by '" + get_owner().get_name() + "'");
+ throw TrexException("port is already taken by '" + get_owner().get_name() + "'");
}
}
@@ -163,7 +163,7 @@ TrexStatelessPort::start_traffic(const TrexPortMultiplier &mul, double duration,
factor,
&fail_msg);
if (!rc) {
- throw TrexRpcException(fail_msg);
+ throw TrexException(fail_msg);
}
/* generate a message to all the relevant DP cores to start transmitting */
@@ -238,11 +238,11 @@ TrexStatelessPort::pause_traffic(void) {
verify_state(PORT_STATE_TX);
if (m_last_all_streams_continues == false) {
- throw TrexRpcException(" pause is supported when all streams are in continues mode ");
+ throw TrexException("pause is supported only when all streams are in continuous mode");
}
if ( m_last_duration>0.0 ) {
- throw TrexRpcException(" pause is supported when duration is not enable is start command ");
+ throw TrexException("pause is supported only when no duration was given in the start command");
}
TrexStatelessCpToDpMsgBase *pause_msg = new TrexStatelessDpPause(m_port_id);
@@ -296,7 +296,7 @@ TrexStatelessPort::update_traffic(const TrexPortMultiplier &mul, bool force) {
case TrexPortMultiplier::OP_SUB:
factor = (m_factor - new_factor) / m_factor;
if (factor <= 0) {
- throw TrexRpcException("Update request will lower traffic to less than zero");
+ throw TrexException("Update request will lower traffic to less than zero");
}
break;
@@ -347,7 +347,7 @@ bool
TrexStatelessPort::verify_state(int state, bool should_throw) const {
if ( (state & m_port_state) == 0 ) {
if (should_throw) {
- throw TrexRpcException("command cannot be executed on current state: '" + get_state_as_string() + "'");
+ throw TrexException("command cannot be executed on current state: '" + get_state_as_string() + "'");
} else {
return false;
}