summary refs log tree commit diff stats
path: root/scripts/automation/trex_control_plane/client
diff options
context:
space:
mode:
Diffstat (limited to 'scripts/automation/trex_control_plane/client')
-rwxr-xr-x  scripts/automation/trex_control_plane/client/trex_hltapi.py           91
-rw-r--r--  scripts/automation/trex_control_plane/client/trex_stateless_sim.py   265
2 files changed, 316 insertions, 40 deletions
diff --git a/scripts/automation/trex_control_plane/client/trex_hltapi.py b/scripts/automation/trex_control_plane/client/trex_hltapi.py
index 92768ca4..848d5a9e 100755
--- a/scripts/automation/trex_control_plane/client/trex_hltapi.py
+++ b/scripts/automation/trex_control_plane/client/trex_hltapi.py
@@ -17,14 +17,16 @@ class CTRexHltApi(object):
self._port_data = {}
# ----- session functions ----- #
-
- def connect(self, device, port_list, username, port=5050, reset=False, break_locks=False):
+ # sync = RPC, async = ZMQ
+ def connect(self, device, port_list, username, sync_port = 4501, async_port = 4500, reset=False, break_locks=False):
ret_dict = {"status": 0}
- self.trex_client = CTRexStatelessClient(username, device, port)
- res_ok, msg = self.trex_client.connect()
- if not res_ok:
+ self.trex_client = CTRexStatelessClient(username, device, sync_port, async_port)
+
+ rc = self.trex_client.connect()
+ if rc.bad():
+
self.trex_client = None
- ret_dict.update({"log": msg})
+ ret_dict.update({"log": rc.err()})
return ret_dict
# arrived here, connection successfully created with server
# next, try acquiring ports of TRex
@@ -70,7 +72,6 @@ class CTRexHltApi(object):
port_list = self.parse_port_list(port_list)
response = self.trex_client.release(port_list)
res_ok, log = CTRexHltApi.process_response(port_list, response)
- print log
if not res_ok:
ret_dict.update({"log": log})
return ret_dict
@@ -89,11 +90,13 @@ class CTRexHltApi(object):
return {"status": 1, "log": None}
# ----- traffic functions ----- #
- def traffic_config(self, mode, port_handle,
+ def traffic_config(self, mode, port_list,
l2_encap="ethernet_ii", mac_src="00:00:01:00:00:01", mac_dst="00:00:00:00:00:00",
l3_protocol="ipv4", ip_src_addr="0.0.0.0", ip_dst_addr="192.0.0.1", l3_length=110,
transmit_mode="continuous", rate_pps=100,
**kwargs):
+        if not isinstance(port_list, list):
+            port_list = [port_list]
ALLOWED_MODES = ["create", "modify", "remove", "enable", "disable", "reset"]
if mode not in ALLOWED_MODES:
raise ValueError("mode must be one of the following values: {modes}".format(modes=ALLOWED_MODES))
@@ -119,45 +122,55 @@ class CTRexHltApi(object):
except Exception as e:
# some exception happened during the stream creation
return {"status": 0, "log": str(e)}
- # try adding the stream, until free stream_id is found
- port_data = self._port_data.get(port_handle)
- id_candidate = None
- # TODO: change this to better implementation
- while True:
- id_candidate = port_data["stream_id_gen"].next()
- response = self.trex_client.add_stream(stream_id=id_candidate,
- stream_obj=stream_obj,
- port_id=port_handle)
- res_ok, log = CTRexHltApi.process_response(port_handle, response)
- if res_ok:
- # found non-taken stream_id on server
- # save it for modifying needs
- port_data["streams"].update({id_candidate: stream_obj})
- break
- else:
- # proceed to another iteration to use another id
- continue
- return {"status": 1,
- "stream_id": id_candidate,
- "log": None}
+ # try adding the stream per port, until free stream_id is found
+ for port_id in port_list:
+ port_data = self._port_data.get(port_id)
+ id_candidate = None
+ # TODO: change this to better implementation
+ while True:
+ id_candidate = port_data["stream_id_gen"].next()
+ response = self.trex_client.add_stream(stream_id=id_candidate,
+ stream_obj=stream_obj.dump(),
+ port_id_list=port_id)
+ res_ok, log = CTRexHltApi.process_response(port_id, response)
+ if res_ok:
+ # found non-taken stream_id on server
+ # save it for modifying needs
+ port_data["streams"].update({id_candidate: stream_obj})
+ break
+ else:
+ print log
+ # proceed to another iteration to use another id
+ print 'need another iteration?'
+ continue
+ return {"status": 1,
+ "stream_id": id_candidate,
+ "log": None}
+
else:
raise NotImplementedError("mode '{0}' is not supported yet on TRex".format(mode))
- def traffic_control(self, action, port_handle):
+ def traffic_control(self, action, port_handle, **kwargs):
ALLOWED_ACTIONS = ["clear_stats", "run", "stop", "sync_run"]
if action not in ALLOWED_ACTIONS:
raise ValueError("action must be one of the following values: {actions}".format(actions=ALLOWED_ACTIONS))
# ret_dict = {"status": 0, "stopped": 1}
port_list = self.parse_port_list(port_handle)
+        if not isinstance(port_list, list):
+            port_list = [port_list]
if action == "run":
- response = self.trex_client.start_traffic(port_id=port_list)
+ if not set(kwargs.keys()) >= set(['mul', 'duration']):
+ raise ValueError("For 'run' action should be specified mul and duration arguments")
+ response = self.trex_client.start_traffic(kwargs['mul'], kwargs['duration'], port_id_list=port_list)
res_ok, log = CTRexHltApi.process_response(port_list, response)
if res_ok:
return {"status": 1,
"stopped": 0,
"log": None}
+ else:
+ print log
elif action == "stop":
- response = self.trex_client.stop_traffic(port_id=port_list)
+ response = self.trex_client.stop_traffic(port_id_list=port_list)
res_ok, log = CTRexHltApi.process_response(port_list, response)
if res_ok:
return {"status": 1,
@@ -236,13 +249,10 @@ class CTRexHltApi(object):
@staticmethod
def process_response(port_list, response):
+ log = response.data() if response.good() else response.err()
if isinstance(port_list, list):
- res_ok, response = response
- log = CTRexHltApi.join_batch_response(response)
- else:
- res_ok = response.success
- log = str(response)
- return res_ok, log
+ log = CTRexHltApi.join_batch_response(log)
+ return response.good(), log
@staticmethod
def parse_port_list(port_list):
@@ -257,8 +267,9 @@ class CTRexHltApi(object):
@staticmethod
def join_batch_response(responses):
- return "\n".join([str(response)
- for response in responses])
+        if isinstance(responses, list):
+            return "\n".join([str(response) for response in responses])
+        return responses
@staticmethod
def generate_stream(l2_encap, mac_src, mac_dst,
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
new file mode 100644
index 00000000..5d06c6e5
--- /dev/null
+++ b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+Itay Marom
+Cisco Systems, Inc.
+
+Copyright (c) 2015-2015 Cisco Systems, Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+try:
+ # support import for Python 2
+ import outer_packages
+except ImportError:
+ # support import for Python 3
+ import client.outer_packages
+
+from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage
+from client_utils.packet_builder import CTRexPktBuilder
+import json
+
+from common.trex_streams import *
+
+import argparse
+import tempfile
+import subprocess
+import os
+from dpkt import pcap
+from operator import itemgetter
+
+
+def merge_cap_files (pcap_file_list, out_filename, delete_src = False):
+
+ out_pkts = []
+
+ # read all packets to a list
+ for src in pcap_file_list:
+ f = open(src, 'r')
+ reader = pcap.Reader(f)
+ pkts = reader.readpkts()
+ out_pkts += pkts
+ f.close()
+ if delete_src:
+ os.unlink(src)
+
+ # sort by the timestamp
+ out_pkts = sorted(out_pkts, key=itemgetter(0))
+
+
+ out = open(out_filename, 'w')
+ out_writer = pcap.Writer(out)
+
+ for ts, pkt in out_pkts:
+ out_writer.writepkt(pkt, ts)
+
+ out.close()
+
+
+
+
+class SimRun(object):
+ def __init__ (self, yaml_file, dp_core_count, core_index, packet_limit, output_filename, is_valgrind, is_gdb, limit):
+
+ self.yaml_file = yaml_file
+ self.output_filename = output_filename
+ self.dp_core_count = dp_core_count
+ self.core_index = core_index
+ self.packet_limit = packet_limit
+ self.is_valgrind = is_valgrind
+ self.is_gdb = is_gdb
+ self.limit = limit
+
+ # dummies
+ self.handler = 0
+ self.port_id = 0
+ self.mul = {"op": "abs",
+ "type": "raw",
+ "value": 1}
+
+ self.duration = -1
+
+ def load_yaml_file (self):
+ streams_db = CStreamsDB()
+ stream_list = streams_db.load_yaml_file(self.yaml_file)
+
+ streams_json = []
+ for stream in stream_list.compiled:
+ stream_json = {"id":1,
+ "jsonrpc": "2.0",
+ "method": "add_stream",
+ "params": {"handler": self.handler,
+ "port_id": self.port_id,
+ "stream_id": stream.stream_id,
+ "stream": stream.stream}
+ }
+
+ streams_json.append(stream_json)
+
+ return streams_json
+
+
+ def generate_start_cmd (self):
+ return {"id":1,
+ "jsonrpc": "2.0",
+ "method": "start_traffic",
+ "params": {"handler": self.handler,
+ "port_id": self.port_id,
+ "mul": self.mul,
+ "duration": self.duration}
+ }
+
+
+ def run (self):
+
+ # load the streams
+ cmds_json = (self.load_yaml_file())
+ cmds_json.append(self.generate_start_cmd())
+
+ f = tempfile.NamedTemporaryFile(delete = False)
+ f.write(json.dumps(cmds_json))
+ f.close()
+
+ try:
+ cmd = ['bp-sim-64-debug',
+ '--pcap',
+ '--sl',
+ '--cores',
+ str(self.dp_core_count),
+ '--limit',
+ str(self.limit),
+ '-f',
+ f.name,
+ '-o',
+ self.output_filename]
+
+ if self.core_index != None:
+ cmd += ['--core_index', str(self.core_index)]
+
+ if self.is_valgrind:
+ cmd = ['valgrind', '--leak-check=full'] + cmd
+ elif self.is_gdb:
+ cmd = ['gdb', '--args'] + cmd
+
+ print "executing command: '{0}'".format(" ".join(cmd))
+ subprocess.call(cmd)
+
+ # core index
+ if (self.dp_core_count > 1) and (self.core_index == None):
+ self.merge_results()
+
+ finally:
+ os.unlink(f.name)
+
+
+ def merge_results (self):
+ if (self.core_index != None) or (self.dp_core_count == 1):
+ # nothing to do
+ return
+
+ inputs = ["{0}-{1}".format(self.output_filename, index) for index in xrange(0, self.dp_core_count)]
+ merge_cap_files(inputs, self.output_filename, delete_src = True)
+
+
+
+def is_valid_file(filename):
+ if not os.path.isfile(filename):
+ raise argparse.ArgumentTypeError("The file '%s' does not exist" % filename)
+
+ return filename
+
+
+def unsigned_int (x):
+ x = int(x)
+ if x <= 0:
+ raise argparse.ArgumentTypeError("argument must be >= 1")
+
+ return x
+
+def setParserOptions():
+ parser = argparse.ArgumentParser(prog="stl_sim.py")
+
+ parser.add_argument("input_file",
+ help = "input file in YAML or Python format",
+ type = is_valid_file)
+
+ parser.add_argument("output_file",
+ help = "output file in ERF format")
+
+
+ parser.add_argument("-c", "--cores",
+ help = "DP core count [default is 1]",
+ default = 1,
+ type = int,
+ choices = xrange(1, 9))
+
+ parser.add_argument("-n", "--core_index",
+ help = "Record only a specific core",
+ default = None,
+ type = int)
+
+ parser.add_argument("-j", "--join",
+ help = "run and join output from 0..core_count [default is False]",
+ default = False,
+ type = bool)
+
+ parser.add_argument("-l", "--limit",
+ help = "limit test total packet count [default is 5000]",
+ default = 5000,
+ type = unsigned_int)
+
+
+ group = parser.add_mutually_exclusive_group()
+
+ group.add_argument("-x", "--valgrind",
+ help = "run under valgrind [default is False]",
+ action = "store_true",
+ default = False)
+
+ group.add_argument("-g", "--gdb",
+ help = "run under GDB [default is False]",
+ action = "store_true",
+ default = False)
+
+ return parser
+
+
+def validate_args (parser, options):
+
+ if options.core_index:
+ if not options.core_index in xrange(0, options.cores):
+ parser.error("DP core index valid range is 0 to {0}".format(options.cores - 1))
+
+
+
+def main ():
+ parser = setParserOptions()
+ options = parser.parse_args()
+
+ validate_args(parser, options)
+
+ r = SimRun(options.input_file,
+ options.cores,
+ options.core_index,
+ options.limit,
+ options.output_file,
+ options.valgrind,
+ options.gdb,
+ options.limit)
+
+ r.run()
+
+
+if __name__ == '__main__':
+ main()
+
+