path: root/scripts/automation/trex_control_plane/client
author Dan Klein <danklein10@gmail.com> 2016-01-04 23:31:31 +0200
committer Dan Klein <danklein10@gmail.com> 2016-01-04 23:31:31 +0200
commit 629b54c4c9df9c718d818a004ecf15c2cf6c770a (patch)
tree 7dfc3c64c7561032d690ce6188130e80d344054e /scripts/automation/trex_control_plane/client
parent 3757099103ed1bf56f85ccf5bb861a331287cbbb (diff)
parent 857bdcf05a920b99e1cf180c700176b04801da00 (diff)
Merge branch 'master' into dan_stateless
Diffstat (limited to 'scripts/automation/trex_control_plane/client')
-rwxr-xr-x scripts/automation/trex_control_plane/client/trex_client.py | 67
-rw-r--r-- scripts/automation/trex_control_plane/client/trex_stateless_sim.py | 202
2 files changed, 267 insertions(+), 2 deletions(-)
diff --git a/scripts/automation/trex_control_plane/client/trex_client.py b/scripts/automation/trex_control_plane/client/trex_client.py
index 5709b7a5..1d94dc06 100755
--- a/scripts/automation/trex_control_plane/client/trex_client.py
+++ b/scripts/automation/trex_control_plane/client/trex_client.py
@@ -294,6 +294,34 @@ class CTRexClient(object):
finally:
self.prompt_verbose_data()
+ def is_idle (self):
+ """
+ Poll the TRex running status and check whether TRex is in the Idle state.
+
+ :parameters:
+ None
+
+ :return:
+ + **True** if TRex is idle.
+ + **False** if TRex is starting or running.
+
+ :raises:
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case of a failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case of a JSON stream decoding error.
+ + ProtocolError, in case of an error in the JSON-RPC protocol.
+
+ """
+ try:
+ if self.get_running_status()['state'] == TRexStatus.Idle:
+ return True
+ return False
+ except TRexException:
+ raise
+ except ProtocolError:
+ raise
+ finally:
+ self.prompt_verbose_data()
+
def get_trex_files_path (self):
"""
Fetches the local path in which files are stored when pushed to TRex server from client.
@@ -455,6 +483,41 @@ class CTRexClient(object):
results = self.get_result_obj()
return results
+ def sample_x_seconds (self, sample_time, time_between_samples = 5):
+ """
+ Samples TRex data for sample_time seconds, polling the server every time_between_samples seconds.
+ Does not stop TRex afterwards!
+
+ .. tip:: Useful for changing the device (router, ASA, etc.) configuration after a given time.
+
+ :parameters:
+ sample_time : int
+ the number of seconds to sample TRex data
+
+ time_between_samples : int
+ the time, in seconds, between consecutive samples of the server
+
+ default value : **5**
+
+ :return:
+ the first result object (see :class:`CTRexResult` for further details) of the TRex run, fetched once the given sample_time has elapsed.
+
+ :raises:
+ + :exc:`UserWarning`, in case the TRex run ended before the requested sample_time elapsed.
+ + :exc:`trex_exceptions.TRexIncompleteRunError`, in case of a failed TRex run (unexpected termination).
+ + :exc:`TypeError`, in case of a JSON stream decoding error.
+ + ProtocolError, in case of an error in the JSON-RPC protocol.
+
+ """
+ # make sure TRex is running. raise exceptions here if any
+ self.wait_until_kickoff_finish()
+ elapsed_time = 0
+ while self.is_running():
+ if elapsed_time >= sample_time:
+ return self.get_result_obj()
+ time.sleep(time_between_samples)
+ elapsed_time += time_between_samples
+ raise UserWarning("TRex stopped at %s seconds (before the expected %s seconds)\nTry increasing the test duration or decreasing sample_time" % (elapsed_time, sample_time))
def get_result_obj (self, copy_obj = True):
"""
@@ -1041,11 +1104,11 @@ class CTRexResult(object):
# handle latency data
if self.latency_checked:
latency_pre = "trex-latency"
- self._max_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), ".*max-")#None # TBC
+ self._max_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), "max-")#None # TBC
# support old typo
if self._max_latency is None:
latency_pre = "trex-latecny"
- self._max_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), ".*max-")
+ self._max_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), "max-")
self._avg_latency = self.get_last_value("{latency}.data".format(latency = latency_pre), "avg-")#None # TBC
self._avg_latency = CTRexResult.__avg_all_and_rename_keys(self._avg_latency)
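
The two new methods above extend the CTRexClient polling API. A minimal usage sketch, assuming a TRex server reachable at 'trex-host' and a run launched with start_trex(); the host name and the start_trex() keyword arguments are illustrative only and not part of this commit:

    import time
    from trex_client import CTRexClient

    client = CTRexClient('trex-host')                          # hypothetical host name
    client.start_trex(d = 60, m = 1.0, f = 'cap2/dns.yaml')    # illustrative run parameters

    # sample for 30 seconds (polling every 5 seconds), then reconfigure the
    # device under test while TRex keeps running
    result = client.sample_x_seconds(30, time_between_samples = 5)   # returns a CTRexResult object

    # later on, wait until the server goes back to the Idle state
    while not client.is_idle():
        time.sleep(5)
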
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
new file mode 100644
index 00000000..7655b27c
--- /dev/null
+++ b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+Itay Marom
+Cisco Systems, Inc.
+
+Copyright (c) 2015-2015 Cisco Systems, Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+try:
+ # support import for Python 2
+ import outer_packages
+except ImportError:
+ # support import for Python 3
+ import client.outer_packages
+
+from client_utils.jsonrpc_client import JsonRpcClient, BatchMessage
+from client_utils.packet_builder import CTRexPktBuilder
+import json
+
+from common.trex_streams import *
+
+import argparse
+import tempfile
+import subprocess
+import os
+
+
+
+class SimRun(object):
+ def __init__ (self, yaml_file, dp_core_count, core_index, packet_limit, output_filename, is_valgrind, is_gdb):
+
+ self.yaml_file = yaml_file
+ self.output_filename = output_filename
+ self.dp_core_count = dp_core_count
+ self.core_index = core_index
+ self.packet_limit = packet_limit
+ self.is_valgrind = is_valgrind
+ self.is_gdb = is_gdb
+
+ # dummy values for the RPC handler, port and rate-multiplier fields of the generated commands
+ self.handler = 0
+ self.port_id = 0
+ self.mul = {"op": "abs",
+ "type": "raw",
+ "value": 1}
+
+ self.duration = -1
+
+ def load_yaml_file (self):
+ streams_db = CStreamsDB()
+ stream_list = streams_db.load_yaml_file(self.yaml_file)
+
+ streams_json = []
+ for stream in stream_list.compiled:
+ stream_json = {"id":1,
+ "jsonrpc": "2.0",
+ "method": "add_stream",
+ "params": {"handler": self.handler,
+ "port_id": self.port_id,
+ "stream_id": stream.stream_id,
+ "stream": stream.stream}
+ }
+
+ streams_json.append(stream_json)
+
+ return streams_json
+
+
+ def generate_start_cmd (self):
+ return {"id":1,
+ "jsonrpc": "2.0",
+ "method": "start_traffic",
+ "params": {"handler": self.handler,
+ "port_id": self.port_id,
+ "mul": self.mul,
+ "duration": self.duration}
+ }
+
+
+ def run (self):
+
+ # load the streams
+ cmds_json = self.load_yaml_file()
+ cmds_json.append(self.generate_start_cmd())
+
+ f = tempfile.NamedTemporaryFile(delete = False)
+ f.write(json.dumps(cmds_json))
+ f.close()
+
+ try:
+ cmd = ['bp-sim-64-debug', '--sl', '--cores', str(self.dp_core_count), '--core_index', str(self.core_index), '-f', f.name, '-o', self.output_filename]
+ if self.is_valgrind:
+ cmd = ['valgrind', '--leak-check=full'] + cmd
+ elif self.is_gdb:
+ cmd = ['gdb', '--args'] + cmd
+
+ subprocess.call(cmd)
+
+ finally:
+ os.unlink(f.name)
+
+
+def is_valid_file(filename):
+ if not os.path.isfile(filename):
+ raise argparse.ArgumentTypeError("The file '%s' does not exist" % filename)
+
+ return filename
+
+
+def unsigned_int (x):
+ x = int(x)
+ if x <= 0:
+ raise argparse.ArgumentTypeError("argument must be >= 1")
+
+ return x
+
+def setParserOptions():
+ parser = argparse.ArgumentParser(prog="stl_sim.py")
+
+ parser.add_argument("input_file",
+ help = "input file in YAML or Python format",
+ type = is_valid_file)
+
+ parser.add_argument("output_file",
+ help = "output file in ERF format")
+
+
+ parser.add_argument("-c", "--cores",
+ help = "DP core count [default is 1]",
+ default = 1,
+ type = int,
+ choices = xrange(1, 9))
+
+ parser.add_argument("-n", "--core_index",
+ help = "DP core index to examine [default is 0]",
+ default = 0,
+ type = int)
+
+ parser.add_argument("-j", "--join",
+ help = "run and join output from 0..core_count [default is False]",
+ action = "store_true",
+ default = False)
+
+ parser.add_argument("-l", "--limit",
+ help = "limit test total packet count [default is 5000]",
+ default = 5000,
+ type = unsigned_int)
+
+
+ group = parser.add_mutually_exclusive_group()
+
+ group.add_argument("-x", "--valgrind",
+ help = "run under valgrind [default is False]",
+ action = "store_true",
+ default = False)
+
+ group.add_argument("-g", "--gdb",
+ help = "run under GDB [default is False]",
+ action = "store_true",
+ default = False)
+
+ return parser
+
+
+def validate_args (parser, options):
+ if options.core_index < 0 or options.core_index >= options.cores:
+ parser.error("DP core index valid range is 0 to {0}".format(options.cores - 1))
+
+
+
+def main ():
+ parser = setParserOptions()
+ options = parser.parse_args()
+
+ validate_args(parser, options)
+
+ r = SimRun(options.input_file,
+ options.cores,
+ options.core_index,
+ options.limit,
+ options.output_file,
+ options.valgrind,
+ options.gdb)
+
+ r.run()
+
+
+if __name__ == '__main__':
+ main()
+
+
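
For reference, a short sketch of driving the new simulator wrapper programmatically rather than through its command line; the import path, the profile name and the output file name are assumptions for illustration, and bp-sim-64-debug must be on the PATH, as SimRun.run() expects:

    # assumes the client directory is on sys.path so the module imports cleanly
    from trex_stateless_sim import SimRun

    run = SimRun(yaml_file       = 'stl/profile.yaml',   # hypothetical stateless stream profile
                 dp_core_count   = 1,
                 core_index      = 0,
                 packet_limit    = 5000,                 # stored, but not passed to bp-sim in this revision
                 output_filename = 'profile.erf',
                 is_valgrind     = False,
                 is_gdb          = False)

    # writes the add_stream / start_traffic JSON-RPC commands to a temp file
    # and invokes bp-sim-64-debug on it
    run.run()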