Diffstat (limited to 'scripts/automation')
3 files changed, 211 insertions, 74 deletions
diff --git a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py b/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
index d19d76b4..6e83c3ac 100644
--- a/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
+++ b/scripts/automation/regression/unit_tests/functional_tests/stl_basic_tests.py
@@ -83,7 +83,10 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):

     def run_sim (self, yaml, output, options = "", silent = False):
-        user_cmd = "{0} {1} {2}".format(yaml, output, options)
+        if output:
+            user_cmd = "{0} -o {1} {2}".format(yaml, output, options)
+        else:
+            user_cmd = "{0} {1}".format(yaml, options)

         cmd = "{0} {1} {2}".format(sys.executable,
                                    self.stl_sim,
@@ -100,13 +103,18 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):

     def golden_run (self, testname, profile, options, silent = False):
+        output_cap = os.path.join("/tmp/", "{0}_test.cap".format(testname))
         golden_cap = os.path.join(self.test_path, "stl/golden/{0}_golden.cap".format(testname))

-        rc = self.run_sim(self.profiles[profile], output_cap, options, silent)
-        assert_equal(rc, True)
+        try:
+            rc = self.run_sim(self.profiles[profile], output_cap, options, silent)
+            assert_equal(rc, True)
+
+            self.compare_caps(output_cap, golden_cap)

-        self.compare_caps(output_cap, golden_cap)
+        finally:
+            os.unlink(output_cap)

@@ -129,7 +137,7 @@ class CStlBasic_Test(functional_general_test.CGeneralFunctional_Test):
         print "\n"
         for profile in self.valgrind_profiles:
             print "\n*** testing profile '{0}' ***\n".format(profile)
-            rc = self.run_sim(profile, output = "dummy.cap", options = "--dry --cores 8 --limit 500 --valgrind", silent = False)
+            rc = self.run_sim(profile, output = None, options = "--cores 8 --limit 500 --valgrind", silent = False)
             assert_equal(rc, True)

diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
index a8f24d65..7f65996d 100644
--- a/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
+++ b/scripts/automation/trex_control_plane/client/trex_stateless_sim.py
@@ -24,9 +24,12 @@ except ImportError:
     # support import for Python 3
     import client.outer_packages

+from common.trex_stl_exceptions import STLError
+from yaml.scanner import ScannerError
 from common.trex_streams import *
 from client_utils import parsing_opts

+import re
 import json

@@ -44,6 +47,9 @@ class BpSimException(Exception):
 def merge_cap_files (pcap_file_list, out_filename, delete_src = False):
     out_pkts = []
+    if not all([os.path.exists(f) for f in pcap_file_list]):
+        print "failed to merge cap file list...\nnot all files exist\n"
+        return

     # read all packets to a list
     for src in pcap_file_list:
@@ -69,126 +75,215 @@

+# stateless simulation
+class STLSim(object):
+    def __init__ (self, bp_sim_path = None, handler = 0, port_id = 0):

-class SimRun(object):
-    def __init__ (self, options):
+        if not bp_sim_path:
+            # auto find scripts
+            m = re.match(".*/trex-core", os.getcwd())
+            if not m:
+                raise STLError('cannot find BP sim path, please provide it')

-        self.options = options
+            self.bp_sim_path = os.path.join(m.group(0), 'scripts')
+
+        else:
+            self.bp_sim_path = bp_sim_path

         # dummies
-        self.handler = 0
-        self.port_id = 0
+        self.handler = handler
+        self.port_id = port_id
+

-        self.mul = options.mult
+    def load_input_file (self, input_file):
+        # try YAML
+        try:
+            streams_db = CStreamsDB()
+            stream_list = streams_db.load_yaml_file(input_file)

-        self.duration = -1
+            # convert to new style stream object
+            return [HACKSTLStream(stream) for stream in stream_list.compiled]
+        except ScannerError:
+            pass
+
+        # try python
+        try:
+            basedir = os.path.dirname(input_file)
+            sys.path.append(basedir)

-    def load_yaml_file (self):
-        streams_db = CStreamsDB()
-        stream_list = streams_db.load_yaml_file(self.options.input_file)
+            file = os.path.basename(input_file).split('.py')[0]
+            module = __import__(file, globals(), locals(), [], -1)

-        streams_json = []
-        for stream in stream_list.compiled:
-            stream_json = {"id":1,
-                           "jsonrpc": "2.0",
-                           "method": "add_stream",
-                           "params": {"handler": self.handler,
-                                      "port_id": self.port_id,
-                                      "stream_id": stream.stream_id,
-                                      "stream": stream.stream}
-                           }
+            return module.register().get_streams()

-            streams_json.append(stream_json)
+        except AttributeError:
+            pass

-        return streams_json
+        raise STLError("bad format input file '{0}'".format(input_file))


-    def generate_start_cmd (self):
+    def generate_start_cmd (self, mult = "1", force = True, duration = -1):
         return {"id":1,
                 "jsonrpc": "2.0",
                 "method": "start_traffic",
                 "params": {"handler": self.handler,
-                           "force": False,
+                           "force": force,
                            "port_id": self.port_id,
-                           "mul": parsing_opts.decode_multiplier(self.mul),
-                           "duration": self.duration}
+                           "mul": parsing_opts.decode_multiplier(mult),
+                           "duration": duration}
                 }


-    def run (self):
-        # load the streams
-        cmds_json = (self.load_yaml_file())
-        cmds_json.append(self.generate_start_cmd())

+    # run command
+    # input_list - a list of streams or YAML files
+    # outfile - pcap file to save output, if None its a dry run
+    # dp_core_count - how many DP cores to use
+    # dp_core_index - simulate only specific dp core without merging
+    # is_debug - debug or release image
+    # pkt_limit - how many packets to simulate
+    # mult - multiplier
+    # mode - can be 'valgrind, 'gdb', 'json' or 'none'
+    def run (self,
+             input_list,
+             outfile = None,
+             dp_core_count = 1,
+             dp_core_index = None,
+             is_debug = True,
+             pkt_limit = 5000,
+             mult = "1",
+             duration = -1,
+             mode = 'none'):
+
+        if not mode in ['none', 'gdb', 'valgrind', 'json']:
+            raise STLArgumentError('mode', mode)
+
+        # listify
+        input_list = input_list if isinstance(input_list, list) else [input_list]
+
+        # check streams arguments
+        if not all([isinstance(i, (STLStream, str)) for i in input_list]):
+            raise STLArgumentError('input_list', input_list)
+
+        # split to two type
+        input_files = [x for x in input_list if isinstance(x, str)]
+        stream_list = [x for x in input_list if isinstance(x, STLStream)]
+
+        # handle YAMLs
+        for input_file in input_files:
+            stream_list += self.load_input_file(input_file)
+
+
+        # load streams
+        cmds_json = []
+        for stream in stream_list:
+            cmd = {"id":1,
+                   "jsonrpc": "2.0",
+                   "method": "add_stream",
+                   "params": {"handler": self.handler,
+                              "port_id": self.port_id,
+                              "stream_id": stream.get_id(),
+                              "stream": stream.to_json()}
+                   }
+
+            cmds_json.append(cmd)
+
+        # generate start command
+        cmds_json.append(self.generate_start_cmd(mult = mult,
+                                                 force = True,
+                                                 duration = duration))
+
+        if mode == 'json':
+            print json.dumps(cmds_json, indent = 4, separators=(',', ': '), sort_keys = True)
+            return
+
+        # start simulation
+        self.outfile = outfile
+        self.dp_core_count = dp_core_count
+        self.dp_core_index = dp_core_index
+        self.is_debug = is_debug
+        self.pkt_limit = pkt_limit
+        self.mult = mult
+        self.duration = duration,
+        self.mode = mode
+
+        self.__run(cmds_json)
+
+    # internal run
+    def __run (self, cmds_json):
+
+        # write to temp file
         f = tempfile.NamedTemporaryFile(delete = False)
         f.write(json.dumps(cmds_json))
         f.close()

+        # launch bp-sim
         try:
-            if self.options.json:
-                with open(f.name) as file:
-                    data = "\n".join(file.readlines())
-                    print json.dumps(json.loads(data), indent = 4, separators=(',', ': '), sort_keys = True)
-            else:
-                self.execute_bp_sim(f.name)
+            self.execute_bp_sim(f.name)
         finally:
             os.unlink(f.name)


     def execute_bp_sim (self, json_filename):
-        exe = './bp-sim-64' if self.options.release else './bp-sim-64-debug'
+        if self.is_debug:
+            exe = os.path.join(self.bp_sim_path, 'bp-sim-64-debug')
+        else:
+            exe = os.path.join(self.bp_sim_path, 'bp-sim-64')
+
         if not os.path.exists(exe):
-            print "cannot find executable '{0}'".format(exe)
-            exit(-1)
+            raise STLError("'{0}' does not exists, please build it before calling the simulation".format(exe))
+
         cmd = [exe,
                '--pcap',
                '--sl',
                '--cores',
-               str(self.options.cores),
+               str(self.dp_core_count),
                '--limit',
-               str(self.options.limit),
+               str(self.pkt_limit),
                '-f',
-               json_filename,
-               '-o',
-               self.options.output_file]
+               json_filename]

-        if self.options.dry:
+        # out or dry
+        if not self.outfile:
             cmd += ['--dry']
+            cmd += ['-o', '/dev/null']
+        else:
+            cmd += ['-o', self.outfile]

-        if self.options.core_index != None:
-            cmd += ['--core_index', str(self.options.core_index)]
+        if self.dp_core_index != None:
+            cmd += ['--core_index', str(self.dp_core_index)]

-        if self.options.valgrind:
+        if self.mode == 'valgrind':
             cmd = ['valgrind', '--leak-check=full', '--error-exitcode=1'] + cmd

-        elif self.options.gdb:
+        elif self.mode == 'gdb':
             cmd = ['gdb', '--args'] + cmd

         print "executing command: '{0}'".format(" ".join(cmd))
         rc = subprocess.call(cmd)
         if rc != 0:
-            raise BpSimException()
+            raise STLError('simulation has failed with error code {0}'.format(rc))

         self.merge_results()


     def merge_results (self):
-        if self.options.dry:
+        if not self.outfile:
             return

-        if self.options.cores == 1:
+        if self.dp_core_count == 1:
             return

-        if self.options.core_index != None:
+        if self.dp_core_index != None:
             return

         print "Mering cores output to a single pcap file...\n"
-        inputs = ["{0}-{1}".format(self.options.output_file, index) for index in xrange(0, self.options.cores)]
-        merge_cap_files(inputs, self.options.output_file, delete_src = True)
+        inputs = ["{0}-{1}".format(self.outfile, index) for index in xrange(0, self.dp_core_count)]
+        merge_cap_files(inputs, self.outfile, delete_src = True)

@@ -214,18 +309,22 @@
                         help = "input file in YAML or Python format",
                         type = is_valid_file)

-    parser.add_argument("output_file",
+    parser.add_argument("-o",
+                        dest = "output_file",
+                        default = None,
                         help = "output file in ERF format")

     parser.add_argument("-c", "--cores",
                         help = "DP core count [default is 1]",
+                        dest = "dp_core_count",
                         default = 1,
                         type = int,
                         choices = xrange(1, 9))

     parser.add_argument("-n", "--core_index",
                         help = "Record only a specific core",
+                        dest = "dp_core_index",
                         default = None,
                         type = int)

@@ -234,10 +333,6 @@
                         action = "store_true",
                         default = False)

-    parser.add_argument("-s", "--dry",
-                        help = "dry run only (nothing will be written to the file) [default is False]",
-                        action = "store_true",
-                        default = False)

     parser.add_argument("-l", "--limit",
                         help = "limit test total packet count [default is 5000]",
@@ -250,6 +345,13 @@
                         default = "1",
                         type = parsing_opts.match_multiplier_strict)

+    parser.add_argument('-d', '--duration',
+                        help = "run duration",
+                        dest = 'duration',
+                        default = -1,
+                        type = float)
+
+
     group = parser.add_mutually_exclusive_group()

     group.add_argument("-x", "--valgrind",
@@ -272,12 +374,12 @@

 def validate_args (parser, options):

-    if options.core_index:
-        if not options.core_index in xrange(0, options.cores):
-            parser.error("DP core index valid range is 0 to {0}".format(options.cores - 1))
+    if options.dp_core_index:
+        if not options.dp_core_index in xrange(0, options.dp_core_count):
+            parser.error("DP core index valid range is 0 to {0}".format(options.dp_core_count - 1))

     # zero is ok - no limit, but other values must be at least as the number of cores
-    if (options.limit != 0) and options.limit < options.cores:
+    if (options.limit != 0) and options.limit < options.dp_core_count:
         parser.error("limit cannot be lower than number of DP cores")

@@ -287,16 +389,35 @@ def main ():
     validate_args(parser, options)

-    r = SimRun(options)
+
+
+    if options.valgrind:
+        mode = 'valgrind'
+    elif options.gdb:
+        mode = 'gdb'
+    elif options.json:
+        mode = 'json'
+    else:
+        mode = 'none'

     try:
-        r.run()
+        r = STLSim()
+        r.run(input_list = options.input_file,
+              outfile = options.output_file,
+              dp_core_count = options.dp_core_count,
+              dp_core_index = options.dp_core_index,
+              is_debug = (not options.release),
+              pkt_limit = options.limit,
+              mult = options.mult,
+              duration = options.duration,
+              mode = mode)
+
     except KeyboardInterrupt as e:
         print "\n\n*** Caught Ctrl + C... Exiting...\n\n"
         exit(1)

-    except BpSimException as e:
-        print "\n\n*** BP sim exit code was non zero\n\n"
+    except STLError as e:
+        print e
         exit(1)

     exit(0)
diff --git a/scripts/automation/trex_control_plane/common/trex_stats.py b/scripts/automation/trex_control_plane/common/trex_stats.py
index 464ee56a..3bd6e0cd 100755
--- a/scripts/automation/trex_control_plane/common/trex_stats.py
+++ b/scripts/automation/trex_control_plane/common/trex_stats.py
@@ -524,8 +524,16 @@ class CPortStats(CTRexStats):

     def generate_stats(self):

+        state = self._port_obj.get_port_state_name() if self._port_obj else ""
+        if state == "ACTIVE":
+            state = format_text(state, 'green', 'bold')
+        elif state == "PAUSE":
+            state = format_text(state, 'magenta', 'bold')
+        else:
+            state = format_text(state, 'bold')
+
         return {"owner": self._port_obj.user if self._port_obj else "",
-                "state": self._port_obj.get_port_state_name() if self._port_obj else "",
+                "state": "{0}".format(state),
                 "--": " ",
                 "---": " ",
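
A note on the regression-side change in stl_basic_tests.py: run_sim now emits the simulator's new -o flag only when an output file is requested, and passing output = None relies on the simulator treating a missing -o as a dry run, which is why the valgrind loop no longer needs --dry. A minimal standalone sketch of that command construction; stl_sim_path and the file names below are placeholders, not values taken from this commit:

    import sys

    # sketch of the updated run_sim() command construction (placeholders only)
    def build_sim_cmd (stl_sim_path, yaml, output, options = ""):
        # '-o' is emitted only when an output capture is requested;
        # omitting it makes the simulator perform a dry run
        if output:
            user_cmd = "{0} -o {1} {2}".format(yaml, output, options)
        else:
            user_cmd = "{0} {1}".format(yaml, options)

        return "{0} {1} {2}".format(sys.executable, stl_sim_path, user_cmd)

    # dry run (no '-o') vs. a capture-producing run
    print build_sim_cmd("trex_stateless_sim.py", "profile.yaml", None, "--cores 8 --limit 500 --valgrind")
    print build_sim_cmd("trex_stateless_sim.py", "profile.yaml", "/tmp/out.cap", "--limit 500")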
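The main change replaces the CLI-only SimRun with an STLSim class that can also be driven programmatically. Below is a hedged usage sketch: it assumes the script runs from inside a trex-core checkout (so the bp_sim_path auto-detection and the package-relative imports resolve), and the profile path and output file are illustrative only, not part of this commit:

    import sys
    sys.path.insert(0, "automation/trex_control_plane/client")   # assumed layout, run from the 'scripts' dir

    from trex_stateless_sim import STLSim
    from common.trex_stl_exceptions import STLError

    try:
        sim = STLSim()                                 # bp_sim_path is auto-detected from the trex-core checkout
        sim.run(input_list = "stl/imix_1pkt.yaml",     # hypothetical YAML profile; STLStream objects also accepted
                outfile = "/tmp/imix_sim.cap",         # None would make this a dry run ('--dry', '-o /dev/null')
                dp_core_count = 4,                     # per-core pcaps are merged by merge_results()
                pkt_limit = 1000,
                mult = "1",
                mode = 'none')                         # 'json' only prints the generated RPC commands and returns
    except STLError as e:
        print e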
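Finally, the trex_stats.py hunk colors the port state cell instead of printing the raw string. The sketch below reproduces just that decision logic in isolation; format_text here is a local stand-in built on raw ANSI codes, not the text helper the real module uses:

    # stand-in for the real format_text helper (illustrative ANSI codes only)
    def format_text (text, *decorators):
        codes = {'green': '\x1b[32m', 'magenta': '\x1b[35m', 'bold': '\x1b[1m'}
        prefix = ''.join([codes.get(d, '') for d in decorators])
        return "{0}{1}\x1b[0m".format(prefix, text)

    def colorize_port_state (state):
        # ACTIVE -> green, PAUSE -> magenta, anything else is just bold
        if state == "ACTIVE":
            return format_text(state, 'green', 'bold')
        elif state == "PAUSE":
            return format_text(state, 'magenta', 'bold')
        else:
            return format_text(state, 'bold')

    print colorize_port_state("ACTIVE")
    print colorize_port_state("IDLE")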