-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py   | 49
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py    |  9
-rw-r--r--  scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py  |  9
3 files changed, 27 insertions(+), 40 deletions(-)
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py
index ce7a630c..c3fa70ec 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_port.py
@@ -199,9 +199,7 @@ class Port(object):
             cmd = RpcCmdData('add_stream', params)
             batch.append(cmd)
 
-            # meta data for show streams
-            #self.streams[stream.get_id()] = StreamOnPort(stream.to_json(),
-            #                                             Port._generate_stream_metadata(stream))
+            self.streams[stream.get_id()] = stream
 
         rc = self.transmit_batch(batch)
         if not rc:
@@ -493,39 +491,22 @@ class Port(object):
         return self.port_stats.invalidate()
 
     ################# stream printout ######################
-    def generate_loaded_streams_sum(self, stream_id_list):
+    def generate_loaded_streams_sum(self):
         if self.state == self.STATE_DOWN:
             return {}
 
-        streams_data = {}
-
-        if not stream_id_list:
-            # if no mask has been provided, apply to all streams on port
-            stream_id_list = self.streams.keys()
-
-
-        streams_data = {stream_id: self.streams[stream_id].metadata.get('stream_sum', ["N/A"] * 6)
-                        for stream_id in stream_id_list
-                        if stream_id in self.streams}
-
-        # sort the data
-        return {"streams" : OrderedDict(sorted(streams_data.items())) }
-
-    @staticmethod
-    def _generate_stream_metadata(stream):
-        meta_dict = {}
-
-        next = stream.get_next_id()
-        if next == -1:
-            next = "-"
-
-        meta_dict['stream_sum'] = OrderedDict([("id", stream.get_id()),
-                                               ("packet_type", stream.get_pkt_type()),
-                                               ("L2 len", stream.get_pkt_len()),
-                                               ("mode", stream.get_mode()),
-                                               ("rate_pps", stream.get_pps()),
-                                               ("next_stream", next)
-                                               ])
-        return meta_dict
+
+        data = {}
+        for id, stream in self.streams.iteritems():
+            data[id] = OrderedDict([ ('id', id),
+                                     ('packet_type', stream.get_pkt_type()),
+                                     ('L2 len', stream.get_pkt_len()),
+                                     ('mode', stream.get_mode()),
+                                     ('rate_pps', stream.get_pps()),
+                                     ('next_stream', stream.get_next_id())
+                                     ])
+
+        return {"streams" : OrderedDict(sorted(data.items())) }
+
 
     ################# events handler ######################
     def async_event_port_stopped (self):
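
A minimal, self-contained sketch of what the rewritten generate_loaded_streams_sum() above returns: one OrderedDict row per stream, with the rows themselves sorted by stream id. Everything not named in the hunk (the DummyStream stand-in and the sample values) is an assumption for illustration only.

# Illustrative stand-in for a stream object exposing the same getters the
# hunk relies on (get_pkt_type / get_pkt_len / get_mode / get_pps / get_next_id).
from collections import OrderedDict

class DummyStream(object):
    def __init__(self, pkt_type, pkt_len, mode, pps, next_id):
        self.pkt_type, self.pkt_len, self.mode, self.pps, self.next_id = \
            pkt_type, pkt_len, mode, pps, next_id

    def get_pkt_type(self): return self.pkt_type
    def get_pkt_len(self):  return self.pkt_len
    def get_mode(self):     return self.mode
    def get_pps(self):      return self.pps
    def get_next_id(self):  return self.next_id

def loaded_streams_sum(streams):
    # mirrors the new method: one OrderedDict row per stream, sorted by id
    data = {}
    for sid, stream in streams.items():
        data[sid] = OrderedDict([('id',          sid),
                                 ('packet_type', stream.get_pkt_type()),
                                 ('L2 len',      stream.get_pkt_len()),
                                 ('mode',        stream.get_mode()),
                                 ('rate_pps',    stream.get_pps()),
                                 ('next_stream', stream.get_next_id())])
    return {"streams": OrderedDict(sorted(data.items()))}

if __name__ == '__main__':
    streams = {2: DummyStream("Ethernet/IP", 64, "Continuous", 100.0, -1),
               1: DummyStream("Ethernet/IP/UDP", 128, "Single Burst", 50.0, 2)}
    print(loaded_streams_sum(streams))   # rows come out ordered: 1, then 2

Keeping each row an OrderedDict matters because the table code in trex_stl_stats.py later calls .values() on every row, so the column order of the printout follows the insertion order above.
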
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py
index 086e46af..380b7a39 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_sim.py
@@ -218,14 +218,17 @@ class STLSim(object):
     # internal run
-    def __run (self, cmds_json):
+    def __run (self, cmds_json, zipped = True):
 
         # write to temp file
         f = tempfile.NamedTemporaryFile(delete = False)
 
         msg = json.dumps(cmds_json)
-        compressed = zlib.compress(msg)
-        new_msg = struct.pack(">II", 0xABE85CEA, len(msg)) + compressed
+
+        # stress the zip path
+        if zipped:
+            compressed = zlib.compress(msg)
+            new_msg = struct.pack(">II", 0xABE85CEA, len(msg)) + compressed
 
         f.write(new_msg)
         f.close()
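
The writer side of the framing used above is visible in the hunk: an 8-byte big-endian ">II" header carrying the magic 0xABE85CEA and the uncompressed length, followed by the zlib-compressed JSON. The sketch below restates that framing and pairs it with an illustrative reader; the plain-text fallback for zipped=False and the deframe() helper are assumptions made for the sketch, not code from this commit.

import json
import struct
import zlib

MAGIC = 0xABE85CEA

def frame(cmds_json, zipped=True):
    # serialize the command list; bytes are needed for zlib/struct
    msg = json.dumps(cmds_json).encode()
    if zipped:
        # 8-byte header: magic + uncompressed length, then the zlib payload
        return struct.pack(">II", MAGIC, len(msg)) + zlib.compress(msg)
    return msg    # assumed plain path: raw JSON bytes, no header

def deframe(blob):
    # illustrative reader: detect the magic to tell zipped from plain input
    if len(blob) >= 8:
        magic, orig_len = struct.unpack(">II", blob[:8])
        if magic == MAGIC:
            msg = zlib.decompress(blob[8:])
            assert len(msg) == orig_len   # header carries the uncompressed size
            return json.loads(msg.decode())
    return json.loads(blob.decode())

if __name__ == '__main__':
    cmds = [{"method": "add_stream", "params": {"port_id": 0}}]
    assert deframe(frame(cmds, zipped=True)) == cmds
    assert deframe(frame(cmds, zipped=False)) == cmds

Carrying the uncompressed length in the header lets a consumer sanity-check the decompressed buffer before it tries to parse the JSON.
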
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
index 30f303a8..e5578564 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
@@ -208,7 +208,7 @@ class CTRexInfoGenerator(object):
     def _generate_single_port_streams_info(self, port_obj, stream_id_list):
 
-        return_streams_data = port_obj.generate_loaded_streams_sum(stream_id_list)
+        return_streams_data = port_obj.generate_loaded_streams_sum()
 
         if not return_streams_data.get("streams"):
             # we got no streams available
@@ -219,13 +219,16 @@ class CTRexInfoGenerator(object):
         # because we mutate this - deep copy before
         return_streams_data = copy.deepcopy(return_streams_data)
 
+        p_type_field_len = 0
+
         for stream_id, stream_id_sum in return_streams_data['streams'].iteritems():
             stream_id_sum['rate_pps'] = format_num(stream_id_sum['rate_pps'], suffix='pps')
-            stream_id_sum['packet_type'] = self._trim_packet_headers(stream_id_sum['packet_type'], 20)
+            stream_id_sum['packet_type'] = self._trim_packet_headers(stream_id_sum['packet_type'], 30)
+            p_type_field_len = max(p_type_field_len, len(stream_id_sum['packet_type']))
 
         info_table = text_tables.TRexTextTable()
         info_table.set_cols_align(["c"] + ["l"] + ["r"] + ["c"] + ["r"] + ["c"])
-        info_table.set_cols_width([10] + [20] + [8] + [16] + [10] + [12])
+        info_table.set_cols_width([10] + [p_type_field_len] + [8] + [16] + [10] + [12])
         info_table.set_cols_dtype(["t"] + ["t"] + ["t"] + ["t"] + ["t"] + ["t"])
 
         info_table.add_rows([v.values()
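
The last hunk sizes the packet_type column dynamically: every value is trimmed to at most 30 characters, and the column width becomes the length of the longest trimmed value instead of the previous fixed 20. A tiny sketch of that sizing logic follows; the trim helper and the sample strings are stand-ins (the real CTRexInfoGenerator._trim_packet_headers is not part of this diff).

def trim_packet_headers(pkt_type, max_len):
    # stand-in for _trim_packet_headers: cap the string at max_len characters
    return pkt_type if len(pkt_type) <= max_len else pkt_type[:max_len - 3] + '...'

packet_types = ["Ethernet/IP/UDP", "Ethernet/802.1Q/IP/TCP/HTTP"]

p_type_field_len = 0
for i, pkt_type in enumerate(packet_types):
    packet_types[i] = trim_packet_headers(pkt_type, 30)
    p_type_field_len = max(p_type_field_len, len(packet_types[i]))

# replaces the fixed width list [10, 20, 8, 16, 10, 12]
cols_width = [10] + [p_type_field_len] + [8] + [16] + [10] + [12]
print(cols_width)    # -> [10, 27, 8, 16, 10, 12] for the samples above

Long header chains no longer get cut at 20 characters, and short ones no longer pad the table out to a fixed-width column.
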