author     imarom <imarom@cisco.com>    2016-03-08 09:20:02 +0200
committer  imarom <imarom@cisco.com>    2016-03-10 17:16:36 +0200
commit     60fbd456c4d804adc903839f916c9c2bbe272d94 (patch)
tree       319c377cbf8ac4c857bd2a60090bcd687b3541c1 /scripts/automation/trex_control_plane/stl/trex_stl_lib
parent     834431083b4f9e6ac7eac00d5ec6682c92b16da9 (diff)
RX stats - major refactor
Diffstat (limited to 'scripts/automation/trex_control_plane/stl/trex_stl_lib')
-rw-r--r--   scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py               |  15
-rw-r--r--   scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py |   3
-rw-r--r--   scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py                | 353
-rwxr-xr-x   scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py            |  10
4 files changed, 293 insertions(+), 88 deletions(-)
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
index a13e2793..7dc7ff32 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_client.py
@@ -436,10 +436,12 @@ class STLClient(object):
self.server_version,
self.ports)
+ self.flow_stats = trex_stl_stats.CRxStats()
+
self.stats_generator = trex_stl_stats.CTRexInfoGenerator(self.global_stats,
- self.ports)
+ self.ports,
+ self.flow_stats)
- self.flow_stats = trex_stl_stats.CRxStats()
############# private functions - used by the class itself ###########
@@ -714,7 +716,7 @@ class STLClient(object):
# clear stats
- def __clear_stats(self, port_id_list, clear_global):
+ def __clear_stats(self, port_id_list, clear_global, clear_flow_stats):
for port_id in port_id_list:
self.ports[port_id].clear_stats()
@@ -722,6 +724,9 @@ class STLClient(object):
if clear_global:
self.global_stats.clear_stats()
+ if clear_flow_stats:
+ self.flow_stats.clear_stats()
+
self.logger.log_cmd("clearing stats on port(s) {0}:".format(port_id_list))
return RC
@@ -1546,7 +1551,7 @@ class STLClient(object):
"""
@__api_check(False)
- def clear_stats (self, ports = None, clear_global = True):
+ def clear_stats (self, ports = None, clear_global = True, clear_flow_stats = True):
ports = ports if ports is not None else self.get_all_ports()
ports = self._validate_port_list(ports)
@@ -1556,7 +1561,7 @@ class STLClient(object):
raise STLArgumentError('clear_global', clear_global)
- rc = self.__clear_stats(ports, clear_global)
+ rc = self.__clear_stats(ports, clear_global, clear_flow_stats)
if not rc:
raise STLError(rc)
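
For context on the client-side change above: clear_stats() now also resets the flow (RX) stats reference by default, and the new clear_flow_stats argument lets callers opt out. A minimal usage sketch, assuming the usual trex_stl_lib.api entry point and a reachable TRex server (the server address and port numbers below are placeholders, not values from this commit):

    # Sketch only - assumes trex_stl_lib is on the path and a TRex server is running.
    from trex_stl_lib.api import STLClient

    c = STLClient(server='127.0.0.1')
    c.connect()
    try:
        # clear per-port, global and the new flow (RX) statistics
        c.clear_stats()

        # clear only the per-port counters on ports 0 and 1, keeping the
        # global and flow-stats references untouched
        c.clear_stats(ports=[0, 1], clear_global=False, clear_flow_stats=False)
    finally:
        c.disconnect()
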
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
index bb210226..14c04a52 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_packet_builder_scapy.py
@@ -964,8 +964,7 @@ class CScapyTRexPktBuilder(CTrexPktBuilderInterface):
if self.remove_fcs and self.pkt.lastlayer().name == 'Padding':
self.pkt.lastlayer().underlayer.remove_payload()
- if len(self.pkt) < 60: # simulator can write padding with non-zeros, set it explicit
- self.pkt /= Padding('\x00' * (60 - len(self.pkt)))
+
self.pkt.build()
self.is_pkt_built = True
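
The branch removed above padded short frames up to the 60-byte minimum with explicit zero bytes so the simulator would not emit non-zero padding. For reference, the same padding step in plain Scapy (a standalone illustration, not the builder code itself):

    # Standalone Scapy illustration of the padding logic this commit removes.
    from scapy.all import Ether, IP, UDP, Padding

    pkt = Ether() / IP() / UDP()
    if len(pkt) < 60:
        # pad with explicit zeros up to the 60-byte minimum frame size (without FCS)
        pkt /= Padding(b'\x00' * (60 - len(pkt)))

    assert len(pkt) >= 60
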
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
index c2e318bc..78bd2358 100644
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/trex_stl_stats.py
@@ -17,11 +17,24 @@ import threading
GLOBAL_STATS = 'g'
PORT_STATS = 'p'
PORT_STATUS = 'ps'
-ALL_STATS_OPTS = {GLOBAL_STATS, PORT_STATS, PORT_STATUS}
+STREAMS_STATS = 's'
+
+ALL_STATS_OPTS = {GLOBAL_STATS, PORT_STATS, PORT_STATUS, STREAMS_STATS}
COMPACT = {GLOBAL_STATS, PORT_STATS}
+SS_COMPAT = {GLOBAL_STATS, STREAMS_STATS}
ExportableStats = namedtuple('ExportableStats', ['raw_data', 'text_table'])
+# deep merge of dicts dst = src + dst
+def deep_merge_dicts (dst, src):
+ for k, v in src.iteritems():
+ # if not exists - deep copy it
+ if not k in dst:
+ dst[k] = copy.deepcopy(v)
+ else:
+ if isinstance(v, dict):
+ deep_merge_dicts(dst[k], v)
+
# use to calculate diffs relative to the previous values
# for example, BW
def calculate_diff (samples):
@@ -66,18 +79,23 @@ class CTRexInfoGenerator(object):
STLClient and the ports.
"""
- def __init__(self, global_stats_ref, ports_dict_ref):
+ def __init__(self, global_stats_ref, ports_dict_ref, rx_stats_ref):
self._global_stats = global_stats_ref
self._ports_dict = ports_dict_ref
+ self._rx_stats_ref = rx_stats_ref
def generate_single_statistic(self, port_id_list, statistic_type):
if statistic_type == GLOBAL_STATS:
return self._generate_global_stats()
+
elif statistic_type == PORT_STATS:
return self._generate_port_stats(port_id_list)
- pass
+
elif statistic_type == PORT_STATUS:
return self._generate_port_status(port_id_list)
+
+ elif statistic_type == STREAMS_STATS:
+ return self._generate_streams_stats()
else:
# ignore by returning empty object
return {}
@@ -110,6 +128,92 @@ class CTRexInfoGenerator(object):
return {"global_statistics": ExportableStats(stats_data, stats_table)}
+ def _generate_streams_stats (self):
+
+ sstats_data = self._rx_stats_ref.generate_stats()
+ streams_keys = self._rx_stats_ref.get_streams_keys()
+ stream_count = len(streams_keys)
+
+
+ stats_table = text_tables.TRexTextTable()
+ stats_table.set_cols_align(["l"] + ["r"] * stream_count)
+ stats_table.set_cols_width([20] + [17] * stream_count)
+ stats_table.set_cols_dtype(['t'] + ['t'] * stream_count)
+
+ stats_table.add_rows([[k] + v
+ for k, v in sstats_data.iteritems()],
+ header=False)
+
+ header = ["PG ID"] + [key for key in streams_keys]
+ stats_table.header(header)
+
+ return {"streams_statistics": ExportableStats(sstats_data, stats_table)}
+
+
+
+ per_stream_stats = OrderedDict([("owner", []),
+ ("state", []),
+ ("--", []),
+ ("Tx bps L2", []),
+ ("Tx bps L1", []),
+ ("Tx pps", []),
+ ("Line Util.", []),
+
+ ("---", []),
+ ("Rx bps", []),
+ ("Rx pps", []),
+
+ ("----", []),
+ ("opackets", []),
+ ("ipackets", []),
+ ("obytes", []),
+ ("ibytes", []),
+ ("tx-bytes", []),
+ ("rx-bytes", []),
+ ("tx-pkts", []),
+ ("rx-pkts", []),
+
+ ("-----", []),
+ ("oerrors", []),
+ ("ierrors", []),
+
+ ]
+ )
+
+ total_stats = CPortStats(None)
+
+ for port_obj in relevant_ports:
+ # fetch port data
+ port_stats = port_obj.generate_port_stats()
+
+ total_stats += port_obj.port_stats
+
+ # populate to data structures
+ return_stats_data[port_obj.port_id] = port_stats
+ self.__update_per_field_dict(port_stats, per_field_stats)
+
+ total_cols = len(relevant_ports)
+ header = ["port"] + [port.port_id for port in relevant_ports]
+
+ if (total_cols > 1):
+ self.__update_per_field_dict(total_stats.generate_stats(), per_field_stats)
+ header += ['total']
+ total_cols += 1
+
+ stats_table = text_tables.TRexTextTable()
+ stats_table.set_cols_align(["l"] + ["r"] * total_cols)
+ stats_table.set_cols_width([10] + [17] * total_cols)
+ stats_table.set_cols_dtype(['t'] + ['t'] * total_cols)
+
+ stats_table.add_rows([[k] + v
+ for k, v in per_field_stats.iteritems()],
+ header=False)
+
+ stats_table.header(header)
+
+ return {"streams_statistics": ExportableStats(return_stats_data, stats_table)}
+
+
def _generate_port_stats(self, port_id_list):
relevant_ports = self.__get_relevant_ports(port_id_list)
@@ -131,10 +235,10 @@ class CTRexInfoGenerator(object):
("ipackets", []),
("obytes", []),
("ibytes", []),
- ("tx_bytes", []),
- ("rx_bytes", []),
- ("tx_pkts", []),
- ("rx_pkts", []),
+ ("tx-bytes", []),
+ ("rx-bytes", []),
+ ("tx-pkts", []),
+ ("rx-pkts", []),
("-----", []),
("oerrors", []),
@@ -278,58 +382,40 @@ class CTRexInfoGenerator(object):
class CTRexStats(object):
""" This is an abstract class to represent a stats object """
- def __init__(self):
+ def __init__(self, flowing = True):
self.reference_stats = {}
self.latest_stats = {}
self.last_update_ts = time.time()
self.history = deque(maxlen = 10)
self.lock = threading.Lock()
- def __getitem__(self, item):
- # override this to allow quick and clean access to fields
- if not item in self.latest_stats:
- return "N/A"
-
- # item must exist
- m = re.search('_(([a-z])ps)$', item)
- if m:
- # this is a non-relative item
- unit = m.group(2)
- if unit == "b":
- return self.get(item, format=True, suffix="b/sec")
- elif unit == "p":
- return self.get(item, format=True, suffix="pkt/sec")
- else:
- return self.get(item, format=True, suffix=m.group(1))
-
- m = re.search('^[i|o](a-z+)$', item)
- if m:
- # this is a non-relative item
- type = m.group(1)
- if type == "bytes":
- return self.get_rel(item, format=True, suffix="B")
- elif type == "packets":
- return self.get_rel(item, format=True, suffix="pkts")
- else:
- # do not format with suffix
- return self.get_rel(item, format=True)
+ # does the object get a constant flow of data?
+ self.flowing = flowing
- # can't match to any known pattern, return N/A
- return "N/A"
+ ######## abstract methods ##########
+ # get stats for user / API
+ def get_stats (self):
+ raise NotImplementedError()
+ # generate format stats (for TUI)
def generate_stats(self):
- # must be implemented by designated classes (such as port/ global stats)
raise NotImplementedError()
- def generate_extended_values (self, snapshot):
+ # called when a snapshot arrives - add more fields
+ def preprocess_snapshot (self, snapshot):
raise NotImplementedError()
+ ######## END abstract methods ##########
+
+ def __update_ref (self):
+ deep_merge_dicts(self.reference_stats, self.latest_stats)
+
def update(self, snapshot):
# some extended generated values (from base values)
- self.generate_extended_values(snapshot)
+ self.preprocess_snapshot(snapshot)
# update
self.latest_stats = snapshot
@@ -339,42 +425,64 @@ class CTRexStats(object):
diff_time = time.time() - self.last_update_ts
- # 3 seconds is too much - this is the new reference
- if (not self.reference_stats) or (diff_time > 3):
- self.reference_stats = self.latest_stats
+ # handle the reference (base)
+ self.__update_ref()
+
+ # for flowing objects 3 seconds is too much
+ if self.flowing and (diff_time > 3):
+ self.clear_stats()
-
self.last_update_ts = time.time()
def clear_stats(self):
- self.reference_stats = self.latest_stats
+ self.reference_stats = copy.deepcopy(self.latest_stats)
def invalidate (self):
self.latest_stats = {}
+
+ def __get (self, src, field):
+ if isinstance(field, list):
+ # deep
+ value = src
+ for level in field:
+ if not level in value:
+ return None
+ value = value[level]
+ else:
+ # flat
+ if not field in src:
+ return None
+ value = src[field]
+
+ return value
+
def get(self, field, format=False, suffix=""):
- if not field in self.latest_stats:
+ value = self.__get(self.latest_stats, field)
+ if value == None:
return "N/A"
- if not format:
- return self.latest_stats[field]
- else:
- return format_num(self.latest_stats[field], suffix)
+
+ return value if not format else format_num(value, suffix)
+
def get_rel(self, field, format=False, suffix=""):
- if not field in self.latest_stats:
+ ref_value = self.__get(self.reference_stats, field)
+ if ref_value == None:
return "N/A"
- if not format:
- if not field in self.reference_stats:
- print "REF: " + str(self.reference_stats)
- print "BASE: " + str(self.latest_stats)
+ # if the latest does not have the value - it's the same as the ref
+ latest_value = self.__get(self.latest_stats, field)
+ if latest_value == None:
+ latest_value = ref_value
+
+
+ value = latest_value - ref_value
+
+ return value if not format else format_num(value, suffix)
- return (self.latest_stats[field] - self.reference_stats[field])
- else:
- return format_num(self.latest_stats[field] - self.reference_stats[field], suffix)
# get trend for a field
def get_trend (self, field, use_raw = False, percision = 10.0):
@@ -458,7 +566,7 @@ class CGlobalStats(CTRexStats):
return stats
- def generate_extended_values (self, snapshot):
+ def preprocess_snapshot (self, snapshot):
# L1 bps
bps = snapshot.get("m_tx_bps")
pps = snapshot.get("m_tx_pps")
@@ -568,7 +676,7 @@ class CPortStats(CTRexStats):
return stats
- def generate_extended_values (self, snapshot):
+ def preprocess_snapshot (self, snapshot):
# L1 bps
bps = snapshot.get("m_total_tx_bps")
pps = snapshot.get("m_total_tx_pps")
@@ -627,10 +735,10 @@ class CPortStats(CTRexStats):
"obytes" : self.get_rel("obytes"),
"ibytes" : self.get_rel("ibytes"),
- "tx_bytes": self.get_rel("obytes", format = True, suffix = "B"),
- "rx_bytes": self.get_rel("ibytes", format = True, suffix = "B"),
- "tx_pkts": self.get_rel("opackets", format = True, suffix = "pkts"),
- "rx_pkts": self.get_rel("ipackets", format = True, suffix = "pkts"),
+ "tx-bytes": self.get_rel("obytes", format = True, suffix = "B"),
+ "rx-bytes": self.get_rel("ibytes", format = True, suffix = "B"),
+ "tx-pkts": self.get_rel("opackets", format = True, suffix = "pkts"),
+ "rx-pkts": self.get_rel("ipackets", format = True, suffix = "pkts"),
"oerrors" : format_num(self.get_rel("oerrors"),
compact = False,
@@ -643,33 +751,120 @@ class CPortStats(CTRexStats):
}
-class CRxStats(object):
+class CRxStats(CTRexStats):
def __init__(self):
- self.flow_stats = {}
+ super(CRxStats, self).__init__(flowing = False)
+
+
+
+ def preprocess_snapshot (self, snapshot):
+ # heavy pre-processing here...
+ new_snapshot = {}
+
+ if not 'timestamp' in snapshot:
+ raise ValueError("INTERNAL ERROR: RX stats snapshot MUST contain 'timestamp' field")
+
+ for key, value in snapshot.iteritems():
+ # skip non int values (simply copy)
+ if key == 'timestamp':
+ new_snapshot[key] = value
+ continue
+
+ # all the rest MUST be ints
+ try:
+ pg_id = int(key)
+ except ValueError:
+ assert(0)
+ # handle PG ID
+ new_snapshot[pg_id] = {}
+ for field in ['tx_pkts', 'tx_bytes', 'rx_pkts']:
+ new_snapshot[pg_id][field] = {'total': 0}
+ if field in value:
+ for port, pv in value[field].iteritems():
+ new_snapshot[pg_id][field][int(port)] = pv
+ new_snapshot[pg_id][field]['total'] += pv
- def update (self, snapshot):
- self.flow_stats = snapshot
+ snapshot.clear()
+ snapshot.update(new_snapshot)
+
def get_stats (self):
stats = {}
- for pg_id, pg_id_data in self.flow_stats.iteritems():
- # ignore non pg ID keys
+
+ for pg_id in self.get_streams_keys():
+ stats[pg_id] = {}
+ for field in ['tx_pkts', 'tx_bytes', 'rx_pkts']:
+ stats[pg_id][field] = {'total': self.get_rel([pg_id, field, 'total'])}
+
+ for port, pv in self.reference_stats[pg_id][field].iteritems():
+ stats[pg_id][field][port] = self.get_rel([pg_id, field, port])
+
+ return stats
+
+
+
+ def get_streams_keys (self):
+ keys = []
+ for user_id, user_id_data in self.reference_stats.iteritems():
+ # ignore non user ID keys
try:
- pg_id = int(pg_id)
+ keys.append(int(user_id))
except ValueError:
continue
- # handle pg id
- stats[pg_id] = {}
- for field, per_port_data in pg_id_data.iteritems():
- stats[pg_id][field] = {}
- for port, value in per_port_data.iteritems():
- stats[pg_id][field][int(port)] = value
+ return keys
- return stats
+ def generate_stats (self):
+
+ stats = self.get_stats()
+ pg_ids = stats.keys()[:4]
+ cnt = len(pg_ids)
+
+
+ formatted_stats = OrderedDict([ ('Tx bps L2', []),
+ ('Tx bps L1', []),
+ ('Tx pps', []),
+ ('---', [''] * cnt),
+ ('Rx pps', []),
+ ('----', [''] * cnt),
+ ('opackets', []),
+ ('ipackets', []),
+ ('obytes', []),
+ ('ibytes', []),
+ ('-----', [''] * cnt),
+ ('tx_pkts', []),
+ ('rx_pkts', []),
+ ('tx_bytes', []),
+ ('rx_bytes', [])
+ ])
+
+
+
+ # maximum 4
+ for pg_id in pg_ids:
+
+ #formatted_stats['TX packet count'].append(stats[key]['tx-pkts']['total'])
+ #formatted_stats['TX byte count'].append(stats[key]['tx-bytes']['total'])
+ #formatted_stats['RX packet count'].append(stats[key]['rx-pkts']['total'])
+ formatted_stats['Tx bps L2'].append(0)
+ formatted_stats['Tx bps L1'].append(0)
+ formatted_stats['Tx pps'].append(0)
+ formatted_stats['Rx pps'].append(0)
+ formatted_stats['opackets'].append(self.get_rel([pg_id, 'tx_pkts', 'total']))
+ formatted_stats['ipackets'].append(self.get_rel([pg_id, 'rx_pkts', 'total']))
+ formatted_stats['obytes'].append(self.get_rel([pg_id, 'tx_bytes', 'total']))
+ formatted_stats['ibytes'].append(self.get_rel([pg_id, 'rx_bytes', 'total']))
+ formatted_stats['tx_bytes'].append(self.get_rel([pg_id, 'tx_bytes', 'total'], format = True, suffix = "B"))
+ formatted_stats['rx_bytes'].append(self.get_rel([pg_id, 'rx_bytes', 'total'], format = True, suffix = "B"))
+ formatted_stats['tx_pkts'].append(self.get_rel([pg_id, 'tx_pkts', 'total'], format = True, suffix = "pkts"))
+ formatted_stats['rx_pkts'].append(self.get_rel([pg_id, 'rx_pkts', 'total'], format = True, suffix = "pkts"))
+
+
+
+ return formatted_stats
if __name__ == "__main__":
pass
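
Two small building blocks carry most of the stats refactor above: deep_merge_dicts(), which folds each new snapshot into the reference stats without dropping keys the latest snapshot no longer reports, and the list-valued field accepted by get()/get_rel(), which walks nested dictionaries such as stats[pg_id]['tx_pkts']['total']. A standalone sketch of both ideas (simplified copies written with items() so they run under Python 2 and 3; not the library code itself):

    import copy

    def deep_merge_dicts(dst, src):
        # deep merge, dst = src + dst: keys missing from dst are deep-copied in,
        # nested dicts are merged recursively, existing leaf values in dst win
        for k, v in src.items():
            if k not in dst:
                dst[k] = copy.deepcopy(v)
            elif isinstance(v, dict):
                deep_merge_dicts(dst[k], v)

    def deep_get(src, field):
        # 'field' is either a plain key or a list describing a path into nested
        # dicts, mirroring the list-valued fields of CTRexStats.get()/get_rel()
        if isinstance(field, list):
            value = src
            for level in field:
                if level not in value:
                    return None
                value = value[level]
            return value
        return src.get(field)

    # reference snapshot, taken when the stats were last cleared
    reference = {7: {'tx_pkts': {'total': 100, 0: 100}}}

    # latest snapshot from the server: PG ID 7 advanced, PG ID 12 is new
    latest = {7:  {'tx_pkts': {'total': 150, 0: 150}},
              12: {'tx_pkts': {'total': 30,  1: 30}}}

    # fold the latest snapshot into the reference so new PG IDs get a baseline
    deep_merge_dicts(reference, latest)

    rel = (deep_get(latest,    [7, 'tx_pkts', 'total']) -
           deep_get(reference, [7, 'tx_pkts', 'total']))
    print(rel)  # 50 packets sent on PG ID 7 since the reference was taken
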
diff --git a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
index 649c192a..0390ac9c 100755
--- a/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
+++ b/scripts/automation/trex_control_plane/stl/trex_stl_lib/utils/parsing_opts.py
@@ -34,7 +34,8 @@ PROMISCUOUS_SWITCH = 21
GLOBAL_STATS = 50
PORT_STATS = 51
PORT_STATUS = 52
-STATS_MASK = 53
+STREAMS_STATS = 53
+STATS_MASK = 54
STREAMS_MASK = 60
# ALL_STREAMS = 61
@@ -312,6 +313,10 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'action': 'store_true',
'help': "Fetch only port status data"}),
+ STREAMS_STATS: ArgumentPack(['-s'],
+ {'action': 'store_true',
+ 'help': "Fetch only streams stats"}),
+
STREAMS_MASK: ArgumentPack(['--streams'],
{"nargs": '+',
'dest':'streams',
@@ -336,7 +341,8 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'required': True}),
STATS_MASK: ArgumentGroup(MUTEX, [GLOBAL_STATS,
PORT_STATS,
- PORT_STATUS],
+ PORT_STATUS,
+ STREAMS_STATS],
{})
}
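
The new STREAMS_STATS pack joins the existing masks in the mutually exclusive STATS_MASK group, so a stats request can be narrowed to the streams view alone with -s. A rough plain-argparse equivalent of that group (illustrative only; the short flags for the three existing masks are assumed here, and the real code builds the group through the ArgumentPack/ArgumentGroup helpers shown above):

    import argparse

    # Plain-argparse sketch of the mutually exclusive stats mask; flag names
    # other than '-s' are assumptions for illustration.
    parser = argparse.ArgumentParser(description='fetch stats (sketch)')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-g', dest='global_stats', action='store_true',
                       help='Fetch only global statistics')
    group.add_argument('-p', dest='port_stats', action='store_true',
                       help='Fetch only port statistics')
    group.add_argument('--ps', dest='port_status', action='store_true',
                       help='Fetch only port status data')
    group.add_argument('-s', dest='streams_stats', action='store_true',
                       help='Fetch only streams stats')

    opts = parser.parse_args(['-s'])
    print(opts.streams_stats)  # True - only the streams statistics view was requested
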