author     Dan Klein <danklein10@gmail.com>   2016-01-08 13:54:36 +0200
committer  Dan Klein <danklein10@gmail.com>   2016-01-08 13:54:36 +0200
commit     8e037c2bd51844dc7c42ce7b2339806d9dcb964b (patch)
tree       cd45e5214e3db13f214a0e68b3fd37f366438b80
parent     8db09096b9dcf030b7dc744fbd7ee463d8e6fd1b (diff)
parent     9fc980b8aa43cf53446eeeb5184f10a86476da28 (diff)
Merge branch 'dan_stateless'
Added support for the "streams" command.
Missing:
1. "--full" output
2. sync with server after crash
3. merging output for identical port streams
-rw-r--r--   scripts/automation/trex_control_plane/client/trex_port.py               72
-rwxr-xr-x   scripts/automation/trex_control_plane/client/trex_stateless_client.py   41
-rwxr-xr-x   scripts/automation/trex_control_plane/client_utils/packet_builder.py    38
-rwxr-xr-x   scripts/automation/trex_control_plane/client_utils/parsing_opts.py      19
-rw-r--r--   scripts/automation/trex_control_plane/client_utils/text_tables.py        5
-rwxr-xr-x   scripts/automation/trex_control_plane/common/trex_stats.py              46
-rwxr-xr-x   scripts/automation/trex_control_plane/common/trex_streams.py            34
-rwxr-xr-x   scripts/automation/trex_control_plane/console/trex_console.py            9
-rw-r--r--   scripts/stl/imix_3pkt_vm.yaml                                            94
9 files changed, 326 insertions, 32 deletions
diff --git a/scripts/automation/trex_control_plane/client/trex_port.py b/scripts/automation/trex_control_plane/client/trex_port.py
index 54b4945e..fc63cf0d 100644
--- a/scripts/automation/trex_control_plane/client/trex_port.py
+++ b/scripts/automation/trex_control_plane/client/trex_port.py
@@ -1,8 +1,9 @@
-from collections import namedtuple
+from collections import namedtuple, OrderedDict
from common.trex_types import *
from common import trex_stats
-
+from client_utils import packet_builder
+StreamOnPort = namedtuple('StreamOnPort', ['compiled_stream', 'metadata'])
########## utility ############
def mult_to_factor (mult, max_bps, max_pps, line_util):
@@ -47,6 +48,7 @@ class Port(object):
self.streams = {}
self.profile = None
self.session_id = session_id
+ self.loaded_stream_pack = None
self.port_stats = trex_stats.CPortStats(self)
@@ -124,7 +126,9 @@ class Port(object):
elif port_state == "PAUSE":
self.state = self.STATE_PAUSE
else:
- raise Exception("port {0}: bad state received from server '{1}'".format(self.port_id, sync_data['state']))
+ raise Exception("port {0}: bad state received from server '{1}'".format(self.port_id, port_state))
+
+ # TODO: handle syncing the streams into stream_db
return self.ok()
@@ -151,7 +155,7 @@ class Port(object):
return self.err(rc.err())
# add the stream
- self.streams[stream_id] = stream_obj
+ self.streams[stream_id] = StreamOnPort(stream_obj, Port._generate_stream_metadata(stream_id, stream_obj))
# the only valid state now
self.state = self.STATE_STREAMS
@@ -159,14 +163,17 @@ class Port(object):
return self.ok()
# add multiple streams
- def add_streams (self, streams_list):
+ def add_streams (self, LoadedStreamList_obj):
batch = []
- for stream in streams_list:
+ self.loaded_stream_pack = LoadedStreamList_obj
+ compiled_stream_list = LoadedStreamList_obj.compiled
+
+ for stream_pack in compiled_stream_list:
params = {"handler": self.handler,
"port_id": self.port_id,
- "stream_id": stream.stream_id,
- "stream": stream.stream}
+ "stream_id": stream_pack.stream_id,
+ "stream": stream_pack.stream}
cmd = RpcCmdData('add_stream', params)
batch.append(cmd)
@@ -178,8 +185,10 @@ class Port(object):
# validate that every action succeeded
# add the stream
- for stream in streams_list:
- self.streams[stream.stream_id] = stream.stream
+ for stream_pack in compiled_stream_list:
+ self.streams[stream_pack.stream_id] = StreamOnPort(stream_pack.stream,
+ Port._generate_stream_metadata(stream_pack.stream_id,
+ stream_pack.stream))
# the only valid state now
self.state = self.STATE_STREAMS
@@ -203,7 +212,7 @@ class Port(object):
self.streams[stream_id] = None
- self.state = self.STATE_STREAMS if len(self.streams > 0) else self.STATE_IDLE
+ self.state = self.STATE_STREAMS if (len(self.streams) > 0) else self.STATE_IDLE
return self.ok()
@@ -408,6 +417,47 @@ class Port(object):
def invalidate_stats(self):
return self.port_stats.invalidate()
+ ################# stream printout ######################
+ def generate_loaded_streams_sum(self, stream_id_list):
+ if self.state == self.STATE_DOWN or self.state == self.STATE_STREAMS:
+ return {}
+ elif self.loaded_stream_pack is None:
+ # avoid crashing when sync with remote server isn't operational
+ # TODO: MAKE SURE TO HANDLE THIS CASE FOR BETTER UX
+ return {}
+ streams_data = {}
+
+ if not stream_id_list:
+ # if no mask has been provided, apply to all streams on port
+ stream_id_list = self.streams.keys()
+
+
+ streams_data = {stream_id: self.streams[stream_id].metadata.get('stream_sum', ["N/A"] * 6)
+ for stream_id in stream_id_list
+ if stream_id in self.streams}
+
+
+ return {"referring_file" : self.loaded_stream_pack.name,
+ "streams" : streams_data}
+
+ @staticmethod
+ def _generate_stream_metadata(stream_id, compiled_stream_obj):
+ meta_dict = {}
+ # create packet stream description
+ pkt_bld_obj = packet_builder.CTRexPktBuilder()
+ pkt_bld_obj.load_from_stream_obj(compiled_stream_obj)
+ # generate stream summary based on that
+
+ next_stream = "None" if compiled_stream_obj['next_stream_id']==-1 else compiled_stream_obj['next_stream_id']
+
+ meta_dict['stream_sum'] = OrderedDict([("id", stream_id),
+ ("packet_type", "/".join(pkt_bld_obj.get_packet_layers())),
+ ("length", pkt_bld_obj.get_packet_length()),
+ ("mode", compiled_stream_obj['mode']['type']),
+ ("rate_pps", compiled_stream_obj['mode']['pps']),
+ ("next_stream", next_stream)
+ ])
+ return meta_dict
################# events handler ######################
def async_event_port_stopped (self):
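
For reference, a minimal standalone sketch of the per-stream summary that _generate_stream_metadata() builds above; the concrete values are hypothetical, only the keys and their order come from the diff:

    from collections import OrderedDict

    # hypothetical stream #1: a continuous UDP stream at 100 pps with no next stream
    stream_sum = OrderedDict([("id",          1),
                              ("packet_type", "Ethernet/IP/UDP/PLD"),
                              ("length",      64),
                              ("mode",        "continuous"),
                              ("rate_pps",    100),
                              ("next_stream", "None")])

    # generate_loaded_streams_sum() then exposes it per stream id:
    # {"referring_file": "imix_3pkt_vm.yaml", "streams": {1: stream_sum, ...}}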
diff --git a/scripts/automation/trex_control_plane/client/trex_stateless_client.py b/scripts/automation/trex_control_plane/client/trex_stateless_client.py
index 58fa53c9..6139884d 100755
--- a/scripts/automation/trex_control_plane/client/trex_stateless_client.py
+++ b/scripts/automation/trex_control_plane/client/trex_stateless_client.py
@@ -61,7 +61,7 @@ class CTRexStatelessClient(object):
self.global_stats = trex_stats.CGlobalStats(self._connection_info,
self.server_version,
self.ports)
- self.stats_generator = trex_stats.CTRexStatsGenerator(self.global_stats,
+ self.stats_generator = trex_stats.CTRexInfoGenerator(self.global_stats,
self.ports)
self.events = []
@@ -538,14 +538,14 @@ class CTRexStatelessClient(object):
- def add_stream_pack(self, stream_pack_list, port_id_list = None):
+ def add_stream_pack(self, stream_pack, port_id_list = None):
port_id_list = self.__ports(port_id_list)
rc = RC()
for port_id in port_id_list:
- rc.add(self.ports[port_id].add_streams(stream_pack_list))
+ rc.add(self.ports[port_id].add_streams(stream_pack))
return rc
@@ -821,7 +821,7 @@ class CTRexStatelessClient(object):
return rc
- rc = self.add_stream_pack(stream_list.compiled, port_id_list)
+ rc = self.add_stream_pack(stream_list, port_id_list)
rc.annotate("Attaching {0} streams to port(s) {1}:".format(len(stream_list.compiled), port_id_list))
if rc.bad():
return rc
@@ -861,6 +861,12 @@ class CTRexStatelessClient(object):
stats_obj.update(self.stats_generator.generate_single_statistic(port_id_list, stats_type))
return stats_obj
+ def cmd_streams(self, port_id_list, streams_mask=set()):
+
+ streams_obj = self.stats_generator.generate_streams_info(port_id_list, streams_mask)
+
+ return streams_obj
+
############## High Level API With Parser ################
@@ -916,7 +922,7 @@ class CTRexStatelessClient(object):
else:
# load streams from file
- stream_list = None;
+ stream_list = None
try:
stream_list = self.streams_db.load_yaml_file(opts.file[0])
except Exception as e:
@@ -1050,11 +1056,36 @@ class CTRexStatelessClient(object):
for stat_type, stat_data in stats.iteritems():
text_tables.print_table_with_header(stat_data.text_table, stat_type)
+ return RC_OK()
+
+ def cmd_streams_line(self, line):
+ '''Fetch streams statistics from TRex server by port\n'''
+ # define a parser
+ parser = parsing_opts.gen_parser(self,
+ "streams",
+ self.cmd_streams_line.__doc__,
+ parsing_opts.PORT_LIST_WITH_ALL,
+ parsing_opts.STREAMS_MASK,
+ parsing_opts.FULL_OUTPUT)
+
+ opts = parser.parse_args(line.split())
+
+ if opts is None:
+ return RC_ERR("bad command line parameters")
+
+ streams = self.cmd_streams(opts.ports, set(opts.streams))
+
+ # print stats to screen
+ for stream_hdr, port_streams_data in streams.iteritems():
+ text_tables.print_table_with_header(port_streams_data.text_table,
+ header= stream_hdr.split(":")[0] + ":",
+ untouched_header= stream_hdr.split(":")[1])
return RC_OK()
+
@timing
def cmd_validate_line (self, line):
'''validates port(s) stream configuration\n'''
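
As a usage sketch, the new cmd_streams() can also be driven directly from Python; this assumes an already-connected CTRexStatelessClient instance named client, and the port and stream ids are hypothetical:

    # fetch stream info for ports 0 and 1, restricted to stream ids 1 and 2
    streams = client.cmd_streams(port_id_list=[0, 1], streams_mask={1, 2})

    # keys look like "Port 0: imix_3pkt_vm.yaml"; values are ExportableStats objects
    for hdr, exportable in streams.iteritems():
        print(hdr)
        print(exportable.text_table.draw())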
diff --git a/scripts/automation/trex_control_plane/client_utils/packet_builder.py b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
index 1ca01a33..e7fdb5d9 100755
--- a/scripts/automation/trex_control_plane/client_utils/packet_builder.py
+++ b/scripts/automation/trex_control_plane/client_utils/packet_builder.py
@@ -9,6 +9,7 @@ import random
import string
import struct
import re
+import itertools
from abc import ABCMeta, abstractmethod
from collections import namedtuple
@@ -325,6 +326,17 @@ class CTRexPktBuilder(object):
# arrive here ONLY if pcap contained SINGLE packet
return
+ def load_from_stream_obj(self, stream_obj):
+ self.load_packet_from_byte_list(stream_obj['packet']['binary'])
+
+
+ def load_packet_from_byte_list(self, byte_list):
+ # convert byte array into buffer
+ buf = struct.pack('B'*len(byte_list), *byte_list)
+
+    # then, load it based on dpkt parsing
+ self.load_packet(dpkt.ethernet.Ethernet(buf))
+
def get_packet(self, get_ptr=False):
"""
This method provides access to the built packet, as an instance or as a pointer to packet itself.
@@ -349,6 +361,9 @@ class CTRexPktBuilder(object):
else:
return copy.copy(self._packet)
+ def get_packet_length(self):
+ return len(self._packet)
+
def get_layer(self, layer_name):
"""
This method provides access to a specific layer of the packet, as a **copy of the layer instance**.
@@ -502,6 +517,29 @@ class CTRexPktBuilder(object):
except IOError:
raise IOError(2, "The provided path could not be accessed")
+ def get_packet_layers(self, depth_limit=Ellipsis):
+ if self._packet is None:
+ raise CTRexPktBuilder.EmptyPacketError()
+ cur_layer = self._packet
+ layer_types = []
+ if depth_limit == Ellipsis:
+ iterator = itertools.count(1)
+ else:
+ iterator = xrange(depth_limit)
+ for _ in iterator:
+ # append current layer type
+ if isinstance(cur_layer, dpkt.Packet):
+ layer_types.append(type(cur_layer).__name__)
+ else:
+ # if not dpkt layer, refer as payload
+ layer_types.append("PLD")
+ # advance to next layer
+ if not hasattr(cur_layer, "data"):
+ break
+ else:
+ cur_layer = cur_layer.data
+ return layer_types
+
def export_pkt(self, file_path, link_pcap=False, pcap_name=None, pcap_ts=None):
pass
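
The layer walk added in get_packet_layers() relies only on dpkt's generic Packet/data chaining, so it can be sanity-checked standalone; a rough sketch with hypothetical packet contents:

    import dpkt

    # build a small Ethernet/IP/UDP packet whose innermost payload is a raw string
    pkt = dpkt.ethernet.Ethernet(data=dpkt.ip.IP(data=dpkt.udp.UDP(data="x" * 18)))

    layers = []
    cur = pkt
    while True:
        # dpkt layers report their class name, anything else is treated as payload
        layers.append(type(cur).__name__ if isinstance(cur, dpkt.Packet) else "PLD")
        if not hasattr(cur, "data"):
            break
        cur = cur.data

    print("/".join(layers))   # -> Ethernet/IP/UDP/PLD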
diff --git a/scripts/automation/trex_control_plane/client_utils/parsing_opts.py b/scripts/automation/trex_control_plane/client_utils/parsing_opts.py
index 5cb06604..43c97a1d 100755
--- a/scripts/automation/trex_control_plane/client_utils/parsing_opts.py
+++ b/scripts/automation/trex_control_plane/client_utils/parsing_opts.py
@@ -23,12 +23,19 @@ FORCE = 11
DRY_RUN = 12
XTERM = 13
TOTAL = 14
+FULL_OUTPUT = 15
GLOBAL_STATS = 50
PORT_STATS = 51
PORT_STATUS = 52
STATS_MASK = 53
+STREAMS_MASK = 60
+# ALL_STREAMS = 61
+# STREAM_LIST_WITH_ALL = 62
+
+
+
# list of ArgumentGroup types
MUTEX = 1
@@ -221,6 +228,10 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
'default': False,
'help': "Starts TUI in xterm window"}),
+ FULL_OUTPUT: ArgumentPack(['--full'],
+ {'action': 'store_true',
+                                'help': "Print full info in JSON format"}),
+
GLOBAL_STATS: ArgumentPack(['-g'],
{'action': 'store_true',
'help': "Fetch only global statistics"}),
@@ -233,6 +244,14 @@ OPTIONS_DB = {MULTIPLIER: ArgumentPack(['-m', '--multiplier'],
{'action': 'store_true',
'help': "Fetch only port status data"}),
+ STREAMS_MASK: ArgumentPack(['--streams'],
+ {"nargs": '+',
+ 'dest':'streams',
+ 'metavar': 'STREAMS',
+ 'type': int,
+ 'help': "A list of stream IDs to query about. Default: analyze all streams",
+ 'default': []}),
+
# advanced options
PORT_LIST_WITH_ALL: ArgumentGroup(MUTEX, [PORT_LIST,
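
A minimal argparse sketch of what the two new ArgumentPacks translate to once gen_parser() assembles them (the parser wiring itself is simplified here):

    import argparse

    parser = argparse.ArgumentParser("streams")
    parser.add_argument('--streams', nargs='+', dest='streams', metavar='STREAMS',
                        type=int, default=[],
                        help="A list of stream IDs to query about. Default: analyze all streams")
    parser.add_argument('--full', action='store_true',
                        help="Print full info in JSON format")

    print(parser.parse_args(['--streams', '1', '2', '5']).streams)   # -> [1, 2, 5]
    print(parser.parse_args([]).streams)                             # -> []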
diff --git a/scripts/automation/trex_control_plane/client_utils/text_tables.py b/scripts/automation/trex_control_plane/client_utils/text_tables.py
index 2debca38..2fa17f09 100644
--- a/scripts/automation/trex_control_plane/client_utils/text_tables.py
+++ b/scripts/automation/trex_control_plane/client_utils/text_tables.py
@@ -22,12 +22,11 @@ class TRexTextInfo(Texttable):
def generate_trex_stats_table():
pass
-def print_table_with_header(texttable_obj, header=""):
- header = header.replace("_", " ").title()
+def print_table_with_header(texttable_obj, header="", untouched_header=""):
+ header = header.replace("_", " ").title() + untouched_header
print format_text(header, 'cyan', 'underline') + "\n"
print texttable_obj.draw() + "\n"
- pass
if __name__ == "__main__":
pass
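
The extra untouched_header argument exists because .title() would mangle the YAML file name that cmd_streams_line() appends after the colon; a quick string-only illustration (file name is hypothetical):

    hdr = "Port 0: imix_3pkt_vm.yaml"
    styled, untouched = hdr.split(":")[0] + ":", hdr.split(":")[1]

    print(styled.replace("_", " ").title() + untouched)   # -> Port 0: imix_3pkt_vm.yaml
    print(hdr.replace("_", " ").title())                  # -> Port 0: Imix 3Pkt Vm.Yaml (mangled)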
diff --git a/scripts/automation/trex_control_plane/common/trex_stats.py b/scripts/automation/trex_control_plane/common/trex_stats.py
index f2a965b2..f792ab9b 100755
--- a/scripts/automation/trex_control_plane/common/trex_stats.py
+++ b/scripts/automation/trex_control_plane/common/trex_stats.py
@@ -55,10 +55,9 @@ def calculate_diff_raw (samples):
return total
-
-class CTRexStatsGenerator(object):
+class CTRexInfoGenerator(object):
"""
- This object is responsible of generating stats from objects maintained at
+    This object is responsible for generating stats and information from objects maintained at
CTRexStatelessClient and the ports.
"""
@@ -78,6 +77,19 @@ class CTRexStatsGenerator(object):
# ignore by returning empty object
return {}
+ def generate_streams_info(self, port_id_list, stream_id_list):
+ relevant_ports = self.__get_relevant_ports(port_id_list)
+
+ return_data = {}
+ for port_obj in relevant_ports:
+ streams_data = self._generate_single_port_streams_info(port_obj, stream_id_list)
+ hdr_key = "Port {port}: {yaml_file}".format(port= port_obj.port_id,
+ yaml_file= streams_data.raw_data.get('referring_file', ''))
+
+ # TODO: test for other ports with same stream structure, and join them
+ return_data[hdr_key] = streams_data
+ return return_data
+
def _generate_global_stats(self):
# stats_obj = self._async_stats.get_general_stats()
stats_data = self._global_stats.generate_stats()
@@ -170,6 +182,27 @@ class CTRexStatsGenerator(object):
return {"port_status": ExportableStats(return_stats_data, stats_table)}
+ def _generate_single_port_streams_info(self, port_obj, stream_id_list):
+
+ return_streams_data = port_obj.generate_loaded_streams_sum(stream_id_list)
+
+ # FORMAT VALUES ON DEMAND
+ for stream_id, stream_id_sum in return_streams_data['streams'].iteritems():
+ stream_id_sum['rate_pps'] = CTRexStats.format_num(stream_id_sum['rate_pps'], suffix='pps')
+ stream_id_sum['packet_type'] = self._trim_packet_headers(stream_id_sum['packet_type'], 20)
+
+ info_table = text_tables.TRexTextTable()
+ info_table.set_cols_align(["c"] + ["l"] + ["r"] + ["c"] + ["r"] + ["c"])
+ info_table.set_cols_width([4] + [20] + [8] + [16] + [10] + [12])
+
+ info_table.add_rows([v.values()
+ for k, v in return_streams_data['streams'].iteritems()],
+ header=False)
+ info_table.header(["ID", "packet type", "length", "mode", "rate", "next stream"])
+
+ return ExportableStats(return_streams_data, info_table)
+
+
def __get_relevant_ports(self, port_id_list):
# fetch owned ports
ports = [port_obj
@@ -187,6 +220,13 @@ class CTRexStatsGenerator(object):
if key in dict_dest_ref:
dict_dest_ref[key].append(val)
+ @staticmethod
+ def _trim_packet_headers(headers_str, trim_limit):
+ if len(headers_str) < trim_limit:
+ # do nothing
+ return headers_str
+ else:
+ return (headers_str[:trim_limit-3] + "...")
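
The trimming above is plain string logic and easy to verify standalone; a small sketch with hypothetical layer strings:

    def trim(headers_str, trim_limit):
        if len(headers_str) < trim_limit:
            return headers_str
        return headers_str[:trim_limit - 3] + "..."

    print(trim("Ethernet/IP/UDP/PLD", 20))        # 19 chars, under the limit -> unchanged
    print(trim("Ethernet/IP/UDP/HTTP/PLD", 20))   # -> Ethernet/IP/UDP/H...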
diff --git a/scripts/automation/trex_control_plane/common/trex_streams.py b/scripts/automation/trex_control_plane/common/trex_streams.py
index 007e2464..800b6d49 100755
--- a/scripts/automation/trex_control_plane/common/trex_streams.py
+++ b/scripts/automation/trex_control_plane/common/trex_streams.py
@@ -10,7 +10,7 @@ import copy
import os
StreamPack = namedtuple('StreamPack', ['stream_id', 'stream'])
-LoadedStreamList = namedtuple('LoadedStreamList', ['loaded', 'compiled'])
+LoadedStreamList = namedtuple('LoadedStreamList', ['name', 'loaded', 'compiled'])
class CStreamList(object):
@@ -176,9 +176,11 @@ class CStream(object):
def __init__(self):
self.is_loaded = False
self._is_compiled = False
+ self._pkt_bld_obj = CTRexPktBuilder()
for field in CStream.FIELDS:
setattr(self, field, None)
+
def load_data(self, **kwargs):
try:
for k in CStream.FIELDS:
@@ -206,8 +208,13 @@ class CStream(object):
binary = kwargs[k]["binary"]
if isinstance(binary, list):
setattr(self, k, kwargs[k])
+ # TODO: load to _pkt_bld_obj also when passed as byte array!
elif isinstance(binary, str) and binary.endswith(".pcap"):
- self.load_packet_from_pcap(binary, kwargs[k]["meta"])
+ # self.load_packet_from_pcap(binary, kwargs[k]["meta"])
+ self._pkt_bld_obj.load_packet_from_pcap(binary)
+ self._pkt_bld_obj.metadata = kwargs[k]["meta"]
+ self.packet = self._pkt_bld_obj.dump_pkt()
+
else:
raise ValueError("Packet binary attribute has been loaded with unsupported value."
"Supported values are reference to pcap file with SINGLE packet, "
@@ -254,6 +261,10 @@ class CStream(object):
else:
raise RuntimeError("CStream object isn't loaded with data. Use 'load_data' method.")
+ def get_stream_layers(self, depth_limit=Ellipsis):
+ stream_layers = self._pkt_bld_obj.get_packet_layers(depth_limit)
+ return "/".join(stream_layers)
+
# describes a stream DB
@@ -271,20 +282,23 @@ class CStreamsDB(object):
stream_list = CStreamList()
loaded_obj = stream_list.load_yaml(filename)
- compiled_streams = stream_list.compile_streams()
- rc = self.load_streams(stream_pack_name,
- LoadedStreamList(loaded_obj,
- [StreamPack(v.stream_id, v.stream.dump())
- for k, v in compiled_streams.items()]))
+ try:
+ compiled_streams = stream_list.compile_streams()
+ rc = self.load_streams(LoadedStreamList(stream_pack_name,
+ loaded_obj,
+ [StreamPack(v.stream_id, v.stream.dump())
+ for k, v in compiled_streams.items()]))
+ except Exception as e:
+ return None
return self.get_stream_pack(stream_pack_name)
- def load_streams(self, name, LoadedStreamList_obj):
- if name in self.stream_packs:
+ def load_streams(self, LoadedStreamList_obj):
+ if LoadedStreamList_obj.name in self.stream_packs:
return False
else:
- self.stream_packs[name] = LoadedStreamList_obj
+ self.stream_packs[LoadedStreamList_obj.name] = LoadedStreamList_obj
return True
def remove_stream_packs(self, *names):
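
Since LoadedStreamList now carries its own name, CStreamsDB no longer needs a separate name argument; a small sketch of the new keying, with hypothetical values:

    from collections import namedtuple

    StreamPack       = namedtuple('StreamPack', ['stream_id', 'stream'])
    LoadedStreamList = namedtuple('LoadedStreamList', ['name', 'loaded', 'compiled'])

    pack = LoadedStreamList(name='imix_3pkt_vm.yaml',
                            loaded=[],
                            compiled=[StreamPack(stream_id=1, stream={})])

    stream_packs = {}
    stream_packs[pack.name] = pack   # what load_streams() now does internally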
diff --git a/scripts/automation/trex_control_plane/console/trex_console.py b/scripts/automation/trex_control_plane/console/trex_console.py
index a3ea6693..fe4001b2 100755
--- a/scripts/automation/trex_control_plane/console/trex_console.py
+++ b/scripts/automation/trex_control_plane/console/trex_console.py
@@ -416,6 +416,15 @@ class TRexConsole(TRexGeneralCmd):
self.do_stats("-h")
@verify_connected
+ def do_streams(self, line):
+ '''Fetch statistics from TRex server by port\n'''
+ self.stateless_client.cmd_streams_line(line)
+
+
+ def help_streams(self):
+ self.do_streams("-h")
+
+ @verify_connected
def do_clear(self, line):
'''Clear cached local statistics\n'''
self.stateless_client.cmd_clear_line(line)
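
A hedged usage sketch of what do_streams() forwards to the stateless client; the --port spelling is assumed from the pre-existing PORT_LIST_WITH_ALL group, while --streams is added in this commit (assumes a connected client):

    # summarize only streams 1 and 2 on port 0
    client.cmd_streams_line("--port 0 --streams 1 2")

    # with no mask, all streams currently loaded on the selected ports are summarized
    client.cmd_streams_line("--port 0")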
diff --git a/scripts/stl/imix_3pkt_vm.yaml b/scripts/stl/imix_3pkt_vm.yaml
new file mode 100644
index 00000000..d812634c
--- /dev/null
+++ b/scripts/stl/imix_3pkt_vm.yaml
@@ -0,0 +1,94 @@
+### Three-stream UDP packets, 64B, 594B and 1518B ###
+#####################################################
+- name: udp_64B
+ stream:
+ self_start: True
+ packet:
+ binary: stl/udp_64B_no_crc.pcap # pcap should not include CRC
+ mode:
+ type: single_burst
+ total_pkts: 100
+ pps: 100
+ rx_stats: []
+
+ vm:
+ instructions: [
+ {
+ "init_value" : 100,
+ "max_value" : 5000,
+ "min_value" : 100,
+ "name" : "l3_src",
+ "op" : "inc",
+ "size" : 2,
+ "type" : "flow_var"
+ },
+ {
+ "add_value" : 1,
+ "is_big_endian" : true,
+ "name" : "l3_src",
+ "pkt_offset" : 34,
+ "type" : "write_flow_var"
+ }
+ ]
+ split_by_var: "l3_src"
+
+- name: udp_594B
+ stream:
+ self_start: True
+ packet:
+ binary: stl/udp_594B_no_crc.pcap # pcap should not include CRC
+ mode:
+ type: continuous
+ pps: 20
+ rx_stats: []
+ vm:
+ instructions: [
+ {
+ "init_value" : 100,
+ "max_value" : 5000,
+ "min_value" : 100,
+ "name" : "l3_src",
+ "op" : "inc",
+ "size" : 2,
+ "type" : "flow_var"
+ },
+ {
+ "add_value" : 1,
+ "is_big_endian" : true,
+ "name" : "l3_src",
+ "pkt_offset" : 34,
+ "type" : "write_flow_var"
+ }
+ ]
+ split_by_var: "l3_src"
+
+- name: udp_1518B
+ stream:
+ self_start: True
+ packet:
+ binary: stl/udp_1518B_no_crc.pcap # pcap should not include CRC
+ mode:
+ type: single_burst
+ total_pkts: 100
+ pps: 4
+ rx_stats: []
+ vm:
+ instructions: [
+ {
+ "init_value" : 100,
+ "max_value" : 5000,
+ "min_value" : 100,
+ "name" : "l3_src",
+ "op" : "inc",
+ "size" : 2,
+ "type" : "flow_var"
+ },
+ {
+ "add_value" : 1,
+ "is_big_endian" : true,
+ "name" : "l3_src",
+ "pkt_offset" : 34,
+ "type" : "write_flow_var"
+ }
+ ]
+ split_by_var: "l3_src"
\ No newline at end of file
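
A hedged sketch of loading this profile through the streams DB; the import path and pack naming are assumptions, and the console reaches the same path via streams_db.load_yaml_file() as shown in trex_stateless_client.py above:

    from common.trex_streams import CStreamsDB

    db = CStreamsDB()
    pack = db.load_yaml_file("stl/imix_3pkt_vm.yaml")

    if pack is None:
        print("profile failed to load or compile")   # load_yaml_file() now returns None on error
    else:
        print(pack.name)             # pack name, used as the key inside the DB
        print(len(pack.compiled))    # number of compiled streams (3 for this profile)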