Diffstat (limited to 'scripts/automation/trex_control_plane/stl/services')
-rwxr-xr-x  scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_service.py                 | 798
-rw-r--r--  scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_client.py               | 116
-rwxr-xr-x  scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_server.py               | 188
-rw-r--r--  scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/basetest.py             |  84
-rw-r--r--  scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/test_scapy_service.py   | 155
-rwxr-xr-x  scripts/automation/trex_control_plane/stl/services/scapy_server/zmq_for_scapy_server_test.py       |  14
6 files changed, 1355 insertions, 0 deletions
diff --git a/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_service.py b/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_service.py
new file mode 100755
index 00000000..91257596
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_service.py
@@ -0,0 +1,798 @@
+
+import os
+import sys
+stl_pathname = os.path.abspath(os.path.join(os.pardir, os.pardir))
+sys.path.append(stl_pathname)
+
+from trex_stl_lib.api import *
+import tempfile
+import hashlib
+import base64
+import numbers
+import inspect
+import json
+from pprint import pprint
+
+# add some layers as an example
+# need to test more
+from scapy.layers.dns import *
+from scapy.layers.dhcp import *
+from scapy.layers.ipsec import *
+from scapy.layers.netflow import *
+from scapy.layers.sctp import *
+from scapy.layers.tftp import *
+
+from scapy.contrib.mpls import *
+from scapy.contrib.igmp import *
+from scapy.contrib.igmpv3 import *
+
+
+
+
+#additional_stl_udp_pkts = os.path.abspath(os.path.join(os.pardir,os.pardir,os.pardir,os.pardir, os.pardir,'stl'))
+#sys.path.append(additional_stl_udp_pkts)
+#from udp_1pkt_vxlan import VXLAN
+#sys.path.remove(additional_stl_udp_pkts)
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+
+
+
+class Scapy_service_api():
+
+ def get_version_handler(self,client_v_major,client_v_minor):
+ """ get_version_handler(self,client_v_major,client_v_minor)
+
+ Gives a handler to client to connect and use server api
+
+ Parameters
+ ----------
+        client_v_major - major number of api version on the client side
+        client_v_minor - minor number of api version on the client side
+
+ Returns
+ -------
+ Handler(string) to provide when using server api
+ """
+ pass
+ def get_all(self,client_v_handler):
+ """ get_all(self,client_v_handler)
+
+ Sends all the protocols and fields that Scapy Service supports.
+        Also sends the md5 of the Protocol DB and the Fields DB, which are used to check whether the DBs are up to date
+
+ Parameters
+ ----------
+ None
+
+ Returns
+ -------
+ Dictionary (of protocol DB and scapy fields DB)
+
+ Raises
+ ------
+        Raises an exception when a DB error occurs (e.g. a layer is not loaded properly and has missing components)
+ """
+ pass
+
+ def check_update_of_dbs(self,client_v_handler,db_md5,field_md5):
+ """ check_update_of_dbs(self,client_v_handler,db_md5,field_md5)
+        Checks if the Scapy Service running on the server has a newer version of the databases than the client has
+
+ Parameters
+ ----------
+ db_md5 - The md5 that was delivered with the protocol database that the client owns, when first received at the client
+ field_md5 - The md5 that was delivered with the fields database that the client owns, when first received at the client
+
+ Returns
+ -------
+        True/False according to the databases' versions (determined by their md5)
+
+ Raises
+ ------
+ Raises an exception (ScapyException) when protocol DB/Fields DB is not up to date
+ """
+ pass
+
+
+ def build_pkt(self,client_v_handler,pkt_model_descriptor):
+ """ build_pkt(self,client_v_handler,pkt_model_descriptor) -> Dictionary (of Offsets,Show2 and Buffer)
+
+ Performs calculations on the given packet and returns results for that packet.
+
+ Parameters
+ ----------
+        pkt_model_descriptor - An array of dictionaries describing a network packet
+
+ Returns
+ -------
+ - The packets offsets: each field in every layer is mapped inside the Offsets Dictionary
+ - The Show2: A description of each field and its value in every layer of the packet
+ - The Buffer: The Hexdump of packet encoded in base64
+
+ Raises
+ ------
+        Will raise an exception when the packet model is illegal, contains a syntax error, contains a non-supported
+        protocol, etc.
+ """
+ pass
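+        # Illustrative usage sketch (assuming `service` is a Scapy_service instance and
+        # `handler` was obtained from get_version_handler); mirrors the unit tests:
+        #   model = [{"id": "Ether", "fields": [{"id": "dst", "value": "10:10:10:10:10:10"}]},
+        #            {"id": "IP"},
+        #            {"id": "TCP", "fields": [{"id": "sport", "value": 443}]}]
+        #   res = service.build_pkt(handler, model)
+        #   res['data']    # per-layer field tree with offsets
+        #   res['binary']  # base64-encoded packet bytes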
+
+
+ def get_tree(self,client_v_handler):
+ """ get_tree(self) -> Dictionary describing an example of hierarchy in layers
+
+ Scapy service holds a tree of layers that can be stacked to a recommended packet
+ according to the hierarchy
+
+ Parameters
+ ----------
+ None
+
+ Returns
+ -------
+ Returns an example hierarchy tree of layers that can be stacked to a packet
+
+ Raises
+ ------
+ None
+ """
+ pass
+
+ def reconstruct_pkt(self,client_v_handler,binary_pkt,model_descriptor):
+ """ reconstruct_pkt(self,client_v_handler,binary_pkt)
+
+ Makes a Scapy valid packet by applying changes to binary packet and returns all information returned in build_pkt
+
+ Parameters
+ ----------
+ Source packet in binary_pkt, formatted in "base64" encoding
+ List of changes in model_descriptor
+
+ Returns
+ -------
+ All data provided in build_pkt:
+ show2 - detailed description of the packet
+ buffer - the packet presented in binary
+ offsets - the offset[in bytes] of each field in the packet
+
+ """
+ pass
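+        # Illustrative usage sketch (assuming `res` came from a previous build_pkt call):
+        #   changes = [{"id": "Ether"},
+        #              {"id": "IP"},
+        #              {"id": "TCP", "fields": [{"id": "dport", "value": 777}]}]
+        #   res2 = service.reconstruct_pkt(handler, res['binary'], changes)
+        #   # a layer entry containing {"delete": True} truncates the packet from that layer on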
+
+ def read_pcap(self,client_v_handler,pcap_base64):
+ """ read_pcap(self,client_v_handler,pcap_base64)
+
+ Parses pcap file contents and returns an array with build_pkt information for each packet
+
+ Parameters
+ ----------
+ binary pcap file in base64 encoding
+
+ Returns
+ -------
+ Array of build_pkt(packet)
+ """
+ pass
+
+ def write_pcap(self,client_v_handler,packets_base64):
+ """ write_pcap(self,client_v_handler,packets_base64)
+
+ Writes binary packets to pcap file
+
+ Parameters
+ ----------
+ array of binary packets in base64 encoding
+
+ Returns
+ -------
+ binary pcap file in base64 encoding
+ """
+ pass
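+        # Illustrative round-trip sketch (all values are base64 strings), as in the unit tests:
+        #   pcap_b64 = service.write_pcap(handler, [res['binary']])
+        #   packets  = service.read_pcap(handler, pcap_b64)  # list of build_pkt-style results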
+
+ def get_definitions(self,client_v_handler, def_filter):
+ """ get_definitions(self,client_v_handler, def_filter)
+
+ Returns protocols and fields metadata of scapy service
+
+ Parameters
+ ----------
+ def_filter - array of protocol names
+
+ Returns
+ -------
+ definitions for protocols
+ """
+ pass
+
+ def get_payload_classes(self,client_v_handler, pkt_model_descriptor):
+ """ get_payload_classes(self,client_v_handler, pkt_model_descriptor)
+
+        Returns an array of protocol classes that can normally be used as a payload
+
+ Parameters
+ ----------
+ pkt_model_descriptor - see build_pkt
+
+ Returns
+ -------
+ array of supported protocol classes
+ """
+ pass
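+        # Illustrative usage sketch:
+        #   defs = service.get_definitions(handler, ["Ether"])   # {"protocols": [{"id": "Ether", ...}]}
+        #   payloads = service.get_payload_classes(handler, [{"id": "Ether"}])
+        #   # e.g. "IP" and "Dot1Q" are among the returned class names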
+
+def is_python(version):
+ return version == sys.version_info[0]
+
+def is_number(obj):
+ return isinstance(obj, numbers.Number)
+
+def is_string(obj):
+ return type(obj) == str or type(obj).__name__ == 'unicode' # python3 doesn't have unicode type
+
+def is_ascii_str(strval):
+ return strval and all(ord(ch) < 128 for ch in strval)
+
+def is_ascii_bytes(buf):
+ return buf and all(byte < 128 for byte in buf)
+
+def is_ascii(obj):
+ if is_bytes3(obj):
+ return is_ascii_bytes(obj)
+ else:
+ return is_ascii_str(obj)
+
+def is_bytes3(obj):
+ # checks if obj is exactly bytes(always false for python2)
+ return is_python(3) and type(obj) == bytes
+
+def str_to_bytes(strval):
+ return strval.encode("utf8")
+
+def bytes_to_str(buf):
+ return buf.decode("utf8")
+
+def b64_to_bytes(payload_base64):
+ # get bytes from base64 string(unicode)
+ return base64.b64decode(payload_base64)
+
+def bytes_to_b64(buf):
+ # bytes to base64 string(unicode)
+ return base64.b64encode(buf).decode('ascii')
+
+def get_sample_field_val(scapy_layer, fieldId):
+ # get some sample value for the field, to determine the value type
+ # use random or serialized value if default value is None
+ field_desc, current_val = scapy_layer.getfield_and_val(fieldId)
+ if current_val is not None:
+ return current_val
+ try:
+ # try to get some random value to determine type
+ return field_desc.randval()._fix()
+ except:
+ pass
+ try:
+ # try to serialize/deserialize
+ ltype = type(scapy_layer)
+ pkt = ltype(bytes(ltype()))
+ return pkt.getfieldval(fieldId)
+ except:
+ pass
+
+class ScapyException(Exception): pass
+class Scapy_service(Scapy_service_api):
+
+#----------------------------------------------------------------------------------------------------
+ class ScapyFieldDesc:
+ def __init__(self,FieldName,regex='empty'):
+ self.FieldName = FieldName
+ self.regex = regex
+            #default values - should be changed when needed, or added to constructor
+ self.string_input =""
+ self.string_input_mex_len = 1
+ self.integer_input = 0
+ self.integer_input_min = 0
+ self.integer_input_max = 1
+ self.input_array = []
+ self.input_list_max_len = 1
+
+ def stringRegex(self):
+ return self.regex
+#----------------------------------------------------------------------------------------------------
+ def __init__(self):
+ self.Raw = {'Raw':''}
+ self.high_level_protocols = ['Raw']
+ self.transport_protocols = {'TCP':self.Raw,'UDP':self.Raw}
+ self.network_protocols = {'IP':self.transport_protocols ,'ARP':''}
+ self.low_level_protocols = { 'Ether': self.network_protocols }
+ self.regexDB= {'MACField' : self.ScapyFieldDesc('MACField','^([0-9a-fA-F][0-9a-fA-F]:){5}([0-9a-fA-F][0-9a-fA-F])$'),
+ 'IPField' : self.ScapyFieldDesc('IPField','^(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])$')}
+ self.all_protocols = self._build_lib()
+ self.protocol_tree = {'ALL':{'Ether':{'ARP':{},'IP':{'TCP':{'RAW':'payload'},'UDP':{'RAW':'payload'}}}}}
+ self.version_major = '1'
+ self.version_minor = '01'
+ self.server_v_hashed = self._generate_version_hash(self.version_major,self.version_minor)
+
+
+ def _all_protocol_structs(self):
+ old_stdout = sys.stdout
+ sys.stdout = mystdout = StringIO()
+ ls()
+ sys.stdout = old_stdout
+ all_protocol_data= mystdout.getvalue()
+ return all_protocol_data
+
+ def _protocol_struct(self,protocol):
+ if '_' in protocol:
+ return []
+ if not protocol=='':
+ if protocol not in self.all_protocols:
+ return 'protocol not supported'
+ protocol = eval(protocol)
+ old_stdout = sys.stdout
+ sys.stdout = mystdout = StringIO()
+ ls(protocol)
+ sys.stdout = old_stdout
+ protocol_data= mystdout.getvalue()
+ return protocol_data
+
+ def _build_lib(self):
+ lib = self._all_protocol_structs()
+ lib = lib.splitlines()
+ all_protocols=[]
+ for entry in lib:
+ entry = entry.split(':')
+ all_protocols.append(entry[0].strip())
+ del all_protocols[len(all_protocols)-1]
+ return all_protocols
+
+ def _parse_description_line(self,line):
+ line_arr = [x.strip() for x in re.split(': | = ',line)]
+ return tuple(line_arr)
+
+ def _parse_entire_description(self,description):
+ description = description.split('\n')
+ description_list = [self._parse_description_line(x) for x in description]
+ del description_list[len(description_list)-1]
+ return description_list
+
+ def _get_protocol_details(self,p_name):
+ protocol_str = self._protocol_struct(p_name)
+ if protocol_str=='protocol not supported':
+ return 'protocol not supported'
+        if len(protocol_str) == 0:
+ return []
+ tupled_protocol = self._parse_entire_description(protocol_str)
+ return tupled_protocol
+
+ def _value_from_dict(self, val):
+ # allows building python objects from json
+ if type(val) == type({}):
+ value_type = val['vtype']
+ if value_type == 'EXPRESSION':
+ return eval(val['expr'], {})
+ elif value_type == 'BYTES': # bytes payload(ex Raw.load)
+ return b64_to_bytes(val['base64'])
+ elif value_type == 'OBJECT':
+ return val['value']
+ else:
+ return val # it's better to specify type explicitly
+ elif type(val) == type([]):
+ return [self._value_from_dict(v) for v in val]
+ else:
+ return val
+
+ def _field_value_from_def(self, layer, fieldId, val):
+ field_desc = layer.get_field(fieldId)
+ sample_val = get_sample_field_val(layer, fieldId)
+ # extensions for field values
+ if type(val) == type({}):
+ value_type = val['vtype']
+ if value_type == 'UNDEFINED': # clear field value
+ return None
+ elif value_type == 'RANDOM': # random field value
+ return field_desc.randval()
+ elif value_type == 'MACHINE': # internal machine field repr
+ return field_desc.m2i(layer, b64_to_bytes(val['base64']))
+ if is_number(sample_val) and is_string(val):
+ # human-value. guess the type and convert to internal value
+ # seems setfieldval already does this for some fields,
+ # but does not convert strings/hex(0x123) to integers and long
+ val = str(val) # unicode -> str(ascii)
+ # parse str to int/long as a decimal or hex
+ val_constructor = type(sample_val)
+ if len(val) == 0:
+ return None
+ elif re.match(r"^0x[\da-f]+$", val, flags=re.IGNORECASE): # hex
+ return val_constructor(val, 16)
+ elif re.match(r"^\d+L?$", val): # base10
+ return val_constructor(val)
+ # generate recursive field-independent values
+ return self._value_from_dict(val)
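+    # Accepted field value forms for the conversion above (sketch, mirroring the unit tests):
+    #   "0x16"                                 -> parsed as hex into the field's numeric type
+    #   "443"                                  -> parsed as a decimal number
+    #   {"vtype": "UNDEFINED"}                 -> clears the field (scapy default is used)
+    #   {"vtype": "RANDOM"}                    -> field_desc.randval()
+    #   {"vtype": "MACHINE", "base64": "..."}  -> machine/internal representation via m2i
+    #   {"vtype": "BYTES", "base64": "..."}    -> raw bytes payload (e.g. Raw.load)
+    #   {"vtype": "EXPRESSION", "expr": "..."} -> python expression evaluated with eval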
+
+ def _print_tree(self):
+ pprint(self.protocol_tree)
+
+ def _get_all_db(self):
+ db = {}
+ for pro in self.all_protocols:
+ details = self._get_protocol_details(pro)
+ db[pro] = details
+ return db
+
+ def _get_all_fields(self):
+ fields = []
+ for pro in self.all_protocols:
+ details = self._get_protocol_details(pro)
+ for i in range(0,len(details),1):
+ if len(details[i]) == 3:
+ fields.append(details[i][1])
+ uniqueFields = list(set(fields))
+ fieldDict = {}
+ for f in uniqueFields:
+ if f in self.regexDB:
+ fieldDict[f] = self.regexDB[f].stringRegex()
+ else:
+ fieldDict[f] = self.ScapyFieldDesc(f).stringRegex()
+ return fieldDict
+
+ def _fully_define(self,pkt):
+ # returns scapy object with all fields initialized
+ rootClass = type(pkt)
+ full_pkt = rootClass(bytes(pkt))
+ full_pkt.build() # this trick initializes offset
+ return full_pkt
+
+ def _bytes_to_value(self, payload_bytes):
+ # generates struct with a value
+ return { "vtype": "BYTES", "base64": bytes_to_b64(payload_bytes) }
+
+ def _pkt_to_field_tree(self,pkt):
+ pkt.build()
+ result = []
+ pcap_struct = self._fully_define(pkt) # structure, which will appear in pcap binary
+ while pkt:
+ layer_id = type(pkt).__name__ # Scapy classname
+ layer_full = self._fully_define(pkt) # current layer recreated from binary to get auto-calculated vals
+ real_layer_id = type(pcap_struct).__name__ if pcap_struct else None
+ valid_struct = True # shows if packet is mapped correctly to the binary representation
+ if not pcap_struct:
+ valid_struct = False
+ elif not issubclass(type(pkt), type(pcap_struct)) and not issubclass(type(pcap_struct), type(pkt)):
+ # structure mismatch. no need to go deeper in pcap_struct
+ valid_struct = False
+ pcap_struct = None
+ fields = []
+ for field_desc in pkt.fields_desc:
+ field_id = field_desc.name
+ ignored = field_id not in layer_full.fields
+ offset = field_desc.offset
+ protocol_offset = pkt.offset
+ field_sz = field_desc.get_size_bytes()
+            # some values are unavailable in pkt(original model);
+            # at the same time, they can be read from the fully defined layer(layer_full)
+ fieldval = pkt.getfieldval(field_id)
+ pkt_fieldval_defined = is_string(fieldval) or is_number(fieldval) or is_bytes3(fieldval)
+ if not pkt_fieldval_defined:
+ fieldval = layer_full.getfieldval(field_id)
+ value = None
+ hvalue = None
+ value_base64 = None
+ if is_python(3) and is_bytes3(fieldval):
+ value = self._bytes_to_value(fieldval)
+ if is_ascii_bytes(fieldval):
+ hvalue = bytes_to_str(fieldval)
+ else:
+ # can't be shown as ascii.
+ # also this buffer may not be unicode-compatible(still can try to convert)
+ value = self._bytes_to_value(fieldval)
+ hvalue = '<binary>'
+ elif not is_string(fieldval):
+ # value as is. this can be int,long, or custom object(list/dict)
+ # "nice" human value, i2repr(string) will have quotes, so we have special handling for them
+ hvalue = field_desc.i2repr(pkt, fieldval)
+
+ if is_number(fieldval):
+ value = fieldval
+ if is_string(hvalue) and re.match(r"^\d+L$", hvalue):
+ hvalue = hvalue[:-1] # chop trailing L for long decimal number(python2)
+ else:
+ # fieldval is an object( class / list / dict )
+ # generic serialization/deserialization needed for proper packet rebuilding from packet tree,
+                # some classes can not be mapped to json, but we can serialize them
+ # as a python eval expr, value bytes base64, or field machine internal val(m2i)
+ value = {"vtype": "EXPRESSION", "expr": hvalue}
+ if is_python(3) and is_string(fieldval):
+ hvalue = value = fieldval
+ if is_python(2) and is_string(fieldval):
+ if is_ascii(fieldval):
+ hvalue = value = fieldval
+ else:
+ # python2 non-ascii byte buffers
+ # payload contains non-ascii chars, which
+ # sometimes can not be passed as unicode strings
+ value = self._bytes_to_value(fieldval)
+ hvalue = '<binary>'
+ if field_desc.name == 'load':
+ # show Padding(and possible similar classes) as Raw
+ layer_id = 'Raw'
+ field_sz = len(pkt)
+ value = self._bytes_to_value(fieldval)
+ field_data = {
+ "id": field_id,
+ "value": value,
+ "hvalue": hvalue,
+ "offset": offset,
+ "length": field_sz
+ }
+ if ignored:
+ field_data["ignored"] = ignored
+ fields.append(field_data)
+ layer_data = {
+ "id": layer_id,
+ "offset": pkt.offset,
+ "fields": fields,
+ "real_id": real_layer_id,
+ "valid_structure": valid_struct,
+ }
+ result.append(layer_data)
+ pkt = pkt.payload
+ if pcap_struct:
+ pcap_struct = pcap_struct.payload or None
+ return result
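+    # Shape of the result built above (sketch):
+    #   [ { "id": "Ether", "offset": 0, "real_id": "Ether", "valid_structure": True,
+    #       "fields": [ {"id": "dst", "value": ..., "hvalue": ..., "offset": ..., "length": ...}, ... ] },
+    #     ...one entry per layer... ]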
+
+#input: container
+#output: md5 encoded in base64
+ def _get_md5(self,container):
+ container = json.dumps(container)
+ m = hashlib.md5()
+ m.update(str_to_bytes(container))
+ res_md5 = bytes_to_b64(m.digest())
+ return res_md5
+
+ def get_version(self):
+ return {'built_by':'itraviv','version':self.version_major+'.'+self.version_minor}
+
+ def supported_methods(self,method_name='all'):
+ if method_name=='all':
+ methods = {}
+ for f in dir(Scapy_service):
+ if f[0]=='_':
+ continue
+ if inspect.ismethod(eval('Scapy_service.'+f)):
+ param_list = inspect.getargspec(eval('Scapy_service.'+f))[0]
+ del param_list[0] #deleting the parameter "self" that appears in every method
+ #because the server automatically operates on an instance,
+ #and this can cause confusion
+ methods[f] = (len(param_list), param_list)
+ return methods
+ if method_name in dir(Scapy_service):
+ return True
+ return False
+
+ def _generate_version_hash(self,v_major,v_minor):
+ v_for_hash = v_major+v_minor+v_major+v_minor
+ m = hashlib.md5()
+ m.update(str_to_bytes(v_for_hash))
+ return bytes_to_b64(m.digest())
+
+ def _generate_invalid_version_error(self):
+ error_desc1 = "Provided version handler does not correspond to the server's version.\nUpdate client to latest version.\nServer version:"+self.version_major+"."+self.version_minor
+ return error_desc1
+
+ def _verify_version_handler(self,client_v_handler):
+ return (self.server_v_hashed == client_v_handler)
+
+ def _parse_packet_dict(self,layer,scapy_layers,scapy_layer_names):
+ class_name = scapy_layer_names.index(layer['id'])
+ class_p = scapy_layers[class_name] # class pointer
+ scapy_layer = class_p()
+ if isinstance(scapy_layer, Raw):
+ scapy_layer.load = str_to_bytes("dummy")
+ if 'fields' in layer:
+ self._modify_layer(scapy_layer, layer['fields'])
+ return scapy_layer
+
+ def _packet_model_to_scapy_packet(self,data):
+ layers = Packet.__subclasses__()
+ layer_names = [ layer.__name__ for layer in layers]
+ base_layer = self._parse_packet_dict(data[0],layers,layer_names)
+ for i in range(1,len(data),1):
+ packet_layer = self._parse_packet_dict(data[i],layers,layer_names)
+ base_layer = base_layer/packet_layer
+ return base_layer
+
+ def _pkt_data(self,pkt):
+ if pkt == None:
+ return {'data': [], 'binary': None}
+ data = self._pkt_to_field_tree(pkt)
+ binary = bytes_to_b64(bytes(pkt))
+ res = {'data': data, 'binary': binary}
+ return res
+
+#--------------------------------------------API implementation-------------
+ def get_tree(self,client_v_handler):
+ if not (self._verify_version_handler(client_v_handler)):
+ raise ScapyException(self._generate_invalid_version_error())
+ return self.protocol_tree
+
+ def get_version_handler(self,client_v_major,client_v_minor):
+ v_handle = self._generate_version_hash(client_v_major,client_v_minor)
+ return v_handle
+
+# pkt_descriptor in packet model format (dictionary)
+ def build_pkt(self,client_v_handler,pkt_model_descriptor):
+ if not (self._verify_version_handler(client_v_handler)):
+ raise ScapyException(self._generate_invalid_version_error())
+ pkt = self._packet_model_to_scapy_packet(pkt_model_descriptor)
+ return self._pkt_data(pkt)
+
+ # @deprecated. to be removed
+ def get_all(self,client_v_handler):
+ if not (self._verify_version_handler(client_v_handler)):
+ raise ScapyException(self._generate_invalid_version_error())
+ fields=self._get_all_fields()
+ db=self._get_all_db()
+ fields_md5 = self._get_md5(fields)
+ db_md5 = self._get_md5(db)
+ res = {}
+ res['db'] = db
+ res['fields'] = fields
+ res['db_md5'] = db_md5
+ res['fields_md5'] = fields_md5
+ return res
+
+ def _is_packet_class(self, pkt_class):
+ # returns true for final Packet classes. skips aliases and metaclasses
+ return issubclass(pkt_class, Packet) and pkt_class.name and pkt_class.fields_desc
+
+ def _getDummyPacket(self, pkt_class):
+ if issubclass(pkt_class, Raw):
+ # need to have some payload. otherwise won't appear in the binary chunk
+ return pkt_class(load=str_to_bytes("dummy"))
+ else:
+ return pkt_class()
+
+
+ def _get_payload_classes(self, pkt):
+        # tries to find which subclasses are allowed.
+        # this can take a long time, since it tries to build packets with all subclasses(O(N))
+ pkt_class = type(pkt)
+ allowed_subclasses = []
+ for pkt_subclass in conf.layers:
+            if self._is_packet_class(pkt_subclass):
+                try:
+                    pkt_w_payload = pkt_class() / self._getDummyPacket(pkt_subclass)
+                    recreated_pkt = pkt_class(bytes(pkt_w_payload))
+                    if type(recreated_pkt.lastlayer()) is pkt_subclass:
+                        allowed_subclasses.append(pkt_subclass)
+                except Exception as e:
+                    # no action needed on failure, just silently skip
+                    pass
+ return allowed_subclasses
+
+ def _get_fields_definition(self, pkt_class):
+ fields = []
+ for field_desc in pkt_class.fields_desc:
+ field_data = {
+ "id": field_desc.name,
+ "name": field_desc.name
+ }
+ if isinstance(field_desc, EnumField):
+ try:
+ field_data["values_dict"] = field_desc.s2i
+ except:
+ # MultiEnumField doesn't have s2i. need better handling
+ pass
+ fields.append(field_data)
+ return fields
+
+ def get_definitions(self,client_v_handler, def_filter):
+ # def_filter is an array of classnames or None
+ all_classes = Packet.__subclasses__() # as an alternative to conf.layers
+ if def_filter:
+ all_classes = [c for c in all_classes if c.__name__ in def_filter]
+ protocols = []
+ for pkt_class in all_classes:
+ if self._is_packet_class(pkt_class):
+ # enumerate all non-abstract Packet classes
+ protocols.append({
+ "id": pkt_class.__name__,
+ "name": pkt_class.name,
+ "fields": self._get_fields_definition(pkt_class)
+ })
+ res = {"protocols": protocols}
+ return res
+
+ def get_payload_classes(self,client_v_handler, pkt_model_descriptor):
+ pkt = self._packet_model_to_scapy_packet(pkt_model_descriptor)
+ return [c.__name__ for c in self._get_payload_classes(pkt)]
+
+#input in string encoded base64
+ def check_update_of_dbs(self,client_v_handler,db_md5,field_md5):
+ if not (self._verify_version_handler(client_v_handler)):
+ raise ScapyException(self._generate_invalid_version_error())
+ fields=self._get_all_fields()
+ db=self._get_all_db()
+ current_db_md5 = self._get_md5(db)
+ current_field_md5 = self._get_md5(fields)
+ res = []
+ if (field_md5 == current_field_md5):
+ if (db_md5 == current_db_md5):
+ return True
+ else:
+ raise ScapyException("Protocol DB is not up to date")
+ else:
+ raise ScapyException("Fields DB is not up to date")
+
+ def _modify_layer(self, scapy_layer, fields):
+ for field in fields:
+ fieldId = str(field['id'])
+ fieldval = self._field_value_from_def(scapy_layer, fieldId, field['value'])
+ if fieldval is not None:
+ scapy_layer.setfieldval(fieldId, fieldval)
+ else:
+ scapy_layer.delfieldval(fieldId)
+
+ def _is_last_layer(self, layer):
+        # can be used to check that the layer has no payload
+ # if true, the layer.payload is likely NoPayload()
+ return layer is layer.lastlayer()
+
+#input of binary_pkt must be encoded in base64
+ def reconstruct_pkt(self,client_v_handler,binary_pkt,model_descriptor):
+ pkt_bin = b64_to_bytes(binary_pkt)
+ scapy_pkt = Ether(pkt_bin)
+ if not model_descriptor:
+ model_descriptor = []
+ for depth in range(len(model_descriptor)):
+ model_layer = model_descriptor[depth]
+ if model_layer.get('delete') is True:
+ # slice packet from the current item
+ if depth == 0:
+ scapy_pkt = None
+ break
+ else:
+ scapy_pkt[depth-1].payload = None
+ break
+ if depth > 0 and self._is_last_layer(scapy_pkt[depth-1]):
+ # insert new layer(s) from json definition
+ remaining_definitions = model_descriptor[depth:]
+ pkt_to_append = self._packet_model_to_scapy_packet(remaining_definitions)
+ scapy_pkt = scapy_pkt / pkt_to_append
+ break
+ # modify fields of existing stack items
+ scapy_layer = scapy_pkt[depth]
+ if model_layer['id'] != type(scapy_layer).__name__:
+ # TODO: support replacing payload, instead of breaking
+ raise ScapyException("Protocol id inconsistent")
+ if 'fields' in model_layer:
+ self._modify_layer(scapy_layer, model_layer['fields'])
+ return self._pkt_data(scapy_pkt)
+
+ def read_pcap(self,client_v_handler,pcap_base64):
+ pcap_bin = b64_to_bytes(pcap_base64)
+ pcap = []
+ res_packets = []
+ with tempfile.NamedTemporaryFile(mode='w+b') as tmpPcap:
+ tmpPcap.write(pcap_bin)
+ tmpPcap.flush()
+ pcap = rdpcap(tmpPcap.name)
+ for scapy_packet in pcap:
+ res_packets.append(self._pkt_data(scapy_packet))
+ return res_packets
+
+ def write_pcap(self,client_v_handler,packets_base64):
+ packets = [Ether(b64_to_bytes(pkt_b64)) for pkt_b64 in packets_base64]
+ pcap_bin = None
+ with tempfile.NamedTemporaryFile(mode='r+b') as tmpPcap:
+ wrpcap(tmpPcap.name, packets)
+ pcap_bin = tmpPcap.read()
+ return bytes_to_b64(pcap_bin)
+
+
+
+
+#---------------------------------------------------------------------------
+
+
diff --git a/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_client.py b/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_client.py
new file mode 100644
index 00000000..18d32272
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_client.py
@@ -0,0 +1,116 @@
+
+import sys
+import os
+python2_zmq_path = os.path.abspath(os.path.join(os.pardir,os.pardir,os.pardir,os.pardir,
+ os.pardir,'external_libs','pyzmq-14.5.0','python2','fedora18','64bit'))
+sys.path.append(python2_zmq_path)
+
+import zmq
+import json
+from argparse import *
+from pprint import pprint
+
+class Scapy_server_wrapper():
+ def __init__(self,dest_scapy_port=5555,server_ip_address='localhost'):
+ self.server_ip_address = server_ip_address
+ self.context = zmq.Context()
+ self.socket = self.context.socket(zmq.REQ)
+ self.dest_scapy_port =dest_scapy_port
+ self.socket.connect("tcp://"+str(self.server_ip_address)+":"+str(self.dest_scapy_port))
+
+ def call_method(self,method_name,method_params):
+ json_rpc_req = { "jsonrpc":"2.0","method": method_name ,"params": method_params, "id":"1"}
+ request = json.dumps(json_rpc_req)
+ self.socket.send_string(request)
+ # Get the reply.
+ message = self.socket.recv_string()
+ message_parsed = json.loads(message)
+ if 'result' in message_parsed.keys():
+ result = message_parsed['result']
+ else:
+ result = {'error':message_parsed['error']}
+ return result
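+    # Example of the JSON-RPC exchange performed above (sketch):
+    #   request:  {"jsonrpc": "2.0", "method": "get_version", "params": [], "id": "1"}
+    #   response: {"jsonrpc": "2.0", "result": {"built_by": "itraviv", "version": "1.01"}, "id": "1"}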
+
+ def get_all(self):
+ return self.call_method('get_all',[])
+
+ def check_update(self,db_md5,field_md5):
+ result = self.call_method('check_update',[db_md5,field_md5])
+ if result!=True:
+ if 'error' in result.keys():
+ if "Fields DB is not up to date" in result['error']['message:']:
+ raise Exception("Fields DB is not up to date")
+ if "Protocol DB is not up to date" in result['error']['message:']:
+ raise Exception("Protocol DB is not up to date")
+ return result
+
+ def build_pkt(self,pkt_descriptor):
+ return self.call_method('build_pkt',[pkt_descriptor])
+
+ def _get_all_pkt_offsets(self,pkt_desc):
+ return self.call_method('_get_all_pkt_offsets',[pkt_desc])
+
+ def _activate_console(self):
+ context = zmq.Context()
+ # Socket to talk to server
+        print('Connecting:')
+ socket = context.socket(zmq.REQ)
+ socket.connect("tcp://"+str(self.server_ip_address)+":"+str(self.dest_scapy_port))
+ try:
+ print('This is a simple console to communicate with Scapy server.\nInvoke supported_methods (with 1 parameter = all) to see supported commands\n')
+ while True:
+ command = raw_input("enter RPC command [enter quit to exit]:\n")
+ if (command == 'quit'):
+ break
+ parameter_num = 0
+ params = []
+ while True:
+ try:
+ parameter_num = int(raw_input('Enter number of parameters to command:\n'))
+ break
+ except Exception:
+ print('Invalid input. Try again')
+ for i in range(1,parameter_num+1,1):
+ print "input parameter %d:" % i
+ user_parameter = raw_input()
+ params.append(user_parameter)
+ pprint_output = raw_input('pprint the output [y/n]? ')
+ while ((pprint_output!= 'y') and (pprint_output!='n')):
+ pprint_output = raw_input('pprint the output [y/n]? ')
+ json_rpc_req = { "jsonrpc":"2.0","method": command ,"params":params, "id":"1"}
+ request = json.dumps(json_rpc_req)
+ print("Sending request in json format %s " % request)
+ socket.send(request)
+
+ # Get the reply.
+ message = socket.recv()
+ print ('received reply:')
+ parsed_message = json.loads(message)
+ if (pprint_output == 'y'):
+ pprint(parsed_message)
+ else:
+                    print(message)
+ except KeyboardInterrupt:
+ print('Terminated By Ctrl+C')
+ finally:
+ socket.close()
+ context.destroy()
+
+
+
+if __name__=='__main__':
+ parser = ArgumentParser(description='Example of client module for Scapy server ')
+ parser.add_argument('-p','--dest-scapy-port',type=int, default = 4507, dest='dest_scapy_port',
+            help='Select the port to which this Scapy Server client will send requests.\n default is 4507\n',action='store')
+ parser.add_argument('-s','--server',type=str, default = 'localhost', dest='dest_scapy_ip',
+ help='Remote server IP address .\n default is localhost\n',action='store')
+ parser.add_argument('-c','--console',
+ help='Run simple client console for Scapy server.\nrun with \'-s\' and \'-p\' to determine IP and port of the server\n',
+ action='store_true',default = False)
+ args = parser.parse_args()
+ if (args.console):
+ s = Scapy_server_wrapper(args.dest_scapy_port,args.dest_scapy_ip)
+ sys.exit(s._activate_console())
+ else:
+        print('Scapy client: for interactive console re-run with \'-c\', else import as separate module.')
+
diff --git a/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_server.py b/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_server.py
new file mode 100755
index 00000000..6489b36a
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/services/scapy_server/scapy_zmq_server.py
@@ -0,0 +1,188 @@
+
+import time
+import sys
+import os
+import traceback
+
+stl_pathname = os.path.abspath(os.path.join(os.pardir, os.pardir))
+if stl_pathname not in sys.path:
+ sys.path.append(stl_pathname)
+from trex_stl_lib.api import *
+import zmq
+import inspect
+from scapy_service import *
+from argparse import *
+import socket
+import logging
+import logging.handlers
+
+
+class ParseException(Exception): pass
+class InvalidRequest(Exception): pass
+class MethodNotFound(Exception): pass
+class InvalidParams(Exception): pass
+
+class Scapy_wrapper:
+ def __init__(self):
+ self.scapy_master = Scapy_service()
+
+ def parse_req_msg(self,JSON_req):
+ try:
+ req = json.loads(JSON_req)
+ req_id='null'
+ if (type(req)!= type({})):
+ raise ParseException(req_id)
+ json_rpc_keys = ['jsonrpc','id','method']
+ if ((set(req.keys())!=set(json_rpc_keys)) and (set(req.keys())!=set(json_rpc_keys+['params']))) :
+ if 'id' in req.keys():
+ req_id = req['id']
+ raise InvalidRequest(req_id)
+ req_id = req['id']
+ if (req['method']=='shut_down'):
+ return 'shut_down',[],req_id
+ if not (self.scapy_master.supported_methods(req['method'])):
+ raise MethodNotFound(req_id)
+ scapy_method = eval("self.scapy_master."+req['method'])
+ arg_num_for_method = len(inspect.getargspec(scapy_method)[0])
+ if (arg_num_for_method>1) :
+ if not ('params' in req.keys()):
+ raise InvalidRequest(req_id)
+ params_len = len(req['params'])+1 # +1 because "self" is considered parameter in args for method
+ if not (params_len==arg_num_for_method):
+ raise InvalidParams(req_id)
+ return req['method'],req['params'],req_id
+ else:
+ return req['method'],[],req_id
+ except ValueError:
+ raise ParseException(req_id)
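+    # A request accepted by the parser above must carry exactly the keys
+    # 'jsonrpc', 'id', 'method' and optionally 'params' (sketch):
+    #   {"jsonrpc": "2.0", "id": "1", "method": "build_pkt", "params": [v_handler, model]}
+    # Malformed requests are rejected with the error codes defined in error_handler below.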
+
+ def create_error_response(self,error_code,error_msg,req_id):
+ return {"jsonrpc": "2.0", "error": {"code": error_code, "message": error_msg}, "id": req_id}
+
+ def create_success_response(self,result,req_id):
+ return {"jsonrpc": "2.0", "result": result, "id": req_id }
+
+ def get_exception(self):
+ return sys.exc_info()
+
+
+ def execute(self,method,params):
+ if len(params)>0:
+ result = eval('self.scapy_master.'+method+'(*'+str(params)+')')
+ else:
+ result = eval('self.scapy_master.'+method+'()')
+ return result
+
+
+ def error_handler(self,e,req_id):
+ response = []
+ try:
+ raise e
+ except ParseException as e:
+ response = self.create_error_response(-32700,'Parse error ',req_id)
+ except InvalidRequest as e:
+ response = self.create_error_response(-32600,'Invalid Request',req_id)
+ except MethodNotFound as e:
+ response = self.create_error_response(-32601,'Method not found',req_id)
+ except InvalidParams as e:
+ response = self.create_error_response(-32603,'Invalid params',req_id)
+ except SyntaxError as e:
+ response = self.create_error_response(-32097,'SyntaxError',req_id)
+ except Exception as e:
+ if hasattr(e,'message'):
+ response = self.create_error_response(-32098,'Scapy Server: '+str(e.message),req_id)
+ else:
+ response = self.create_error_response(-32096,'Scapy Server: Unknown Error',req_id)
+ finally:
+ return response
+
+class Scapy_server():
+ def __init__(self, args,port=4507):
+ self.scapy_wrapper = Scapy_wrapper()
+ self.port = port
+ self.context = zmq.Context()
+ self.socket = self.context.socket(zmq.REP)
+ self.socket.bind("tcp://*:"+str(port))
+ self.IP_address = socket.gethostbyname(socket.gethostname())
+ self.logger = logging.getLogger('scapy_logger')
+ self.logger.setLevel(logging.INFO)
+ console_h = logging.StreamHandler(sys.__stdout__)
+ formatter = logging.Formatter(fmt='%(asctime)s %(message)s',datefmt='%d-%m-%Y %H:%M:%S')
+ if args.log:
+ logfile_h = logging.FileHandler('scapy_server.log')
+ logfile_h.setLevel(logging.INFO)
+ logfile_h.setFormatter(formatter)
+ self.logger.addHandler(logfile_h)
+ if args.verbose:
+ console_h.setLevel(logging.INFO)
+ else:
+ console_h.setLevel(logging.WARNING)
+ console_h.setFormatter(formatter)
+ self.logger.addHandler(console_h)
+
+
+ def activate(self):
+ self.logger.info('***Scapy Server Started***')
+ self.logger.info('Listening on port: %d' % self.port)
+ self.logger.info('Server IP address: %s' % self.IP_address)
+ try:
+ while True:
+ message = self.socket.recv_string()
+ self.logger.info('Received Message: %s' % message)
+ try:
+ params = []
+ method=''
+ req_id = 'null'
+ method,params,req_id = self.scapy_wrapper.parse_req_msg(message)
+ if (method == 'shut_down'):
+ self.logger.info('Shut down by remote user')
+                        result = 'Server shut down command received - server has shut down'
+ else:
+ result = self.scapy_wrapper.execute(method,params)
+ response = self.scapy_wrapper.create_success_response(result,req_id)
+ except Exception as e:
+ response = self.scapy_wrapper.error_handler(e,req_id)
+ self.logger.info('ERROR %s: %s',response['error']['code'], response['error']['message'])
+ self.logger.info('Exception info: %s' % traceback.format_exc())
+ finally:
+ try:
+ json_response = json.dumps(response)
+ self.logger.info('Sending Message: %s' % json_response)
+ except Exception as e:
+                        # rare case when json can not be serialized due to encoding issues
+ # object is not JSON serializable
+ self.logger.error('Unexpected Error: %s' % traceback.format_exc())
+ json_response = json.dumps(self.scapy_wrapper.error_handler(e,req_id))
+
+ # Send reply back to client
+ self.socket.send_string(json_response)
+ if (method == 'shut_down'):
+ break
+
+ except KeyboardInterrupt:
+            self.logger.info('Terminated by local user')
+
+ finally:
+ self.socket.close()
+ self.context.destroy()
+
+
+
+#arg1 is port number for the server to listen to
+def main(args,port):
+ s = Scapy_server(args,port)
+ s.activate()
+
+if __name__=='__main__':
+
+ parser = ArgumentParser(description=' Runs Scapy Server ')
+ parser.add_argument('-s','--scapy-port',type=int, default = 4507, dest='scapy_port',
+            help='Select the port on which the Scapy Server will listen.\n default is 4507.',action='store')
+ parser.add_argument('-v','--verbose',help='Print Client-Server Request-Reply information to console.',action='store_true',default = False)
+    parser.add_argument('-l','--log',help='Log every activity of the server to the log file scapy_server.log. The log does not discard older entries and the file is not limited in size.',
+ action='store_true',default = False)
+ args = parser.parse_args()
+ port = args.scapy_port
+ sys.exit(main(args,port))
+
+
diff --git a/scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/basetest.py b/scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/basetest.py
new file mode 100644
index 00000000..17dd304a
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/basetest.py
@@ -0,0 +1,84 @@
+import os
+import sys
+import json
+import base64
+import inspect
+from inspect import getcallargs
+# add paths to scapy_service and trex_stl_lib.api
+sys.path.append(os.path.abspath(os.pardir))
+sys.path.append(os.path.abspath(os.path.join(os.pardir, os.pardir, os.pardir)))
+
+from scapy_service import *
+from scapy.all import *
+
+service = Scapy_service()
+v_handler = service.get_version_handler('1','01')
+
+def pretty_json(obj):
+ return json.dumps(obj, indent=4)
+
+def pprint(obj):
+ print(pretty_json(obj))
+
+def is_verbose():
+ return True
+
+def pass_result(result, *args):
+ # returns result unchanged, but can display debug info if enabled
+ if is_verbose():
+ fargs = (inspect.stack()[-1][4])
+ print(fargs[0])
+ pprint(result)
+ return result
+
+def pass_pkt(result):
+ # returns packet unchanged, but can display debug info if enabled
+ if is_verbose() and result is not None:
+ result.show2()
+ return result
+
+# utility functions for tests
+
+def layer_def(layerId, **layerfields):
+ # test helper method to generate JSON-like protocol definition object for scapy
+ # ex. { "id": "Ether", "fields": [ { "id": "dst", "value": "10:10:10:10:10:10" } ] }
+ res = { "id": layerId }
+ if layerfields:
+ res["fields"] = [ {"id": k, "value": v} for k,v in layerfields.items() ]
+ return res
+
+def get_version_handler():
+ return pass_result(service.get_version_handler("1", "01"))
+
+def build_pkt(model_def):
+ return pass_result(service.build_pkt(v_handler, model_def))
+
+def build_pkt_get_scapy(model_def):
+ return build_pkt_to_scapy(build_pkt(model_def))
+
+def reconstruct_pkt(bytes_b64, model_def):
+ return pass_result(service.reconstruct_pkt(v_handler, bytes_b64, model_def))
+
+def get_definitions(def_filter):
+ return pass_result(service.get_definitions(v_handler, def_filter))
+
+def get_payload_classes(def_filter):
+ return pass_result(service.get_payload_classes(v_handler, def_filter))
+
+def build_pkt_to_scapy(buildpkt_result):
+ return pass_pkt(Ether(b64_to_bytes(buildpkt_result['binary'])))
+
+def fields_to_map(field_array):
+ # [{id, value, hvalue, offset}, ...] to map id -> {value, hvalue, offset}
+ res = {}
+ if field_array:
+ for f in field_array:
+ res[ f["id"] ] = f
+ return res
+
+def adapt_json_protocol_fields(protocols_array):
+ # replaces layer.fields(array) with map for easier access in tests
+ for protocol in protocols_array:
+        # change structure for easier field access
+ if protocol.get("fields"):
+ protocol["fields"] = fields_to_map(protocol["fields"])
diff --git a/scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/test_scapy_service.py b/scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/test_scapy_service.py
new file mode 100644
index 00000000..9cd473d7
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/services/scapy_server/unit_tests/test_scapy_service.py
@@ -0,0 +1,155 @@
+#
+# run with 'nosetests' utility
+
+import tempfile
+import re
+from basetest import *
+
+RE_MAC = "^([0-9A-Fa-f]{2}:){5}([0-9A-Fa-f]{2})$"
+
+TEST_MAC_1 = "10:10:10:10:10:10"
+# Test scapy structure
+TEST_PKT = Ether(dst=TEST_MAC_1)/IP(src='127.0.0.1')/TCP(sport=443)
+
+# Corresponding JSON-like structure
+TEST_PKT_DEF = [
+ layer_def("Ether", dst=TEST_MAC_1),
+ layer_def("IP", dst="127.0.0.1"),
+ layer_def("TCP", sport="443")
+ ]
+
+def test_build_pkt():
+ pkt = build_pkt_get_scapy(TEST_PKT_DEF)
+ assert(pkt[TCP].sport == 443)
+
+def test_build_invalid_structure_pkt():
+ ether_fields = {"dst": TEST_MAC_1, "type": "LOOP"}
+ pkt = build_pkt_get_scapy([
+ layer_def("Ether", **ether_fields),
+ layer_def("IP"),
+ layer_def("TCP", sport=8080)
+ ])
+ assert(pkt[Ether].dst == TEST_MAC_1)
+ assert(isinstance(pkt[Ether].payload, Raw))
+
+def test_reconstruct_pkt():
+ res = reconstruct_pkt(base64.b64encode(bytes(TEST_PKT)), None)
+ pkt = build_pkt_to_scapy(res)
+ assert(pkt[TCP].sport == 443)
+
+def test_layer_del():
+ modif = [
+ {"id": "Ether"},
+ {"id": "IP"},
+ {"id": "TCP", "delete": True},
+ ]
+ res = reconstruct_pkt(base64.b64encode(bytes(TEST_PKT)), modif)
+ pkt = build_pkt_to_scapy(res)
+ assert(not pkt[IP].payload)
+
+def test_layer_field_edit():
+ modif = [
+ {"id": "Ether"},
+ {"id": "IP"},
+ {"id": "TCP", "fields": [{"id": "dport", "value": 777}]},
+ ]
+ res = reconstruct_pkt(base64.b64encode(bytes(TEST_PKT)), modif)
+ pkt = build_pkt_to_scapy(res)
+ assert(pkt[TCP].dport == 777)
+ assert(pkt[TCP].sport == 443)
+
+def test_layer_add():
+ modif = [
+ {"id": "Ether"},
+ {"id": "IP"},
+ {"id": "TCP"},
+ {"id": "Raw", "fields": [{"id": "load", "value": "GET /helloworld HTTP/1.0\n\n"}]},
+ ]
+ res = reconstruct_pkt(base64.b64encode(bytes(TEST_PKT)), modif)
+ pkt = build_pkt_to_scapy(res)
+ assert("GET /helloworld" in str(pkt[TCP].payload.load))
+
+def test_build_Raw():
+ pkt = build_pkt_get_scapy([
+ layer_def("Ether"),
+ layer_def("IP"),
+ layer_def("TCP"),
+ layer_def("Raw", load={"vtype": "BYTES", "base64": bytes_to_b64(b"hi")})
+ ])
+    assert(pkt[Raw].load == b"hi")
+
+def test_get_all():
+ service.get_all(v_handler)
+
+def test_get_definitions_all():
+ get_definitions(None)
+ def_classnames = [pdef['id'] for pdef in get_definitions(None)['protocols']]
+ assert("IP" in def_classnames)
+ assert("Dot1Q" in def_classnames)
+ assert("TCP" in def_classnames)
+
+def test_get_definitions_ether():
+ res = get_definitions(["Ether"])
+ assert(len(res) == 1)
+ assert(res['protocols'][0]['id'] == "Ether")
+
+def test_get_payload_classes():
+ eth_payloads = get_payload_classes([{"id":"Ether"}])
+ assert("IP" in eth_payloads)
+ assert("Dot1Q" in eth_payloads)
+ assert("TCP" not in eth_payloads)
+
+def test_pcap_read_and_write():
+ pkts_to_write = [bytes_to_b64(bytes(TEST_PKT))]
+ pcap_b64 = service.write_pcap(v_handler, pkts_to_write)
+ array_pkt = service.read_pcap(v_handler, pcap_b64)
+ pkt = build_pkt_to_scapy(array_pkt[0])
+ assert(pkt[Ether].dst == TEST_MAC_1)
+
+def test_layer_default_value():
+ res = build_pkt([
+ layer_def("Ether", src={"vtype": "UNDEFINED"})
+ ])
+ ether_fields = fields_to_map(res['data'][0]['fields'])
+ assert(re.match(RE_MAC, ether_fields['src']['value']))
+
+def test_layer_random_value():
+ res = build_pkt([
+ layer_def("Ether", src={"vtype": "RANDOM"})
+ ])
+ ether_fields = fields_to_map(res['data'][0]['fields'])
+ assert(re.match(RE_MAC, ether_fields['src']['value']))
+
+def test_layer_wrong_structure():
+ payload = [
+ layer_def("Ether"),
+ layer_def("IP"),
+ layer_def("Raw", load="dummy"),
+ layer_def("Ether"),
+ layer_def("IP"),
+ ]
+ res = build_pkt(payload)
+ pkt = build_pkt_to_scapy(res)
+ assert(type(pkt[0]) is Ether)
+ assert(type(pkt[1]) is IP)
+ assert(isinstance(pkt[2], Raw))
+ assert(not pkt[2].payload)
+ model = res["data"]
+ assert(len(payload) == len(model))
+ # verify same protocol structure as in abstract model
+ # and all fields defined
+ for depth in range(len(payload)):
+ layer_model = model[depth]
+ layer_fields = fields_to_map(layer_model["fields"])
+ assert(payload[depth]["id"] == model[depth]["id"])
+ for field in layer_model["fields"]:
+ required_field_properties = ["value", "hvalue", "offset"]
+ for field_property in required_field_properties:
+ assert(field[field_property] is not None)
+ if (model[depth]["id"] == "Ether"):
+ assert(layer_fields["type"]["hvalue"] == "IPv4")
+ real_structure = [layer["real_id"] for layer in model]
+ valid_structure_flags = [layer["valid_structure"] for layer in model]
+ assert(real_structure == ["Ether", "IP", "Raw", None, None])
+ assert(valid_structure_flags == [True, True, True, False, False])
+
diff --git a/scripts/automation/trex_control_plane/stl/services/scapy_server/zmq_for_scapy_server_test.py b/scripts/automation/trex_control_plane/stl/services/scapy_server/zmq_for_scapy_server_test.py
new file mode 100755
index 00000000..8f7f7b01
--- /dev/null
+++ b/scripts/automation/trex_control_plane/stl/services/scapy_server/zmq_for_scapy_server_test.py
@@ -0,0 +1,14 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+