author    Viliam Luc <vluc@cisco.com>    2021-08-02 14:25:26 +0200
committer Viliam Luc <vluc@cisco.com>    2021-09-10 10:26:24 +0200
commit    ec467277744783015a2da6713298b35d13d92e6f (patch)
tree      57b1bb084f5ae9c6346be5a92f8fe5fac38f1cd4
parent    9cceefae3248e9a1e9e5586391c9263cf114a753 (diff)
back-to-back tests: add TG tests
Change-Id: I9d028294deb1e31b6d185deb1c7523e0226a0ada
Signed-off-by: Viliam Luc <vluc@cisco.com>
-rw-r--r--  docs/tag_documentation.rst                                            14
-rw-r--r--  resources/libraries/bash/function/gather.sh                           14
-rw-r--r--  resources/libraries/python/NodePath.py                                43
-rw-r--r--  resources/libraries/python/autogen/Regenerator.py                     82
-rw-r--r--  resources/libraries/python/autogen/Testcase.py                        27
-rw-r--r--  resources/libraries/python/topology.py                                55
-rw-r--r--  resources/libraries/robot/shared/suite_setup.robot                    32
-rw-r--r--  tests/trex/perf/__init__.robot                                        41
-rwxr-xr-x  tests/trex/perf/ip4/1n1l-10ge2p1x710-ethip4-ip4base-tg-ndrpdr.robot  84
-rwxr-xr-x  tests/trex/perf/ip4/regenerate_testcases.py                           18
10 files changed, 381 insertions, 29 deletions
diff --git a/docs/tag_documentation.rst b/docs/tag_documentation.rst
index b7cab943c3..b2130c1f28 100644
--- a/docs/tag_documentation.rst
+++ b/docs/tag_documentation.rst
@@ -297,6 +297,10 @@ Test Type Tags
Tests which use any kind of TCP traffic (STL or ASTF profile).
+.. topic:: TREX
+
+ Tests which send TRex traffic without any software DUTs in the traffic path.
+
..
TODO: Should we define tags STL and ASTF?
@@ -430,6 +434,11 @@ Forwarding Mode Tags
VPP Load balancer nat4 mode.
+.. topic:: N2N
+
+ Mode where NICs of the same physical server are directly
+ connected with a cable.
+
Underlay Tags
-------------
@@ -638,6 +647,11 @@ Interface Tags
All test cases which NIC Driver for DUT is set to {d}. Default is VFIO_PCI.
{d}=(AVF, RDMA_CORE, VFIO_PCI, AF_XDP).
+.. topic:: TG_DRV_{d}
+
+ All test cases which NIC Driver for TG is set to {d}. Default is IGB_UIO.
+ {d}=(RDMA_CORE, IGB_UIO).
+
.. topic:: RXQ_SIZE_{n}
All test cases which RXQ size (RX descriptors) are set to {n}. Default is 0,
diff --git a/resources/libraries/bash/function/gather.sh b/resources/libraries/bash/function/gather.sh
index e3a6a9d150..4958e5251b 100644
--- a/resources/libraries/bash/function/gather.sh
+++ b/resources/libraries/bash/function/gather.sh
@@ -57,6 +57,10 @@ function gather_build () {
DUT="dpdk"
gather_dpdk || die "The function should have died on error."
;;
+ *"trex"*)
+ DUT="trex"
+ gather_trex || die "The function should have died on error."
+ ;;
*)
die "Unable to identify DUT type from: ${TEST_CODE}"
;;
@@ -110,6 +114,16 @@ function gather_dpdk () {
fi
}
+function gather_trex () {
+
+ # This function is required only to bypass the download dir check.
+ # Currently it just creates an empty file in the download dir.
+ # TODO: Add required packages.
+
+ set -exuo pipefail
+
+ touch trex-download-to-be-added.txt
+}
function gather_vpp () {
diff --git a/resources/libraries/python/NodePath.py b/resources/libraries/python/NodePath.py
index cc91c4a2f5..d1f974aafe 100644
--- a/resources/libraries/python/NodePath.py
+++ b/resources/libraries/python/NodePath.py
@@ -92,7 +92,7 @@ class NodePath:
self._path = []
self._path_iter = []
- def compute_path(self, always_same_link=True):
+ def compute_path(self, always_same_link=True, topo_has_dut=True):
"""Compute path for added nodes.
.. note:: First add at least two nodes to the topology.
@@ -100,19 +100,26 @@ class NodePath:
:param always_same_link: If True use always same link between two nodes
in path. If False use different link (if available)
between two nodes if one link was used before.
+ :param topo_has_dut: If False, compute a TG-only back-to-back path (no DUT node).
:type always_same_link: bool
+ :type topo_has_dut: bool
:raises RuntimeError: If not enough nodes for path.
"""
nodes = self._nodes
- if len(nodes) < 2:
+ if len(nodes) < 2 and topo_has_dut:
raise RuntimeError(u"Not enough nodes to compute path")
for idx in range(0, len(nodes) - 1):
topo = Topology()
node1 = nodes[idx]
- node2 = nodes[idx + 1]
n1_list = self._nodes_filter[idx]
- n2_list = self._nodes_filter[idx + 1]
+ if topo_has_dut:
+ node2 = nodes[idx + 1]
+ n2_list = self._nodes_filter[idx + 1]
+ else:
+ node2 = node1
+ n2_list = n1_list
+
links = topo.get_active_connecting_links(
node1, node2, filter_list_node1=n1_list,
filter_list_node2=n2_list
@@ -139,8 +146,11 @@ class NodePath:
link = l_set[0]
self._links.append(link)
+
+ use_subsequent = not topo_has_dut
interface1 = topo.get_interface_by_link_name(node1, link)
- interface2 = topo.get_interface_by_link_name(node2, link)
+ interface2 = topo.get_interface_by_link_name(node2, link,
+ use_subsequent)
self._path.append((interface1, node1))
self._path.append((interface2, node2))
@@ -208,7 +218,7 @@ class NodePath:
return self._path[-2]
def compute_circular_topology(self, nodes, filter_list=None, nic_pfs=1,
- always_same_link=False, topo_has_tg=True):
+ always_same_link=False, topo_has_tg=True, topo_has_dut=True):
"""Return computed circular path.
:param nodes: Nodes to append to the path.
@@ -219,29 +229,34 @@ class NodePath:
between two nodes if one link was used before.
:param topo_has_tg: If True, the topology has a TG node. If False,
the topology consists entirely of DUT nodes.
+ :param topo_has_dut: If True, the topology has DUT node(s). If False,
+ the topology consists entirely of TG nodes.
:type nodes: dict
:type filter_list: list of strings
:type nic_pfs: int
:type always_same_link: bool
:type topo_has_tg: bool
+ :type topo_has_dut: bool
:returns: Topology information dictionary.
:rtype: dict
:raises RuntimeError: If unsupported combination of parameters.
"""
t_dict = dict()
- duts = [key for key in nodes if u"DUT" in key]
- t_dict[u"duts"] = duts
- t_dict[u"duts_count"] = len(duts)
- t_dict[u"int"] = u"pf"
+ if topo_has_dut:
+ duts = [key for key in nodes if u"DUT" in key]
+ t_dict[u"duts"] = duts
+ t_dict[u"duts_count"] = len(duts)
+ t_dict[u"int"] = u"pf"
for _ in range(0, nic_pfs // 2):
if topo_has_tg:
self.append_node(nodes[u"TG"])
- for dut in duts:
- self.append_node(nodes[dut], filter_list=filter_list)
+ if topo_has_dut:
+ for dut in duts:
+ self.append_node(nodes[dut], filter_list=filter_list)
if topo_has_tg:
self.append_node(nodes[u"TG"])
- self.compute_path(always_same_link)
+ self.compute_path(always_same_link, topo_has_dut)
n_idx = 0 # node index
t_idx = 1 # TG interface index
@@ -257,7 +272,7 @@ class NodePath:
i_pfx = f"if{t_idx}" # [backwards compatible] interface prefix
n_idx = 0
t_idx = t_idx + 1
- elif topo_has_tg:
+ elif topo_has_tg and topo_has_dut:
# Each node has 2 interfaces, starting with 1
# Calculate prefixes appropriately for current
# path topology nomenclature:
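
Review note: a minimal sketch (not part of the patch) of how the new topo_has_dut flag is expected to be driven from suite setup for a TG-only, back-to-back topology. The names nodes and nic_model_list are assumptions here, mirroring the new Robot keyword "Setup suite topology interfaces with no DUT" further below.

    # Sketch only: TG-only (back-to-back) path computation with the new flag.
    from resources.libraries.python.NodePath import NodePath

    path = NodePath()
    # nodes, nic_model_list: as provided by the Robot suite setup (assumed).
    info = path.compute_circular_topology(
        nodes, filter_list=nic_model_list, nic_pfs=2,
        always_same_link=True, topo_has_tg=True, topo_has_dut=False
    )
    # With topo_has_dut=False the TG node is appended for both ends of the
    # path, compute_path() pairs the node with itself, and the second end of
    # the shared link is resolved via get_interface_by_link_name(...,
    # subsequent=True), so both TG ports of one back-to-back link end up in
    # the returned topology dict consumed by "Create suite topology variables".
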
diff --git a/resources/libraries/python/autogen/Regenerator.py b/resources/libraries/python/autogen/Regenerator.py
index 7003d3905d..14a9fd0ddb 100644
--- a/resources/libraries/python/autogen/Regenerator.py
+++ b/resources/libraries/python/autogen/Regenerator.py
@@ -195,6 +195,20 @@ def add_iperf3_testcases(testcase, file_out, tc_kwargs_list):
file_out.write(testcase.generate(**kwargs))
+def add_trex_testcases(testcase, file_out, tc_kwargs_list):
+ """Add trex testcases to file.
+
+ :param testcase: Testcase class.
+ :param file_out: File to write testcases to.
+ :param tc_kwargs_list: List of kwargs used to construct testcases.
+ :type testcase: Testcase
+ :type file_out: file
+ :type tc_kwargs_list: list of dict
+ """
+ for kwargs in tc_kwargs_list:
+ file_out.write(testcase.generate(**kwargs))
+
+
def write_default_files(in_filename, in_prolog, kwargs_list):
"""Using given filename and prolog, write all generated suites.
@@ -480,6 +494,64 @@ def write_iperf3_files(in_filename, in_prolog, kwargs_list):
add_iperf3_testcases(testcase, file_out, kwargs_list)
+def write_trex_files(in_filename, in_prolog, kwargs_list):
+ """Using given filename and prolog, write all generated trex suites.
+
+ :param in_filename: Template filename to derive real filenames from.
+ :param in_prolog: Template content to derive real content from.
+ :param kwargs_list: List of kwargs for add_trex_testcases.
+ :type in_filename: str
+ :type in_prolog: str
+ :type kwargs_list: list of dict
+ """
+ for suite_type in Constants.PERF_TYPE_TO_KEYWORD:
+ tmp_filename = replace_defensively(
+ in_filename, u"ndrpdr", suite_type, 1,
+ u"File name should contain suite type once.", in_filename
+ )
+ tmp_prolog = replace_defensively(
+ in_prolog, u"ndrpdr".upper(), suite_type.upper(), 1,
+ u"Suite type should appear once in uppercase (as tag).",
+ in_filename
+ )
+ tmp_prolog = replace_defensively(
+ tmp_prolog,
+ u"Find NDR and PDR intervals using optimized search",
+ Constants.PERF_TYPE_TO_KEYWORD[suite_type], 1,
+ u"Main search keyword should appear once in suite.",
+ in_filename
+ )
+ tmp_prolog = replace_defensively(
+ tmp_prolog,
+ Constants.PERF_TYPE_TO_SUITE_DOC_VER[u"ndrpdr"],
+ Constants.PERF_TYPE_TO_SUITE_DOC_VER[suite_type],
+ 1, u"Exact suite type doc not found.", in_filename
+ )
+ tmp_prolog = replace_defensively(
+ tmp_prolog,
+ Constants.PERF_TYPE_TO_TEMPLATE_DOC_VER[u"ndrpdr"],
+ Constants.PERF_TYPE_TO_TEMPLATE_DOC_VER[suite_type],
+ 1, u"Exact template type doc not found.", in_filename
+ )
+ _, suite_id, suite_tag = get_iface_and_suite_ids(tmp_filename)
+ testcase = Testcase.trex(suite_id)
+ for nic_name in Constants.NIC_NAME_TO_CODE:
+ out_filename = replace_defensively(
+ tmp_filename, u"10ge2p1x710",
+ Constants.NIC_NAME_TO_CODE[nic_name], 1,
+ u"File name should contain NIC code once.", in_filename
+ )
+ out_prolog = replace_defensively(
+ tmp_prolog, u"Intel-X710", nic_name, 2,
+ u"NIC name should appear twice (tag and variable).",
+ in_filename
+ )
+ check_suite_tag(suite_tag, out_prolog)
+ with open(out_filename, u"wt") as file_out:
+ file_out.write(out_prolog)
+ add_trex_testcases(testcase, file_out, kwargs_list)
+
+
def write_device_files(in_filename, in_prolog, kwargs_list):
"""Using given filename and prolog, write all generated suites.
@@ -637,6 +709,13 @@ class Regenerator:
{u"frame_size": min_frame_size, u"phy_cores": 0}
]
+ trex_kwargs_list = [
+ {u"frame_size": min_frame_size},
+ {u"frame_size": 1518},
+ {u"frame_size": 9000},
+ {u"frame_size": u"IMIX_v4_1"}
+ ]
+
for in_filename in glob(pattern):
if not self.quiet:
print(
@@ -656,6 +735,9 @@ class Regenerator:
in_prolog = u"".join(
file_in.read().partition(u"*** Test Cases ***")[:-1]
)
+ if "-tg-" in in_filename:
+ write_trex_files(in_filename, in_prolog, trex_kwargs_list)
+ continue
if in_filename.endswith(u"-ndrpdr.robot"):
if u"scheduler" in in_filename:
write_default_files(
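
Review note: a hedged, self-contained sketch (not part of the patch) exercising the new add_trex_testcases() plumbing with an in-memory file instead of a real suite file; the suite id and kwargs mirror trex_kwargs_list and the template suite added in this change.

    # Sketch only: generate trex testcase text without touching real files.
    import io

    from resources.libraries.python.autogen.Regenerator import add_trex_testcases
    from resources.libraries.python.autogen.Testcase import Testcase

    testcase = Testcase.trex(u"ethip4-ip4base-tg-ndrpdr")
    kwargs_list = [{u"frame_size": 64}, {u"frame_size": u"IMIX_v4_1"}]
    buf = io.StringIO()
    add_trex_testcases(testcase, buf, kwargs_list)
    # buf now holds two testcases, matching the suite file added below:
    #   | 64B--ethip4-ip4base-tg-ndrpdr
    #   | | [Tags] | 64B
    #   | | frame_size=${64}
    #   | IMIX--ethip4-ip4base-tg-ndrpdr
    #   | | [Tags] | IMIX
    #   | | frame_size=IMIX_v4_1
    print(buf.getvalue())
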
diff --git a/resources/libraries/python/autogen/Testcase.py b/resources/libraries/python/autogen/Testcase.py
index 643d32a3cb..32fc5014cc 100644
--- a/resources/libraries/python/autogen/Testcase.py
+++ b/resources/libraries/python/autogen/Testcase.py
@@ -33,16 +33,17 @@ class Testcase:
"""
self.template = Template(template_string)
- def generate(self, frame_size, phy_cores):
+ def generate(self, frame_size, phy_cores=None):
"""Return string of test case code with placeholders filled.
Fail if there are placeholders left unfilled.
It is not required for all placeholders to be present in template.
:param frame_size: Imix string or numeric frame size. Example: 74.
- :param phy_cores: Number of physical cores to use. Example: 2.
+ :param phy_cores: Number of physical cores to use. Example: 2. It can
+ be None in n2n testcases.
:type frame_size: str or int
- :type phy_cores: int or str
+ :type phy_cores: int, str or None
:returns: Filled template, usable as test case code.
:rtype: str
"""
@@ -57,6 +58,8 @@ class Testcase:
u"frame_num": str(frame_size),
u"frame_str": u"IMIX"
}
+ if phy_cores is None:
+ return self.template.substitute(subst_dict)
cores_str = str(phy_cores)
cores_num = int(cores_str)
subst_dict.update(
@@ -136,3 +139,21 @@ class Testcase:
| | frame_size=${{frame_num}} | phy_cores=${{cores_num}}
'''
return cls(template_string)
+
+ @classmethod
+ def trex(cls, suite_id):
+ """Factory method for creating "trex" testcase objects.
+
+ Testcase name will contain frame size, but not core count.
+
+ :param suite_id: Part of suite name to distinguish from other suites.
+ :type suite_id: str
+ :returns: Instance for generating testcase text of this type.
+ :rtype: Testcase
+ """
+ template_string = f'''
+| ${{frame_str}}--{suite_id}
+| | [Tags] | ${{frame_str}}
+| | frame_size=${{frame_num}}
+'''
+ return cls(template_string)
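
Review note: a short sketch (not part of the patch) of the phy_cores behaviour change above. Whether the existing DUT-oriented factory is named exactly Testcase.default is an assumption here; the point is only that templates containing ${cores_str}/${cores_num} must keep passing phy_cores, while the new trex template may omit it.

    # Sketch only: the new early return lets trex testcases omit phy_cores.
    from resources.libraries.python.autogen.Testcase import Testcase

    trex_tc = Testcase.trex(u"ethip4-ip4base-tg-ndrpdr")
    trex_tc.generate(frame_size=9000)   # ok, phy_cores defaults to None
    # A template that still contains ${cores_str}/${cores_num} placeholders
    # (e.g. the existing DUT-oriented factory, assumed to be Testcase.default)
    # would raise KeyError from Template.substitute() if phy_cores were None,
    # which is why the early return happens before the cores substitution.
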
diff --git a/resources/libraries/python/topology.py b/resources/libraries/python/topology.py
index e05f4ef732..fba2dcb189 100644
--- a/resources/libraries/python/topology.py
+++ b/resources/libraries/python/topology.py
@@ -377,16 +377,19 @@ class Topology:
return links
@staticmethod
- def _get_interface_by_key_value(node, key, value):
+ def _get_interface_by_key_value(node, key, value, subsequent=False):
"""Return node interface key from topology file
according to key and value.
:param node: The node dictionary.
:param key: Key by which to select the interface.
:param value: Value that should be found using the key.
+ :param subsequent: Use second interface of the link. Useful for
+ back-to-back links. Default: False
:type node: dict
:type key: string
:type value: string
+ :type subsequent: bool
:returns: Interface key from topology file
:rtype: string
"""
@@ -396,8 +399,11 @@ class Topology:
k_val = if_val.get(key)
if k_val is not None:
if k_val == value:
- retval = if_key
- break
+ if subsequent:
+ subsequent = False
+ else:
+ retval = if_key
+ break
return retval
@staticmethod
@@ -417,7 +423,7 @@ class Topology:
return Topology._get_interface_by_key_value(node, u"name", iface_name)
@staticmethod
- def get_interface_by_link_name(node, link_name):
+ def get_interface_by_link_name(node, link_name, subsequent=False):
"""Return interface key of link on node.
This method returns the interface name associated with a given link
@@ -425,12 +431,17 @@ class Topology:
:param node: The node topology dictionary.
:param link_name: Name of the link that a interface is connected to.
+ :param subsequent: Use second interface of the link. Useful for
+ back-to-back links. Default: False
:type node: dict
:type link_name: string
+ :type subsequent: bool
:returns: Interface key of the interface connected to the given link.
:rtype: str
"""
- return Topology._get_interface_by_key_value(node, u"link", link_name)
+ return Topology._get_interface_by_key_value(
+ node, u"link", link_name, subsequent=subsequent
+ )
def get_interfaces_by_link_names(self, node, link_names):
"""Return dictionary of dictionaries {"interfaceN", interface name}.
@@ -836,13 +847,15 @@ class Topology:
return None
@staticmethod
- def _get_node_active_link_names(node, filter_list=None):
+ def _get_node_active_link_names(node, filter_list=None, topo_has_dut=True):
"""Return list of link names that are other than mgmt links.
:param node: Node topology dictionary.
:param filter_list: Link filter criteria.
+ :param topo_has_dut: If False, return only back-to-back links (links that appear twice on the node).
:type node: dict
:type filter_list: list of strings
+ :type topo_has_dut: bool
:returns: List of link names occupied by the node.
:rtype: None or list of string
"""
@@ -862,6 +875,17 @@ class Topology:
link_names.append(interface[u"link"])
if not link_names:
link_names = None
+ if not topo_has_dut:
+ new_link_names = list()
+ for link_name in link_names:
+ count = 0
+ for interface in interfaces.values():
+ link = interface.get(u"link", None)
+ if link == link_name:
+ count += 1
+ if count == 2:
+ new_link_names.append(link_name)
+ link_names = new_link_names
return link_names
def get_active_connecting_links(
@@ -880,12 +904,19 @@ class Topology:
:rtype: list
"""
- node1_links = self._get_node_active_link_names(
- node1, filter_list=filter_list_node1
- )
- node2_links = self._get_node_active_link_names(
- node2, filter_list=filter_list_node2
- )
+ if node1 != node2:
+ node1_links = self._get_node_active_link_names(
+ node1, filter_list=filter_list_node1
+ )
+ node2_links = self._get_node_active_link_names(
+ node2, filter_list=filter_list_node2
+ )
+ else:
+ # Looking for back-to-back links.
+ node1_links = self._get_node_active_link_names(
+ node1, filter_list=filter_list_node1, topo_has_dut=False
+ )
+ node2_links = node1_links
connecting_links = None
if node1_links is None:
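
Review note: a toy example (not part of the patch) of what the topology.py changes detect. The node dict below is made up; its shape follows the topology YAML loaded into nodes, with both TG ports sharing one link name because they are patched back-to-back.

    # Toy data only: a TG whose two ports are cabled to each other.
    from resources.libraries.python.topology import Topology

    tg_node = {
        u"type": u"TG",
        u"interfaces": {
            u"port1": {u"name": u"ens5f0", u"model": u"Intel-X710",
                       u"link": u"link1"},
            u"port2": {u"name": u"ens5f1", u"model": u"Intel-X710",
                       u"link": u"link1"},
        },
    }
    # get_active_connecting_links(tg_node, tg_node) now takes the node1 == node2
    # branch and keeps only links occurring twice on the node, i.e. [u"link1"].
    # The two ends of that link are then resolved as:
    if1 = Topology.get_interface_by_link_name(tg_node, u"link1")
    if2 = Topology.get_interface_by_link_name(tg_node, u"link1", subsequent=True)
    # if1 == u"port1", if2 == u"port2" (the first match is skipped once).
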
diff --git a/resources/libraries/robot/shared/suite_setup.robot b/resources/libraries/robot/shared/suite_setup.robot
index 6e1136761c..26d7f52205 100644
--- a/resources/libraries/robot/shared/suite_setup.robot
+++ b/resources/libraries/robot/shared/suite_setup.robot
@@ -111,6 +111,27 @@
| | Set suite variable | &{topology_info} | &{info}
| | Create suite topology variables | @{actions}
+| Setup suite topology interfaces with no DUT
+| | [Documentation]
+| | ... | Common suite setup for single link tests with no device under test
+| | ... | node.
+| | ... |
+| | ... | Compute path for testing on given topology nodes in circular topology
+| | ... | based on interface model provided as an argument and set
+| | ... | corresponding suite variables.
+| |
+| | ... | *Arguments:*
+| | ... | - ${actions} - Additional setup action. Type: list
+| |
+| | [Arguments] | @{actions}
+| |
+| | ${nic_model_list}= | Create list | ${nic_name}
+| | &{info}= | Compute Circular Topology
+| | ... | ${nodes} | filter_list=${nic_model_list} | nic_pfs=${nic_pfs}
+| | ... | always_same_link=${True} | topo_has_tg=${True} | topo_has_dut=${False}
+| | Set suite variable | &{topology_info} | &{info}
+| | Create suite topology variables | @{actions}
+
| Additional Suite Setup Action For scapy
| | [Documentation]
| | ... | Additional Setup for suites which uses scapy as Traffic generator.
@@ -212,6 +233,17 @@
| | ... | ${dut${duts_count}} | ${DUT${duts_count}_${int}2}[0]
| | ... | ${osi_layer}
+| Additional Suite Setup Action For performance_tg_nic
+| | [Documentation]
+| | ... | Additional Setup for suites which use performance measurement
+| | ... | for L1 cross connect tests.
+| |
+| | Initialize traffic generator
+| | ... | ${tg} | ${TG_pf1}[0] | ${TG_pf2}[0]
+| | ... | ${tg} | ${TG_pf2}[0]
+| | ... | ${tg} | ${TG_pf1}[0]
+| | ... | ${osi_layer}
+
| Additional Suite Setup Action For ipsechw
| | [Documentation]
| | ... | Additional Setup for suites which uses QAT HW.
diff --git a/tests/trex/perf/__init__.robot b/tests/trex/perf/__init__.robot
new file mode 100644
index 0000000000..7d7531d309
--- /dev/null
+++ b/tests/trex/perf/__init__.robot
@@ -0,0 +1,41 @@
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+| Resource | resources/libraries/robot/shared/default.robot
+| Resource | resources/libraries/robot/shared/interfaces.robot
+|
+| Library | resources.libraries.python.SetupFramework
+| Library | resources.libraries.python.SetupFramework.CleanupFramework
+| Library | resources.libraries.python.CpuUtils
+|
+| Suite Setup | Run Keywords | Setup Global Variables
+| ... | AND | Setup Framework | ${nodes}
+| ... | AND | Get CPU Info from All Nodes | ${nodes}
+| ... | AND | Update All Interface Data on All Nodes | ${nodes}
+| ... | skip_tg=${True} | skip_vpp=${True}
+|
+| Suite Teardown | Cleanup Framework | ${nodes}
+
+*** Keywords ***
+| Setup Global Variables
+| | [Documentation]
+| | ... | Set up suite variables. Variables are used across performance testing.
+| |
+| | ${stat_runtime}= | Create List | noop
+| | ${stat_pre_trial}= | Create List | noop
+| | ${stat_post_trial}= | Create List | noop
+| | Set Global Variable | ${stat_runtime}
+| | Set Global Variable | ${stat_pre_trial}
+| | Set Global Variable | ${stat_post_trial}
+| | Set Global Variable | ${nodes}
diff --git a/tests/trex/perf/ip4/1n1l-10ge2p1x710-ethip4-ip4base-tg-ndrpdr.robot b/tests/trex/perf/ip4/1n1l-10ge2p1x710-ethip4-ip4base-tg-ndrpdr.robot
new file mode 100755
index 0000000000..e58b6f2b5c
--- /dev/null
+++ b/tests/trex/perf/ip4/1n1l-10ge2p1x710-ethip4-ip4base-tg-ndrpdr.robot
@@ -0,0 +1,84 @@
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+| Resource | resources/libraries/robot/shared/default.robot
+|
+| Force Tags | 2_NODE_SINGLE_LINK_TOPO | PERFTEST | HW_ENV | NDRPDR
+| ... | NIC_Intel-X710 | TREX | ETH | N2N | BASE | TG_DRV_IGB_UIO
+| ... | ethip4-ip4base-tg
+|
+| Suite Setup | Setup suite topology interfaces with no DUT | performance_tg_nic
+| Suite Teardown | Tear down suite | performance
+| Test Teardown | Tear down test raw | performance
+|
+| Test Template | Local Template
+|
+| Documentation | *RFC2544: Pkt throughput L1 cross connect test cases*
+|
+| ... | *[Top] Network Topologies:* TG-TG 1-node circular topology\
+| ... | with single links between nodes.
+| ... | *[Enc] Packet Encapsulations:* Eth-IPv4 for L1 cross connect patch.
+| ... | *[Ver] TG verification:* TG finds and reports throughput NDR (Non Drop\
+| ... | Rate) with zero packet loss tolerance and throughput PDR (Partial Drop\
+| ... | Rate) with non-zero packet loss tolerance (LT) expressed in percentage\
+| ... | of packets transmitted. NDR and PDR are discovered for different\
+| ... | Ethernet L2 frame sizes using MLRsearch library.\
| ... | Test packets are generated by TG on links looped back to TG.\
+| ... | TG traffic profile contains two L3 flow-groups (flow-group per\
+| ... | direction, 254 flows per flow-group) with all packets containing\
| ... | Ethernet header, IPv4 header with static payload.\
+| ... | MAC addresses are matching MAC addresses of the TG node interfaces.
+| ... | *[Ref] Applicable standard specifications:* RFC2544.
+
+*** Variables ***
+| ${nic_name}= | Intel-X710
+| ${nic_pfs}= | 2
+| ${osi_layer}= | L2
+| ${overhead}= | ${0}
+# Traffic profile:
+| ${traffic_profile}= | trex-stl-2n-ethip4-ip4src254
+
+*** Keywords ***
+| Local Template
+| | [Documentation]
+| | ... | [Cfg] TG runs L1 cross connect config.
+| | ... | [Ver] Measure NDR and PDR values using MLRsearch algorithm.\
+| |
+| | ... | *Arguments:*
+| | ... | - frame_size - Framesize in Bytes in integer or string (IMIX_v4_1).
+| | ... | Type: integer, string
+| |
+| | [Arguments] | ${frame_size}
+| |
+| | Set Test Variable | \${frame_size}
+| |
+| | Given Set Max Rate And Jumbo
+| | Then Find NDR and PDR intervals using optimized search
+
+*** Test Cases ***
+| 64B--ethip4-ip4base-tg-ndrpdr
+| | [Tags] | 64B
+| | frame_size=${64}
+
+| 1518B--ethip4-ip4base-tg-ndrpdr
+| | [Tags] | 1518B
+| | frame_size=${1518}
+
+| 9000B--ethip4-ip4base-tg-ndrpdr
+| | [Tags] | 9000B
+| | frame_size=${9000}
+
+| IMIX--ethip4-ip4base-tg-ndrpdr
+| | [Tags] | IMIX
+| | frame_size=IMIX_v4_1
diff --git a/tests/trex/perf/ip4/regenerate_testcases.py b/tests/trex/perf/ip4/regenerate_testcases.py
new file mode 100755
index 0000000000..87521e662d
--- /dev/null
+++ b/tests/trex/perf/ip4/regenerate_testcases.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2021 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from resources.libraries.python.autogen.Regenerator import Regenerator
+
+Regenerator().regenerate_glob(u"*.robot")