Diffstat (limited to 'scripts/automation/regression')
-rwxr-xr-x  scripts/automation/regression/outer_packages.py                       |   8
-rwxr-xr-x  scripts/automation/regression/stateful_tests/trex_general_test.py     |  66
-rwxr-xr-x  scripts/automation/regression/stateless_tests/stl_benchmark_test.py   |  82
-rw-r--r--  scripts/automation/regression/stateless_tests/stl_performance_test.py |  43
-rw-r--r--  scripts/automation/regression/trex.py                                 |  15
-rw-r--r--  scripts/automation/regression/trex_elk.py                             | 320
-rwxr-xr-x  scripts/automation/regression/trex_unit_test.py                       |  49
7 files changed, 548 insertions(+), 35 deletions(-)
diff --git a/scripts/automation/regression/outer_packages.py b/scripts/automation/regression/outer_packages.py
index 61ddc5cd..b2839dee 100755
--- a/scripts/automation/regression/outer_packages.py
+++ b/scripts/automation/regression/outer_packages.py
@@ -1,7 +1,7 @@
#!/router/bin/python
-
import sys, site
import platform, os
+import pprint
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) # alternate use with: os.getcwd()
TREX_PATH = os.getenv('TREX_UNDER_TEST') # path to <trex-core>/scripts directory; the TREX_UNDER_TEST env. variable overrides the default
@@ -18,8 +18,11 @@ NIGHTLY_MODULES = [ {'name': 'ansi2html'},
                    {'name': 'rednose-0.4.1'},
                    {'name': 'progressbar-2.2'},
                    {'name': 'termstyle'},
+                   {'name': 'urllib3'},
+                   {'name': 'elasticsearch'},
+                   {'name': 'requests'},
                    {'name': 'pyyaml-3.11', 'py-dep': True},
-                   {'name': 'nose-1.3.4', 'py-dep': True}
+                   {'name': 'nose-1.3.4', 'py-dep': True},
]
@@ -62,6 +65,7 @@ def import_nightly_modules ():
    sys.path.append(PATH_STL_API)
    sys.path.append(PATH_STF_API)
    import_module_list(NIGHTLY_MODULES)
+   #pprint.pprint(sys.path)
import_nightly_modules()
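
A quick sanity check that the three packages added above (urllib3, elasticsearch, requests) actually resolve after import_nightly_modules() runs; a minimal standalone sketch, not part of the patch:

    import importlib
    for mod in ('urllib3', 'elasticsearch', 'requests'):
        m = importlib.import_module(mod)
        print('%-13s -> %s' % (mod, getattr(m, '__version__', 'unknown')))
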
diff --git a/scripts/automation/regression/stateful_tests/trex_general_test.py b/scripts/automation/regression/stateful_tests/trex_general_test.py
index f6d2b917..24c38783 100755
--- a/scripts/automation/regression/stateful_tests/trex_general_test.py
+++ b/scripts/automation/regression/stateful_tests/trex_general_test.py
@@ -28,6 +28,7 @@ from nose.plugins.skip import SkipTest
import trex
from trex import CTRexScenario
import misc_methods
+import pprint
import sys
import os
# from CPlatformUnderTest import *
@@ -60,6 +61,7 @@ class CTRexGeneral_Test(unittest.TestCase):
self.trex_crashed = CTRexScenario.trex_crashed
self.modes = CTRexScenario.modes
self.GAManager = CTRexScenario.GAManager
+ self.elk = CTRexScenario.elk
self.no_daemon = CTRexScenario.no_daemon
self.skipping = False
self.fail_reasons = []
@@ -69,6 +71,20 @@ class CTRexGeneral_Test(unittest.TestCase):
self.is_virt_nics = True if 'virt_nics' in self.modes else False
self.is_VM = True if 'VM' in self.modes else False
+        # update the ELK 'setup' const object
+        setup = CTRexScenario.elk_info['info']['setup']
+
+        if self.is_loopback:
+            setup['dut'] = 'loopback'
+        else:
+            setup['dut'] = 'router'
+
+        if self.is_VM:
+            setup['baremetal'] = False
+            setup['hypervisor'] = 'ESXi' #TBD
+        else:
+            setup['baremetal'] = True
+
if not CTRexScenario.is_init:
if self.trex and not self.no_daemon: # stateful
CTRexScenario.trex_version = self.trex.get_trex_version()
@@ -82,6 +98,7 @@ class CTRexGeneral_Test(unittest.TestCase):
CTRexScenario.router.launch_connection(device_cfg)
if CTRexScenario.router_cfg['forceImageReload']:
running_image = CTRexScenario.router.get_running_image_details()['image']
+ setup['dut'] =CTRexScenario.router.get_running_image_details()['model']
print('Current router image: %s' % running_image)
needed_image = device_cfg.get_image_name()
if not CTRexScenario.router.is_image_matches(needed_image):
@@ -107,7 +124,9 @@ class CTRexGeneral_Test(unittest.TestCase):
# raise RuntimeError('CTRexScenario class is not initialized!')
self.router = CTRexScenario.router
-
+    def get_elk_obj (self):
+        # fresh deep copy of the shared ELK info (timestamp re-stamped per call)
+        return trex.copy_elk_info()
# def assert_dict_eq (self, dict, key, val, error=''):
# v1 = int(dict[key]))
@@ -142,6 +161,7 @@ class CTRexGeneral_Test(unittest.TestCase):
def check_CPU_benchmark (self, trex_res, err = 25, minimal_cpu = 10, maximal_cpu = 85):
cpu_util = trex_res.get_avg_steady_state_value('trex-global.data.m_cpu_util_raw')
trex_tx_bps = trex_res.get_avg_steady_state_value('trex-global.data.m_tx_bps')
+ trex_tx_pps = trex_res.get_avg_steady_state_value('trex-global.data.m_tx_pps')
expected_norm_cpu = self.get_benchmark_param('bw_per_core')
cores = self.get_benchmark_param('cores')
ports_count = trex_res.get_ports_count()
@@ -172,16 +192,42 @@ class CTRexGeneral_Test(unittest.TestCase):
#if calc_error_precent > err and cpu_util > 10:
# self.fail('Excepted bw_per_core ratio: %s, got: %g' % (expected_norm_cpu, round(test_norm_cpu)))
+        trex_tx_gbps = trex_tx_bps / 1e9
+        trex_tx_mpps = trex_tx_pps / 1e6
+
+        # normalize to one fully-utilized core: scale to 100% CPU, divide by core count
+        trex_tx_gbps_pc = trex_tx_gbps * 100.0 / (cpu_util * cores)
+        trex_tx_mpps_pc = trex_tx_mpps * 100.0 / (cpu_util * cores)
+
+        trex_tx_pckt = trex_res.get_last_value("trex-global.data.m_total_tx_pkts")
+        trex_drops = trex_res.get_total_drops()
+        trex_drop_percent = trex_drops * 100.0 / trex_tx_pckt
+
# report benchmarks
- if self.GAManager:
- try:
- pass
- #setup_test = '%s.%s' % (CTRexScenario.setup_name, self.get_name())
- #self.GAManager.gaAddAction(Event = 'stateful_test', action = setup_test, label = 'bw_per_core', value = int(test_norm_cpu))
- #self.GAManager.gaAddAction(Event = 'stateful_test', action = setup_test, label = 'bw_per_core_exp', value = int(expected_norm_cpu))
- #self.GAManager.emptyAndReportQ()
- except Exception as e:
- print('Sending GA failed: %s' % e)
+        if self.elk:
+            elk_obj = self.get_elk_obj()
+            print("Reporting to elk")
+            elk_obj['test'] = { "name"          : self.get_name(),
+                                "type"          : "stateful",
+                                "cores"         : cores,
+                                "cpu%"          : cpu_util,
+                                "mpps"          : trex_tx_mpps,
+                                "streams_count" : 1,
+                                "mpps_pc"       : trex_tx_mpps_pc,
+                                "gbps_pc"       : trex_tx_gbps_pc,
+                                "gbps"          : trex_tx_gbps,
+                                "kcps"          : trex_res.get_last_value("trex-global.data.m_tx_cps") / 1000.0,
+                                "avg-pktsize"   : round(1000.0 * trex_tx_gbps / (8.0 * trex_tx_mpps)),
+                                "latency"       : { "min"       : min(trex_res.get_min_latency().values()),
+                                                    "max"       : max(trex_res.get_max_latency().values()),
+                                                    "avr"       : max(trex_res.get_avg_latency().values()),  # worst per-port average
+                                                    "jitter"    : max(trex_res.get_jitter_latency().values()),
+                                                    "max-win"   : max(trex_res.get_avg_window_latency().values()),
+                                                    "drop-rate" : trex_drop_percent
+                                                  }
+                              }
+            pprint.pprint(elk_obj['test'])
+            self.elk.perf.push_data(elk_obj)
+
def check_results_gt (self, res, name, val):
if res is None:
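
The normalization above converts raw throughput into per-fully-utilized-core figures: scale by 100/cpu_util to extrapolate to 100% CPU, then divide by the core count. A short worked example with hypothetical numbers:

    cpu_util, cores = 80.0, 4                         # hypothetical: 80% CPU over 4 cores
    trex_tx_gbps, trex_tx_mpps = 40.0, 12.0           # hypothetical: 40 Gb/s, 12 Mpps measured
    print(trex_tx_gbps * 100.0 / (cpu_util * cores))  # 12.5 Gb/s per core at 100% CPU
    print(trex_tx_mpps * 100.0 / (cpu_util * cores))  # 3.75 Mpps per core at 100% CPU
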
diff --git a/scripts/automation/regression/stateless_tests/stl_benchmark_test.py b/scripts/automation/regression/stateless_tests/stl_benchmark_test.py
index 6940efd3..fbc58765 100755
--- a/scripts/automation/regression/stateless_tests/stl_benchmark_test.py
+++ b/scripts/automation/regression/stateless_tests/stl_benchmark_test.py
@@ -4,6 +4,7 @@ from trex_stl_lib.api import *
import os, sys
from collections import deque
from time import time, sleep
+import pprint
class STLBenchmark_Test(CStlGeneral_Test):
"""Benchark stateless performance"""
@@ -14,9 +15,21 @@ class STLBenchmark_Test(CStlGeneral_Test):
stabilize = 5 # ensure stabilization over this period
print('')
- for profile_bench in self.get_benchmark_param('profiles'):
+ #self.get_benchmark_param('profiles')
+ #profiles=[{'bw_per_core': 1,
+ # 'cpu_util': 1,
+ # 'kwargs': {'packet_len': 64},
+ # 'name': 'stl/udp_for_benchmarks.py'}]
+
+ profiles = self.get_benchmark_param('profiles')
+ dp_cores = self.stl_trex.system_info.get('dp_core_count', 0)
+
+ for profile_bench in profiles:
+
cpu_utils = deque([0] * stabilize, maxlen = stabilize)
- bws_per_core = deque([0] * stabilize, maxlen = stabilize)
+ bps = deque([0] * stabilize, maxlen = stabilize)
+ pps = deque([0] * stabilize, maxlen = stabilize)
+
kwargs = profile_bench.get('kwargs', {})
print('Testing profile %s, kwargs: %s' % (profile_bench['name'], kwargs))
profile = STLProfile.load(os.path.join(CTRexScenario.scripts_path, profile_bench['name']), **kwargs)
@@ -32,13 +45,30 @@ class STLBenchmark_Test(CStlGeneral_Test):
for i in range(timeout + 1):
stats = self.stl_trex.get_stats()
cpu_utils.append(stats['global']['cpu_util'])
- bws_per_core.append(stats['global']['bw_per_core'])
+ bps.append(stats['global']['tx_bps'])
+ pps.append(stats['global']['tx_pps'])
+
if i > stabilize and min(cpu_utils) > max(cpu_utils) * 0.95:
break
sleep(0.5)
agv_cpu_util = sum(cpu_utils) / stabilize
- agv_bw_per_core = sum(bws_per_core) / stabilize
+ agv_pps = sum(pps) / stabilize
+ agv_bps = sum(bps) / stabilize
+
+            if agv_cpu_util == 0.0:
+                agv_cpu_util = 1.0  # guard against division by zero below
+
+            agv_mpps = agv_pps / 1e6
+            agv_gbps = agv_bps / 1e9
+
+            # normalize to 100% CPU, then to a single data-path core
+            agv_gbps_norm = agv_gbps * 100.0 / agv_cpu_util
+            agv_mpps_norm = agv_mpps * 100.0 / agv_cpu_util
+
+            agv_gbps_norm_pc = agv_gbps_norm / dp_cores
+            agv_mpps_norm_pc = agv_mpps_norm / dp_cores
+
if critical_test and i == timeout and agv_cpu_util > 10:
raise Exception('Timeout on waiting for stabilization, last CPU util values: %s' % list(cpu_utils))
@@ -48,24 +78,32 @@ class STLBenchmark_Test(CStlGeneral_Test):
raise Exception('Too much queue_full: %s' % stats['global']['queue_full'])
if not cpu_utils[-1]:
raise Exception('CPU util is zero, last values: %s' % list(cpu_utils))
- print('Done (%ss), CPU util: %4g, bw_per_core: %6sGb/core' % (int(time() - start_time), agv_cpu_util, round(agv_bw_per_core, 2)))
- # TODO: add check of benchmark based on results from regression
-
- # report benchmarks
- if self.GAManager:
- try:
- pass
- #profile_repr = '%s.%s %s' % (CTRexScenario.setup_name,
- # os.path.basename(profile_bench['name']),
- # repr(kwargs).replace("'", ''))
- #self.GAManager.gaAddAction(Event = 'stateless_test', action = profile_repr,
- # label = 'bw_per_core', value = int(agv_bw_per_core))
- # TODO: report expected once acquired
- #self.GAManager.gaAddAction(Event = 'stateless_test', action = profile_repr,
- # label = 'bw_per_core_exp', value = int(expected_norm_cpu))
- #self.GAManager.emptyAndReportQ()
- except Exception as e:
- print('Sending GA failed: %s' % e)
+            print('Done (%ss), CPU util: %4g, norm_pps_per_core: %6s Mpps, norm_bw_per_core: %6s Gb/core' % (int(time() - start_time), agv_cpu_util, round(agv_mpps_norm_pc, 2), round(agv_gbps_norm_pc, 2)))
+
+            # report benchmarks to elk
+            if self.elk:
+                streams = kwargs.get('stream_count', 1)
+                elk_obj = self.get_elk_obj()
+                print("\n* Reporting to elk *\n")
+                name = profile_bench['name']
+                elk_obj['test'] = { "name"          : name,
+                                    "type"          : "stateless-range",
+                                    "cores"         : dp_cores,
+                                    "cpu%"          : agv_cpu_util,
+                                    "mpps"          : agv_mpps,
+                                    "streams_count" : streams,
+                                    "mpps_pc"       : agv_mpps_norm_pc,
+                                    "gbps_pc"       : agv_gbps_norm_pc,
+                                    "gbps"          : agv_gbps,
+                                    "avg-pktsize"   : round(1000.0 * agv_gbps / (8.0 * agv_mpps)),
+                                    "latency"       : { "min" : -1.0,
+                                                        "max" : -1.0,
+                                                        "avr" : -1.0
+                                                      }
+                                  }
+                #pprint.pprint(elk_obj)
+                self.elk.perf.push_data(elk_obj)
+
def tearDown(self):
self.stl_trex.reset()
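
The loop above declares the run stable once the smallest of the last 'stabilize' CPU samples is within 5% of the largest. The same check, isolated into a self-contained sketch (names are illustrative):

    from collections import deque

    def is_stable(samples, window=5, tolerance=0.95):
        # stable when the last `window` readings have stopped drifting
        last = deque(samples, maxlen=window)
        return len(last) == window and min(last) > max(last) * tolerance

    print(is_stable([10, 55, 79, 80, 81, 80, 79]))   # True: the last 5 samples converged
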
diff --git a/scripts/automation/regression/stateless_tests/stl_performance_test.py b/scripts/automation/regression/stateless_tests/stl_performance_test.py
index ee5d86cc..a1f4dd3b 100644
--- a/scripts/automation/regression/stateless_tests/stl_performance_test.py
+++ b/scripts/automation/regression/stateless_tests/stl_performance_test.py
@@ -1,6 +1,7 @@
import os
from .stl_general_test import CStlGeneral_Test, CTRexScenario
from trex_stl_lib.api import *
+import pprint
def avg (values):
return (sum(values) / float(len(values)))
@@ -67,6 +68,42 @@ class PerformanceReport(object):
ga.emptyAndReportQ()
+    def norm_scenario (self):
+        # normalize the scenario name: turn spaces, dashes and commas into single dashes
+        s = self.scenario
+        for sep in (' ', '-', ','):
+            s = '+'.join(s.split(sep))
+        return '-'.join(part for part in s.split('+') if part)
+
+    def report_to_elk (self, elk, elk_obj, golden_mpps):
+        # golden_mpps is accepted but not currently recorded
+        print("\n* Reporting to elk *\n")
+        elk_obj['test'] = { "name"          : self.norm_scenario(),
+                            "type"          : "stateless",
+                            "cores"         : self.core_count,
+                            "cpu%"          : self.avg_cpu,
+                            "mpps"          : self.avg_mpps,
+                            "streams_count" : 1,
+                            "mpps_pc"       : self.avg_mpps_per_core,
+                            "gbps_pc"       : self.avg_gbps_per_core,
+                            "gbps"          : self.avg_gbps,
+                            "avg-pktsize"   : 1000.0 * self.avg_gbps / (8.0 * self.avg_mpps),
+                            "latency"       : { "min" : -1.0,
+                                                "max" : -1.0,
+                                                "avr" : -1.0
+                                              }
+                          }
+
+        #pprint.pprint(elk_obj)
+        # push to elk
+        elk.perf.push_data(elk_obj)
+
+
class STLPerformance_Test(CStlGeneral_Test):
"""Tests for stateless client"""
@@ -242,9 +279,15 @@ class STLPerformance_Test(CStlGeneral_Test):
golden = scenario_cfg['mpps_per_core_golden']
report = self.execute_single_scenario_iteration(scenario_cfg)
+
if self.GAManager:
report.report_to_analytics(self.GAManager, golden)
+        # report to elk
+        if self.elk:
+            elk_obj = self.get_elk_obj()
+            report.report_to_elk(self.elk, elk_obj, golden)
+
rc = report.check_golden(golden)
if rc == PerformanceReport.GOLDEN_NORMAL or rc == PerformanceReport.GOLDEN_BETTER:
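
For reference, norm_scenario() collapses spaces, dashes and commas into single dashes so the scenario name is usable as a compact ELK keyword. With a hypothetical scenario string the transformation looks like this:

    s = 'syn attack - 64 bytes, single CPU'
    for sep in (' ', '-', ','):
        s = '+'.join(s.split(sep))
    print('-'.join(part for part in s.split('+') if part))   # syn-attack-64-bytes-single-CPU
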
diff --git a/scripts/automation/regression/trex.py b/scripts/automation/regression/trex.py
index 416a6e3b..f9a3fbf1 100644
--- a/scripts/automation/regression/trex.py
+++ b/scripts/automation/regression/trex.py
@@ -10,6 +10,7 @@ import time
from CProgressDisp import TimedProgressBar
from stateful_tests.tests_exceptions import TRexInUseError
import datetime
+import copy
class CTRexScenario:
modes = set() # list of modes of this setup: loopback, virtual etc.
@@ -41,6 +42,20 @@ class CTRexScenario:
debug_image = False
test = None
json_verbose = False
+ elk = None
+ elk_info = None
+
+def copy_elk_info ():
+    assert(CTRexScenario.elk_info)
+    d = copy.deepcopy(CTRexScenario.elk_info)
+
+    timestamp = datetime.datetime.now()   # re-stamp the timestamp for this record
+    d['timestamp'] = timestamp.strftime("%Y-%m-%d %H:%M:%S")
+    return d
+
+
+
+
class CTRexRunner:
"""This is an instance for generating a CTRexRunner"""
diff --git a/scripts/automation/regression/trex_elk.py b/scripts/automation/regression/trex_elk.py
new file mode 100644
index 00000000..ddca474e
--- /dev/null
+++ b/scripts/automation/regression/trex_elk.py
@@ -0,0 +1,320 @@
+import os
+import outer_packages
+import json
+import pprint
+from elasticsearch import Elasticsearch
+from pprint import pprint
+from elasticsearch import helpers
+import random
+import datetime
+
+# build one simulated 'perf' record (random data) for the given build id
+def create_one_object (build_id):
+    d = {}
+
+    # random timestamp within the last 30 days
+    sim_date = datetime.datetime.now() - datetime.timedelta(hours=random.randint(0, 24*30))
+    info = {}
+
+
+ img={}
+ img['sha'] = random.choice(["v2.11","v2.10","v2.12","v2.13","v2.14"])
+ img['build_time'] = sim_date.strftime("%Y-%m-%d %H:%M:%S")
+ img['version'] = random.choice(["v2.11","v2.10","v2.12","v2.13","v2.14"])
+ img['formal'] = False
+
+ setup={}
+
+    setup['distro'] = 'Ubuntu14.04'
+ setup['kernel']='2.6.12'
+ setup['baremetal']=True
+ setup['hypervisor']='None'
+ setup['name']='trex07'
+ setup['cpu-sockets']=2
+ setup['cores']=16
+ setup['cpu-speed']=3.5
+
+ setup['dut'] ='loopback'
+ setup['drv-name']='mlx5'
+ setup['nic-ports']=2
+ setup['total-nic-ports']=2
+ setup['nic-speed'] ="40GbE"
+
+
+
+ info['image'] = img
+ info['setup'] = setup
+
+ d['info'] =info;
+
+ d['timestamp']=sim_date.strftime("%Y-%m-%d %H:%M:%S")
+ d['build_id']=str("build-%d" %(build_id))
+ d['test']={ "name" : "test1",
+ "type" : "stateless",
+ "cores" : random.randint(1,10),
+ "cpu%" : random.randint(60,99),
+ "mpps" : random.randint(9,32),
+ "mpps_pc" : random.randint(9,32),
+ "gbps_pc" : random.randint(9,32),
+ "gbps" : random.randint(9,32),
+ "avg-pktsize" : random.randint(60,1500),
+ "latecny" : { "min" : random.randint(1,10),
+ "max" : random.randint(100,120),
+ "avr" : random.randint(1,60)
+ }
+ };
+
+
+ return(d)
+
+
+class EsHelper(object):
+
+ def __init__ (self, es,
+ alias,
+ index_name,
+ mapping):
+ self.es = es
+ self.alias = alias
+ self.index_name = index_name
+ self.mapping = mapping
+ self.setting = { "index.mapper.dynamic":"false"};
+
+ def delete (self):
+ es=self.es;
+ es.indices.delete(index=self.alias, ignore=[400, 404]);
+
+ def is_exists (self):
+ es=self.es;
+ return es.indices.exists(index=self.alias, ignore=[400, 404])
+
+    def create_first_time (self):
+ es=self.es;
+ index_name=self.index_name
+ es.indices.create(index=index_name, ignore=[],body = {
+ "aliases": { self.alias : {} },
+ "mappings" : { "data": self.mapping },
+ "settings" : self.setting
+ });
+
+ def update(self):
+ es=self.es;
+ es.indices.put_mapping(index=self.alias, doc_type="data",body=self.mapping);
+ es.indices.rollover(alias=self.alias,body={
+ "conditions": {
+ "max_age": "30d",
+ "max_docs": 100000
+ },
+ "mappings" : { "data": self.mapping },
+ "settings" : self.setting
+ }
+ );
+
+ def open(self):
+ if not self.is_exists():
+            self.create_first_time()
+ else:
+ self.update()
+
+ def close(self):
+ pass;
+
+ def push_data(self,data):
+ es=self.es;
+ es.index(index=self.alias,doc_type="data", body=data);
+
+
+
+
+# build one simulated regression-result record for the given build id
+def create_reg_object (build_id):
+    d = {}
+
+    sim_date = datetime.datetime.now() - datetime.timedelta(hours=random.randint(0, 24*30))
+    info = {}
+
+
+ img={}
+ img['sha'] = random.choice(["v2.11","v2.10","v2.12","v2.13","v2.14"])
+ img['build_time'] = sim_date.strftime("%Y-%m-%d %H:%M:%S")
+ img['version'] = random.choice(["v2.11","v2.10","v2.12","v2.13","v2.14"])
+ img['formal'] = False
+
+ setup={}
+
+    setup['distro'] = 'Ubuntu14.04'
+ setup['kernel']='2.6.12'
+ setup['baremetal']=True
+ setup['hypervisor']='None'
+ setup['name']='trex07'
+ setup['cpu-sockets']=2
+ setup['cores']=16
+ setup['cpu-speed']=3.5
+
+ setup['dut'] ='loopback'
+ setup['drv-name']='mlx5'
+ setup['nic-ports']=2
+ setup['total-nic-ports']=2
+ setup['nic-speed'] ="40GbE"
+
+
+
+ info['image'] = img
+ info['setup'] = setup
+
+ d['info'] =info;
+
+ d['timestamp']=sim_date.strftime("%Y-%m-%d %H:%M:%S")
+ d['build_id']=str("build-%d" %(build_id))
+ d['test']= { "name" : "stateful_tests.trex_imix_test.CTRexIMIX_Test.test_routing_imix" ,
+ "type" : "stateless",
+ "duration_sec" : random.uniform(1,10),
+ "result" : random.choice(["PASS","SKIP","FAIL"]),
+ "stdout" : """
+ LATEST RESULT OBJECT:
+ Total ARP received : 16 pkts
+ maximum-latency : 300 usec
+ average-latency : 277 usec
+ latency-any-error : ERROR
+ """
+ };
+
+ return(d)
+
+
+
+# How to extend the schema: a new field can be added at any time,
+# but an existing field can never be removed from the mapping.
+class TRexEs(object):
+
+ def __init__ (self, host,
+ port,
+ ):
+ self.es = Elasticsearch([{"host": host, "port": port}])
+ es=self.es;
+ res=es.info()
+        es_version = res["version"]["number"]
+        l = es_version.split('.')
+        if not (len(l) == 3 and int(l[0]) >= 5):
+            raise RuntimeError("Invalid ES version, should be at least 5.0.x, got: %s" % es_version)
+
+ setup_info = { # constant per setup
+ "properties": {
+
+ "image" : {
+ "properties": {
+ "sha" : { "type": "keyword" }, # git sha
+ "build_time" : { "type": "date", # build time
+ "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"},
+ "version" : { "type": "keyword" }, # version name like 'v2.12'
+ "formal" : { "type": "boolean" }, # true for formal release
+ }
+ },
+
+ "setup" : {
+ "properties": {
+ "distro" : { "type": "keyword" }, # 'ubuntu'
+ "kernel" : { "type": "keyword" }, # 2.3.19
+ "baremetal" : { "type": "boolean" }, # true or false for
+ "hypervisor" : { "type": "keyword" }, # kvm,esxi , none
+ "name" : { "type": "keyword" }, # setup name , e.g. kiwi02
+ "cpu-sockets" : { "type": "long" }, # number of socket
+ "cores" : { "type": "long" }, # total cores
+ "cpu-speed" : { "type": "double" }, # 3.5 in ghz
+ "dut" : { "type": "keyword" }, # asr1k, loopback
+ "drv-name" : { "type": "keyword" }, # vic, mlx5,599,xl710,x710
+ "nic-ports" : { "type": "long" }, #2,1,4
+ "total-nic-ports" : { "type": "long" }, #8
+ "nic-speed" : { "type": "keyword" }, #40Gb
+ }
+ }
+ }
+ }
+
+
+ perf_mapping = {
+ "dynamic": "strict",
+ "properties": {
+
+ "scenario" : { "type": "keyword" },
+ "build_id" : { "type": "keyword" },
+ "timestamp" : { "type": "date",
+ "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"},
+
+ "info" : setup_info,
+
+ "test" : {
+ "properties": {
+ "name" : { "type": "keyword" }, # name of the test
+ "type" : { "type": "keyword" }, # stateless,stateful, other
+ "cores" : { "type": "long" },
+ "cpu%" : { "type": "double" },
+ "mpps" : { "type": "double" },
+ "streams_count" : { "type": "long" },
+ "mpps_pc" : { "type": "double" },
+ "gbps_pc" : { "type": "double" },
+ "gbps" : { "type": "double" },
+ "avg-pktsize" : { "type": "long" },
+ "kcps" : { "type": "double" },
+ "latecny" : {
+ "properties": {
+ "min" : { "type": "double" },
+ "max" : { "type": "double" },
+ "avr" : { "type": "double" },
+ "max-win" : { "type": "double" },
+ "drop-rate" : { "type": "double" },
+ "jitter" : { "type": "double" },
+ }
+ }
+
+ }
+ }
+ }
+ }
+
+ self.perf = EsHelper(es=es,
+ alias="perf",
+ index_name='trex_perf-000001',
+ mapping=perf_mapping)
+
+
+
+ reg_mapping = {
+ "dynamic": "strict",
+ "properties": {
+
+ "scenario" : { "type": "keyword" },
+ "build_id" : { "type": "keyword" },
+ "timestamp" : { "type": "date",
+ "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"},
+
+ "info" : setup_info,
+
+ "test" : {
+ "properties": {
+ "name" : { "type": "text" }, # name of the test
+ "type" : { "type": "keyword" }, # stateless,stateful, other
+ "duration_sec" : { "type": "double" }, # sec
+ "result" : { "type": "keyword" }, # PASS,FAIL,SKIP
+ "stdout" : { "type": "text" }, # output in case of faliue
+ }
+ }
+ }
+ }
+
+
+ self.reg = EsHelper(es=es,
+ alias="reg",
+ index_name='trex_reg-000001',
+ mapping=reg_mapping)
+
+
+        self.perf.open()
+        self.reg.open()
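
Once EsHelper.push_data() has indexed a few records through the 'perf' alias, they can be read back with a plain search. A hedged sketch, assuming the same ES 5.x host used below (sceasr-b20:9200) and at least one stored record with build_id 'build-1':

    from elasticsearch import Elasticsearch

    es = Elasticsearch([{'host': 'sceasr-b20', 'port': 9200}])
    res = es.search(index='perf', doc_type='data', body={
        'query': {'term': {'build_id': 'build-1'}},
        'sort':  [{'timestamp': {'order': 'desc'}}],
        'size':  5,
    })
    for hit in res['hits']['hits']:
        t = hit['_source']['test']
        print(t['name'], t.get('mpps_pc'))
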
diff --git a/scripts/automation/regression/trex_unit_test.py b/scripts/automation/regression/trex_unit_test.py
index 34d2c430..b47ef94d 100755
--- a/scripts/automation/regression/trex_unit_test.py
+++ b/scripts/automation/regression/trex_unit_test.py
@@ -27,7 +27,7 @@ Description:
import os
import sys
import outer_packages
-
+import datetime
import nose
from nose.plugins import Plugin
from nose.selector import Selector
@@ -40,6 +40,7 @@ from trex_stf_lib.trex_client import *
from trex_stf_lib.trex_exceptions import *
from trex_stl_lib.api import *
from trex_stl_lib.utils.GAObjClass import GAmanager_Regression
+import trex_elk
import trex
import socket
from pprint import pprint
@@ -229,6 +230,52 @@ class CTRexTestConfiguringPlugin(Plugin):
appName = 'TRex',
appVer = CTRexScenario.trex_version)
+        CTRexScenario.elk = trex_elk.TRexEs('sceasr-b20', 9200)
+        self.set_cont_elk_info()
+
+    def set_cont_elk_info (self):
+        elk_info = {}
+        timestamp = datetime.datetime.now()   # re-stamped per record by trex.copy_elk_info()
+        info = {}
+
+        img = {}
+        img['sha']        = "v2.14" #TBD
+        img['build_time'] = timestamp.strftime("%Y-%m-%d %H:%M:%S")
+        img['version']    = CTRexScenario.trex_version
+        img['formal']     = False
+
+        setup = {}
+
+        setup['distro']     = 'None'  #TBD 'Ubuntu14.04'
+        setup['kernel']     = 'None'  #TBD '2.6.12'
+        setup['baremetal']  = True    #TBD
+        setup['hypervisor'] = 'None'  #TBD
+        setup['name']       = CTRexScenario.setup_name
+
+        setup['cpu-sockets'] = 0   #TBD 2
+        setup['cores']       = 0   #TBD 16
+        setup['cpu-speed']   = -1  #TBD 3.5
+
+        setup['dut']             = 'None'  #TBD 'loopback'
+        setup['drv-name']        = 'None'  #TBD 'mlx5'
+        setup['nic-ports']       = 0       #TBD 2
+        setup['total-nic-ports'] = 0       #TBD 2
+        setup['nic-speed']       = "None"  #TBD "40GbE"
+
+        info['image'] = img
+        info['setup'] = setup
+
+        elk_info['info'] = info
+
+        elk_info['timestamp'] = timestamp.strftime("%Y-%m-%d %H:%M:%S")  # re-stamped per record
+        elk_info['build_id']  = os.environ.get('BUILD_ID')
+        elk_info['scenario']  = os.environ.get('SCENARIO')
+
+        CTRexScenario.elk_info = elk_info
+
def begin (self):
client = CTRexScenario.trex
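
Putting the pieces together, a record pushed by any of the tests above ends up shaped roughly like this (hand-written sketch; all field values hypothetical):

    example_perf_doc = {
        'build_id':  'build-1',                      # from $BUILD_ID
        'scenario':  'nightly',                      # from $SCENARIO
        'timestamp': '2017-01-01 12:00:00',          # re-stamped by trex.copy_elk_info()
        'info': {'image': {}, 'setup': {}},          # constants filled by set_cont_elk_info()
        'test': {'name': 'test1', 'type': 'stateless', 'cores': 4,
                 'cpu%': 80.0, 'mpps': 12.0, 'mpps_pc': 3.75, 'gbps': 40.0},
    }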