From a41bd98b41c661f21d82d54c5b66cd97a290e337 Mon Sep 17 00:00:00 2001
From: itraviv
Date: Sun, 27 Nov 2016 18:32:16 +0200
Subject: added a howto guide on setting up the tracking environment. changed
 the analytics web report to fetch the last 15 days of regression results.
 added a rule to ws_main to build the howto guide

Signed-off-by: itraviv
---
 doc/AnalyticsConnect.py   | 97 +++++++++++++++++++++++++++++++++--------------
 doc/AnalyticsWebReport.py |  7 +++-
 doc/ws_main.py            |  3 ++
 3 files changed, 77 insertions(+), 30 deletions(-)

diff --git a/doc/AnalyticsConnect.py b/doc/AnalyticsConnect.py
index e1210772..90818c1c 100755
--- a/doc/AnalyticsConnect.py
+++ b/doc/AnalyticsConnect.py
@@ -38,7 +38,7 @@ def initialize_analyticsreporting():
     return analytics
 
 
-def get_report(analytics,start_date='2016-11-06',end_date='2016-11-27'):
+def get_report(analytics,start_date='2016-11-27',end_date='2016-11-27'):
     # Use the Analytics Service Object to query the Analytics Reporting API V4.
     return analytics.reports().batchGet(
         body={
@@ -80,31 +80,9 @@ def print_response(response):
                     print metricHeader.get('name') + ': ' + value
 
 
-def export_to_dict(response):
-    df = {'Test_name':[],'State':[],'Setup':[],'Test_type':[],'MPPS':[],'MPPS-Golden min':[],'MPPS-Golden max':[]}
-    for report in response.get('reports', []):
-        rows = report.get('data', {}).get('rows', [])
-        for row in rows:
-            dimensions = row.get('dimensions', [])
-            # print 'this is dimensions'
-            # print dimensions
-            df['Test_name'].append(dimensions[1])
-            df['State'].append(dimensions[2])
-            df['Setup'].append(dimensions[3])
-            df['Test_type'].append(dimensions[4])
-            dateRangeValues = row.get('metrics', [])
-            value = dateRangeValues[0].get('values',[])[0]
-            golden_min = dateRangeValues[0].get('values',[])[1]
-            golden_max = dateRangeValues[0].get('values',[])[2]
-            # print value
-            df['MPPS'].append(value)
-            df['MPPS-Golden min'].append(golden_min)
-            df['MPPS-Golden max'].append(golden_max)
-    return df
-
 
 def export_to_tuples(response):
-    counter = 0
+    # counter = 0
     setups = set()
     df = {}
     for report in response.get('reports', []):
@@ -135,7 +113,7 @@ def export_to_tuples(response):
                 df[dimensions[3]] = {}
                 df[dimensions[3]][dimensions[1]] = [tuple(data)]
                 setups.add(dimensions[3])
-    print 'counter is: %d' % counter
+    # print 'counter is: %d' % counter
     return df, setups
 
 
@@ -143,9 +121,72 @@ def main():
     analytics = initialize_analyticsreporting()
     response = get_report(analytics)
     df, setups = export_to_tuples(response)
+    # pprint(df)
+    return df,setups
 
-
-    #pprint(response)
 
 if __name__ == '__main__':
-    main()
+    main()
+
+
+"""
+response structure (when fetched with "export_to_tuples"):
+
+{ 'setup1': {'test_name1': [(test_res1),(test_res2),...],
+             'test_name2': [(test_res1),(test_res2),...]
+            },
+  'setup2': {'test_name1': [(test_res1),(test_res2),...],
+             'test_name2': [(test_res1),(test_res2),...]
+            },
+    .
+    .
+    .
+    .
+}
+
+{u'kiwi02': {u'VM - 64 bytes, multi CPU, cache size 1024': [(u'VM - 64 bytes, multi CPU, cache size 1024',
+                                                             u'stl',
+                                                             u'performance',
+                                                             u'19.711146',
+                                                             u'19.0',
+                                                             u'22.0'),
+                                                            (u'VM - 64 bytes, multi CPU, cache size 1024',
+                                                             u'stl',
+                                                             u'performance',
+                                                             u'19.581567',
+                                                             u'19.0',
+                                                             u'22.0')],
+             u'VM - 64 bytes, multi CPUs': [(u'VM - 64 bytes, multi CPUs',
+                                             u'stl',
+                                             u'performance',
+                                             u'10.398847',
+                                             u'9.7',
+                                             u'12.5'),
+                                            (u'VM - 64 bytes, multi CPUs',
+                                             u'stl',
+                                             u'performance',
+                                             u'10.925308',
+                                             u'9.7',
+                                             u'12.5')
+                                           ]
+            },
+ u'trex07': {u'VM - 64 bytes, multi CPU, cache size 1024': [(u'VM - 64 bytes, multi CPU, cache size 1024',
+                                                             u'stl',
+                                                             u'performance',
+                                                             u'25.078212',
+                                                             u'9.0',
+                                                             u'15.0')
+                                                           ],
+             u'VM - 64 bytes, multi CPUs': [(u'VM - 64 bytes, multi CPUs',
+                                             u'stl',
+                                             u'performance',
+                                             u'9.469138',
+                                             u'8.5',
+                                             u'12.0')
+                                           ]
+            }
+}
+
+
+"""
diff --git a/doc/AnalyticsWebReport.py b/doc/AnalyticsWebReport.py
index 182d8367..bd4a9a2b 100755
--- a/doc/AnalyticsWebReport.py
+++ b/doc/AnalyticsWebReport.py
@@ -3,6 +3,7 @@ import sys
 import AnalyticsConnect as ac
 import TRexDataAnalysis as tr
 import time
+import datetime
 
 
 def main(verbose = False):
@@ -10,12 +11,14 @@ def main(verbose = False):
         print('Retrieving data from Google Analytics')
     analytics = ac.initialize_analyticsreporting()
     current_date = time.strftime("%Y-%m-%d")
-    response = ac.get_report(analytics, '2016-11-06', current_date)
+    k_days_ago = datetime.datetime.now() - datetime.timedelta(days=15)
+    start_date = str(k_days_ago.date())
+    response = ac.get_report(analytics, start_date, current_date)
     ga_all_data_dict, setups = ac.export_to_tuples(response)
     dest_path = os.path.join(os.getcwd(), 'build', 'images')
     if verbose:
         print('Saving data to %s' % dest_path)
-    tr.create_all_data(ga_all_data_dict, setups, '2016-11-06', current_date, save_path = dest_path,
+    tr.create_all_data(ga_all_data_dict, setups, start_date, current_date, save_path = dest_path,
                        add_stats='yes')
     if verbose:
         print('Done without errors.')
diff --git a/doc/ws_main.py b/doc/ws_main.py
index 58f6e98f..c5ccf205 100755
--- a/doc/ws_main.py
+++ b/doc/ws_main.py
@@ -987,6 +987,9 @@ def build(bld):
     bld(rule=convert_to_html_toc_book,
         source='trex_scapy_rpc_server.asciidoc waf.css', target='trex_scapy_rpc_server.html',scan=ascii_doc_scan);
 
+
+    bld(rule=convert_to_html_toc_book,
+        source='trex-analytics-howto.asciidoc waf.css', target='trex-analytics-howto.html',scan=ascii_doc_scan);
 
     bld(rule='${ASCIIDOC} -a stylesheet=${SRC[1].abspath()} -a icons=true -a toc2 -a max-width=55em -o ${TGT} ${SRC[0].abspath()}',
         source='vm_doc.asciidoc waf.css', target='vm_doc.html', scan=ascii_doc_scan)
-- 
cgit 1.2.3-korg
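
For reference, a minimal sketch of how the (df, setups) structure documented in the new docstring might be consumed; it is illustrative only and not part of the patch. The helper name summarize_results is hypothetical, and the tuple layout it assumes (test name, state, test type, MPPS, golden min, golden max, all strings) follows the docstring above.

# Hypothetical helper, not part of this commit. Walks df[setup][test_name],
# which maps to a list of (test_name, state, test_type, mpps, golden_min,
# golden_max) string tuples, and checks the average MPPS against the golden range.
def summarize_results(df, setups):
    for setup in setups:
        for test_name, results in df[setup].items():
            mpps = [float(r[3]) for r in results]
            avg = sum(mpps) / len(mpps)
            golden_min = float(results[0][4])
            golden_max = float(results[0][5])
            status = 'OK' if golden_min <= avg <= golden_max else 'OUT OF RANGE'
            print('%s | %s: avg %.2f MPPS (golden %.1f-%.1f) -> %s'
                  % (setup, test_name, avg, golden_min, golden_max, status))

# Example usage, relying on main() now returning (df, setups) as added in this patch:
#   df, setups = AnalyticsConnect.main()
#   summarize_results(df, setups)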