-rw-r--r--  docs/report/introduction/methodology_trex_traffic_generator.rst | 42
-rw-r--r--  resources/tools/presentation/generator_tables.py                | 69
2 files changed, 64 insertions(+), 47 deletions(-)
diff --git a/docs/report/introduction/methodology_trex_traffic_generator.rst b/docs/report/introduction/methodology_trex_traffic_generator.rst
index cae65ab1c8..dfa496b3ad 100644
--- a/docs/report/introduction/methodology_trex_traffic_generator.rst
+++ b/docs/report/introduction/methodology_trex_traffic_generator.rst
@@ -11,32 +11,34 @@ throughputs using MLRsearch and to measure maximum transfer rate in MRR tests.
 TRex is installed and run on the TG compute node. The typical procedure is:
 
 - TRex configuration is set in its configuration file
+
   ::
 
-  $ sudo -E -S sh -c 'cat << EOF > /etc/trex_cfg.yaml
-  - version: 2
-    c: 15
-    limit_memory: 8192
-    interfaces: ["${pci1}","${pci2}"]
-    port_info:
-        - dest_mac: [${dest_mac1}]
-          src_mac: [${src_mac1}]
-        - dest_mac: [${dest_mac2}]
-          src_mac: [${src_mac2}]
-    platform :
-        master_thread_id: 0
-        latency_thread_id: 16
-        dual_if:
-            - socket: 0
-              threads: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
-  EOF'
+    $ sudo -E -S sh -c 'cat << EOF > /etc/trex_cfg.yaml
+    - version: 2
+      c: 15
+      limit_memory: 8192
+      interfaces: ["${pci1}","${pci2}"]
+      port_info:
+          - dest_mac: [${dest_mac1}]
+            src_mac: [${src_mac1}]
+          - dest_mac: [${dest_mac2}]
+            src_mac: [${src_mac2}]
+      platform :
+          master_thread_id: 0
+          latency_thread_id: 16
+          dual_if:
+              - socket: 0
+                threads: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
+    EOF'
 
 - TRex is started in the interactive mode as a background service
+
   ::
 
-  $ sh -c 'cd <t-rex-install-dir>/scripts/ && \
-    sudo nohup ./t-rex-64 -i --prefix $(hostname) --hdrh --no-scapy-server \
-    > /tmp/trex.log 2>&1 &' > /dev/null
+    $ sh -c 'cd <t-rex-install-dir>/scripts/ && \
+      sudo nohup ./t-rex-64 -i --prefix $(hostname) --hdrh --no-scapy-server \
+      > /tmp/trex.log 2>&1 &' > /dev/null
 
 - There are traffic streams dynamically prepared for each test, based on traffic
   profiles. The traffic is sent and the statistics obtained using API
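
The RST hunk above re-indents the two command examples so that they render as literal blocks in the report. For orientation, the documented bring-up can also be scripted end to end; the sketch below is illustrative only and is not part of CSIT. The install directory and the pci/mac keyword arguments are placeholders for <t-rex-install-dir> and the ${pci*}/${*mac*} shell variables used in the commands above.

::

    import subprocess

    CFG_TEMPLATE = u"""- version: 2
      c: 15
      limit_memory: 8192
      interfaces: ["{pci1}","{pci2}"]
      port_info:
          - dest_mac: [{dest_mac1}]
            src_mac: [{src_mac1}]
          - dest_mac: [{dest_mac2}]
            src_mac: [{src_mac2}]
      platform :
          master_thread_id: 0
          latency_thread_id: 16
          dual_if:
              - socket: 0
                threads: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
    """


    def start_trex(install_dir, **ports):
        """Write the TRex config and start t-rex-64 as a background service."""
        # Step 1: render and write /etc/trex_cfg.yaml (the doc does this with
        # sudo and a heredoc).
        subprocess.run(
            [u"sudo", u"tee", u"/etc/trex_cfg.yaml"],
            input=CFG_TEMPLATE.format(**ports),
            text=True, check=True, stdout=subprocess.DEVNULL,
        )
        # Step 2: start TRex in interactive mode, detached, logging to
        # /tmp/trex.log, mirroring the documented shell command.
        subprocess.run(
            f"cd {install_dir}/scripts/ && "
            f"sudo nohup ./t-rex-64 -i --prefix $(hostname) --hdrh --no-scapy-server "
            f"> /tmp/trex.log 2>&1 &",
            shell=True, check=True,
        )

A call would look like start_trex(u"/opt/trex-core", pci1=u"0000:18:00.0", ...), with every value taken from the testbed topology; the path and addresses here are examples only.
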
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
index c06e492ec8..3afa3830fa 100644
--- a/resources/tools/presentation/generator_tables.py
+++ b/resources/tools/presentation/generator_tables.py
@@ -594,6 +594,7 @@ def _tpc_generate_html_table(header, data, out_file_name, legend=u"",
         path = u"_tmp/src/vpp_performance_tests/comparisons/"
     else:
         path = u"_tmp/src/dpdk_performance_tests/comparisons/"
+    logging.info(f" Writing the HTML file to {path}{file_name}.rst")
     with open(f"{path}{file_name}.rst", u"wt") as rst_file:
         rst_file.write(
             u"\n"
@@ -612,11 +613,22 @@
             f'</iframe>\n\n'
         )
 
-        # TODO: Use html (rst) list for legend and footnote
         if legend:
-            rst_file.write(legend[1:].replace(u"\n", u" |br| "))
+            try:
+                itm_lst = legend[1:-2].split(u"\n")
+                rst_file.write(
+                    f"{itm_lst[0]}\n\n- " + u'\n- '.join(itm_lst[1:]) + u"\n\n"
+                )
+            except IndexError as err:
+                logging.error(f"Legend cannot be written to html file\n{err}")
         if footnote:
-            rst_file.write(footnote.replace(u"\n", u" |br| ")[1:])
+            try:
+                itm_lst = footnote[1:].split(u"\n")
+                rst_file.write(
+                    f"{itm_lst[0]}\n\n- " + u'\n- '.join(itm_lst[1:]) + u"\n\n"
+                )
+            except IndexError as err:
+                logging.error(f"Footnote cannot be written to html file\n{err}")
 
 
 def table_soak_vs_ndr(table, input_data):
@@ -777,17 +789,17 @@
     tbl_lst.sort(key=lambda rel: rel[-1], reverse=True)
 
     # Generate csv tables:
-    csv_file = f"{table[u'output-file']}.csv"
-    with open(csv_file, u"wt") as file_handler:
+    csv_file_name = f"{table[u'output-file']}.csv"
+    with open(csv_file_name, u"wt") as file_handler:
         file_handler.write(header_str)
         for test in tbl_lst:
             file_handler.write(u";".join([str(item) for item in test]) + u"\n")
 
     convert_csv_to_pretty_txt(
-        csv_file, f"{table[u'output-file']}.txt", delimiter=u";"
+        csv_file_name, f"{table[u'output-file']}.txt", delimiter=u";"
     )
-    with open(f"{table[u'output-file']}.txt", u'a') as txt_file:
-        txt_file.write(legend)
+    with open(f"{table[u'output-file']}.txt", u'a') as file_handler:
+        file_handler.write(legend)
 
     # Generate html table:
     _tpc_generate_html_table(
@@ -1737,12 +1749,13 @@ def table_comparison(table, input_data):
     legend = u"\n" + u"\n".join(legend_lst) + u"\n"
 
     footnote = u""
-    for rca in rcas:
-        footnote += f"\n{rca[u'title']}:\n"
-        footnote += rca[u"data"].get(u"footnote", u"")
+    if rcas:
+        footnote += u"\nRCA:\n"
+        for rca in rcas:
+            footnote += rca[u"data"].get(u"footnote", u"")
 
-    csv_file = f"{table[u'output-file']}-csv.csv"
-    with open(csv_file, u"wt", encoding='utf-8') as file_handler:
+    csv_file_name = f"{table[u'output-file']}-csv.csv"
+    with open(csv_file_name, u"wt", encoding='utf-8') as file_handler:
         file_handler.write(
             u",".join([f'"{itm}"' for itm in header_csv]) + u"\n"
         )
@@ -1808,19 +1821,21 @@
     header.extend([rca[u"title"] for rca in rcas])
 
     # Generate csv tables:
-    csv_file = f"{table[u'output-file']}.csv"
-    with open(csv_file, u"wt", encoding='utf-8') as file_handler:
+    csv_file_name = f"{table[u'output-file']}.csv"
+    logging.info(f" Writing the file {csv_file_name}")
+    with open(csv_file_name, u"wt", encoding='utf-8') as file_handler:
         file_handler.write(u";".join(header) + u"\n")
         for test in tbl_final:
             file_handler.write(u";".join([str(item) for item in test]) + u"\n")
 
     # Generate txt table:
     txt_file_name = f"{table[u'output-file']}.txt"
-    convert_csv_to_pretty_txt(csv_file, txt_file_name, delimiter=u";")
+    logging.info(f" Writing the file {txt_file_name}")
+    convert_csv_to_pretty_txt(csv_file_name, txt_file_name, delimiter=u";")
 
-    with open(txt_file_name, u'a', encoding='utf-8') as txt_file:
-        txt_file.write(legend)
-        txt_file.write(footnote)
+    with open(txt_file_name, u'a', encoding='utf-8') as file_handler:
+        file_handler.write(legend)
+        file_handler.write(footnote)
 
     # Generate html table:
     _tpc_generate_html_table(
f"{table[u'output-file']}.txt" - convert_csv_to_pretty_txt(csv_file, txt_file_name, delimiter=u";") + logging.info(f" Writing the file {txt_file_name}") + convert_csv_to_pretty_txt(csv_file_name, txt_file_name, delimiter=u";") - with open(txt_file_name, u'a', encoding='utf-8') as txt_file: - txt_file.write(legend) - txt_file.write(footnote) + with open(txt_file_name, u'a', encoding='utf-8') as file_handler: + file_handler.write(legend) + file_handler.write(footnote) # Generate html table: _tpc_generate_html_table( @@ -1969,9 +1984,9 @@ def table_weekly_comparison(table, in_data): tbl_lst.extend(tbl_lst_none) # Generate csv table: - csv_file = f"{table[u'output-file']}.csv" - logging.info(f" Writing the file {csv_file}") - with open(csv_file, u"wt", encoding='utf-8') as file_handler: + csv_file_name = f"{table[u'output-file']}.csv" + logging.info(f" Writing the file {csv_file_name}") + with open(csv_file_name, u"wt", encoding='utf-8') as file_handler: for hdr in header: file_handler.write(u",".join(hdr) + u"\n") for test in tbl_lst: @@ -1982,18 +1997,18 @@ def table_weekly_comparison(table, in_data): ] ) + u"\n") - txt_file = f"{table[u'output-file']}.txt" - logging.info(f" Writing the file {txt_file}") - convert_csv_to_pretty_txt(csv_file, txt_file, delimiter=u",") + txt_file_name = f"{table[u'output-file']}.txt" + logging.info(f" Writing the file {txt_file_name}") + convert_csv_to_pretty_txt(csv_file_name, txt_file_name, delimiter=u",") # Reorganize header in txt table txt_table = list() - with open(txt_file, u"rt", encoding='utf-8') as file_handler: + with open(txt_file_name, u"rt", encoding='utf-8') as file_handler: for line in file_handler: txt_table.append(line) try: txt_table.insert(5, txt_table.pop(2)) - with open(txt_file, u"wt", encoding='utf-8') as file_handler: + with open(txt_file_name, u"wt", encoding='utf-8') as file_handler: file_handler.writelines(txt_table) except IndexError: pass |