author    Tibor Frank <tifrank@cisco.com>    2018-08-01 15:26:25 +0200
committer Tibor Frank <tifrank@cisco.com>    2018-08-02 07:02:32 +0000
commit    9d81a06bfcf1017930a2cbd2e682b6255aae0dca (patch)
tree      4cef1171b2345e00a65fb6b314f58e927ccd9f1d
parent    1318b8f852bd09934263e8fe33cf48c5458689f9 (diff)
PAL: Remove unused code
Change-Id: I14f8f4ed12071f1c6b2c57c55b1add24c56619cc
Signed-off-by: Tibor Frank <tifrank@cisco.com>
(cherry picked from commit f4b2f3a853b246513e7517217b7efafbf70ccac7)
-rw-r--r--  resources/tools/presentation/generator_files.py  |   3
-rw-r--r--  resources/tools/presentation/generator_plots.py  |  35
-rw-r--r--  resources/tools/presentation/generator_tables.py | 173
3 files changed, 3 insertions, 208 deletions
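
For orientation, the patch leaves only the NDRPDR branch in the plot generators; the removed NDR/PDR paths are the legacy NDRPDRDISC handling that the TODO comments had already marked for deletion. Below is a minimal sketch of the retained pattern, assuming the data layout visible in the diff (test["type"], test["throughput"]["PDR"]["LOWER"]); the helper name collect_throughput, the symmetric NDR lookup, and the sample values are illustrative, not part of the patch.

# Illustrative sketch only (not part of the patch): after this change the
# plot generators branch solely on NDRPDR results.  The "-ndr" branch
# reading test["throughput"]["NDR"]["LOWER"] mirrors the "-pdr" branch
# visible in the diff and is an assumption here.
def collect_throughput(tests, plot_title):
    y_vals = {}
    for test in tests:
        y_vals.setdefault(test["parent"], [])
        try:
            if test["type"] in ("NDRPDR", ):
                if "-pdr" in plot_title.lower():
                    y_vals[test["parent"]].append(
                        test["throughput"]["PDR"]["LOWER"])
                elif "-ndr" in plot_title.lower():
                    y_vals[test["parent"]].append(
                        test["throughput"]["NDR"]["LOWER"])
        except (KeyError, TypeError):
            # Missing or partial results are skipped (illustrative choice).
            pass
    return y_vals

# Hypothetical input shaped like the filtered test data used by the generators:
tests = [{"parent": "64b-1t1c-ip4base", "type": "NDRPDR",
          "throughput": {"PDR": {"LOWER": 11800000.0},
                         "NDR": {"LOWER": 11500000.0}}}]
print(collect_throughput(tests, "ip4-pdr-throughput"))
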
diff --git a/resources/tools/presentation/generator_files.py b/resources/tools/presentation/generator_files.py
index e2bcf7835d..ef7ebea968 100644
--- a/resources/tools/presentation/generator_files.py
+++ b/resources/tools/presentation/generator_files.py
@@ -106,9 +106,6 @@ def file_test_results(file_spec, input_data):
with open(file_name, "w") as file_handler:
file_handler.write(rst_header)
for suite_longname, suite in suites.iteritems():
- # TODO: Remove when NDRPDRDISC tests are not used:
- if "ndrchk" in suite_longname or "pdrchk" in suite_longname:
- continue
if len(suite_longname.split(".")) <= file_spec["data-start-level"]:
continue
file_handler.write("\n{0}\n{1}\n".format(
diff --git a/resources/tools/presentation/generator_plots.py b/resources/tools/presentation/generator_plots.py
index 52348fe5d1..a90dd0a9c5 100644
--- a/resources/tools/presentation/generator_plots.py
+++ b/resources/tools/presentation/generator_plots.py
@@ -75,11 +75,7 @@ def plot_performance_box(plot, input_data):
if y_vals.get(test["parent"], None) is None:
y_vals[test["parent"]] = list()
try:
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- if test["type"] in ("NDR", "PDR"):
- y_vals[test["parent"]].\
- append(test["throughput"]["value"])
- elif test["type"] in ("NDRPDR", ):
+ if test["type"] in ("NDRPDR", ):
if "-pdr" in plot_title.lower():
y_vals[test["parent"]].\
append(test["throughput"]["PDR"]["LOWER"])
@@ -170,21 +166,7 @@ def plot_latency_box(plot, input_data):
list() # direction2, max
]
try:
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- if test["type"] in ("NDR", "PDR"):
- y_tmp_vals[test["parent"]][0].append(
- test["latency"]["direction1"]["50"]["min"])
- y_tmp_vals[test["parent"]][1].append(
- test["latency"]["direction1"]["50"]["avg"])
- y_tmp_vals[test["parent"]][2].append(
- test["latency"]["direction1"]["50"]["max"])
- y_tmp_vals[test["parent"]][3].append(
- test["latency"]["direction2"]["50"]["min"])
- y_tmp_vals[test["parent"]][4].append(
- test["latency"]["direction2"]["50"]["avg"])
- y_tmp_vals[test["parent"]][5].append(
- test["latency"]["direction2"]["50"]["max"])
- elif test["type"] in ("NDRPDR", ):
+ if test["type"] in ("NDRPDR", ):
if "-pdr" in plot_title.lower():
ttype = "PDR"
elif "-ndr" in plot_title.lower():
@@ -298,18 +280,7 @@ def plot_throughput_speedup_analysis(plot, input_data):
"2": list(),
"4": list()}
try:
- # TODO: Remove when definitely no NDRPDRDISC tests are used:
- if test["type"] in ("NDR", "PDR"):
- if "1T1C" in test["tags"]:
- throughput[test["parent"]]["1"].\
- append(test["throughput"]["value"])
- elif "2T2C" in test["tags"]:
- throughput[test["parent"]]["2"]. \
- append(test["throughput"]["value"])
- elif "4T4C" in test["tags"]:
- throughput[test["parent"]]["4"]. \
- append(test["throughput"]["value"])
- elif test["type"] in ("NDRPDR", ):
+ if test["type"] in ("NDRPDR", ):
if "-pdr" in plot_title.lower():
ttype = "PDR"
elif "-ndr" in plot_title.lower():
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
index 6d81f43ba4..c1c7fe01e3 100644
--- a/resources/tools/presentation/generator_tables.py
+++ b/resources/tools/presentation/generator_tables.py
@@ -180,179 +180,6 @@ def table_merged_details(table, input_data):
logging.info(" Done.")
-def table_performance_improvements(table, input_data):
- """Generate the table(s) with algorithm: table_performance_improvements
- specified in the specification file.
-
- # FIXME: Not used now.
-
- :param table: Table to generate.
- :param input_data: Data to process.
- :type table: pandas.Series
- :type input_data: InputData
- """
-
- def _write_line_to_file(file_handler, data):
- """Write a line to the .csv file.
-
- :param file_handler: File handler for the csv file. It must be open for
- writing text.
- :param data: Item to be written to the file.
- :type file_handler: BinaryIO
- :type data: list
- """
-
- line_lst = list()
- for item in data:
- if isinstance(item["data"], str):
- # Remove -?drdisc from the end
- if item["data"].endswith("drdisc"):
- item["data"] = item["data"][:-8]
- line_lst.append(item["data"])
- elif isinstance(item["data"], float):
- line_lst.append("{:.1f}".format(item["data"]))
- elif item["data"] is None:
- line_lst.append("")
- file_handler.write(",".join(line_lst) + "\n")
-
- logging.info(" Generating the table {0} ...".
- format(table.get("title", "")))
-
- # Read the template
- file_name = table.get("template", None)
- if file_name:
- try:
- tmpl = _read_csv_template(file_name)
- except PresentationError:
- logging.error(" The template '{0}' does not exist. Skipping the "
- "table.".format(file_name))
- return None
- else:
- logging.error("The template is not defined. Skipping the table.")
- return None
-
- # Transform the data
- logging.info(" Creating the data set for the {0} '{1}'.".
- format(table.get("type", ""), table.get("title", "")))
- data = input_data.filter_data(table)
-
- # Prepare the header of the tables
- header = list()
- for column in table["columns"]:
- header.append(column["title"])
-
- # Generate the data for the table according to the model in the table
- # specification
- tbl_lst = list()
- for tmpl_item in tmpl:
- tbl_item = list()
- for column in table["columns"]:
- cmd = column["data"].split(" ")[0]
- args = column["data"].split(" ")[1:]
- if cmd == "template":
- try:
- val = float(tmpl_item[int(args[0])])
- except ValueError:
- val = tmpl_item[int(args[0])]
- tbl_item.append({"data": val})
- elif cmd == "data":
- jobs = args[0:-1]
- operation = args[-1]
- data_lst = list()
- for job in jobs:
- for build in data[job]:
- try:
- data_lst.append(float(build[tmpl_item[0]]
- ["throughput"]["value"]))
- except (KeyError, TypeError):
- # No data, ignore
- continue
- if data_lst:
- tbl_item.append({"data": (eval(operation)(data_lst)) /
- 1000000})
- else:
- tbl_item.append({"data": None})
- elif cmd == "operation":
- operation = args[0]
- try:
- nr1 = float(tbl_item[int(args[1])]["data"])
- nr2 = float(tbl_item[int(args[2])]["data"])
- if nr1 and nr2:
- tbl_item.append({"data": eval(operation)(nr1, nr2)})
- else:
- tbl_item.append({"data": None})
- except (IndexError, ValueError, TypeError):
- logging.error("No data for {0}".format(tbl_item[0]["data"]))
- tbl_item.append({"data": None})
- continue
- else:
- logging.error("Not supported command {0}. Skipping the table.".
- format(cmd))
- return None
- tbl_lst.append(tbl_item)
-
- # Sort the table according to the relative change
- tbl_lst.sort(key=lambda rel: rel[-1]["data"], reverse=True)
-
- # Create the tables and write them to the files
- file_names = [
- "{0}_ndr_top{1}".format(table["output-file"], table["output-file-ext"]),
- "{0}_pdr_top{1}".format(table["output-file"], table["output-file-ext"]),
- "{0}_ndr_low{1}".format(table["output-file"], table["output-file-ext"]),
- "{0}_pdr_low{1}".format(table["output-file"], table["output-file-ext"])
- ]
-
- for file_name in file_names:
- logging.info(" Writing the file '{0}'".format(file_name))
- with open(file_name, "w") as file_handler:
- file_handler.write(",".join(header) + "\n")
- for item in tbl_lst:
- if isinstance(item[-1]["data"], float):
- rel_change = round(item[-1]["data"], 1)
- else:
- rel_change = item[-1]["data"]
- if "ndr_top" in file_name \
- and "ndr" in item[0]["data"] \
- and rel_change >= 10.0:
- _write_line_to_file(file_handler, item)
- elif "pdr_top" in file_name \
- and "pdr" in item[0]["data"] \
- and rel_change >= 10.0:
- _write_line_to_file(file_handler, item)
- elif "ndr_low" in file_name \
- and "ndr" in item[0]["data"] \
- and rel_change < 10.0:
- _write_line_to_file(file_handler, item)
- elif "pdr_low" in file_name \
- and "pdr" in item[0]["data"] \
- and rel_change < 10.0:
- _write_line_to_file(file_handler, item)
-
- logging.info(" Done.")
-
-
-def _read_csv_template(file_name):
- """Read the template from a .csv file.
-
- # FIXME: Not used now.
-
- :param file_name: Name / full path / relative path of the file to read.
- :type file_name: str
- :returns: Data from the template as list (lines) of lists (items on line).
- :rtype: list
- :raises: PresentationError if it is not possible to read the file.
- """
-
- try:
- with open(file_name, 'r') as csv_file:
- tmpl_data = list()
- for line in csv_file:
- tmpl_data.append(line[:-1].split(","))
- return tmpl_data
- except IOError as err:
- raise PresentationError(str(err), level="ERROR")
-
-
def table_performance_comparison(table, input_data):
"""Generate the table(s) with algorithm: table_performance_comparison
specified in the specification file.