path: root/resources/tools/presentation/generator_tables.py
author     Tibor Frank <tifrank@cisco.com>  2017-08-11 10:44:36 +0200
committer  Tibor Frank <tifrank@cisco.com>  2017-10-11 15:21:02 +0200
commit     eecad36d7d2275fa47fbcab40dbcf56108ab0a51 (patch)
tree       b036a5b06035f5c36c8bb5bc279fe80925f2f8f8 /resources/tools/presentation/generator_tables.py
parent     b62f0a99d13605a62f64f6ae9ac9aa9aae1755cb (diff)
CSIT-755: Presentation and analytics layer
- CSIT-760: Configuration - real example
- CSIT-774: Implementation - parse configuration
- CSIT-779: Implementation - set environment
- CSIT-780: Implementation - download data
- CSIT-783: Implementation - debug mode
- CSIT-761: Implementation - Data pre-processing - parse input files
- CSIT-784: Implementation - Data pre-processing - store the data, access to data
- CSIT-789: Implementation - Data pre-processing - extract Documentation of the suite
- CSIT-757: Low Level Design
- CSIT-788: Implementation - Data pre-processing - extract VAT history and show runtime
- CSIT-785: Implementation - Data filtering
- CSIT-763: Presentation - tables
- CSIT-804: Presentation - files
- CSIT-762: Presentation - plots
- LLD: API + functional diagram
- CSIT-807: Element's models
- CSIT-813: Process static content
- CSIT-812: Report generation
- CSIT-764: Integration to CSIT
- CSIT-822: Archiving
- CSIT-790: Documentation - configuration of the input data is the same as for the 17.07 report

Change-Id: I6fd1eb1df4af99eaf91925282cdee1c892698c59
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Diffstat (limited to 'resources/tools/presentation/generator_tables.py')
-rw-r--r--    resources/tools/presentation/generator_tables.py    251
1 files changed, 251 insertions, 0 deletions
diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py
new file mode 100644
index 0000000000..367e8c9878
--- /dev/null
+++ b/resources/tools/presentation/generator_tables.py
@@ -0,0 +1,251 @@
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Algorithms to generate tables.
+"""
+
+
+import logging
+from string import replace
+
+from errors import PresentationError
+from utils import mean, stdev, relative_change
+
+
+def generate_tables(spec, data):
+ """Generate all tables specified in the specification file.
+
+ :param spec: Specification read from the specification file.
+ :param data: Data to process.
+ :type spec: Specification
+ :type data: InputData
+ """
+
+ logging.info("Generating the tables ...")
+ for table in spec.tables:
+ try:
+ eval(table["algorithm"])(table, data)
+ except NameError:
+ logging.error("The algorithm '{0}' is not defined.".
+ format(table["algorithm"]))
+ logging.info("Done.")
+
+
+def table_details(table, input_data):
+ """Generate the table(s) with algorithm: table_detailed_test_results
+ specified in the specification file.
+
+ :param table: Table to generate.
+ :param input_data: Data to process.
+ :type table: pandas.Series
+ :type input_data: InputData
+ """
+
+ logging.info(" Generating the table {0} ...".
+ format(table.get("title", "")))
+
+ # Transform the data
+ data = input_data.filter_data(table)
+
+ # Prepare the header of the tables
+ header = list()
+ for column in table["columns"]:
+ header.append('"{0}"'.format(str(column["title"]).replace('"', '""')))
+
+ # Generate the data for the table according to the model in the table
+ # specification
+
+ job = table["data"].keys()[0]
+ build = str(table["data"][job][0])
+ for suite_longname, suite in input_data.suites(job, build).iteritems():
+ # Generate data
+ suite_name = suite["name"]
+ table_lst = list()
+ for test in data[job][build].keys():
+ if data[job][build][test]["parent"] in suite_name:
+ row_lst = list()
+ for column in table["columns"]:
+ try:
+ col_data = str(data[job][build][test][column["data"].
+ split(" ")[1]]).replace('"', '""')
+ if column["data"].split(" ")[1] in ("vat-history",
+ "show-run"):
+ col_data = replace(col_data, " |br| ", "",
+ maxreplace=1)
+ col_data = " |prein| {0} |preout| ".\
+ format(col_data[:-5])
+ row_lst.append('"{0}"'.format(col_data))
+ except KeyError:
+ row_lst.append("No data")
+ table_lst.append(row_lst)
+
+ # Write the data to file
+ if table_lst:
+ file_name = "{0}_{1}{2}".format(table["output-file"], suite_name,
+ table["output-file-ext"])
+ logging.info(" Writing file: '{}'".format(file_name))
+ with open(file_name, "w") as file_handler:
+ file_handler.write(",".join(header) + "\n")
+ for item in table_lst:
+ file_handler.write(",".join(item) + "\n")
+
+ logging.info(" Done.")
+
+
+def table_performance_improvements(table, input_data):
+ """Generate the table(s) with algorithm: table_performance_improvements
+ specified in the specification file.
+
+ :param table: Table to generate.
+ :param input_data: Data to process.
+ :type table: pandas.Series
+ :type input_data: InputData
+ """
+
+ def _write_line_to_file(file_handler, data):
+ """Write a line to the .csv file.
+
+ :param file_handler: File handler for the csv file. It must be open for
+ writing text.
+ :param data: Item to be written to the file.
+        :type file_handler: file
+ :type data: list
+ """
+
+ line_lst = list()
+ for item in data:
+ if isinstance(item["data"], str):
+ line_lst.append(item["data"])
+ elif isinstance(item["data"], float):
+ line_lst.append("{:.1f}".format(item["data"]))
+ file_handler.write(",".join(line_lst) + "\n")
+
+ logging.info(" Generating the table {0} ...".
+ format(table.get("title", "")))
+
+ # Read the template
+ file_name = table.get("template", None)
+ if file_name:
+ try:
+ tmpl = _read_csv_template(file_name)
+ except PresentationError:
+ logging.error(" The template '{0}' does not exist. Skipping the "
+ "table.".format(file_name))
+ return None
+ else:
+ logging.error("The template is not defined. Skipping the table.")
+ return None
+
+ # Transform the data
+ data = input_data.filter_data(table)
+
+ # Prepare the header of the tables
+ header = list()
+ for column in table["columns"]:
+ header.append(column["title"])
+
+ # Generate the data for the table according to the model in the table
+ # specification
+ tbl_lst = list()
+ for tmpl_item in tmpl:
+ tbl_item = list()
+ for column in table["columns"]:
+ cmd = column["data"].split(" ")[0]
+ args = column["data"].split(" ")[1:]
+ if cmd == "template":
+ try:
+ val = float(tmpl_item[int(args[0])])
+ except ValueError:
+ val = tmpl_item[int(args[0])]
+ tbl_item.append({"data": val})
+ elif cmd == "data":
+ job = args[0]
+ operation = args[1]
+ data_lst = list()
+ for build in data[job]:
+ try:
+ data_lst.append(float(build[tmpl_item[0]]["throughput"]
+ ["value"]) / 1000000)
+ except (KeyError, TypeError):
+ # No data, ignore
+ pass
+ if data_lst:
+ tbl_item.append({"data": eval(operation)(data_lst)})
+ elif cmd == "operation":
+ operation = args[0]
+ nr1 = tbl_item[int(args[1])]["data"]
+ nr2 = tbl_item[int(args[2])]["data"]
+ if nr1 and nr2:
+ tbl_item.append({"data": eval(operation)(nr1, nr2)})
+ else:
+ tbl_item.append({"data": None})
+ else:
+ logging.error("Not supported command {0}. Skipping the table.".
+ format(cmd))
+ return None
+ tbl_lst.append(tbl_item)
+
+ # Sort the table according to the relative change
+ tbl_lst.sort(key=lambda rel: rel[-1]["data"], reverse=True)
+
+ # Create the tables and write them to the files
+ file_names = [
+ "{0}_ndr_top{1}".format(table["output-file"], table["output-file-ext"]),
+ "{0}_pdr_top{1}".format(table["output-file"], table["output-file-ext"]),
+ "{0}_ndr_low{1}".format(table["output-file"], table["output-file-ext"]),
+ "{0}_pdr_low{1}".format(table["output-file"], table["output-file-ext"])
+ ]
+
+ for file_name in file_names:
+ logging.info(" Writing the file '{0}'".format(file_name))
+ with open(file_name, "w") as file_handler:
+ file_handler.write(",".join(header) + "\n")
+ for item in tbl_lst:
+ if "ndr_top" in file_name \
+ and "ndr" in item[1]["data"] \
+ and item[-1]["data"] >= 10:
+ _write_line_to_file(file_handler, item)
+ elif "pdr_top" in file_name \
+ and "pdr" in item[1]["data"] \
+ and item[-1]["data"] >= 10:
+ _write_line_to_file(file_handler, item)
+ elif "ndr_low" in file_name \
+ and "ndr" in item[1]["data"] \
+ and item[-1]["data"] < 10:
+ _write_line_to_file(file_handler, item)
+ elif "pdr_low" in file_name \
+ and "pdr" in item[1]["data"] \
+ and item[-1]["data"] < 10:
+ _write_line_to_file(file_handler, item)
+
+ logging.info(" Done.")
+
+
+def _read_csv_template(file_name):
+ """Read the template from a .csv file.
+
+ :param file_name: Name / full path / relative path of the file to read.
+ :type file_name: str
+ :returns: Data from the template as list (lines) of lists (items on line).
+ :rtype: list
+ :raises: PresentationError if it is not possible to read the file.
+ """
+
+ try:
+ with open(file_name, 'r') as csv_file:
+ tmpl_data = list()
+ for line in csv_file:
+ tmpl_data.append(line[:-1].split(","))
+ return tmpl_data
+ except IOError as err:
+ raise PresentationError(str(err), level="ERROR")
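
The dispatch in generate_tables() resolves the string stored under each table's "algorithm" key to a function in this module via eval(). A minimal sketch of that contract, assuming a hypothetical specification entry (the key names match the ones read by the code above; the title and column are invented):

    # Hedged sketch, not part of the commit: spec-driven table dispatch.
    def table_details(table, data):
        print("generating table '{0}'".format(table.get("title", "")))

    TABLES = [
        {"algorithm": "table_details",                       # name of the generator function
         "title": "Detailed Test Results",                   # hypothetical title
         "columns": [{"title": "Name", "data": "data name"}]},
    ]

    for table in TABLES:
        try:
            eval(table["algorithm"])(table, None)            # same call pattern as generate_tables()
        except NameError:
            print("The algorithm '{0}' is not defined.".format(table["algorithm"]))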
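table_performance_improvements() builds every column from a small command string: "template <idx>" copies a column of the CSV template, "data <job> <operation>" aggregates measured throughput, and "operation <func> <idx1> <idx2>" derives a value from two already-computed row items. The column layout and job name below are illustrative only; the grammar is the one parsed above:

    # Hedged sketch of the per-column command strings (titles and job name are hypothetical).
    columns = [
        {"title": "Test case",           "data": "template 1"},                        # copy template column 1
        {"title": "17.07 mean [Mpps]",   "data": "template 2"},                        # reference value from the template
        {"title": "17.10 mean [Mpps]",   "data": "data csit-vpp-perf-1710-all mean"},  # mean() of measured throughput
        {"title": "Relative change [%]", "data": "operation relative_change 1 2"},     # relative_change() of row items 1 and 2 (0-based)
    ]

    for column in columns:
        parts = column["data"].split(" ")
        print("{0}: command={1}, args={2}".format(column["title"], parts[0], parts[1:]))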
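The final split into the four output files keys on the test name held in the second row item ("ndr" vs. "pdr") and on the relative change in the last item (a value >= 10 goes to the *_top file, < 10 to *_low). A small illustration with made-up rows:

    # Hedged sketch of the ndr/pdr and top/low routing (test names are invented).
    rows = [
        [{"data": "ip4base"}, {"data": "64b-1t1c-ethip4-ip4base-ndrdisc"}, {"data": 12.3}],
        [{"data": "ip4base"}, {"data": "64b-1t1c-ethip4-ip4base-pdrdisc"}, {"data": 4.0}],
    ]

    for item in rows:
        kind = "ndr" if "ndr" in item[1]["data"] else "pdr"
        bucket = "top" if item[-1]["data"] >= 10 else "low"
        print("{0} -> {1}_{2} file".format(item[1]["data"], kind, bucket))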