Diffstat (limited to 'resources/tools/presentation/input_data_parser.py')
-rw-r--r--  resources/tools/presentation/input_data_parser.py  75
1 file changed, 46 insertions(+), 29 deletions(-)
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 0ad07a95db..e12e2fb8df 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -29,6 +29,8 @@ from collections import OrderedDict
from string import replace
from os import remove
+from input_data_files import download_and_unzip_data_file
+
class ExecutionChecker(ResultVisitor):
"""Class to traverse through the test suite structure.
@@ -179,7 +181,7 @@ class ExecutionChecker(ResultVisitor):
REGEX_MRR = re.compile(r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
r'tx\s(\d*),\srx\s(\d*)')
- def __init__(self, **metadata):
+ def __init__(self, metadata):
"""Initialisation.
:param metadata: Key-value pairs to be included in "metadata" part of
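For reference, the unchanged REGEX_MRR pattern in this hunk extracts the trial duration and the tx/rx packet counts from an MRR result message. A minimal sketch of the intended match; the sample message text below is an assumption, not a line taken from a real output.xml:

    import re

    REGEX_MRR = re.compile(r'MaxReceivedRate_Results\s\[pkts/(\d*)sec\]:\s'
                           r'tx\s(\d*),\srx\s(\d*)')

    # Hypothetical message; real ones are read from robot's output.xml.
    msg = "MaxReceivedRate_Results [pkts/10sec]: tx 120000, rx 118500"
    duration, tx, rx = (int(g) for g in REGEX_MRR.search(msg).groups())
    # duration == 10, tx == 120000, rx == 118500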
@@ -251,8 +253,6 @@ class ExecutionChecker(ResultVisitor):
self._data["metadata"]["version"] = self._version
self._msg_type = None
- logging.info(" VPP version: {0}".format(self._version))
-
def _get_vat_history(self, msg):
"""Called when extraction of VAT command history is required.
@@ -748,21 +748,29 @@ class InputData(object):
return self.data[job][build]["tests"]
@staticmethod
- def _parse_tests(job, build):
+ def _parse_tests(job, build, get_timestamp=False):
"""Process data from robot output.xml file and return JSON structured
data.
:param job: The name of job which build output data will be processed.
:param build: The build which output data will be processed.
+ :param get_timestamp: If True, timestamp is read from the xml source
+ file.
:type job: str
:type build: dict
+ :type get_timestamp: bool
:returns: JSON data structure.
:rtype: dict
"""
- tree = ET.parse(build["file-name"])
- root = tree.getroot()
- generated = root.attrib["generated"]
+ metadata = {
+ "job": job,
+ "build": build
+ }
+ if get_timestamp:
+ tree = ET.parse(build["file-name"])
+ root = tree.getroot()
+ metadata["generated"] = root.attrib["generated"]
with open(build["file-name"], 'r') as data_file:
try:
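The reworked _parse_tests above builds the metadata dict first and touches the xml tree only when the timestamp is actually requested. A stand-alone sketch of that preamble; the helper name build_metadata is hypothetical and the timestamp value is illustrative:

    import xml.etree.ElementTree as ET

    def build_metadata(job, build, get_timestamp=False):
        # Mirror the new _parse_tests preamble: assemble metadata first,
        # then optionally parse the xml once for the run timestamp.
        metadata = {"job": job, "build": build}
        if get_timestamp:
            # Robot Framework stamps the run time on the root element,
            # e.g. <robot generated="20180101 12:34:56.789" ...>.
            root = ET.parse(build["file-name"]).getroot()
            metadata["generated"] = root.attrib["generated"]
        return metadata

    # The dict then feeds the constructor's new positional signature:
    # checker = ExecutionChecker(metadata)  # formerly ExecutionChecker(**kwargs)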
@@ -771,46 +779,55 @@ class InputData(object):
logging.error("Error occurred while parsing output.xml: {0}".
format(err))
return None
- checker = ExecutionChecker(job=job, build=build, generated=generated)
+ checker = ExecutionChecker(metadata)
result.visit(checker)
return checker.data
- def read_data(self):
- """Parse input data from input files and store in pandas' Series.
+ def download_and_parse_data(self, get_timestamp=False):
+ """Download the input data files, parse input data from input files and
+ store in pandas' Series.
+
+ :param get_timestamp: If True, timestamp is read from the xml source
+ file.
+ :type get_timestamp: bool
"""
- logging.info("Parsing input files ...")
+ logging.info("Downloading and parsing input files ...")
job_data = dict()
for job, builds in self._cfg.builds.items():
- logging.info(" Extracting data from the job '{0}' ...'".
+ logging.info("  Processing data from the job '{0}' ...".
format(job))
builds_data = dict()
for build in builds:
- if build["status"] == "failed" \
- or build["status"] == "not found":
+ logging.info(" Processing the build '{0}'".
+ format(build["build"]))
+ self._cfg.set_input_state(job, build["build"], "failed")
+ if not download_and_unzip_data_file(self._cfg, job, build):
+ logging.error("It is not possible to download the input "
+ "data file from the job '{job}', build "
+ "'{build}', or it is damaged. Skipped.".
+ format(job=job, build=build["build"]))
continue
- logging.info(" Extracting data from the build '{0}'".
+
+ logging.info(" Processing data from the build '{0}' ...".
format(build["build"]))
- logging.info(" Processing the file '{0}'".
- format(build["file-name"]))
+ data = InputData._parse_tests(job, build,
+ get_timestamp=get_timestamp)
+ if data is None:
+ logging.error("Input data file from the job '{job}', build "
+ "'{build}' is damaged. Skipped.".
+ format(job=job, build=build["build"]))
+ continue
- data = InputData._parse_tests(job, build)
+ self._cfg.set_input_state(job, build["build"], "processed")
- logging.info(" Removing the file '{0}'".
- format(build["file-name"]))
try:
remove(build["file-name"])
- build["status"] = "processed"
except OSError as err:
- logging.error(" Cannot remove the file '{0}': {1}".
+ logging.error("Cannot remove the file '{0}': {1}".
format(build["file-name"], err))
- if data is None:
- logging.error("Input data file from the job '{job}', build "
- "'{build}' is damaged. Skipped.".
- format(job=job, build=build["build"]))
- continue
build_data = pd.Series({
"metadata": pd.Series(data["metadata"].values(),
@@ -818,9 +835,9 @@ class InputData(object):
index=data["metadata"].keys()),
"suites": pd.Series(data["suites"].values(),
index=data["suites"].keys()),
"tests": pd.Series(data["tests"].values(),
- index=data["tests"].keys()),
- })
+ index=data["tests"].keys())})
builds_data[str(build["build"])] = build_data
+ build["status"] = "processed"
logging.info(" Done.")
job_data[job] = pd.Series(builds_data.values(),
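Taken together, this hunk replaces the old read_data() flow: each build's file is downloaded via download_and_unzip_data_file(), parsed, tracked through set_input_state(), and removed afterwards. A usage sketch, assuming an already constructed InputData instance named input_data and made-up job/build keys:

    input_data.download_and_parse_data(get_timestamp=True)

    # Each successfully processed build is stored as a pandas Series with
    # the three keys assembled above: "metadata", "suites" and "tests".
    build_series = input_data.data["csit-vpp-perf-check"]["100"]
    print(build_series["metadata"])   # includes "generated" when requested
    print(build_series["tests"])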