author     Tibor Frank <tifrank@cisco.com>  2022-08-09 10:02:29 +0200
committer  Tibor Frank <tifrank@cisco.com>  2022-08-09 10:02:29 +0200
commit     b7fccd32e5139b98976a7b4f44ef7eed9248f1b5 (patch)
tree       2c6ebe82cee70081b507d71282d5bd644415b8b2 /resources/tools
parent     c98961835d8bab39bdb1cbd6aa15a28f8d4fa163 (diff)
PAL: Update to RF 5.0 and pandas.Series
Change-Id: I2d4bfc6be02e66b6242ad688d092516bca0eb739
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Diffstat (limited to 'resources/tools')
-rw-r--r--  resources/tools/presentation/input_data_parser.py  | 33
1 file changed, 15 insertions(+), 18 deletions(-)
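
The pandas part of this change replaces every bare pd.Series() constructor with
pd.Series(dtype="object"). The snippet below is a minimal sketch, not part of the
patch, illustrating the motivation on the pandas 1.x line this code targets:
creating an empty Series without an explicit dtype emits a DeprecationWarning
(the default dtype is scheduled to change from float64 to object), while passing
dtype="object" keeps PAL's nested object store warning-free.

    import warnings
    import pandas as pd

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        implicit = pd.Series()                # pandas 1.x warns: default dtype will change
        explicit = pd.Series(dtype="object")  # explicit dtype, no warning

    print([str(w.message) for w in caught])   # the DeprecationWarning text, if emitted
    print(explicit.dtype)                     # object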
diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py
index 94081425de..7f1ccd368b 100644
--- a/resources/tools/presentation/input_data_parser.py
+++ b/resources/tools/presentation/input_data_parser.py
@@ -1043,7 +1043,7 @@ class ExecutionChecker(ResultVisitor):
u"level": len(suite.longname.split(u"."))
}
- suite.body.visit(self)
+ suite.setup.visit(self)
def end_suite(self, suite):
"""Called when suite ends.
@@ -1211,11 +1211,6 @@ class ExecutionChecker(ResultVisitor):
if test.status == u"PASS":
test_result[u"result"], test_result[u"status"] = \
self._get_hoststack_data(test.message, tags)
- # elif u"TCP" in tags: # This might be not used
- # test_result[u"type"] = u"TCP"
- # if test.status == u"PASS":
- # groups = re.search(self.REGEX_TCP, test.message)
- # test_result[u"result"] = int(groups.group(2))
elif u"RECONF" in tags:
test_result[u"type"] = u"RECONF"
if test.status == u"PASS":
@@ -1455,7 +1450,7 @@ class InputData:
self._for_output = for_output
# Data store:
- self._input_data = pd.Series()
+ self._input_data = pd.Series(dtype="object")
@property
def data(self):
@@ -1670,7 +1665,7 @@ class InputData:
})
if self._input_data.get(job, None) is None:
- self._input_data[job] = pd.Series()
+ self._input_data[job] = pd.Series(dtype="object")
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_file_name(
job, build_nr, result[u"build"][u"file-name"]
@@ -1749,7 +1744,7 @@ class InputData:
})
if self._input_data.get(job, None) is None:
- self._input_data[job] = pd.Series()
+ self._input_data[job] = pd.Series(dtype="object")
self._input_data[job][str(build_nr)] = build_data
self._cfg.set_input_state(job, build_nr, u"processed")
@@ -1906,12 +1901,12 @@ class InputData:
params.extend((u"type", u"status"))
data_to_filter = data if data else element[u"data"]
- data = pd.Series()
+ data = pd.Series(dtype="object")
try:
for job, builds in data_to_filter.items():
- data[job] = pd.Series()
+ data[job] = pd.Series(dtype="object")
for build in builds:
- data[job][str(build)] = pd.Series()
+ data[job][str(build)] = pd.Series(dtype="object")
try:
data_dict = dict(
self.data[job][str(build)][data_set].items())
@@ -1922,7 +1917,8 @@ class InputData:
for test_id, test_data in data_dict.items():
if eval(cond, {u"tags": test_data.get(u"tags", u"")}):
- data[job][str(build)][test_id] = pd.Series()
+ data[job][str(build)][test_id] = \
+ pd.Series(dtype="object")
if params is None:
for param, val in test_data.items():
data[job][str(build)][test_id][param] = val
@@ -2006,12 +2002,12 @@ class InputData:
else:
tests = include
- data = pd.Series()
+ data = pd.Series(dtype="object")
try:
for job, builds in element[u"data"].items():
- data[job] = pd.Series()
+ data[job] = pd.Series(dtype="object")
for build in builds:
- data[job][str(build)] = pd.Series()
+ data[job][str(build)] = pd.Series(dtype="object")
for test in tests:
try:
reg_ex = re.compile(str(test).lower())
@@ -2020,7 +2016,8 @@ class InputData:
if re.match(reg_ex, str(test_id).lower()):
test_data = self.data[job][
str(build)][data_set][test_id]
- data[job][str(build)][test_id] = pd.Series()
+ data[job][str(build)][test_id] = \
+ pd.Series(dtype="object")
if params is None:
for param, val in test_data.items():
data[job][str(build)][test_id]\
@@ -2075,7 +2072,7 @@ class InputData:
logging.info(u" Merging data ...")
- merged_data = pd.Series()
+ merged_data = pd.Series(dtype="object")
for builds in data.values:
for item in builds.values:
for item_id, item_data in item.items():
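
For reference, the data store touched throughout this diff is a Series of Series
(job -> build -> test -> parameters). The sketch below is a condensed, hypothetical
illustration of that layout with made-up job, build and test names, assuming the
same pandas 1.x environment; it is not the real InputData class.

    import pandas as pd

    data = pd.Series(dtype="object")
    data["example-job"] = pd.Series(dtype="object")             # one Series per job
    data["example-job"]["42"] = pd.Series(dtype="object")       # one Series per build
    data["example-job"]["42"]["example-test"] = pd.Series(dtype="object")
    data["example-job"]["42"]["example-test"]["status"] = "PASS"

    print(data["example-job"]["42"]["example-test"]["status"])  # PASS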