Diffstat (limited to 'resources/tools')
-rw-r--r--  resources/tools/presentation/convert_xml_json.py       | 235
-rw-r--r--  resources/tools/presentation/generator_tables.py       |  39
-rw-r--r--  resources/tools/presentation/input_data_parser.py      | 111
-rw-r--r--  resources/tools/presentation/json/template_0.1.0.json  |  40
4 files changed, 196 insertions, 229 deletions
diff --git a/resources/tools/presentation/convert_xml_json.py b/resources/tools/presentation/convert_xml_json.py
index e9ccca0b63..61c6e84a98 100644
--- a/resources/tools/presentation/convert_xml_json.py
+++ b/resources/tools/presentation/convert_xml_json.py
@@ -14,7 +14,7 @@
 """Convert output_info.xml files into JSON structures.
 
 Version: 0.1.0
-Date: 8th June 2021
+Date: 22nd June 2021
 
 The json structure is defined in https://gerrit.fd.io/r/c/csit/+/28992
 """
@@ -72,97 +72,37 @@ class JSONData:
         """
         return self._data
 
-    def add_element(self, value, path_to_value):
-        """Add an element to the json structure.
+    def update(self, kwargs):
+        """Update the data with new data from the dictionary.
 
-        :param value: Element value.
-        :param path_to_value: List of tuples where the first item is the element
-            on the path and the second one is its type.
-        :type value: dict, list, str, int, float, bool
-        :type path_to_value: list
-        :raises: IndexError if the path is empty.
-        :raises: TypeError if the val is of not supported type.
+        :param kwargs: Key value pairs to be added to the data.
+        :type kwargs: dict
         """
+        self._data.update(kwargs)
 
-        def _add_element(val, path, structure):
-            """Add an element to the given path.
-
-            :param val: Element value.
-            :param path: List of tuples where the first item is the element
-                on the path and the second one is its type.
-            :param structure: The structure where the element is added.
-            :type val: dict, list, str, int, float, bool
-            :type path: list
-            :type structure: dict
-            :raises TypeError if there is a wrong type in the path.
-            """
-            if len(path) == 1:
-                if isinstance(structure, dict):
-                    if path[0][1] is dict:
-                        if path[0][0] not in structure:
-                            structure[path[0][0]] = dict()
-                        structure[path[0][0]].update(val)
-                    elif path[0][1] is list:
-                        if path[0][0] not in structure:
-                            structure[path[0][0]] = list()
-                        if isinstance(val, list):
-                            structure[path[0][0]].extend(val)
-                        else:
-                            structure[path[0][0]].append(val)
-                    else:
-                        structure[path[0][0]] = val
-                elif isinstance(structure, list):
-                    if path[0][0] == -1 or path[0][0] >= len(structure):
-                        if isinstance(val, list):
-                            structure.extend(val)
-                        else:
-                            structure.append(val)
-                    else:
-                        structure[path[0][0]] = val
-                return
-
-            if isinstance(structure, dict):
-                if path[0][1] is dict:
-                    if path[0][0] not in structure:
-                        structure[path[0][0]] = dict()
-                elif path[0][1] is list:
-                    if path[0][0] not in structure:
-                        structure[path[0][0]] = list()
-            elif isinstance(structure, list):
-                if path[0][0] == -1 or path[0][0] >= len(structure):
-                    if path[0][1] is list:
-                        structure.append(list())
-                    elif path[0][1] is dict:
-                        structure.append(dict())
-                    else:
-                        structure.append(0)
-                    path[0][0] = len(structure) - 1
-            else:
-                raise TypeError(
-                    u"Only the last item in the path can be different type "
-                    u"then list or dictionary."
-                )
-            _add_element(val, path[1:], structure[path[0][0]])
-
-        if not isinstance(value, (dict, list, str, int, float, bool)):
-            raise TypeError(
-                u"The value must be one of these types: dict, list, str, int, "
-                u"float, bool.\n"
-                f"Value: {value}\n"
-                f"Path: {path_to_value}"
-            )
-        _add_element(deepcopy(value), path_to_value, self._data)
-
-    def get_element(self, path):
-        """Get the element specified by the path.
-
-        :param path: List of keys and indices to the requested element or
-            sub-tree.
-        :type path: list
-        :returns: Element specified by the path.
-        :rtype: any
+    def set_key(self, key, val):
+        """Setter.
+
+        :param key: The key to be updated / added.
+        :param val: The key value.
+        :type key: str
+        :type val: object
+        """
+        self._data[key] = deepcopy(val)
+
+    def add_to_list(self, key, val):
+        """Add an item to the list identified by key.
+
+        :param key: The key identifying the list.
+        :param val: The val to be appended to the list. If val is a list,
+            extend is used.
         """
-        raise NotImplementedError
+        if self._data.get(key, None) is None:
+            self._data[key] = list()
+        if isinstance(val, list):
+            self._data[key].extend(val)
+        else:
+            self._data[key].append(val)
 
     def dump(self, file_out, indent=None):
         """Write JSON data to a file.
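A minimal sketch of how the flattened JSONData API above is meant to be used. It assumes JSONData is importable from convert_xml_json.py as patched; the template path, job name, test id and output file name are illustrative only, while the method names (update, set_key, add_to_list, dump) come from the class itself:

    # Sketch only; module path and example values are assumptions.
    import json
    from convert_xml_json import JSONData

    with open(u"json/template_0.1.0.json", u"rt") as f_tmpl:
        template = json.load(f_tmpl)

    data = JSONData(template=template)
    data.update({u"ci": u"jenkins.fd.io", u"job": u"example-job"})       # merge a dict of keys
    data.set_key(u"test_id", u"example.test.id")                         # set / overwrite one key
    data.add_to_list(u"log", {u"msg": u"first entry"})                   # creates the list
    data.add_to_list(u"log", [{u"msg": u"second"}, {u"msg": u"third"}])  # a list argument extends it
    data.dump(u"output.json", indent=u"    ")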
@@ -207,53 +147,29 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
     :type metadata: dict
     """
 
-    p_metadata = [(u"metadata", dict), ]
-    p_test = [(u"test", dict), ]
-    p_log = [(u"log", list), (-1, list)]
-
     data = JSONData(template=template)
 
-    data.add_element({u"suite-id": metadata.pop(u"suite-id", u"")}, p_metadata)
-    data.add_element(
-        {u"suite-doc": metadata.pop(u"suite-doc", u"")}, p_metadata
-    )
-    data.add_element({u"testbed": metadata.pop(u"testbed", u"")}, p_metadata)
-    data.add_element(
-        {u"sut-version": metadata.pop(u"sut-version", u"")}, p_metadata
-    )
-
-    data.add_element({u"test-id": tid}, p_test)
+    data.update(metadata)
+    data.set_key(u"test_id", tid)
     t_type = in_data.get(u"type", u"")
     t_type = u"NDRPDR" if t_type == u"CPS" else t_type  # It is NDRPDR
-    data.add_element({u"test-type": t_type}, p_test)
+    data.set_key(u"test_type", t_type)
     tags = in_data.get(u"tags", list())
-    data.add_element({u"tags": tags}, p_test)
-    data.add_element(
-        {u"documentation": in_data.get(u"documentation", u"")}, p_test
-    )
-    data.add_element({u"message": in_data.get(u"msg", u"")}, p_test)
-    execution = {
-        u"start_time": in_data.get(u"starttime", u""),
-        u"end_time": in_data.get(u"endtime", u""),
-        u"status": in_data.get(u"status", u"FAILED"),
-    }
-    execution.update(metadata)
-    data.add_element({u"execution": execution}, p_test)
-
-    log_item = {
-        u"source": {
-            u"type": u"node",
-            u"id": ""
-        },
-        u"msg-type": u"",
-        u"log-level": u"INFO",
-        u"timestamp": in_data.get(u"starttime", u""),  # replacement
-        u"msg": u"",
-        u"data": []
-    }
+    data.set_key(u"tags", tags)
+    data.set_key(u"documentation", in_data.get(u"documentation", u""))
+    data.set_key(u"message", in_data.get(u"msg", u""))
+    data.set_key(u"start_time", in_data.get(u"starttime", u""))
+    data.set_key(u"end_time", in_data.get(u"endtime", u""))
+    data.set_key(u"status", in_data.get(u"status", u"FAILED"))
+    sut_type = u""
+    if u"vpp" in tid:
+        sut_type = u"vpp"
+    elif u"dpdk" in tid:
+        sut_type = u"dpdk"
+    data.set_key(u"sut_type", sut_type)
 
     # Process configuration history:
-    in_papi = deepcopy(in_data.get(u"conf-history", None))
+    in_papi = deepcopy(in_data.get(u"conf_history", None))
     if in_papi:
         regex_dut = re.compile(r'\*\*DUT(\d):\*\*')
         node_id = u"dut1"
@@ -264,20 +180,33 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
             if groups:
                 node_id = f"dut{groups.group(1)}"
             else:
-                log_item[u"source"][u"id"] = node_id
-                log_item[u"msg-type"] = u"papi"
-                log_item[u"msg"] = line
-                data.add_element(log_item, p_log)
+                data.add_to_list(
+                    u"log",
+                    {
+                        u"source_type": u"node",
+                        u"source_id": node_id,
+                        u"msg_type": u"papi",
+                        u"log_level": u"INFO",
+                        u"timestamp": in_data.get(u"starttime", u""),
+                        u"msg": line,
+                        u"data": list()
+                    }
+                )
 
     # Process show runtime:
     in_sh_run = deepcopy(in_data.get(u"show-run", None))
     if in_sh_run:
         # Transform to openMetrics format
         for key, val in in_sh_run.items():
-            log_item[u"source"][u"id"] = key
-            log_item[u"msg-type"] = u"metric"
-            log_item[u"msg"] = u"show-runtime"
-            log_item[u"data"] = list()
+            log_item = {
+                u"source_type": u"node",
+                u"source_id": key,
+                u"msg_type": u"metric",
+                u"log_level": u"INFO",
+                u"timestamp": in_data.get(u"starttime", u""),
+                u"msg": u"show_runtime",
+                u"data": list()
+            }
             for item in val.get(u"runtime", list()):
                 for metric, m_data in item.items():
                     if metric == u"name":
@@ -290,12 +219,12 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
                                 u"labels": {
                                     u"host": val.get(u"host", u""),
                                     u"socket": val.get(u"socket", u""),
-                                    u"graph-node": item.get(u"name", u""),
-                                    u"thread-id": str(idx)
+                                    u"graph_node": item.get(u"name", u""),
+                                    u"thread_id": str(idx)
                                 }
                             }
                         )
-            data.add_element(log_item, p_log)
+            data.add_to_list(u"log", log_item)
 
     # Process results:
     results = dict()
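To make the new flat log records concrete, a hedged example of one "papi" entry and one "metric" entry as _export_test_from_xml_to_json() now appends them with add_to_list(u"log", ...). All values below are invented, and the name/value/timestamp fields of the per-counter records under "data" (outside the visible hunk) are assumed from the labels block above:

    # Example shapes only; every value is invented for illustration.
    papi_log_entry = {
        u"source_type": u"node",
        u"source_id": u"dut1",
        u"msg_type": u"papi",
        u"log_level": u"INFO",
        u"timestamp": u"20210622 12:34:56.789",
        u"msg": u"sw_interface_set_flags(sw_if_index=1, flags=1)",
        u"data": list()
    }
    show_runtime_log_entry = {
        u"source_type": u"node",
        u"source_id": u"dut1",
        u"msg_type": u"metric",
        u"log_level": u"INFO",
        u"timestamp": u"20210622 12:34:56.789",
        u"msg": u"show_runtime",
        u"data": [
            {
                u"name": u"calls",          # one record per counter and thread
                u"value": 12345,
                u"timestamp": u"20210622 12:34:56.789",
                u"labels": {
                    u"host": u"10.0.0.1",
                    u"socket": u"/run/vpp/api.sock",
                    u"graph_node": u"ip4-lookup",
                    u"thread_id": u"0"
                }
            }
        ]
    }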
log_item[u"msg-type"] = u"metric" - log_item[u"msg"] = u"show-runtime" - log_item[u"data"] = list() + log_item = { + u"source_type": u"node", + u"source_id": key, + u"msg_type": u"metric", + u"log_level": u"INFO", + u"timestamp": in_data.get(u"starttime", u""), + u"msg": u"show_runtime", + u"data": list() + } for item in val.get(u"runtime", list()): for metric, m_data in item.items(): if metric == u"name": @@ -290,12 +219,12 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata): u"labels": { u"host": val.get(u"host", u""), u"socket": val.get(u"socket", u""), - u"graph-node": item.get(u"name", u""), - u"thread-id": str(idx) + u"graph_node": item.get(u"name", u""), + u"thread_id": str(idx) } } ) - data.add_element(log_item, p_log) + data.add_to_list(u"log", log_item) # Process results: results = dict() @@ -338,23 +267,23 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata): }, u"latency": { u"forward": { - u"pdr-90": in_data.get(u"latency", dict()). + u"pdr_90": in_data.get(u"latency", dict()). get(u"PDR90", dict()).get(u"direction1", u"NaN"), - u"pdr-50": in_data.get(u"latency", dict()). + u"pdr_50": in_data.get(u"latency", dict()). get(u"PDR50", dict()).get(u"direction1", u"NaN"), - u"pdr-10": in_data.get(u"latency", dict()). + u"pdr_10": in_data.get(u"latency", dict()). get(u"PDR10", dict()).get(u"direction1", u"NaN"), - u"pdr-0": in_data.get(u"latency", dict()). + u"pdr_0": in_data.get(u"latency", dict()). get(u"LAT0", dict()).get(u"direction1", u"NaN") }, u"reverse": { - u"pdr-90": in_data.get(u"latency", dict()). + u"pdr_90": in_data.get(u"latency", dict()). get(u"PDR90", dict()).get(u"direction2", u"NaN"), - u"pdr-50": in_data.get(u"latency", dict()). + u"pdr_50": in_data.get(u"latency", dict()). get(u"PDR50", dict()).get(u"direction2", u"NaN"), - u"pdr-10": in_data.get(u"latency", dict()). + u"pdr_10": in_data.get(u"latency", dict()). get(u"PDR10", dict()).get(u"direction2", u"NaN"), - u"pdr-0": in_data.get(u"latency", dict()). + u"pdr_0": in_data.get(u"latency", dict()). get(u"LAT0", dict()).get(u"direction2", u"NaN") } } @@ -369,7 +298,7 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata): } elif t_type == "SOAK": results = { - u"critical-rate": { + u"critical_rate": { u"lower": in_data.get(u"throughput", dict()). get(u"LOWER", u"NaN"), u"upper": in_data.get(u"throughput", dict()). @@ -387,7 +316,7 @@ def _export_test_from_xml_to_json(tid, in_data, out, template, metadata): } else: pass - data.add_element({u"results": results}, p_test) + data.set_key(u"results", results) data.dump(out, indent=u" ") @@ -456,12 +385,12 @@ def convert_xml_to_json(spec, data): { u"ci": u"jenkins.fd.io", u"job": job, - u"build": build_nr, - u"suite-id": suite_id, - u"suite-doc": build[u"suites"].get(suite_id, dict()). + u"build_number": build_nr, + u"suite_id": suite_id, + u"suite_doc": build[u"suites"].get(suite_id, dict()). 
get(u"doc", u""), u"testbed": build[u"metadata"].get(u"testbed", u""), - u"sut-version": build[u"metadata"].get(u"version", u"") + u"sut_version": build[u"metadata"].get(u"version", u"") } ) diff --git a/resources/tools/presentation/generator_tables.py b/resources/tools/presentation/generator_tables.py index bb962890d0..d66a8fc3cf 100644 --- a/resources/tools/presentation/generator_tables.py +++ b/resources/tools/presentation/generator_tables.py @@ -183,10 +183,45 @@ def table_oper_data_html(table, input_data): tcol = ET.SubElement( trow, u"td", attrib=dict(align=u"left", colspan=u"6") ) - if dut_data.get(u"threads", None) is None: + if dut_data.get(u"runtime", None) is None: tcol.text = u"No Data" continue + try: + threads_nr = len(dut_data[u"runtime"][0][u"clocks"]) + except (IndexError, KeyError): + tcol.text = u"No Data" + continue + + threads = OrderedDict({idx: list() for idx in range(threads_nr)}) + for item in dut_data[u"runtime"]: + for idx in range(threads_nr): + if item[u"vectors"][idx] > 0: + clocks = item[u"clocks"][idx] / item[u"vectors"][idx] + elif item[u"calls"][idx] > 0: + clocks = item[u"clocks"][idx] / item[u"calls"][idx] + elif item[u"suspends"][idx] > 0: + clocks = item[u"clocks"][idx] / item[u"suspends"][idx] + else: + clocks = 0.0 + + if item[u"calls"][idx] > 0: + vectors_call = item[u"vectors"][idx] / item[u"calls"][ + idx] + else: + vectors_call = 0.0 + + if int(item[u"calls"][idx]) + int(item[u"vectors"][idx]) + \ + int(item[u"suspends"][idx]): + threads[idx].append([ + item[u"name"], + item[u"calls"][idx], + item[u"vectors"][idx], + item[u"suspends"][idx], + clocks, + vectors_call + ]) + bold = ET.SubElement(tcol, u"b") bold.text = ( f"Host IP: {dut_data.get(u'host', '')}, " @@ -200,7 +235,7 @@ def table_oper_data_html(table, input_data): ) thead.text = u"\t" - for thread_nr, thread in dut_data[u"threads"].items(): + for thread_nr, thread in threads.items(): trow = ET.SubElement( tbl, u"tr", attrib=dict(bgcolor=colors[u"header"]) ) diff --git a/resources/tools/presentation/input_data_parser.py b/resources/tools/presentation/input_data_parser.py index 2cc2447ec0..94f8e96ec8 100644 --- a/resources/tools/presentation/input_data_parser.py +++ b/resources/tools/presentation/input_data_parser.py @@ -673,50 +673,17 @@ class ExecutionChecker(ResultVisitor): replace(u"'", u'"').replace(u'b"', u'"'). replace(u'u"', u'"').split(u":", 1)[1]) - try: - threads_nr = len(runtime[0][u"clocks"]) - except (IndexError, KeyError): - return - dut = u"dut{nr}".format( nr=len(self._data[u'tests'][self._test_id][u'show-run'].keys()) + 1) - oper = { - u"host": host, - u"socket": sock, - # Needed for json converter, enable when 'threads' is gone. 
- # u"runtime": runtime, - u"threads": OrderedDict({idx: list() for idx in range(threads_nr)}) - } - - for item in runtime: - for idx in range(threads_nr): - if item[u"vectors"][idx] > 0: - clocks = item[u"clocks"][idx] / item[u"vectors"][idx] - elif item[u"calls"][idx] > 0: - clocks = item[u"clocks"][idx] / item[u"calls"][idx] - elif item[u"suspends"][idx] > 0: - clocks = item[u"clocks"][idx] / item[u"suspends"][idx] - else: - clocks = 0.0 - - if item[u"calls"][idx] > 0: - vectors_call = item[u"vectors"][idx] / item[u"calls"][idx] - else: - vectors_call = 0.0 - - if int(item[u"calls"][idx]) + int(item[u"vectors"][idx]) + \ - int(item[u"suspends"][idx]): - oper[u"threads"][idx].append([ - item[u"name"], - item[u"calls"][idx], - item[u"vectors"][idx], - item[u"suspends"][idx], - clocks, - vectors_call - ]) - - self._data[u'tests'][self._test_id][u'show-run'][dut] = copy.copy(oper) + self._data[u'tests'][self._test_id][u'show-run'][dut] = \ + copy.copy( + { + u"host": host, + u"socket": sock, + u"runtime": runtime, + } + ) def _get_ndrpdr_throughput(self, msg): """Get NDR_LOWER, NDR_UPPER, PDR_LOWER and PDR_UPPER from the test @@ -2022,15 +1989,6 @@ class InputData: """Print all operational data to console. """ - tbl_hdr = ( - u"Name", - u"Nr of Vectors", - u"Nr of Packets", - u"Suspends", - u"Cycles per Packet", - u"Average Vector Size" - ) - for job in self._input_data.values: for build in job.values: for test_id, test_data in build[u"tests"].items(): @@ -2038,12 +1996,59 @@ class InputData: if test_data.get(u"show-run", None) is None: continue for dut_name, data in test_data[u"show-run"].items(): - if data.get(u"threads", None) is None: + if data.get(u"runtime", None) is None: + continue + try: + threads_nr = len(data[u"runtime"][0][u"clocks"]) + except (IndexError, KeyError): continue + threads = OrderedDict( + {idx: list() for idx in range(threads_nr)}) + for item in data[u"runtime"]: + for idx in range(threads_nr): + if item[u"vectors"][idx] > 0: + clocks = item[u"clocks"][idx] / \ + item[u"vectors"][idx] + elif item[u"calls"][idx] > 0: + clocks = item[u"clocks"][idx] / \ + item[u"calls"][idx] + elif item[u"suspends"][idx] > 0: + clocks = item[u"clocks"][idx] / \ + item[u"suspends"][idx] + else: + clocks = 0.0 + + if item[u"calls"][idx] > 0: + vectors_call = item[u"vectors"][idx] / \ + item[u"calls"][idx] + else: + vectors_call = 0.0 + + if int(item[u"calls"][idx]) + int( + item[u"vectors"][idx]) + \ + int(item[u"suspends"][idx]): + threads[idx].append([ + item[u"name"], + item[u"calls"][idx], + item[u"vectors"][idx], + item[u"suspends"][idx], + clocks, + vectors_call + ]) + print(f"Host IP: {data.get(u'host', '')}, " f"Socket: {data.get(u'socket', '')}") - for thread_nr, thread in data[u"threads"].items(): - txt_table = prettytable.PrettyTable(tbl_hdr) + for thread_nr, thread in threads.items(): + txt_table = prettytable.PrettyTable( + ( + u"Name", + u"Nr of Vectors", + u"Nr of Packets", + u"Suspends", + u"Cycles per Packet", + u"Average Vector Size" + ) + ) avg = 0.0 for row in thread: txt_table.add_row(row) diff --git a/resources/tools/presentation/json/template_0.1.0.json b/resources/tools/presentation/json/template_0.1.0.json index dd9fed7360..7c7c76a5c1 100644 --- a/resources/tools/presentation/json/template_0.1.0.json +++ b/resources/tools/presentation/json/template_0.1.0.json @@ -1,25 +1,23 @@ { "version": "0.1.0", - "test": { - "test-id": "", - "test-type": "", - "tags": [], - "documentation": "", - "message": "", - "execution": { - "ci": "", - "job": "", - "build": "", - 
"csit-commit": "", - "csit-gerrit-change": "", - "start_time": "", - "end_time": "", - "status": "" - }, - "results": {} - }, - "metadata": {}, - "resource": [], - "network": [], + "ci": "", + "job": "", + "build_number": "", + "testbed": "", + "suite_id": "", + "suite_doc": "", + "sut_type": "", + "sut_version": "", + "test_id": "", + "test_type": "", + "tags": [], + "documentation": "", + "message": "", + "csit_commit": "", + "csit_gerrit_change": "", + "start_time": "", + "end_time": "", + "status": "", + "results": {}, "log": [] } |