Diffstat (limited to 'resources/tools/dash')
-rw-r--r--  resources/tools/dash/app/pal/data/data.yaml      |  3
-rw-r--r--  resources/tools/dash/app/pal/stats/graphs.py     |  3
-rw-r--r--  resources/tools/dash/app/pal/stats/layout.py     | 66
-rw-r--r--  resources/tools/dash/app/pal/trending/graphs.py  |  7
4 files changed, 68 insertions, 11 deletions
diff --git a/resources/tools/dash/app/pal/data/data.yaml b/resources/tools/dash/app/pal/data/data.yaml
index 3ed7dea084..92cd659f48 100644
--- a/resources/tools/dash/app/pal/data/data.yaml
+++ b/resources/tools/dash/app/pal/data/data.yaml
@@ -14,6 +14,7 @@ statistics-trending:
     - dut_version
     - hosts
     - passed
+    - test_id
 trending-mrr:
   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/trending
   columns:
@@ -31,7 +32,7 @@ trending-mrr:
     - result_receive_rate_rate_avg
     - result_receive_rate_rate_stdev
     - result_receive_rate_rate_unit
-    - result_receive_rate_rate_values
+    # - result_receive_rate_rate_values
 trending-ndrpdr:
   path: s3://fdio-docs-s3-cloudfront-index/csit/parquet/trending
   columns:
diff --git a/resources/tools/dash/app/pal/stats/graphs.py b/resources/tools/dash/app/pal/stats/graphs.py
index d9f49407d9..db6937402a 100644
--- a/resources/tools/dash/app/pal/stats/graphs.py
+++ b/resources/tools/dash/app/pal/stats/graphs.py
@@ -45,6 +45,7 @@ def graph_statistics(df: pd.DataFrame, job:str, layout: dict,
 
     hover = list()
     for _, row in data.iterrows():
+        d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
         hover_itm = (
             f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
             f"duration: "
@@ -52,7 +53,7 @@ def graph_statistics(df: pd.DataFrame, job:str, layout: dict,
             f"{((int(row['duration']) % 3600) // 60):02d}<br>"
             f"passed: {row['passed']}<br>"
             f"failed: {row['failed']}<br>"
-            f"{row['dut_type']}-ref: {row['dut_version']}<br>"
+            f"{d_type}-ref: {row['dut_version']}<br>"
             f"csit-ref: {row['job']}/{row['build']}<br>"
             f"hosts: {', '.join(row['hosts'])}"
         )
diff --git a/resources/tools/dash/app/pal/stats/layout.py b/resources/tools/dash/app/pal/stats/layout.py
index cbb8cc7781..2f43308f7b 100644
--- a/resources/tools/dash/app/pal/stats/layout.py
+++ b/resources/tools/dash/app/pal/stats/layout.py
@@ -105,10 +105,10 @@ class Layout:
             "dut_version": list(),
             "hosts": list(),
             "passed": list(),
-            "failed": list()
+            "failed": list(),
+            "lst_failed": list()
         }
         for job in jobs:
-            # TODO: Add list of failed tests for each build
             df_job = df_tst_info.loc[(df_tst_info["job"] == job)]
             builds = df_job["build"].unique()
             for build in builds:
@@ -119,15 +119,25 @@ class Layout:
                 tst_info["dut_version"].append(df_build["dut_version"].iloc[-1])
                 tst_info["hosts"].append(df_build["hosts"].iloc[-1])
                 try:
-                    passed = df_build.value_counts(subset='passed')[True]
+                    passed = df_build.value_counts(subset="passed")[True]
                 except KeyError:
                     passed = 0
                 try:
-                    failed = df_build.value_counts(subset='passed')[False]
+                    failed = df_build.value_counts(subset="passed")[False]
+                    failed_tests = df_build.loc[(df_build["passed"] == False)]\
+                        ["test_id"].to_list()
+                    l_failed = list()
+                    for tst in failed_tests:
+                        lst_tst = tst.split(".")
+                        suite = lst_tst[-2].replace("2n1l-", "").\
+                            replace("1n1l-", "").replace("2n-", "")
+                        l_failed.append(f"{suite.split('-')[0]}-{lst_tst[-1]}")
                 except KeyError:
                     failed = 0
+                    l_failed = list()
                 tst_info["passed"].append(passed)
                 tst_info["failed"].append(failed)
+                tst_info["lst_failed"].append(sorted(l_failed))
 
         self._data = data_stats.merge(pd.DataFrame.from_dict(tst_info))
 
@@ -300,7 +310,7 @@ class Layout:
                     ),
                     dcc.Loading(
                         dbc.Offcanvas(
-                            class_name="w-25",
+                            class_name="w-50",
                             id="offcanvas-metadata",
                             title="Detailed Information",
                             placement="end",
@@ -804,6 +814,26 @@ class Layout:
             elif trigger_id == "graph-duration":
                 graph_data = duration_data["points"][0].get("text", "")
                 if graph_data:
+                    lst_graph_data = graph_data.split("<br>")
+
+                    # Prepare list of failed tests:
+                    job = str()
+                    build = str()
+                    for itm in lst_graph_data:
+                        if "csit-ref:" in itm:
+                            job, build = itm.split(" ")[-1].split("/")
+                            break
+                    if job and build:
+                        fail_tests = self.data.loc[
+                            (self.data["job"] == job) &
+                            (self.data["build"] == build)
+                        ]["lst_failed"].values[0]
+                        if not fail_tests:
+                            fail_tests = None
+                    else:
+                        fail_tests = None
+
+                    # Create the content of the offcanvas:
                     metadata = [
                         dbc.Card(
                             class_name="gy-2 p-0",
                             children=[
@@ -828,7 +858,7 @@ class Layout:
                                                 ),
                                                 x.split(": ")[1]
                                             ]
-                                        ) for x in graph_data.split("<br>")
+                                        ) for x in lst_graph_data
                                     ], flush=True),
                                 ]
@@ -836,6 +866,30 @@ class Layout:
                                 ]
                             )
                         ]
+
+                    if fail_tests is not None:
+                        metadata.append(
+                            dbc.Card(
+                                class_name="gy-2 p-0",
+                                children=[
+                                    dbc.CardHeader(
+                                        f"List of Failed Tests ({len(fail_tests)})"
+                                    ),
+                                    dbc.CardBody(
+                                        id="failed-tests",
+                                        class_name="p-0",
+                                        children=[dbc.ListGroup(
+                                            children=[
+                                                dbc.ListGroupItem(x) \
+                                                    for x in fail_tests
+                                            ],
+                                            flush=True),
+                                        ]
+                                    )
+                                ]
+                            )
+                        )
+
                     open_canvas = True
 
         return metadata, open_canvas
diff --git a/resources/tools/dash/app/pal/trending/graphs.py b/resources/tools/dash/app/pal/trending/graphs.py
index 3b81cf39c4..52e86d8e83 100644
--- a/resources/tools/dash/app/pal/trending/graphs.py
+++ b/resources/tools/dash/app/pal/trending/graphs.py
@@ -16,7 +16,6 @@
 
 import plotly.graph_objects as go
 import pandas as pd
-import re
 
 import hdrh.histogram
 import hdrh.codec
@@ -212,11 +211,12 @@ def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame,
     hover = list()
     customdata = list()
     for _, row in df.iterrows():
+        d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
         hover_itm = (
             f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
             f"<prop> [{row[_UNIT[ttype]]}]: {row[_VALUE[ttype]]}<br>"
             f"<stdev>"
-            f"{row['dut_type']}-ref: {row['dut_version']}<br>"
+            f"{d_type}-ref: {row['dut_version']}<br>"
             f"csit-ref: {row['job']}/{row['build']}<br>"
             f"hosts: {', '.join(row['hosts'])}"
         )
@@ -236,11 +236,12 @@
     hover_trend = list()
     for avg, stdev, (_, row) in zip(trend_avg, trend_stdev,
             df.iterrows()):
+        d_type = "trex" if row["dut_type"] == "none" else row["dut_type"]
         hover_itm = (
             f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
             f"trend [pps]: {avg}<br>"
             f"stdev [pps]: {stdev}<br>"
-            f"{row['dut_type']}-ref: {row['dut_version']}<br>"
+            f"{d_type}-ref: {row['dut_version']}<br>"
             f"csit-ref: {row['job']}/{row['build']}<br>"
             f"hosts: {', '.join(row['hosts'])}"
         )
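
Note on the stats/layout.py change above: the new l_failed logic shortens each full test_id into a "<suite prefix>-<test case>" label for the offcanvas list. A minimal standalone sketch of that transformation, using a made-up test_id for illustration (real values come from the "test_id" column added to statistics-trending in data.yaml):

    # Sketch of the failed-test name shortening done in stats/layout.py above.
    # The sample test_id is hypothetical; the string operations mirror the diff.
    test_id = (
        "tests.vpp.perf.l2.2n1l-10ge2p1x710-eth-l2bdbasemaclrn-mrr"
        ".64b-2t1c-eth-l2bdbasemaclrn-mrr"
    )
    lst_tst = test_id.split(".")
    # Drop the testbed prefix from the suite name:
    suite = lst_tst[-2].replace("2n1l-", "").replace("1n1l-", "").replace("2n-", "")
    # Keep only the first token of the suite plus the full test case name:
    print(f"{suite.split('-')[0]}-{lst_tst[-1]}")
    # -> 10ge2p1x710-64b-2t1c-eth-l2bdbasemaclrn-mrr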