author    Tibor Frank <tifrank@cisco.com>    2022-04-07 09:51:15 +0200
committer Tibor Frank <tifrank@cisco.com>    2022-04-08 12:05:04 +0000
commit    ab10e17fd7853043afde6af25921f6ab636e0964 (patch)
tree      8b2f9eeee0b2cbe56171b196a91e69a2d36f28f6 /resources
parent    efddc84171ee1335d193bac15644a0419ef8c166 (diff)
UTI: Improvements in detailed hover metadata information
Change-Id: I65c1a4a229869166c256aa320a54a74669519959
Signed-off-by: Tibor Frank <tifrank@cisco.com>
Diffstat (limited to 'resources')
-rw-r--r--  resources/tools/dash/app/pal/trending/graphs.py  333
-rw-r--r--  resources/tools/dash/app/pal/trending/layout.py    76
2 files changed, 240 insertions, 169 deletions
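
The diff below splits the former graph_trending_tput() into a per-test data
selector (select_trending_data) and a graph builder (graph_trending), so the
same selection logic can also feed the new CSV download callback added in
layout.py. The following sketch of how the refactored helpers compose is
illustrative only and not part of the commit: the selection values are
hypothetical placeholders, and the `data` DataFrame and `layout` dict stand in
for the Layout class's self.data and self.layout.

    from datetime import datetime, timedelta

    # Hypothetical selection item; the keys match those read by the code in
    # this commit, the values are made up for illustration.
    itm = {
        "phy": "2n-icx-xxv710-avf",   # split into topo-arch-nic-drv
        "framesize": "64b",
        "core": "2t1c",
        "test": "ethip4-ip4base",
        "testtype": "mrr",
    }

    # Per-test selection, reused by both the plots and the data download;
    # returns None when the "phy" string does not have four parts.
    df = select_trending_data(data, itm)

    # Whole-graph entry point (renamed from graph_trending_tput); returns
    # the throughput and latency figures for the selected tests.
    fig_tput, fig_lat = graph_trending(
        data, [itm], layout,
        start=datetime.utcnow() - timedelta(days=180),
        end=datetime.utcnow(),
    )
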
diff --git a/resources/tools/dash/app/pal/trending/graphs.py b/resources/tools/dash/app/pal/trending/graphs.py
index 0760d9cc80..dc4e7afca8 100644
--- a/resources/tools/dash/app/pal/trending/graphs.py
+++ b/resources/tools/dash/app/pal/trending/graphs.py
@@ -158,187 +158,204 @@ def _classify_anomalies(data):
return classification, avgs, stdevs
-def graph_trending_tput(data: pd.DataFrame, sel:dict, layout: dict,
- start: datetime, end: datetime) -> tuple:
+def select_trending_data(data: pd.DataFrame, itm:dict) -> pd.DataFrame:
"""
"""
- if not sel:
- return None, None
+ phy = itm["phy"].split("-")
+ if len(phy) == 4:
+ topo, arch, nic, drv = phy
+ if drv in ("dpdk", "ixgbe"):
+ drv = ""
+ else:
+ drv += "-"
+ drv = drv.replace("_", "-")
+ else:
+ return None
+ cadence = \
+ "weekly" if (arch == "aws" or itm["testtype"] != "mrr") else "daily"
+ sel_topo_arch = (
+ f"csit-vpp-perf-"
+ f"{itm['testtype'] if itm['testtype'] == 'mrr' else 'ndrpdr'}-"
+ f"{cadence}-master-{topo}-{arch}"
+ )
+ df_sel = data.loc[(data["job"] == sel_topo_arch)]
+ regex = (
+ f"^.*{nic}.*\.{itm['framesize']}-{itm['core']}-{drv}{itm['test']}-"
+ f"{'mrr' if itm['testtype'] == 'mrr' else 'ndrpdr'}$"
+ )
+ df = df_sel.loc[
+ df_sel["test_id"].apply(
+ lambda x: True if re.search(regex, x) else False
+ )
+ ].sort_values(by="start_time", ignore_index=True)
- def _generate_traces(ttype: str, name: str, df: pd.DataFrame,
- start: datetime, end: datetime, color: str) -> list:
+ return df
- df = df.dropna(subset=[_VALUE[ttype], ])
- if df.empty:
- return list()
- x_axis = [d for d in df["start_time"] if d >= start and d <= end]
+def _generate_trending_traces(ttype: str, name: str, df: pd.DataFrame,
+ start: datetime, end: datetime, color: str) -> list:
+ """
+ """
- anomalies, trend_avg, trend_stdev = _classify_anomalies(
- {k: v for k, v in zip(x_axis, df[_VALUE[ttype]])}
+ df = df.dropna(subset=[_VALUE[ttype], ])
+ if df.empty:
+ return list()
+
+ x_axis = [d for d in df["start_time"] if d >= start and d <= end]
+
+ anomalies, trend_avg, trend_stdev = _classify_anomalies(
+ {k: v for k, v in zip(x_axis, df[_VALUE[ttype]])}
+ )
+
+ hover = list()
+ customdata = list()
+ for _, row in df.iterrows():
+ hover_itm = (
+ f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
+ f"<prop> [{row[_UNIT[ttype]]}]: {row[_VALUE[ttype]]}<br>"
+ f"<stdev>"
+ f"{row['dut_type']}-ref: {row['dut_version']}<br>"
+ f"csit-ref: {row['job']}/{row['build']}<br>"
+ f"hosts: {', '.join(row['hosts'])}"
)
-
- hover = list()
- customdata = list()
- for _, row in df.iterrows():
- hover_itm = (
- f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
- f"<prop> [{row[_UNIT[ttype]]}]: {row[_VALUE[ttype]]}<br>"
- f"<stdev>"
- f"{row['dut_type']}-ref: {row['dut_version']}<br>"
- f"csit-ref: {row['job']}/{row['build']}"
+ if ttype == "mrr":
+ stdev = (
+ f"stdev [{row['result_receive_rate_rate_unit']}]: "
+ f"{row['result_receive_rate_rate_stdev']}<br>"
)
- if ttype == "mrr":
- stdev = (
- f"stdev [{row['result_receive_rate_rate_unit']}]: "
- f"{row['result_receive_rate_rate_stdev']}<br>"
- )
- else:
- stdev = ""
- hover_itm = hover_itm.replace(
- "<prop>", "latency" if ttype == "pdr-lat" else "average"
- ).replace("<stdev>", stdev)
- hover.append(hover_itm)
- if ttype == "pdr-lat":
- customdata.append(_get_hdrh_latencies(row, name))
-
- hover_trend = list()
- for avg, stdev in zip(trend_avg, trend_stdev):
- if ttype == "pdr-lat":
- hover_trend.append(
- f"trend [us]: {avg}<br>"
- f"stdev [us]: {stdev}"
- )
- else:
- hover_trend.append(
- f"trend [pps]: {avg}<br>"
- f"stdev [pps]: {stdev}"
- )
-
- traces = [
- go.Scatter( # Samples
- x=x_axis,
- y=df[_VALUE[ttype]],
- name=name,
- mode="markers",
- marker={
- u"size": 5,
- u"color": color,
- u"symbol": u"circle",
- },
- text=hover,
- hoverinfo=u"text+name",
- showlegend=True,
- legendgroup=name,
- customdata=customdata
- ),
- go.Scatter( # Trend line
- x=x_axis,
- y=trend_avg,
- name=name,
- mode="lines",
- line={
- u"shape": u"linear",
- u"width": 1,
- u"color": color,
- },
- text=hover_trend,
- hoverinfo=u"text+name",
+ else:
+ stdev = ""
+ hover_itm = hover_itm.replace(
+ "<prop>", "latency" if ttype == "pdr-lat" else "average"
+ ).replace("<stdev>", stdev)
+ hover.append(hover_itm)
+ if ttype == "pdr-lat":
+ customdata.append(_get_hdrh_latencies(row, name))
+
+ hover_trend = list()
+ for avg, stdev, (_, row) in zip(trend_avg, trend_stdev, df.iterrows()):
+ hover_itm = (
+ f"date: {row['start_time'].strftime('%d-%m-%Y %H:%M:%S')}<br>"
+ f"trend [pps]: {avg}<br>"
+ f"stdev [pps]: {stdev}<br>"
+ f"{row['dut_type']}-ref: {row['dut_version']}<br>"
+ f"csit-ref: {row['job']}/{row['build']}<br>"
+ f"hosts: {', '.join(row['hosts'])}"
+ )
+ if ttype == "pdr-lat":
+ hover_itm = hover_itm.replace("[pps]", "[us]")
+ hover_trend.append(hover_itm)
+
+ traces = [
+ go.Scatter( # Samples
+ x=x_axis,
+ y=df[_VALUE[ttype]],
+ name=name,
+ mode="markers",
+ marker={
+ u"size": 5,
+ u"color": color,
+ u"symbol": u"circle",
+ },
+ text=hover,
+ hoverinfo=u"text+name",
+ showlegend=True,
+ legendgroup=name,
+ customdata=customdata
+ ),
+ go.Scatter( # Trend line
+ x=x_axis,
+ y=trend_avg,
+ name=name,
+ mode="lines",
+ line={
+ u"shape": u"linear",
+ u"width": 1,
+ u"color": color,
+ },
+ text=hover_trend,
+ hoverinfo=u"text+name",
+ showlegend=False,
+ legendgroup=name,
+ )
+ ]
+
+ if anomalies:
+ anomaly_x = list()
+ anomaly_y = list()
+ anomaly_color = list()
+ for idx, anomaly in enumerate(anomalies):
+ if anomaly in (u"regression", u"progression"):
+ anomaly_x.append(x_axis[idx])
+ anomaly_y.append(trend_avg[idx])
+ anomaly_color.append(_ANOMALY_COLOR[anomaly])
+ anomaly_color.extend([0.0, 0.5, 1.0])
+ traces.append(
+ go.Scatter(
+ x=anomaly_x,
+ y=anomaly_y,
+ mode=u"markers",
+ hoverinfo=u"none",
showlegend=False,
legendgroup=name,
- )
- ]
-
- if anomalies:
- anomaly_x = list()
- anomaly_y = list()
- anomaly_color = list()
- for idx, anomaly in enumerate(anomalies):
- if anomaly in (u"regression", u"progression"):
- anomaly_x.append(x_axis[idx])
- anomaly_y.append(trend_avg[idx])
- anomaly_color.append(_ANOMALY_COLOR[anomaly])
- anomaly_color.extend([0.0, 0.5, 1.0])
- traces.append(
- go.Scatter(
- x=anomaly_x,
- y=anomaly_y,
- mode=u"markers",
- hoverinfo=u"none",
- showlegend=False,
- legendgroup=name,
- name=f"{name}-anomalies",
- marker={
- u"size": 15,
- u"symbol": u"circle-open",
- u"color": anomaly_color,
- u"colorscale": _COLORSCALE_LAT \
- if ttype == "pdr-lat" else _COLORSCALE_TPUT,
- u"showscale": True,
- u"line": {
- u"width": 2
- },
- u"colorbar": {
- u"y": 0.5,
- u"len": 0.8,
- u"title": u"Circles Marking Data Classification",
- u"titleside": u"right",
- # u"titlefont": {
- # u"size": 14
- # },
- u"tickmode": u"array",
- u"tickvals": [0.167, 0.500, 0.833],
- u"ticktext": _TICK_TEXT_LAT \
- if ttype == "pdr-lat" else _TICK_TEXT_TPUT,
- u"ticks": u"",
- u"ticklen": 0,
- u"tickangle": -90,
- u"thickness": 10
- }
+ name=f"{name}-anomalies",
+ marker={
+ u"size": 15,
+ u"symbol": u"circle-open",
+ u"color": anomaly_color,
+ u"colorscale": _COLORSCALE_LAT \
+ if ttype == "pdr-lat" else _COLORSCALE_TPUT,
+ u"showscale": True,
+ u"line": {
+ u"width": 2
+ },
+ u"colorbar": {
+ u"y": 0.5,
+ u"len": 0.8,
+ u"title": u"Circles Marking Data Classification",
+ u"titleside": u"right",
+ # u"titlefont": {
+ # u"size": 14
+ # },
+ u"tickmode": u"array",
+ u"tickvals": [0.167, 0.500, 0.833],
+ u"ticktext": _TICK_TEXT_LAT \
+ if ttype == "pdr-lat" else _TICK_TEXT_TPUT,
+ u"ticks": u"",
+ u"ticklen": 0,
+ u"tickangle": -90,
+ u"thickness": 10
}
- )
+ }
)
+ )
+
+ return traces
+
+
+def graph_trending(data: pd.DataFrame, sel:dict, layout: dict,
+ start: datetime, end: datetime) -> tuple:
+ """
+ """
- return traces
+ if not sel:
+ return None, None
- # Generate graph:
fig_tput = None
fig_lat = None
for idx, itm in enumerate(sel):
- phy = itm["phy"].split("-")
- if len(phy) == 4:
- topo, arch, nic, drv = phy
- if drv in ("dpdk", "ixgbe"):
- drv = ""
- else:
- drv += "-"
- drv = drv.replace("_", "-")
- else:
+
+ df = select_trending_data(data, itm)
+ if df is None:
continue
- cadence = \
- "weekly" if (arch == "aws" or itm["testtype"] != "mrr") else "daily"
- sel_topo_arch = (
- f"csit-vpp-perf-"
- f"{itm['testtype'] if itm['testtype'] == 'mrr' else 'ndrpdr'}-"
- f"{cadence}-master-{topo}-{arch}"
- )
- df_sel = data.loc[(data["job"] == sel_topo_arch)]
- regex = (
- f"^.*{nic}.*\.{itm['framesize']}-{itm['core']}-{drv}{itm['test']}-"
- f"{'mrr' if itm['testtype'] == 'mrr' else 'ndrpdr'}$"
- )
- df = df_sel.loc[
- df_sel["test_id"].apply(
- lambda x: True if re.search(regex, x) else False
- )
- ].sort_values(by="start_time", ignore_index=True)
+
name = (
f"{itm['phy']}-{itm['framesize']}-{itm['core']}-"
f"{itm['test']}-{itm['testtype']}"
)
- traces = _generate_traces(
+ traces = _generate_trending_traces(
itm["testtype"], name, df, start, end, _COLORS[idx % len(_COLORS)]
)
if traces:
@@ -347,7 +364,7 @@ def graph_trending_tput(data: pd.DataFrame, sel:dict, layout: dict,
fig_tput.add_traces(traces)
if itm["testtype"] == "pdr":
- traces = _generate_traces(
+ traces = _generate_trending_traces(
"pdr-lat", name, df, start, end, _COLORS[idx % len(_COLORS)]
)
if traces:
diff --git a/resources/tools/dash/app/pal/trending/layout.py b/resources/tools/dash/app/pal/trending/layout.py
index bd8dd8b240..f996f5a005 100644
--- a/resources/tools/dash/app/pal/trending/layout.py
+++ b/resources/tools/dash/app/pal/trending/layout.py
@@ -14,7 +14,6 @@
"""Plotly Dash HTML layout override.
"""
-
import pandas as pd
from dash import dcc
@@ -26,7 +25,8 @@ from yaml import load, FullLoader, YAMLError
from datetime import datetime, timedelta
from ..data.data import Data
-from .graphs import graph_trending_tput, graph_hdrh_latency
+from .graphs import graph_trending, graph_hdrh_latency, \
+ select_trending_data
class Layout:
@@ -193,7 +193,23 @@ class Layout:
html.Div(
id="div-tput-metadata",
children=[
- dcc.Markdown("**Throughput**"),
+ html.Button(
+ id="btn-download-data",
+ children=["Download Data"],
+ style={"display": "block"}
+ ),
+ dcc.Download(id="download-data"),
+ dcc.Clipboard(
+ target_id="tput-metadata",
+ title="Copy",
+ style={"display": "inline-block"}
+ ),
+ html.Nobr(" "),
+ html.Nobr(" "),
+ dcc.Markdown(
+ children="**Throughput**",
+ style={"display": "inline-block"}
+ ),
html.Pre(
id="tput-metadata",
children="Click on data points in the graph"
@@ -204,7 +220,17 @@ class Layout:
html.Div(
id="div-latency-metadata",
children=[
- dcc.Markdown("**Latency**"),
+ dcc.Clipboard(
+ target_id="latency-metadata",
+ title="Copy",
+ style={"display": "inline-block"}
+ ),
+ html.Nobr(" "),
+ html.Nobr(" "),
+ dcc.Markdown(
+ children="**Latency**",
+ style={"display": "inline-block"}
+ ),
html.Pre(
id="latency-metadata",
children="Click on data points in the graph"
@@ -627,7 +653,7 @@ class Layout:
})
elif trigger_id in ("btn-sel-display", "dpr-period"):
- fig_tput, fig_lat = graph_trending_tput(
+ fig_tput, fig_lat = graph_trending(
self.data, store_sel, self.layout, d_start, d_end
)
output.set_values({
@@ -653,7 +679,7 @@ class Layout:
new_store_sel.append(item)
store_sel = new_store_sel
if store_sel:
- fig_tput, fig_lat = graph_trending_tput(
+ fig_tput, fig_lat = graph_trending(
self.data, store_sel, self.layout, d_start, d_end
)
output.set_values({
@@ -691,8 +717,11 @@ class Layout:
Input("graph-tput", "clickData")
)
def _show_tput_metadata(hover_data):
+ """
+ """
if not hover_data:
raise PreventUpdate
+
return hover_data["points"][0]["text"].replace("<br>", "\n"),
@app.callback(
@@ -702,15 +731,40 @@ class Layout:
Input("graph-latency", "clickData")
)
def _show_latency_metadata(hover_data):
+ """
+ """
if not hover_data:
raise PreventUpdate
- graph = graph_hdrh_latency(
- hover_data["points"][0]["customdata"], self.layout
- )
- if not graph:
- graph = no_update
+
+ graph = no_update
+ hdrh_data = hover_data["points"][0].get("customdata", None)
+ if hdrh_data:
+ graph = graph_hdrh_latency(hdrh_data, self.layout)
+
return (
hover_data["points"][0]["text"].replace("<br>", "\n"),
graph,
self.STYLE_INLINE if graph else self.STYLE_HIDEN
)
+
+ @app.callback(
+ Output("download-data", "data"),
+ State("selected-tests", "data"),
+ Input("btn-download-data", "n_clicks"),
+ prevent_initial_call=True
+ )
+ def _download_data(store_sel, n_clicks):
+ """
+ """
+
+ if not n_clicks:
+ raise PreventUpdate
+
+ df = pd.DataFrame()
+ for itm in store_sel:
+ sel_data = select_trending_data(self.data, itm)
+ if sel_data is None:
+ continue
+ df = pd.concat([df, sel_data], ignore_index=True)
+
+ return dcc.send_data_frame(df.to_csv, "trending_data.csv")