path: root/src/vpp-api/client/stat_client.h
/*
 * stat_client.h - Library for access to VPP statistics segment
 *
 * Copyright (c) 2018 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef included_stat_client_h
#define included_stat_client_h

#include <stdint.h>
#include <unistd.h>
#include <vlib/counter_types.h>

typedef enum
{
  STAT_DIR_TYPE_ILLEGAL = 0,
  STAT_DIR_TYPE_SCALAR_INDEX,
  STAT_DIR_TYPE_COUNTER_VECTOR_SIMPLE,
  STAT_DIR_TYPE_COUNTER_VECTOR_COMBINED,
  STAT_DIR_TYPE_ERROR_INDEX,
} stat_directory_type_t;

/* Default socket to exchange segment fd */
#define STAT_SEGMENT_SOCKET_FILE "/run/vpp/stats.sock"

typedef struct stat_client_main_t stat_client_main_t;

typedef struct
{
  char *name;
  stat_directory_type_t type;
  union
  {
    double scalar_value;
    uint64_t error_value;
    counter_t **simple_counter_vec;
    vlib_counter_t **combined_counter_vec;
  };
} stat_segment_data_t;

stat_client_main_t *stat_client_get (void);
void stat_client_free (stat_client_main_t * sm);
int stat_segment_connect_r (char *socket_name, stat_client_main_t * sm);
int stat_segment_connect (char *socket_name);
void stat_segment_disconnect_r (stat_client_main_t * sm);
void stat_segment_disconnect (void);
uint8_t **stat_segment_string_vector (uint8_t ** string_vector, char *string);
int stat_segment_vec_len (void *vec);
void stat_segment_vec_free (void *vec);
uint32_t *stat_segment_ls_r (uint8_t ** patterns, stat_client_main_t * sm);
uint32_t *stat_segment_ls (uint8_t ** pattern);
stat_segment_data_t *stat_segment_dump_r (uint32_t * stats,
					  stat_client_main_t * sm);
stat_segment_data_t *stat_segment_dump (uint32_t * counter_vec);
stat_segment_data_t *stat_segment_dump_entry_r (uint32_t index,
						stat_client_main_t * sm);
stat_segment_data_t *stat_segment_dump_entry (uint32_t index);

void stat_segment_data_free (stat_segment_data_t * res);
double stat_segment_heartbeat_r (stat_client_main_t * sm);
double stat_segment_heartbeat (void);

char *stat_segment_index_to_name (uint32_t index);

#endif /* included_stat_client_h */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */
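
/*
 * Usage sketch -- illustrative only, not part of stat_client.h.  It relies
 * solely on the declarations above: connect to the default stats socket,
 * list entries matching a pattern, dump them and print the scalar values.
 * The include path, the "/sys/" pattern and the error handling are
 * assumptions made for the example, not taken from the header.
 */
#include <stdio.h>
#include "stat_client.h"

int
main (void)
{
  if (stat_segment_connect (STAT_SEGMENT_SOCKET_FILE) != 0)
    {
      fprintf (stderr, "connect to %s failed\n", STAT_SEGMENT_SOCKET_FILE);
      return 1;
    }

  /* Build a pattern vector and list the matching directory entries. */
  uint8_t **patterns = stat_segment_string_vector (0, "/sys/");
  uint32_t *dir = stat_segment_ls (patterns);

  /* Dump the selected entries; print scalar counters only. */
  stat_segment_data_t *res = stat_segment_dump (dir);
  for (int i = 0; i < stat_segment_vec_len (res); i++)
    if (res[i].type == STAT_DIR_TYPE_SCALAR_INDEX)
      printf ("%s: %f\n", res[i].name, res[i].scalar_value);

  stat_segment_data_free (res);
  stat_segment_vec_free (dir);
  stat_segment_disconnect ();
  return 0;
}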
# Copyright (c) 2021 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Convert output_info.xml files into JSON structures.

Version: 0.1.0
Date:    22nd June 2021

The JSON structure is defined in https://gerrit.fd.io/r/c/csit/+/28992
"""

import os
import re
import json
import logging
import gzip

from os.path import join
from shutil import rmtree
from copy import deepcopy
from json import loads

from pal_utils import get_files


class JSONData:
    """A Class storing and manipulating data from tests.
    """

    def __init__(self, template=None):
        """Initialization.

        :param template: JSON formatted template used to store data. It can
            include default values.
        :type template: dict
        """

        self._template = deepcopy(template)
        self._data = self._template if self._template else dict()

    def __str__(self):
        """Return a string with human readable data.

        :returns: Readable description.
        :rtype: str
        """
        return str(self._data)

    def __repr__(self):
        """Return a string executable as Python constructor call.

        :returns: Executable constructor call.
        :rtype: str
        """
        return f"JSONData(template={self._template!r})"

    @property
    def data(self):
        """Getter

        :return: Data stored in the object.
        :rtype: dict
        """
        return self._data

    def update(self, kwargs):
        """Update the data with new data from the dictionary.

        :param kwargs: Key value pairs to be added to the data.
        :type kwargs: dict
        """
        self._data.update(kwargs)

    def set_key(self, key, val):
        """Setter.

        :param key: The key to be updated / added.
        :param val: The key value.
        :type key: str
        :type val: object
        """
        self._data[key] = deepcopy(val)

    def add_to_list(self, key, val):
        """Add an item to the list identified by key.

        :param key: The key identifying the list.
        :param val: The value to be appended to the list. If val is a list,
            extend is used.
        :type key: str
        :type val: object
        """
        if self._data.get(key, None) is None:
            self._data[key] = list()
        if isinstance(val, list):
            self._data[key].extend(val)
        else:
            self._data[key].append(val)

    def dump(self, file_out, indent=None):
        """Write JSON data to a file.

        :param file_out: Path to the output JSON file.
        :param indent: Indentation of items in the JSON string. It is passed
            directly to the json.dump method.
        :type file_out: str
        :type indent: str
        """
        try:
            with open(file_out, u"w") as file_handler:
                json.dump(self._data, file_handler, indent=indent)
        except OSError as err:
            logging.warning(f"{repr(err)} Skipping")

    def load(self, file_in):
        """Load JSON data from a file.

        :param file_in: Path to the input JSON file.
        :type file_in: str
        :raises: ValueError if the data being deserialized is not a valid
            JSON document.
        :raises: IOError if the file is not found or corrupted.
        """
        with open(file_in, u"r") as file_handler:
            self._data = json.load(file_handler)
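

# Usage sketch (illustrative, not part of the original module): JSONData is a
# thin wrapper around a dict, so a typical round trip looks like the commented
# example below.  The template, keys and file name are hypothetical.
#
#     data = JSONData(template={u"version": u"0.1.0"})
#     data.set_key(u"test_id", u"example.suite.test")
#     data.add_to_list(u"log", {u"msg": u"first entry"})
#     data.update({u"status": u"PASSED"})
#     data.dump(u"example.json", indent=u"    ")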


def _export_test_from_xml_to_json(tid, in_data, out, template, metadata):
    """Export data from a test to a json structure.

    :param tid: Test ID.
    :param in_data: Test data.
    :param out: Path to output json file.
    :param template: JSON template with optional default values.
    :param metadata: Data which are not stored in the XML structure.
    :type tid: str
    :type in_data: dict
    :type out: str
    :type template: dict
    :type metadata: dict
    """

    data = JSONData(template=template)

    data.update(metadata)
    data.set_key(u"test_id", tid)
    t_type = in_data.get(u"type", u"")
    t_type = u"NDRPDR" if t_type == u"CPS" else t_type  # It is NDRPDR
    data.set_key(u"test_type", t_type)
    tags = in_data.get(u"tags", list())
    data.set_key(u"tags", tags)
    data.set_key(u"documentation", in_data.get(u"documentation", u""))
    data.set_key(u"message", in_data.get(u"msg", u""))
    data.set_key(u"start_time", in_data.get(u"starttime", u""))
    data.set_key(u"end_time", in_data.get(u"endtime", u""))
    data.set_key(u"status", in_data.get(u"status", u"FAILED"))
    sut_type = u""
    if u"vpp" in tid:
        sut_type = u"vpp"
    elif u"dpdk" in tid:
        sut_type = u"dpdk"
    data.set_key(u"sut_type", sut_type)

    # Process configuration history:
    in_papi = deepcopy(in_data.get(u"conf_history", None))
    if in_papi:
        regex_dut = re.compile(r'\*\*DUT(\d):\*\*')
        node_id = u"dut1"
        for line in in_papi.split(u"\n"):
            if not line:
                continue
            groups = re.search(regex_dut, line)
            if groups:
                node_id = f"dut{groups.group(1)}"
            else:
                data.add_to_list(
                    u"log",
                    {
                        u"source_type": u"node",
                        u"source_id": node_id,
                        u"msg_type": u"papi",
                        u"log_level": u"INFO",
                        u"timestamp": in_data.get(u"starttime", u""),
                        u"msg": line,
                        u"data": list()
                    }
                )

    # Process show runtime:
    if in_data.get(u"telemetry-show-run", None):
        for item in in_data[u"telemetry-show-run"].values():
            data.add_to_list(u"log", item.get(u"runtime", dict()))
    else:
        in_sh_run = deepcopy(in_data.get(u"show-run", None))
        if in_sh_run:
            # Transform to OpenMetrics format
            for key, val in in_sh_run.items():
                log_item = {
                    u"source_type": u"node",
                    u"source_id": key,
                    u"msg_type": u"metric",
                    u"log_level": u"INFO",
                    u"timestamp": in_data.get(u"starttime", u""),
                    u"msg": u"show_runtime",
                    u"data": list()
                }
                runtime = loads(val.get(u"runtime", list()))
                for item in runtime:
                    for metric, m_data in item.items():
                        if metric == u"name":
                            continue
                        for idx, m_item in enumerate(m_data):
                            log_item[u"data"].append(
                                {
                                    u"name": metric,
                                    u"value": m_item,
                                    u"labels": {
                                        u"host": val.get(u"host", u""),
                                        u"socket": val.get(u"socket", u""),
                                        u"graph_node": item.get(u"name", u""),
                                        u"thread_id": str(idx)
                                    }
                                }
                            )
                data.add_to_list(u"log", log_item)

    # Process results:
    results = dict()
    if t_type == u"DEVICETEST":
        pass  # Nothing to add.
    elif t_type == u"NDRPDR":
        results = {
            u"throughput": {
                u"unit":
                    u"cps" if u"TCP_CPS" in tags or u"UDP_CPS" in tags
                    else u"pps",
                u"ndr": {
                    u"value": {
                        u"lower": in_data.get(u"throughput", dict()).
                                  get(u"NDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"throughput", dict()).
                                  get(u"NDR", dict()).get(u"UPPER", u"NaN")
                    },
                    u"value_gbps": {
                        u"lower": in_data.get(u"gbps", dict()).
                                  get(u"NDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"gbps", dict()).
                                  get(u"NDR", dict()).get(u"UPPER", u"NaN")
                    }
                },
                u"pdr": {
                    u"value": {
                        u"lower": in_data.get(u"throughput", dict()).
                                  get(u"PDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"throughput", dict()).
                                  get(u"PDR", dict()).get(u"UPPER", u"NaN")
                    },
                    u"value_gbps": {
                        u"lower": in_data.get(u"gbps", dict()).
                                  get(u"PDR", dict()).get(u"LOWER", u"NaN"),
                        u"upper": in_data.get(u"gbps", dict()).
                                  get(u"PDR", dict()).get(u"UPPER", u"NaN")
                    }
                }
            },
            u"latency": {
                u"forward": {
                    u"pdr_90": in_data.get(u"latency", dict()).
                               get(u"PDR90", dict()).get(u"direction1", u"NaN"),
                    u"pdr_50": in_data.get(u"latency", dict()).
                               get(u"PDR50", dict()).get(u"direction1", u"NaN"),
                    u"pdr_10": in_data.get(u"latency", dict()).
                               get(u"PDR10", dict()).get(u"direction1", u"NaN"),
                    u"pdr_0": in_data.get(u"latency", dict()).
                              get(u"LAT0", dict()).get(u"direction1", u"NaN")
                },
                u"reverse": {
                    u"pdr_90": in_data.get(u"latency", dict()).
                               get(u"PDR90", dict()).get(u"direction2", u"NaN"),
                    u"pdr_50": in_data.get(u"latency", dict()).
                               get(u"PDR50", dict()).get(u"direction2", u"NaN"),
                    u"pdr_10": in_data.get(u"latency", dict()).
                               get(u"PDR10", dict()).get(u"direction2", u"NaN"),
                    u"pdr_0": in_data.get(u"latency", dict()).
                              get(u"LAT0", dict()).get(u"direction2", u"NaN")
                }
            }
        }
    elif t_type == "MRR":
        results = {
            u"unit": u"pps",  # Old data use only pps
            u"samples": in_data.get(u"result", dict()).get(u"samples", list()),
            u"avg": in_data.get(u"result", dict()).get(u"receive-rate", u"NaN"),
            u"stdev": in_data.get(u"result", dict()).
                      get(u"receive-stdev", u"NaN")
        }
    elif t_type == "SOAK":
        results = {
            u"critical_rate": {
                u"lower": in_data.get(u"throughput", dict()).
                          get(u"LOWER", u"NaN"),
                u"upper": in_data.get(u"throughput", dict()).
                          get(u"UPPER", u"NaN"),
            }
        }
    elif t_type == "HOSTSTACK":
        results = in_data.get(u"result", dict())
    # elif t_type == "TCP":  # Not used ???
    #     results = in_data.get(u"result", u"NaN")
    elif t_type == "RECONF":
        results = {
            u"loss": in_data.get(u"result", dict()).get(u"loss", u"NaN"),
            u"time": in_data.get(u"result", dict()).get(u"time", u"NaN")
        }
    else:
        pass
    data.set_key(u"results", results)

    data.dump(out, indent=u"    ")
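

# Input sketch (hypothetical values): a minimal in_data dict accepted by
# _export_test_from_xml_to_json() for an MRR test, derived from the keys the
# function reads above.  The test id, metadata and numbers are made up.
#
#     in_data = {
#         u"type": u"MRR",
#         u"tags": [u"2T1C", u"64B"],
#         u"msg": u"example message",
#         u"starttime": u"20210622 12:00:00",
#         u"endtime": u"20210622 12:01:00",
#         u"status": u"PASSED",
#         u"result": {
#             u"samples": [1.0e6, 1.1e6, 0.9e6],
#             u"receive-rate": 1.0e6,
#             u"receive-stdev": 1.0e5,
#         },
#     }
#     _export_test_from_xml_to_json(
#         u"tests.vpp.perf.example-2t1c-mrr", in_data, u"example.json",
#         template=None, metadata={u"job": u"hypothetical-job"}
#     )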


def convert_xml_to_json(spec, data):
    """Convert downloaded XML files into JSON.

    Procedure:
    - create one json file for each test,
    - gzip all json files one by one,
    - delete json files.

    :param spec: Specification read from the specification files.
    :param data: Input data parsed from output.xml files.
    :type spec: Specification
    :type data: InputData
    """

    logging.info(u"Converting downloaded XML files to JSON ...")

    template_name = spec.output.get(u"use-template", None)
    structure = spec.output.get(u"structure", u"tree")
    if template_name:
        with open(template_name, u"r") as file_handler:
            template = json.load(file_handler)
    else:
        template = None

    build_dir = spec.environment[u"paths"][u"DIR[BUILD,JSON]"]
    try:
        rmtree(build_dir)
    except FileNotFoundError:
        pass  # It does not exist

    os.mkdir(build_dir)

    for job, builds in data.data.items():
        logging.info(f"  Processing job {job}")
        if structure == "tree":
            os.makedirs(join(build_dir, job), exist_ok=True)
        for build_nr, build in builds.items():
            logging.info(f"  Processing build {build_nr}")
            if structure == "tree":
                os.makedirs(join(build_dir, job, build_nr), exist_ok=True)
            for test_id, test_data in build[u"tests"].items():
                groups = re.search(re.compile(r'-(\d+[tT](\d+[cC]))-'), test_id)
                if groups:
                    test_id = test_id.replace(groups.group(1), groups.group(2))
                logging.info(f"  Processing test {test_id}")
                if structure == "tree":
                    dirs = test_id.split(u".")[:-1]
                    name = test_id.split(u".")[-1]
                    os.makedirs(
                        join(build_dir, job, build_nr, *dirs), exist_ok=True
                    )
                    file_name = \
                        f"{join(build_dir, job, build_nr, *dirs, name)}.json"
                else:
                    file_name = join(
                        build_dir,
                        u".".join((job, build_nr, test_id, u"json"))
                    )
                suite_id = test_id.rsplit(u".", 1)[0].replace(u" ", u"_")
                _export_test_from_xml_to_json(
                    test_id, test_data, file_name, template,
                    {
                        u"ci": u"jenkins.fd.io",
                        u"job": job,
                        u"build_number": build_nr,
                        u"suite_id": suite_id,
                        u"suite_doc": build[u"suites"].get(suite_id, dict()).
                                      get(u"doc", u""),
                        u"testbed": build[u"metadata"].get(u"testbed", u""),
                        u"sut_version": build[u"metadata"].get(u"version", u"")
                    }
                )

    # gzip the json files:
    for file in get_files(build_dir, u"json"):
        with open(file, u"rb") as src:
            with gzip.open(f"{file}.gz", u"wb") as dst:
                dst.writelines(src)
        os.remove(file)

    logging.info(u"Done.")