author      pmikus <pmikus@cisco.com>          2017-02-01 18:54:58 +0100
committer   Peter Mikus <pmikus@cisco.com>     2017-02-03 20:40:09 +0000
commit      35e5a327a66c05a3356d214dcfdece59ca725998 (patch)
tree        a111bb1d5b9d40f358655187d5bc4a3b899b0d06 /resources/tools/report_gen
parent      6baa0bc9921bc13b1adff113c20a4db766c3feba (diff)
CSIT 1701 report files and script AD1
CSIT 1701 report files and script addendum 1

Edits to correct and align all Overview sub-sections. Updates in Performance
CSIT Release Notes - added more NDR and PDR performance changes.

Change-Id: I52b6ee89e9c536fb4ab9d30dc27cca8dbdd88a20
Signed-off-by: pmikus <pmikus@cisco.com>
Signed-off-by: Maciek Konstantynowicz <mkonstan@cisco.com>
Diffstat (limited to 'resources/tools/report_gen')
-rw-r--r--   resources/tools/report_gen/conf.py              9
-rwxr-xr-x   resources/tools/report_gen/run_report.sh      106
-rwxr-xr-x   resources/tools/report_gen/run_robot_data.py  406
3 files changed, 455 insertions, 66 deletions
diff --git a/resources/tools/report_gen/conf.py b/resources/tools/report_gen/conf.py
index e6fb23cc66..12944e47c4 100644
--- a/resources/tools/report_gen/conf.py
+++ b/resources/tools/report_gen/conf.py
@@ -44,9 +44,9 @@ source_suffix = ['.rst', '.md']
master_doc = 'index'
# General information about the project.
-project = u'CSIT'
-copyright = u'2017, fd.io'
-author = u'CSIT'
+project = u'FD.io CSIT'
+copyright = u'2017, FD.io'
+author = u'FD.io CSIT'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -158,6 +158,3 @@ texinfo_documents = [
author, 'CSIT1701report', 'One line description of project.',
'Miscellaneous'),
]
-
-
-
diff --git a/resources/tools/report_gen/run_report.sh b/resources/tools/report_gen/run_report.sh
index bbb048619e..2a88750464 100755
--- a/resources/tools/report_gen/run_report.sh
+++ b/resources/tools/report_gen/run_report.sh
@@ -6,7 +6,8 @@ SOURCE_DIR='../../../docs/report'
STATIC_DIR="${BUILD_DIR}/_static"
STATIC_DIR_VPP="${STATIC_DIR}/vpp"
STATIC_DIR_TESTPMD="${STATIC_DIR}/testpmd"
-CSS_PATCH_FILE="${BUILD_DIR}/_static/theme_overrides.css"
+STATIC_DIR_ARCH="${STATIC_DIR}/archive"
+CSS_PATCH_FILE="${STATIC_DIR}/theme_overrides.css"
sudo apt-get install -y libxml2 libxml2-dev libxslt-dev build-essential zlib1g-dev
@@ -31,7 +32,10 @@ pip install -r requirements.txt
export PYTHONPATH=`pwd`
# Generate the documentation:
-sphinx-build -v -c . -a -b html -E -D release=$1 -D version=$1 ${SOURCE_DIR} ${BUILD_DIR}/
+
+DATE=$(date -u '+%d-%b-%Y')
+
+sphinx-build -v -c . -a -b html -E -D release=$1 -D version="$1 report - $DATE" ${SOURCE_DIR} ${BUILD_DIR}/
# Patch the CSS for tables layout
cat - > ${CSS_PATCH_FILE} <<"_EOF"
@@ -54,34 +58,51 @@ _EOF
echo Downloading raw outputs for plots ...
mkdir -p ${STATIC_DIR_VPP}
mkdir -p ${STATIC_DIR_TESTPMD}
+mkdir -p ${STATIC_DIR_ARCH}
+
+JEN_URL='https://jenkins.fd.io/view/csit/job'
+JEN_FILE_PERF='output_perf_data.xml'
+
+JEN_JOB='csit-vpp-perf-1701-all'
+JEN_BUILD=(3 4 7)
+
+for i in "${JEN_BUILD[@]}"; do
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/${JEN_FILE_PERF} -O ${STATIC_DIR_VPP}/${JEN_JOB}-${i}.xml
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/\*zip\*/archive.zip -O ${STATIC_DIR_ARCH}/${JEN_JOB}-${i}.zip
+done
+
+JEN_JOB='csit-vpp-perf-1701-long'
+JEN_BUILD=(2 4)
-JENKINS_URL='https://jenkins.fd.io/view/csit/job/'
-JENKINS_DIR='/artifact/'
+for i in "${JEN_BUILD[@]}"; do
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/${JEN_FILE_PERF} -O ${STATIC_DIR_VPP}/${JEN_JOB}-${i}.xml
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/\*zip\*/archive.zip -O ${STATIC_DIR_ARCH}/${JEN_JOB}-${i}.zip
+done
-PERF_JENKINS_JOB='csit-vpp-perf-1701-all'
-PERF_JENKINS_BUILD=(3 4 7)
-PERF_JENKINS_FILE='output_perf_data.xml'
+JEN_JOB='csit-dpdk-perf-1701-all'
+JEN_BUILD=(2 3)
-for i in "${PERF_JENKINS_BUILD[@]}"; do
- wget -q ${JENKINS_URL}${PERF_JENKINS_JOB}/${i}${JENKINS_DIR}${PERF_JENKINS_FILE} -O ${STATIC_DIR_VPP}/${PERF_JENKINS_JOB}-${i}.xml
+for i in "${JEN_BUILD[@]}"; do
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/${JEN_FILE_PERF} -O ${STATIC_DIR_TESTPMD}/${JEN_JOB}-${i}.xml
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/\*zip\*/archive.zip -O ${STATIC_DIR_ARCH}/${JEN_JOB}-${i}.zip
done
-PERF_JENKINS_JOB='csit-vpp-perf-1701-long'
-PERF_JENKINS_BUILD=(2 4)
-PERF_JENKINS_FILE='output_perf_data.xml'
+JEN_JOB='csit-vpp-functional-1701-virl'
+JEN_BUILD=(18)
-for i in "${PERF_JENKINS_BUILD[@]}"; do
- wget -q ${JENKINS_URL}${PERF_JENKINS_JOB}/${i}${JENKINS_DIR}${PERF_JENKINS_FILE} -O ${STATIC_DIR_VPP}/${PERF_JENKINS_JOB}-${i}.xml
+for i in "${JEN_BUILD[@]}"; do
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/\*zip\*/archive.zip -O ${STATIC_DIR_ARCH}/${JEN_JOB}-${i}.zip
done
-PERF_JENKINS_JOB='csit-dpdk-perf-1701-all'
-PERF_JENKINS_BUILD=(2 3)
-PERF_JENKINS_FILE='output_perf_data.xml'
+JEN_URL='https://jenkins.fd.io/view/hc2vpp/job'
+JEN_JOB='hc2vpp-csit-integration-1701-ubuntu1404'
+JEN_BUILD=(1)
-for i in "${PERF_JENKINS_BUILD[@]}"; do
- wget -q ${JENKINS_URL}${PERF_JENKINS_JOB}/${i}${JENKINS_DIR}${PERF_JENKINS_FILE} -O ${STATIC_DIR_TESTPMD}/${PERF_JENKINS_JOB}-${i}.xml
+for i in "${JEN_BUILD[@]}"; do
+ wget -q ${JEN_URL}/${JEN_JOB}/${i}/artifact/\*zip\*/archive.zip -O ${STATIC_DIR_ARCH}/${JEN_JOB}-${i}.zip
done
+
# Plot packets per second
python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-1t1c-l2-ndrdisc --title "64B-1t1c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"1T1C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
@@ -112,13 +133,13 @@ python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-1t1c
python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-2t2c-l2-pdrdisc --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 26000000
python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-4t4c-l2-pdrdisc --title "64B-4t4c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-pdrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"4T4C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 36000000
-python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-1t1c-ethip4-ip4-pdrdisc --title "64B-1t1c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") and not(contains(@tags,"NDRDISC")) or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP4FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
-python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-2t2c-ethip4-ip4-pdrdisc --title "64B-2t2c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") and not(contains(@tags,"NDRDISC")) or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP4FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 26000000
-python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-4t4c-ethip4-ip4-pdrdisc --title "64B-4t4c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") and not(contains(@tags,"NDRDISC")) or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and contains(@tags,"4T4C") and contains(@tags,"IP4FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 36000000
+python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-1t1c-ethip4-ip4-pdrdisc --title "64B-1t1c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and contains(@tags,"IP4FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
+python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-2t2c-ethip4-ip4-pdrdisc --title "64B-2t2c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and contains(@tags,"IP4FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 26000000
+python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-4t4c-ethip4-ip4-pdrdisc --title "64B-4t4c-ethip4-ip4[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="64B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"4T4C") and contains(@tags,"IP4FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 36000000
-python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/78B-1t1c-ethip6-ip6-pdrdisc --title "78B-1t1c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") and not(contains(@tags,"NDRDISC")) or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and contains(@tags,"1T1C") and contains(@tags,"IP6FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
-python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/78B-2t2c-ethip6-ip6-pdrdisc --title "78B-2t2c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") and not(contains(@tags,"NDRDISC")) or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and contains(@tags,"2T2C") and contains(@tags,"IP6FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 26000000
-python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/78B-4t4c-ethip6-ip6-pdrdisc --title "78B-4t4c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") and not(contains(@tags,"NDRDISC")) or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and contains(@tags,"4T4C") and contains(@tags,"IP6FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 36000000
+python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/78B-1t1c-ethip6-ip6-pdrdisc --title "78B-1t1c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and contains(@tags,"IP6FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
+python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/78B-2t2c-ethip6-ip6-pdrdisc --title "78B-2t2c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and contains(@tags,"IP6FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 26000000
+python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/78B-4t4c-ethip6-ip6-pdrdisc --title "78B-4t4c-ethip6-ip6[a-z0-9]+-[a-z-]*pdrdisc" --xpath '//*[@framesize="78B" and (contains(@tags,"BASE") or contains(@tags,"SCALE") or contains(@tags,"FEATURE")) and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"4T4C") and contains(@tags,"IP6FWD") and not(contains(@tags,"VHOST"))]' --lower 0 --upper 36000000
python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-1t1c-ethip4-pdrdisc --title "64B-1t1c-ethip4[a-z0-9]+-[a-z0-9]*-pdrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"1T1C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
python run_plot.py --input ${STATIC_DIR_VPP} --output ${STATIC_DIR_VPP}/64B-2t2c-ethip4-pdrdisc --title "64B-2t2c-ethip4[a-z0-9]+-[a-z0-9]*-pdrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"ENCAP") and contains(@tags,"PDRDISC") and not(contains(@tags,"NDRDISC")) and contains(@tags,"2T2C") and (contains(@tags,"VXLAN") or contains(@tags,"VXLANGPE") or contains(@tags,"LISP") or contains(@tags,"LISPGPE") or contains(@tags,"GRE")) and not(contains(@tags,"VHOST"))]' --lower 0 --upper 16000000
@@ -170,41 +191,6 @@ python run_plot.py --input ${STATIC_DIR_TESTPMD} --output ${STATIC_DIR_TESTPMD}/
python run_plot.py --input ${STATIC_DIR_TESTPMD} --output ${STATIC_DIR_TESTPMD}/64B-2t2c-l2-ndrdisc-lat50 --title "64B-2t2c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"2T2C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50
python run_plot.py --input ${STATIC_DIR_TESTPMD} --output ${STATIC_DIR_TESTPMD}/64B-4t4c-l2-ndrdisc-lat50 --title "64B-4t4c-(eth|dot1q|dot1ad)-(l2xcbase|l2bdbasemaclrn)-ndrdisc" --xpath '//*[@framesize="64B" and contains(@tags,"BASE") and contains(@tags,"NDRDISC") and contains(@tags,"4T4C") and (contains(@tags,"L2BDMACSTAT") or contains(@tags,"L2BDMACLRN") or contains(@tags,"L2XCFWD")) and not(contains(@tags,"VHOST"))]' --latency lat_50
-# Download raw outputs for archive
-
-#echo Downloading raw outputs for archive ...
-#JENKINS_URL='https://jenkins.fd.io/view/csit/job/'
-#JENKINS_DIR='/artifact/*zip*/'
-#JENKINS_FILE='archive.zip'
-
-#PERF_JENKINS_JOB='csit-vpp-perf-1701-all'
-#PERF_JENKINS_BUILD=(3 4 7)
-
-#for i in "${PERF_JENKINS_BUILD[@]}"; do
-# wget -q ${JENKINS_URL}${PERF_JENKINS_JOB}/${i}${JENKINS_DIR}${JENKINS_FILE} -O ${STATIC_DIR}/${PERF_JENKINS_JOB}-${i}.zip
-#done
-
-#PERF_JENKINS_JOB='csit-vpp-perf-1701-long'
-#PERF_JENKINS_BUILD=(2 4)
-
-#for i in "${PERF_JENKINS_BUILD[@]}"; do
-# wget -q ${JENKINS_URL}${PERF_JENKINS_JOB}/${i}${JENKINS_DIR}${JENKINS_FILE} -O ${STATIC_DIR}/${PERF_JENKINS_JOB}-${i}.zip
-#done
-
-#FUNC_JENKINS_JOB='csit-vpp-functional-1701-virl'
-#FUNC_JENKINS_BUILD=(18)
-
-#for i in "${FUNC_JENKINS_BUILD[@]}"; do
-# wget -q ${JENKINS_URL}${FUNC_JENKINS_JOB}/${i}${JENKINS_DIR}${JENKINS_FILE} -O ${STATIC_DIR}/${FUNC_JENKINS_JOB}-${i}.zip
-#done
-
-#PERF_JENKINS_JOB='csit-dpdk-perf-1701-all'
-#PERF_JENKINS_BUILD=(2 3)
-
-#for i in "${PERF_JENKINS_BUILD[@]}"; do
-# wget -q ${JENKINS_URL}${PERF_JENKINS_JOB}/${i}${JENKINS_DIR}${JENKINS_FILE} -O ${STATIC_DIR}/${PERF_JENKINS_JOB}-${i}.zip
-#done
-
# Create archive
echo Creating csit.report.tar.gz ...
tar -czvf ./csit.report.tar.gz ${BUILD_DIR}
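
Note (reading aid only, not part of the commit): the run_report.sh changes above replace the commented-out archive download block with live loops that repeat the same two wget calls for every Jenkins job. A minimal bash sketch of that pattern follows; the helper name download_job_artifacts and the example calls are only illustrative, and it assumes STATIC_DIR_VPP and STATIC_DIR_ARCH are set as in the script.

#!/bin/bash
# Sketch only - mirrors the wget pattern used in run_report.sh above.
JEN_URL='https://jenkins.fd.io/view/csit/job'
JEN_FILE_PERF='output_perf_data.xml'

# download_job_artifacts <job> <xml_dir_or_empty> <build>...
download_job_artifacts () {
    local job="$1" xml_dir="$2"
    shift 2
    for build in "$@"; do
        if [ -n "${xml_dir}" ]; then
            # per-build performance results consumed by run_plot.py
            wget -q "${JEN_URL}/${job}/${build}/artifact/${JEN_FILE_PERF}" \
                 -O "${xml_dir}/${job}-${build}.xml"
        fi
        # full build archive kept under _static/archive for the report
        wget -q "${JEN_URL}/${job}/${build}/artifact/*zip*/archive.zip" \
             -O "${STATIC_DIR_ARCH}/${job}-${build}.zip"
    done
}

# Illustrative calls using job names and build numbers from the script:
download_job_artifacts 'csit-vpp-perf-1701-all' "${STATIC_DIR_VPP}" 3 4 7
download_job_artifacts 'csit-vpp-functional-1701-virl' '' 18
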
diff --git a/resources/tools/report_gen/run_robot_data.py b/resources/tools/report_gen/run_robot_data.py
new file mode 100755
index 0000000000..e2bcfa2f0d
--- /dev/null
+++ b/resources/tools/report_gen/run_robot_data.py
@@ -0,0 +1,406 @@
+#!/usr/bin/python
+
+# Copyright (c) 2017 Cisco and/or its affiliates.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Script extracts the data of interest (name, documentation, message, status)
+from a Robot Framework output file (output.xml) and prints it in the specified
+format (html, rst) to the defined output file.
+
+Supported formats:
+ - html
+ - rst
+
+:TODO:
+ - wiki
+ - md
+
+:Example:
+
+run_robot_data.py -i "output.xml" -o "tests.rst" -f "rst" -s 3 -l 2
+
+The example reads the data from "output.xml" and writes the output to
+"tests.rst" in rst format. Processing starts at the 3rd level of the xml
+structure and the generated document hierarchy starts at the 2nd level.
+"""
+
+import argparse
+import re
+import sys
+import json
+import string
+
+from robot.api import ExecutionResult, ResultVisitor
+
+
+class ExecutionChecker(ResultVisitor):
+ """Class to traverse through the test suite structure.
+
+ The functionality implemented in this class generates a json file. Its
+ structure is:
+
+ [
+ {
+ "level": "Level of the suite, type: str",
+ "title": "Title of the suite, type: str",
+ "doc": "Documentation of the suite, type: str",
+ "table": [
+ ["TC name", "TC doc", "message or status"],
+ ["TC name", "TC doc", "message or status"],
+ ... other test cases ...
+ ["Name", "Documentation", "Message or Status"]
+ ]
+ },
+ ... other test suites ...
+ ]
+
+ .. note:: The header of the table with TCs is at the end of the table.
+ """
+
+ def __init__(self, args):
+ self.formatting = args.formatting
+
+ def visit_suite(self, suite):
+ """Implements traversing through the suite and its direct children.
+
+ :param suite: Suite to process.
+ :type suite: Suite
+ :returns: Nothing.
+ """
+
+ if self.start_suite(suite) is not False:
+ if suite.tests:
+ sys.stdout.write(',"tests":[')
+ else:
+ sys.stdout.write('},')
+
+ suite.suites.visit(self)
+ suite.tests.visit(self)
+
+ if suite.tests:
+ if "ndrdisc" in suite.longname.lower():
+ hdr = '["Name","Documentation","Message"]'
+ else:
+ hdr = '["Name","Documentation","Status"]'
+ sys.stdout.write(hdr + ']},')
+
+ self.end_suite(suite)
+
+ def start_suite(self, suite):
+ """Called when suite starts.
+
+ :param suite: Suite to process.
+ :type suite: Suite
+ :returns: Nothing.
+ """
+
+ level = len(suite.longname.split("."))
+ sys.stdout.write('{')
+ sys.stdout.write('"level":"' + str(level) + '",')
+ sys.stdout.write('"title":"' + suite.name.replace('"', "'") + '",')
+ sys.stdout.write('"doc":"' + suite.doc.replace('"', "'").
+ replace('\n', ' ').replace('\r', '').
+ replace('*[', ' |br| *[') + '"')
+
+ def end_suite(self, suite):
+ """Called when suite ends.
+
+ :param suite: Suite to process.
+ :type suite: Suite
+ :returns: Nothing.
+ """
+ pass
+
+ def visit_test(self, test):
+ """Implements traversing through the test.
+
+ :param test: Test to process.
+ :type test: Test
+ :returns: Nothing.
+ """
+ if self.start_test(test) is not False:
+ self.end_test(test)
+
+ def start_test(self, test):
+ """Called when test starts.
+
+ :param test: Test to process.
+ :type test: Test
+ :returns: Nothing.
+ """
+
+ name = test.name.replace('"', "'")
+ doc = test.doc.replace('"', "'").replace('\n', ' ').replace('\r', '').\
+ replace('[', ' |br| [')
+ if any("NDRPDRDISC" in tag for tag in test.tags):
+ msg = test.message.replace('\n', ' |br| ').replace('\r', ''). \
+ replace('"', "'")
+
+ sys.stdout.write('["' + name + '","' + doc + '","' + msg + '"]')
+ else:
+ sys.stdout.write(
+ '["' + name + '","' + doc + '","' + test.status + '"]')
+
+ def end_test(self, test):
+ """Called when test ends.
+
+ :param test: Test to process.
+ :type test: Test
+ :returns: Nothing.
+ """
+ sys.stdout.write(',')
+
+
+def do_html(data, args):
+ """Generation of a html file from json data.
+
+ :param data: List of suites from json file.
+ :param args: Parsed arguments.
+ :type data: list of dict
+ :type args: ArgumentParser
+ :returns: Nothing.
+ """
+
+ shift = int(args.level)
+ start = int(args.start)
+
+ output = open(args.output, 'w')
+
+ output.write('<html>')
+ for item in data:
+ if int(item['level']) < start:
+ continue
+ level = str(int(item['level']) - start + shift)
+ output.write('<h' + level + '>' + item['title'].lower() +
+ '</h' + level + '>')
+ output.write('<p>' + re.sub(r"(\*)(.*?)(\*)", r"<b>\2</b>", item['doc'],
+ 0, flags=re.MULTILINE).
+ replace(' |br| ', '<br>') + '</p>')
+ try:
+ output.write(gen_html_table(item['tests']))
+ except KeyError:
+ continue
+ output.write('</html>')
+ output.close()
+
+
+def gen_html_table(data):
+ """Generates a table with TCs' names, documentation and messages / statuses
+ in html format. There is no css used.
+
+ :param data: Json data representing a table with TCs.
+ :type data: str
+ :returns: Table with TCs' names, documentation and messages / statuses in
+ html format.
+ :rtype: str
+ """
+
+ table = '<table width=100% border=1><tr>'
+ table += '<th width=30%>Name</th>'
+ table += '<th width=50%>Documentation</th>'
+ table += '<th width=20%>Status</th></tr>'
+
+ for item in data[0:-2]:
+ table += '<tr>'
+ for element in item:
+ table += '<td>' + element.replace(' |br| ', '<br>') + '</td>'
+ table += '</tr></table>'
+
+ return table
+
+
+def do_rst(data, args):
+ """Generation of a rst file from json data.
+
+ :param data: List of suites from json file.
+ :param args: Parsed arguments.
+ :type data: list of dict
+ :type args: ArgumentParser
+ :returns: Nothing.
+ """
+
+ hdrs = ['=', '-', '`', "'", '.', '~', '*', '+', '^']
+ shift = int(args.level)
+ start = int(args.start)
+
+ output = open(args.output, 'w')
+ output.write('\n.. |br| raw:: html\n\n <br />\n\n')
+
+ for item in data:
+ if int(item['level']) < start:
+ continue
+ if 'ndrchk' in item['title'].lower():
+ continue
+ output.write(item['title'].lower() + '\n' +
+ hdrs[int(item['level']) - start + shift] *
+ len(item['title']) + '\n\n')
+ output.write(item['doc'].replace('*', '**').replace('|br|', '\n\n -') +
+ '\n\n')
+ try:
+ output.write(gen_rst_table(item['tests']) + '\n\n')
+ except KeyError:
+ continue
+ output.close()
+
+
+def gen_rst_table(data):
+ """Generates a table with TCs' names, documentation and messages / statuses
+ in rst format.
+
+ :param data: Json data representing a table with TCs.
+ :type data: str
+ :returns: Table with TCs' names, documentation and messages / statuses in
+ rst format.
+ :rtype: str
+ """
+
+ table = []
+ # max size of each column
+ lengths = map(max, zip(*[[len(str(elt)) for elt in item] for item in data]))
+
+ start_of_line = '| '
+ vert_separator = ' | '
+ end_of_line = ' |'
+ line_marker = '-'
+
+ meta_template = vert_separator.join(['{{{{{0}:{{{0}}}}}}}'.format(i)
+ for i in range(len(lengths))])
+ template = '{0}{1}{2}'.format(start_of_line, meta_template.format(*lengths),
+ end_of_line)
+ # determine top/bottom borders
+ to_separator = string.maketrans('| ', '+-')
+ start_of_line = start_of_line.translate(to_separator)
+ vert_separator = vert_separator.translate(to_separator)
+ end_of_line = end_of_line.translate(to_separator)
+ separator = '{0}{1}{2}'.format(start_of_line, vert_separator.
+ join([x * line_marker for x in lengths]),
+ end_of_line)
+ # determine header separator
+ th_separator_tr = string.maketrans('-', '=')
+ start_of_line = start_of_line.translate(th_separator_tr)
+ line_marker = line_marker.translate(th_separator_tr)
+ vertical_separator = vert_separator.translate(th_separator_tr)
+ end_of_line = end_of_line.translate(th_separator_tr)
+ th_separator = '{0}{1}{2}'.format(start_of_line, vertical_separator.
+ join([x * line_marker for x in lengths]),
+ end_of_line)
+ # prepare table
+ table.append(separator)
+ # set table header
+ titles = data[-1]
+ table.append(template.format(*titles))
+ table.append(th_separator)
+ # generate table rows
+ for d in data[0:-2]:
+ table.append(template.format(*d))
+ table.append(separator)
+ table.append(template.format(*data[-2]))
+ table.append(separator)
+ return '\n'.join(table)
+
+
+def do_md(data, args):
+ """Generation of a rst file from json data.
+
+ :param data: List of suites from json file.
+ :param args: Parsed arguments.
+ :type data: list of dict
+ :type args: ArgumentParser
+ :returns: Nothing.
+ """
+ raise NotImplementedError("Export to 'md' format is not implemented.")
+
+
+def do_wiki(data, args):
+ """Generation of a wiki page from json data.
+
+ :param data: List of suites from json file.
+ :param args: Parsed arguments.
+ :type data: list of dict
+ :type args: ArgumentParser
+ :returns: Nothing.
+ """
+ raise NotImplementedError("Export to 'wiki' format is not implemented.")
+
+
+def process_robot_file(args):
+ """Process data from robot output.xml file and generate defined file type.
+
+ :param args: Parsed arguments.
+ :type args: ArgumentParser
+ :return: Nothing.
+ """
+
+ old_sys_stdout = sys.stdout
+ sys.stdout = open(args.output + '.json', 'w')
+
+ result = ExecutionResult(args.input)
+ checker = ExecutionChecker(args)
+
+ sys.stdout.write('[')
+ result.visit(checker)
+ sys.stdout.write('{}]')
+ sys.stdout.close()
+ sys.stdout = old_sys_stdout
+
+ with open(args.output + '.json', 'r') as json_file:
+ data = json.load(json_file)
+ data.pop(-1)
+
+ if args.formatting == 'rst':
+ do_rst(data, args)
+ elif args.formatting == 'wiki':
+ do_wiki(data, args)
+ elif args.formatting == 'html':
+ do_html(data, args)
+ elif args.formatting == 'md':
+ do_md(data, args)
+
+
+def parse_args():
+ """Parse arguments from cmd line.
+
+ :return: Parsed arguments.
+ :rtype: ArgumentParser
+ """
+
+ parser = argparse.ArgumentParser(description=__doc__,
+ formatter_class=argparse.
+ RawDescriptionHelpFormatter)
+ parser.add_argument("-i", "--input",
+ required=True,
+ type=argparse.FileType('r'),
+ help="Robot XML log file")
+ parser.add_argument("-o", "--output",
+ type=str,
+ required=True,
+ help="Output file")
+ parser.add_argument("-f", "--formatting",
+ required=True,
+ choices=['html', 'wiki', 'rst', 'md'],
+ help="Output file format")
+ parser.add_argument("-s", "--start",
+ type=int,
+ default=1,
+ help="The first level to be taken from xml file")
+ parser.add_argument("-l", "--level",
+ type=int,
+ default=1,
+ help="The level of the first chapter in generated file")
+
+ return parser.parse_args()
+
+
+if __name__ == "__main__":
+ sys.exit(process_robot_file(parse_args()))
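
For orientation (an assumed usage, not part of the commit): process_robot_file() first redirects stdout into an intermediate JSON file named after the output file, then reads it back and dispatches to do_rst(), do_html(), do_md() or do_wiki(). A hedged invocation sketch, with illustrative file names, following the argparse options defined above:

./run_robot_data.py -i output.xml -o tests.rst -f rst -s 3 -l 2
# Expected side effects, per process_robot_file():
#   tests.rst.json - intermediate JSON dump of suites and tests
#                    (stdout is temporarily redirected into it)
#   tests.rst      - final document rendered by do_rst()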