author    Nathan Skrzypczak <nathan.skrzypczak@gmail.com>   2021-08-19 11:38:06 +0200
committer Dave Wallace <dwallacelf@gmail.com>                2021-10-13 23:22:32 +0000
commit    9ad39c026c8a3c945a7003c4aa4f5cb1d4c80160 (patch)
tree      3cca19635417e28ae381d67ae31c75df2925032d /test
parent    f47122e07e1ecd0151902a3cabe46c60a99bee8e (diff)
docs: better docs, mv doxygen to sphinx
This patch refactors the VPP sphinx docs in order to make it easier to
consume for external readers as well as VPP developers. It also makes
sphinx the single source of documentation, which simplifies maintenance
and operation.

Most important updates are:

- reformat the existing documentation as rst
- split RELEASE.md and move it into separate rst files
- remove section 'events'
- remove section 'archive'
- remove section 'related projects'
- remove section 'feature by release'
- remove section 'Various links'
- make (Configuration reference, CLI docs, developer docs) top level items in the list
- move 'Use Cases' as part of 'About VPP'
- move 'Troubleshooting' as part of 'Getting Started'
- move test framework docs into 'Developer Documentation'
- add a 'Contributing' section for gerrit, docs and other contributor related info
- deprecate doxygen and test-docs targets
- redirect the "make doxygen" target to "make docs"

Type: refactor

Change-Id: I552a5645d5b7964d547f99b1336e2ac24e7c209f
Signed-off-by: Nathan Skrzypczak <nathan.skrzypczak@gmail.com>
Signed-off-by: Andrew Yourtchenko <ayourtch@gmail.com>
Diffstat (limited to 'test')
-rw-r--r--   test/Makefile             25
-rw-r--r--   test/doc/Makefile         36
-rw-r--r--   test/doc/conf.py         352
-rw-r--r--   test/doc/index.rst        11
-rw-r--r--   test/doc/indices.rst       6
-rw-r--r--   test/doc/overview.rst    450
-rw-r--r--   test/requirements-3.txt  156
-rw-r--r--   test/requirements.txt      2
8 files changed, 116 insertions(+), 922 deletions(-)
diff --git a/test/Makefile b/test/Makefile
index e49fe6b9ade..42af2477890 100644
--- a/test/Makefile
+++ b/test/Makefile
@@ -14,7 +14,6 @@ ifndef TEST_DIR
endif
export TEST_BR = $(TEST_DIR)
-export TEST_DOC_BR = $(TEST_DIR)/doc/build
FAILED_DIR=/tmp/vpp-failed-unittests/
VPP_TEST_DIRS=$(shell ls -d $(TEST_DIR) $(EXTERN_TESTS))
@@ -239,21 +238,6 @@ wipe: reset
@rm -rf $(VENV_PATH)
@rm -rf $(patsubst %,%/__pycache__, $(VPP_TEST_DIRS))
-$(TEST_DOC_BR): $(PIP_INSTALL_DONE)
- @mkdir -p $@
- @bash -c "source $(VENV_PATH)/bin/activate && make -C doc html"
-
-.PHONY: doc
-doc: $(PIP_PATCH_DONE) $(TEST_DOC_BR)
- @echo
- @echo "Test Documentation URL: $(TEST_DOC_BR)/html/index.html"
- @echo "Run 'make test-wipe-doc test-doc' to rebuild the test docs"
- @echo
-
-.PHONY: wipe-doc
-wipe-doc:
- @rm -rf $(TEST_DOC_BR)
-
$(BUILD_COV_DIR):
@mkdir -p $@
@@ -279,7 +263,7 @@ wipe-papi:
@rm -rf $(PAPI_INSTALL_DONE) $(PAPI_WIPE_DIST)
.PHONY: wipe-all
-wipe-all: wipe wipe-papi wipe-doc wipe-cov
+wipe-all: wipe wipe-papi wipe-cov
@rm -rf $(TEST_BR)
.PHONY: checkstyle-diff
@@ -333,9 +317,8 @@ help:
@echo " test-gcov - build and run functional tests (gcov build)"
@echo " test-wipe - wipe (temporary) files generated by unit tests"
@echo " test-wipe-cov - wipe code coverage report for test framework"
- @echo " test-wipe-doc - wipe documentation for test framework"
@echo " test-wipe-papi - rebuild vpp_papi sources"
- @echo " test-wipe-all - wipe (temporary) files generated by unit tests, docs, and coverage"
+ @echo " test-wipe-all - wipe (temporary) files generated by unit tests, and coverage"
@echo " test-shell - enter shell with test environment"
@echo " test-shell-debug - enter shell with test environment (debug build)"
@echo " test-checkstyle - check PEP8 compliance for test framework"
@@ -405,10 +388,6 @@ help:
@echo " VPP_IN_GDB_NO_RMDIR=0 - don't remove existing tmp dir but fail instead"
@echo " VPP_IN_GDB_CMDLINE=1 - add 'interactive' to VPP arguments to run with command line"
@echo ""
- @echo "Creating test documentation"
- @echo " test-doc - generate documentation for test framework"
- @echo " test-wipe-doc - wipe documentation for test framework"
- @echo ""
@echo "Creating test code coverage report"
@echo " test-cov - generate code coverage report for test framework"
@echo " test-wipe-cov - wipe code coverage report for test framework"
diff --git a/test/doc/Makefile b/test/doc/Makefile
deleted file mode 100644
index 2d06cedd0be..00000000000
--- a/test/doc/Makefile
+++ /dev/null
@@ -1,36 +0,0 @@
-# Makefile for VPP Test documentation
-#
-
-SPHINXOPTS =
-SRC_DOC_DIR = $(TEST_DIR)/doc
-SPHINXBUILD = sphinx-build
-HTML_DOC_GEN_DIR = $(TEST_DOC_BR)/html
-API_DOC_GEN_DIR = $(TEST_DOC_BR)/api
-
-# Internal variables.
-ALLSPHINXOPTS = -d $(TEST_DOC_BR)/.sphinx-cache $(SPHINXOPTS) $(API_DOC_GEN_DIR) -c $(SRC_DOC_DIR)
-IN_VENV:=$(shell if pip -V | grep "venv" 2>&1 > /dev/null; then echo 1; else echo 0; fi)
-
-.PHONY: verify-virtualenv
-verify-virtualenv:
-ifndef TEST_DIR
- $(error TEST_DIR is not set)
-endif
-ifndef TEST_DOC_BR
- $(error TEST_DOC_BR is not set)
-endif
-ifeq ($(IN_VENV),0)
- $(error "Not running inside virtualenv (are you running 'make test-doc' from root?)")
-endif
-
-.PHONY: regen-api-doc
-regen-api-doc: verify-virtualenv
- @mkdir -p $(API_DOC_GEN_DIR)
- @cp $(SRC_DOC_DIR)/index.rst $(API_DOC_GEN_DIR)
- @cp $(SRC_DOC_DIR)/indices.rst $(API_DOC_GEN_DIR)
- @cp $(SRC_DOC_DIR)/overview.rst $(API_DOC_GEN_DIR)
- sphinx-apidoc -o $(API_DOC_GEN_DIR) -H "Module documentation" $(TEST_DIR)
-
-.PHONY: html
-html: regen-api-doc verify-virtualenv
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(HTML_DOC_GEN_DIR)
diff --git a/test/doc/conf.py b/test/doc/conf.py
deleted file mode 100644
index f73cde27fae..00000000000
--- a/test/doc/conf.py
+++ /dev/null
@@ -1,352 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# VPP test framework documentation build configuration file, created by
-# sphinx-quickstart on Thu Oct 13 08:45:03 2016.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-import os
-import sys
-import subprocess
-from datetime import date
-sys.path.insert(0, os.path.abspath('..'))
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#
-# needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
- 'sphinx.ext.autodoc',
-]
-autodoc_mock_imports = ['objgraph',
- 'parameterized',
- 'pexpect',
- 'psutil',
- 'pympler',
- 'scapy',
- 'syslog_rfc5424_parser',
- 'vpp_papi']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
-
-# The encoding of source files.
-#
-# source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'VPP test framework'
-copyright = f'{date.today().year}, FD.io VPP team'
-author = u'FD.io VPP team'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-output = subprocess.run(['../../src/scripts/version'], stdout=subprocess.PIPE)
-version = f'{output.stdout.decode("utf-8")}'
-# The full version, including alpha/beta/rc tags.
-release = f'{output.stdout.decode("utf-8")}'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#
-# today = ''
-#
-# Else, today_fmt is used as the format for a strftime call.
-#
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This patterns also effect to html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-#
-default_role = 'any'
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#
-add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
-# If true, `todo` and `todoList` produce output, else they produce nothing.
-todo_include_todos = False
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-# html_theme = 'alabaster'
-html_theme = 'sphinx_rtd_theme'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#
-# html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents.
-# "<project> v<release> documentation" by default.
-#
-# html_title = u'VPP test framework v0.1'
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-#
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#
-# html_logo = None
-
-# The name of an image file (relative to this directory) to use as a favicon of
-# the docs. This file should be a Windows icon file (.ico) being 16x16 or
-# 32x32 pixels large.
-#
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-# html_static_path = []
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-#
-# html_extra_path = []
-
-# If not None, a 'Last updated on:' timestamp is inserted at every page
-# bottom, using the given strftime format.
-# The empty string is equivalent to '%b %d, %Y'.
-#
-# html_last_updated_fmt = None
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-#
-# html_domain_indices = True
-
-# If false, no index is generated.
-#
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Language to be used for generating the HTML full-text search index.
-# Sphinx supports the following languages:
-# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
-# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
-#
-# html_search_language = 'en'
-
-# A dictionary with options for the search language support, empty by default.
-# 'ja' uses this config value.
-# 'zh' user can custom change `jieba` dictionary path.
-#
-# html_search_options = {'type': 'default'}
-
-# The name of a javascript file (relative to the configuration directory) that
-# implements a search results scorer. If empty, the default will be used.
-#
-# html_search_scorer = 'scorer.js'
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'VPPtestframeworkdoc'
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- # 'papersize': 'letterpaper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- # 'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- # 'preamble': '',
-
- # Latex figure (float) alignment
- #
- # 'figure_align': 'htbp',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (master_doc, 'VPPtestframework.tex', u'VPP test framework Documentation',
- u'VPP team', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#
-# latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#
-# latex_use_parts = False
-
-# If true, show page references after internal links.
-#
-# latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#
-# latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#
-# latex_appendices = []
-
-# If false, will not define \strong, \code, \titleref, \crossref ... but only
-# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
-# packages.
-#
-# latex_keep_old_macro_names = True
-
-# If false, no module index is generated.
-#
-# latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- (master_doc, 'vpptestframework', u'VPP test framework Documentation',
- [author], 1)
-]
-
-# If true, show URL addresses after external links.
-#
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- (master_doc, 'VPPtestframework', u'VPP test framework Documentation',
- author, 'VPPtestframework', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-#
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#
-# texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#
-# texinfo_no_detailmenu = False
diff --git a/test/doc/index.rst b/test/doc/index.rst
deleted file mode 100644
index 62e348cd81b..00000000000
--- a/test/doc/index.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-Contents
-========
-
-.. toctree::
- :numbered:
- :maxdepth: 2
- :glob:
-
- overview
- modules
- indices
diff --git a/test/doc/indices.rst b/test/doc/indices.rst
deleted file mode 100644
index d46b839f660..00000000000
--- a/test/doc/indices.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/test/doc/overview.rst b/test/doc/overview.rst
deleted file mode 100644
index 0c07cb6111f..00000000000
--- a/test/doc/overview.rst
+++ /dev/null
@@ -1,450 +0,0 @@
-.. _unittest: https://docs.python.org/2/library/unittest.html
-.. _TestCase: https://docs.python.org/2/library/unittest.html#unittest.TestCase
-.. _AssertionError: https://docs.python.org/2/library/exceptions.html#exceptions.AssertionError
-.. _SkipTest: https://docs.python.org/2/library/unittest.html#unittest.SkipTest
-.. _virtualenv: http://docs.python-guide.org/en/latest/dev/virtualenvs/
-.. _scapy: http://www.secdev.org/projects/scapy/
-.. _logging: https://docs.python.org/2/library/logging.html
-.. _process: https://docs.python.org/2/library/multiprocessing.html#the-process-class
-.. _pipes: https://docs.python.org/2/library/multiprocessing.html#multiprocessing.Pipe
-.. _managed: https://docs.python.org/2/library/multiprocessing.html#managers
-
-.. |vtf| replace:: VPP Test Framework
-
-|vtf|
-=====
-
-.. contents::
- :local:
- :depth: 1
-
-Overview
-########
-
-The goal of the |vtf| is to ease writing, running and debugging
-unit tests for VPP. For this, Python was chosen as a high-level language
-allowing rapid development, with scapy_ providing the necessary tools for
-creating and dissecting packets.
-
-Anatomy of a test case
-######################
-
-Python's unittest_ is used as the base framework upon which the VPP test
-framework is built. A test suite in the |vtf| consists of multiple classes
-derived from `VppTestCase`, which is itself derived from TestCase_.
-The test class defines one or more test functions, which act as test cases.
-
-Function flow when running a test case is as follows (a skeleton is sketched after the list):
-
-1. `setUpClass <VppTestCase.setUpClass>`:
- This function is called once for each test class, allowing a one-time test
- setup to be executed. If this function throws an exception,
- none of the test functions are executed.
-2. `setUp <VppTestCase.setUp>`:
- The setUp function runs before each of the test functions. If this function
- throws an exception other than AssertionError_ or SkipTest_, then this is
- considered an error, not a test failure.
-3. *test_<name>*:
- This is the guts of the test case. It should execute the test scenario
- and use the various assert functions from the unittest framework to perform
- the necessary checks. Multiple test_<name> methods can exist in a test case.
-4. `tearDown <VppTestCase.tearDown>`:
- The tearDown function is called after each test function with the purpose
- of doing partial cleanup.
-5. `tearDownClass <VppTestCase.tearDownClass>`:
- Method called once after running all of the test functions to perform
- the final cleanup.
-
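For illustration, a minimal skeleton showing where each of these hooks fits,
assuming the `VppTestCase` base class described above (the class name is
hypothetical):

    from framework import VppTestCase

    class TestSkeleton(VppTestCase):
        """Skeleton illustrating the per-class and per-test call order."""

        @classmethod
        def setUpClass(cls):
            super(TestSkeleton, cls).setUpClass()     # 1. one-time class setup

        def setUp(self):
            super(TestSkeleton, self).setUp()         # 2. runs before each test

        def test_something(self):
            self.assertTrue(True)                     # 3. the test scenario itself

        def tearDown(self):
            super(TestSkeleton, self).tearDown()      # 4. partial per-test cleanup

        @classmethod
        def tearDownClass(cls):
            super(TestSkeleton, cls).tearDownClass()  # 5. final class cleanup
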
-Logging
-#######
-
-Each test case has a logger automatically created for it, stored in
-the 'logger' property, based on logging_. Use the logger's standard methods
-debug(), info(), error(), ... to emit log messages.
-
-All the log messages always go into a log file in the temporary directory
-(see below).
-
-To control the messages printed to the console, specify the V= parameter.
-
-.. code-block:: shell
-
- make test # minimum verbosity
- make test V=1 # moderate verbosity
- make test V=2 # maximum verbosity
-
-Parallel test execution
-#######################
-
-|vtf| test suites can be run in parallel. Each test suite is executed
-in a separate process spawned using the Python multiprocessing process_ class.
-
-The results from child test suites are sent to the parent through pipes_ and
-are aggregated and summarized at the end of the run.
-
-Stdout, stderr and log messages from child processes are redirected to
-individual managed_ queues in the parent. The data from these queues are then
-emitted to stdout of the parent process in the order the test suites have
-finished. In case there are no finished test suites (such as at the beginning
-of the run), the data from the last started test suite are emitted in real
-time.
-
-To enable parallel test run, specify the number of parallel processes:
-
-.. code-block:: shell
-
- make test TEST_JOBS=n # at most n processes will be spawned
- make test TEST_JOBS=auto # chosen based on the number of cores
- # and the size of shared memory
-
-Test temporary directory and VPP life cycle
-###########################################
-
-Test separation is achieved by separating the test files and vpp instances.
-Each test creates a temporary directory and its name is used to create
-a shared memory prefix which is used to run a VPP instance.
-The temporary directory name contains the testcase class name for easy
-reference, so for a testcase named 'TestVxlan' the directory could be named
-e.g. vpp-unittest-TestVxlan-UNUP3j.
-This way, there is no conflict between any other VPP instances running
-on the box and the test VPP. Any temporary files created by the test case
-are stored in this temporary test directory.
-
-The test temporary directory holds the following interesting files:
-
-* log.txt - this contains the logger output on max verbosity
-* pg*_in.pcap - last packet stream injected into VPP, named after the interface,
- so for pg0, the file will be named pg0_in.pcap
-* pg*_out.pcap - last capture file created by VPP for the interface, similarly
- named after the interface, so for e.g. pg1, the file will be named
- pg1_out.pcap
-* history files - whenever the capture is restarted or a new stream is added,
- the existing files are rotated and renamed, so all the pcap files
- are always saved for later debugging if needed
-* core - if vpp dumps a core, it'll be stored in the temporary directory
-* vpp_stdout.txt - file containing output which vpp printed to stdout
-* vpp_stderr.txt - file containing output which vpp printed to stderr
-
-*NOTE*: existing temporary directories named vpp-unittest-* are automatically
-removed when invoking 'make test*' or 'make retest*' to keep the temporary
-directory clean.
-
-Virtual environment
-###################
-
-Virtualenv_ is a Python module which provides a means to create an environment
-containing the dependencies required by the |vtf|, allowing a separation
-from any existing system-wide packages. |vtf|'s Makefile automatically
-creates a virtualenv_ inside build-root and installs the required packages
-in that environment. The environment is entered whenever executing a test
-via one of the make test targets.
-
-Naming conventions
-##################
-
-Most unit tests do some kind of packet manipulation - sending and receiving
-packets between VPP and virtual hosts connected to the VPP. Referring
-to the sides, addresses, etc. is always done as if looking from the VPP side,
-thus:
-
-* *local_* prefix is used for the VPP side.
- So e.g. `local_ip4 <VppInterface.local_ip4>` address is the IPv4 address
- assigned to the VPP interface.
-* *remote_* prefix is used for the virtual host side.
- So e.g. `remote_mac <VppInterface.remote_mac>` address is the MAC address
- assigned to the virtual host connected to the VPP.
-
-Automatically generated addresses
-#################################
-
-To send packets, one typically needs to provide some addresses, otherwise
-the packets will be dropped. The interface objects in |vtf| automatically
-provide addresses based on (typically) their indexes, which ensures
-there are no conflicts and eases debugging by making the addressing scheme
-consistent.
-
-The developer of a test case typically doesn't need to work with the actual
-numbers, rather using the properties of the objects. The addresses typically
-come in two flavors: '<address>' and '<address>n' - note the 'n' suffix.
-The former address is a Python string, while the latter is translated using
-socket.inet_pton to raw format in network byte order - this format is suitable
-for passing as an argument to VPP APIs.
-
-e.g. for the IPv4 address assigned to the VPP interface:
-
-* local_ip4 - Local IPv4 address on VPP interface (string)
-* local_ip4n - Local IPv4 address - raw, suitable as API parameter.
-
-These addresses need to be configured in VPP to be usable, using e.g. the
-`VppInterface.config_ip4` API. Please see the documentation of
-`VppInterface` for more details.
-
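For illustration, a minimal sketch using the properties described above,
assuming self.pg0 was created via `create_pg_interfaces` (a sketch, not part
of the framework itself):

    # configure the auto-generated local IPv4 address on the VPP interface
    self.pg0.config_ip4()
    addr_str = self.pg0.local_ip4    # Python string form, e.g. for logging
    addr_raw = self.pg0.local_ip4n   # raw network-byte-order form for VPP APIs
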
-By default, there is one remote address of each kind created for L3:
-remote_ip4 and remote_ip6. If the test needs more addresses, because it's
-simulating more remote hosts, they can be generated using
-`generate_remote_hosts` API and the entries for them inserted into the ARP
-table using `configure_ipv4_neighbors` API.
-
-Packet flow in the |vtf|
-########################
-
-Test framework -> VPP
-~~~~~~~~~~~~~~~~~~~~~
-
-|vtf| doesn't send any packets to VPP directly. Traffic is instead injected
-using packet-generator interfaces, represented by the `VppPGInterface` class.
-Packets are written into a temporary .pcap file, which is then read by the VPP
-and the packets are injected into the VPP world.
-
-To add a list of packets to an interface, call the `VppPGInterface.add_stream`
-method on that interface. Once everything is prepared, call the `pg_start` method to
-start the packet generator on the VPP side.
-
-VPP -> test framework
-~~~~~~~~~~~~~~~~~~~~~
-
-Similarly, VPP doesn't send any packets to the |vtf| directly. Instead, the packet
-capture feature is used to capture and write traffic to a temporary .pcap file,
-which is then read and analyzed by the |vtf|.
-
-The following APIs are available to the test case for reading pcap files.
-
-* `VppPGInterface.get_capture`: this API is suitable for bulk & batch
- style of test, where a list of packets is prepared & sent, then the
- received packets are read and verified. The API needs the number of
- packets which are expected to be captured (ignoring filtered
- packets - see below) to know when the pcap file is completely
- written by the VPP. If using packet infos for verifying packets,
- then the counts of the packet infos can be automatically used by
- `VppPGInterface.get_capture` to get the proper count (in this case
- the default value None can be supplied as expected_count or omitted
- altogether).
-* `VppPGInterface.wait_for_packet`: this API is suitable for
- interactive style of test, e.g. when doing session management,
- three-way handshakes, etc. This API waits for and returns a single
- packet, keeping the capture file in place and remembering
- context. Repeated invocations return the following packets (or raise an
- Exception if the timeout is reached) from the same capture file (=
- packets arriving on the same interface).
-
-*NOTE*: it is not recommended to mix these APIs unless you understand
-how they work internally. None of these APIs rotate the pcap capture
-file, so calling e.g. `VppPGInterface.get_capture` after
-`VppPGInterface.wait_for_packet` will return already read packets. It
-is safe to switch from one API to another after calling
-`VppPGInterface.enable_capture` as that API rotates the capture file.
-
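For the interactive style, a minimal sketch, assuming pg interfaces set up as
described above (the packet count and timeout values are illustrative):

    # interactive-style reception: packets are consumed one by one
    self.pg1.enable_capture()
    self.pg_start()
    for _ in range(3):
        p = self.pg1.wait_for_packet(timeout=1)  # raises if no packet arrives
        # verify/act on the received packet here before waiting for the next
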
-Automatic filtering of packets:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Both APIs (`VppPGInterface.get_capture` and
-`VppPGInterface.wait_for_packet`) by default filter the packet
-capture, removing known uninteresting packets from it - these are IPv6
-Router Advertisements and IPv6 Router Alerts. These packets are
-unsolicited and from the point of view of the |vtf| are random. If a test
-wants to receive these packets, it should specify either None or a custom
-filtering function as the value of the 'filter_out_fn' argument.
-
-Common API flow for sending/receiving packets:
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-We will describe a simple scenario, where packets are sent from the pg0 to
-the pg1 interface, assuming that the interfaces were created using the
-`create_pg_interfaces` API.
-
-1. Create a list of packets for pg0::
-
- packet_count = 10
- packets = create_packets(src=self.pg0, dst=self.pg1,
- count=packet_count)
-
-2. Add that list of packets to the source interface::
-
- self.pg0.add_stream(packets)
-
-3. Enable capture on the destination interface::
-
- self.pg1.enable_capture()
-
-4. Start the packet generator::
-
- self.pg_start()
-
-5. Wait for capture file to appear and read it::
-
- capture = self.pg1.get_capture(expected_count=packet_count)
-
-6. Verify packets match sent packets::
-
- self.verify_capture(send=packets, captured=capture)
-
-Test framework objects
-######################
-
-The following objects provide a VPP abstraction and a means to do
-common tasks easily in the test cases.
-
-* `VppInterface`: abstract class representing a generic VPP interface;
- it contains some common functionality, which is then used by derived classes
-* `VppPGInterface`: class representing VPP packet-generator interface.
- The interface is created/destroyed when the object is created/destroyed.
-* `VppSubInterface`: VPP sub-interface abstract class, containing common
- functionality for e.g. `VppDot1QSubint` and `VppDot1ADSubint` classes
-
-How VPP APIs/CLIs are called
-############################
-
-VPP provides Python bindings in a Python module called vpp-papi, which the test
-framework installs in the virtual environment. A shim layer represented by
-the `VppPapiProvider` class is built on top of vpp-papi, serving these
-purposes (a short usage sketch follows the list below):
-
-1. Automatic return value checks:
- After each API is called, the return value is checked against the expected
- return value (by default 0, but can be overridden) and an exception
- is raised if the check fails.
-2. Automatic call of hooks:
-
- a. `before_cli <Hook.before_cli>` and `before_api <Hook.before_api>` hooks
- are used for debug logging and stepping through the test
- b. `after_cli <Hook.after_cli>` and `after_api <Hook.after_api>` hooks
- are used for monitoring the vpp process for crashes
-3. Simplification of API calls:
- Many of the VPP APIs take a lot of parameters, and by providing sane defaults
- for these, the API is much easier to use in the common case and the code is
- more readable. E.g. the ip_add_del_route API takes ~25 parameters, of which
- only 3 are needed in the common case.
-
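A minimal sketch of both call styles, assuming the provider instance is
available on the test case as self.vapi (a sketch, not an exhaustive example):

    # CLI call through the shim layer; the output goes to the test logger
    self.logger.info(self.vapi.cli("show version"))

    # API call through the shim layer; the return value is checked
    # automatically and an unexpected value raises an exception
    reply = self.vapi.show_version()
    self.logger.debug(reply.version)
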
-Utility methods
-###############
-
-Some interesting utility methods are:
-
-* `ppp`: 'Pretty Print Packet' - returns a string containing the same output
- as Scapy's packet.show() would print
-* `ppc`: 'Pretty Print Capture' - returns a string containing printout of
- a capture (with configurable limit on the number of packets printed from it)
- using `ppp`
-
-*NOTE*: Do not use Scapy's packet.show() in the tests, because it prints
-the output to stdout. All output should go to the logger associated with
-the test case.
-
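For example, assuming a packet and a capture object like the ones used in the
flow above, and ppp/ppc imported from the framework's util helpers:

    # pretty-print a single packet and a whole capture into the test log
    self.logger.debug(ppp("Sent packet:", packet))
    self.logger.debug(ppc("Received capture:", capture))
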
-Example: how to add a new test
-##############################
-
-In this example, we will describe how to add a new test case which tests
-basic IPv4 forwarding.
-
-1. Add a new file called test_ip4_fwd.py in the test directory, starting
- with a few imports::
-
- from framework import VppTestCase
- from scapy.layers.l2 import Ether
- from scapy.packet import Raw
- from scapy.layers.inet import IP, UDP
- from random import randint
-
-2. Create a class inherited from the VppTestCase::
-
- class IP4FwdTestCase(VppTestCase):
- """ IPv4 simple forwarding test case """
-
-3. Add a setUpClass function containing the setup needed for our test to run::
-
- @classmethod
- def setUpClass(cls):
- super(IP4FwdTestCase, cls).setUpClass()
- cls.create_pg_interfaces(range(2)) # create pg0 and pg1
- for i in cls.pg_interfaces:
- i.admin_up() # put the interface up
- i.config_ip4() # configure IPv4 address on the interface
- i.resolve_arp() # resolve ARP, so that we know VPP MAC
-
-4. Create a helper method to create the packets to send::
-
- def create_stream(self, src_if, dst_if, count):
- packets = []
- for i in range(count):
- # create packet info stored in the test case instance
- info = self.create_packet_info(src_if, dst_if)
- # convert the info into packet payload
- payload = self.info_to_payload(info)
- # create the packet itself
- p = (Ether(dst=src_if.local_mac, src=src_if.remote_mac) /
- IP(src=src_if.remote_ip4, dst=dst_if.remote_ip4) /
- UDP(sport=randint(1000, 2000), dport=5678) /
- Raw(payload))
- # store a copy of the packet in the packet info
- info.data = p.copy()
- # append the packet to the list
- packets.append(p)
-
- # return the created packet list
- return packets
-
-5. Create a helper method to verify the capture::
-
- def verify_capture(self, src_if, dst_if, capture):
- packet_info = None
- for packet in capture:
- try:
- ip = packet[IP]
- udp = packet[UDP]
- # convert the payload to packet info object
- payload_info = self.payload_to_info(packet[Raw])
- # make sure the indexes match
- self.assert_equal(payload_info.src, src_if.sw_if_index,
- "source sw_if_index")
- self.assert_equal(payload_info.dst, dst_if.sw_if_index,
- "destination sw_if_index")
- packet_info = self.get_next_packet_info_for_interface2(
- src_if.sw_if_index,
- dst_if.sw_if_index,
- packet_info)
- # make sure we didn't run out of saved packets
- self.assertIsNotNone(packet_info)
- self.assert_equal(payload_info.index, packet_info.index,
- "packet info index")
- saved_packet = packet_info.data # fetch the saved packet
- # assert the values match
- self.assert_equal(ip.src, saved_packet[IP].src,
- "IP source address")
- # ... more assertions here
- self.assert_equal(udp.sport, saved_packet[UDP].sport,
- "UDP source port")
- except:
- self.logger.error(ppp("Unexpected or invalid packet:",
- packet))
- raise
- remaining_packet = self.get_next_packet_info_for_interface2(
- src_if.sw_if_index,
- dst_if.sw_if_index,
- packet_info)
- self.assertIsNone(remaining_packet,
- "Interface %s: Packet expected from interface "
- "%s didn't arrive" % (dst_if.name, src_if.name))
-
-6. Add the test code to the test_basic function::
-
- def test_basic(self):
- count = 10
- # create the packet stream
- packets = self.create_stream(self.pg0, self.pg1, count)
- # add the stream to the source interface
- self.pg0.add_stream(packets)
- # enable capture on both interfaces
- self.pg0.enable_capture()
- self.pg1.enable_capture()
- # start the packet generator
- self.pg_start()
- # get capture - the proper count of packets was saved by
- # create_packet_info() based on dst_if parameter
- capture = self.pg1.get_capture()
- # assert nothing captured on pg0 (always do this last, so that
- # some time has already passed since pg_start())
- self.pg0.assert_nothing_captured()
- # verify capture
- self.verify_capture(self.pg0, self.pg1, capture)
-
-7. Run the test by issuing 'make test' or, to run only this specific
- test, issue 'make test TEST=test_ip4_fwd'.
diff --git a/test/requirements-3.txt b/test/requirements-3.txt
index 161f09d2c48..e5ade68f551 100644
--- a/test/requirements-3.txt
+++ b/test/requirements-3.txt
@@ -8,6 +8,10 @@ alabaster==0.7.12 \
--hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \
--hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02
# via sphinx
+attrs==21.2.0 \
+ --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \
+ --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb
+ # via jsonschema
babel==2.9.1 \
--hash=sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9 \
--hash=sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0
@@ -63,9 +67,9 @@ cffi==1.14.6 \
--hash=sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5 \
--hash=sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69
# via cryptography
-charset-normalizer==2.0.4 \
- --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \
- --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3
+charset-normalizer==2.0.6 \
+ --hash=sha256:5d209c0a931f215cee683b6445e2d77677e7e75e159f78def0db09d68fafcaa6 \
+ --hash=sha256:5ec46d183433dcbd0ab716f2d7f29d8dee50505b3fdb40c6b985c7c4f5a3591f
# via requests
click==8.0.1 \
--hash=sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a \
@@ -75,21 +79,27 @@ commonmark==0.9.1 \
--hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
--hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9
# via recommonmark
-cryptography==3.4.7 \
- --hash=sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d \
- --hash=sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959 \
- --hash=sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6 \
- --hash=sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873 \
- --hash=sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2 \
- --hash=sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713 \
- --hash=sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1 \
- --hash=sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177 \
- --hash=sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250 \
- --hash=sha256:b01fd6f2737816cb1e08ed4807ae194404790eac7ad030b34f2ce72b332f5586 \
- --hash=sha256:bf40af59ca2465b24e54f671b2de2c59257ddc4f7e5706dbd6930e26823668d3 \
- --hash=sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca \
- --hash=sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d \
- --hash=sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9
+cryptography==35.0.0 \
+ --hash=sha256:07bb7fbfb5de0980590ddfc7f13081520def06dc9ed214000ad4372fb4e3c7f6 \
+ --hash=sha256:18d90f4711bf63e2fb21e8c8e51ed8189438e6b35a6d996201ebd98a26abbbe6 \
+ --hash=sha256:1ed82abf16df40a60942a8c211251ae72858b25b7421ce2497c2eb7a1cee817c \
+ --hash=sha256:22a38e96118a4ce3b97509443feace1d1011d0571fae81fc3ad35f25ba3ea999 \
+ --hash=sha256:2d69645f535f4b2c722cfb07a8eab916265545b3475fdb34e0be2f4ee8b0b15e \
+ --hash=sha256:4a2d0e0acc20ede0f06ef7aa58546eee96d2592c00f450c9acb89c5879b61992 \
+ --hash=sha256:54b2605e5475944e2213258e0ab8696f4f357a31371e538ef21e8d61c843c28d \
+ --hash=sha256:7075b304cd567694dc692ffc9747f3e9cb393cc4aa4fb7b9f3abd6f5c4e43588 \
+ --hash=sha256:7b7ceeff114c31f285528ba8b390d3e9cfa2da17b56f11d366769a807f17cbaa \
+ --hash=sha256:7eba2cebca600a7806b893cb1d541a6e910afa87e97acf2021a22b32da1df52d \
+ --hash=sha256:928185a6d1ccdb816e883f56ebe92e975a262d31cc536429041921f8cb5a62fd \
+ --hash=sha256:9933f28f70d0517686bd7de36166dda42094eac49415459d9bdf5e7df3e0086d \
+ --hash=sha256:a688ebcd08250eab5bb5bca318cc05a8c66de5e4171a65ca51db6bd753ff8953 \
+ --hash=sha256:abb5a361d2585bb95012a19ed9b2c8f412c5d723a9836418fab7aaa0243e67d2 \
+ --hash=sha256:c10c797ac89c746e488d2ee92bd4abd593615694ee17b2500578b63cad6b93a8 \
+ --hash=sha256:ced40344e811d6abba00295ced98c01aecf0c2de39481792d87af4fa58b7b4d6 \
+ --hash=sha256:d57e0cdc1b44b6cdf8af1d01807db06886f10177469312fbde8f44ccbb284bc9 \
+ --hash=sha256:d99915d6ab265c22873f1b4d6ea5ef462ef797b4140be4c9d8b179915e0985c6 \
+ --hash=sha256:eb80e8a1f91e4b7ef8b33041591e6d89b2b8e122d787e87eeb2b08da71bb16ad \
+ --hash=sha256:ebeddd119f526bcf323a89f853afb12e225902a24d29b55fe18dd6fcb2838a76
# via
# -r requirements.txt
# noiseprotocol
@@ -97,9 +107,9 @@ deprecation==2.1.0 \
--hash=sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff \
--hash=sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a
# via -r requirements.txt
-docutils==0.16 \
- --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \
- --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc
+docutils==0.17.1 \
+ --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \
+ --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61
# via
# recommonmark
# sphinx
@@ -116,10 +126,14 @@ imagesize==1.2.0 \
--hash=sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1 \
--hash=sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1
# via sphinx
-jinja2==3.0.1 \
- --hash=sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4 \
- --hash=sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4
+jinja2==3.0.2 \
+ --hash=sha256:827a0e32839ab1600d4eb1c4c33ec5a8edfbc5cb42dafa13b81f182f97784b45 \
+ --hash=sha256:8569982d3f0889eed11dd620c706d39b60c36d6d25843961f33f77fb6bc6b20c
# via sphinx
+jsonschema==4.0.1 \
+ --hash=sha256:48f4e74f8bec0c2f75e9fcfffa264e78342873e1b57e2cfeae54864cc5e9e4dd \
+ --hash=sha256:9938802041347f2c62cad2aef59e9a0826cd34584f3609db950efacb4dbf6518
+ # via -r requirements.txt
lark-parser==0.6.7 \
--hash=sha256:062800f3823a6c733ec1d181a2089a22d1f62dbe65f90a3f6b1e6de1934b05ef
# via syslog-rfc5424-parser
@@ -251,15 +265,15 @@ pycparser==2.20 \
--hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \
--hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705
# via cffi
-pyenchant==3.2.1 \
- --hash=sha256:37c79e1dab492092fe8135222b2ba404c1b79595b459af9edaeddb77a2cb89a5 \
- --hash=sha256:49e0b255bef9356f57eeeee1d983ffa8599c0a46727d55cddbc71ec26226ca80 \
- --hash=sha256:5e206a1d6596904a922496f6c9f7d0b964b243905f401f5f2f40ea4d1f74e2cf \
- --hash=sha256:e8546c28b630f6d9f76642166656e337df2a1849cbef2b8ee198e7f64266f4ee
+pyenchant==3.2.2 \
+ --hash=sha256:1cf830c6614362a78aab78d50eaf7c6c93831369c52e1bb64ffae1df0341e637 \
+ --hash=sha256:5a636832987eaf26efe971968f4d1b78e81f62bca2bde0a9da210c7de43c3bce \
+ --hash=sha256:5facc821ece957208a81423af7d6ec7810dad29697cb0d77aae81e4e11c8e5a6 \
+ --hash=sha256:6153f521852e23a5add923dbacfbf4bebbb8d70c4e4bad609a8e0f9faeb915d1
# via sphinxcontrib-spelling
-pygments==2.9.0 \
- --hash=sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f \
- --hash=sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e
+pygments==2.10.0 \
+ --hash=sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380 \
+ --hash=sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6
# via sphinx
pympler==0.9 \
--hash=sha256:f2cbe7df622117af890249f2dea884eb702108a12d729d264b7c5983a6e06e47
@@ -268,10 +282,64 @@ pyparsing==2.4.7 \
--hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \
--hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b
# via packaging
-pytz==2021.1 \
- --hash=sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da \
- --hash=sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798
+pyrsistent==0.18.0 \
+ --hash=sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2 \
+ --hash=sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7 \
+ --hash=sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea \
+ --hash=sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426 \
+ --hash=sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710 \
+ --hash=sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1 \
+ --hash=sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396 \
+ --hash=sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2 \
+ --hash=sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680 \
+ --hash=sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35 \
+ --hash=sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427 \
+ --hash=sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b \
+ --hash=sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b \
+ --hash=sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f \
+ --hash=sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef \
+ --hash=sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c \
+ --hash=sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4 \
+ --hash=sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d \
+ --hash=sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78 \
+ --hash=sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b \
+ --hash=sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72
+ # via jsonschema
+pytz==2021.3 \
+ --hash=sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c \
+ --hash=sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326
# via babel
+pyyaml==5.4.1 \
+ --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \
+ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \
+ --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \
+ --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \
+ --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \
+ --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \
+ --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \
+ --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \
+ --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \
+ --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \
+ --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \
+ --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \
+ --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \
+ --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \
+ --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \
+ --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \
+ --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \
+ --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \
+ --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \
+ --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \
+ --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \
+ --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \
+ --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \
+ --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \
+ --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \
+ --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \
+ --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \
+ --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \
+ --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0
+ # via -r requirements.txt
recommonmark==0.7.1 \
--hash=sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f \
--hash=sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67
@@ -291,17 +359,17 @@ snowballstemmer==2.1.0 \
--hash=sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2 \
--hash=sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914
# via sphinx
-sphinx==4.1.2 \
- --hash=sha256:3092d929cd807926d846018f2ace47ba2f3b671b309c7a89cd3306e80c826b13 \
- --hash=sha256:46d52c6cee13fec44744b8c01ed692c18a640f6910a725cbb938bc36e8d64544
+sphinx==4.2.0 \
+ --hash=sha256:94078db9184491e15bce0a56d9186e0aec95f16ac20b12d00e06d4e36f1058a6 \
+ --hash=sha256:98a535c62a4fcfcc362528592f69b26f7caec587d32cd55688db580be0287ae0
# via
# -r requirements.txt
# recommonmark
# sphinx-rtd-theme
# sphinxcontrib-spelling
-sphinx-rtd-theme==0.5.2 \
- --hash=sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a \
- --hash=sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f
+sphinx-rtd-theme==1.0.0 \
+ --hash=sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8 \
+ --hash=sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c
# via -r requirements.txt
sphinxcontrib-applehelp==1.0.2 \
--hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \
@@ -339,9 +407,9 @@ tomli==1.2.1 \
--hash=sha256:8dd0e9524d6f386271a36b41dbf6c57d8e32fd96fd22b6584679dc569d20899f \
--hash=sha256:a5b75cb6f3968abb47af1b40c1819dc519ea82bcc065776a866e8d74c5ca9442
# via pep517
-urllib3==1.26.6 \
- --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \
- --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f
+urllib3==1.26.7 \
+ --hash=sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece \
+ --hash=sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844
# via requests
wheel==0.37.0 \
--hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \
diff --git a/test/requirements.txt b/test/requirements.txt
index a18f67c7f42..a7c0bc088c1 100644
--- a/test/requirements.txt
+++ b/test/requirements.txt
@@ -18,3 +18,5 @@ sphinxcontrib-spelling # BSD
sphinx-rtd-theme # MIT
noiseprotocol # MIT
recommonmark # MIT
+pyyaml # MIT
+jsonschema # MIT
\ No newline at end of file