ASAN_OPTIONS?=verify_asan_link_order=0:detect_leaks=0:abort_on_error=1:unmap_shadow_on_exit=1:disable_coredump=0
export ASAN_OPTIONS
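
# ASAN_OPTIONS uses '?=', so a caller can override the defaults per run; an
# illustrative invocation (the option values here are examples only):
#
#   ASAN_OPTIONS=detect_leaks=1:abort_on_error=1 make test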

.PHONY: verify-env
verify-env:
ifndef WS_ROOT
	$(error WS_ROOT is not set)
endif
ifndef BR
	$(error BR is not set)
endif
ifndef TEST_DIR
	$(error TEST_DIR is not set)
endif
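
# WS_ROOT, BR and TEST_DIR are normally exported by the caller (e.g. the
# top-level VPP Makefile); a manual invocation sketch, assuming a standard
# tree layout:
#
#   make -C test verify-env WS_ROOT=$(pwd) BR=$(pwd)/build-root TEST_DIR=$(pwd)/test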

export TEST_BR = $(TEST_DIR)
export TEST_DOC_BR = $(TEST_DIR)/doc/build
FAILED_DIR=/tmp/vpp-failed-unittests/
VPP_TEST_DIRS=$(shell ls -d $(TEST_DIR) $(EXTERN_TESTS))

FORCE_NO_WIPE=0
ifeq ($(DEBUG),gdb)
FORCE_FOREGROUND=1
else ifeq ($(DEBUG),gdbserver)
FORCE_FOREGROUND=1
else ifeq ($(DEBUG),gdb-all)
FORCE_FOREGROUND=1
else ifeq ($(DEBUG),gdbserver-all)
FORCE_FOREGROUND=1
else ifeq ($(DEBUG),core)
FORCE_FOREGROUND=1
else ifeq ($(DEBUG),attach)
FORCE_FOREGROUND=1
FORCE_NO_WIPE=1
else ifeq ($(STEP),yes)
FORCE_FOREGROUND=1
else ifeq ($(STEP),y)
FORCE_FOREGROUND=1
else ifeq ($(STEP),1)
FORCE_FOREGROUND=1
else
FORCE_FOREGROUND=0
endif
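
# Any of the DEBUG/STEP values above force the test runner into the
# foreground; illustrative invocations (values taken from the help text below):
#
#   make test DEBUG=gdb     # print VPP PID and wait for user input
#   make test STEP=yes      # step through a testcase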

ifdef PROFILE_OUTPUT
PROFILE_OUTPUT_OPTS=-o $(PROFILE_OUTPUT)
endif

ifndef PROFILE_SORT_BY
PROFILE_SORT_BY=cumtime
endif

ifeq ($(PROFILE),1)
PYTHON_PROFILE_OPTS=-m cProfile $(PROFILE_OUTPUT_OPTS) -s $(PROFILE_SORT_BY)
FORCE_FOREGROUND=1
endif
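
# Profiling wraps the test runner in Python's cProfile module; an
# illustrative run (the PROFILE_OUTPUT path is an example):
#
#   make test PROFILE=1 PROFILE_SORT_BY=tottime PROFILE_OUTPUT=/tmp/test.prof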

UNITTEST_EXTRA_OPTS=
UNITTEST_FAILFAST_OPTS=

ifeq ($(FAILFAST),1)
UNITTEST_FAILFAST_OPTS=-f
UNITTEST_EXTRA_OPTS=$(UNITTEST_FAILFAST_OPTS)
endif

ifneq ($(EXTERN_TESTS),)
# keep the fail-fast flag when adding out-of-tree tests
UNITTEST_EXTRA_OPTS=$(UNITTEST_FAILFAST_OPTS) -d $(EXTERN_TESTS)
endif
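
# Illustrative combinations (the out-of-tree path is an example):
#
#   make test FAILFAST=1                          # stop on first failure
#   make test EXTERN_TESTS=/path/to/extra/tests   # also run out-of-tree tests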

VENV_PATH=$(TEST_DIR)/venv

ifeq ($(TEST_DEBUG),1)
VENV_RUN_DIR:=$(VENV_PATH)/run-debug
else
VENV_RUN_DIR:=$(VENV_PATH)/run
endif

ifeq ($(PYTHON),)
PYTHON_INTERP=python3
else
PYTHON_INTERP=$(PYTHON)
endif
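
# A different interpreter can be selected per run, e.g. (hypothetical
# interpreter name, must be on PATH):
#
#   make test PYTHON=python3.9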

PYTHON_VERSION=$(shell $(PYTHON_INTERP) -c 'import sys; print(sys.version_info.major)')
PIP_VERSION=21.2.4
# Keep in sync with requirements.txt
PIP_TOOLS_VERSION=6.2.0
PYTHON_DEPENDS=requirements-$(PYTHON_VERSION).txt
SCAPY_SOURCE=$(shell find $(VENV_PATH)/lib/python* -name site-packages)
BUILD_COV_DIR=$(TEST_BR)/coverage

PIP_TOOLS_INSTALL_DONE=$(VENV_RUN_DIR)/pip-tools-install-$(PYTHON_VERSION)-$(PIP_TOOLS_VERSION).done
PIP_INSTALL_DONE=$(VENV_RUN_DIR)/pip-install-$(PYTHON_VERSION)-$(PIP_VERSION).done
PIP_PATCH_DONE=$(VENV_RUN_DIR)/pip-patch-$(PYTHON_VERSION).done
PAPI_INSTALL_DONE=$(VENV_RUN_DIR)/papi-install-$(PYTHON_VERSION).done
PAPI_PYTHON_SRC_DIR=$(WS_ROOT)/src/vpp-api/python
PAPI_WIPE_DIST=$(WS_ROOT)/src/vpp-api/vapi/__pycache__ \
	$(PAPI_PYTHON_SRC_DIR)/build \
	$(PAPI_PYTHON_SRC_DIR)/vpp_papi.egg-info \
	$(PAPI_PYTHON_SRC_DIR)/vpp_papi/__pycache__

$(PIP_TOOLS_INSTALL_DONE):
	@rm -rf $(VENV_PATH)
	@mkdir -p $(VENV_RUN_DIR)
	@$(PYTHON_INTERP) -m venv $(VENV_PATH)
# Pin pip itself; '===' is pip's arbitrary-equality operator (exact version string match).
	@bash -c "source $(VENV_PATH)/bin/activate && \
		  $(PYTHON_INTERP) -m pip install pip===$(PIP_VERSION)"
	@bash -c "source $(VENV_PATH)/bin/activate && \
		  $(PYTHON_INTERP) -m pip install pip-tools===$(PIP_TOOLS_VERSION)"
	@touch $@

$(PYTHON_DEPENDS): requirements.txt
	@bash -c "source $(VENV_PATH)/bin/activate && \
		  CUSTOM_COMPILE_COMMAND='make test-refresh-deps (or update requirements.txt)' \
		  $(PYTHON_INTERP) -m piptools compile -q --generate-hashes requirements.txt --output-file $@"

$(PIP_INSTALL_DONE): $(PIP_TOOLS_INSTALL_DONE) $(PYTHON_DEPENDS)
	@bash -c "source $(VENV_PATH)/bin/activate && \
		  $(PYTHON_INTERP) -m piptools sync $(PYTHON_DEPENDS)"
	@touch $@

$(PIP_PATCH_DONE): $(PIP_INSTALL_DONE)
	@echo --- patching ---
	@sleep 1 # Ensure python recompiles patched *.py files -> *.pyc
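# GNU patch exits 0 on success and 1 when a patch was already applied
# (tolerated here thanks to --forward); only codes greater than 1 abort the recipe.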
	for f in $(CURDIR)/patches/scapy-2.4.3/*.patch ; do \
		echo Applying patch: $$(basename $$f) ; \
		patch --forward -p1 -d $(SCAPY_SOURCE) < $$f ; \
		retCode=$$?; \
		[ $$retCode -gt 1 ] && exit $$retCode; \
	done; \
	touch $@

$(PAPI_INSTALL_DONE): $(PIP_PATCH_DONE)
	@bash -c "source $(VENV_PATH)/bin/activate && $(PYTHON_INTERP) -m pip install -e $(PAPI_PYTHON_SRC_DIR)"
	@touch $@

.PHONY: refresh-deps
refresh-deps: clean-deps $(PYTHON_DEPENDS)

.PHONY: clean-deps
clean-deps:
	@rm -f $(PYTHON_DEPENDS)

INTERN_PLUGIN_SRC_DIR=$(WS_ROOT)/src/plugins
ifneq ($(EXTERN_PLUGIN_SRC_DIR),)
PLUGIN_SRC_DIR=$(EXTERN_PLUGIN_SRC_DIR)
else
PLUGIN_SRC_DIR=$(INTERN_PLUGIN_SRC_DIR)
endif

define retest-func
@env VPP_IN_GDB=$(VPP_IN_GDB) FORCE_FOREGROUND=$(FORCE_FOREGROUND) \
	FAILED_DIR=$(FAILED_DIR) VENV_PATH=$(VENV_PATH) \
	scripts/setsid_wrapper.sh $(FORCE_FOREGROUND) $(VENV_PATH)/bin/activate \
	$(PYTHON_INTERP) $(PYTHON_PROFILE_OPTS) run_tests.py \
	-d $(TEST_DIR) $(UNITTEST_EXTRA_OPTS) \
	|| env FAILED_DIR=$(FAILED_DIR) COMPRESS_FAILED_TEST_LOGS=$(COMPRESS_FAILED_TEST_LOGS) \
	scripts/compress_failed.sh
endef
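
# A rough sketch of what the macro expands to at run time (paths and values
# illustrative, details elided):
#
#   env FORCE_FOREGROUND=0 ... scripts/setsid_wrapper.sh 0 venv/bin/activate \
#       python3 run_tests.py -d $(TEST_DIR) ... || scripts/compress_failed.sh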

.PHONY: sanity

ifeq ($(SANITY),no)
SANITY_IMPORT_VPP_PAPI_CMD=true
SANITY_RUN_VPP_CMD=true
else
SANITY_IMPORT_VPP_PAPI_CMD=source $(VENV_PATH)/bin/activate && $(PYTHON_INTERP) sanity_import_vpp_papi.py
SANITY_RUN_VPP_CMD=source $(VENV_PATH)/bin/activate && $(PYTHON_INTERP) sanity_run_vpp.py
endif

ifndef TEST_JOBS
PARALLEL_ILLEGAL=0
else ifeq ($(FORCE_FOREGROUND),0)
PARALLEL_ILLEGAL=0
else ifeq ($(TEST_JOBS),auto)
PARALLEL_ILLEGAL=0
else ifeq ($(TEST_JOBS),1)
PARALLEL_ILLEGAL=0
else
PARALLEL_ILLEGAL=1
endif
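
# In other words, parallel runs are rejected only when a foreground mode
# (DEBUG, STEP or PROFILE) is combined with TEST_JOBS other than 1/auto, e.g.:
#
#   make test TEST_JOBS=4              # ok
#   make test TEST_JOBS=4 DEBUG=gdb    # fails the sanity check below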

sanity: test-dep
	@bash -c "test $(PARALLEL_ILLEGAL) -eq 0 ||\
	    (echo \"*******************************************************************\" &&\
		 echo \"* Sanity check failed, TEST_JOBS is not 1 or 'auto' and DEBUG, STEP or PROFILE is set\" &&\
	         echo \"*******************************************************************\" &&\
		 false)"
	@bash -c "$(SANITY_IMPORT_VPP_PAPI_CMD) ||\
		(echo \"*******************************************************************\" &&\
		 echo \"* Sanity check failed, cannot import vpp_papi\" &&\
		 echo \"* to debug: \" &&\
		 echo \"* 1. enter test shell:   make test-shell\" &&\
		 echo \"* 2. execute debugger:   gdb python -ex 'run sanity_import_vpp_papi.py'\" &&\
	         echo \"*******************************************************************\" &&\
		 false)"
	@bash -c "$(SANITY_RUN_VPP_CMD) ||\
		(echo \"*******************************************************************\" &&\
		 echo \"* Sanity check failed, cannot run vpp\" &&\
	         echo \"*******************************************************************\" &&\
		 false)"

$(FAILED_DIR): reset
	@mkdir -p $@

.PHONY: test-dep
test-dep: $(PAPI_INSTALL_DONE) $(FAILED_DIR)

.PHONY: test
test: test-dep sanity
	$(call retest-func)

.PHONY: retest
retest: verify-env sanity $(FAILED_DIR)
	$(call retest-func)

.PHONY: shell
shell: test-dep
	@echo "source $(VENV_PATH)/bin/activate;\
		export RND_SEED=$(RND_SEED);\
		echo '***';\
		echo PYTHONPATH=$(PYTHONPATH);\
		echo RND_SEED=$(RND_SEED);\
		echo VPP_BUILD_DIR=$(VPP_BUILD_DIR);\
		echo VPP_BIN=$(VPP_BIN);\
		echo VPP_PLUGIN_PATH=$(VPP_PLUGIN_PATH);\
		echo VPP_TEST_PLUGIN_PATH=$(VPP_TEST_PLUGIN_PATH);\
		echo VPP_INSTALL_PATH=$(VPP_INSTALL_PATH);\
		echo EXTERN_TESTS=$(EXTERN_TESTS);\
		echo EXTERN_PLUGINS=$(EXTERN_PLUGINS);\
		echo EXTERN_COV_DIR=$(EXTERN_COV_DIR);\
		echo LD_LIBRARY_PATH=$(LD_LIBRARY_PATH);\
		echo '***';\
		exec </dev/tty" | bash -i
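
# Typical use is via the test-shell wrapper target documented in the help
# text below:
#
#   make test-shell        # drops into bash with the test venv activated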

.PHONY: reset
reset:
	@rm -f /dev/shm/vpp-unittest-*
	@if [ "$(FORCE_NO_WIPE)" -eq 0 ] ; then rm -rf /tmp/vpp-unittest-*; fi
	@rm -f /tmp/api_post_mortem.*
	@rm -rf $(FAILED_DIR)

.PHONY: wipe
wipe: reset
	@rm -rf $(VENV_PATH)
	@rm -rf $(patsubst %,%/__pycache__, $(VPP_TEST_DIRS))

$(TEST_DOC_BR): $(PIP_INSTALL_DONE)
	@mkdir -p $@
	@bash -c "source $(VENV_PATH)/bin/activate && make -C doc html"

.PHONY: doc
doc: $(PIP_PATCH_DONE) $(TEST_DOC_BR)
	@echo
	@echo "Test Documentation URL: $(TEST_DOC_BR)/html/index.html"
	@echo "Run 'make test-wipe-doc test-doc' to rebuild the test docs"
	@echo

.PHONY: wipe-doc
wipe-doc:
	@rm -rf $(TEST_DOC_BR)

$(BUILD_COV_DIR):
	@mkdir -p $@

.PHONY: cov
cov: wipe-cov test-dep ext $(BUILD_COV_DIR)
	@lcov --zerocounters --directory $(VPP_BUILD_DIR)
	@test -z "$(EXTERN_COV_DIR)" || lcov --zerocounters --directory $(EXTERN_COV_DIR)
	$(call retest-func)
	@lcov --capture --directory $(VPP_BUILD_DIR) --output-file $(BUILD_COV_DIR)/coverage.info
	@test -z "$(EXTERN_COV_DIR)" || lcov --capture --directory $(EXTERN_COV_DIR) --output-file $(BUILD_COV_DIR)/extern-coverage.info
	@genhtml $(BUILD_COV_DIR)/coverage.info --output-directory $(BUILD_COV_DIR)/html
	@test -z "$(EXTERN_COV_DIR)" || genhtml $(BUILD_COV_DIR)/extern-coverage.info --output-directory $(BUILD_COV_DIR)/extern-html
	@echo
	@echo "Build finished. Code coverage report is in $(BUILD_COV_DIR)/html/index.html"
	@test -z "$(EXTERN_COV_DIR)" || echo "Code coverage report for out-of-tree objects is in $(BUILD_COV_DIR)/extern-html/index.html"
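
# Example coverage run (requires lcov/genhtml on PATH and a gcov-enabled
# build; EXTERN_COV_DIR is optional and the path is illustrative):
#
#   make test-cov EXTERN_COV_DIR=/path/to/out-of-tree/build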

.PHONY: wipe-cov
wipe-cov: wipe
	@rm -rf $(BUILD_COV_DIR)

.PHONY: wipe-papi
wipe-papi:
	@rm -rf $(PAPI_INSTALL_DONE) $(PAPI_WIPE_DIST)

.PHONY: wipe-all
wipe-all: wipe wipe-papi wipe-doc wipe-cov
	@rm -rf $(TEST_BR)

.PHONY: checkstyle-diff
checkstyle-diff: $(PIP_INSTALL_DONE)
	@bash -c "source $(VENV_PATH)/bin/activate &&\
		  $(PYTHON_INTERP) -m pip install pycodestyle"
	@bash -c "source $(VENV_PATH)/bin/activate &&\
		cd $(WS_ROOT) && git diff --name-only --no-color --relative HEAD~1 ':!*.patch' | grep '\.py$$' | xargs -n 1 -I XXX \
		pycodestyle --show-source --ignore=W504,E126,E241,E226,E305,E704,E741,E722 -v XXX ||\
		(echo \"*********************************************************************\" &&\
		 echo \"* Test framework PEP8 compliance check FAILED (checked changed files)\" &&\
	         echo \"*********************************************************************\" &&\
		 false)"
	@echo "*********************************************************************"
	@echo "* Test framework PEP8 compliance check passed (checked changed files)"
	@echo "*********************************************************************"

.PHONY: start-gdb
start-gdb: sanity
	$(eval VPP_IN_GDB=1)
	$(eval FORCE_FOREGROUND=1)
	$(call retest-func)
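
# Intended pairing with DEBUG=attach (target names from the help text below):
#
#   make test-start-vpp-in-gdb     # terminal 1: VPP under gdb
#   make test DEBUG=attach         # terminal 2: attach the testcase to it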

.PHONY: checkstyle
checkstyle: $(PIP_INSTALL_DONE)
	@bash -c "source $(VENV_PATH)/bin/activate &&\
		  $(PYTHON_INTERP) -m pip install pycodestyle"
	@bash -c "source $(VENV_PATH)/bin/activate &&\
		pycodestyle --show-source --ignore=W504,E126,E241,E226,E305,E704,E741,E722 -v *.py ||\
		(echo \"*******************************************************************\" &&\
		 echo \"* Test framework PEP8 compliance check FAILED (checked all files)\" &&\
	         echo \"*******************************************************************\" &&\
		 false)"
	@echo "*******************************************************************"
	@echo "* Test framework PEP8 compliance check passed (checked all files)"
	@echo "*******************************************************************"

.PHONY: help
help:
	@echo "Running tests:"
	@echo ""
	@echo " test                   - build and run (basic) functional tests"
	@echo " test-debug             - build and run (basic) functional tests (debug build)"
	@echo " test-all               - build and run functional and extended tests"
	@echo " test-all-debug         - build and run functional and extended tests (debug build)"
	@echo " retest                 - run functional tests"
	@echo " retest-debug           - run functional tests (debug build)"
	@echo " retest-all             - run functional and extended tests"
	@echo " retest-all-debug       - run functional and extended tests (debug build)"
	@echo " test-cov               - generate code coverage report for test framework"
	@echo " test-gcov                      - build and run functional tests (gcov build)"
	@echo " test-wipe              - wipe (temporary) files generated by unit tests"
	@echo " test-wipe-cov          - wipe code coverage report for test framework"
	@echo " test-wipe-doc          - wipe documentation for test framework"
	@echo " test-wipe-papi         - rebuild vpp_papi sources"
	@echo " test-wipe-all          - wipe (temporary) files generated by unit tests, docs, and coverage"
	@echo " test-shell             - enter shell with test environment"
	@echo " test-shell-debug       - enter shell with test environment (debug build)"
	@echo " test-checkstyle        - check PEP8 compliance for test framework"
	@echo " test-refresh-deps      - refresh the Python dependencies for the tests"
	@echo ""
	@echo "Arguments controlling test runs:"
	@echo " V=[0|1|2]              - set test verbosity level"
	@echo "                          0=ERROR, 1=INFO, 2=DEBUG"
	@echo " TEST_JOBS=[<n>|auto]   - use at most <n> parallel python processes for test execution, if auto, set to number of available cpus (default: 1)"
	@echo " MAX_VPP_CPUS=[<n>|auto]- use at most <n> cpus for running vpp main and worker threads, if auto, set to number of available cpus (default: auto)"
	@echo " CACHE_OUTPUT=[0|1]     - cache VPP stdout/stderr and log as one block after test finishes (default: 1)"
	@echo " FAILFAST=[0|1]         - fail fast if 1, complete all tests if 0"
	@echo " TIMEOUT=<timeout>      - fail test suite if any single test takes longer than <timeout> (in seconds) to finish (default: 600)"
	@echo " RETRIES=<n>            - retry failed tests <n> times"
	@echo " DEBUG=<type>           - set VPP debugging kind"
	@echo "    DEBUG=core          - detect coredump and load it in gdb on crash"
	@echo "    DEBUG=gdb           - allow easy debugging by printing VPP PID"
	@echo "                          and waiting for user input before running"
	@echo "                          and tearing down a testcase"
	@echo "    DEBUG=gdbserver     - run gdb inside a gdb server, otherwise"
	@echo "                          same as above"
	@echo "    DEBUG=attach        - attach test case to already running vpp in gdb (see test-start-vpp-in-gdb)"
	@echo ""
	@echo " STEP=[yes|no]          - ease debugging by stepping through a testcase"
	@echo " SANITY=[yes|no]        - perform sanity import of vpp-api/sanity vpp run before running tests (default: yes)"
	@echo " EXTENDED_TESTS=[1|y]   - used by '[re]test-all' & '[re]test-all-debug' to run extended tests"
	@echo " TEST=<filter>          - filter the set of tests:"
	@echo "    by file-name        - only run tests from specified file, e.g. TEST=test_bfd selects all tests from test_bfd.py"
	@echo "    by file-suffix      - same as file-name, but 'test_' is omitted e.g. TEST=bfd selects all tests from test_bfd.py"
	@echo "    by wildcard         - wildcard filter is <file>.<class>.<test function>, each can be replaced by '*'"
	@echo "                          e.g. TEST='test_bfd.*.*' is equivalent to above example of filter by file-name"
	@echo "                               TEST='bfd.*.*' is equivalent to above example of filter by file-suffix"
	@echo "                               TEST='bfd.BFDAPITestCase.*' selects all tests from test_bfd.py which are part of BFDAPITestCase class"
	@echo "                               TEST='bfd.BFDAPITestCase.test_add_bfd' selects a single test named test_add_bfd from test_bfd.py/BFDAPITestCase"
	@echo "                               TEST='*.*.test_add_bfd' selects all test functions named test_add_bfd from all files/classes"
	@echo ""
	@echo " VARIANT=<variant>      - specify which march node variant to unit test"
	@echo "                          e.g. VARIANT=skx test the skx march variants"
	@echo "                          e.g. VARIANT=icl test the icl march variants"
	@echo ""
	@echo " COREDUMP_SIZE=<size>   - pass <size> as unix { coredump-size <size> } argument to vpp"
	@echo "                          e.g. COREDUMP_SIZE=4g"
	@echo "                               COREDUMP_SIZE=unlimited"
	@echo " COREDUMP_COMPRESS=1    - compress core files if not debugging them"
	@echo " EXTERN_TESTS=<path>    - path to out-of-tree test_<name>.py files containing test cases"
	@echo " EXTERN_PLUGINS=<path>  - path to out-of-tree plugins to be loaded by vpp under test"
	@echo " EXTERN_COV_DIR=<path>  - path to out-of-tree prefix, where source, object and .gcda files can be found for coverage report"
	@echo ""
	@echo " PROFILE=1              - enable profiling of test framework via cProfile module"
	@echo " PROFILE_SORT_BY=opt    - sort profiling report by opt - consult cProfile documentation for possible values (default: cumtime)"
	@echo " PROFILE_OUTPUT=file    - output profiling info to file - use absolute path (default: stdout)"
	@echo ""
	@echo " TEST_DEBUG=1           - turn on debugging of the test framework itself (expert)"
	@echo ""
	@echo " SKIP_AARCH64=1         - skip tests that are failing on the ARM platorm in FD.io CI"
	@echo ""
	@echo " RND_SEED=seed          - Seed RND with given seed"
	@echo ""
	@echo "Starting VPP in GDB for use with DEBUG=attach:"
	@echo ""
	@echo " test-start-vpp-in-gdb       - start VPP in gdb (release)"
	@echo " test-start-vpp-debug-in-gdb - start VPP in gdb (debug)"
	@echo ""
	@echo "Arguments controlling VPP in GDB runs:"
	@echo " "
	@echo " VPP_IN_GDB_TMP_DIR     - specify directory to run VPP IN (default: /tmp/unittest-attach-gdb)"
	@echo " VPP_IN_GDB_NO_RMDIR=0  - don't remove existing tmp dir but fail instead"
	@echo " VPP_IN_GDB_CMDLINE=1   - add 'interactive' to VPP arguments to run with command line"
	@echo ""
	@echo "Creating test documentation"
	@echo " test-doc               - generate documentation for test framework"
	@echo " test-wipe-doc          - wipe documentation for test framework"
	@echo ""
	@echo "Creating test code coverage report"
	@echo " test-cov               - generate code coverage report for test framework"
	@echo " test-wipe-cov          - wipe code coverage report for test framework"
	@echo ""
	@echo "Verifying code-style"
	@echo " test-checkstyle        - check PEP8 compliance"
	@echo ""
s1">'host']), enable_logging=False) return True @staticmethod def check_honeycomb_shutdown_state(*nodes): """Check state of Honeycomb service during shutdown on specified nodes. Honeycomb nodes reply with connection refused or the following status codes depending on shutdown progress: codes 200, 404. :param nodes: List of DUT nodes stopping Honeycomb. :type nodes: list :return: True if all GETs fail to connect. :rtype bool """ cmd = "ps -ef | grep -v grep | grep honeycomb" for node in nodes: if node['type'] == NodeType.DUT: try: status_code, _ = HTTPRequest.get(node, '/index.html', enable_logging=False) if status_code == HTTPCodes.OK: raise HoneycombError('Honeycomb on node {0} is still ' 'running.'.format(node['host']), enable_logging=False) elif status_code == HTTPCodes.NOT_FOUND: raise HoneycombError('Honeycomb on node {0} is shutting' ' down.'.format(node['host']), enable_logging=False) else: raise HoneycombError('Unexpected return code: {0}.'. format(status_code)) except HTTPRequestError: logger.debug('Connection refused, checking the process ' 'state ...') ssh = SSH() ssh.connect(node) (ret_code, _, _) = ssh.exec_command_sudo(cmd) if ret_code == 0: raise HoneycombError('Honeycomb on node {0} is still ' 'running.'.format(node['host']), enable_logging=False) else: logger.info("Honeycomb on node {0} has stopped". format(node['host'])) return True @staticmethod def configure_restconf_binding_address(node): """Configure Honeycomb to accept restconf requests from all IP addresses. IP version is determined by node data. :param node: Information about a DUT node. :type node: dict :raises HoneycombError: If the configuration could not be changed. """ find = "restconf-binding-address" try: IPv6Address(unicode(node["host"])) # if management IP of the node is in IPv6 format replace = '\\"restconf-binding-address\\": \\"0::0\\",' except (AttributeError, AddressValueError): replace = '\\"restconf-binding-address\\": \\"0.0.0.0\\",' argument = '"/{0}/c\\ {1}"'.format(find, replace) path = "{0}/config/restconf.json".format(Const.REMOTE_HC_DIR) command = "sed -i {0} {1}".format(argument, path) ssh = SSH() ssh.connect(node) (ret_code, _, stderr) = ssh.exec_command_sudo(command) if ret_code != 0: raise HoneycombError("Failed to modify configuration on " "node {0}, {1}".format(node, stderr)) @staticmethod def configure_jvpp_timeout(node, timeout=10): """Configure timeout value for Java API commands Honeycomb sends to VPP. :param node: Information about a DUT node. :param timeout: Timeout value in seconds. :type node: dict :type timeout: int :raises HoneycombError: If the configuration could not be changed. """ find = "jvpp-request-timeout" replace = '\\"jvpp-request-timeout\\": {0}'.format(timeout) argument = '"/{0}/c\\ {1}"'.format(find, replace) path = "{0}/config/jvpp.json".format(Const.REMOTE_HC_DIR) command = "sed -i {0} {1}".format(argument, path) ssh = SSH() ssh.connect(node) (ret_code, _, stderr) = ssh.exec_command_sudo(command) if ret_code != 0: raise HoneycombError("Failed to modify configuration on " "node {0}, {1}".format(node, stderr)) @staticmethod def print_environment(nodes): """Print information about the nodes to log. The information is defined by commands in cmds tuple at the beginning of this method. :param nodes: List of DUT nodes to get information about. :type nodes: list """ # TODO: When everything is set and running in VIRL env, transform this # method to a keyword checking the environment. 
cmds = ("uname -a", "df -lh", "echo $JAVA_HOME", "echo $PATH", "which java", "java -version", "dpkg --list | grep openjdk", "ls -la /opt/honeycomb") for node in nodes: if node['type'] == NodeType.DUT: logger.info("Checking node {} ...".format(node['host'])) for cmd in cmds: logger.info("Command: {}".format(cmd)) ssh = SSH() ssh.connect(node) ssh.exec_command_sudo(cmd) @staticmethod def print_ports(node): """Uses "sudo netstat -anp | grep java" to print port where a java application listens. :param node: Honeycomb node where we want to print the ports. :type node: dict """ cmds = ("netstat -anp | grep java", "ps -ef | grep [h]oneycomb") logger.info("Checking node {} ...".format(node['host'])) for cmd in cmds: logger.info("Command: {}".format(cmd)) ssh = SSH() ssh.connect(node) ssh.exec_command_sudo(cmd) @staticmethod def configure_log_level(node, level): """Set Honeycomb logging to the specified level. :param node: Honeycomb node. :param level: Log level (INFO, DEBUG, TRACE). :type node: dict :type level: str """ find = 'logger name=\\"io.fd\\"' replace = '<logger name=\\"io.fd\\" level=\\"{0}\\"/>'.format(level) argument = '"/{0}/c\\ {1}"'.format(find, replace) path = "{0}/config/logback.xml".format(Const.REMOTE_HC_DIR) command = "sed -i {0} {1}".format(argument, path) ssh = SSH() ssh.connect(node) (ret_code, _, stderr) = ssh.exec_command_sudo(command) if ret_code != 0: raise HoneycombError("Failed to modify configuration on " "node {0}, {1}".format(node, stderr)) @staticmethod def manage_honeycomb_features(node, feature, disable=False): """Configure Honeycomb to use features that are disabled by default, or disable previously enabled features. ..Note:: If the module is not enabled in VPP, Honeycomb will be unable to establish VPP connection. :param node: Honeycomb node. :param feature: Feature to enable. :param disable: Disable the specified feature instead of enabling it. :type node: dict :type feature: string :type disable: bool :raises HoneycombError: If the configuration could not be changed. """ disabled_features = { "NSH": "io.fd.hc2vpp.vppnsh.impl.VppNshModule" } ssh = SSH() ssh.connect(node) if feature in disabled_features.keys(): # uncomment by replacing the entire line find = replace = "{0}".format(disabled_features[feature]) if disable: replace = "// {0}".format(find) argument = '"/{0}/c\\ {1}"'.format(find, replace) path = "{0}/modules/*module-config"\ .format(Const.REMOTE_HC_DIR) command = "sed -i {0} {1}".format(argument, path) (ret_code, _, stderr) = ssh.exec_command_sudo(command) if ret_code != 0: raise HoneycombError("Failed to modify configuration on " "node {0}, {1}".format(node, stderr)) else: raise HoneycombError( "Unrecognized feature {0}.".format(feature)) @staticmethod def copy_java_libraries(node): """Copy Java libraries installed by vpp-api-java package to honeycomb lib folder. This is a (temporary?) workaround for jvpp version mismatches. 
:param node: Honeycomb node :type node: dict """ ssh = SSH() ssh.connect(node) (_, stdout, _) = ssh.exec_command_sudo( "ls /usr/share/java | grep ^jvpp-*") files = stdout.split("\n")[:-1] for item in files: # example filenames: # jvpp-registry-17.04.jar # jvpp-core-17.04.jar parts = item.split("-") version = "{0}-SNAPSHOT".format(parts[2][:5]) artifact_id = "{0}-{1}".format(parts[0], parts[1]) directory = "{0}/lib/io/fd/vpp/{1}/{2}".format( Const.REMOTE_HC_DIR, artifact_id, version) cmd = "sudo mkdir -p {0}; " \ "sudo cp /usr/share/java/{1} {0}/{2}-{3}.jar".format( directory, item, artifact_id, version) (ret_code, _, stderr) = ssh.exec_command(cmd) if ret_code != 0: raise HoneycombError("Failed to copy JVPP libraries on " "node {0}, {1}".format(node, stderr)) @staticmethod def copy_odl_client(node, odl_name, src_path, dst_path): """Copy ODL Client from source path to destination path. :param node: Honeycomb node. :param odl_name: Name of ODL client version to use. :param src_path: Source Path where to find ODl client. :param dst_path: Destination path. :type node: dict :type odl_name: str :type src_path: str :type dst_path: str :raises HoneycombError: If the operation fails. """ ssh = SSH() ssh.connect(node) cmd = "cp -r {src}/*karaf_{odl_name}* {dst}".format( src=src_path, odl_name=odl_name, dst=dst_path) ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=60) if int(ret_code) != 0: raise HoneycombError( "Failed to copy ODL client on node {0}".format(node["host"])) @staticmethod def setup_odl_client(node, path): """Start ODL client on the specified node. Karaf should be located in the provided path, and VPP and Honeycomb should already be running, otherwise the start will fail. :param node: Node to start ODL client on. :param path: Path to ODL client on node. :type node: dict :type path: str :raises HoneycombError: If Honeycomb fails to start. """ logger.console("\nStarting ODL client ...") ssh = SSH() ssh.connect(node) cmd = "{path}/*karaf*/bin/start clean".format(path=path) ret_code, _, _ = ssh.exec_command_sudo(cmd) if int(ret_code) != 0: raise HoneycombError('Node {0} failed to start ODL.'. format(node['host'])) else: logger.info("Starting the ODL client on node {0} is " "in progress ...".format(node['host'])) @staticmethod def install_odl_features(node, path, *features): """Install required features on a running ODL client. :param node: Honeycomb node. :param path: Path to ODL client on node. :param features: Optional, list of additional features to install. :type node: dict :type path: str :type features: list """ ssh = SSH() ssh.connect(node) cmd = "{path}/*karaf*/bin/client -u karaf feature:install " \ "odl-restconf-all odl-netconf-connector-all".format(path=path) for feature in features: cmd += " {0}".format(feature) ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=120) if int(ret_code) != 0: raise HoneycombError("Feature install did not succeed.") @staticmethod def check_odl_startup_state(node): """Check the status of ODL client startup. :param node: Honeycomb node. :param node: dict :returns: True when ODL is started. :rtype: bool :raises HoneycombError: When the response is not code 200: OK. 
""" path = HcUtil.read_path_from_url_file( "odl_client/odl_netconf_connector") expected_status_codes = (HTTPCodes.UNAUTHORIZED, HTTPCodes.FORBIDDEN, HTTPCodes.NOT_FOUND, HTTPCodes.SERVICE_UNAVAILABLE, HTTPCodes.INTERNAL_SERVER_ERROR) status_code, _ = HTTPRequest.get(node, path, timeout=10, enable_logging=False) if status_code == HTTPCodes.OK: logger.info("ODL client on node {0} is up and running". format(node['host'])) elif status_code in expected_status_codes: if status_code == HTTPCodes.UNAUTHORIZED: logger.info('Unauthorized. If this triggers keyword ' 'timeout, verify username and password.') raise HoneycombError('ODL client on node {0} running but ' 'not yet ready.'.format(node['host']), enable_logging=False) else: raise HoneycombError('Unexpected return code: {0}.'. format(status_code)) return True @staticmethod def check_odl_shutdown_state(node): """Check the status of ODL client shutdown. :param node: Honeycomb node. :type node: dict :returns: True when ODL is stopped. :rtype: bool :raises HoneycombError: When the response is not code 200: OK. """ cmd = "pgrep -f karaf" path = HcUtil.read_path_from_url_file( "odl_client/odl_netconf_connector") try: HTTPRequest.get(node, path, timeout=10, enable_logging=False) raise HoneycombError("ODL client is still running.") except HTTPRequestError: logger.debug("Connection refused, checking process state....") ssh = SSH() ssh.connect(node) ret_code, _, _ = ssh.exec_command(cmd) if ret_code == 0: raise HoneycombError("ODL client is still running.") return True @staticmethod def mount_honeycomb_on_odl(node): """Tell ODL client to mount Honeycomb instance over netconf. :param node: Honeycomb node. :type node: dict :raises HoneycombError: When the response is not code 200: OK. """ path = HcUtil.read_path_from_url_file( "odl_client/odl_netconf_connector") url_file = "{0}/{1}".format(Const.RESOURCES_TPL_HC, "odl_client/mount_honeycomb.xml") with open(url_file) as template: data = template.read() status_code, _ = HTTPRequest.post( node, path, headers={"Content-Type": "application/xml"}, payload=data, timeout=10, enable_logging=False) if status_code == HTTPCodes.OK: logger.info("ODL mount point configured successfully.") elif status_code == HTTPCodes.CONFLICT: logger.info("ODL mount point was already configured.") else: raise HoneycombError('Mount point configuration not successful') @staticmethod def stop_odl_client(node, path): """Stop ODL client service on the specified node. :param node: Node to start ODL client on. :param path: Path to ODL client. :type node: dict :type path: str :raises HoneycombError: If ODL client fails to stop. """ ssh = SSH() ssh.connect(node) cmd = "{0}/*karaf*/bin/stop".format(path) ssh = SSH() ssh.connect(node) ret_code, _, _ = ssh.exec_command_sudo(cmd) if int(ret_code) != 0: logger.debug("ODL Client refused to shut down.") cmd = "pkill -f 'karaf'" (ret_code, _, _) = ssh.exec_command_sudo(cmd) if int(ret_code) != 0: raise HoneycombError('Node {0} failed to stop ODL.'. format(node['host'])) logger.info("ODL client service stopped.") @staticmethod def stop_vpp_service(node): """Stop VPP service on the specified node. :param node: VPP node. :type node: dict :raises RuntimeError: If VPP fails to stop. """ ssh = SSH() ssh.connect(node) cmd = "service vpp stop" ret_code, _, _ = ssh.exec_command_sudo(cmd, timeout=80) if int(ret_code) != 0: logger.debug("VPP service refused to shut down.") class HoneycombStartupConfig(object): """Generator for Honeycomb startup configuration. 
""" def __init__(self): """Initializer.""" self.template = """ #!/bin/sh - STATUS=100 while [ $STATUS -eq 100 ] do {java_call} -jar $(dirname $0)/{jar_filename} STATUS=$? echo "Honeycomb exited with status: $STATUS" if [ $STATUS -eq 100 ] then echo "Restarting..." fi done """ self.java_call = "{scheduler} {affinity} java {jit_mode} {params}" self.scheduler = "" self.core_affinity = "" self.jit_mode = "" self.params = "" self.numa = "" self.config = "" self.ssh = SSH() def apply_config(self, node): """Generate configuration file /opt/honeycomb/honeycomb on the specified node. :param node: Honeycomb node. :type node: dict """ self.ssh.connect(node) _, filename, _ = self.ssh.exec_command("ls /opt/honeycomb | grep .jar") java_call = self.java_call.format(scheduler=self.scheduler, affinity=self.core_affinity, jit_mode=self.jit_mode, params=self.params) self.config = self.template.format(java_call=java_call, jar_filename=filename) self.ssh.connect(node) cmd = "echo '{config}' > /tmp/honeycomb " \ "&& chmod +x /tmp/honeycomb " \ "&& sudo mv -f /tmp/honeycomb /opt/honeycomb".\ format(config=self.config) self.ssh.exec_command(cmd) def set_cpu_scheduler(self, scheduler="FIFO"): """Use alternate CPU scheduler. Note: OTHER scheduler doesn't load-balance over isolcpus. :param scheduler: CPU scheduler to use. :type scheduler: str """ schedulers = {"FIFO": "-f 99", # First In, First Out "RR": "-r 99", # Round Robin "OTHER": "-o", # Ubuntu default } self.scheduler = "chrt {0}".format(schedulers[scheduler]) def set_cpu_core_affinity(self, low, high=None): """Set core affinity for the honeycomb process and subprocesses. :param low: Lowest core ID number. :param high: Highest core ID number. Leave empty to use a single core. :type low: int :type high: int """ self.core_affinity = "taskset -c {low}-{high}".format( low=low, high=high if high else low) def set_jit_compiler_mode(self, jit_mode): """Set running mode for Java's JIT compiler. :param jit_mode: Desiret JIT mode. :type jit_mode: str """ modes = {"client": "-client", # Default "server": "-server", # Higher performance but longer warmup "classic": "-classic" # Disables JIT compiler } self.jit_mode = modes[jit_mode] def set_memory_size(self, mem_min, mem_max=None): """Set minimum and maximum memory use for the JVM. :param mem_min: Minimum amount of memory (MB). :param mem_max: Maximum amount of memory (MB). Default is 4 times minimum value. :type mem_min: int :type mem_max: int """ self.params += " -Xms{min}m -Xmx{max}m".format( min=mem_min, max=mem_max if mem_max else mem_min*4) def set_metaspace_size(self, mem_min, mem_max=None): """Set minimum and maximum memory used for class metadata in the JVM. :param mem_min: Minimum metaspace size (MB). :param mem_max: Maximum metaspace size (MB). Defailt is 4 times minimum value. :type mem_min: int :type mem_max: int """ self.params += " -XX:MetaspaceSize={min}m " \ "-XX:MaxMetaspaceSize={max}m".format( min=mem_min, max=mem_max if mem_max else mem_min*4) def set_numa_optimization(self): """Use optimization of memory use and garbage collection for NUMA architectures.""" self.params += " -XX:+UseNUMA -XX:+UseParallelGC"