From d3f26ece7d4383df0b22fe9c3cb3e695381ec737 Mon Sep 17 00:00:00 2001 From: Dan Klein Date: Mon, 24 Aug 2015 10:51:13 +0300 Subject: Initial push to external_lib migration --- .../python_lib/python-daemon-2.0.5/ChangeLog | 380 ----- .../python_lib/python-daemon-2.0.5/LICENSE.ASF-2 | 202 --- .../python_lib/python-daemon-2.0.5/LICENSE.GPL-3 | 674 -------- .../python_lib/python-daemon-2.0.5/MANIFEST.in | 7 - .../python_lib/python-daemon-2.0.5/PKG-INFO | 38 - .../python-daemon-2.0.5/daemon/__init__.py | 49 - .../python-daemon-2.0.5/daemon/_metadata.py | 152 -- .../python-daemon-2.0.5/daemon/daemon.py | 926 ----------- .../python-daemon-2.0.5/daemon/pidfile.py | 67 - .../python-daemon-2.0.5/daemon/runner.py | 324 ---- .../python_lib/python-daemon-2.0.5/doc/CREDITS | 53 - .../python_lib/python-daemon-2.0.5/doc/FAQ | 156 -- .../python_lib/python-daemon-2.0.5/doc/TODO | 95 -- .../python_lib/python-daemon-2.0.5/doc/hacking.txt | 180 -- .../python_daemon.egg-info/PKG-INFO | 38 - .../python_daemon.egg-info/SOURCES.txt | 30 - .../python_daemon.egg-info/dependency_links.txt | 1 - .../python_daemon.egg-info/not-zip-safe | 1 - .../python_daemon.egg-info/requires.txt | 3 - .../python_daemon.egg-info/top_level.txt | 1 - .../python_daemon.egg-info/version_info.json | 6 - .../python_lib/python-daemon-2.0.5/setup.cfg | 11 - .../python_lib/python-daemon-2.0.5/setup.py | 106 -- .../python-daemon-2.0.5/test/__init__.py | 23 - .../python-daemon-2.0.5/test/scaffold.py | 322 ---- .../python-daemon-2.0.5/test/test_daemon.py | 1744 -------------------- .../python-daemon-2.0.5/test/test_metadata.py | 380 ----- .../python-daemon-2.0.5/test/test_pidfile.py | 472 ------ .../python-daemon-2.0.5/test/test_runner.py | 675 -------- .../python_lib/python-daemon-2.0.5/test_version.py | 1373 --------------- .../python_lib/python-daemon-2.0.5/version.py | 547 ------ 31 files changed, 9036 deletions(-) delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/ChangeLog delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.ASF-2 delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.GPL-3 delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/MANIFEST.in delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/PKG-INFO delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/__init__.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/_metadata.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/daemon.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/pidfile.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/runner.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/CREDITS delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/FAQ delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/TODO delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/hacking.txt delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/PKG-INFO delete mode 100755 
scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/SOURCES.txt delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/dependency_links.txt delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/not-zip-safe delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/requires.txt delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/top_level.txt delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/version_info.json delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.cfg delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/__init__.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/scaffold.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_daemon.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_metadata.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_pidfile.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_runner.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test_version.py delete mode 100755 scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/version.py (limited to 'scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5') diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/ChangeLog b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/ChangeLog deleted file mode 100755 index 4975f781..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/ChangeLog +++ /dev/null @@ -1,380 +0,0 @@ -Version 2.0.5 -============= - -:Released: 2015-02-02 -:Maintainer: Ben Finney - -* Refine compatibility of exceptions for file operations. -* Specify the text encoding when opening the changelog file. - - -Version 2.0.4 -============= - -:Released: 2015-01-23 -:Maintainer: Ben Finney - -* Record version info via Setuptools commands. -* Remove the custom Setuptools entry points. - This closes Alioth bug#314948. - - -Version 2.0.3 -============= - -:Released: 2015-01-14 -:Maintainer: Ben Finney - -* Break circular import dependency for ‘setup.py’. -* Refactor all initial metadata functionality to ‘daemon._metadata’. -* Distribute ‘version’ (and its tests) only in source, not install. -* Build a “universal” (Python 2 and Python 3) wheel. - - -Version 2.0.2 -============= - -:Released: 2015-01-13 -:Maintainer: Ben Finney - -* Declare test-time dependency on recent ‘unittest2’. -* Declare packaging-time dependency on ‘docutils’ library. -* Include unit tests for ‘version’ module with source distribution. -* Record version info consistent with distribution metadata. - - -Version 2.0.1 -============= - -:Released: 2015-01-11 -:Maintainer: Ben Finney - -* Include the ‘version’ module with source distribution. 
- - -Version 2.0 -=========== - -:Released: 2015-01-10 -:Maintainer: Ben Finney - -* Support both Python 3 (version 3.2 or later) and Python 2 (version - 2.7 or later). -* Document the API of all functions comprehensively in docstrings. -* Add a hacking guide for developers. -* Add explicit credit for contributors. -* Document the security impact of the default umask. - -* Specify explicit text or binary mode when opening files. -* Preserve exception context in custom exceptions. - -* Declare compatibility with current Python versions. -* Depend on Python 3 compatible libraries. -* Update package homepage to Alioth hosted project page. -* Use ‘pydoc.splitdoc’ to get package description text. -* Remove ASCII translation of package description, not needed now the - docstring is a proper Unicode text value. -* Include test suite with source distribution. -* Move package metadata to ‘daemon/_metadata.py’. -* Migrate to JSON (instead of Python) for serialised version info. -* Add unit tests for metadata. -* Store and retrieve version info in Setuptools metadata. - -* Migrate to ‘str.format’ for interpolation of values into text. -* Migrate to ‘mock’ library for mock objects in tests. -* Migrate to ‘testscenarios’ library for unit test scenarios. -* Migrate to ‘unittest2’ library for back-ported improvements. - Remove custom test suite creation. -* Discriminate Python 2-and-3 compatible usage of dict methods. -* Discriminate Python 2-and-3 compatible bytes versus text. -* Declare explicit absolute and relative imports. -* Discriminate between different ‘fileno’ method behaviours. - In Python 3, ‘StringIO.fileno’ is callable but raises an exception. -* Migrate to built-in ‘next’ function. -* Wrap the ‘fromlist’ parameter of ‘__import__’ for Python 3 - compatibility. -* Wrap function introspection for Python 3 compatibility. -* Wrap standard library imports where names changed in Python 3. - - -Version 1.6.1 -============= - -:Released: 2014-08-04 -:Maintainer: Ben Finney - -* Use unambiguous “except FooType as foo” syntax. - This is to ease the port to Python 3, where the ambiguous comma - usage is an error. -* Ensure a ‘basestring’ name bound to the base type for strings. - This is to allow checks to work on Python 2 and 3. -* Specify versions of Python supported, as trove classifiers. - -* Update copyright notices. -* Add editor hints for most files. -* Distinguish continuation-line indentation versus block indentation. - -* Use unicode literals by default, specifying bytes where necessary. - This is to ease the port to Python 3, where the default string type - is unicode. -* Update copyright notices. -* Update the GPL license file to version 3, as declared in our - copyright notices. - -* Change license of library code to Apache License 2.0. Rationale at - . - - -Version 1.6 -=========== - -:Released: 2010-05-10 -:Maintainer: Ben Finney - -* Use absolute imports to disambiguate provenance of names. -* setup.py: Require ‘lockfile >=0.9’. -* daemon/pidfile.py: Renamed from ‘daemon/pidlockfile.py’. Change - references elsewhere to use this new name. -* test/test_pidfile.py: Renamed from ‘test/test_pidlockfile.py’. - Change references elsewhere to use this new name. -* daemon/pidfile.py: Remove functionality now migrated to ‘lockfile’ - library. - -* FAQ: Add some entries and re-structure the document. - -* Use ‘unicode’ data type for all text values. -* Prepare for Python 3 upgrade by tweaking some names and imports. - -* MANIFEST.in: Include the documentation in the distribution. 
- - -Version 1.5.5 -============= - -:Released: 2010-03-02 -:Maintainer: Ben Finney - -* Stop using ‘pkg_resources’ and revert to pre-1.5.3 version-string - handling, until a better way that doesn't break everyone else's - installation can be found. - - -Version 1.5.4 -============= - -:Released: 2010-02-27 -:Maintainer: Ben Finney - -* MANIFEST.in: Explicitly include version data file, otherwise - everything breaks for users of the sdist. - - -Version 1.5.3 -============= - -:Released: 2010-02-26 -:Maintainer: Ben Finney - -* daemon/daemon.py: Invoke the pidfile context manager's ‘__exit__’ - method with the correct arguments (as per - ). - Thanks to Ludvig Ericson for the bug report. -* version: New plain-text data file to store project version string. -* setup.py: Read version string from data file. -* daemon/version/__init__.py: Query version string with ‘pkg_resources’. - -* Add ‘pylint’ configuration for this project. -* Update copyright notices. - - -Version 1.5.2 -============= - -:Released: 2009-10-24 -:Maintainer: Ben Finney - -* Ensure we only prevent core dumps if ‘prevent_core’ is true. - Thanks to Denis Bilenko for reporting the lacking implementation of - this documented option. - -* Add initial Frequently Asked Questions document. - - -Version 1.5.1 -============= - -:Released: 2009-09-26 -:Maintainer: Ben Finney - -* Make a separate collection of DaemonRunner test scenarios. -* Handle a start request with a timeout on the PID file lock acquire. - -* Implement ‘TimeoutPIDLockFile’ to specify a timeout in advance of - lock acquisition. -* Use lock with timeout for ‘DaemonRunner’. - - -Version 1.5 -=========== - -:Released: 2009-09-24 -:Maintainer: Ben Finney - -* Make a separate collection of PIDLockFile test scenarios. - -* Raise specific errors on ‘DaemonRunner’ failures. -* Distinguish different conditions on reading and parsing PID file. -* Refactor code to ‘_terminate_daemon_process’ method. -* Improve explanations in comments and docstrings. -* Don't set pidfile at all if no path specified to constructor. -* Write the PID file using correct OS locking and permissions. -* Close the PID file after writing. -* Implement ‘PIDLockFile’ as subclass of ‘lockfile.LinkFileLock’. -* Remove redundant checks for file existence. - -* Manage the excluded file descriptors as a set (not a list). -* Only inspect the file descriptor of streams if they actually have - one (via a ‘fileno’ method) when determining which file descriptors - to close. Thanks to Ask Solem for revealing this bug. - - -Version 1.4.8 -============= - -:Released: 2009-09-17 -:Maintainer: Ben Finney - -* Remove child-exit signal (‘SIGCLD’, ‘SIGCHLD’) from default signal - map. Thanks to Joel Martin for pinpointing this issue. -* Document requirement for ensuring any operating-system specific - signal handlers are considered. -* Refactor ‘fork_then_exit_parent’ functionality to avoid duplicate - code. -* Remove redundant imports. -* Remove unused code from unit test suite scaffold. -* Add specific license terms for unit test suite scaffold. - - -Version 1.4.7 -============= - -:Released: 2009-09-03 -:Maintainer: Ben Finney - -* Fix keywords argument for distribution setup. -* Exclude ‘test’ package from distribution installation. - - -Version 1.4.6 -============= - -:Released: 2009-06-21 -:Maintainer: Ben Finney - -* Update documentation for changes from latest PEP 3143 revision. -* Implement DaemonContext.is_open method. 
- - -Version 1.4.5 -============= - -:Released: 2009-05-17 -:Maintainer: Ben Finney - -* Register DaemonContext.close method for atexit processing. -* Move PID file cleanup to close method. -* Improve docstrings by reference to, and copy from, PEP 3143. -* Use mock checking capabilities of newer ‘MiniMock’ library. -* Automate building a versioned distribution tarball. -* Include developer documentation files in source distribution. - - -Version 1.4.4 -============= - -:Released: 2009-03-26 -:Maintainer: Ben Finney - -* Conform to current PEP version, now released as PEP 3143 “Standard - daemon process library”. -* Ensure UID and GID are set in correct order. -* Delay closing all open files until just before re-binding standard - streams. -* Redirect standard streams to null device by default. - - -Version 1.4.3 -============= - -:Released: 2009-03-19 -:Maintainer: Ben Finney - -* Close the PID file context on exit. - - -Version 1.4.2 -============= - -:Released: 2009-03-18 -:Maintainer: Ben Finney - -* Context manager methods for DaemonContext. - - -Version 1.4.1 -============= - -:Released: 2009-03-18 -:Maintainer: Ben Finney - -* Improvements to docstrings. -* Further conformance with draft PEP. - - -Version 1.4 -=========== - -:Released: 2009-03-17 -:Maintainer: Ben Finney - -* Implement the interface from a draft PEP for process daemonisation. -* Complete statement coverage from unit test suite. - - -Version 1.3 -=========== - -:Released: 2009-03-12 -:Maintainer: Ben Finney - -* Separate controller (now ‘DaemonRunner’) from daemon process - context (now ‘DaemonContext’). -* Fix many corner cases and bugs. -* Huge increase in unit test suite. - - -Version 1.2 -=========== - -:Released: 2009-01-27 -:Maintainer: Ben Finney - -* Initial release of this project forked from ‘bda.daemon’. Thanks, - Robert Niederreiter. -* Refactor some functionality out to helper functions. -* Begin unit test suite. - - -.. - This is free software: you may copy, modify, and/or distribute this work - under the terms of the Apache License version 2.0 as published by the - Apache Software Foundation. - No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -.. - Local variables: - coding: utf-8 - mode: text - mode: rst - End: - vim: fileencoding=utf-8 filetype=rst : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.ASF-2 b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.ASF-2 deleted file mode 100755 index d6456956..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.ASF-2 +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.GPL-3 b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.GPL-3 deleted file mode 100755 index 94a9ed02..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.GPL-3 +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. 
-States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. 
- - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. 
- - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. 
- - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/MANIFEST.in b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/MANIFEST.in deleted file mode 100755 index d3d4341e..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/MANIFEST.in +++ /dev/null @@ -1,7 +0,0 @@ -include MANIFEST.in -include LICENSE.* -include ChangeLog -recursive-include doc * -include version.py -include test_version.py -recursive-include test *.py diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/PKG-INFO b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/PKG-INFO deleted file mode 100755 index fd81f509..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/PKG-INFO +++ /dev/null @@ -1,38 +0,0 @@ -Metadata-Version: 1.1 -Name: python-daemon -Version: 2.0.5 -Summary: Library to implement a well-behaved Unix daemon process. 
-Home-page: https://alioth.debian.org/projects/python-daemon/ -Author: Ben Finney -Author-email: ben+python@benfinney.id.au -License: Apache-2 -Description: This library implements the well-behaved daemon specification of - :pep:`3143`, “Standard daemon process library”. - - A well-behaved Unix daemon process is tricky to get right, but the - required steps are much the same for every daemon program. A - `DaemonContext` instance holds the behaviour and configured - process environment for the program; use the instance as a context - manager to enter a daemon state. - - Simple example of usage:: - - import daemon - - from spam import do_main_program - - with daemon.DaemonContext(): - do_main_program() - - Customisation of the steps to become a daemon is available by - setting options on the `DaemonContext` instance; see the - documentation for that class for each option. -Keywords: daemon,fork,unix -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Intended Audience :: Developers -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/__init__.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/__init__.py deleted file mode 100755 index 4731a6ef..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- - -# daemon/__init__.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2009–2015 Ben Finney -# Copyright © 2006 Robert Niederreiter -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Library to implement a well-behaved Unix daemon process. - - This library implements the well-behaved daemon specification of - :pep:`3143`, “Standard daemon process library”. - - A well-behaved Unix daemon process is tricky to get right, but the - required steps are much the same for every daemon program. A - `DaemonContext` instance holds the behaviour and configured - process environment for the program; use the instance as a context - manager to enter a daemon state. - - Simple example of usage:: - - import daemon - - from spam import do_main_program - - with daemon.DaemonContext(): - do_main_program() - - Customisation of the steps to become a daemon is available by - setting options on the `DaemonContext` instance; see the - documentation for that class for each option. 
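A minimal sketch of the customisation described above: the same context-manager pattern, but with a few of the documented options set explicitly. The paths and the ``spam`` module are placeholders, not taken from any real deployment::

    import daemon

    from spam import do_main_program

    # Open the log file before daemonising; a stream passed as ``stdout``
    # or ``stderr`` keeps its file descriptor open across the switch.
    log_file = open("/var/log/spam.log", "w+")      # placeholder path

    context = daemon.DaemonContext(
            working_directory="/var/lib/spam",      # placeholder path
            umask=0o022,
            stdout=log_file,
            stderr=log_file,
            )

    with context:
        do_main_program()

Assigning the same options after construction (for example ``context.umask = 0o022``) is equivalent, as long as it happens before the context is opened.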
- - """ - -from __future__ import (absolute_import, unicode_literals) - -from .daemon import DaemonContext - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/_metadata.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/_metadata.py deleted file mode 100755 index 6d22a2b7..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/_metadata.py +++ /dev/null @@ -1,152 +0,0 @@ -# -*- coding: utf-8 -*- - -# daemon/_metadata.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Package metadata for the ‘python-daemon’ distribution. """ - -from __future__ import (absolute_import, unicode_literals) - -import json -import re -import collections -import datetime - -import pkg_resources - - -distribution_name = "python-daemon" -version_info_filename = "version_info.json" - -def get_distribution_version_info(filename=version_info_filename): - """ Get the version info from the installed distribution. - - :param filename: Base filename of the version info resource. - :return: The version info as a mapping of fields. If the - distribution is not available, the mapping is empty. - - The version info is stored as a metadata file in the - distribution. - - """ - version_info = { - 'release_date': "UNKNOWN", - 'version': "UNKNOWN", - 'maintainer': "UNKNOWN", - } - - try: - distribution = pkg_resources.get_distribution(distribution_name) - except pkg_resources.DistributionNotFound: - distribution = None - - if distribution is not None: - if distribution.has_metadata(version_info_filename): - content = distribution.get_metadata(version_info_filename) - version_info = json.loads(content) - - return version_info - -version_info = get_distribution_version_info() - -version_installed = version_info['version'] - - -rfc822_person_regex = re.compile( - "^(?P[^<]+) <(?P[^>]+)>$") - -ParsedPerson = collections.namedtuple('ParsedPerson', ['name', 'email']) - -def parse_person_field(value): - """ Parse a person field into name and email address. - - :param value: The text value specifying a person. - :return: A 2-tuple (name, email) for the person's details. - - If the `value` does not match a standard person with email - address, the `email` item is ``None``. - - """ - result = (None, None) - - match = rfc822_person_regex.match(value) - if len(value): - if match is not None: - result = ParsedPerson( - name=match.group('name'), - email=match.group('email')) - else: - result = ParsedPerson(name=value, email=None) - - return result - -author_name = "Ben Finney" -author_email = "ben+python@benfinney.id.au" -author = "{name} <{email}>".format(name=author_name, email=author_email) - - -class YearRange: - """ A range of years spanning a period. 
""" - - def __init__(self, begin, end=None): - self.begin = begin - self.end = end - - def __unicode__(self): - text = "{range.begin:04d}".format(range=self) - if self.end is not None: - if self.end > self.begin: - text = "{range.begin:04d}–{range.end:04d}".format(range=self) - return text - - __str__ = __unicode__ - - -def make_year_range(begin_year, end_date=None): - """ Construct the year range given a start and possible end date. - - :param begin_date: The beginning year (text) for the range. - :param end_date: The end date (text, ISO-8601 format) for the - range, or a non-date token string. - :return: The range of years as a `YearRange` instance. - - If the `end_date` is not a valid ISO-8601 date string, the - range has ``None`` for the end year. - - """ - begin_year = int(begin_year) - - try: - end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d") - except (TypeError, ValueError): - # Specified end_date value is not a valid date. - end_year = None - else: - end_year = end_date.year - - year_range = YearRange(begin=begin_year, end=end_year) - - return year_range - -copyright_year_begin = "2001" -build_date = version_info['release_date'] -copyright_year_range = make_year_range(copyright_year_begin, build_date) - -copyright = "Copyright © {year_range} {author} and others".format( - year_range=copyright_year_range, author=author) -license = "Apache-2" -url = "https://alioth.debian.org/projects/python-daemon/" - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/daemon.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/daemon.py deleted file mode 100755 index 07810cf1..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/daemon.py +++ /dev/null @@ -1,926 +0,0 @@ -# -*- coding: utf-8 -*- - -# daemon/daemon.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# Copyright © 2007–2008 Robert Niederreiter, Jens Klein -# Copyright © 2004–2005 Chad J. Schroeder -# Copyright © 2003 Clark Evans -# Copyright © 2002 Noah Spurrier -# Copyright © 2001 Jürgen Hermann -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Daemon process behaviour. - """ - -from __future__ import (absolute_import, unicode_literals) - -import os -import sys -import resource -import errno -import signal -import socket -import atexit -try: - # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text). - basestring = basestring - unicode = unicode -except NameError: - # Python 3 names the Unicode data type ‘str’. - basestring = str - unicode = str - - -class DaemonError(Exception): - """ Base exception class for errors from this module. """ - - def __init__(self, *args, **kwargs): - self._chain_from_context() - - super(DaemonError, self).__init__(*args, **kwargs) - - def _chain_from_context(self): - _chain_exception_from_existing_exception_context(self, as_cause=True) - - -class DaemonOSEnvironmentError(DaemonError, OSError): - """ Exception raised when daemon OS environment setup receives error. """ - - -class DaemonProcessDetachError(DaemonError, OSError): - """ Exception raised when process detach fails. 
""" - - -class DaemonContext: - """ Context for turning the current program into a daemon process. - - A `DaemonContext` instance represents the behaviour settings and - process context for the program when it becomes a daemon. The - behaviour and environment is customised by setting options on the - instance, before calling the `open` method. - - Each option can be passed as a keyword argument to the `DaemonContext` - constructor, or subsequently altered by assigning to an attribute on - the instance at any time prior to calling `open`. That is, for - options named `wibble` and `wubble`, the following invocation:: - - foo = daemon.DaemonContext(wibble=bar, wubble=baz) - foo.open() - - is equivalent to:: - - foo = daemon.DaemonContext() - foo.wibble = bar - foo.wubble = baz - foo.open() - - The following options are defined. - - `files_preserve` - :Default: ``None`` - - List of files that should *not* be closed when starting the - daemon. If ``None``, all open file descriptors will be closed. - - Elements of the list are file descriptors (as returned by a file - object's `fileno()` method) or Python `file` objects. Each - specifies a file that is not to be closed during daemon start. - - `chroot_directory` - :Default: ``None`` - - Full path to a directory to set as the effective root directory of - the process. If ``None``, specifies that the root directory is not - to be changed. - - `working_directory` - :Default: ``'/'`` - - Full path of the working directory to which the process should - change on daemon start. - - Since a filesystem cannot be unmounted if a process has its - current working directory on that filesystem, this should either - be left at default or set to a directory that is a sensible “home - directory” for the daemon while it is running. - - `umask` - :Default: ``0`` - - File access creation mask (“umask”) to set for the process on - daemon start. - - A daemon should not rely on the parent process's umask value, - which is beyond its control and may prevent creating a file with - the required access mode. So when the daemon context opens, the - umask is set to an explicit known value. - - If the conventional value of 0 is too open, consider setting a - value such as 0o022, 0o027, 0o077, or another specific value. - Otherwise, ensure the daemon creates every file with an - explicit access mode for the purpose. - - `pidfile` - :Default: ``None`` - - Context manager for a PID lock file. When the daemon context opens - and closes, it enters and exits the `pidfile` context manager. - - `detach_process` - :Default: ``None`` - - If ``True``, detach the process context when opening the daemon - context; if ``False``, do not detach. - - If unspecified (``None``) during initialisation of the instance, - this will be set to ``True`` by default, and ``False`` only if - detaching the process is determined to be redundant; for example, - in the case when the process was started by `init`, by `initd`, or - by `inetd`. - - `signal_map` - :Default: system-dependent - - Mapping from operating system signals to callback actions. - - The mapping is used when the daemon context opens, and determines - the action for each signal's signal handler: - - * A value of ``None`` will ignore the signal (by setting the - signal action to ``signal.SIG_IGN``). - - * A string value will be used as the name of an attribute on the - ``DaemonContext`` instance. The attribute's value will be used - as the action for the signal handler. 
- - * Any other value will be used as the action for the - signal handler. See the ``signal.signal`` documentation - for details of the signal handler interface. - - The default value depends on which signals are defined on the - running system. Each item from the list below whose signal is - actually defined in the ``signal`` module will appear in the - default map: - - * ``signal.SIGTTIN``: ``None`` - - * ``signal.SIGTTOU``: ``None`` - - * ``signal.SIGTSTP``: ``None`` - - * ``signal.SIGTERM``: ``'terminate'`` - - Depending on how the program will interact with its child - processes, it may need to specify a signal map that - includes the ``signal.SIGCHLD`` signal (received when a - child process exits). See the specific operating system's - documentation for more detail on how to determine what - circumstances dictate the need for signal handlers. - - `uid` - :Default: ``os.getuid()`` - - `gid` - :Default: ``os.getgid()`` - - The user ID (“UID”) value and group ID (“GID”) value to switch - the process to on daemon start. - - The default values, the real UID and GID of the process, will - relinquish any effective privilege elevation inherited by the - process. - - `prevent_core` - :Default: ``True`` - - If true, prevents the generation of core files, in order to avoid - leaking sensitive information from daemons run as `root`. - - `stdin` - :Default: ``None`` - - `stdout` - :Default: ``None`` - - `stderr` - :Default: ``None`` - - Each of `stdin`, `stdout`, and `stderr` is a file-like object - which will be used as the new file for the standard I/O stream - `sys.stdin`, `sys.stdout`, and `sys.stderr` respectively. The file - should therefore be open, with a minimum of mode 'r' in the case - of `stdin`, and mimimum of mode 'w+' in the case of `stdout` and - `stderr`. - - If the object has a `fileno()` method that returns a file - descriptor, the corresponding file will be excluded from being - closed during daemon start (that is, it will be treated as though - it were listed in `files_preserve`). - - If ``None``, the corresponding system stream is re-bound to the - file named by `os.devnull`. - - """ - - __metaclass__ = type - - def __init__( - self, - chroot_directory=None, - working_directory="/", - umask=0, - uid=None, - gid=None, - prevent_core=True, - detach_process=None, - files_preserve=None, - pidfile=None, - stdin=None, - stdout=None, - stderr=None, - signal_map=None, - ): - """ Set up a new instance. """ - self.chroot_directory = chroot_directory - self.working_directory = working_directory - self.umask = umask - self.prevent_core = prevent_core - self.files_preserve = files_preserve - self.pidfile = pidfile - self.stdin = stdin - self.stdout = stdout - self.stderr = stderr - - if uid is None: - uid = os.getuid() - self.uid = uid - if gid is None: - gid = os.getgid() - self.gid = gid - - if detach_process is None: - detach_process = is_detach_process_context_required() - self.detach_process = detach_process - - if signal_map is None: - signal_map = make_default_signal_map() - self.signal_map = signal_map - - self._is_open = False - - @property - def is_open(self): - """ ``True`` if the instance is currently open. """ - return self._is_open - - def open(self): - """ Become a daemon process. - - :return: ``None``. - - Open the daemon context, turning the current program into a daemon - process. This performs the following steps: - - * If this instance's `is_open` property is true, return - immediately. This makes it safe to call `open` multiple times on - an instance. 
- - * If the `prevent_core` attribute is true, set the resource limits - for the process to prevent any core dump from the process. - - * If the `chroot_directory` attribute is not ``None``, set the - effective root directory of the process to that directory (via - `os.chroot`). - - This allows running the daemon process inside a “chroot gaol” - as a means of limiting the system's exposure to rogue behaviour - by the process. Note that the specified directory needs to - already be set up for this purpose. - - * Set the process UID and GID to the `uid` and `gid` attribute - values. - - * Close all open file descriptors. This excludes those listed in - the `files_preserve` attribute, and those that correspond to the - `stdin`, `stdout`, or `stderr` attributes. - - * Change current working directory to the path specified by the - `working_directory` attribute. - - * Reset the file access creation mask to the value specified by - the `umask` attribute. - - * If the `detach_process` option is true, detach the current - process into its own process group, and disassociate from any - controlling terminal. - - * Set signal handlers as specified by the `signal_map` attribute. - - * If any of the attributes `stdin`, `stdout`, `stderr` are not - ``None``, bind the system streams `sys.stdin`, `sys.stdout`, - and/or `sys.stderr` to the files represented by the - corresponding attributes. Where the attribute has a file - descriptor, the descriptor is duplicated (instead of re-binding - the name). - - * If the `pidfile` attribute is not ``None``, enter its context - manager. - - * Mark this instance as open (for the purpose of future `open` and - `close` calls). - - * Register the `close` method to be called during Python's exit - processing. - - When the function returns, the running program is a daemon - process. - - """ - if self.is_open: - return - - if self.chroot_directory is not None: - change_root_directory(self.chroot_directory) - - if self.prevent_core: - prevent_core_dump() - - change_file_creation_mask(self.umask) - change_working_directory(self.working_directory) - change_process_owner(self.uid, self.gid) - - if self.detach_process: - detach_process_context() - - signal_handler_map = self._make_signal_handler_map() - set_signal_handlers(signal_handler_map) - - exclude_fds = self._get_exclude_file_descriptors() - close_all_open_files(exclude=exclude_fds) - - redirect_stream(sys.stdin, self.stdin) - redirect_stream(sys.stdout, self.stdout) - redirect_stream(sys.stderr, self.stderr) - - if self.pidfile is not None: - self.pidfile.__enter__() - - self._is_open = True - - register_atexit_function(self.close) - - def __enter__(self): - """ Context manager entry point. """ - self.open() - return self - - def close(self): - """ Exit the daemon process context. - - :return: ``None``. - - Close the daemon context. This performs the following steps: - - * If this instance's `is_open` property is false, return - immediately. This makes it safe to call `close` multiple times - on an instance. - - * If the `pidfile` attribute is not ``None``, exit its context - manager. - - * Mark this instance as closed (for the purpose of future `open` - and `close` calls). - - """ - if not self.is_open: - return - - if self.pidfile is not None: - # Follow the interface for telling a context manager to exit, - # . - self.pidfile.__exit__(None, None, None) - - self._is_open = False - - def __exit__(self, exc_type, exc_value, traceback): - """ Context manager exit point. 
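The context-manager protocol shown here is a thin wrapper over ``open`` and ``close``, so the two spellings in this sketch are equivalent; both are safe to repeat because of the ``is_open`` guard described above. The "daemon work" comments are placeholders::

    import daemon

    # Explicit lifecycle.
    context = daemon.DaemonContext()
    context.open()
    assert context.is_open
    # ... daemon work ...
    context.close()

    # Equivalent spelling, via the context manager protocol.
    with daemon.DaemonContext() as context:
        # ... daemon work ...
        pass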
""" - self.close() - - def terminate(self, signal_number, stack_frame): - """ Signal handler for end-process signals. - - :param signal_number: The OS signal number received. - :param stack_frame: The frame object at the point the - signal was received. - :return: ``None``. - - Signal handler for the ``signal.SIGTERM`` signal. Performs the - following step: - - * Raise a ``SystemExit`` exception explaining the signal. - - """ - exception = SystemExit( - "Terminating on signal {signal_number!r}".format( - signal_number=signal_number)) - raise exception - - def _get_exclude_file_descriptors(self): - """ Get the set of file descriptors to exclude closing. - - :return: A set containing the file descriptors for the - files to be preserved. - - The file descriptors to be preserved are those from the - items in `files_preserve`, and also each of `stdin`, - `stdout`, and `stderr`. For each item: - - * If the item is ``None``, it is omitted from the return - set. - - * If the item's ``fileno()`` method returns a value, that - value is in the return set. - - * Otherwise, the item is in the return set verbatim. - - """ - files_preserve = self.files_preserve - if files_preserve is None: - files_preserve = [] - files_preserve.extend( - item for item in [self.stdin, self.stdout, self.stderr] - if hasattr(item, 'fileno')) - - exclude_descriptors = set() - for item in files_preserve: - if item is None: - continue - file_descriptor = _get_file_descriptor(item) - if file_descriptor is not None: - exclude_descriptors.add(file_descriptor) - else: - exclude_descriptors.add(item) - - return exclude_descriptors - - def _make_signal_handler(self, target): - """ Make the signal handler for a specified target object. - - :param target: A specification of the target for the - handler; see below. - :return: The value for use by `signal.signal()`. - - If `target` is ``None``, return ``signal.SIG_IGN``. If `target` - is a text string, return the attribute of this instance named - by that string. Otherwise, return `target` itself. - - """ - if target is None: - result = signal.SIG_IGN - elif isinstance(target, unicode): - name = target - result = getattr(self, name) - else: - result = target - - return result - - def _make_signal_handler_map(self): - """ Make the map from signals to handlers for this instance. - - :return: The constructed signal map for this instance. - - Construct a map from signal numbers to handlers for this - context instance, suitable for passing to - `set_signal_handlers`. - - """ - signal_handler_map = dict( - (signal_number, self._make_signal_handler(target)) - for (signal_number, target) in self.signal_map.items()) - return signal_handler_map - - -def _get_file_descriptor(obj): - """ Get the file descriptor, if the object has one. - - :param obj: The object expected to be a file-like object. - :return: The file descriptor iff the file supports it; otherwise - ``None``. - - The object may be a non-file object. It may also be a - file-like object with no support for a file descriptor. In - either case, return ``None``. - - """ - file_descriptor = None - if hasattr(obj, 'fileno'): - try: - file_descriptor = obj.fileno() - except ValueError: - # The item doesn't support a file descriptor. - pass - - return file_descriptor - - -def change_working_directory(directory): - """ Change the working directory of this process. - - :param directory: The target directory path. - :return: ``None``. 
- - """ - try: - os.chdir(directory) - except Exception as exc: - error = DaemonOSEnvironmentError( - "Unable to change working directory ({exc})".format(exc=exc)) - raise error - - -def change_root_directory(directory): - """ Change the root directory of this process. - - :param directory: The target directory path. - :return: ``None``. - - Set the current working directory, then the process root directory, - to the specified `directory`. Requires appropriate OS privileges - for this process. - - """ - try: - os.chdir(directory) - os.chroot(directory) - except Exception as exc: - error = DaemonOSEnvironmentError( - "Unable to change root directory ({exc})".format(exc=exc)) - raise error - - -def change_file_creation_mask(mask): - """ Change the file creation mask for this process. - - :param mask: The numeric file creation mask to set. - :return: ``None``. - - """ - try: - os.umask(mask) - except Exception as exc: - error = DaemonOSEnvironmentError( - "Unable to change file creation mask ({exc})".format(exc=exc)) - raise error - - -def change_process_owner(uid, gid): - """ Change the owning UID and GID of this process. - - :param uid: The target UID for the daemon process. - :param gid: The target GID for the daemon process. - :return: ``None``. - - Set the GID then the UID of the process (in that order, to avoid - permission errors) to the specified `gid` and `uid` values. - Requires appropriate OS privileges for this process. - - """ - try: - os.setgid(gid) - os.setuid(uid) - except Exception as exc: - error = DaemonOSEnvironmentError( - "Unable to change process owner ({exc})".format(exc=exc)) - raise error - - -def prevent_core_dump(): - """ Prevent this process from generating a core dump. - - :return: ``None``. - - Set the soft and hard limits for core dump size to zero. On Unix, - this entirely prevents the process from creating core dump. - - """ - core_resource = resource.RLIMIT_CORE - - try: - # Ensure the resource limit exists on this platform, by requesting - # its current value. - core_limit_prev = resource.getrlimit(core_resource) - except ValueError as exc: - error = DaemonOSEnvironmentError( - "System does not support RLIMIT_CORE resource limit" - " ({exc})".format(exc=exc)) - raise error - - # Set hard and soft limits to zero, i.e. no core dump at all. - core_limit = (0, 0) - resource.setrlimit(core_resource, core_limit) - - -def detach_process_context(): - """ Detach the process context from parent and session. - - :return: ``None``. - - Detach from the parent process and session group, allowing the - parent to exit while this process continues running. - - Reference: “Advanced Programming in the Unix Environment”, - section 13.3, by W. Richard Stevens, published 1993 by - Addison-Wesley. - - """ - - def fork_then_exit_parent(error_message): - """ Fork a child process, then exit the parent process. - - :param error_message: Message for the exception in case of a - detach failure. - :return: ``None``. - :raise DaemonProcessDetachError: If the fork fails. - - """ - try: - pid = os.fork() - if pid > 0: - os._exit(0) - except OSError as exc: - error = DaemonProcessDetachError( - "{message}: [{exc.errno:d}] {exc.strerror}".format( - message=error_message, exc=exc)) - raise error - - fork_then_exit_parent(error_message="Failed first fork") - os.setsid() - fork_then_exit_parent(error_message="Failed second fork") - - -def is_process_started_by_init(): - """ Determine whether the current process is started by `init`. 
- - :return: ``True`` iff the parent process is `init`; otherwise - ``False``. - - The `init` process is the one with process ID of 1. - - """ - result = False - - init_pid = 1 - if os.getppid() == init_pid: - result = True - - return result - - -def is_socket(fd): - """ Determine whether the file descriptor is a socket. - - :param fd: The file descriptor to interrogate. - :return: ``True`` iff the file descriptor is a socket; otherwise - ``False``. - - Query the socket type of `fd`. If there is no error, the file is a - socket. - - """ - result = False - - file_socket = socket.fromfd(fd, socket.AF_INET, socket.SOCK_RAW) - - try: - socket_type = file_socket.getsockopt( - socket.SOL_SOCKET, socket.SO_TYPE) - except socket.error as exc: - exc_errno = exc.args[0] - if exc_errno == errno.ENOTSOCK: - # Socket operation on non-socket. - pass - else: - # Some other socket error. - result = True - else: - # No error getting socket type. - result = True - - return result - - -def is_process_started_by_superserver(): - """ Determine whether the current process is started by the superserver. - - :return: ``True`` if this process was started by the internet - superserver; otherwise ``False``. - - The internet superserver creates a network socket, and - attaches it to the standard streams of the child process. If - that is the case for this process, return ``True``, otherwise - ``False``. - - """ - result = False - - stdin_fd = sys.__stdin__.fileno() - if is_socket(stdin_fd): - result = True - - return result - - -def is_detach_process_context_required(): - """ Determine whether detaching the process context is required. - - :return: ``True`` iff the process is already detached; otherwise - ``False``. - - The process environment is interrogated for the following: - - * Process was started by `init`; or - - * Process was started by `inetd`. - - If any of the above are true, the process is deemed to be already - detached. - - """ - result = True - if is_process_started_by_init() or is_process_started_by_superserver(): - result = False - - return result - - -def close_file_descriptor_if_open(fd): - """ Close a file descriptor if already open. - - :param fd: The file descriptor to close. - :return: ``None``. - - Close the file descriptor `fd`, suppressing an error in the - case the file was not open. - - """ - try: - os.close(fd) - except EnvironmentError as exc: - if exc.errno == errno.EBADF: - # File descriptor was not open. - pass - else: - error = DaemonOSEnvironmentError( - "Failed to close file descriptor {fd:d} ({exc})".format( - fd=fd, exc=exc)) - raise error - - -MAXFD = 2048 - -def get_maximum_file_descriptors(): - """ Get the maximum number of open file descriptors for this process. - - :return: The number (integer) to use as the maximum number of open - files for this process. - - The maximum is the process hard resource limit of maximum number of - open file descriptors. If the limit is “infinity”, a default value - of ``MAXFD`` is returned. - - """ - limits = resource.getrlimit(resource.RLIMIT_NOFILE) - result = limits[1] - if result == resource.RLIM_INFINITY: - result = MAXFD - return result - - -def close_all_open_files(exclude=set()): - """ Close all open file descriptors. - - :param exclude: Collection of file descriptors to skip when closing - files. - :return: ``None``. - - Closes every file descriptor (if open) of this process. If - specified, `exclude` is a set of file descriptors to *not* - close. 
- - """ - maxfd = get_maximum_file_descriptors() - for fd in reversed(range(maxfd)): - if fd not in exclude: - close_file_descriptor_if_open(fd) - - -def redirect_stream(system_stream, target_stream): - """ Redirect a system stream to a specified file. - - :param standard_stream: A file object representing a standard I/O - stream. - :param target_stream: The target file object for the redirected - stream, or ``None`` to specify the null device. - :return: ``None``. - - `system_stream` is a standard system stream such as - ``sys.stdout``. `target_stream` is an open file object that - should replace the corresponding system stream object. - - If `target_stream` is ``None``, defaults to opening the - operating system's null device and using its file descriptor. - - """ - if target_stream is None: - target_fd = os.open(os.devnull, os.O_RDWR) - else: - target_fd = target_stream.fileno() - os.dup2(target_fd, system_stream.fileno()) - - -def make_default_signal_map(): - """ Make the default signal map for this system. - - :return: A mapping from signal number to handler object. - - The signals available differ by system. The map will not contain - any signals not defined on the running system. - - """ - name_map = { - 'SIGTSTP': None, - 'SIGTTIN': None, - 'SIGTTOU': None, - 'SIGTERM': 'terminate', - } - signal_map = dict( - (getattr(signal, name), target) - for (name, target) in name_map.items() - if hasattr(signal, name)) - - return signal_map - - -def set_signal_handlers(signal_handler_map): - """ Set the signal handlers as specified. - - :param signal_handler_map: A map from signal number to handler - object. - :return: ``None``. - - See the `signal` module for details on signal numbers and signal - handlers. - - """ - for (signal_number, handler) in signal_handler_map.items(): - signal.signal(signal_number, handler) - - -def register_atexit_function(func): - """ Register a function for processing at program exit. - - :param func: A callable function expecting no arguments. - :return: ``None``. - - The function `func` is registered for a call with no arguments - at program exit. - - """ - atexit.register(func) - - -def _chain_exception_from_existing_exception_context(exc, as_cause=False): - """ Decorate the specified exception with the existing exception context. - - :param exc: The exception instance to decorate. - :param as_cause: If true, the existing context is declared to be - the cause of the exception. - :return: ``None``. - - :PEP:`344` describes syntax and attributes (`__traceback__`, - `__context__`, `__cause__`) for use in exception chaining. - - Python 2 does not have that syntax, so this function decorates - the exception with values from the current exception context. - - """ - (existing_exc_type, existing_exc, existing_traceback) = sys.exc_info() - if as_cause: - exc.__cause__ = existing_exc - else: - exc.__context__ = existing_exc - exc.__traceback__ = existing_traceback - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/pidfile.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/pidfile.py deleted file mode 100755 index 4517ee0e..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/pidfile.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- - -# daemon/pidfile.py -# Part of ‘python-daemon’, an implementation of PEP 3143. 
-# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Lockfile behaviour implemented via Unix PID files. - """ - -from __future__ import (absolute_import, unicode_literals) - -from lockfile.pidlockfile import PIDLockFile - - -class TimeoutPIDLockFile(PIDLockFile, object): - """ Lockfile with default timeout, implemented as a Unix PID file. - - This uses the ``PIDLockFile`` implementation, with the - following changes: - - * The `acquire_timeout` parameter to the initialiser will be - used as the default `timeout` parameter for the `acquire` - method. - - """ - - def __init__(self, path, acquire_timeout=None, *args, **kwargs): - """ Set up the parameters of a TimeoutPIDLockFile. - - :param path: Filesystem path to the PID file. - :param acquire_timeout: Value to use by default for the - `acquire` call. - :return: ``None``. - - """ - self.acquire_timeout = acquire_timeout - super(TimeoutPIDLockFile, self).__init__(path, *args, **kwargs) - - def acquire(self, timeout=None, *args, **kwargs): - """ Acquire the lock. - - :param timeout: Specifies the timeout; see below for valid - values. - :return: ``None``. - - The `timeout` defaults to the value set during - initialisation with the `acquire_timeout` parameter. It is - passed to `PIDLockFile.acquire`; see that method for - details. - - """ - if timeout is None: - timeout = self.acquire_timeout - super(TimeoutPIDLockFile, self).acquire(timeout, *args, **kwargs) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/runner.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/runner.py deleted file mode 100755 index 6973cf1c..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/runner.py +++ /dev/null @@ -1,324 +0,0 @@ -# -*- coding: utf-8 -*- - -# daemon/runner.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2009–2015 Ben Finney -# Copyright © 2007–2008 Robert Niederreiter, Jens Klein -# Copyright © 2003 Clark Evans -# Copyright © 2002 Noah Spurrier -# Copyright © 2001 Jürgen Hermann -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Daemon runner library. - """ - -from __future__ import (absolute_import, unicode_literals) - -import sys -import os -import signal -import errno -try: - # Python 3 standard library. - ProcessLookupError -except NameError: - # No such class in Python 2. - ProcessLookupError = NotImplemented - -import lockfile - -from . import pidfile -from .daemon import (basestring, unicode) -from .daemon import DaemonContext -from .daemon import _chain_exception_from_existing_exception_context - - -class DaemonRunnerError(Exception): - """ Abstract base class for errors from DaemonRunner. 
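The lock file class above is what the ``pidfile`` option of ``DaemonContext`` expects: a context manager entered on ``open`` and exited on ``close``. A sketch, with a placeholder path::

    import daemon
    from daemon.pidfile import TimeoutPIDLockFile

    pidfile = TimeoutPIDLockFile(
            "/var/run/spam.pid",                # placeholder path
            acquire_timeout=5)

    with daemon.DaemonContext(pidfile=pidfile):
        pass                                    # ... daemon work ...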
""" - - def __init__(self, *args, **kwargs): - self._chain_from_context() - - super(DaemonRunnerError, self).__init__(*args, **kwargs) - - def _chain_from_context(self): - _chain_exception_from_existing_exception_context(self, as_cause=True) - - -class DaemonRunnerInvalidActionError(DaemonRunnerError, ValueError): - """ Raised when specified action for DaemonRunner is invalid. """ - - def _chain_from_context(self): - # This exception is normally not caused by another. - _chain_exception_from_existing_exception_context(self, as_cause=False) - - -class DaemonRunnerStartFailureError(DaemonRunnerError, RuntimeError): - """ Raised when failure starting DaemonRunner. """ - - -class DaemonRunnerStopFailureError(DaemonRunnerError, RuntimeError): - """ Raised when failure stopping DaemonRunner. """ - - -class DaemonRunner: - """ Controller for a callable running in a separate background process. - - The first command-line argument is the action to take: - - * 'start': Become a daemon and call `app.run()`. - * 'stop': Exit the daemon process specified in the PID file. - * 'restart': Stop, then start. - - """ - - __metaclass__ = type - - start_message = "started with pid {pid:d}" - - def __init__(self, app): - """ Set up the parameters of a new runner. - - :param app: The application instance; see below. - :return: ``None``. - - The `app` argument must have the following attributes: - - * `stdin_path`, `stdout_path`, `stderr_path`: Filesystem paths - to open and replace the existing `sys.stdin`, `sys.stdout`, - `sys.stderr`. - - * `pidfile_path`: Absolute filesystem path to a file that will - be used as the PID file for the daemon. If ``None``, no PID - file will be used. - - * `pidfile_timeout`: Used as the default acquisition timeout - value supplied to the runner's PID lock file. - - * `run`: Callable that will be invoked when the daemon is - started. - - """ - self.parse_args() - self.app = app - self.daemon_context = DaemonContext() - self.daemon_context.stdin = open(app.stdin_path, 'rt') - self.daemon_context.stdout = open(app.stdout_path, 'w+t') - self.daemon_context.stderr = open( - app.stderr_path, 'w+t', buffering=0) - - self.pidfile = None - if app.pidfile_path is not None: - self.pidfile = make_pidlockfile( - app.pidfile_path, app.pidfile_timeout) - self.daemon_context.pidfile = self.pidfile - - def _usage_exit(self, argv): - """ Emit a usage message, then exit. - - :param argv: The command-line arguments used to invoke the - program, as a sequence of strings. - :return: ``None``. - - """ - progname = os.path.basename(argv[0]) - usage_exit_code = 2 - action_usage = "|".join(self.action_funcs.keys()) - message = "usage: {progname} {usage}".format( - progname=progname, usage=action_usage) - emit_message(message) - sys.exit(usage_exit_code) - - def parse_args(self, argv=None): - """ Parse command-line arguments. - - :param argv: The command-line arguments used to invoke the - program, as a sequence of strings. - - :return: ``None``. - - The parser expects the first argument as the program name, the - second argument as the action to perform. - - If the parser fails to parse the arguments, emit a usage - message and exit the program. - - """ - if argv is None: - argv = sys.argv - - min_args = 2 - if len(argv) < min_args: - self._usage_exit(argv) - - self.action = unicode(argv[1]) - if self.action not in self.action_funcs: - self._usage_exit(argv) - - def _start(self): - """ Open the daemon context and run the application. - - :return: ``None``. 
- :raises DaemonRunnerStartFailureError: If the PID file cannot - be locked by this process. - - """ - if is_pidfile_stale(self.pidfile): - self.pidfile.break_lock() - - try: - self.daemon_context.open() - except lockfile.AlreadyLocked: - error = DaemonRunnerStartFailureError( - "PID file {pidfile.path!r} already locked".format( - pidfile=self.pidfile)) - raise error - - pid = os.getpid() - message = self.start_message.format(pid=pid) - emit_message(message) - - self.app.run() - - def _terminate_daemon_process(self): - """ Terminate the daemon process specified in the current PID file. - - :return: ``None``. - :raises DaemonRunnerStopFailureError: If terminating the daemon - fails with an OS error. - - """ - pid = self.pidfile.read_pid() - try: - os.kill(pid, signal.SIGTERM) - except OSError as exc: - error = DaemonRunnerStopFailureError( - "Failed to terminate {pid:d}: {exc}".format( - pid=pid, exc=exc)) - raise error - - def _stop(self): - """ Exit the daemon process specified in the current PID file. - - :return: ``None``. - :raises DaemonRunnerStopFailureError: If the PID file is not - already locked. - - """ - if not self.pidfile.is_locked(): - error = DaemonRunnerStopFailureError( - "PID file {pidfile.path!r} not locked".format( - pidfile=self.pidfile)) - raise error - - if is_pidfile_stale(self.pidfile): - self.pidfile.break_lock() - else: - self._terminate_daemon_process() - - def _restart(self): - """ Stop, then start. - """ - self._stop() - self._start() - - action_funcs = { - 'start': _start, - 'stop': _stop, - 'restart': _restart, - } - - def _get_action_func(self): - """ Get the function for the specified action. - - :return: The function object corresponding to the specified - action. - :raises DaemonRunnerInvalidActionError: if the action is - unknown. - - The action is specified by the `action` attribute, which is set - during `parse_args`. - - """ - try: - func = self.action_funcs[self.action] - except KeyError: - error = DaemonRunnerInvalidActionError( - "Unknown action: {action!r}".format( - action=self.action)) - raise error - return func - - def do_action(self): - """ Perform the requested action. - - :return: ``None``. - - The action is specified by the `action` attribute, which is set - during `parse_args`. - - """ - func = self._get_action_func() - func(self) - - -def emit_message(message, stream=None): - """ Emit a message to the specified stream (default `sys.stderr`). """ - if stream is None: - stream = sys.stderr - stream.write("{message}\n".format(message=message)) - stream.flush() - - -def make_pidlockfile(path, acquire_timeout): - """ Make a PIDLockFile instance with the given filesystem path. """ - if not isinstance(path, basestring): - error = ValueError("Not a filesystem path: {path!r}".format( - path=path)) - raise error - if not os.path.isabs(path): - error = ValueError("Not an absolute path: {path!r}".format( - path=path)) - raise error - lockfile = pidfile.TimeoutPIDLockFile(path, acquire_timeout) - - return lockfile - - -def is_pidfile_stale(pidfile): - """ Determine whether a PID file is stale. - - :return: ``True`` iff the PID file is stale; otherwise ``False``. - - The PID file is “stale” if its contents are valid but do not - match the PID of a currently-running process. - - """ - result = False - - pidfile_pid = pidfile.read_pid() - if pidfile_pid is not None: - try: - os.kill(pidfile_pid, signal.SIG_DFL) - except ProcessLookupError: - # The specified PID does not exist. 
- result = True - except OSError as exc: - if exc.errno == errno.ESRCH: - # Under Python 2, process lookup error is an OSError. - # The specified PID does not exist. - result = True - - return result - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/CREDITS b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/CREDITS deleted file mode 100755 index feb65d5e..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/CREDITS +++ /dev/null @@ -1,53 +0,0 @@ -Credits for contributors to ‘python-daemon’ -########################################### - -:Updated: 2014-12-23 - -The ‘python-daemon’ library is the work of many contributors. - - -Primary developers -================== - -The library has been maintained over the years by: - -* Ben Finney -* Robert Niederreiter -* Jens Klein - - -Precursors -========== - -The library code base is inherited from prior work by: - -* Chad J. Schroeder -* Clark Evans -* Noah Spurrier -* Jürgen Hermann - - -Additional contributors -======================= - -People who have also contributed substantial improvements: - - - -.. - This is free software: you may copy, modify, and/or distribute this work - under the terms of the Apache License version 2.0 as published by the - Apache Software Foundation. - No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -.. - Local variables: - coding: utf-8 - mode: text - mode: rst - time-stamp-format: "%:y-%02m-%02d" - time-stamp-start: "^:Updated:[ ]+" - time-stamp-end: "$" - time-stamp-line-limit: 20 - End: - vim: fileencoding=utf-8 filetype=rst : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/FAQ b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/FAQ deleted file mode 100755 index 1fcc4658..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/FAQ +++ /dev/null @@ -1,156 +0,0 @@ -‘python-daemon’ Frequently Asked Questions -########################################## - -:Author: Ben Finney -:Updated: 2015-01-10 - -.. contents:: -.. - 1 General - 1.1 What is the purpose of the ‘python-daemon’ library? - 1.2 How can I run a service communicating with a separate daemon process? - 2 Security - 2.1 Why is the umask set to 0 by default? - 3 File descriptors - 3.1 Why does the output stop after opening the daemon context? - 3.2 How can I preserve a ‘logging’ handler's file descriptor? - -General -======= - -What is the purpose of the ‘python-daemon’ library? ---------------------------------------------------- - -The ‘python-daemon’ library has a deliberately narrow focus: that of -being a reference implementation for `PEP 3143`_, “Standard daemon -process library”. - -.. _`PEP 3143`: http://www.python.org/dev/peps/pep-3143 - -How can I run a service communicating with a separate daemon process? ---------------------------------------------------------------------- - -As specified in `PEP 3143`_, the ‘python-daemon’ library is -specifically focussed on the goal of having the *current running -program* become a well-behaved Unix daemon process. This leaves open -the question of how this program is started, or about multiple -programs interacting. As detailed in PEP 3143: - - A daemon is not a service - - There is a related concept in many systems, called a “service”. 
A - service differs from the model in this PEP, in that rather than - having the *current* program continue to run as a daemon process, - a service starts an *additional* process to run in the background, - and the current process communicates with that additional process - via some defined channels. - - The Unix-style daemon model in this PEP can be used, among other - things, to implement the background-process part of a service; but - this PEP does not address the other aspects of setting up and - managing a service. - -A possible starting point for such a “service” model of execution is -in a `message from 2009-01-30`_ to the ``python-ideas`` forum. - -.. _`message from 2009-01-30`: http://mail.python.org/pipermail/python-ideas/2009-January/002606.html - - -Security -======== - -Why is the umask set to 0 by default? -------------------------------------- - -A daemon should not rely on the parent process's umask value, which is -beyond its control and may prevent creating a file with the required -access mode. So when the daemon context opens, the umask is set to an -explicit known value. - -If the conventional value of 0 is too open, consider setting a value -such as 0o022, 0o027, 0o077, or another specific value. Otherwise, -ensure the daemon creates every file with an explicit access mode for -the purpose. - - -File descriptors -================ - -Why does the output stop after opening the daemon context? ----------------------------------------------------------- - -The specified behaviour in `PEP 3143`_ includes the requirement to -detach the process from the controlling terminal (to allow the process -to continue to run as a daemon), and to close all file descriptors not -known to be safe once detached (to ensure any files that continue to -be used are under the control of the daemon process). - -If you want the process to generate output via the system streams -‘sys.stdout’ and ‘sys.stderr’, set the ‘DaemonContext’'s ‘stdout’ -and/or ‘stderr’ options to a file-like object (e.g. the ‘stream’ -attribute of a ‘logging.Handler’ instance). If these objects have file -descriptors, they will be preserved when the daemon context opens. - -How can I preserve a ‘logging’ handler's file descriptor? ---------------------------------------------------------- - -The ‘DaemonContext.open’ method conforms to `PEP 3143`_ by closing all -open file descriptors, but excluding those files specified in the -‘files_preserve’ option. This option is a list of files or file -descriptors. - -The Python standard library ‘logging’ module provides log handlers -that write to streams, including to files via the ‘StreamHandler’ -class and its sub-classes. The documentation (both the online `logging -module documentation`_ and the docstrings for the code) makes no -mention of a way to get at the stream associated with a handler -object. - -However, looking at the source code for ‘StreamHandler’, in Python 2.5 -as ``/usr/lib/python2.5/logging/__init__.py``, shows a ‘stream’ -attribute that is bound to the stream object. The attribute is not -marked private (i.e. it is not named with a leading underscore), so we -can presume it is part of the public API. - -That attribute can then be used to specify that a logging handler's -file descriptor should, when the ‘DaemonContext’ opens, be excluded -from closure:: - - import logging - import daemon - - # any subclass of StreamHandler should provide the ‘stream’ attribute. 
- lh = logging.handlers.TimedRotatingFileHandler( - "/var/log/foo.log", - # … - ) - - # … do some logging and other activity … - - daemon_context = daemon.DaemonContext() - daemon_context.files_preserve = [lh.stream] - - daemon_context.open() - - # … continue as a daemon process … - -.. _`logging module documentation`: http://docs.python.org/library/logging - - -.. - This is free software: you may copy, modify, and/or distribute this work - under the terms of the Apache License version 2.0 as published by the - Apache Software Foundation. - No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -.. - Local variables: - coding: utf-8 - mode: text - mode: rst - time-stamp-format: "%:y-%02m-%02d" - time-stamp-start: "^:Updated:[ ]+" - time-stamp-end: "$" - time-stamp-line-limit: 20 - End: - vim: fileencoding=utf-8 filetype=rst : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/TODO b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/TODO deleted file mode 100755 index 81b41481..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/TODO +++ /dev/null @@ -1,95 +0,0 @@ -TODO for ‘python-daemon’ library -################################ - -:Updated: 2015-01-10 - -======= -PENDING -======= - -Tests -===== - -Libraries -========= - -* Evaluate switching to ‘flufl.lock’ library for PID lockfile behaviour - _. - -Features -======== - -Important ---------- - -Wishlist --------- - -* Allow specification of a syslog service name to log as (default: - output to stdout and stderr, not syslog). - -Documentation -============= - -Standard library inclusion -========================== - - -==== -DONE -==== - -* Convert to Python 2 and Python 3 compatible code base. - -* Work correctly with current ‘lockfile’ library (0.10 or later). - -* Write full unit tests for every new or changed behaviour at time of - commit. - -* Detect whether started by another process that handles - daemonisation, such as ‘inetd’, and behave appropriately. - -* Detach to new process and session group. - -* Allow specification of working directory (default: '/'). - -* Allow specification of umask (default: 0o000). - -* Drop ‘suid’ and ‘sgid’ privileges if set. - -* Close all open file handles. - -* Re-open stdin, stdout, stderr to user-specified files. - -* Default re-open stdin, stdout, stderr to ‘/dev/null’. - -* Allow specification of a non-root user and group to drop to, if - started as ‘root’ (default: no change of user or group). - -* Implement context manager protocol for daemon context. - -* Allow specification of PID file with its own context manager - (default: no PID file). - -* Full docstrings for functions, classes, and modules. - -* PEP 3143 for adding this library to the Python standard library. - - -.. - This is free software: you may copy, modify, and/or distribute this work - under the terms of the Apache License version 2.0 as published by the - Apache Software Foundation. - No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -.. 
- Local variables: - coding: utf-8 - mode: text - mode: rst - time-stamp-format: "%:y-%02m-%02d" - time-stamp-start: "^:Updated:[ ]+" - time-stamp-end: "$" - time-stamp-line-limit: 20 - End: - vim: fileencoding=utf-8 filetype=rst : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/hacking.txt b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/hacking.txt deleted file mode 100755 index 9484dbd0..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/hacking.txt +++ /dev/null @@ -1,180 +0,0 @@ -Developer's guide -################# - -:Author: Ben Finney -:Updated: 2014-11-28 - - -Project layout -============== - -:: - - ./ Top level of source tree - doc/ Project documentation - bin/ Executable programs - daemon/ Main ‘daemon’ library - test/ Unit tests - - -Code style -========== - -Python ------- - -All Python code should conform to the guidelines in PEP8_. In -particular: - -* Indent each level using 4 spaces (``U+0020 SPACE``), and no TABs - (``U+0008 CHARACTER TABULATION``). - -* Name modules in lower case, ``multiplewordslikethis``. - -* Name classes in title case, ``MultipleWordsLikeThis``. - -* Name functions, instances and other variables in lower case, - ``multiple_words_like_this``. - -* Every module, class, and function has a Python doc string explaining - its purpose and API. - - *Exception*: Functions whose purpose and API are mandated by Python - itself (dunder-named methods) do not need a doc string. - -* Doc strings are written as triple-quoted strings. - - * The text of the doc string is marked up with reStructuredText. - - * The first line is a one-line synopsis of the object. This summary - line appears on the same line as the opening triple-quote, - separated by a single space. - - * Further lines, if needed, are separated from the first by one - blank line. - - * The synopsis is separated by one space from the opening - triple-quote; this causes it to appear four columns past the - beginning of the line. All subsequent lines are indented at least - four columns also. - - * The synopsis is followed by a reStructuredText field list. The - field names are: “param foo” for each parameter (where “foo” is - the parameter name), and “return” for the return value. The field - values describe the purpose of each. - - * The closing triple-quote appears on a separate line. - - Example:: - - def frobnicate(spam, algorithm="dv"): - """ Perform frobnication on ``spam``. - - :param spam: A travortionate (as a sequence of strings). - :param algorithm: The name of the algorithm to use for - frobnicating the travortionate. - :return: The frobnicated travortionate, if it is - non-empty; otherwise None. - - The frobnication is done by the Dietzel-Venkman algorithm, - and optimises for the case where ``spam`` is freebled and - agglutinative. - - """ - spagnify(spam) - # … - -* All ``import`` statements appear at the top of the module. - -* Each ``import`` statement imports a single module, or multiple names - from a single module. - - Example:: - - import sys - import os - from spam import foo, bar, baz - -.. _PEP8: http://www.python.org/dev/peps/pep-0008/ - -Additional style guidelines: - -* All text files (including program code) are encoded in UTF-8. - -* A page break (``U+000C FORM FEED``) whitespace character is used - within a module to break up semantically separate areas of the - module. 
- -* Editor hints for Emacs and Vim appear in a comment block at the - file's end:: - - - # Local variables: - # coding: utf-8 - # mode: python - # End: - # vim: fileencoding=utf-8 filetype=python : - - -Unit tests -========== - -All code should aim for 100% coverage by unit tests. New code, or -changes to existing code, will only be considered for inclusion in the -development tree when accompanied by corresponding additions or -changes to the unit tests. - -Test-driven development ------------------------ - -Where possible, practice test-driven development to implement program -code. - -* During a development session, maintain a separate window or terminal - with the unit test suite for the project running continuously, or - automatically every few seconds. - -* Any time a test is failing, the only valid change is to make all - tests pass. - -* Develop new interface features (changes to the program unit's - behaviour) only when all current tests pass. - -* Refactor as needed, but only when all tests pass. - - * Refactoring is any change to the code which does not alter its - interface or expected behaviour, such as performance - optimisations, readability improvements, modularisation - improvements etc. - -* Develop new or changed program behaviour by: - - * *First* write a single, specific test case for that new behaviour, - then watch the test fail in the absence of the desired behaviour. - - * Implement the minimum necessary change to satisfy the failing - test. Continue until all tests pass again, then stop making - functional changes. - - * Once all tests (including the new test) pass, consider refactoring - the code and the tests immediately, then ensure all the tests pass - again after any changes. - - * Iterate for each incremental change in interface or behaviour. - -Test-driven development is not absolutely necessary, but is the -simplest, most direct way to generate the kind of program changes -accompanied by unit tests that are necessary for inclusion in the -project. - - -.. - Local variables: - coding: utf-8 - mode: rst - time-stamp-format: "%:y-%02m-%02d" - time-stamp-start: "^:Updated:[ ]+" - time-stamp-end: "$" - time-stamp-line-limit: 20 - End: - vim: fileencoding=utf-8 filetype=rst : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/PKG-INFO b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/PKG-INFO deleted file mode 100755 index fd81f509..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/PKG-INFO +++ /dev/null @@ -1,38 +0,0 @@ -Metadata-Version: 1.1 -Name: python-daemon -Version: 2.0.5 -Summary: Library to implement a well-behaved Unix daemon process. -Home-page: https://alioth.debian.org/projects/python-daemon/ -Author: Ben Finney -Author-email: ben+python@benfinney.id.au -License: Apache-2 -Description: This library implements the well-behaved daemon specification of - :pep:`3143`, “Standard daemon process library”. - - A well-behaved Unix daemon process is tricky to get right, but the - required steps are much the same for every daemon program. A - `DaemonContext` instance holds the behaviour and configured - process environment for the program; use the instance as a context - manager to enter a daemon state. 
- - Simple example of usage:: - - import daemon - - from spam import do_main_program - - with daemon.DaemonContext(): - do_main_program() - - Customisation of the steps to become a daemon is available by - setting options on the `DaemonContext` instance; see the - documentation for that class for each option. -Keywords: daemon,fork,unix -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: POSIX -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Intended Audience :: Developers -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/SOURCES.txt b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/SOURCES.txt deleted file mode 100755 index 6e176719..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/SOURCES.txt +++ /dev/null @@ -1,30 +0,0 @@ -ChangeLog -LICENSE.ASF-2 -LICENSE.GPL-3 -MANIFEST.in -setup.cfg -setup.py -test_version.py -version.py -daemon/__init__.py -daemon/_metadata.py -daemon/daemon.py -daemon/pidfile.py -daemon/runner.py -doc/CREDITS -doc/FAQ -doc/TODO -doc/hacking.txt -python_daemon.egg-info/PKG-INFO -python_daemon.egg-info/SOURCES.txt -python_daemon.egg-info/dependency_links.txt -python_daemon.egg-info/not-zip-safe -python_daemon.egg-info/requires.txt -python_daemon.egg-info/top_level.txt -python_daemon.egg-info/version_info.json -test/__init__.py -test/scaffold.py -test/test_daemon.py -test/test_metadata.py -test/test_pidfile.py -test/test_runner.py \ No newline at end of file diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/dependency_links.txt b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/dependency_links.txt deleted file mode 100755 index 8b137891..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/not-zip-safe b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/not-zip-safe deleted file mode 100755 index 8b137891..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/requires.txt b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/requires.txt deleted file mode 100755 index d1496b02..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/requires.txt +++ /dev/null @@ -1,3 +0,0 @@ -setuptools -docutils -lockfile >=0.10 diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/top_level.txt b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/top_level.txt deleted file mode 100755 index 28e3ee0c..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -daemon diff --git 
a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/version_info.json b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/version_info.json deleted file mode 100755 index bac1b84f..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/version_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "release_date": "2015-02-02", - "version": "2.0.5", - "maintainer": "Ben Finney ", - "body": "* Refine compatibility of exceptions for file operations.\n* Specify the text encoding when opening the changelog file.\n" -} \ No newline at end of file diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.cfg b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.cfg deleted file mode 100755 index 9d3d2c02..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.cfg +++ /dev/null @@ -1,11 +0,0 @@ -[aliases] -distribute = register sdist bdist_wheel upload - -[bdist_wheel] -universal = true - -[egg_info] -tag_svn_revision = 0 -tag_date = 0 -tag_build = - diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.py deleted file mode 100755 index 16a6a6a6..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- - -# setup.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# Copyright © 2008 Robert Niederreiter, Jens Klein -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; version 3 of that license or any later version. -# No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. - -""" Distribution setup for ‘python-daemon’ library. """ - -from __future__ import (absolute_import, unicode_literals) - -import sys -import os -import os.path -import pydoc -import distutils.util - -from setuptools import (setup, find_packages) - -import version - - -fromlist_expects_type = str -if sys.version_info < (3, 0): - fromlist_expects_type = bytes - - -main_module_name = 'daemon' -main_module_fromlist = list(map(fromlist_expects_type, [ - '_metadata'])) -main_module = __import__( - main_module_name, - level=0, fromlist=main_module_fromlist) -metadata = main_module._metadata - -(synopsis, long_description) = pydoc.splitdoc(pydoc.getdoc(main_module)) - -version_info = metadata.get_distribution_version_info() -version_string = version_info['version'] - -(maintainer_name, maintainer_email) = metadata.parse_person_field( - version_info['maintainer']) - - -setup( - name=metadata.distribution_name, - version=version_string, - packages=find_packages(exclude=["test"]), - cmdclass={ - "write_version_info": version.WriteVersionInfoCommand, - "egg_info": version.EggInfoCommand, - }, - - # Setuptools metadata. - maintainer=maintainer_name, - maintainer_email=maintainer_email, - zip_safe=False, - setup_requires=[ - "docutils", - ], - test_suite="unittest2.collector", - tests_require=[ - "unittest2 >=0.6", - "testtools", - "testscenarios >=0.4", - "mock >=1.0", - "docutils", - ], - install_requires=[ - "setuptools", - "docutils", - "lockfile >=0.10", - ], - - # PyPI metadata. 
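-        # (Most of these values are taken from the ‘daemon._metadata’ module
-        # bound to ‘metadata’ above; ‘description’ and ‘long_description’ come
-        # from the package docstring via ‘pydoc.splitdoc’.)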
- author=metadata.author_name, - author_email=metadata.author_email, - description=synopsis, - license=metadata.license, - keywords="daemon fork unix".split(), - url=metadata.url, - long_description=long_description, - classifiers=[ - # Reference: http://pypi.python.org/pypi?%3Aaction=list_classifiers - "Development Status :: 4 - Beta", - "License :: OSI Approved :: Apache Software License", - "Operating System :: POSIX", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Intended Audience :: Developers", - "Topic :: Software Development :: Libraries :: Python Modules", - ], - ) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/__init__.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/__init__.py deleted file mode 100755 index 398519f1..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test/__init__.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Unit test suite for ‘daemon’ package. - """ - -from __future__ import (absolute_import, unicode_literals) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/scaffold.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/scaffold.py deleted file mode 100755 index 9a4f1150..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/scaffold.py +++ /dev/null @@ -1,322 +0,0 @@ -# -*- coding: utf-8 -*- - -# test/scaffold.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2007–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Scaffolding for unit test modules. - """ - -from __future__ import (absolute_import, unicode_literals) - -import unittest -import doctest -import logging -import os -import sys -import operator -import textwrap -from copy import deepcopy -import functools - -try: - # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text). - basestring = basestring - unicode = unicode -except NameError: - # Python 3 names the Unicode data type ‘str’. - basestring = str - unicode = str - -import testscenarios -import testtools.testcase - - -test_dir = os.path.dirname(os.path.abspath(__file__)) -parent_dir = os.path.dirname(test_dir) -if not test_dir in sys.path: - sys.path.insert(1, test_dir) -if not parent_dir in sys.path: - sys.path.insert(1, parent_dir) - -# Disable all but the most critical logging messages. -logging.disable(logging.CRITICAL) - - -def get_function_signature(func): - """ Get the function signature as a mapping of attributes. - - :param func: The function object to interrogate. - :return: A mapping of the components of a function signature. 
- - The signature is constructed as a mapping: - - * 'name': The function's defined name. - * 'arg_count': The number of arguments expected by the function. - * 'arg_names': A sequence of the argument names, as strings. - * 'arg_defaults': A sequence of the default values for the arguments. - * 'va_args': The name bound to remaining positional arguments. - * 'va_kw_args': The name bound to remaining keyword arguments. - - """ - try: - # Python 3 function attributes. - func_code = func.__code__ - func_defaults = func.__defaults__ - except AttributeError: - # Python 2 function attributes. - func_code = func.func_code - func_defaults = func.func_defaults - - arg_count = func_code.co_argcount - arg_names = func_code.co_varnames[:arg_count] - - arg_defaults = {} - if func_defaults is not None: - arg_defaults = dict( - (name, value) - for (name, value) in - zip(arg_names[::-1], func_defaults[::-1])) - - signature = { - 'name': func.__name__, - 'arg_count': arg_count, - 'arg_names': arg_names, - 'arg_defaults': arg_defaults, - } - - non_pos_names = list(func_code.co_varnames[arg_count:]) - COLLECTS_ARBITRARY_POSITIONAL_ARGS = 0x04 - if func_code.co_flags & COLLECTS_ARBITRARY_POSITIONAL_ARGS: - signature['var_args'] = non_pos_names.pop(0) - COLLECTS_ARBITRARY_KEYWORD_ARGS = 0x08 - if func_code.co_flags & COLLECTS_ARBITRARY_KEYWORD_ARGS: - signature['var_kw_args'] = non_pos_names.pop(0) - - return signature - - -def format_function_signature(func): - """ Format the function signature as printable text. - - :param func: The function object to interrogate. - :return: A formatted text representation of the function signature. - - The signature is rendered a text; for example:: - - foo(spam, eggs, ham=True, beans=None, *args, **kwargs) - - """ - signature = get_function_signature(func) - - args_text = [] - for arg_name in signature['arg_names']: - if arg_name in signature['arg_defaults']: - arg_text = "{name}={value!r}".format( - name=arg_name, value=signature['arg_defaults'][arg_name]) - else: - arg_text = "{name}".format( - name=arg_name) - args_text.append(arg_text) - if 'var_args' in signature: - args_text.append("*{var_args}".format(signature)) - if 'var_kw_args' in signature: - args_text.append("**{var_kw_args}".format(signature)) - signature_args_text = ", ".join(args_text) - - func_name = signature['name'] - signature_text = "{name}({args})".format( - name=func_name, args=signature_args_text) - - return signature_text - - -class TestCase(testtools.testcase.TestCase): - """ Test case behaviour. """ - - def failUnlessOutputCheckerMatch(self, want, got, msg=None): - """ Fail unless the specified string matches the expected. - - :param want: The desired output pattern. - :param got: The actual text to match. - :param msg: A message to prefix on the failure message. - :return: ``None``. - :raises self.failureException: If the text does not match. - - Fail the test unless ``want`` matches ``got``, as determined by - a ``doctest.OutputChecker`` instance. This is not an equality - check, but a pattern match according to the ``OutputChecker`` - rules. 
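-
-        The check is performed with the ``doctest.ELLIPSIS`` option
-        enabled, so a literal ``...`` in ``want`` matches any substring
-        of ``got``.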
- - """ - checker = doctest.OutputChecker() - want = textwrap.dedent(want) - source = "" - example = doctest.Example(source, want) - got = textwrap.dedent(got) - checker_optionflags = functools.reduce(operator.or_, [ - doctest.ELLIPSIS, - ]) - if not checker.check_output(want, got, checker_optionflags): - if msg is None: - diff = checker.output_difference( - example, got, checker_optionflags) - msg = "\n".join([ - "Output received did not match expected output", - "{diff}", - ]).format( - diff=diff) - raise self.failureException(msg) - - assertOutputCheckerMatch = failUnlessOutputCheckerMatch - - def failUnlessFunctionInTraceback(self, traceback, function, msg=None): - """ Fail if the function is not in the traceback. - - :param traceback: The traceback object to interrogate. - :param function: The function object to match. - :param msg: A message to prefix on the failure message. - :return: ``None``. - - :raises self.failureException: If the function is not in the - traceback. - - Fail the test if the function ``function`` is not at any of the - levels in the traceback object ``traceback``. - - """ - func_in_traceback = False - expected_code = function.func_code - current_traceback = traceback - while current_traceback is not None: - if expected_code is current_traceback.tb_frame.f_code: - func_in_traceback = True - break - current_traceback = current_traceback.tb_next - - if not func_in_traceback: - if msg is None: - msg = ( - "Traceback did not lead to original function" - " {function}" - ).format( - function=function) - raise self.failureException(msg) - - assertFunctionInTraceback = failUnlessFunctionInTraceback - - def failUnlessFunctionSignatureMatch(self, first, second, msg=None): - """ Fail if the function signatures do not match. - - :param first: The first function to compare. - :param second: The second function to compare. - :param msg: A message to prefix to the failure message. - :return: ``None``. - - :raises self.failureException: If the function signatures do - not match. - - Fail the test if the function signature does not match between - the ``first`` function and the ``second`` function. - - The function signature includes: - - * function name, - - * count of named parameters, - - * sequence of named parameters, - - * default values of named parameters, - - * collector for arbitrary positional arguments, - - * collector for arbitrary keyword arguments. - - """ - first_signature = get_function_signature(first) - second_signature = get_function_signature(second) - - if first_signature != second_signature: - if msg is None: - first_signature_text = format_function_signature(first) - second_signature_text = format_function_signature(second) - msg = (textwrap.dedent("""\ - Function signatures do not match: - {first!r} != {second!r} - Expected: - {first_text} - Got: - {second_text}""") - ).format( - first=first_signature, - first_text=first_signature_text, - second=second_signature, - second_text=second_signature_text, - ) - raise self.failureException(msg) - - assertFunctionSignatureMatch = failUnlessFunctionSignatureMatch - - -class TestCaseWithScenarios(testscenarios.WithScenarios, TestCase): - """ Test cases run per scenario. """ - - -class Exception_TestCase(TestCaseWithScenarios): - """ Test cases for exception classes. """ - - def test_exception_instance(self): - """ Exception instance should be created. """ - self.assertIsNot(self.instance, None) - - def test_exception_types(self): - """ Exception instance should match expected types. 
""" - for match_type in self.types: - self.assertIsInstance(self.instance, match_type) - - -def make_exception_scenarios(scenarios): - """ Make test scenarios for exception classes. - - :param scenarios: Sequence of scenarios. - :return: List of scenarios with additional mapping entries. - - Use this with `testscenarios` to adapt `Exception_TestCase`_ for - any exceptions that need testing. - - Each scenario is a tuple (`name`, `map`) where `map` is a mapping - of attributes to be applied to each test case. Attributes map must - contain items for: - - :key exc_type: - The exception type to be tested. - :key min_args: - The minimum argument count for the exception instance - initialiser. - :key types: - Sequence of types that should be superclasses of each - instance of the exception type. - - """ - updated_scenarios = deepcopy(scenarios) - for (name, scenario) in updated_scenarios: - args = (None,) * scenario['min_args'] - scenario['args'] = args - instance = scenario['exc_type'](*args) - scenario['instance'] = instance - - return updated_scenarios - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_daemon.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_daemon.py deleted file mode 100755 index a911858a..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_daemon.py +++ /dev/null @@ -1,1744 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test/test_daemon.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Unit test for ‘daemon’ module. - """ - -from __future__ import (absolute_import, unicode_literals) - -import os -import sys -import tempfile -import resource -import errno -import signal -import socket -from types import ModuleType -import collections -import functools -try: - # Standard library of Python 2.7 and later. - from io import StringIO -except ImportError: - # Standard library of Python 2.6 and earlier. - from StringIO import StringIO - -import mock - -from . import scaffold -from .scaffold import (basestring, unicode) -from .test_pidfile import ( - FakeFileDescriptorStringIO, - setup_pidfile_fixtures, - ) - -import daemon - - -class ModuleExceptions_TestCase(scaffold.Exception_TestCase): - """ Test cases for module exception classes. """ - - scenarios = scaffold.make_exception_scenarios([ - ('daemon.daemon.DaemonError', dict( - exc_type = daemon.daemon.DaemonError, - min_args = 1, - types = [Exception], - )), - ('daemon.daemon.DaemonOSEnvironmentError', dict( - exc_type = daemon.daemon.DaemonOSEnvironmentError, - min_args = 1, - types = [daemon.daemon.DaemonError, OSError], - )), - ('daemon.daemon.DaemonProcessDetachError', dict( - exc_type = daemon.daemon.DaemonProcessDetachError, - min_args = 1, - types = [daemon.daemon.DaemonError, OSError], - )), - ]) - - -def setup_daemon_context_fixtures(testcase): - """ Set up common test fixtures for DaemonContext test case. - - :param testcase: A ``TestCase`` instance to decorate. - :return: ``None``. - - Decorate the `testcase` with fixtures for tests involving - `DaemonContext`. 
- - """ - setup_streams_fixtures(testcase) - - setup_pidfile_fixtures(testcase) - - testcase.fake_pidfile_path = tempfile.mktemp() - testcase.mock_pidlockfile = mock.MagicMock() - testcase.mock_pidlockfile.path = testcase.fake_pidfile_path - - testcase.daemon_context_args = dict( - stdin=testcase.stream_files_by_name['stdin'], - stdout=testcase.stream_files_by_name['stdout'], - stderr=testcase.stream_files_by_name['stderr'], - ) - testcase.test_instance = daemon.DaemonContext( - **testcase.daemon_context_args) - -fake_default_signal_map = object() - -@mock.patch.object( - daemon.daemon, "is_detach_process_context_required", - new=(lambda: True)) -@mock.patch.object( - daemon.daemon, "make_default_signal_map", - new=(lambda: fake_default_signal_map)) -@mock.patch.object(os, "setgid", new=(lambda x: object())) -@mock.patch.object(os, "setuid", new=(lambda x: object())) -class DaemonContext_BaseTestCase(scaffold.TestCase): - """ Base class for DaemonContext test case classes. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonContext_BaseTestCase, self).setUp() - - setup_daemon_context_fixtures(self) - - -class DaemonContext_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext class. """ - - def test_instantiate(self): - """ New instance of DaemonContext should be created. """ - self.assertIsInstance( - self.test_instance, daemon.daemon.DaemonContext) - - def test_minimum_zero_arguments(self): - """ Initialiser should not require any arguments. """ - instance = daemon.daemon.DaemonContext() - self.assertIsNot(instance, None) - - def test_has_specified_chroot_directory(self): - """ Should have specified chroot_directory option. """ - args = dict( - chroot_directory=object(), - ) - expected_directory = args['chroot_directory'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_directory, instance.chroot_directory) - - def test_has_specified_working_directory(self): - """ Should have specified working_directory option. """ - args = dict( - working_directory=object(), - ) - expected_directory = args['working_directory'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_directory, instance.working_directory) - - def test_has_default_working_directory(self): - """ Should have default working_directory option. """ - args = dict() - expected_directory = "/" - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_directory, instance.working_directory) - - def test_has_specified_creation_mask(self): - """ Should have specified umask option. """ - args = dict( - umask=object(), - ) - expected_mask = args['umask'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_mask, instance.umask) - - def test_has_default_creation_mask(self): - """ Should have default umask option. """ - args = dict() - expected_mask = 0 - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_mask, instance.umask) - - def test_has_specified_uid(self): - """ Should have specified uid option. """ - args = dict( - uid=object(), - ) - expected_id = args['uid'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_id, instance.uid) - - def test_has_derived_uid(self): - """ Should have uid option derived from process. """ - args = dict() - expected_id = os.getuid() - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_id, instance.uid) - - def test_has_specified_gid(self): - """ Should have specified gid option. 
""" - args = dict( - gid=object(), - ) - expected_id = args['gid'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_id, instance.gid) - - def test_has_derived_gid(self): - """ Should have gid option derived from process. """ - args = dict() - expected_id = os.getgid() - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_id, instance.gid) - - def test_has_specified_detach_process(self): - """ Should have specified detach_process option. """ - args = dict( - detach_process=object(), - ) - expected_value = args['detach_process'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_value, instance.detach_process) - - def test_has_derived_detach_process(self): - """ Should have detach_process option derived from environment. """ - args = dict() - func = daemon.daemon.is_detach_process_context_required - expected_value = func() - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_value, instance.detach_process) - - def test_has_specified_files_preserve(self): - """ Should have specified files_preserve option. """ - args = dict( - files_preserve=object(), - ) - expected_files_preserve = args['files_preserve'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_files_preserve, instance.files_preserve) - - def test_has_specified_pidfile(self): - """ Should have the specified pidfile. """ - args = dict( - pidfile=object(), - ) - expected_pidfile = args['pidfile'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_pidfile, instance.pidfile) - - def test_has_specified_stdin(self): - """ Should have specified stdin option. """ - args = dict( - stdin=object(), - ) - expected_file = args['stdin'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_file, instance.stdin) - - def test_has_specified_stdout(self): - """ Should have specified stdout option. """ - args = dict( - stdout=object(), - ) - expected_file = args['stdout'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_file, instance.stdout) - - def test_has_specified_stderr(self): - """ Should have specified stderr option. """ - args = dict( - stderr=object(), - ) - expected_file = args['stderr'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_file, instance.stderr) - - def test_has_specified_signal_map(self): - """ Should have specified signal_map option. """ - args = dict( - signal_map=object(), - ) - expected_signal_map = args['signal_map'] - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_signal_map, instance.signal_map) - - def test_has_derived_signal_map(self): - """ Should have signal_map option derived from system. """ - args = dict() - expected_signal_map = daemon.daemon.make_default_signal_map() - instance = daemon.daemon.DaemonContext(**args) - self.assertEqual(expected_signal_map, instance.signal_map) - - -class DaemonContext_is_open_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext.is_open property. """ - - def test_begin_false(self): - """ Initial value of is_open should be False. """ - instance = self.test_instance - self.assertEqual(False, instance.is_open) - - def test_write_fails(self): - """ Writing to is_open should fail. """ - instance = self.test_instance - self.assertRaises( - AttributeError, - setattr, instance, 'is_open', object()) - - -class DaemonContext_open_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext.open method. 
""" - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonContext_open_TestCase, self).setUp() - - self.test_instance._is_open = False - - self.mock_module_daemon = mock.MagicMock() - daemon_func_patchers = dict( - (func_name, mock.patch.object( - daemon.daemon, func_name)) - for func_name in [ - "detach_process_context", - "change_working_directory", - "change_root_directory", - "change_file_creation_mask", - "change_process_owner", - "prevent_core_dump", - "close_all_open_files", - "redirect_stream", - "set_signal_handlers", - "register_atexit_function", - ]) - for (func_name, patcher) in daemon_func_patchers.items(): - mock_func = patcher.start() - self.addCleanup(patcher.stop) - self.mock_module_daemon.attach_mock(mock_func, func_name) - - self.mock_module_daemon.attach_mock(mock.Mock(), 'DaemonContext') - - self.test_files_preserve_fds = object() - self.test_signal_handler_map = object() - daemoncontext_method_return_values = { - '_get_exclude_file_descriptors': - self.test_files_preserve_fds, - '_make_signal_handler_map': - self.test_signal_handler_map, - } - daemoncontext_func_patchers = dict( - (func_name, mock.patch.object( - daemon.daemon.DaemonContext, - func_name, - return_value=return_value)) - for (func_name, return_value) in - daemoncontext_method_return_values.items()) - for (func_name, patcher) in daemoncontext_func_patchers.items(): - mock_func = patcher.start() - self.addCleanup(patcher.stop) - self.mock_module_daemon.DaemonContext.attach_mock( - mock_func, func_name) - - def test_performs_steps_in_expected_sequence(self): - """ Should perform daemonisation steps in expected sequence. """ - instance = self.test_instance - instance.chroot_directory = object() - instance.detach_process = True - instance.pidfile = self.mock_pidlockfile - self.mock_module_daemon.attach_mock( - self.mock_pidlockfile, 'pidlockfile') - expected_calls = [ - mock.call.change_root_directory(mock.ANY), - mock.call.prevent_core_dump(), - mock.call.change_file_creation_mask(mock.ANY), - mock.call.change_working_directory(mock.ANY), - mock.call.change_process_owner(mock.ANY, mock.ANY), - mock.call.detach_process_context(), - mock.call.DaemonContext._make_signal_handler_map(), - mock.call.set_signal_handlers(mock.ANY), - mock.call.DaemonContext._get_exclude_file_descriptors(), - mock.call.close_all_open_files(exclude=mock.ANY), - mock.call.redirect_stream(mock.ANY, mock.ANY), - mock.call.redirect_stream(mock.ANY, mock.ANY), - mock.call.redirect_stream(mock.ANY, mock.ANY), - mock.call.pidlockfile.__enter__(), - mock.call.register_atexit_function(mock.ANY), - ] - instance.open() - self.mock_module_daemon.assert_has_calls(expected_calls) - - def test_returns_immediately_if_is_open(self): - """ Should return immediately if is_open property is true. """ - instance = self.test_instance - instance._is_open = True - instance.open() - self.assertEqual(0, len(self.mock_module_daemon.mock_calls)) - - def test_changes_root_directory_to_chroot_directory(self): - """ Should change root directory to `chroot_directory` option. """ - instance = self.test_instance - chroot_directory = object() - instance.chroot_directory = chroot_directory - instance.open() - self.mock_module_daemon.change_root_directory.assert_called_with( - chroot_directory) - - def test_omits_chroot_if_no_chroot_directory(self): - """ Should omit changing root directory if no `chroot_directory`. 
""" - instance = self.test_instance - instance.chroot_directory = None - instance.open() - self.assertFalse(self.mock_module_daemon.change_root_directory.called) - - def test_prevents_core_dump(self): - """ Should request prevention of core dumps. """ - instance = self.test_instance - instance.open() - self.mock_module_daemon.prevent_core_dump.assert_called_with() - - def test_omits_prevent_core_dump_if_prevent_core_false(self): - """ Should omit preventing core dumps if `prevent_core` is false. """ - instance = self.test_instance - instance.prevent_core = False - instance.open() - self.assertFalse(self.mock_module_daemon.prevent_core_dump.called) - - def test_closes_open_files(self): - """ Should close all open files, excluding `files_preserve`. """ - instance = self.test_instance - expected_exclude = self.test_files_preserve_fds - instance.open() - self.mock_module_daemon.close_all_open_files.assert_called_with( - exclude=expected_exclude) - - def test_changes_directory_to_working_directory(self): - """ Should change current directory to `working_directory` option. """ - instance = self.test_instance - working_directory = object() - instance.working_directory = working_directory - instance.open() - self.mock_module_daemon.change_working_directory.assert_called_with( - working_directory) - - def test_changes_creation_mask_to_umask(self): - """ Should change file creation mask to `umask` option. """ - instance = self.test_instance - umask = object() - instance.umask = umask - instance.open() - self.mock_module_daemon.change_file_creation_mask.assert_called_with( - umask) - - def test_changes_owner_to_specified_uid_and_gid(self): - """ Should change process UID and GID to `uid` and `gid` options. """ - instance = self.test_instance - uid = object() - gid = object() - instance.uid = uid - instance.gid = gid - instance.open() - self.mock_module_daemon.change_process_owner.assert_called_with( - uid, gid) - - def test_detaches_process_context(self): - """ Should request detach of process context. """ - instance = self.test_instance - instance.open() - self.mock_module_daemon.detach_process_context.assert_called_with() - - def test_omits_process_detach_if_not_required(self): - """ Should omit detach of process context if not required. """ - instance = self.test_instance - instance.detach_process = False - instance.open() - self.assertFalse(self.mock_module_daemon.detach_process_context.called) - - def test_sets_signal_handlers_from_signal_map(self): - """ Should set signal handlers according to `signal_map`. """ - instance = self.test_instance - instance.signal_map = object() - expected_signal_handler_map = self.test_signal_handler_map - instance.open() - self.mock_module_daemon.set_signal_handlers.assert_called_with( - expected_signal_handler_map) - - def test_redirects_standard_streams(self): - """ Should request redirection of standard stream files. """ - instance = self.test_instance - (system_stdin, system_stdout, system_stderr) = ( - sys.stdin, sys.stdout, sys.stderr) - (target_stdin, target_stdout, target_stderr) = ( - self.stream_files_by_name[name] - for name in ['stdin', 'stdout', 'stderr']) - expected_calls = [ - mock.call(system_stdin, target_stdin), - mock.call(system_stdout, target_stdout), - mock.call(system_stderr, target_stderr), - ] - instance.open() - self.mock_module_daemon.redirect_stream.assert_has_calls( - expected_calls, any_order=True) - - def test_enters_pidfile_context(self): - """ Should enter the PID file context manager. 
""" - instance = self.test_instance - instance.pidfile = self.mock_pidlockfile - instance.open() - self.mock_pidlockfile.__enter__.assert_called_with() - - def test_sets_is_open_true(self): - """ Should set the `is_open` property to True. """ - instance = self.test_instance - instance.open() - self.assertEqual(True, instance.is_open) - - def test_registers_close_method_for_atexit(self): - """ Should register the `close` method for atexit processing. """ - instance = self.test_instance - close_method = instance.close - instance.open() - self.mock_module_daemon.register_atexit_function.assert_called_with( - close_method) - - -class DaemonContext_close_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext.close method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonContext_close_TestCase, self).setUp() - - self.test_instance._is_open = True - - def test_returns_immediately_if_not_is_open(self): - """ Should return immediately if is_open property is false. """ - instance = self.test_instance - instance._is_open = False - instance.pidfile = object() - instance.close() - self.assertFalse(self.mock_pidlockfile.__exit__.called) - - def test_exits_pidfile_context(self): - """ Should exit the PID file context manager. """ - instance = self.test_instance - instance.pidfile = self.mock_pidlockfile - instance.close() - self.mock_pidlockfile.__exit__.assert_called_with(None, None, None) - - def test_returns_none(self): - """ Should return None. """ - instance = self.test_instance - expected_result = None - result = instance.close() - self.assertIs(result, expected_result) - - def test_sets_is_open_false(self): - """ Should set the `is_open` property to False. """ - instance = self.test_instance - instance.close() - self.assertEqual(False, instance.is_open) - - -@mock.patch.object(daemon.daemon.DaemonContext, "open") -class DaemonContext_context_manager_enter_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext.__enter__ method. """ - - def test_opens_daemon_context(self, mock_func_daemoncontext_open): - """ Should open the DaemonContext. """ - instance = self.test_instance - instance.__enter__() - mock_func_daemoncontext_open.assert_called_with() - - def test_returns_self_instance(self, mock_func_daemoncontext_open): - """ Should return DaemonContext instance. """ - instance = self.test_instance - expected_result = instance - result = instance.__enter__() - self.assertIs(result, expected_result) - - -@mock.patch.object(daemon.daemon.DaemonContext, "close") -class DaemonContext_context_manager_exit_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext.__exit__ method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonContext_context_manager_exit_TestCase, self).setUp() - - self.test_args = dict( - exc_type=object(), - exc_value=object(), - traceback=object(), - ) - - def test_closes_daemon_context(self, mock_func_daemoncontext_close): - """ Should close the DaemonContext. """ - instance = self.test_instance - args = self.test_args - instance.__exit__(**args) - mock_func_daemoncontext_close.assert_called_with() - - def test_returns_none(self, mock_func_daemoncontext_close): - """ Should return None, indicating exception was not handled. 
""" - instance = self.test_instance - args = self.test_args - expected_result = None - result = instance.__exit__(**args) - self.assertIs(result, expected_result) - - -class DaemonContext_terminate_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext.terminate method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonContext_terminate_TestCase, self).setUp() - - self.test_signal = signal.SIGTERM - self.test_frame = None - self.test_args = (self.test_signal, self.test_frame) - - def test_raises_system_exit(self): - """ Should raise SystemExit. """ - instance = self.test_instance - args = self.test_args - expected_exception = SystemExit - self.assertRaises( - expected_exception, - instance.terminate, *args) - - def test_exception_message_contains_signal_number(self): - """ Should raise exception with a message containing signal number. """ - instance = self.test_instance - args = self.test_args - signal_number = self.test_signal - expected_exception = SystemExit - exc = self.assertRaises( - expected_exception, - instance.terminate, *args) - self.assertIn(unicode(signal_number), unicode(exc)) - - -class DaemonContext_get_exclude_file_descriptors_TestCase( - DaemonContext_BaseTestCase): - """ Test cases for DaemonContext._get_exclude_file_descriptors function. """ - - def setUp(self): - """ Set up test fixtures. """ - super( - DaemonContext_get_exclude_file_descriptors_TestCase, - self).setUp() - - self.test_files = { - 2: FakeFileDescriptorStringIO(), - 5: 5, - 11: FakeFileDescriptorStringIO(), - 17: None, - 23: FakeFileDescriptorStringIO(), - 37: 37, - 42: FakeFileDescriptorStringIO(), - } - for (fileno, item) in self.test_files.items(): - if hasattr(item, '_fileno'): - item._fileno = fileno - self.test_file_descriptors = set( - fd for (fd, item) in self.test_files.items() - if item is not None) - self.test_file_descriptors.update( - self.stream_files_by_name[name].fileno() - for name in ['stdin', 'stdout', 'stderr'] - ) - - def test_returns_expected_file_descriptors(self): - """ Should return expected set of file descriptors. """ - instance = self.test_instance - instance.files_preserve = list(self.test_files.values()) - expected_result = self.test_file_descriptors - result = instance._get_exclude_file_descriptors() - self.assertEqual(expected_result, result) - - def test_returns_stream_redirects_if_no_files_preserve(self): - """ Should return only stream redirects if no files_preserve. """ - instance = self.test_instance - instance.files_preserve = None - expected_result = set( - stream.fileno() - for stream in self.stream_files_by_name.values()) - result = instance._get_exclude_file_descriptors() - self.assertEqual(expected_result, result) - - def test_returns_empty_set_if_no_files(self): - """ Should return empty set if no file options. """ - instance = self.test_instance - for name in ['files_preserve', 'stdin', 'stdout', 'stderr']: - setattr(instance, name, None) - expected_result = set() - result = instance._get_exclude_file_descriptors() - self.assertEqual(expected_result, result) - - def test_omits_non_file_streams(self): - """ Should omit non-file stream attributes. 
""" - instance = self.test_instance - instance.files_preserve = list(self.test_files.values()) - stream_files = self.stream_files_by_name - expected_result = self.test_file_descriptors.copy() - for (pseudo_stream_name, pseudo_stream) in stream_files.items(): - test_non_file_object = object() - setattr(instance, pseudo_stream_name, test_non_file_object) - stream_fd = pseudo_stream.fileno() - expected_result.discard(stream_fd) - result = instance._get_exclude_file_descriptors() - self.assertEqual(expected_result, result) - - def test_includes_verbatim_streams_without_file_descriptor(self): - """ Should include verbatim any stream without a file descriptor. """ - instance = self.test_instance - instance.files_preserve = list(self.test_files.values()) - stream_files = self.stream_files_by_name - mock_fileno_method = mock.MagicMock( - spec=sys.__stdin__.fileno, - side_effect=ValueError) - expected_result = self.test_file_descriptors.copy() - for (pseudo_stream_name, pseudo_stream) in stream_files.items(): - test_non_fd_stream = StringIO() - if not hasattr(test_non_fd_stream, 'fileno'): - # Python < 3 StringIO doesn't have ‘fileno’ at all. - # Add a method which raises an exception. - test_non_fd_stream.fileno = mock_fileno_method - setattr(instance, pseudo_stream_name, test_non_fd_stream) - stream_fd = pseudo_stream.fileno() - expected_result.discard(stream_fd) - expected_result.add(test_non_fd_stream) - result = instance._get_exclude_file_descriptors() - self.assertEqual(expected_result, result) - - def test_omits_none_streams(self): - """ Should omit any stream attribute which is None. """ - instance = self.test_instance - instance.files_preserve = list(self.test_files.values()) - stream_files = self.stream_files_by_name - expected_result = self.test_file_descriptors.copy() - for (pseudo_stream_name, pseudo_stream) in stream_files.items(): - setattr(instance, pseudo_stream_name, None) - stream_fd = pseudo_stream.fileno() - expected_result.discard(stream_fd) - result = instance._get_exclude_file_descriptors() - self.assertEqual(expected_result, result) - - -class DaemonContext_make_signal_handler_TestCase(DaemonContext_BaseTestCase): - """ Test cases for DaemonContext._make_signal_handler function. """ - - def test_returns_ignore_for_none(self): - """ Should return SIG_IGN when None handler specified. """ - instance = self.test_instance - target = None - expected_result = signal.SIG_IGN - result = instance._make_signal_handler(target) - self.assertEqual(expected_result, result) - - def test_returns_method_for_name(self): - """ Should return method of DaemonContext when name specified. """ - instance = self.test_instance - target = 'terminate' - expected_result = instance.terminate - result = instance._make_signal_handler(target) - self.assertEqual(expected_result, result) - - def test_raises_error_for_unknown_name(self): - """ Should raise AttributeError for unknown method name. """ - instance = self.test_instance - target = 'b0gUs' - expected_error = AttributeError - self.assertRaises( - expected_error, - instance._make_signal_handler, target) - - def test_returns_object_for_object(self): - """ Should return same object for any other object. """ - instance = self.test_instance - target = object() - expected_result = target - result = instance._make_signal_handler(target) - self.assertEqual(expected_result, result) - - -class DaemonContext_make_signal_handler_map_TestCase( - DaemonContext_BaseTestCase): - """ Test cases for DaemonContext._make_signal_handler_map function. 
""" - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonContext_make_signal_handler_map_TestCase, self).setUp() - - self.test_instance.signal_map = { - object(): object(), - object(): object(), - object(): object(), - } - - self.test_signal_handlers = dict( - (key, object()) - for key in self.test_instance.signal_map.values()) - self.test_signal_handler_map = dict( - (key, self.test_signal_handlers[target]) - for (key, target) in self.test_instance.signal_map.items()) - - def fake_make_signal_handler(target): - return self.test_signal_handlers[target] - - func_patcher_make_signal_handler = mock.patch.object( - daemon.daemon.DaemonContext, "_make_signal_handler", - side_effect=fake_make_signal_handler) - self.mock_func_make_signal_handler = ( - func_patcher_make_signal_handler.start()) - self.addCleanup(func_patcher_make_signal_handler.stop) - - def test_returns_constructed_signal_handler_items(self): - """ Should return items as constructed via make_signal_handler. """ - instance = self.test_instance - expected_result = self.test_signal_handler_map - result = instance._make_signal_handler_map() - self.assertEqual(expected_result, result) - - -try: - FileNotFoundError -except NameError: - # Python 2 uses IOError. - FileNotFoundError = functools.partial(IOError, errno.ENOENT) - - -@mock.patch.object(os, "chdir") -class change_working_directory_TestCase(scaffold.TestCase): - """ Test cases for change_working_directory function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(change_working_directory_TestCase, self).setUp() - - self.test_directory = object() - self.test_args = dict( - directory=self.test_directory, - ) - - def test_changes_working_directory_to_specified_directory( - self, - mock_func_os_chdir): - """ Should change working directory to specified directory. """ - args = self.test_args - directory = self.test_directory - daemon.daemon.change_working_directory(**args) - mock_func_os_chdir.assert_called_with(directory) - - def test_raises_daemon_error_on_os_error( - self, - mock_func_os_chdir): - """ Should raise a DaemonError on receiving an IOError. """ - args = self.test_args - test_error = FileNotFoundError("No such directory") - mock_func_os_chdir.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_working_directory, **args) - self.assertEqual(test_error, exc.__cause__) - - def test_error_message_contains_original_error_message( - self, - mock_func_os_chdir): - """ Should raise a DaemonError with original message. """ - args = self.test_args - test_error = FileNotFoundError("No such directory") - mock_func_os_chdir.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_working_directory, **args) - self.assertIn(unicode(test_error), unicode(exc)) - - -@mock.patch.object(os, "chroot") -@mock.patch.object(os, "chdir") -class change_root_directory_TestCase(scaffold.TestCase): - """ Test cases for change_root_directory function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(change_root_directory_TestCase, self).setUp() - - self.test_directory = object() - self.test_args = dict( - directory=self.test_directory, - ) - - def test_changes_working_directory_to_specified_directory( - self, - mock_func_os_chdir, mock_func_os_chroot): - """ Should change working directory to specified directory. 
""" - args = self.test_args - directory = self.test_directory - daemon.daemon.change_root_directory(**args) - mock_func_os_chdir.assert_called_with(directory) - - def test_changes_root_directory_to_specified_directory( - self, - mock_func_os_chdir, mock_func_os_chroot): - """ Should change root directory to specified directory. """ - args = self.test_args - directory = self.test_directory - daemon.daemon.change_root_directory(**args) - mock_func_os_chroot.assert_called_with(directory) - - def test_raises_daemon_error_on_os_error_from_chdir( - self, - mock_func_os_chdir, mock_func_os_chroot): - """ Should raise a DaemonError on receiving an IOError from chdir. """ - args = self.test_args - test_error = FileNotFoundError("No such directory") - mock_func_os_chdir.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_root_directory, **args) - self.assertEqual(test_error, exc.__cause__) - - def test_raises_daemon_error_on_os_error_from_chroot( - self, - mock_func_os_chdir, mock_func_os_chroot): - """ Should raise a DaemonError on receiving an OSError from chroot. """ - args = self.test_args - test_error = OSError(errno.EPERM, "No chroot for you!") - mock_func_os_chroot.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_root_directory, **args) - self.assertEqual(test_error, exc.__cause__) - - def test_error_message_contains_original_error_message( - self, - mock_func_os_chdir, mock_func_os_chroot): - """ Should raise a DaemonError with original message. """ - args = self.test_args - test_error = FileNotFoundError("No such directory") - mock_func_os_chdir.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_root_directory, **args) - self.assertIn(unicode(test_error), unicode(exc)) - - -@mock.patch.object(os, "umask") -class change_file_creation_mask_TestCase(scaffold.TestCase): - """ Test cases for change_file_creation_mask function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(change_file_creation_mask_TestCase, self).setUp() - - self.test_mask = object() - self.test_args = dict( - mask=self.test_mask, - ) - - def test_changes_umask_to_specified_mask(self, mock_func_os_umask): - """ Should change working directory to specified directory. """ - args = self.test_args - mask = self.test_mask - daemon.daemon.change_file_creation_mask(**args) - mock_func_os_umask.assert_called_with(mask) - - def test_raises_daemon_error_on_os_error_from_chdir( - self, - mock_func_os_umask): - """ Should raise a DaemonError on receiving an OSError from umask. """ - args = self.test_args - test_error = OSError(errno.EINVAL, "Whatchoo talkin' 'bout?") - mock_func_os_umask.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_file_creation_mask, **args) - self.assertEqual(test_error, exc.__cause__) - - def test_error_message_contains_original_error_message( - self, - mock_func_os_umask): - """ Should raise a DaemonError with original message. 
""" - args = self.test_args - test_error = FileNotFoundError("No such directory") - mock_func_os_umask.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_file_creation_mask, **args) - self.assertIn(unicode(test_error), unicode(exc)) - - -@mock.patch.object(os, "setgid") -@mock.patch.object(os, "setuid") -class change_process_owner_TestCase(scaffold.TestCase): - """ Test cases for change_process_owner function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(change_process_owner_TestCase, self).setUp() - - self.test_uid = object() - self.test_gid = object() - self.test_args = dict( - uid=self.test_uid, - gid=self.test_gid, - ) - - def test_changes_gid_and_uid_in_order( - self, - mock_func_os_setuid, mock_func_os_setgid): - """ Should change process GID and UID in correct order. - - Since the process requires appropriate privilege to use - either of `setuid` or `setgid`, changing the UID must be - done last. - - """ - args = self.test_args - daemon.daemon.change_process_owner(**args) - mock_func_os_setuid.assert_called() - mock_func_os_setgid.assert_called() - - def test_changes_group_id_to_gid( - self, - mock_func_os_setuid, mock_func_os_setgid): - """ Should change process GID to specified value. """ - args = self.test_args - gid = self.test_gid - daemon.daemon.change_process_owner(**args) - mock_func_os_setgid.assert_called(gid) - - def test_changes_user_id_to_uid( - self, - mock_func_os_setuid, mock_func_os_setgid): - """ Should change process UID to specified value. """ - args = self.test_args - uid = self.test_uid - daemon.daemon.change_process_owner(**args) - mock_func_os_setuid.assert_called(uid) - - def test_raises_daemon_error_on_os_error_from_setgid( - self, - mock_func_os_setuid, mock_func_os_setgid): - """ Should raise a DaemonError on receiving an OSError from setgid. """ - args = self.test_args - test_error = OSError(errno.EPERM, "No switching for you!") - mock_func_os_setgid.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_process_owner, **args) - self.assertEqual(test_error, exc.__cause__) - - def test_raises_daemon_error_on_os_error_from_setuid( - self, - mock_func_os_setuid, mock_func_os_setgid): - """ Should raise a DaemonError on receiving an OSError from setuid. """ - args = self.test_args - test_error = OSError(errno.EPERM, "No switching for you!") - mock_func_os_setuid.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_process_owner, **args) - self.assertEqual(test_error, exc.__cause__) - - def test_error_message_contains_original_error_message( - self, - mock_func_os_setuid, mock_func_os_setgid): - """ Should raise a DaemonError with original message. 
""" - args = self.test_args - test_error = OSError(errno.EINVAL, "Whatchoo talkin' 'bout?") - mock_func_os_setuid.side_effect = test_error - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.change_process_owner, **args) - self.assertIn(unicode(test_error), unicode(exc)) - - -RLimitResult = collections.namedtuple('RLimitResult', ['soft', 'hard']) - -fake_RLIMIT_CORE = object() - -@mock.patch.object(resource, "RLIMIT_CORE", new=fake_RLIMIT_CORE) -@mock.patch.object(resource, "setrlimit", side_effect=(lambda x, y: None)) -@mock.patch.object(resource, "getrlimit", side_effect=(lambda x: None)) -class prevent_core_dump_TestCase(scaffold.TestCase): - """ Test cases for prevent_core_dump function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(prevent_core_dump_TestCase, self).setUp() - - def test_sets_core_limit_to_zero( - self, - mock_func_resource_getrlimit, mock_func_resource_setrlimit): - """ Should set the RLIMIT_CORE resource to zero. """ - expected_resource = fake_RLIMIT_CORE - expected_limit = tuple(RLimitResult(soft=0, hard=0)) - daemon.daemon.prevent_core_dump() - mock_func_resource_getrlimit.assert_called_with(expected_resource) - mock_func_resource_setrlimit.assert_called_with( - expected_resource, expected_limit) - - def test_raises_error_when_no_core_resource( - self, - mock_func_resource_getrlimit, mock_func_resource_setrlimit): - """ Should raise DaemonError if no RLIMIT_CORE resource. """ - test_error = ValueError("Bogus platform doesn't have RLIMIT_CORE") - def fake_getrlimit(res): - if res == resource.RLIMIT_CORE: - raise test_error - else: - return None - mock_func_resource_getrlimit.side_effect = fake_getrlimit - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.prevent_core_dump) - self.assertEqual(test_error, exc.__cause__) - - -@mock.patch.object(os, "close") -class close_file_descriptor_if_open_TestCase(scaffold.TestCase): - """ Test cases for close_file_descriptor_if_open function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(close_file_descriptor_if_open_TestCase, self).setUp() - - self.fake_fd = 274 - - def test_requests_file_descriptor_close(self, mock_func_os_close): - """ Should request close of file descriptor. """ - fd = self.fake_fd - daemon.daemon.close_file_descriptor_if_open(fd) - mock_func_os_close.assert_called_with(fd) - - def test_ignores_badfd_error_on_close(self, mock_func_os_close): - """ Should ignore OSError EBADF when closing. """ - fd = self.fake_fd - test_error = OSError(errno.EBADF, "Bad file descriptor") - def fake_os_close(fd): - raise test_error - mock_func_os_close.side_effect = fake_os_close - daemon.daemon.close_file_descriptor_if_open(fd) - mock_func_os_close.assert_called_with(fd) - - def test_raises_error_if_oserror_on_close(self, mock_func_os_close): - """ Should raise DaemonError if an OSError occurs when closing. """ - fd = self.fake_fd - test_error = OSError(object(), "Unexpected error") - def fake_os_close(fd): - raise test_error - mock_func_os_close.side_effect = fake_os_close - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.close_file_descriptor_if_open, fd) - self.assertEqual(test_error, exc.__cause__) - - def test_raises_error_if_ioerror_on_close(self, mock_func_os_close): - """ Should raise DaemonError if an IOError occurs when closing. 
""" - fd = self.fake_fd - test_error = IOError(object(), "Unexpected error") - def fake_os_close(fd): - raise test_error - mock_func_os_close.side_effect = fake_os_close - expected_error = daemon.daemon.DaemonOSEnvironmentError - exc = self.assertRaises( - expected_error, - daemon.daemon.close_file_descriptor_if_open, fd) - self.assertEqual(test_error, exc.__cause__) - - -class maxfd_TestCase(scaffold.TestCase): - """ Test cases for module MAXFD constant. """ - - def test_positive(self): - """ Should be a positive number. """ - maxfd = daemon.daemon.MAXFD - self.assertTrue(maxfd > 0) - - def test_integer(self): - """ Should be an integer. """ - maxfd = daemon.daemon.MAXFD - self.assertEqual(int(maxfd), maxfd) - - def test_reasonably_high(self): - """ Should be reasonably high for default open files limit. - - If the system reports a limit of “infinity” on maximum - file descriptors, we still need a finite number in order - to close “all” of them. Ensure this is reasonably high - to catch most use cases. - - """ - expected_minimum = 2048 - maxfd = daemon.daemon.MAXFD - self.assertTrue( - expected_minimum <= maxfd, - msg=( - "MAXFD should be at least {minimum!r}" - " (got {maxfd!r})".format( - minimum=expected_minimum, maxfd=maxfd))) - - -fake_default_maxfd = 8 -fake_RLIMIT_NOFILE = object() -fake_RLIM_INFINITY = object() -fake_rlimit_nofile_large = 2468 - -def fake_getrlimit_nofile_soft_infinity(resource): - result = RLimitResult(soft=fake_RLIM_INFINITY, hard=object()) - if resource != fake_RLIMIT_NOFILE: - result = NotImplemented - return result - -def fake_getrlimit_nofile_hard_infinity(resource): - result = RLimitResult(soft=object(), hard=fake_RLIM_INFINITY) - if resource != fake_RLIMIT_NOFILE: - result = NotImplemented - return result - -def fake_getrlimit_nofile_hard_large(resource): - result = RLimitResult(soft=object(), hard=fake_rlimit_nofile_large) - if resource != fake_RLIMIT_NOFILE: - result = NotImplemented - return result - -@mock.patch.object(daemon.daemon, "MAXFD", new=fake_default_maxfd) -@mock.patch.object(resource, "RLIMIT_NOFILE", new=fake_RLIMIT_NOFILE) -@mock.patch.object(resource, "RLIM_INFINITY", new=fake_RLIM_INFINITY) -@mock.patch.object( - resource, "getrlimit", - side_effect=fake_getrlimit_nofile_hard_large) -class get_maximum_file_descriptors_TestCase(scaffold.TestCase): - """ Test cases for get_maximum_file_descriptors function. """ - - def test_returns_system_hard_limit(self, mock_func_resource_getrlimit): - """ Should return process hard limit on number of files. """ - expected_result = fake_rlimit_nofile_large - result = daemon.daemon.get_maximum_file_descriptors() - self.assertEqual(expected_result, result) - - def test_returns_module_default_if_hard_limit_infinity( - self, mock_func_resource_getrlimit): - """ Should return module MAXFD if hard limit is infinity. 
""" - mock_func_resource_getrlimit.side_effect = ( - fake_getrlimit_nofile_hard_infinity) - expected_result = fake_default_maxfd - result = daemon.daemon.get_maximum_file_descriptors() - self.assertEqual(expected_result, result) - - -def fake_get_maximum_file_descriptors(): - return fake_default_maxfd - -@mock.patch.object(resource, "RLIMIT_NOFILE", new=fake_RLIMIT_NOFILE) -@mock.patch.object(resource, "RLIM_INFINITY", new=fake_RLIM_INFINITY) -@mock.patch.object( - resource, "getrlimit", - new=fake_getrlimit_nofile_soft_infinity) -@mock.patch.object( - daemon.daemon, "get_maximum_file_descriptors", - new=fake_get_maximum_file_descriptors) -@mock.patch.object(daemon.daemon, "close_file_descriptor_if_open") -class close_all_open_files_TestCase(scaffold.TestCase): - """ Test cases for close_all_open_files function. """ - - def test_requests_all_open_files_to_close( - self, mock_func_close_file_descriptor_if_open): - """ Should request close of all open files. """ - expected_file_descriptors = range(fake_default_maxfd) - expected_calls = [ - mock.call(fd) for fd in expected_file_descriptors] - daemon.daemon.close_all_open_files() - mock_func_close_file_descriptor_if_open.assert_has_calls( - expected_calls, any_order=True) - - def test_requests_all_but_excluded_files_to_close( - self, mock_func_close_file_descriptor_if_open): - """ Should request close of all open files but those excluded. """ - test_exclude = set([3, 7]) - args = dict( - exclude=test_exclude, - ) - expected_file_descriptors = set( - fd for fd in range(fake_default_maxfd) - if fd not in test_exclude) - expected_calls = [ - mock.call(fd) for fd in expected_file_descriptors] - daemon.daemon.close_all_open_files(**args) - mock_func_close_file_descriptor_if_open.assert_has_calls( - expected_calls, any_order=True) - - -class detach_process_context_TestCase(scaffold.TestCase): - """ Test cases for detach_process_context function. """ - - class FakeOSExit(SystemExit): - """ Fake exception raised for os._exit(). """ - - def setUp(self): - """ Set up test fixtures. """ - super(detach_process_context_TestCase, self).setUp() - - self.mock_module_os = mock.MagicMock(wraps=os) - - fake_pids = [0, 0] - func_patcher_os_fork = mock.patch.object( - os, "fork", - side_effect=iter(fake_pids)) - self.mock_func_os_fork = func_patcher_os_fork.start() - self.addCleanup(func_patcher_os_fork.stop) - self.mock_module_os.attach_mock(self.mock_func_os_fork, "fork") - - func_patcher_os_setsid = mock.patch.object(os, "setsid") - self.mock_func_os_setsid = func_patcher_os_setsid.start() - self.addCleanup(func_patcher_os_setsid.stop) - self.mock_module_os.attach_mock(self.mock_func_os_setsid, "setsid") - - def raise_os_exit(status=None): - raise self.FakeOSExit(status) - - func_patcher_os_force_exit = mock.patch.object( - os, "_exit", - side_effect=raise_os_exit) - self.mock_func_os_force_exit = func_patcher_os_force_exit.start() - self.addCleanup(func_patcher_os_force_exit.stop) - self.mock_module_os.attach_mock(self.mock_func_os_force_exit, "_exit") - - def test_parent_exits(self): - """ Parent process should exit. """ - parent_pid = 23 - self.mock_func_os_fork.side_effect = iter([parent_pid]) - self.assertRaises( - self.FakeOSExit, - daemon.daemon.detach_process_context) - self.mock_module_os.assert_has_calls([ - mock.call.fork(), - mock.call._exit(0), - ]) - - def test_first_fork_error_raises_error(self): - """ Error on first fork should raise DaemonProcessDetachError. 
""" - fork_errno = 13 - fork_strerror = "Bad stuff happened" - test_error = OSError(fork_errno, fork_strerror) - test_pids_iter = iter([test_error]) - - def fake_fork(): - next_item = next(test_pids_iter) - if isinstance(next_item, Exception): - raise next_item - else: - return next_item - - self.mock_func_os_fork.side_effect = fake_fork - exc = self.assertRaises( - daemon.daemon.DaemonProcessDetachError, - daemon.daemon.detach_process_context) - self.assertEqual(test_error, exc.__cause__) - self.mock_module_os.assert_has_calls([ - mock.call.fork(), - ]) - - def test_child_starts_new_process_group(self): - """ Child should start new process group. """ - daemon.daemon.detach_process_context() - self.mock_module_os.assert_has_calls([ - mock.call.fork(), - mock.call.setsid(), - ]) - - def test_child_forks_next_parent_exits(self): - """ Child should fork, then exit if parent. """ - fake_pids = [0, 42] - self.mock_func_os_fork.side_effect = iter(fake_pids) - self.assertRaises( - self.FakeOSExit, - daemon.daemon.detach_process_context) - self.mock_module_os.assert_has_calls([ - mock.call.fork(), - mock.call.setsid(), - mock.call.fork(), - mock.call._exit(0), - ]) - - def test_second_fork_error_reports_to_stderr(self): - """ Error on second fork should cause report to stderr. """ - fork_errno = 17 - fork_strerror = "Nasty stuff happened" - test_error = OSError(fork_errno, fork_strerror) - test_pids_iter = iter([0, test_error]) - - def fake_fork(): - next_item = next(test_pids_iter) - if isinstance(next_item, Exception): - raise next_item - else: - return next_item - - self.mock_func_os_fork.side_effect = fake_fork - exc = self.assertRaises( - daemon.daemon.DaemonProcessDetachError, - daemon.daemon.detach_process_context) - self.assertEqual(test_error, exc.__cause__) - self.mock_module_os.assert_has_calls([ - mock.call.fork(), - mock.call.setsid(), - mock.call.fork(), - ]) - - def test_child_forks_next_child_continues(self): - """ Child should fork, then continue if child. """ - daemon.daemon.detach_process_context() - self.mock_module_os.assert_has_calls([ - mock.call.fork(), - mock.call.setsid(), - mock.call.fork(), - ]) - - -@mock.patch("os.getppid", return_value=765) -class is_process_started_by_init_TestCase(scaffold.TestCase): - """ Test cases for is_process_started_by_init function. """ - - def test_returns_false_by_default(self, mock_func_os_getppid): - """ Should return False under normal circumstances. """ - expected_result = False - result = daemon.daemon.is_process_started_by_init() - self.assertIs(result, expected_result) - - def test_returns_true_if_parent_process_is_init( - self, mock_func_os_getppid): - """ Should return True if parent process is `init`. """ - init_pid = 1 - mock_func_os_getppid.return_value = init_pid - expected_result = True - result = daemon.daemon.is_process_started_by_init() - self.assertIs(result, expected_result) - - -class is_socket_TestCase(scaffold.TestCase): - """ Test cases for is_socket function. """ - - def setUp(self): - """ Set up test fixtures. 
""" - super(is_socket_TestCase, self).setUp() - - def fake_getsockopt(level, optname, buflen=None): - result = object() - if optname is socket.SO_TYPE: - result = socket.SOCK_RAW - return result - - self.fake_socket_getsockopt_func = fake_getsockopt - - self.fake_socket_error = socket.error( - errno.ENOTSOCK, - "Socket operation on non-socket") - - self.mock_socket = mock.MagicMock(spec=socket.socket) - self.mock_socket.getsockopt.side_effect = self.fake_socket_error - - def fake_socket_fromfd(fd, family, type, proto=None): - return self.mock_socket - - func_patcher_socket_fromfd = mock.patch.object( - socket, "fromfd", - side_effect=fake_socket_fromfd) - func_patcher_socket_fromfd.start() - self.addCleanup(func_patcher_socket_fromfd.stop) - - def test_returns_false_by_default(self): - """ Should return False under normal circumstances. """ - test_fd = 23 - expected_result = False - result = daemon.daemon.is_socket(test_fd) - self.assertIs(result, expected_result) - - def test_returns_true_if_stdin_is_socket(self): - """ Should return True if `stdin` is a socket. """ - test_fd = 23 - getsockopt = self.mock_socket.getsockopt - getsockopt.side_effect = self.fake_socket_getsockopt_func - expected_result = True - result = daemon.daemon.is_socket(test_fd) - self.assertIs(result, expected_result) - - def test_returns_false_if_stdin_socket_raises_error(self): - """ Should return True if `stdin` is a socket and raises error. """ - test_fd = 23 - getsockopt = self.mock_socket.getsockopt - getsockopt.side_effect = socket.error( - object(), "Weird socket stuff") - expected_result = True - result = daemon.daemon.is_socket(test_fd) - self.assertIs(result, expected_result) - - -class is_process_started_by_superserver_TestCase(scaffold.TestCase): - """ Test cases for is_process_started_by_superserver function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(is_process_started_by_superserver_TestCase, self).setUp() - - def fake_is_socket(fd): - if sys.__stdin__.fileno() == fd: - result = self.fake_stdin_is_socket_func() - else: - result = False - return result - - self.fake_stdin_is_socket_func = (lambda: False) - - func_patcher_is_socket = mock.patch.object( - daemon.daemon, "is_socket", - side_effect=fake_is_socket) - func_patcher_is_socket.start() - self.addCleanup(func_patcher_is_socket.stop) - - def test_returns_false_by_default(self): - """ Should return False under normal circumstances. """ - expected_result = False - result = daemon.daemon.is_process_started_by_superserver() - self.assertIs(result, expected_result) - - def test_returns_true_if_stdin_is_socket(self): - """ Should return True if `stdin` is a socket. """ - self.fake_stdin_is_socket_func = (lambda: True) - expected_result = True - result = daemon.daemon.is_process_started_by_superserver() - self.assertIs(result, expected_result) - - -@mock.patch.object( - daemon.daemon, "is_process_started_by_superserver", - return_value=False) -@mock.patch.object( - daemon.daemon, "is_process_started_by_init", - return_value=False) -class is_detach_process_context_required_TestCase(scaffold.TestCase): - """ Test cases for is_detach_process_context_required function. """ - - def test_returns_true_by_default( - self, - mock_func_is_process_started_by_init, - mock_func_is_process_started_by_superserver): - """ Should return True under normal circumstances. 
""" - expected_result = True - result = daemon.daemon.is_detach_process_context_required() - self.assertIs(result, expected_result) - - def test_returns_false_if_started_by_init( - self, - mock_func_is_process_started_by_init, - mock_func_is_process_started_by_superserver): - """ Should return False if current process started by init. """ - mock_func_is_process_started_by_init.return_value = True - expected_result = False - result = daemon.daemon.is_detach_process_context_required() - self.assertIs(result, expected_result) - - def test_returns_true_if_started_by_superserver( - self, - mock_func_is_process_started_by_init, - mock_func_is_process_started_by_superserver): - """ Should return False if current process started by superserver. """ - mock_func_is_process_started_by_superserver.return_value = True - expected_result = False - result = daemon.daemon.is_detach_process_context_required() - self.assertIs(result, expected_result) - - -def setup_streams_fixtures(testcase): - """ Set up common test fixtures for standard streams. """ - testcase.stream_file_paths = dict( - stdin=tempfile.mktemp(), - stdout=tempfile.mktemp(), - stderr=tempfile.mktemp(), - ) - - testcase.stream_files_by_name = dict( - (name, FakeFileDescriptorStringIO()) - for name in ['stdin', 'stdout', 'stderr'] - ) - - testcase.stream_files_by_path = dict( - (testcase.stream_file_paths[name], - testcase.stream_files_by_name[name]) - for name in ['stdin', 'stdout', 'stderr'] - ) - - -@mock.patch.object(os, "dup2") -class redirect_stream_TestCase(scaffold.TestCase): - """ Test cases for redirect_stream function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(redirect_stream_TestCase, self).setUp() - - self.test_system_stream = FakeFileDescriptorStringIO() - self.test_target_stream = FakeFileDescriptorStringIO() - self.test_null_file = FakeFileDescriptorStringIO() - - def fake_os_open(path, flag, mode=None): - if path == os.devnull: - result = self.test_null_file.fileno() - else: - raise FileNotFoundError("No such file", path) - return result - - func_patcher_os_open = mock.patch.object( - os, "open", - side_effect=fake_os_open) - self.mock_func_os_open = func_patcher_os_open.start() - self.addCleanup(func_patcher_os_open.stop) - - def test_duplicates_target_file_descriptor( - self, mock_func_os_dup2): - """ Should duplicate file descriptor from target to system stream. """ - system_stream = self.test_system_stream - system_fileno = system_stream.fileno() - target_stream = self.test_target_stream - target_fileno = target_stream.fileno() - daemon.daemon.redirect_stream(system_stream, target_stream) - mock_func_os_dup2.assert_called_with(target_fileno, system_fileno) - - def test_duplicates_null_file_descriptor_by_default( - self, mock_func_os_dup2): - """ Should by default duplicate the null file to the system stream. """ - system_stream = self.test_system_stream - system_fileno = system_stream.fileno() - target_stream = None - null_path = os.devnull - null_flag = os.O_RDWR - null_file = self.test_null_file - null_fileno = null_file.fileno() - daemon.daemon.redirect_stream(system_stream, target_stream) - self.mock_func_os_open.assert_called_with(null_path, null_flag) - mock_func_os_dup2.assert_called_with(null_fileno, system_fileno) - - -class make_default_signal_map_TestCase(scaffold.TestCase): - """ Test cases for make_default_signal_map function. """ - - def setUp(self): - """ Set up test fixtures. 
""" - super(make_default_signal_map_TestCase, self).setUp() - - # Use whatever default string type this Python version needs. - signal_module_name = str('signal') - self.fake_signal_module = ModuleType(signal_module_name) - - fake_signal_names = [ - 'SIGHUP', - 'SIGCLD', - 'SIGSEGV', - 'SIGTSTP', - 'SIGTTIN', - 'SIGTTOU', - 'SIGTERM', - ] - for name in fake_signal_names: - setattr(self.fake_signal_module, name, object()) - - module_patcher_signal = mock.patch.object( - daemon.daemon, "signal", new=self.fake_signal_module) - module_patcher_signal.start() - self.addCleanup(module_patcher_signal.stop) - - default_signal_map_by_name = { - 'SIGTSTP': None, - 'SIGTTIN': None, - 'SIGTTOU': None, - 'SIGTERM': 'terminate', - } - self.default_signal_map = dict( - (getattr(self.fake_signal_module, name), target) - for (name, target) in default_signal_map_by_name.items()) - - def test_returns_constructed_signal_map(self): - """ Should return map per default. """ - expected_result = self.default_signal_map - result = daemon.daemon.make_default_signal_map() - self.assertEqual(expected_result, result) - - def test_returns_signal_map_with_only_ids_in_signal_module(self): - """ Should return map with only signals in the `signal` module. - - The `signal` module is documented to only define those - signals which exist on the running system. Therefore the - default map should not contain any signals which are not - defined in the `signal` module. - - """ - del(self.default_signal_map[self.fake_signal_module.SIGTTOU]) - del(self.fake_signal_module.SIGTTOU) - expected_result = self.default_signal_map - result = daemon.daemon.make_default_signal_map() - self.assertEqual(expected_result, result) - - -@mock.patch.object(daemon.daemon.signal, "signal") -class set_signal_handlers_TestCase(scaffold.TestCase): - """ Test cases for set_signal_handlers function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(set_signal_handlers_TestCase, self).setUp() - - self.signal_handler_map = { - signal.SIGQUIT: object(), - signal.SIGSEGV: object(), - signal.SIGINT: object(), - } - - def test_sets_signal_handler_for_each_item(self, mock_func_signal_signal): - """ Should set signal handler for each item in map. """ - signal_handler_map = self.signal_handler_map - expected_calls = [ - mock.call(signal_number, handler) - for (signal_number, handler) in signal_handler_map.items()] - daemon.daemon.set_signal_handlers(signal_handler_map) - self.assertEquals(expected_calls, mock_func_signal_signal.mock_calls) - - -@mock.patch.object(daemon.daemon.atexit, "register") -class register_atexit_function_TestCase(scaffold.TestCase): - """ Test cases for register_atexit_function function. """ - - def test_registers_function_for_atexit_processing( - self, mock_func_atexit_register): - """ Should register specified function for atexit processing. 
""" - func = object() - daemon.daemon.register_atexit_function(func) - mock_func_atexit_register.assert_called_with(func) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_metadata.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_metadata.py deleted file mode 100755 index 692753f4..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_metadata.py +++ /dev/null @@ -1,380 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test/test_metadata.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Unit test for ‘_metadata’ private module. - """ - -from __future__ import (absolute_import, unicode_literals) - -import sys -import errno -import re -try: - # Python 3 standard library. - import urllib.parse as urlparse -except ImportError: - # Python 2 standard library. - import urlparse -import functools -import collections -import json - -import pkg_resources -import mock -import testtools.helpers -import testtools.matchers -import testscenarios - -from . import scaffold -from .scaffold import (basestring, unicode) - -import daemon._metadata as metadata - - -class HasAttribute(testtools.matchers.Matcher): - """ A matcher to assert an object has a named attribute. """ - - def __init__(self, name): - self.attribute_name = name - - def match(self, instance): - """ Assert the object `instance` has an attribute named `name`. """ - result = None - if not testtools.helpers.safe_hasattr(instance, self.attribute_name): - result = AttributeNotFoundMismatch(instance, self.attribute_name) - return result - - -class AttributeNotFoundMismatch(testtools.matchers.Mismatch): - """ The specified instance does not have the named attribute. """ - - def __init__(self, instance, name): - self.instance = instance - self.attribute_name = name - - def describe(self): - """ Emit a text description of this mismatch. """ - text = ( - "{instance!r}" - " has no attribute named {name!r}").format( - instance=self.instance, name=self.attribute_name) - return text - - -class metadata_value_TestCase(scaffold.TestCaseWithScenarios): - """ Test cases for metadata module values. """ - - expected_str_attributes = set([ - 'version_installed', - 'author', - 'copyright', - 'license', - 'url', - ]) - - scenarios = [ - (name, {'attribute_name': name}) - for name in expected_str_attributes] - for (name, params) in scenarios: - if name == 'version_installed': - # No duck typing, this attribute might be None. - params['ducktype_attribute_name'] = NotImplemented - continue - # Expect an attribute of ‘str’ to test this value. - params['ducktype_attribute_name'] = 'isdigit' - - def test_module_has_attribute(self): - """ Metadata should have expected value as a module attribute. """ - self.assertThat( - metadata, HasAttribute(self.attribute_name)) - - def test_module_attribute_has_duck_type(self): - """ Metadata value should have expected duck-typing attribute. 
""" - if self.ducktype_attribute_name == NotImplemented: - self.skipTest("Can't assert this attribute's type") - instance = getattr(metadata, self.attribute_name) - self.assertThat( - instance, HasAttribute(self.ducktype_attribute_name)) - - -class parse_person_field_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘get_latest_version’ function. """ - - scenarios = [ - ('simple', { - 'test_person': "Foo Bar ", - 'expected_result': ("Foo Bar", "foo.bar@example.com"), - }), - ('empty', { - 'test_person': "", - 'expected_result': (None, None), - }), - ('none', { - 'test_person': None, - 'expected_error': TypeError, - }), - ('no email', { - 'test_person': "Foo Bar", - 'expected_result': ("Foo Bar", None), - }), - ] - - def test_returns_expected_result(self): - """ Should return expected result. """ - if hasattr(self, 'expected_error'): - self.assertRaises( - self.expected_error, - metadata.parse_person_field, self.test_person) - else: - result = metadata.parse_person_field(self.test_person) - self.assertEqual(self.expected_result, result) - - -class YearRange_TestCase(scaffold.TestCaseWithScenarios): - """ Test cases for ‘YearRange’ class. """ - - scenarios = [ - ('simple', { - 'begin_year': 1970, - 'end_year': 1979, - 'expected_text': "1970–1979", - }), - ('same year', { - 'begin_year': 1970, - 'end_year': 1970, - 'expected_text': "1970", - }), - ('no end year', { - 'begin_year': 1970, - 'end_year': None, - 'expected_text': "1970", - }), - ] - - def setUp(self): - """ Set up test fixtures. """ - super(YearRange_TestCase, self).setUp() - - self.test_instance = metadata.YearRange( - self.begin_year, self.end_year) - - def test_text_representation_as_expected(self): - """ Text representation should be as expected. """ - result = unicode(self.test_instance) - self.assertEqual(result, self.expected_text) - - -FakeYearRange = collections.namedtuple('FakeYearRange', ['begin', 'end']) - -@mock.patch.object(metadata, 'YearRange', new=FakeYearRange) -class make_year_range_TestCase(scaffold.TestCaseWithScenarios): - """ Test cases for ‘make_year_range’ function. """ - - scenarios = [ - ('simple', { - 'begin_year': "1970", - 'end_date': "1979-01-01", - 'expected_range': FakeYearRange(begin=1970, end=1979), - }), - ('same year', { - 'begin_year': "1970", - 'end_date': "1970-01-01", - 'expected_range': FakeYearRange(begin=1970, end=1970), - }), - ('no end year', { - 'begin_year': "1970", - 'end_date': None, - 'expected_range': FakeYearRange(begin=1970, end=None), - }), - ('end date UNKNOWN token', { - 'begin_year': "1970", - 'end_date': "UNKNOWN", - 'expected_range': FakeYearRange(begin=1970, end=None), - }), - ('end date FUTURE token', { - 'begin_year': "1970", - 'end_date': "FUTURE", - 'expected_range': FakeYearRange(begin=1970, end=None), - }), - ] - - def test_result_matches_expected_range(self): - """ Result should match expected YearRange. """ - result = metadata.make_year_range(self.begin_year, self.end_date) - self.assertEqual(result, self.expected_range) - - -class metadata_content_TestCase(scaffold.TestCase): - """ Test cases for content of metadata. """ - - def test_copyright_formatted_correctly(self): - """ Copyright statement should be formatted correctly. """ - regex_pattern = ( - "Copyright © " - "\d{4}" # four-digit year - "(?:–\d{4})?" 
# optional range dash and ending four-digit year - ) - regex_flags = re.UNICODE - self.assertThat( - metadata.copyright, - testtools.matchers.MatchesRegex(regex_pattern, regex_flags)) - - def test_author_formatted_correctly(self): - """ Author information should be formatted correctly. """ - regex_pattern = ( - ".+ " # name - "<[^>]+>" # email address, in angle brackets - ) - regex_flags = re.UNICODE - self.assertThat( - metadata.author, - testtools.matchers.MatchesRegex(regex_pattern, regex_flags)) - - def test_copyright_contains_author(self): - """ Copyright information should contain author information. """ - self.assertThat( - metadata.copyright, - testtools.matchers.Contains(metadata.author)) - - def test_url_parses_correctly(self): - """ Homepage URL should parse correctly. """ - result = urlparse.urlparse(metadata.url) - self.assertIsInstance( - result, urlparse.ParseResult, - "URL value {url!r} did not parse correctly".format( - url=metadata.url)) - - -try: - FileNotFoundError -except NameError: - # Python 2 uses IOError. - FileNotFoundError = functools.partial(IOError, errno.ENOENT) - -version_info_filename = "version_info.json" - -def fake_func_has_metadata(testcase, resource_name): - """ Fake the behaviour of ‘pkg_resources.Distribution.has_metadata’. """ - if ( - resource_name != testcase.expected_resource_name - or not hasattr(testcase, 'test_version_info')): - return False - return True - - -def fake_func_get_metadata(testcase, resource_name): - """ Fake the behaviour of ‘pkg_resources.Distribution.get_metadata’. """ - if not fake_func_has_metadata(testcase, resource_name): - error = FileNotFoundError(resource_name) - raise error - content = testcase.test_version_info - return content - - -def fake_func_get_distribution(testcase, distribution_name): - """ Fake the behaviour of ‘pkg_resources.get_distribution’. """ - if distribution_name != metadata.distribution_name: - raise pkg_resources.DistributionNotFound - if hasattr(testcase, 'get_distribution_error'): - raise testcase.get_distribution_error - mock_distribution = testcase.mock_distribution - mock_distribution.has_metadata.side_effect = functools.partial( - fake_func_has_metadata, testcase) - mock_distribution.get_metadata.side_effect = functools.partial( - fake_func_get_metadata, testcase) - return mock_distribution - - -@mock.patch.object(metadata, 'distribution_name', new="mock-dist") -class get_distribution_version_info_TestCase(scaffold.TestCaseWithScenarios): - """ Test cases for ‘get_distribution_version_info’ function. """ - - default_version_info = { - 'release_date': "UNKNOWN", - 'version': "UNKNOWN", - 'maintainer': "UNKNOWN", - } - - scenarios = [ - ('version 0.0', { - 'test_version_info': json.dumps({ - 'version': "0.0", - }), - 'expected_version_info': {'version': "0.0"}, - }), - ('version 1.0', { - 'test_version_info': json.dumps({ - 'version': "1.0", - }), - 'expected_version_info': {'version': "1.0"}, - }), - ('file lorem_ipsum.json', { - 'version_info_filename': "lorem_ipsum.json", - 'test_version_info': json.dumps({ - 'version': "1.0", - }), - 'expected_version_info': {'version': "1.0"}, - }), - ('not installed', { - 'get_distribution_error': pkg_resources.DistributionNotFound(), - 'expected_version_info': default_version_info, - }), - ('no version_info', { - 'expected_version_info': default_version_info, - }), - ] - - def setUp(self): - """ Set up test fixtures. 
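The fakes and scenarios above describe the lookup performed for version information: ask pkg_resources for the installed distribution, read a JSON metadata resource (version_info.json unless another filename is given), and fall back to 'UNKNOWN' defaults when either the distribution or the resource is missing. A sketch under those assumptions, with the distribution name passed in explicitly since the real module keeps it as a constant:

    import json

    import pkg_resources

    DEFAULT_VERSION_INFO = {
            'release_date': "UNKNOWN",
            'version': "UNKNOWN",
            'maintainer': "UNKNOWN",
            }

    def get_distribution_version_info_sketch(
            distribution_name, filename="version_info.json"):
        """ Read recorded version info for the named distribution. """
        version_info = dict(DEFAULT_VERSION_INFO)
        try:
            distribution = pkg_resources.get_distribution(distribution_name)
        except pkg_resources.DistributionNotFound:
            return version_info
        if distribution.has_metadata(filename):
            version_info = json.loads(distribution.get_metadata(filename))
        return version_info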
""" - super(get_distribution_version_info_TestCase, self).setUp() - - if hasattr(self, 'expected_resource_name'): - self.test_args = {'filename': self.expected_resource_name} - else: - self.test_args = {} - self.expected_resource_name = version_info_filename - - self.mock_distribution = mock.MagicMock() - func_patcher_get_distribution = mock.patch.object( - pkg_resources, 'get_distribution') - func_patcher_get_distribution.start() - self.addCleanup(func_patcher_get_distribution.stop) - pkg_resources.get_distribution.side_effect = functools.partial( - fake_func_get_distribution, self) - - def test_requests_installed_distribution(self): - """ The package distribution should be retrieved. """ - expected_distribution_name = metadata.distribution_name - version_info = metadata.get_distribution_version_info(**self.test_args) - pkg_resources.get_distribution.assert_called_with( - expected_distribution_name) - - def test_requests_specified_filename(self): - """ The specified metadata resource name should be requested. """ - if hasattr(self, 'get_distribution_error'): - self.skipTest("No access to distribution") - version_info = metadata.get_distribution_version_info(**self.test_args) - self.mock_distribution.has_metadata.assert_called_with( - self.expected_resource_name) - - def test_result_matches_expected_items(self): - """ The result should match the expected items. """ - version_info = metadata.get_distribution_version_info(**self.test_args) - self.assertEqual(self.expected_version_info, version_info) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_pidfile.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_pidfile.py deleted file mode 100755 index 9b636ec8..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_pidfile.py +++ /dev/null @@ -1,472 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test/test_pidfile.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Unit test for ‘pidfile’ module. - """ - -from __future__ import (absolute_import, unicode_literals) - -try: - # Python 3 standard library. - import builtins -except ImportError: - # Python 2 standard library. - import __builtin__ as builtins -import os -import itertools -import tempfile -import errno -import functools -try: - # Standard library of Python 2.7 and later. - from io import StringIO -except ImportError: - # Standard library of Python 2.6 and earlier. - from StringIO import StringIO - -import mock -import lockfile - -from . import scaffold - -import daemon.pidfile - - -class FakeFileDescriptorStringIO(StringIO, object): - """ A StringIO class that fakes a file descriptor. """ - - _fileno_generator = itertools.count() - - def __init__(self, *args, **kwargs): - self._fileno = next(self._fileno_generator) - super(FakeFileDescriptorStringIO, self).__init__(*args, **kwargs) - - def fileno(self): - return self._fileno - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - pass - - -try: - FileNotFoundError - PermissionError -except NameError: - # Python 2 uses IOError. 
- FileNotFoundError = functools.partial(IOError, errno.ENOENT) - PermissionError = functools.partial(IOError, errno.EPERM) - - -def make_pidlockfile_scenarios(): - """ Make a collection of scenarios for testing `PIDLockFile` instances. - - :return: A collection of scenarios for tests involving - `PIDLockfFile` instances. - - The collection is a mapping from scenario name to a dictionary of - scenario attributes. - - """ - - fake_current_pid = 235 - fake_other_pid = 8642 - fake_pidfile_path = tempfile.mktemp() - - fake_pidfile_empty = FakeFileDescriptorStringIO() - fake_pidfile_current_pid = FakeFileDescriptorStringIO( - "{pid:d}\n".format(pid=fake_current_pid)) - fake_pidfile_other_pid = FakeFileDescriptorStringIO( - "{pid:d}\n".format(pid=fake_other_pid)) - fake_pidfile_bogus = FakeFileDescriptorStringIO( - "b0gUs") - - scenarios = { - 'simple': {}, - 'not-exist': { - 'open_func_name': 'fake_open_nonexist', - 'os_open_func_name': 'fake_os_open_nonexist', - }, - 'not-exist-write-denied': { - 'open_func_name': 'fake_open_nonexist', - 'os_open_func_name': 'fake_os_open_nonexist', - }, - 'not-exist-write-busy': { - 'open_func_name': 'fake_open_nonexist', - 'os_open_func_name': 'fake_os_open_nonexist', - }, - 'exist-read-denied': { - 'open_func_name': 'fake_open_read_denied', - 'os_open_func_name': 'fake_os_open_read_denied', - }, - 'exist-locked-read-denied': { - 'locking_pid': fake_other_pid, - 'open_func_name': 'fake_open_read_denied', - 'os_open_func_name': 'fake_os_open_read_denied', - }, - 'exist-empty': {}, - 'exist-invalid': { - 'pidfile': fake_pidfile_bogus, - }, - 'exist-current-pid': { - 'pidfile': fake_pidfile_current_pid, - 'pidfile_pid': fake_current_pid, - }, - 'exist-current-pid-locked': { - 'pidfile': fake_pidfile_current_pid, - 'pidfile_pid': fake_current_pid, - 'locking_pid': fake_current_pid, - }, - 'exist-other-pid': { - 'pidfile': fake_pidfile_other_pid, - 'pidfile_pid': fake_other_pid, - }, - 'exist-other-pid-locked': { - 'pidfile': fake_pidfile_other_pid, - 'pidfile_pid': fake_other_pid, - 'locking_pid': fake_other_pid, - }, - } - - for scenario in scenarios.values(): - scenario['pid'] = fake_current_pid - scenario['pidfile_path'] = fake_pidfile_path - if 'pidfile' not in scenario: - scenario['pidfile'] = fake_pidfile_empty - if 'pidfile_pid' not in scenario: - scenario['pidfile_pid'] = None - if 'locking_pid' not in scenario: - scenario['locking_pid'] = None - if 'open_func_name' not in scenario: - scenario['open_func_name'] = 'fake_open_okay' - if 'os_open_func_name' not in scenario: - scenario['os_open_func_name'] = 'fake_os_open_okay' - - return scenarios - - -def setup_pidfile_fixtures(testcase): - """ Set up common fixtures for PID file test cases. - - :param testcase: A `TestCase` instance to decorate. - - Decorate the `testcase` with attributes to be fixtures for tests - involving `PIDLockFile` instances. 
- - """ - scenarios = make_pidlockfile_scenarios() - testcase.pidlockfile_scenarios = scenarios - - def get_scenario_option(testcase, key, default=None): - value = default - try: - value = testcase.scenario[key] - except (NameError, TypeError, AttributeError, KeyError): - pass - return value - - func_patcher_os_getpid = mock.patch.object( - os, "getpid", - return_value=scenarios['simple']['pid']) - func_patcher_os_getpid.start() - testcase.addCleanup(func_patcher_os_getpid.stop) - - def make_fake_open_funcs(testcase): - - def fake_open_nonexist(filename, mode, buffering): - if mode.startswith('r'): - error = FileNotFoundError( - "No such file {filename!r}".format( - filename=filename)) - raise error - else: - result = testcase.scenario['pidfile'] - return result - - def fake_open_read_denied(filename, mode, buffering): - if mode.startswith('r'): - error = PermissionError( - "Read denied on {filename!r}".format( - filename=filename)) - raise error - else: - result = testcase.scenario['pidfile'] - return result - - def fake_open_okay(filename, mode, buffering): - result = testcase.scenario['pidfile'] - return result - - def fake_os_open_nonexist(filename, flags, mode): - if (flags & os.O_CREAT): - result = testcase.scenario['pidfile'].fileno() - else: - error = FileNotFoundError( - "No such file {filename!r}".format( - filename=filename)) - raise error - return result - - def fake_os_open_read_denied(filename, flags, mode): - if (flags & os.O_CREAT): - result = testcase.scenario['pidfile'].fileno() - else: - error = PermissionError( - "Read denied on {filename!r}".format( - filename=filename)) - raise error - return result - - def fake_os_open_okay(filename, flags, mode): - result = testcase.scenario['pidfile'].fileno() - return result - - funcs = dict( - (name, obj) for (name, obj) in vars().items() - if callable(obj)) - - return funcs - - testcase.fake_pidfile_open_funcs = make_fake_open_funcs(testcase) - - def fake_open(filename, mode='rt', buffering=None): - scenario_path = get_scenario_option(testcase, 'pidfile_path') - if filename == scenario_path: - func_name = testcase.scenario['open_func_name'] - fake_open_func = testcase.fake_pidfile_open_funcs[func_name] - result = fake_open_func(filename, mode, buffering) - else: - result = FakeFileDescriptorStringIO() - return result - - mock_open = mock.mock_open() - mock_open.side_effect = fake_open - - func_patcher_builtin_open = mock.patch.object( - builtins, "open", - new=mock_open) - func_patcher_builtin_open.start() - testcase.addCleanup(func_patcher_builtin_open.stop) - - def fake_os_open(filename, flags, mode=None): - scenario_path = get_scenario_option(testcase, 'pidfile_path') - if filename == scenario_path: - func_name = testcase.scenario['os_open_func_name'] - fake_os_open_func = testcase.fake_pidfile_open_funcs[func_name] - result = fake_os_open_func(filename, flags, mode) - else: - result = FakeFileDescriptorStringIO().fileno() - return result - - mock_os_open = mock.MagicMock(side_effect=fake_os_open) - - func_patcher_os_open = mock.patch.object( - os, "open", - new=mock_os_open) - func_patcher_os_open.start() - testcase.addCleanup(func_patcher_os_open.stop) - - def fake_os_fdopen(fd, mode='rt', buffering=None): - scenario_pidfile = get_scenario_option( - testcase, 'pidfile', FakeFileDescriptorStringIO()) - if fd == testcase.scenario['pidfile'].fileno(): - result = testcase.scenario['pidfile'] - else: - raise OSError(errno.EBADF, "Bad file descriptor") - return result - - mock_os_fdopen = 
mock.MagicMock(side_effect=fake_os_fdopen) - - func_patcher_os_fdopen = mock.patch.object( - os, "fdopen", - new=mock_os_fdopen) - func_patcher_os_fdopen.start() - testcase.addCleanup(func_patcher_os_fdopen.stop) - - -def make_lockfile_method_fakes(scenario): - """ Make common fake methods for lockfile class. - - :param scenario: A scenario for testing with PIDLockFile. - :return: A mapping from normal function name to the corresponding - fake function. - - Each fake function behaves appropriately for the specified `scenario`. - - """ - - def fake_func_read_pid(): - return scenario['pidfile_pid'] - def fake_func_is_locked(): - return (scenario['locking_pid'] is not None) - def fake_func_i_am_locking(): - return ( - scenario['locking_pid'] == scenario['pid']) - def fake_func_acquire(timeout=None): - if scenario['locking_pid'] is not None: - raise lockfile.AlreadyLocked() - scenario['locking_pid'] = scenario['pid'] - def fake_func_release(): - if scenario['locking_pid'] is None: - raise lockfile.NotLocked() - if scenario['locking_pid'] != scenario['pid']: - raise lockfile.NotMyLock() - scenario['locking_pid'] = None - def fake_func_break_lock(): - scenario['locking_pid'] = None - - fake_methods = dict( - ( - func_name.replace('fake_func_', ''), - mock.MagicMock(side_effect=fake_func)) - for (func_name, fake_func) in vars().items() - if func_name.startswith('fake_func_')) - - return fake_methods - - -def apply_lockfile_method_mocks(mock_lockfile, testcase, scenario): - """ Apply common fake methods to mock lockfile class. - - :param mock_lockfile: An object providing the `LockFile` interface. - :param testcase: The `TestCase` instance providing the context for - the patch. - :param scenario: The `PIDLockFile` test scenario to use. - - Mock the `LockFile` methods of `mock_lockfile`, by applying fake - methods customised for `scenario`. The mock is does by a patch - within the context of `testcase`. - - """ - fake_methods = dict( - (func_name, fake_func) - for (func_name, fake_func) in - make_lockfile_method_fakes(scenario).items() - if func_name not in ['read_pid']) - - for (func_name, fake_func) in fake_methods.items(): - func_patcher = mock.patch.object( - mock_lockfile, func_name, - new=fake_func) - func_patcher.start() - testcase.addCleanup(func_patcher.stop) - - -def setup_pidlockfile_fixtures(testcase, scenario_name=None): - """ Set up common fixtures for PIDLockFile test cases. - - :param testcase: The `TestCase` instance to decorate. - :param scenario_name: The name of the `PIDLockFile` scenario to use. - - Decorate the `testcase` with attributes that are fixtures for test - cases involving `PIDLockFile` instances.` - - """ - - setup_pidfile_fixtures(testcase) - - for func_name in [ - 'write_pid_to_pidfile', - 'remove_existing_pidfile', - ]: - func_patcher = mock.patch.object(lockfile.pidlockfile, func_name) - func_patcher.start() - testcase.addCleanup(func_patcher.stop) - - -class TimeoutPIDLockFile_TestCase(scaffold.TestCase): - """ Test cases for ‘TimeoutPIDLockFile’ class. """ - - def setUp(self): - """ Set up test fixtures. 
""" - super(TimeoutPIDLockFile_TestCase, self).setUp() - - pidlockfile_scenarios = make_pidlockfile_scenarios() - self.pidlockfile_scenario = pidlockfile_scenarios['simple'] - pidfile_path = self.pidlockfile_scenario['pidfile_path'] - - for func_name in ['__init__', 'acquire']: - func_patcher = mock.patch.object( - lockfile.pidlockfile.PIDLockFile, func_name) - func_patcher.start() - self.addCleanup(func_patcher.stop) - - self.scenario = { - 'pidfile_path': self.pidlockfile_scenario['pidfile_path'], - 'acquire_timeout': self.getUniqueInteger(), - } - - self.test_kwargs = dict( - path=self.scenario['pidfile_path'], - acquire_timeout=self.scenario['acquire_timeout'], - ) - self.test_instance = daemon.pidfile.TimeoutPIDLockFile( - **self.test_kwargs) - - def test_inherits_from_pidlockfile(self): - """ Should inherit from PIDLockFile. """ - instance = self.test_instance - self.assertIsInstance(instance, lockfile.pidlockfile.PIDLockFile) - - def test_init_has_expected_signature(self): - """ Should have expected signature for ‘__init__’. """ - def test_func(self, path, acquire_timeout=None, *args, **kwargs): pass - test_func.__name__ = str('__init__') - self.assertFunctionSignatureMatch( - test_func, - daemon.pidfile.TimeoutPIDLockFile.__init__) - - def test_has_specified_acquire_timeout(self): - """ Should have specified ‘acquire_timeout’ value. """ - instance = self.test_instance - expected_timeout = self.test_kwargs['acquire_timeout'] - self.assertEqual(expected_timeout, instance.acquire_timeout) - - @mock.patch.object( - lockfile.pidlockfile.PIDLockFile, "__init__", - autospec=True) - def test_calls_superclass_init(self, mock_init): - """ Should call the superclass ‘__init__’. """ - expected_path = self.test_kwargs['path'] - instance = daemon.pidfile.TimeoutPIDLockFile(**self.test_kwargs) - mock_init.assert_called_with(instance, expected_path) - - @mock.patch.object( - lockfile.pidlockfile.PIDLockFile, "acquire", - autospec=True) - def test_acquire_uses_specified_timeout(self, mock_func_acquire): - """ Should call the superclass ‘acquire’ with specified timeout. """ - instance = self.test_instance - test_timeout = self.getUniqueInteger() - expected_timeout = test_timeout - instance.acquire(test_timeout) - mock_func_acquire.assert_called_with(instance, expected_timeout) - - @mock.patch.object( - lockfile.pidlockfile.PIDLockFile, "acquire", - autospec=True) - def test_acquire_uses_stored_timeout_by_default(self, mock_func_acquire): - """ Should call superclass ‘acquire’ with stored timeout by default. """ - instance = self.test_instance - test_timeout = self.test_kwargs['acquire_timeout'] - expected_timeout = test_timeout - instance.acquire() - mock_func_acquire.assert_called_with(instance, expected_timeout) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_runner.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_runner.py deleted file mode 100755 index 4c0c714a..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_runner.py +++ /dev/null @@ -1,675 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test/test_runner.py -# Part of ‘python-daemon’, an implementation of PEP 3143. 
-# -# Copyright © 2009–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the Apache License, version 2.0 as published by the -# Apache Software Foundation. -# No warranty expressed or implied. See the file ‘LICENSE.ASF-2’ for details. - -""" Unit test for ‘runner’ module. - """ - -from __future__ import (absolute_import, unicode_literals) - -try: - # Python 3 standard library. - import builtins -except ImportError: - # Python 2 standard library. - import __builtin__ as builtins -import os -import os.path -import sys -import tempfile -import errno -import signal -import functools - -import lockfile -import mock -import testtools - -from . import scaffold -from .scaffold import (basestring, unicode) -from .test_pidfile import ( - FakeFileDescriptorStringIO, - setup_pidfile_fixtures, - make_pidlockfile_scenarios, - apply_lockfile_method_mocks, - ) -from .test_daemon import ( - setup_streams_fixtures, - ) - -import daemon.daemon -import daemon.runner -import daemon.pidfile - - -class ModuleExceptions_TestCase(scaffold.Exception_TestCase): - """ Test cases for module exception classes. """ - - scenarios = scaffold.make_exception_scenarios([ - ('daemon.runner.DaemonRunnerError', dict( - exc_type = daemon.runner.DaemonRunnerError, - min_args = 1, - types = [Exception], - )), - ('daemon.runner.DaemonRunnerInvalidActionError', dict( - exc_type = daemon.runner.DaemonRunnerInvalidActionError, - min_args = 1, - types = [daemon.runner.DaemonRunnerError, ValueError], - )), - ('daemon.runner.DaemonRunnerStartFailureError', dict( - exc_type = daemon.runner.DaemonRunnerStartFailureError, - min_args = 1, - types = [daemon.runner.DaemonRunnerError, RuntimeError], - )), - ('daemon.runner.DaemonRunnerStopFailureError', dict( - exc_type = daemon.runner.DaemonRunnerStopFailureError, - min_args = 1, - types = [daemon.runner.DaemonRunnerError, RuntimeError], - )), - ]) - - -def make_runner_scenarios(): - """ Make a collection of scenarios for testing `DaemonRunner` instances. - - :return: A collection of scenarios for tests involving - `DaemonRunner` instances. - - The collection is a mapping from scenario name to a dictionary of - scenario attributes. - - """ - - pidlockfile_scenarios = make_pidlockfile_scenarios() - - scenarios = { - 'simple': { - 'pidlockfile_scenario_name': 'simple', - }, - 'pidfile-locked': { - 'pidlockfile_scenario_name': 'exist-other-pid-locked', - }, - } - - for scenario in scenarios.values(): - if 'pidlockfile_scenario_name' in scenario: - pidlockfile_scenario = pidlockfile_scenarios.pop( - scenario['pidlockfile_scenario_name']) - scenario['pid'] = pidlockfile_scenario['pid'] - scenario['pidfile_path'] = pidlockfile_scenario['pidfile_path'] - scenario['pidfile_timeout'] = 23 - scenario['pidlockfile_scenario'] = pidlockfile_scenario - - return scenarios - - -def set_runner_scenario(testcase, scenario_name): - """ Set the DaemonRunner test scenario for the test case. - - :param testcase: The `TestCase` instance to decorate. - :param scenario_name: The name of the scenario to use. - - Set the `DaemonRunner` test scenario name and decorate the - `testcase` with the corresponding scenario fixtures. - - """ - scenarios = testcase.runner_scenarios - testcase.scenario = scenarios[scenario_name] - apply_lockfile_method_mocks( - testcase.mock_runner_lockfile, - testcase, - testcase.scenario['pidlockfile_scenario']) - - -def setup_runner_fixtures(testcase): - """ Set up common fixtures for `DaemonRunner` test cases. 
- - :param testcase: A `TestCase` instance to decorate. - - Decorate the `testcase` with attributes to be fixtures for tests - involving `DaemonRunner` instances. - - """ - setup_pidfile_fixtures(testcase) - setup_streams_fixtures(testcase) - - testcase.runner_scenarios = make_runner_scenarios() - - patcher_stderr = mock.patch.object( - sys, "stderr", - new=FakeFileDescriptorStringIO()) - testcase.fake_stderr = patcher_stderr.start() - testcase.addCleanup(patcher_stderr.stop) - - simple_scenario = testcase.runner_scenarios['simple'] - - testcase.mock_runner_lockfile = mock.MagicMock( - spec=daemon.pidfile.TimeoutPIDLockFile) - apply_lockfile_method_mocks( - testcase.mock_runner_lockfile, - testcase, - simple_scenario['pidlockfile_scenario']) - testcase.mock_runner_lockfile.path = simple_scenario['pidfile_path'] - - patcher_lockfile_class = mock.patch.object( - daemon.pidfile, "TimeoutPIDLockFile", - return_value=testcase.mock_runner_lockfile) - patcher_lockfile_class.start() - testcase.addCleanup(patcher_lockfile_class.stop) - - class TestApp(object): - - def __init__(self): - self.stdin_path = testcase.stream_file_paths['stdin'] - self.stdout_path = testcase.stream_file_paths['stdout'] - self.stderr_path = testcase.stream_file_paths['stderr'] - self.pidfile_path = simple_scenario['pidfile_path'] - self.pidfile_timeout = simple_scenario['pidfile_timeout'] - - run = mock.MagicMock(name="TestApp.run") - - testcase.TestApp = TestApp - - patcher_runner_daemoncontext = mock.patch.object( - daemon.runner, "DaemonContext", autospec=True) - patcher_runner_daemoncontext.start() - testcase.addCleanup(patcher_runner_daemoncontext.stop) - - testcase.test_app = testcase.TestApp() - - testcase.test_program_name = "bazprog" - testcase.test_program_path = os.path.join( - "/foo/bar", testcase.test_program_name) - testcase.valid_argv_params = { - 'start': [testcase.test_program_path, 'start'], - 'stop': [testcase.test_program_path, 'stop'], - 'restart': [testcase.test_program_path, 'restart'], - } - - def fake_open(filename, mode=None, buffering=None): - if filename in testcase.stream_files_by_path: - result = testcase.stream_files_by_path[filename] - else: - result = FakeFileDescriptorStringIO() - result.mode = mode - result.buffering = buffering - return result - - mock_open = mock.mock_open() - mock_open.side_effect = fake_open - - func_patcher_builtin_open = mock.patch.object( - builtins, "open", - new=mock_open) - func_patcher_builtin_open.start() - testcase.addCleanup(func_patcher_builtin_open.stop) - - func_patcher_os_kill = mock.patch.object(os, "kill") - func_patcher_os_kill.start() - testcase.addCleanup(func_patcher_os_kill.stop) - - patcher_sys_argv = mock.patch.object( - sys, "argv", - new=testcase.valid_argv_params['start']) - patcher_sys_argv.start() - testcase.addCleanup(patcher_sys_argv.stop) - - testcase.test_instance = daemon.runner.DaemonRunner(testcase.test_app) - - testcase.scenario = NotImplemented - - -class DaemonRunner_BaseTestCase(scaffold.TestCase): - """ Base class for DaemonRunner test case classes. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonRunner_BaseTestCase, self).setUp() - - setup_runner_fixtures(self) - set_runner_scenario(self, 'simple') - - -class DaemonRunner_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner class. """ - - def setUp(self): - """ Set up test fixtures. 
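The TestApp fixture above mirrors the shape DaemonRunner expects from an application object: stream paths, a PID file path and timeout, and a run() method, with the requested action ('start', 'stop' or 'restart') taken from the command line. A hedged usage sketch; ExampleApp and its file paths are invented for illustration:

    import time

    import daemon.runner

    class ExampleApp(object):
        """ Minimal application object in the shape DaemonRunner expects. """
        stdin_path = "/dev/null"
        stdout_path = "/tmp/example-app.log"    # hypothetical paths
        stderr_path = "/tmp/example-app.log"
        pidfile_path = "/tmp/example-app.pid"
        pidfile_timeout = 5

        def run(self):
            while True:
                time.sleep(10)

    if __name__ == "__main__":
        runner = daemon.runner.DaemonRunner(ExampleApp())
        runner.do_action()    # dispatches on sys.argv: start / stop / restart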
""" - super(DaemonRunner_TestCase, self).setUp() - - func_patcher_parse_args = mock.patch.object( - daemon.runner.DaemonRunner, "parse_args") - func_patcher_parse_args.start() - self.addCleanup(func_patcher_parse_args.stop) - - # Create a new instance now with our custom patches. - self.test_instance = daemon.runner.DaemonRunner(self.test_app) - - def test_instantiate(self): - """ New instance of DaemonRunner should be created. """ - self.assertIsInstance(self.test_instance, daemon.runner.DaemonRunner) - - def test_parses_commandline_args(self): - """ Should parse commandline arguments. """ - self.test_instance.parse_args.assert_called_with() - - def test_has_specified_app(self): - """ Should have specified application object. """ - self.assertIs(self.test_app, self.test_instance.app) - - def test_sets_pidfile_none_when_pidfile_path_is_none(self): - """ Should set ‘pidfile’ to ‘None’ when ‘pidfile_path’ is ‘None’. """ - pidfile_path = None - self.test_app.pidfile_path = pidfile_path - expected_pidfile = None - instance = daemon.runner.DaemonRunner(self.test_app) - self.assertIs(expected_pidfile, instance.pidfile) - - def test_error_when_pidfile_path_not_string(self): - """ Should raise ValueError when PID file path not a string. """ - pidfile_path = object() - self.test_app.pidfile_path = pidfile_path - expected_error = ValueError - self.assertRaises( - expected_error, - daemon.runner.DaemonRunner, self.test_app) - - def test_error_when_pidfile_path_not_absolute(self): - """ Should raise ValueError when PID file path not absolute. """ - pidfile_path = "foo/bar.pid" - self.test_app.pidfile_path = pidfile_path - expected_error = ValueError - self.assertRaises( - expected_error, - daemon.runner.DaemonRunner, self.test_app) - - def test_creates_lock_with_specified_parameters(self): - """ Should create a TimeoutPIDLockFile with specified params. """ - pidfile_path = self.scenario['pidfile_path'] - pidfile_timeout = self.scenario['pidfile_timeout'] - daemon.pidfile.TimeoutPIDLockFile.assert_called_with( - pidfile_path, pidfile_timeout) - - def test_has_created_pidfile(self): - """ Should have new PID lock file as `pidfile` attribute. """ - expected_pidfile = self.mock_runner_lockfile - instance = self.test_instance - self.assertIs( - expected_pidfile, instance.pidfile) - - def test_daemon_context_has_created_pidfile(self): - """ DaemonContext component should have new PID lock file. """ - expected_pidfile = self.mock_runner_lockfile - daemon_context = self.test_instance.daemon_context - self.assertIs( - expected_pidfile, daemon_context.pidfile) - - def test_daemon_context_has_specified_stdin_stream(self): - """ DaemonContext component should have specified stdin file. """ - test_app = self.test_app - expected_file = self.stream_files_by_name['stdin'] - daemon_context = self.test_instance.daemon_context - self.assertEqual(expected_file, daemon_context.stdin) - - def test_daemon_context_has_stdin_in_read_mode(self): - """ DaemonContext component should open stdin file for read. """ - expected_mode = 'rt' - daemon_context = self.test_instance.daemon_context - self.assertIn(expected_mode, daemon_context.stdin.mode) - - def test_daemon_context_has_specified_stdout_stream(self): - """ DaemonContext component should have specified stdout file. 
""" - test_app = self.test_app - expected_file = self.stream_files_by_name['stdout'] - daemon_context = self.test_instance.daemon_context - self.assertEqual(expected_file, daemon_context.stdout) - - def test_daemon_context_has_stdout_in_append_mode(self): - """ DaemonContext component should open stdout file for append. """ - expected_mode = 'w+t' - daemon_context = self.test_instance.daemon_context - self.assertIn(expected_mode, daemon_context.stdout.mode) - - def test_daemon_context_has_specified_stderr_stream(self): - """ DaemonContext component should have specified stderr file. """ - test_app = self.test_app - expected_file = self.stream_files_by_name['stderr'] - daemon_context = self.test_instance.daemon_context - self.assertEqual(expected_file, daemon_context.stderr) - - def test_daemon_context_has_stderr_in_append_mode(self): - """ DaemonContext component should open stderr file for append. """ - expected_mode = 'w+t' - daemon_context = self.test_instance.daemon_context - self.assertIn(expected_mode, daemon_context.stderr.mode) - - def test_daemon_context_has_stderr_with_no_buffering(self): - """ DaemonContext component should open stderr file unbuffered. """ - expected_buffering = 0 - daemon_context = self.test_instance.daemon_context - self.assertEqual( - expected_buffering, daemon_context.stderr.buffering) - - -class DaemonRunner_usage_exit_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner.usage_exit method. """ - - def test_raises_system_exit(self): - """ Should raise SystemExit exception. """ - instance = self.test_instance - argv = [self.test_program_path] - self.assertRaises( - SystemExit, - instance._usage_exit, argv) - - def test_message_follows_conventional_format(self): - """ Should emit a conventional usage message. """ - instance = self.test_instance - argv = [self.test_program_path] - expected_stderr_output = """\ - usage: {progname} ... - """.format( - progname=self.test_program_name) - self.assertRaises( - SystemExit, - instance._usage_exit, argv) - self.assertOutputCheckerMatch( - expected_stderr_output, self.fake_stderr.getvalue()) - - -class DaemonRunner_parse_args_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner.parse_args method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonRunner_parse_args_TestCase, self).setUp() - - func_patcher_usage_exit = mock.patch.object( - daemon.runner.DaemonRunner, "_usage_exit", - side_effect=NotImplementedError) - func_patcher_usage_exit.start() - self.addCleanup(func_patcher_usage_exit.stop) - - def test_emits_usage_message_if_insufficient_args(self): - """ Should emit a usage message and exit if too few arguments. """ - instance = self.test_instance - argv = [self.test_program_path] - exc = self.assertRaises( - NotImplementedError, - instance.parse_args, argv) - daemon.runner.DaemonRunner._usage_exit.assert_called_with(argv) - - def test_emits_usage_message_if_unknown_action_arg(self): - """ Should emit a usage message and exit if unknown action. """ - instance = self.test_instance - progname = self.test_program_name - argv = [self.test_program_path, 'bogus'] - exc = self.assertRaises( - NotImplementedError, - instance.parse_args, argv) - daemon.runner.DaemonRunner._usage_exit.assert_called_with(argv) - - def test_should_parse_system_argv_by_default(self): - """ Should parse sys.argv by default. 
""" - instance = self.test_instance - expected_action = 'start' - argv = self.valid_argv_params['start'] - with mock.patch.object(sys, "argv", new=argv): - instance.parse_args() - self.assertEqual(expected_action, instance.action) - - def test_sets_action_from_first_argument(self): - """ Should set action from first commandline argument. """ - instance = self.test_instance - for name, argv in self.valid_argv_params.items(): - expected_action = name - instance.parse_args(argv) - self.assertEqual(expected_action, instance.action) - - -try: - ProcessLookupError -except NameError: - # Python 2 uses OSError. - ProcessLookupError = functools.partial(OSError, errno.ESRCH) - -class DaemonRunner_do_action_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner.do_action method. """ - - def test_raises_error_if_unknown_action(self): - """ Should emit a usage message and exit if action is unknown. """ - instance = self.test_instance - instance.action = 'bogus' - expected_error = daemon.runner.DaemonRunnerInvalidActionError - self.assertRaises( - expected_error, - instance.do_action) - - -class DaemonRunner_do_action_start_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner.do_action method, action 'start'. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonRunner_do_action_start_TestCase, self).setUp() - - self.test_instance.action = 'start' - - def test_raises_error_if_pidfile_locked(self): - """ Should raise error if PID file is locked. """ - - instance = self.test_instance - instance.daemon_context.open.side_effect = lockfile.AlreadyLocked - pidfile_path = self.scenario['pidfile_path'] - expected_error = daemon.runner.DaemonRunnerStartFailureError - expected_message_content = pidfile_path - exc = self.assertRaises( - expected_error, - instance.do_action) - self.assertIn(expected_message_content, unicode(exc)) - - def test_breaks_lock_if_no_such_process(self): - """ Should request breaking lock if PID file process is not running. """ - set_runner_scenario(self, 'pidfile-locked') - instance = self.test_instance - self.mock_runner_lockfile.read_pid.return_value = ( - self.scenario['pidlockfile_scenario']['pidfile_pid']) - pidfile_path = self.scenario['pidfile_path'] - test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid'] - expected_signal = signal.SIG_DFL - test_error = ProcessLookupError("Not running") - os.kill.side_effect = test_error - instance.do_action() - os.kill.assert_called_with(test_pid, expected_signal) - self.mock_runner_lockfile.break_lock.assert_called_with() - - def test_requests_daemon_context_open(self): - """ Should request the daemon context to open. """ - instance = self.test_instance - instance.do_action() - instance.daemon_context.open.assert_called_with() - - def test_emits_start_message_to_stderr(self): - """ Should emit start message to stderr. """ - instance = self.test_instance - expected_stderr = """\ - started with pid {pid:d} - """.format( - pid=self.scenario['pid']) - instance.do_action() - self.assertOutputCheckerMatch( - expected_stderr, self.fake_stderr.getvalue()) - - def test_requests_app_run(self): - """ Should request the application to run. """ - instance = self.test_instance - instance.do_action() - self.test_app.run.assert_called_with() - - -class DaemonRunner_do_action_stop_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner.do_action method, action 'stop'. """ - - def setUp(self): - """ Set up test fixtures. 
""" - super(DaemonRunner_do_action_stop_TestCase, self).setUp() - - set_runner_scenario(self, 'pidfile-locked') - - self.test_instance.action = 'stop' - - self.mock_runner_lockfile.is_locked.return_value = True - self.mock_runner_lockfile.i_am_locking.return_value = False - self.mock_runner_lockfile.read_pid.return_value = ( - self.scenario['pidlockfile_scenario']['pidfile_pid']) - - def test_raises_error_if_pidfile_not_locked(self): - """ Should raise error if PID file is not locked. """ - set_runner_scenario(self, 'simple') - instance = self.test_instance - self.mock_runner_lockfile.is_locked.return_value = False - self.mock_runner_lockfile.i_am_locking.return_value = False - self.mock_runner_lockfile.read_pid.return_value = ( - self.scenario['pidlockfile_scenario']['pidfile_pid']) - pidfile_path = self.scenario['pidfile_path'] - expected_error = daemon.runner.DaemonRunnerStopFailureError - expected_message_content = pidfile_path - exc = self.assertRaises( - expected_error, - instance.do_action) - self.assertIn(expected_message_content, unicode(exc)) - - def test_breaks_lock_if_pidfile_stale(self): - """ Should break lock if PID file is stale. """ - instance = self.test_instance - pidfile_path = self.scenario['pidfile_path'] - test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid'] - expected_signal = signal.SIG_DFL - test_error = OSError(errno.ESRCH, "Not running") - os.kill.side_effect = test_error - instance.do_action() - self.mock_runner_lockfile.break_lock.assert_called_with() - - def test_sends_terminate_signal_to_process_from_pidfile(self): - """ Should send SIGTERM to the daemon process. """ - instance = self.test_instance - test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid'] - expected_signal = signal.SIGTERM - instance.do_action() - os.kill.assert_called_with(test_pid, expected_signal) - - def test_raises_error_if_cannot_send_signal_to_process(self): - """ Should raise error if cannot send signal to daemon process. """ - instance = self.test_instance - test_pid = self.scenario['pidlockfile_scenario']['pidfile_pid'] - pidfile_path = self.scenario['pidfile_path'] - test_error = OSError(errno.EPERM, "Nice try") - os.kill.side_effect = test_error - expected_error = daemon.runner.DaemonRunnerStopFailureError - expected_message_content = unicode(test_pid) - exc = self.assertRaises( - expected_error, - instance.do_action) - self.assertIn(expected_message_content, unicode(exc)) - - -@mock.patch.object(daemon.runner.DaemonRunner, "_start") -@mock.patch.object(daemon.runner.DaemonRunner, "_stop") -class DaemonRunner_do_action_restart_TestCase(DaemonRunner_BaseTestCase): - """ Test cases for DaemonRunner.do_action method, action 'restart'. """ - - def setUp(self): - """ Set up test fixtures. """ - super(DaemonRunner_do_action_restart_TestCase, self).setUp() - - set_runner_scenario(self, 'pidfile-locked') - - self.test_instance.action = 'restart' - - def test_requests_stop_then_start( - self, - mock_func_daemonrunner_start, mock_func_daemonrunner_stop): - """ Should request stop, then start. """ - instance = self.test_instance - instance.do_action() - mock_func_daemonrunner_start.assert_called_with() - mock_func_daemonrunner_stop.assert_called_with() - - -@mock.patch.object(sys, "stderr") -class emit_message_TestCase(scaffold.TestCase): - """ Test cases for ‘emit_message’ function. """ - - def test_writes_specified_message_to_stream(self, mock_stderr): - """ Should write specified message to stream. 
""" - test_message = self.getUniqueString() - expected_content = "{message}\n".format(message=test_message) - daemon.runner.emit_message(test_message, stream=mock_stderr) - mock_stderr.write.assert_called_with(expected_content) - - def test_writes_to_specified_stream(self, mock_stderr): - """ Should write message to specified stream. """ - test_message = self.getUniqueString() - mock_stream = mock.MagicMock() - daemon.runner.emit_message(test_message, stream=mock_stream) - mock_stream.write.assert_called_with(mock.ANY) - - def test_writes_to_stderr_by_default(self, mock_stderr): - """ Should write message to ‘sys.stderr’ by default. """ - test_message = self.getUniqueString() - daemon.runner.emit_message(test_message) - mock_stderr.write.assert_called_with(mock.ANY) - - -class is_pidfile_stale_TestCase(scaffold.TestCase): - """ Test cases for ‘is_pidfile_stale’ function. """ - - def setUp(self): - """ Set up test fixtures. """ - super(is_pidfile_stale_TestCase, self).setUp() - - func_patcher_os_kill = mock.patch.object(os, "kill") - func_patcher_os_kill.start() - self.addCleanup(func_patcher_os_kill.stop) - os.kill.return_value = None - - self.test_pid = self.getUniqueInteger() - self.test_pidfile = mock.MagicMock(daemon.pidfile.TimeoutPIDLockFile) - self.test_pidfile.read_pid.return_value = self.test_pid - - def test_returns_false_if_no_pid_in_file(self): - """ Should return False if the pidfile contains no PID. """ - self.test_pidfile.read_pid.return_value = None - expected_result = False - result = daemon.runner.is_pidfile_stale(self.test_pidfile) - self.assertEqual(expected_result, result) - - def test_returns_false_if_process_exists(self): - """ Should return False if the process with its PID exists. """ - expected_result = False - result = daemon.runner.is_pidfile_stale(self.test_pidfile) - self.assertEqual(expected_result, result) - - def test_returns_true_if_process_does_not_exist(self): - """ Should return True if the process does not exist. """ - test_error = ProcessLookupError("No such process") - del os.kill.return_value - os.kill.side_effect = test_error - expected_result = True - result = daemon.runner.is_pidfile_stale(self.test_pidfile) - self.assertEqual(expected_result, result) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test_version.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test_version.py deleted file mode 100755 index b52f521d..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test_version.py +++ /dev/null @@ -1,1373 +0,0 @@ -# -*- coding: utf-8 -*- -# -# test_version.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; version 3 of that license or any later version. -# No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. - -""" Unit test for ‘version’ packaging module. 
""" - -from __future__ import (absolute_import, unicode_literals) - -import os -import os.path -import io -import errno -import functools -import collections -import textwrap -import json -import tempfile -import distutils.dist -import distutils.cmd -import distutils.errors -import distutils.fancy_getopt -try: - # Standard library of Python 2.7 and later. - from io import StringIO -except ImportError: - # Standard library of Python 2.6 and earlier. - from StringIO import StringIO - -import mock -import testtools -import testscenarios -import docutils -import docutils.writers -import docutils.nodes -import setuptools -import setuptools.command - -import version - -version.ensure_class_bases_begin_with( - version.__dict__, str('VersionInfoWriter'), docutils.writers.Writer) -version.ensure_class_bases_begin_with( - version.__dict__, str('VersionInfoTranslator'), - docutils.nodes.SparseNodeVisitor) - - -def make_test_classes_for_ensure_class_bases_begin_with(): - """ Make test classes for use with ‘ensure_class_bases_begin_with’. - - :return: Mapping {`name`: `type`} of the custom types created. - - """ - - class quux_metaclass(type): - def __new__(metaclass, name, bases, namespace): - return super(quux_metaclass, metaclass).__new__( - metaclass, name, bases, namespace) - - class Foo(object): - __metaclass__ = type - - class Bar(object): - pass - - class FooInheritingBar(Bar): - __metaclass__ = type - - class FooWithCustomMetaclass(object): - __metaclass__ = quux_metaclass - - result = dict( - (name, value) for (name, value) in locals().items() - if isinstance(value, type)) - - return result - -class ensure_class_bases_begin_with_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ensure_class_bases_begin_with’ function. """ - - test_classes = make_test_classes_for_ensure_class_bases_begin_with() - - scenarios = [ - ('simple', { - 'test_class': test_classes['Foo'], - 'base_class': test_classes['Bar'], - }), - ('custom metaclass', { - 'test_class': test_classes['FooWithCustomMetaclass'], - 'base_class': test_classes['Bar'], - 'expected_metaclass': test_classes['quux_metaclass'], - }), - ] - - def setUp(self): - """ Set up test fixtures. """ - super(ensure_class_bases_begin_with_TestCase, self).setUp() - - self.class_name = self.test_class.__name__ - self.test_module_namespace = {self.class_name: self.test_class} - - if not hasattr(self, 'expected_metaclass'): - self.expected_metaclass = type - - patcher_metaclass = mock.patch.object( - self.test_class, '__metaclass__') - patcher_metaclass.start() - self.addCleanup(patcher_metaclass.stop) - - self.fake_new_class = type(object) - self.test_class.__metaclass__.return_value = ( - self.fake_new_class) - - def test_module_namespace_contains_new_class(self): - """ Specified module namespace should have new class. """ - version.ensure_class_bases_begin_with( - self.test_module_namespace, self.class_name, self.base_class) - self.assertIn(self.fake_new_class, self.test_module_namespace.values()) - - def test_calls_metaclass_with_expected_class_name(self): - """ Should call the metaclass with the expected class name. """ - version.ensure_class_bases_begin_with( - self.test_module_namespace, self.class_name, self.base_class) - expected_class_name = self.class_name - self.test_class.__metaclass__.assert_called_with( - expected_class_name, mock.ANY, mock.ANY) - - def test_calls_metaclass_with_expected_bases(self): - """ Should call the metaclass with the expected bases. 
""" - version.ensure_class_bases_begin_with( - self.test_module_namespace, self.class_name, self.base_class) - expected_bases = tuple( - [self.base_class] - + list(self.test_class.__bases__)) - self.test_class.__metaclass__.assert_called_with( - mock.ANY, expected_bases, mock.ANY) - - def test_calls_metaclass_with_expected_namespace(self): - """ Should call the metaclass with the expected class namespace. """ - version.ensure_class_bases_begin_with( - self.test_module_namespace, self.class_name, self.base_class) - expected_namespace = self.test_class.__dict__.copy() - del expected_namespace['__dict__'] - self.test_class.__metaclass__.assert_called_with( - mock.ANY, mock.ANY, expected_namespace) - - -class ensure_class_bases_begin_with_AlreadyHasBase_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ensure_class_bases_begin_with’ function. - - These test cases test the conditions where the class's base is - already the specified base class. - - """ - - test_classes = make_test_classes_for_ensure_class_bases_begin_with() - - scenarios = [ - ('already Bar subclass', { - 'test_class': test_classes['FooInheritingBar'], - 'base_class': test_classes['Bar'], - }), - ] - - def setUp(self): - """ Set up test fixtures. """ - super( - ensure_class_bases_begin_with_AlreadyHasBase_TestCase, - self).setUp() - - self.class_name = self.test_class.__name__ - self.test_module_namespace = {self.class_name: self.test_class} - - patcher_metaclass = mock.patch.object( - self.test_class, '__metaclass__') - patcher_metaclass.start() - self.addCleanup(patcher_metaclass.stop) - - def test_metaclass_not_called(self): - """ Should not call metaclass to create a new type. """ - version.ensure_class_bases_begin_with( - self.test_module_namespace, self.class_name, self.base_class) - self.assertFalse(self.test_class.__metaclass__.called) - - -class VersionInfoWriter_TestCase(testtools.TestCase): - """ Test cases for ‘VersionInfoWriter’ class. """ - - def setUp(self): - """ Set up test fixtures. """ - super(VersionInfoWriter_TestCase, self).setUp() - - self.test_instance = version.VersionInfoWriter() - - def test_declares_version_info_support(self): - """ Should declare support for ‘version_info’. """ - instance = self.test_instance - expected_support = "version_info" - result = instance.supports(expected_support) - self.assertTrue(result) - - -class VersionInfoWriter_translate_TestCase(testtools.TestCase): - """ Test cases for ‘VersionInfoWriter.translate’ method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(VersionInfoWriter_translate_TestCase, self).setUp() - - patcher_translator = mock.patch.object( - version, 'VersionInfoTranslator') - self.mock_class_translator = patcher_translator.start() - self.addCleanup(patcher_translator.stop) - self.mock_translator = self.mock_class_translator.return_value - - self.test_instance = version.VersionInfoWriter() - patcher_document = mock.patch.object( - self.test_instance, 'document') - patcher_document.start() - self.addCleanup(patcher_document.stop) - - def test_creates_translator_with_document(self): - """ Should create a translator with the writer's document. """ - instance = self.test_instance - expected_document = self.test_instance.document - instance.translate() - self.mock_class_translator.assert_called_with(expected_document) - - def test_calls_document_walkabout_with_translator(self): - """ Should call document.walkabout with the translator. 
""" - instance = self.test_instance - instance.translate() - instance.document.walkabout.assert_called_with(self.mock_translator) - - def test_output_from_translator_astext(self): - """ Should have output from translator.astext(). """ - instance = self.test_instance - instance.translate() - expected_output = self.mock_translator.astext.return_value - self.assertEqual(expected_output, instance.output) - - -class ChangeLogEntry_TestCase(testtools.TestCase): - """ Test cases for ‘ChangeLogEntry’ class. """ - - def setUp(self): - """ Set up test fixtures. """ - super(ChangeLogEntry_TestCase, self).setUp() - - self.test_instance = version.ChangeLogEntry() - - def test_instantiate(self): - """ New instance of ‘ChangeLogEntry’ should be created. """ - self.assertIsInstance( - self.test_instance, version.ChangeLogEntry) - - def test_minimum_zero_arguments(self): - """ Initialiser should not require any arguments. """ - instance = version.ChangeLogEntry() - self.assertIsNot(instance, None) - - -class ChangeLogEntry_release_date_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ChangeLogEntry.release_date’ attribute. """ - - scenarios = [ - ('default', { - 'test_args': {}, - 'expected_release_date': - version.ChangeLogEntry.default_release_date, - }), - ('unknown token', { - 'test_args': {'release_date': "UNKNOWN"}, - 'expected_release_date': "UNKNOWN", - }), - ('future token', { - 'test_args': {'release_date': "FUTURE"}, - 'expected_release_date': "FUTURE", - }), - ('2001-01-01', { - 'test_args': {'release_date': "2001-01-01"}, - 'expected_release_date': "2001-01-01", - }), - ('bogus', { - 'test_args': {'release_date': "b0gUs"}, - 'expected_error': ValueError, - }), - ] - - def test_has_expected_release_date(self): - """ Should have default `release_date` attribute. """ - if hasattr(self, 'expected_error'): - self.assertRaises( - self.expected_error, - version.ChangeLogEntry, **self.test_args) - else: - instance = version.ChangeLogEntry(**self.test_args) - self.assertEqual(self.expected_release_date, instance.release_date) - - -class ChangeLogEntry_version_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ChangeLogEntry.version’ attribute. """ - - scenarios = [ - ('default', { - 'test_args': {}, - 'expected_version': - version.ChangeLogEntry.default_version, - }), - ('unknown token', { - 'test_args': {'version': "UNKNOWN"}, - 'expected_version': "UNKNOWN", - }), - ('0.0', { - 'test_args': {'version': "0.0"}, - 'expected_version': "0.0", - }), - ] - - def test_has_expected_version(self): - """ Should have default `version` attribute. """ - instance = version.ChangeLogEntry(**self.test_args) - self.assertEqual(self.expected_version, instance.version) - - -class ChangeLogEntry_maintainer_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ChangeLogEntry.maintainer’ attribute. """ - - scenarios = [ - ('default', { - 'test_args': {}, - 'expected_maintainer': None, - }), - ('person', { - 'test_args': {'maintainer': "Foo Bar "}, - 'expected_maintainer': "Foo Bar ", - }), - ('bogus', { - 'test_args': {'maintainer': "b0gUs"}, - 'expected_error': ValueError, - }), - ] - - def test_has_expected_maintainer(self): - """ Should have default `maintainer` attribute. 
""" - if hasattr(self, 'expected_error'): - self.assertRaises( - self.expected_error, - version.ChangeLogEntry, **self.test_args) - else: - instance = version.ChangeLogEntry(**self.test_args) - self.assertEqual(self.expected_maintainer, instance.maintainer) - - -class ChangeLogEntry_body_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ChangeLogEntry.body’ attribute. """ - - scenarios = [ - ('default', { - 'test_args': {}, - 'expected_body': None, - }), - ('simple', { - 'test_args': {'body': "Foo bar baz."}, - 'expected_body': "Foo bar baz.", - }), - ] - - def test_has_expected_body(self): - """ Should have default `body` attribute. """ - instance = version.ChangeLogEntry(**self.test_args) - self.assertEqual(self.expected_body, instance.body) - - -class ChangeLogEntry_as_version_info_entry_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘ChangeLogEntry.as_version_info_entry’ attribute. """ - - scenarios = [ - ('default', { - 'test_args': {}, - 'expected_result': collections.OrderedDict([ - ('release_date', version.ChangeLogEntry.default_release_date), - ('version', version.ChangeLogEntry.default_version), - ('maintainer', None), - ('body', None), - ]), - }), - ] - - def setUp(self): - """ Set up test fixtures. """ - super(ChangeLogEntry_as_version_info_entry_TestCase, self).setUp() - - self.test_instance = version.ChangeLogEntry(**self.test_args) - - def test_returns_result(self): - """ Should return expected result. """ - result = self.test_instance.as_version_info_entry() - self.assertEqual(self.expected_result, result) - - -def make_mock_field_node(field_name, field_body): - """ Make a mock Docutils field node for tests. """ - - mock_field_node = mock.MagicMock( - name='field', spec=docutils.nodes.field) - - mock_field_name_node = mock.MagicMock( - name='field_name', spec=docutils.nodes.field_name) - mock_field_name_node.parent = mock_field_node - mock_field_name_node.children = [field_name] - - mock_field_body_node = mock.MagicMock( - name='field_body', spec=docutils.nodes.field_body) - mock_field_body_node.parent = mock_field_node - mock_field_body_node.children = [field_body] - - mock_field_node.children = [mock_field_name_node, mock_field_body_node] - - def fake_func_first_child_matching_class(node_class): - result = None - node_class_name = node_class.__name__ - for (index, node) in enumerate(mock_field_node.children): - if node._mock_name == node_class_name: - result = index - break - return result - - mock_field_node.first_child_matching_class.side_effect = ( - fake_func_first_child_matching_class) - - return mock_field_node - - -class JsonEqual(testtools.matchers.Matcher): - """ A matcher to compare the value of JSON streams. """ - - def __init__(self, expected): - self.expected_value = expected - - def match(self, content): - """ Assert the JSON `content` matches the `expected_content`. """ - result = None - actual_value = json.loads(content.decode('utf-8')) - if actual_value != self.expected_value: - result = JsonValueMismatch(self.expected_value, actual_value) - return result - - -class JsonValueMismatch(testtools.matchers.Mismatch): - """ The specified JSON stream does not evaluate to the expected value. """ - - def __init__(self, expected, actual): - self.expected_value = expected - self.actual_value = actual - - def describe(self): - """ Emit a text description of this mismatch. 
""" - expected_json_text = json.dumps(self.expected_value, indent=4) - actual_json_text = json.dumps(self.actual_value, indent=4) - text = ( - "\n" - "reference: {expected}\n" - "actual: {actual}\n").format( - expected=expected_json_text, actual=actual_json_text) - return text - - -class changelog_to_version_info_collection_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘changelog_to_version_info_collection’ function. """ - - scenarios = [ - ('single entry', { - 'test_input': textwrap.dedent("""\ - Version 1.0 - =========== - - :Released: 2009-01-01 - :Maintainer: Foo Bar - - * Lorem ipsum dolor sit amet. - """), - 'expected_version_info': [ - { - 'release_date': "2009-01-01", - 'version': "1.0", - 'maintainer': "Foo Bar ", - 'body': "* Lorem ipsum dolor sit amet.\n", - }, - ], - }), - ('multiple entries', { - 'test_input': textwrap.dedent("""\ - Version 1.0 - =========== - - :Released: 2009-01-01 - :Maintainer: Foo Bar - - * Lorem ipsum dolor sit amet. - - - Version 0.8 - =========== - - :Released: 2004-01-01 - :Maintainer: Foo Bar - - * Donec venenatis nisl aliquam ipsum. - - - Version 0.7.2 - ============= - - :Released: 2001-01-01 - :Maintainer: Foo Bar - - * Pellentesque elementum mollis finibus. - """), - 'expected_version_info': [ - { - 'release_date': "2009-01-01", - 'version': "1.0", - 'maintainer': "Foo Bar ", - 'body': "* Lorem ipsum dolor sit amet.\n", - }, - { - 'release_date': "2004-01-01", - 'version': "0.8", - 'maintainer': "Foo Bar ", - 'body': "* Donec venenatis nisl aliquam ipsum.\n", - }, - { - 'release_date': "2001-01-01", - 'version': "0.7.2", - 'maintainer': "Foo Bar ", - 'body': "* Pellentesque elementum mollis finibus.\n", - }, - ], - }), - ('trailing comment', { - 'test_input': textwrap.dedent("""\ - Version NEXT - ============ - - :Released: FUTURE - :Maintainer: - - * Lorem ipsum dolor sit amet. - - .. - Vivamus aliquam felis rutrum rutrum dictum. - """), - 'expected_version_info': [ - { - 'release_date': "FUTURE", - 'version': "NEXT", - 'maintainer': "", - 'body': "* Lorem ipsum dolor sit amet.\n", - }, - ], - }), - ('inline comment', { - 'test_input': textwrap.dedent("""\ - Version NEXT - ============ - - :Released: FUTURE - :Maintainer: - - .. - Vivamus aliquam felis rutrum rutrum dictum. - - * Lorem ipsum dolor sit amet. - """), - 'expected_version_info': [ - { - 'release_date': "FUTURE", - 'version': "NEXT", - 'maintainer': "", - 'body': "* Lorem ipsum dolor sit amet.\n", - }, - ], - }), - ('unreleased entry', { - 'test_input': textwrap.dedent("""\ - Version NEXT - ============ - - :Released: FUTURE - :Maintainer: - - * Lorem ipsum dolor sit amet. - - - Version 0.8 - =========== - - :Released: 2001-01-01 - :Maintainer: Foo Bar - - * Donec venenatis nisl aliquam ipsum. - """), - 'expected_version_info': [ - { - 'release_date': "FUTURE", - 'version': "NEXT", - 'maintainer': "", - 'body': "* Lorem ipsum dolor sit amet.\n", - }, - { - 'release_date': "2001-01-01", - 'version': "0.8", - 'maintainer': "Foo Bar ", - 'body': "* Donec venenatis nisl aliquam ipsum.\n", - }, - ], - }), - ('no section', { - 'test_input': textwrap.dedent("""\ - :Released: 2009-01-01 - :Maintainer: Foo Bar - - * Lorem ipsum dolor sit amet. - """), - 'expected_error': version.InvalidFormatError, - }), - ('subsection', { - 'test_input': textwrap.dedent("""\ - Version 1.0 - =========== - - :Released: 2009-01-01 - :Maintainer: Foo Bar - - * Lorem ipsum dolor sit amet. 
- - Ut ultricies fermentum quam - --------------------------- - - * In commodo magna facilisis in. - """), - 'expected_error': version.InvalidFormatError, - 'subsection': True, - }), - ('unknown field', { - 'test_input': textwrap.dedent("""\ - Version 1.0 - =========== - - :Released: 2009-01-01 - :Maintainer: Foo Bar - :Favourite: Spam - - * Lorem ipsum dolor sit amet. - """), - 'expected_error': version.InvalidFormatError, - }), - ('invalid version word', { - 'test_input': textwrap.dedent("""\ - BoGuS 1.0 - ========= - - :Released: 2009-01-01 - :Maintainer: Foo Bar - - * Lorem ipsum dolor sit amet. - """), - 'expected_error': version.InvalidFormatError, - }), - ('invalid section title', { - 'test_input': textwrap.dedent("""\ - Lorem Ipsum 1.0 - =============== - - :Released: 2009-01-01 - :Maintainer: Foo Bar - - * Lorem ipsum dolor sit amet. - """), - 'expected_error': version.InvalidFormatError, - }), - ] - - def test_returns_expected_version_info(self): - """ Should return expected version info mapping. """ - infile = StringIO(self.test_input) - if hasattr(self, 'expected_error'): - self.assertRaises( - self.expected_error, - version.changelog_to_version_info_collection, infile) - else: - result = version.changelog_to_version_info_collection(infile) - self.assertThat(result, JsonEqual(self.expected_version_info)) - - -try: - FileNotFoundError - PermissionError -except NameError: - # Python 2 uses OSError. - FileNotFoundError = functools.partial(IOError, errno.ENOENT) - PermissionError = functools.partial(IOError, errno.EPERM) - -fake_version_info = { - 'release_date': "2001-01-01", 'version': "2.0", - 'maintainer': None, 'body': None, - } - -@mock.patch.object( - version, "get_latest_version", return_value=fake_version_info) -class generate_version_info_from_changelog_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘generate_version_info_from_changelog’ function. """ - - fake_open_side_effects = { - 'success': ( - lambda *args, **kwargs: StringIO()), - 'file not found': FileNotFoundError(), - 'permission denied': PermissionError(), - } - - scenarios = [ - ('simple', { - 'open_scenario': 'success', - 'fake_versions_json': json.dumps([fake_version_info]), - 'expected_result': fake_version_info, - }), - ('file not found', { - 'open_scenario': 'file not found', - 'expected_result': {}, - }), - ('permission denied', { - 'open_scenario': 'permission denied', - 'expected_result': {}, - }), - ] - - def setUp(self): - """ Set up test fixtures. 
""" - super(generate_version_info_from_changelog_TestCase, self).setUp() - - self.fake_changelog_file_path = tempfile.mktemp() - - def fake_open(filespec, *args, **kwargs): - if filespec == self.fake_changelog_file_path: - side_effect = self.fake_open_side_effects[self.open_scenario] - if callable(side_effect): - result = side_effect() - else: - raise side_effect - else: - result = StringIO() - return result - - func_patcher_io_open = mock.patch.object( - io, "open") - func_patcher_io_open.start() - self.addCleanup(func_patcher_io_open.stop) - io.open.side_effect = fake_open - - self.file_encoding = "utf-8" - - func_patcher_changelog_to_version_info_collection = mock.patch.object( - version, "changelog_to_version_info_collection") - func_patcher_changelog_to_version_info_collection.start() - self.addCleanup(func_patcher_changelog_to_version_info_collection.stop) - if hasattr(self, 'fake_versions_json'): - version.changelog_to_version_info_collection.return_value = ( - self.fake_versions_json.encode(self.file_encoding)) - - def test_returns_empty_collection_on_read_error( - self, - mock_func_get_latest_version): - """ Should return empty collection on error reading changelog. """ - test_error = PermissionError("Not for you") - version.changelog_to_version_info_collection.side_effect = test_error - result = version.generate_version_info_from_changelog( - self.fake_changelog_file_path) - expected_result = {} - self.assertDictEqual(expected_result, result) - - def test_opens_file_with_expected_encoding( - self, - mock_func_get_latest_version): - """ Should open changelog file in text mode with expected encoding. """ - result = version.generate_version_info_from_changelog( - self.fake_changelog_file_path) - expected_file_path = self.fake_changelog_file_path - expected_open_mode = 'rt' - expected_encoding = self.file_encoding - (open_args_positional, open_args_kwargs) = io.open.call_args - (open_args_filespec, open_args_mode) = open_args_positional[:2] - open_args_encoding = open_args_kwargs['encoding'] - self.assertEqual(expected_file_path, open_args_filespec) - self.assertEqual(expected_open_mode, open_args_mode) - self.assertEqual(expected_encoding, open_args_encoding) - - def test_returns_expected_result( - self, - mock_func_get_latest_version): - """ Should return expected result. """ - result = version.generate_version_info_from_changelog( - self.fake_changelog_file_path) - self.assertEqual(self.expected_result, result) - - -DefaultNoneDict = functools.partial(collections.defaultdict, lambda: None) - -class get_latest_version_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘get_latest_version’ function. 
""" - - scenarios = [ - ('simple', { - 'test_versions': [ - DefaultNoneDict({'release_date': "LATEST"}), - ], - 'expected_result': version.ChangeLogEntry.make_ordered_dict( - DefaultNoneDict({'release_date': "LATEST"})), - }), - ('no versions', { - 'test_versions': [], - 'expected_result': collections.OrderedDict(), - }), - ('ordered versions', { - 'test_versions': [ - DefaultNoneDict({'release_date': "1"}), - DefaultNoneDict({'release_date': "2"}), - DefaultNoneDict({'release_date': "LATEST"}), - ], - 'expected_result': version.ChangeLogEntry.make_ordered_dict( - DefaultNoneDict({'release_date': "LATEST"})), - }), - ('un-ordered versions', { - 'test_versions': [ - DefaultNoneDict({'release_date': "2"}), - DefaultNoneDict({'release_date': "LATEST"}), - DefaultNoneDict({'release_date': "1"}), - ], - 'expected_result': version.ChangeLogEntry.make_ordered_dict( - DefaultNoneDict({'release_date': "LATEST"})), - }), - ] - - def test_returns_expected_result(self): - """ Should return expected result. """ - result = version.get_latest_version(self.test_versions) - self.assertDictEqual(self.expected_result, result) - - -@mock.patch.object(json, "dumps", side_effect=json.dumps) -class serialise_version_info_from_mapping_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘get_latest_version’ function. """ - - scenarios = [ - ('simple', { - 'test_version_info': {'foo': "spam"}, - }), - ] - - for (name, scenario) in scenarios: - scenario['fake_json_dump'] = json.dumps(scenario['test_version_info']) - scenario['expected_value'] = scenario['test_version_info'] - - def test_passes_specified_object(self, mock_func_json_dumps): - """ Should pass the specified object to `json.dumps`. """ - result = version.serialise_version_info_from_mapping( - self.test_version_info) - mock_func_json_dumps.assert_called_with( - self.test_version_info, indent=mock.ANY) - - def test_returns_expected_result(self, mock_func_json_dumps): - """ Should return expected result. """ - mock_func_json_dumps.return_value = self.fake_json_dump - result = version.serialise_version_info_from_mapping( - self.test_version_info) - value = json.loads(result) - self.assertEqual(self.expected_value, value) - - -DistributionMetadata_defaults = { - name: None - for name in list(collections.OrderedDict.fromkeys( - distutils.dist.DistributionMetadata._METHOD_BASENAMES))} -FakeDistributionMetadata = collections.namedtuple( - 'FakeDistributionMetadata', DistributionMetadata_defaults.keys()) - -Distribution_defaults = { - 'metadata': None, - 'version': None, - 'release_date': None, - 'maintainer': None, - 'maintainer_email': None, - } -FakeDistribution = collections.namedtuple( - 'FakeDistribution', Distribution_defaults.keys()) - -def make_fake_distribution( - fields_override=None, metadata_fields_override=None): - metadata_fields = DistributionMetadata_defaults.copy() - if metadata_fields_override is not None: - metadata_fields.update(metadata_fields_override) - metadata = FakeDistributionMetadata(**metadata_fields) - - fields = Distribution_defaults.copy() - fields['metadata'] = metadata - if fields_override is not None: - fields.update(fields_override) - distribution = FakeDistribution(**fields) - - return distribution - - -class get_changelog_path_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘get_changelog_path’ function. """ - - default_path = "." 
- default_script_filename = "setup.py" - - scenarios = [ - ('simple', {}), - ('unusual script name', { - 'script_filename': "lorem_ipsum", - }), - ('relative script path', { - 'script_directory': "dolor/sit/amet", - }), - ('absolute script path', { - 'script_directory': "/dolor/sit/amet", - }), - ('specify filename', { - 'changelog_filename': "adipiscing", - }), - ] - - def setUp(self): - """ Set up test fixtures. """ - super(get_changelog_path_TestCase, self).setUp() - - self.test_distribution = mock.MagicMock(distutils.dist.Distribution) - - if not hasattr(self, 'script_directory'): - self.script_directory = self.default_path - if not hasattr(self, 'script_filename'): - self.script_filename = self.default_script_filename - self.test_distribution.script_name = os.path.join( - self.script_directory, self.script_filename) - - changelog_filename = version.changelog_filename - if hasattr(self, 'changelog_filename'): - changelog_filename = self.changelog_filename - - self.expected_result = os.path.join( - self.script_directory, changelog_filename) - - def test_returns_expected_result(self): - """ Should return expected result. """ - args = { - 'distribution': self.test_distribution, - } - if hasattr(self, 'changelog_filename'): - args.update({'filename': self.changelog_filename}) - result = version.get_changelog_path(**args) - self.assertEqual(self.expected_result, result) - - -class WriteVersionInfoCommand_BaseTestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Base class for ‘WriteVersionInfoCommand’ test case classes. """ - - def setUp(self): - """ Set up test fixtures. """ - super(WriteVersionInfoCommand_BaseTestCase, self).setUp() - - fake_distribution_name = self.getUniqueString() - - self.test_distribution = distutils.dist.Distribution() - self.test_distribution.metadata.name = fake_distribution_name - - -class WriteVersionInfoCommand_TestCase(WriteVersionInfoCommand_BaseTestCase): - """ Test cases for ‘WriteVersionInfoCommand’ class. """ - - def test_subclass_of_distutils_command(self): - """ Should be a subclass of ‘distutils.cmd.Command’. """ - instance = version.WriteVersionInfoCommand(self.test_distribution) - self.assertIsInstance(instance, distutils.cmd.Command) - - -class WriteVersionInfoCommand_user_options_TestCase( - WriteVersionInfoCommand_BaseTestCase): - """ Test cases for ‘WriteVersionInfoCommand.user_options’ attribute. """ - - def setUp(self): - """ Set up test fixtures. """ - super(WriteVersionInfoCommand_user_options_TestCase, self).setUp() - - self.test_instance = version.WriteVersionInfoCommand( - self.test_distribution) - self.commandline_parser = distutils.fancy_getopt.FancyGetopt( - self.test_instance.user_options) - - def test_parses_correctly_as_fancy_getopt(self): - """ Should parse correctly in ‘FancyGetopt’. """ - self.assertIsInstance( - self.commandline_parser, distutils.fancy_getopt.FancyGetopt) - - def test_includes_base_class_user_options(self): - """ Should include base class's user_options. """ - base_command = setuptools.command.egg_info.egg_info - expected_user_options = base_command.user_options - self.assertThat( - set(expected_user_options), - IsSubset(set(self.test_instance.user_options))) - - def test_has_option_changelog_path(self): - """ Should have a ‘changelog-path’ option. """ - expected_option_name = "changelog-path=" - result = self.commandline_parser.has_option(expected_option_name) - self.assertTrue(result) - - def test_has_option_outfile_path(self): - """ Should have a ‘outfile-path’ option. 
""" - expected_option_name = "outfile-path=" - result = self.commandline_parser.has_option(expected_option_name) - self.assertTrue(result) - - -class WriteVersionInfoCommand_initialize_options_TestCase( - WriteVersionInfoCommand_BaseTestCase): - """ Test cases for ‘WriteVersionInfoCommand.initialize_options’ method. """ - - def setUp(self): - """ Set up test fixtures. """ - super( - WriteVersionInfoCommand_initialize_options_TestCase, self - ).setUp() - - patcher_func_egg_info_initialize_options = mock.patch.object( - setuptools.command.egg_info.egg_info, "initialize_options") - patcher_func_egg_info_initialize_options.start() - self.addCleanup(patcher_func_egg_info_initialize_options.stop) - - def test_calls_base_class_method(self): - """ Should call base class's ‘initialize_options’ method. """ - instance = version.WriteVersionInfoCommand(self.test_distribution) - base_command_class = setuptools.command.egg_info.egg_info - base_command_class.initialize_options.assert_called_with() - - def test_sets_changelog_path_to_none(self): - """ Should set ‘changelog_path’ attribute to ``None``. """ - instance = version.WriteVersionInfoCommand(self.test_distribution) - self.assertIs(instance.changelog_path, None) - - def test_sets_outfile_path_to_none(self): - """ Should set ‘outfile_path’ attribute to ``None``. """ - instance = version.WriteVersionInfoCommand(self.test_distribution) - self.assertIs(instance.outfile_path, None) - - -class WriteVersionInfoCommand_finalize_options_TestCase( - WriteVersionInfoCommand_BaseTestCase): - """ Test cases for ‘WriteVersionInfoCommand.finalize_options’ method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(WriteVersionInfoCommand_finalize_options_TestCase, self).setUp() - - self.test_instance = version.WriteVersionInfoCommand(self.test_distribution) - - patcher_func_egg_info_finalize_options = mock.patch.object( - setuptools.command.egg_info.egg_info, "finalize_options") - patcher_func_egg_info_finalize_options.start() - self.addCleanup(patcher_func_egg_info_finalize_options.stop) - - self.fake_script_dir = self.getUniqueString() - self.test_distribution.script_name = os.path.join( - self.fake_script_dir, self.getUniqueString()) - - self.fake_egg_dir = self.getUniqueString() - self.test_instance.egg_info = self.fake_egg_dir - - patcher_func_get_changelog_path = mock.patch.object( - version, "get_changelog_path") - patcher_func_get_changelog_path.start() - self.addCleanup(patcher_func_get_changelog_path.stop) - - self.fake_changelog_path = self.getUniqueString() - version.get_changelog_path.return_value = self.fake_changelog_path - - def test_calls_base_class_method(self): - """ Should call base class's ‘finalize_options’ method. """ - base_command_class = setuptools.command.egg_info.egg_info - self.test_instance.finalize_options() - base_command_class.finalize_options.assert_called_with() - - def test_sets_force_to_none(self): - """ Should set ‘force’ attribute to ``None``. """ - self.test_instance.finalize_options() - self.assertIs(self.test_instance.force, None) - - def test_sets_changelog_path_using_get_changelog_path(self): - """ Should set ‘changelog_path’ attribute if it was ``None``. """ - self.test_instance.changelog_path = None - self.test_instance.finalize_options() - expected_changelog_path = self.fake_changelog_path - self.assertEqual(expected_changelog_path, self.test_instance.changelog_path) - - def test_leaves_changelog_path_if_already_set(self): - """ Should leave ‘changelog_path’ attribute set. 
""" - prior_changelog_path = self.getUniqueString() - self.test_instance.changelog_path = prior_changelog_path - self.test_instance.finalize_options() - expected_changelog_path = prior_changelog_path - self.assertEqual(expected_changelog_path, self.test_instance.changelog_path) - - def test_sets_outfile_path_to_default(self): - """ Should set ‘outfile_path’ attribute to default value. """ - fake_version_info_filename = self.getUniqueString() - with mock.patch.object( - version, "version_info_filename", - new=fake_version_info_filename): - self.test_instance.finalize_options() - expected_outfile_path = os.path.join( - self.fake_egg_dir, fake_version_info_filename) - self.assertEqual(expected_outfile_path, self.test_instance.outfile_path) - - def test_leaves_outfile_path_if_already_set(self): - """ Should leave ‘outfile_path’ attribute set. """ - prior_outfile_path = self.getUniqueString() - self.test_instance.outfile_path = prior_outfile_path - self.test_instance.finalize_options() - expected_outfile_path = prior_outfile_path - self.assertEqual(expected_outfile_path, self.test_instance.outfile_path) - - -class has_changelog_TestCase( - testscenarios.WithScenarios, testtools.TestCase): - """ Test cases for ‘has_changelog’ function. """ - - fake_os_path_exists_side_effects = { - 'true': (lambda path: True), - 'false': (lambda path: False), - } - - scenarios = [ - ('no changelog path', { - 'changelog_path': None, - 'expected_result': False, - }), - ('changelog exists', { - 'os_path_exists_scenario': 'true', - 'expected_result': True, - }), - ('changelog not found', { - 'os_path_exists_scenario': 'false', - 'expected_result': False, - }), - ] - - def setUp(self): - """ Set up test fixtures. """ - super(has_changelog_TestCase, self).setUp() - - self.test_distribution = distutils.dist.Distribution() - self.test_command = version.EggInfoCommand( - self.test_distribution) - - patcher_func_get_changelog_path = mock.patch.object( - version, "get_changelog_path") - patcher_func_get_changelog_path.start() - self.addCleanup(patcher_func_get_changelog_path.stop) - - self.fake_changelog_file_path = self.getUniqueString() - if hasattr(self, 'changelog_path'): - self.fake_changelog_file_path = self.changelog_path - version.get_changelog_path.return_value = self.fake_changelog_file_path - self.fake_changelog_file = StringIO() - - def fake_os_path_exists(path): - if path == self.fake_changelog_file_path: - side_effect = self.fake_os_path_exists_side_effects[ - self.os_path_exists_scenario] - if callable(side_effect): - result = side_effect(path) - else: - raise side_effect - else: - result = False - return result - - func_patcher_os_path_exists = mock.patch.object( - os.path, "exists") - func_patcher_os_path_exists.start() - self.addCleanup(func_patcher_os_path_exists.stop) - os.path.exists.side_effect = fake_os_path_exists - - def test_gets_changelog_path_from_distribution(self): - """ Should call ‘get_changelog_path’ with distribution. """ - result = version.has_changelog(self.test_command) - version.get_changelog_path.assert_called_with( - self.test_distribution) - - def test_returns_expected_result(self): - """ Should be a subclass of ‘distutils.cmd.Command’. 
""" - result = version.has_changelog(self.test_command) - self.assertEqual(self.expected_result, result) - - -@mock.patch.object(version, 'generate_version_info_from_changelog') -@mock.patch.object(version, 'serialise_version_info_from_mapping') -@mock.patch.object(version.EggInfoCommand, "write_file") -class WriteVersionInfoCommand_run_TestCase( - WriteVersionInfoCommand_BaseTestCase): - """ Test cases for ‘WriteVersionInfoCommand.run’ method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(WriteVersionInfoCommand_run_TestCase, self).setUp() - - self.test_instance = version.WriteVersionInfoCommand( - self.test_distribution) - - self.fake_changelog_path = self.getUniqueString() - self.test_instance.changelog_path = self.fake_changelog_path - - self.fake_outfile_path = self.getUniqueString() - self.test_instance.outfile_path = self.fake_outfile_path - - def test_returns_none( - self, - mock_func_egg_info_write_file, - mock_func_serialise_version_info, - mock_func_generate_version_info): - """ Should return ``None``. """ - result = self.test_instance.run() - self.assertIs(result, None) - - def test_generates_version_info_from_changelog( - self, - mock_func_egg_info_write_file, - mock_func_serialise_version_info, - mock_func_generate_version_info): - """ Should generate version info from specified changelog. """ - self.test_instance.run() - expected_changelog_path = self.test_instance.changelog_path - mock_func_generate_version_info.assert_called_with( - expected_changelog_path) - - def test_serialises_version_info_from_mapping( - self, - mock_func_egg_info_write_file, - mock_func_serialise_version_info, - mock_func_generate_version_info): - """ Should serialise version info from specified mapping. """ - self.test_instance.run() - expected_version_info = mock_func_generate_version_info.return_value - mock_func_serialise_version_info.assert_called_with( - expected_version_info) - - def test_writes_file_using_command_context( - self, - mock_func_egg_info_write_file, - mock_func_serialise_version_info, - mock_func_generate_version_info): - """ Should write the metadata file using the command context. """ - self.test_instance.run() - expected_content = mock_func_serialise_version_info.return_value - mock_func_egg_info_write_file.assert_called_with( - "version info", self.fake_outfile_path, expected_content) - - -IsSubset = testtools.matchers.MatchesPredicateWithParams( - set.issubset, "{0} should be a subset of {1}") - -class EggInfoCommand_TestCase(testtools.TestCase): - """ Test cases for ‘EggInfoCommand’ class. """ - - def setUp(self): - """ Set up test fixtures. """ - super(EggInfoCommand_TestCase, self).setUp() - - self.test_distribution = distutils.dist.Distribution() - self.test_instance = version.EggInfoCommand(self.test_distribution) - - def test_subclass_of_setuptools_egg_info(self): - """ Should be a subclass of Setuptools ‘egg_info’. """ - self.assertIsInstance( - self.test_instance, setuptools.command.egg_info.egg_info) - - def test_sub_commands_include_base_class_sub_commands(self): - """ Should include base class's sub-commands in this sub_commands. """ - base_command = setuptools.command.egg_info.egg_info - expected_sub_commands = base_command.sub_commands - self.assertThat( - set(expected_sub_commands), - IsSubset(set(self.test_instance.sub_commands))) - - def test_sub_commands_includes_write_version_info_command(self): - """ Should include sub-command named ‘write_version_info’. 
""" - commands_by_name = dict(self.test_instance.sub_commands) - expected_predicate = version.has_changelog - expected_item = ('write_version_info', expected_predicate) - self.assertIn(expected_item, commands_by_name.items()) - - -@mock.patch.object(setuptools.command.egg_info.egg_info, "run") -class EggInfoCommand_run_TestCase(testtools.TestCase): - """ Test cases for ‘EggInfoCommand.run’ method. """ - - def setUp(self): - """ Set up test fixtures. """ - super(EggInfoCommand_run_TestCase, self).setUp() - - self.test_distribution = distutils.dist.Distribution() - self.test_instance = version.EggInfoCommand(self.test_distribution) - - base_command = setuptools.command.egg_info.egg_info - patcher_func_egg_info_get_sub_commands = mock.patch.object( - base_command, "get_sub_commands") - patcher_func_egg_info_get_sub_commands.start() - self.addCleanup(patcher_func_egg_info_get_sub_commands.stop) - - patcher_func_egg_info_run_command = mock.patch.object( - base_command, "run_command") - patcher_func_egg_info_run_command.start() - self.addCleanup(patcher_func_egg_info_run_command.stop) - - self.fake_sub_commands = ["spam", "eggs", "beans"] - base_command.get_sub_commands.return_value = self.fake_sub_commands - - def test_returns_none(self, mock_func_egg_info_run): - """ Should return ``None``. """ - result = self.test_instance.run() - self.assertIs(result, None) - - def test_runs_each_command_in_sub_commands( - self, mock_func_egg_info_run): - """ Should run each command in ‘self.get_sub_commands()’. """ - base_command = setuptools.command.egg_info.egg_info - self.test_instance.run() - expected_calls = [mock.call(name) for name in self.fake_sub_commands] - base_command.run_command.assert_has_calls(expected_calls) - - def test_calls_base_class_run(self, mock_func_egg_info_run): - """ Should call base class's ‘run’ method. """ - result = self.test_instance.run() - mock_func_egg_info_run.assert_called_with() - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : diff --git a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/version.py b/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/version.py deleted file mode 100755 index 7e4c4202..00000000 --- a/scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/version.py +++ /dev/null @@ -1,547 +0,0 @@ -# -*- coding: utf-8 -*- - -# version.py -# Part of ‘python-daemon’, an implementation of PEP 3143. -# -# Copyright © 2008–2015 Ben Finney -# -# This is free software: you may copy, modify, and/or distribute this work -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; version 3 of that license or any later version. -# No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. - -""" Version information unified for human- and machine-readable formats. - - The project ‘ChangeLog’ file is a reStructuredText document, with - each section describing a version of the project. The document is - intended to be readable as-is by end users. - - This module handles transformation from the ‘ChangeLog’ to a - mapping of version information, serialised as JSON. It also - provides functionality for Distutils to use this information. 
-
-    Requires:
-
-        * Docutils
-        * JSON
-
-    """
-
-from __future__ import (absolute_import, unicode_literals)
-
-import sys
-import os
-import io
-import errno
-import json
-import datetime
-import textwrap
-import re
-import functools
-import collections
-import distutils
-import distutils.errors
-import distutils.cmd
-try:
-    # Python 2 has both ‘str’ (bytes) and ‘unicode’ (text).
-    basestring = basestring
-    unicode = unicode
-except NameError:
-    # Python 3 names the Unicode data type ‘str’.
-    basestring = str
-    unicode = str
-
-import setuptools
-import setuptools.command.egg_info
-
-
-def ensure_class_bases_begin_with(namespace, class_name, base_class):
-    """ Ensure the named class's bases start with the base class.
-
-        :param namespace: The namespace containing the class name.
-        :param class_name: The name of the class to alter.
-        :param base_class: The type to be the first base class for the
-            newly created type.
-        :return: ``None``.
-
-        This function is a hack to circumvent a circular dependency:
-        using classes from a module which is not installed at the time
-        this module is imported.
-
-        Call this function after ensuring `base_class` is available,
-        before using the class named by `class_name`.
-
-        """
-    existing_class = namespace[class_name]
-    assert isinstance(existing_class, type)
-
-    bases = list(existing_class.__bases__)
-    if base_class is bases[0]:
-        # Already bound to a type with the right bases.
-        return
-    bases.insert(0, base_class)
-
-    new_class_namespace = existing_class.__dict__.copy()
-    # Type creation will assign the correct ‘__dict__’ attribute.
-    del new_class_namespace['__dict__']
-
-    metaclass = existing_class.__metaclass__
-    new_class = metaclass(class_name, tuple(bases), new_class_namespace)
-
-    namespace[class_name] = new_class
-
-
-class VersionInfoWriter(object):
-    """ Docutils writer to produce a version info JSON data stream. """
-
-    # This class needs its base class to be a class from `docutils`.
-    # But that would create a circular dependency: Setuptools cannot
-    # ensure `docutils` is available before importing this module.
-    #
-    # Use `ensure_class_bases_begin_with` after importing `docutils`, to
-    # re-bind the `VersionInfoWriter` name to a new type that inherits
-    # from `docutils.writers.Writer`.
-
-    __metaclass__ = type
-
-    supported = ['version_info']
-    """ Formats this writer supports. """
-
-    def __init__(self):
-        super(VersionInfoWriter, self).__init__()
-        self.translator_class = VersionInfoTranslator
-
-    def translate(self):
-        visitor = self.translator_class(self.document)
-        self.document.walkabout(visitor)
-        self.output = visitor.astext()
-
-
-rfc822_person_regex = re.compile(
-        "^(?P<name>[^<]+) <(?P<email>[^>]+)>$")
-
-class ChangeLogEntry:
-    """ An individual entry from the ‘ChangeLog’ document. """
-
-    __metaclass__ = type
-
-    field_names = [
-            'release_date',
-            'version',
-            'maintainer',
-            'body',
-            ]
-
-    date_format = "%Y-%m-%d"
-    default_version = "UNKNOWN"
-    default_release_date = "UNKNOWN"
-
-    def __init__(
-            self,
-            release_date=default_release_date, version=default_version,
-            maintainer=None, body=None):
-        self.validate_release_date(release_date)
-        self.release_date = release_date
-
-        self.version = version
-
-        self.validate_maintainer(maintainer)
-        self.maintainer = maintainer
-        self.body = body
-
-    @classmethod
-    def validate_release_date(cls, value):
-        """ Validate the `release_date` value.
-
-            :param value: The prospective `release_date` value.
-            :return: ``None`` if the value is valid.
-            :raises ValueError: If the value is invalid.
- - """ - if value in ["UNKNOWN", "FUTURE"]: - # A valid non-date value. - return None - - # Raises `ValueError` if parse fails. - datetime.datetime.strptime(value, ChangeLogEntry.date_format) - - @classmethod - def validate_maintainer(cls, value): - """ Validate the `maintainer` value. - - :param value: The prospective `maintainer` value. - :return: ``None`` if the value is valid. - :raises ValueError: If the value is invalid. - - """ - valid = False - - if value is None: - valid = True - elif rfc822_person_regex.search(value): - valid = True - - if not valid: - raise ValueError("Not a valid person specification {value!r}") - else: - return None - - @classmethod - def make_ordered_dict(cls, fields): - """ Make an ordered dict of the fields. """ - result = collections.OrderedDict( - (name, fields[name]) - for name in cls.field_names) - return result - - def as_version_info_entry(self): - """ Format the changelog entry as a version info entry. """ - fields = vars(self) - entry = self.make_ordered_dict(fields) - - return entry - - -class InvalidFormatError(ValueError): - """ Raised when the document is not a valid ‘ChangeLog’ document. """ - - -class VersionInfoTranslator(object): - """ Translator from document nodes to a version info stream. """ - - # This class needs its base class to be a class from `docutils`. - # But that would create a circular dependency: Setuptools cannot - # ensure `docutils` is available before importing this module. - # - # Use `ensure_class_bases_begin_with` after importing `docutils`, - # to re-bind the `VersionInfoTranslator` name to a new type that - # inherits from `docutils.nodes.SparseNodeVisitor`. - - __metaclass__ = type - - wrap_width = 78 - bullet_text = "* " - - attr_convert_funcs_by_attr_name = { - 'released': ('release_date', unicode), - 'version': ('version', unicode), - 'maintainer': ('maintainer', unicode), - } - - def __init__(self, document): - super(VersionInfoTranslator, self).__init__(document) - self.settings = document.settings - self.current_section_level = 0 - self.current_field_name = None - self.content = [] - self.indent_width = 0 - self.initial_indent = "" - self.subsequent_indent = "" - self.current_entry = None - - # Docutils is not available when this class is defined. - # Get the `docutils` module dynamically. - self._docutils = sys.modules['docutils'] - - def astext(self): - """ Return the translated document as text. 
""" - text = json.dumps(self.content, indent=4) - return text - - def append_to_current_entry(self, text): - if self.current_entry is not None: - if self.current_entry.body is not None: - self.current_entry.body += text - - def visit_Text(self, node): - raw_text = node.astext() - text = textwrap.fill( - raw_text, - width=self.wrap_width, - initial_indent=self.initial_indent, - subsequent_indent=self.subsequent_indent) - self.append_to_current_entry(text) - - def depart_Text(self, node): - pass - - def visit_comment(self, node): - raise self._docutils.nodes.SkipNode - - def visit_field_body(self, node): - field_list_node = node.parent.parent - if not isinstance(field_list_node, self._docutils.nodes.field_list): - raise InvalidFormatError( - "Unexpected field within {node!r}".format( - node=field_list_node)) - (attr_name, convert_func) = self.attr_convert_funcs_by_attr_name[ - self.current_field_name] - attr_value = convert_func(node.astext()) - setattr(self.current_entry, attr_name, attr_value) - - def depart_field_body(self, node): - pass - - def visit_field_list(self, node): - pass - - def depart_field_list(self, node): - self.current_field_name = None - self.current_entry.body = "" - - def visit_field_name(self, node): - field_name = node.astext() - if self.current_section_level == 1: - # At a top-level section. - if field_name.lower() not in ["released", "maintainer"]: - raise InvalidFormatError( - "Unexpected field name {name!r}".format(name=field_name)) - self.current_field_name = field_name.lower() - - def depart_field_name(self, node): - pass - - def visit_bullet_list(self, node): - self.current_context = [] - - def depart_bullet_list(self, node): - self.current_entry.changes = self.current_context - self.current_context = None - - def adjust_indent_width(self, delta): - self.indent_width += delta - self.subsequent_indent = " " * self.indent_width - self.initial_indent = self.subsequent_indent - - def visit_list_item(self, node): - indent_delta = +len(self.bullet_text) - self.adjust_indent_width(indent_delta) - self.initial_indent = self.subsequent_indent[:-indent_delta] - self.append_to_current_entry(self.initial_indent + self.bullet_text) - - def depart_list_item(self, node): - indent_delta = +len(self.bullet_text) - self.adjust_indent_width(-indent_delta) - self.append_to_current_entry("\n") - - def visit_section(self, node): - self.current_section_level += 1 - if self.current_section_level == 1: - # At a top-level section. - self.current_entry = ChangeLogEntry() - else: - raise InvalidFormatError( - "Subsections not implemented for this writer") - - def depart_section(self, node): - self.current_section_level -= 1 - self.content.append( - self.current_entry.as_version_info_entry()) - self.current_entry = None - - _expected_title_word_length = len("Version FOO".split(" ")) - - def depart_title(self, node): - title_text = node.astext() - # At a top-level section. - words = title_text.split(" ") - version = None - if len(words) != self._expected_title_word_length: - raise InvalidFormatError( - "Unexpected title text {text!r}".format(text=title_text)) - if words[0].lower() not in ["version"]: - raise InvalidFormatError( - "Unexpected title text {text!r}".format(text=title_text)) - version = words[-1] - self.current_entry.version = version - - -def changelog_to_version_info_collection(infile): - """ Render the ‘ChangeLog’ document to a version info collection. - - :param infile: A file-like object containing the changelog. 
-        :return: The serialised JSON data of the version info collection.
-
-        """
-
-    # Docutils is not available when Setuptools needs this module, so
-    # delay the imports to this function instead.
-    import docutils.core
-    import docutils.nodes
-    import docutils.writers
-
-    ensure_class_bases_begin_with(
-            globals(), str('VersionInfoWriter'), docutils.writers.Writer)
-    ensure_class_bases_begin_with(
-            globals(), str('VersionInfoTranslator'),
-            docutils.nodes.SparseNodeVisitor)
-
-    writer = VersionInfoWriter()
-    settings_overrides = {
-            'doctitle_xform': False,
-            }
-    version_info_json = docutils.core.publish_string(
-            infile.read(), writer=writer,
-            settings_overrides=settings_overrides)
-
-    return version_info_json
-
-
-try:
-    lru_cache = functools.lru_cache
-except AttributeError:
-    # Python < 3.2 does not have the `functools.lru_cache` function.
-    # Not essential, so replace it with a no-op.
-    lru_cache = lambda maxsize=None, typed=False: lambda func: func
-
-
-@lru_cache(maxsize=128)
-def generate_version_info_from_changelog(infile_path):
-    """ Get the version info for the latest version in the changelog.
-
-        :param infile_path: Filesystem path to the input changelog file.
-        :return: The generated version info mapping; or ``None`` if the
-            file cannot be read.
-
-        The document is explicitly opened as UTF-8 encoded text.
-
-        """
-    version_info = collections.OrderedDict()
-
-    versions_all_json = None
-    try:
-        with io.open(infile_path, 'rt', encoding="utf-8") as infile:
-            versions_all_json = changelog_to_version_info_collection(infile)
-    except EnvironmentError:
-        # If we can't read the input file, leave the collection empty.
-        pass
-
-    if versions_all_json is not None:
-        versions_all = json.loads(versions_all_json.decode('utf-8'))
-        version_info = get_latest_version(versions_all)
-
-    return version_info
-
-
-def get_latest_version(versions):
-    """ Get the latest version from a collection of changelog entries.
-
-        :param versions: A collection of mappings for changelog entries.
-        :return: An ordered mapping of fields for the latest version,
-            if `versions` is non-empty; otherwise, an empty mapping.
-
-        """
-    version_info = collections.OrderedDict()
-
-    versions_by_release_date = {
-            item['release_date']: item
-            for item in versions}
-    if versions_by_release_date:
-        latest_release_date = max(versions_by_release_date.keys())
-        version_info = ChangeLogEntry.make_ordered_dict(
-                versions_by_release_date[latest_release_date])
-
-    return version_info
-
-
-def serialise_version_info_from_mapping(version_info):
-    """ Generate the version info serialised data.
-
-        :param version_info: Mapping of version info items.
-        :return: The version info serialised to JSON.
-
-        """
-    content = json.dumps(version_info, indent=4)
-
-    return content
-
-
-changelog_filename = "ChangeLog"
-
-def get_changelog_path(distribution, filename=changelog_filename):
-    """ Get the changelog file path for the distribution.
-
-        :param distribution: The distutils.dist.Distribution instance.
-        :param filename: The base filename of the changelog document.
-        :return: Filesystem path of the changelog document, or ``None``
-            if not discoverable.
-
-        """
-    setup_dirname = os.path.dirname(distribution.script_name)
-    filepath = os.path.join(setup_dirname, filename)
-
-    return filepath
-
-
-def has_changelog(command):
-    """ Return ``True`` iff the distribution's changelog file exists. """
""" - result = False - - changelog_path = get_changelog_path(command.distribution) - if changelog_path is not None: - if os.path.exists(changelog_path): - result = True - - return result - - -class EggInfoCommand(setuptools.command.egg_info.egg_info, object): - """ Custom ‘egg_info’ command for this distribution. """ - - sub_commands = ([ - ('write_version_info', has_changelog), - ] + setuptools.command.egg_info.egg_info.sub_commands) - - def run(self): - """ Execute this command. """ - super(EggInfoCommand, self).run() - - for command_name in self.get_sub_commands(): - self.run_command(command_name) - - -version_info_filename = "version_info.json" - -class WriteVersionInfoCommand(EggInfoCommand, object): - """ Setuptools command to serialise version info metadata. """ - - user_options = ([ - ("changelog-path=", None, - "Filesystem path to the changelog document."), - ("outfile-path=", None, - "Filesystem path to the version info file."), - ] + EggInfoCommand.user_options) - - def initialize_options(self): - """ Initialise command options to defaults. """ - super(WriteVersionInfoCommand, self).initialize_options() - self.changelog_path = None - self.outfile_path = None - - def finalize_options(self): - """ Finalise command options before execution. """ - self.set_undefined_options( - 'build', - ('force', 'force')) - - super(WriteVersionInfoCommand, self).finalize_options() - - if self.changelog_path is None: - self.changelog_path = get_changelog_path(self.distribution) - - if self.outfile_path is None: - egg_dir = self.egg_info - self.outfile_path = os.path.join(egg_dir, version_info_filename) - - def run(self): - """ Execute this command. """ - version_info = generate_version_info_from_changelog(self.changelog_path) - content = serialise_version_info_from_mapping(version_info) - self.write_file("version info", self.outfile_path, content) - - -# Local variables: -# coding: utf-8 -# mode: python -# End: -# vim: fileencoding=utf-8 filetype=python : -- cgit 1.2.3-korg