-rw-r--r-- | .gitignore | 6 | ||||
-rwxr-xr-x | CONTRIBUTORS | 10 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/CMakeLists.txt (renamed from yaml-cpp/CMakeLists.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/aliasmanager.h (renamed from yaml-cpp/include/yaml-cpp/aliasmanager.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/anchor.h (renamed from yaml-cpp/include/yaml-cpp/anchor.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/binary.h (renamed from yaml-cpp/include/yaml-cpp/binary.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/contrib/anchordict.h (renamed from yaml-cpp/include/yaml-cpp/contrib/anchordict.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/contrib/graphbuilder.h (renamed from yaml-cpp/include/yaml-cpp/contrib/graphbuilder.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/conversion.h (renamed from yaml-cpp/include/yaml-cpp/conversion.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/dll.h (renamed from yaml-cpp/include/yaml-cpp/dll.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/emitfromevents.h (renamed from yaml-cpp/include/yaml-cpp/emitfromevents.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/emitter.h (renamed from yaml-cpp/include/yaml-cpp/emitter.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/emittermanip.h (renamed from yaml-cpp/include/yaml-cpp/emittermanip.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/eventhandler.h (renamed from yaml-cpp/include/yaml-cpp/eventhandler.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/exceptions.h (renamed from yaml-cpp/include/yaml-cpp/exceptions.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/iterator.h (renamed from yaml-cpp/include/yaml-cpp/iterator.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/ltnode.h (renamed from yaml-cpp/include/yaml-cpp/ltnode.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/mark.h (renamed from yaml-cpp/include/yaml-cpp/mark.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/node.h (renamed from yaml-cpp/include/yaml-cpp/node.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/nodeimpl.h (renamed from yaml-cpp/include/yaml-cpp/nodeimpl.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/nodereadimpl.h (renamed from yaml-cpp/include/yaml-cpp/nodereadimpl.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/nodeutil.h (renamed from yaml-cpp/include/yaml-cpp/nodeutil.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/noncopyable.h (renamed from yaml-cpp/include/yaml-cpp/noncopyable.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/null.h (renamed from yaml-cpp/include/yaml-cpp/null.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/ostream.h (renamed from yaml-cpp/include/yaml-cpp/ostream.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/parser.h (renamed from yaml-cpp/include/yaml-cpp/parser.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/stlemitter.h (renamed from yaml-cpp/include/yaml-cpp/stlemitter.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/stlnode.h (renamed from yaml-cpp/include/yaml-cpp/stlnode.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/traits.h (renamed from yaml-cpp/include/yaml-cpp/traits.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/include/yaml-cpp/yaml.h (renamed from yaml-cpp/include/yaml-cpp/yaml.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/install.txt (renamed from yaml-cpp/install.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/license.txt (renamed from yaml-cpp/license.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/aliasmanager.cpp (renamed from yaml-cpp/src/aliasmanager.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/binary.cpp (renamed from yaml-cpp/src/binary.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/collectionstack.h (renamed from yaml-cpp/src/collectionstack.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/contrib/graphbuilder.cpp (renamed from yaml-cpp/src/contrib/graphbuilder.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/contrib/graphbuilderadapter.cpp (renamed from yaml-cpp/src/contrib/graphbuilderadapter.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/contrib/graphbuilderadapter.h (renamed from yaml-cpp/src/contrib/graphbuilderadapter.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/conversion.cpp (renamed from yaml-cpp/src/conversion.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/directives.cpp (renamed from yaml-cpp/src/directives.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/directives.h (renamed from yaml-cpp/src/directives.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/emitfromevents.cpp (renamed from yaml-cpp/src/emitfromevents.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/emitter.cpp (renamed from yaml-cpp/src/emitter.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/emitterstate.cpp (renamed from yaml-cpp/src/emitterstate.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/emitterstate.h (renamed from yaml-cpp/src/emitterstate.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/emitterutils.cpp (renamed from yaml-cpp/src/emitterutils.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/emitterutils.h (renamed from yaml-cpp/src/emitterutils.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/exp.cpp (renamed from yaml-cpp/src/exp.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/exp.h (renamed from yaml-cpp/src/exp.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/indentation.h (renamed from yaml-cpp/src/indentation.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/iterator.cpp (renamed from yaml-cpp/src/iterator.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/iterpriv.h (renamed from yaml-cpp/src/iterpriv.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/node.cpp (renamed from yaml-cpp/src/node.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/nodebuilder.cpp (renamed from yaml-cpp/src/nodebuilder.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/nodebuilder.h (renamed from yaml-cpp/src/nodebuilder.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/nodeownership.cpp (renamed from yaml-cpp/src/nodeownership.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/nodeownership.h (renamed from yaml-cpp/src/nodeownership.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/null.cpp (renamed from yaml-cpp/src/null.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/ostream.cpp (renamed from yaml-cpp/src/ostream.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/parser.cpp (renamed from yaml-cpp/src/parser.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/ptr_stack.h (renamed from yaml-cpp/src/ptr_stack.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/ptr_vector.h (renamed from yaml-cpp/src/ptr_vector.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/regex.cpp (renamed from yaml-cpp/src/regex.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/regex.h (renamed from yaml-cpp/src/regex.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/regeximpl.h (renamed from yaml-cpp/src/regeximpl.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scanner.cpp (renamed from yaml-cpp/src/scanner.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scanner.h (renamed from yaml-cpp/src/scanner.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scanscalar.cpp (renamed from yaml-cpp/src/scanscalar.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scanscalar.h (renamed from yaml-cpp/src/scanscalar.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scantag.cpp (renamed from yaml-cpp/src/scantag.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scantag.h (renamed from yaml-cpp/src/scantag.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/scantoken.cpp (renamed from yaml-cpp/src/scantoken.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/setting.h (renamed from yaml-cpp/src/setting.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/simplekey.cpp (renamed from yaml-cpp/src/simplekey.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/singledocparser.cpp (renamed from yaml-cpp/src/singledocparser.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/singledocparser.h (renamed from yaml-cpp/src/singledocparser.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/stream.cpp (renamed from yaml-cpp/src/stream.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/stream.h (renamed from yaml-cpp/src/stream.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/streamcharsource.h (renamed from yaml-cpp/src/streamcharsource.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/stringsource.h (renamed from yaml-cpp/src/stringsource.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/tag.cpp (renamed from yaml-cpp/src/tag.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/tag.h (renamed from yaml-cpp/src/tag.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/src/token.h (renamed from yaml-cpp/src/token.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/CMakeLists.txt (renamed from yaml-cpp/test/CMakeLists.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/emittertests.cpp (renamed from yaml-cpp/test/emittertests.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/emittertests.h (renamed from yaml-cpp/test/emittertests.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/main.cpp (renamed from yaml-cpp/test/main.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/nodetests.h (renamed from yaml-cpp/test/nodetests.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/old-api/parsertests.cpp (renamed from yaml-cpp/test/old-api/parsertests.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/old-api/spectests.cpp (renamed from yaml-cpp/test/old-api/spectests.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/parsertests.h (renamed from yaml-cpp/test/parsertests.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/specexamples.h (renamed from yaml-cpp/test/specexamples.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/spectests.cpp (renamed from yaml-cpp/test/spectests.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/spectests.h (renamed from yaml-cpp/test/spectests.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/tests.cpp (renamed from yaml-cpp/test/tests.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/test/tests.h (renamed from yaml-cpp/test/tests.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/util/CMakeLists.txt (renamed from yaml-cpp/util/CMakeLists.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/util/api.cpp (renamed from yaml-cpp/util/api.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/util/parse.cpp (renamed from yaml-cpp/util/parse.cpp) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/yaml-cpp/yaml-cpp.pc.cmake (renamed from yaml-cpp/yaml-cpp.pc.cmake) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/include/zmq.h (renamed from src/zmq/include/zmq.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/include/zmq_utils.h (renamed from src/zmq/include/zmq_utils.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/libzmq.a (renamed from src/zmq/libzmq.a) | bin | 7932556 -> 7932556 bytes | |||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/libzmq.la (renamed from src/zmq/libzmq.la) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/libzmq.lai (renamed from src/zmq/libzmq.lai) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/libzmq.so (renamed from src/zmq/libzmq.so) | bin | 3150071 -> 3150071 bytes | |||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/libzmq.so.3 (renamed from src/zmq/libzmq.so.3) | bin | 3150071 -> 3150071 bytes | |||
-rw-r--r--[-rwxr-xr-x] | external_libs/zmq/libzmq.so.3.1.0 (renamed from src/zmq/libzmq.so.3.1.0) | bin | 3150071 -> 3150071 bytes | |||
-rwxr-xr-x | linux/ws_main.py | 8 | ||||
-rwxr-xr-x | linux_dpdk/ws_main.py | 15 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/client/outer_packages.py | 29 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/client/trex_client.py | 12 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/client_utils/general_utils.py | 27 | ||||
-rw-r--r-- | scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py (renamed from src/console/trex_rpc_client.py) | 366 | ||||
-rw-r--r-- | scripts/automation/trex_control_plane/client_utils/outer_packages.py | 29 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/automation/trex_control_plane/console/trex_console.py (renamed from src/console/trex_console.py) | 6 | ||||
-rw-r--r-- | scripts/automation/trex_control_plane/console/trex_root_path.py | 15 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/automation/trex_control_plane/console/trex_status.py (renamed from src/console/trex_status.py) | 0 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/examples/client_interactive_example.py | 9 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/PKG-INFO | 10 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/README.txt | 203 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/SimpleJSONRPCServer.py | 229 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/__init__.py | 6 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/config.py | 38 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/history.py | 40 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonclass.py | 145 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonrpc.py | 556 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/setup.py | 28 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/LICENSE.txt | 11 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/rednose.py | 387 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/setup.py | 29 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/python_lib/zmq_fedora.tar.gz | bin | 1752871 -> 0 bytes | |||
-rwxr-xr-x | scripts/automation/trex_control_plane/server/extended_daemon_runner.py | 30 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/server/outer_packages.py | 64 | ||||
-rwxr-xr-x | scripts/automation/trex_control_plane/server/zmq_monitor_thread.py | 20 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/LICENSE | 19 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/PKG-INFO | 28 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/README | 18 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/__init__.py | 284 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/composer.py | 123 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/constructor.py | 638 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/dumper.py | 62 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/emitter.py | 1162 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/error.py | 75 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/events.py | 86 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/loader.py | 40 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/nodes.py | 49 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/parser.py | 484 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/reader.py | 222 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/representer.py | 501 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/resolver.py | 205 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/scanner.py | 1458 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/serializer.py | 121 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/lib/yaml/tokens.py | 104 | ||||
-rw-r--r-- | scripts/external_libs/PyYAML-3.01/setup.py | 52 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/__init__.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/dependency_links.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/PKG-INFO) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/enum/LICENSE (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/LICENSE) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/enum/README (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/README) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/enum/__init__.py (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/enum/doc/enum.rst (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/doc/enum.rst) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/enum/enum.py (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/enum.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/enum/test_enum.py (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/test_enum.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/enum34-1.0.4/setup.py (renamed from scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/setup.py) | 88 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/LICENSE.txt (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/LICENSE.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/MANIFEST.in (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/MANIFEST.in) | 4 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/PKG-INFO) | 920 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/README.rst (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/README.rst) | 876 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/SimpleJSONRPCServer.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/SimpleJSONRPCServer.py) | 1204 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/__init__.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/__init__.py) | 68 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/config.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/config.py) | 282 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/history.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/history.py) | 190 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonclass.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonclass.py) | 590 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonrpc.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonrpc.py) | 2384 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/threadpool.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/threadpool.py) | 980 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/utils.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/utils.py) | 244 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/PKG-INFO) | 920 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/SOURCES.txt (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/SOURCES.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/dependency_links.txt (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/dependency_links.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/top_level.txt (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/top_level.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/setup.cfg (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/setup.cfg) | 16 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/jsonrpclib-pelix-0.2.5/setup.py (renamed from scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/setup.py) | 148 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/ACKS (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/ACKS) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/AUTHORS (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/AUTHORS) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/ChangeLog (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/ChangeLog) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/LICENSE (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/LICENSE) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/PKG-INFO) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/README (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/README) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/RELEASE-NOTES (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/RELEASE-NOTES) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/doc/source/Makefile (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/doc/source/Makefile) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/doc/source/conf.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/doc/source/conf.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/doc/source/index.rst (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/doc/source/index.rst) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile.egg-info/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/PKG-INFO) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile.egg-info/SOURCES.txt (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/SOURCES.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile.egg-info/dependency_links.txt (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/dependency_links.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile.egg-info/not-zip-safe (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/not-zip-safe) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile.egg-info/top_level.txt (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/top_level.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile/__init__.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile/linklockfile.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile/linklockfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile/mkdirlockfile.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile/mkdirlockfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile/pidlockfile.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile/pidlockfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile/sqlitelockfile.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile/sqlitelockfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/lockfile/symlinklockfile.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile/symlinklockfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/setup.cfg (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/setup.cfg) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/setup.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/setup.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/test-requirements.txt (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/test-requirements.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/test/compliancetest.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/test/compliancetest.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/test/test_lockfile.py (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/test/test_lockfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/lockfile-0.10.2/tox.ini (renamed from scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/tox.ini) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/ChangeLog (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/ChangeLog) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/LICENSE.ASF-2 (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.ASF-2) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/LICENSE.GPL-3 (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/LICENSE.GPL-3) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/MANIFEST.in (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/MANIFEST.in) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/PKG-INFO) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/daemon/__init__.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/daemon/_metadata.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/_metadata.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/daemon/daemon.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/daemon.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/daemon/pidfile.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/pidfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/daemon/runner.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/daemon/runner.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/doc/CREDITS (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/CREDITS) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/doc/FAQ (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/FAQ) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/doc/TODO (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/TODO) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/doc/hacking.txt (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/doc/hacking.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/PKG-INFO (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/PKG-INFO) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/SOURCES.txt (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/SOURCES.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/dependency_links.txt (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/not-zip-safe) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/not-zip-safe (renamed from scripts/automation/trex_control_plane/python_lib/__init__.py) | 2 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/requires.txt (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/requires.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/top_level.txt (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/top_level.txt) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/python_daemon.egg-info/version_info.json (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/python_daemon.egg-info/version_info.json) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/setup.cfg (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.cfg) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/setup.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/setup.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test/__init__.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test/scaffold.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/scaffold.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test/test_daemon.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_daemon.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test/test_metadata.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_metadata.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test/test_pidfile.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_pidfile.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test/test_runner.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test/test_runner.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/test_version.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/test_version.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/python-daemon-2.0.5/version.py (renamed from scripts/automation/trex_control_plane/python_lib/python-daemon-2.0.5/version.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/MANIFEST.in (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/MANIFEST.in) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/Makefile (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/Makefile) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/README.rst (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/README.rst) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/VERSION (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/VERSION) | 0 | ||||
-rw-r--r-- | scripts/external_libs/termstyle/__init__.py (renamed from src/console/zmq/eventloop/minitornado/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/python-termstyle.xml (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/python-termstyle.xml) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/setup.py (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/setup.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/termstyle.py (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/termstyle.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/test2.py (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/test2.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/test3.py (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/test3.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/termstyle/test_all.sh (renamed from scripts/automation/trex_control_plane/python_lib/termstyle/test_all.sh) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/__init__.py (renamed from src/console/zmq/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/auth/__init__.py (renamed from src/console/zmq/auth/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/auth/base.py (renamed from src/console/zmq/auth/base.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/auth/certs.py (renamed from src/console/zmq/auth/certs.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/auth/ioloop.py (renamed from src/console/zmq/auth/ioloop.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/auth/thread.py (renamed from src/console/zmq/auth/thread.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/__init__.py (renamed from src/console/zmq/backend/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/__init__.py (renamed from src/console/zmq/backend/cffi/__init__.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cffi/_cdefs.h (renamed from src/console/zmq/backend/cffi/_cdefs.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/_cffi.py (renamed from src/console/zmq/backend/cffi/_cffi.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/_poll.py (renamed from src/console/zmq/backend/cffi/_poll.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cffi/_verify.c (renamed from src/console/zmq/backend/cffi/_verify.c) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/constants.py (renamed from src/console/zmq/backend/cffi/constants.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/context.py (renamed from src/console/zmq/backend/cffi/context.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/devices.py (renamed from src/console/zmq/backend/cffi/devices.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/error.py (renamed from src/console/zmq/backend/cffi/error.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/message.py (renamed from src/console/zmq/backend/cffi/message.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/socket.py (renamed from src/console/zmq/backend/cffi/socket.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cffi/utils.py (renamed from src/console/zmq/backend/cffi/utils.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/__init__.py (renamed from src/console/zmq/backend/cython/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/_device.py (renamed from src/console/zmq/backend/cython/_device.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/_device.so | bin | 0 -> 116272 bytes | |||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/_poll.py (renamed from src/console/zmq/backend/cython/_poll.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/_poll.so | bin | 0 -> 164229 bytes | |||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/_version.py (renamed from src/console/zmq/backend/cython/_version.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/_version.so | bin | 0 -> 45734 bytes | |||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/checkrc.pxd (renamed from src/console/zmq/backend/cython/checkrc.pxd) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/constants.py (renamed from src/console/zmq/backend/cython/constants.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/constants.so | bin | 0 -> 189238 bytes | |||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/context.pxd (renamed from src/console/zmq/backend/cython/context.pxd) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/context.py (renamed from src/console/zmq/backend/cython/context.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/context.so | bin | 0 -> 172716 bytes | |||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/error.py (renamed from src/console/zmq/backend/cython/error.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/error.so | bin | 0 -> 63000 bytes | |||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/libzmq.pxd (renamed from src/console/zmq/backend/cython/libzmq.pxd) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/message.pxd (renamed from src/console/zmq/backend/cython/message.pxd) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/message.py (renamed from src/console/zmq/backend/cython/message.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/message.so | bin | 0 -> 256078 bytes | |||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/socket.pxd (renamed from src/console/zmq/backend/cython/socket.pxd) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/socket.py (renamed from src/console/zmq/backend/cython/socket.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/socket.so | bin | 0 -> 472585 bytes | |||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/utils.pxd (renamed from src/console/zmq/backend/cython/utils.pxd) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/cython/utils.py (renamed from src/console/zmq/backend/cython/utils.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/backend/cython/utils.so | bin | 0 -> 106936 bytes | |||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/backend/select.py (renamed from src/console/zmq/backend/select.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/devices/__init__.py (renamed from src/console/zmq/devices/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/devices/basedevice.py (renamed from src/console/zmq/devices/basedevice.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/devices/monitoredqueue.pxd (renamed from src/console/zmq/devices/monitoredqueue.pxd) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/devices/monitoredqueue.py (renamed from src/console/zmq/devices/monitoredqueue.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/devices/monitoredqueue.so | bin | 0 -> 157950 bytes | |||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/devices/monitoredqueuedevice.py (renamed from src/console/zmq/devices/monitoredqueuedevice.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/devices/proxydevice.py (renamed from src/console/zmq/devices/proxydevice.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/error.py (renamed from src/console/zmq/error.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/__init__.py (renamed from src/console/zmq/eventloop/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/ioloop.py (renamed from src/console/zmq/eventloop/ioloop.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/eventloop/minitornado/__init__.py (renamed from src/console/zmq/eventloop/minitornado/platform/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/concurrent.py (renamed from src/console/zmq/eventloop/minitornado/concurrent.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/ioloop.py (renamed from src/console/zmq/eventloop/minitornado/ioloop.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/log.py (renamed from src/console/zmq/eventloop/minitornado/log.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/eventloop/minitornado/platform/__init__.py (renamed from src/console/zmq/log/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/platform/auto.py (renamed from src/console/zmq/eventloop/minitornado/platform/auto.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/platform/common.py (renamed from src/console/zmq/eventloop/minitornado/platform/common.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/platform/interface.py (renamed from src/console/zmq/eventloop/minitornado/platform/interface.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/platform/posix.py (renamed from src/console/zmq/eventloop/minitornado/platform/posix.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/platform/windows.py (renamed from src/console/zmq/eventloop/minitornado/platform/windows.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/stack_context.py (renamed from src/console/zmq/eventloop/minitornado/stack_context.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/minitornado/util.py (renamed from src/console/zmq/eventloop/minitornado/util.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/eventloop/zmqstream.py (renamed from src/console/zmq/eventloop/zmqstream.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/__init__.py (renamed from src/console/zmq/green/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/core.py (renamed from src/console/zmq/green/core.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/device.py (renamed from src/console/zmq/green/device.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/eventloop/__init__.py (renamed from src/console/zmq/green/eventloop/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/eventloop/ioloop.py (renamed from src/console/zmq/green/eventloop/ioloop.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/eventloop/zmqstream.py (renamed from src/console/zmq/green/eventloop/zmqstream.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/green/poll.py (renamed from src/console/zmq/green/poll.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/libzmq.so | bin | 0 -> 3150071 bytes | |||
-rw-r--r-- | scripts/external_libs/zmq/log/__init__.py (renamed from src/console/zmq/utils/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/log/handlers.py (renamed from src/console/zmq/log/handlers.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/ssh/__init__.py (renamed from src/console/zmq/ssh/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/ssh/forward.py (renamed from src/console/zmq/ssh/forward.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/ssh/tunnel.py (renamed from src/console/zmq/ssh/tunnel.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/__init__.py (renamed from src/console/zmq/sugar/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/attrsettr.py (renamed from src/console/zmq/sugar/attrsettr.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/constants.py (renamed from src/console/zmq/sugar/constants.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/context.py (renamed from src/console/zmq/sugar/context.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/frame.py (renamed from src/console/zmq/sugar/frame.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/poll.py (renamed from src/console/zmq/sugar/poll.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/socket.py (renamed from src/console/zmq/sugar/socket.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/tracker.py (renamed from src/console/zmq/sugar/tracker.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/sugar/version.py (renamed from src/console/zmq/sugar/version.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/__init__.py (renamed from src/console/zmq/tests/__init__.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_auth.py (renamed from src/console/zmq/tests/test_auth.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_cffi_backend.py (renamed from src/console/zmq/tests/test_cffi_backend.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_constants.py (renamed from src/console/zmq/tests/test_constants.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_context.py (renamed from src/console/zmq/tests/test_context.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_device.py (renamed from src/console/zmq/tests/test_device.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_error.py (renamed from src/console/zmq/tests/test_error.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_etc.py (renamed from src/console/zmq/tests/test_etc.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_imports.py (renamed from src/console/zmq/tests/test_imports.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_ioloop.py (renamed from src/console/zmq/tests/test_ioloop.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_log.py (renamed from src/console/zmq/tests/test_log.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_message.py (renamed from src/console/zmq/tests/test_message.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_monitor.py (renamed from src/console/zmq/tests/test_monitor.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_monqueue.py (renamed from src/console/zmq/tests/test_monqueue.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_multipart.py (renamed from src/console/zmq/tests/test_multipart.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_pair.py (renamed from src/console/zmq/tests/test_pair.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_poll.py (renamed from src/console/zmq/tests/test_poll.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_pubsub.py (renamed from src/console/zmq/tests/test_pubsub.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_reqrep.py (renamed from src/console/zmq/tests/test_reqrep.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_security.py (renamed from src/console/zmq/tests/test_security.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_socket.py (renamed from src/console/zmq/tests/test_socket.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_stopwatch.py (renamed from src/console/zmq/tests/test_stopwatch.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_version.py (renamed from src/console/zmq/tests/test_version.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_win32_shim.py (renamed from src/console/zmq/tests/test_win32_shim.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_z85.py (renamed from src/console/zmq/tests/test_z85.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/tests/test_zmqstream.py (renamed from src/console/zmq/tests/test_zmqstream.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/__init__.py | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/buffers.pxd (renamed from src/console/zmq/utils/buffers.pxd) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/compiler.json (renamed from src/console/zmq/utils/compiler.json) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/config.json (renamed from src/console/zmq/utils/config.json) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/constant_names.py (renamed from src/console/zmq/utils/constant_names.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/garbage.py (renamed from src/console/zmq/utils/garbage.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/getpid_compat.h (renamed from src/console/zmq/utils/getpid_compat.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/interop.py (renamed from src/console/zmq/utils/interop.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/ipcmaxlen.h (renamed from src/console/zmq/utils/ipcmaxlen.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/jsonapi.py (renamed from src/console/zmq/utils/jsonapi.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/monitor.py (renamed from src/console/zmq/utils/monitor.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/pyversion_compat.h (renamed from src/console/zmq/utils/pyversion_compat.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/sixcerpt.py (renamed from src/console/zmq/utils/sixcerpt.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/strtypes.py (renamed from src/console/zmq/utils/strtypes.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/win32.py (renamed from src/console/zmq/utils/win32.py) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/external_libs/zmq/utils/z85.py (renamed from src/console/zmq/utils/z85.py) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/zmq_compat.h (renamed from src/console/zmq/utils/zmq_compat.h) | 0 | ||||
-rw-r--r-- | scripts/external_libs/zmq/utils/zmq_constants.h (renamed from src/console/zmq/utils/zmq_constants.h) | 0 | ||||
-rw-r--r--[-rwxr-xr-x] | scripts/libzmq.so.3 | bin | 3150071 -> 3150071 bytes | |||
-rw-r--r--[-rwxr-xr-x] | scripts/libzmq.so.3.1.0 | bin | 3150071 -> 3150071 bytes | |||
-rwxr-xr-x | scripts/trex-console | 2 |
388 files changed, 10534 insertions, 6443 deletions
@@ -45,8 +45,10 @@ scripts/mock-*
 ehthumbs.db
 Thumbs.db
-# slickedit files #
-###################
+
+# IDE/ Editors files #
+######################
+.idea/
 *.vpj
 *.vpw
 *.vtg
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index 06929302..57d19820 100755
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -1,5 +1,5 @@
-Hanoh haim
-Dave Johnson
-Wenxian Li
-Dan Klein
-
+Hanoh haim
+Dave Johnson
+Wenxian Li
+Dan Klein
+Itay Marom
diff --git a/yaml-cpp/CMakeLists.txt b/external_libs/yaml-cpp/CMakeLists.txt
index 823ce201..823ce201 100755..100644
--- a/yaml-cpp/CMakeLists.txt
+++ b/external_libs/yaml-cpp/CMakeLists.txt
diff --git a/yaml-cpp/include/yaml-cpp/aliasmanager.h b/external_libs/yaml-cpp/include/yaml-cpp/aliasmanager.h
index e90c93dd..e90c93dd 100755..100644
--- a/yaml-cpp/include/yaml-cpp/aliasmanager.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/aliasmanager.h
diff --git a/yaml-cpp/include/yaml-cpp/anchor.h b/external_libs/yaml-cpp/include/yaml-cpp/anchor.h
index 433f2fa5..433f2fa5 100755..100644
--- a/yaml-cpp/include/yaml-cpp/anchor.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/anchor.h
diff --git a/yaml-cpp/include/yaml-cpp/binary.h b/external_libs/yaml-cpp/include/yaml-cpp/binary.h
index 8504ebeb..8504ebeb 100755..100644
--- a/yaml-cpp/include/yaml-cpp/binary.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/binary.h
diff --git a/yaml-cpp/include/yaml-cpp/contrib/anchordict.h b/external_libs/yaml-cpp/include/yaml-cpp/contrib/anchordict.h
index e483dc4b..e483dc4b 100755..100644
--- a/yaml-cpp/include/yaml-cpp/contrib/anchordict.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/contrib/anchordict.h
diff --git a/yaml-cpp/include/yaml-cpp/contrib/graphbuilder.h b/external_libs/yaml-cpp/include/yaml-cpp/contrib/graphbuilder.h
index 6739a12b..6739a12b 100755..100644
--- a/yaml-cpp/include/yaml-cpp/contrib/graphbuilder.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/contrib/graphbuilder.h
diff --git a/yaml-cpp/include/yaml-cpp/conversion.h b/external_libs/yaml-cpp/include/yaml-cpp/conversion.h
index 1b557b56..1b557b56 100755..100644
--- a/yaml-cpp/include/yaml-cpp/conversion.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/conversion.h
diff --git a/yaml-cpp/include/yaml-cpp/dll.h b/external_libs/yaml-cpp/include/yaml-cpp/dll.h
index ea138401..ea138401 100755..100644
--- a/yaml-cpp/include/yaml-cpp/dll.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/dll.h
diff --git a/yaml-cpp/include/yaml-cpp/emitfromevents.h b/external_libs/yaml-cpp/include/yaml-cpp/emitfromevents.h
index e11ae640..e11ae640 100755..100644
--- a/yaml-cpp/include/yaml-cpp/emitfromevents.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/emitfromevents.h
diff --git a/yaml-cpp/include/yaml-cpp/emitter.h b/external_libs/yaml-cpp/include/yaml-cpp/emitter.h
index 1d7edf2f..1d7edf2f 100755..100644
--- a/yaml-cpp/include/yaml-cpp/emitter.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/emitter.h
diff --git a/yaml-cpp/include/yaml-cpp/emittermanip.h b/external_libs/yaml-cpp/include/yaml-cpp/emittermanip.h
index a8ec64a4..a8ec64a4 100755..100644
--- a/yaml-cpp/include/yaml-cpp/emittermanip.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/emittermanip.h
diff --git a/yaml-cpp/include/yaml-cpp/eventhandler.h b/external_libs/yaml-cpp/include/yaml-cpp/eventhandler.h
index 3173a1fb..3173a1fb 100755..100644
--- a/yaml-cpp/include/yaml-cpp/eventhandler.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/eventhandler.h
diff --git a/yaml-cpp/include/yaml-cpp/exceptions.h b/external_libs/yaml-cpp/include/yaml-cpp/exceptions.h
index 394d5868..394d5868 100755..100644
--- a/yaml-cpp/include/yaml-cpp/exceptions.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/exceptions.h
diff --git a/yaml-cpp/include/yaml-cpp/iterator.h b/external_libs/yaml-cpp/include/yaml-cpp/iterator.h
index 400ee340..400ee340 100755..100644
--- a/yaml-cpp/include/yaml-cpp/iterator.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/iterator.h
diff --git a/yaml-cpp/include/yaml-cpp/ltnode.h b/external_libs/yaml-cpp/include/yaml-cpp/ltnode.h
index 30b4f950..30b4f950 100755..100644
--- a/yaml-cpp/include/yaml-cpp/ltnode.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/ltnode.h
diff --git a/yaml-cpp/include/yaml-cpp/mark.h b/external_libs/yaml-cpp/include/yaml-cpp/mark.h
index 7c80fbcb..7c80fbcb 100755..100644
--- a/yaml-cpp/include/yaml-cpp/mark.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/mark.h
diff --git a/yaml-cpp/include/yaml-cpp/node.h b/external_libs/yaml-cpp/include/yaml-cpp/node.h
index e78190e0..e78190e0 100755..100644
--- a/yaml-cpp/include/yaml-cpp/node.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/node.h
diff --git a/yaml-cpp/include/yaml-cpp/nodeimpl.h b/external_libs/yaml-cpp/include/yaml-cpp/nodeimpl.h
index 5ca7ddba..5ca7ddba 100755..100644
--- a/yaml-cpp/include/yaml-cpp/nodeimpl.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/nodeimpl.h
diff --git a/yaml-cpp/include/yaml-cpp/nodereadimpl.h b/external_libs/yaml-cpp/include/yaml-cpp/nodereadimpl.h
index 6838dc5a..6838dc5a 100755..100644
--- a/yaml-cpp/include/yaml-cpp/nodereadimpl.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/nodereadimpl.h
diff --git a/yaml-cpp/include/yaml-cpp/nodeutil.h b/external_libs/yaml-cpp/include/yaml-cpp/nodeutil.h
index d0c01d27..d0c01d27 100755..100644
--- a/yaml-cpp/include/yaml-cpp/nodeutil.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/nodeutil.h
diff --git a/yaml-cpp/include/yaml-cpp/noncopyable.h b/external_libs/yaml-cpp/include/yaml-cpp/noncopyable.h
index 8e61e433..8e61e433 100755..100644
--- a/yaml-cpp/include/yaml-cpp/noncopyable.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/noncopyable.h
diff --git a/yaml-cpp/include/yaml-cpp/null.h b/external_libs/yaml-cpp/include/yaml-cpp/null.h
index 711f18c3..711f18c3 100755..100644
--- a/yaml-cpp/include/yaml-cpp/null.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/null.h
diff --git a/yaml-cpp/include/yaml-cpp/ostream.h b/external_libs/yaml-cpp/include/yaml-cpp/ostream.h
index 65839b1b..65839b1b 100755..100644
--- a/yaml-cpp/include/yaml-cpp/ostream.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/ostream.h
diff --git a/yaml-cpp/include/yaml-cpp/parser.h b/external_libs/yaml-cpp/include/yaml-cpp/parser.h
index f71cdff4..f71cdff4 100755..100644
--- a/yaml-cpp/include/yaml-cpp/parser.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/parser.h
diff --git a/yaml-cpp/include/yaml-cpp/stlemitter.h b/external_libs/yaml-cpp/include/yaml-cpp/stlemitter.h
index f8ff20ea..f8ff20ea 100755..100644
--- a/yaml-cpp/include/yaml-cpp/stlemitter.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/stlemitter.h
diff --git a/yaml-cpp/include/yaml-cpp/stlnode.h b/external_libs/yaml-cpp/include/yaml-cpp/stlnode.h
index 40d4ae79..40d4ae79 100755..100644
--- a/yaml-cpp/include/yaml-cpp/stlnode.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/stlnode.h
diff --git a/yaml-cpp/include/yaml-cpp/traits.h b/external_libs/yaml-cpp/include/yaml-cpp/traits.h
index 09eead44..09eead44 100755..100644
--- a/yaml-cpp/include/yaml-cpp/traits.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/traits.h
diff --git a/yaml-cpp/include/yaml-cpp/yaml.h b/external_libs/yaml-cpp/include/yaml-cpp/yaml.h
index 29595553..29595553 100755..100644
--- a/yaml-cpp/include/yaml-cpp/yaml.h
+++ b/external_libs/yaml-cpp/include/yaml-cpp/yaml.h
diff --git a/yaml-cpp/install.txt b/external_libs/yaml-cpp/install.txt
index 93923624..93923624 100755..100644
--- a/yaml-cpp/install.txt
+++ b/external_libs/yaml-cpp/install.txt
diff --git a/yaml-cpp/license.txt b/external_libs/yaml-cpp/license.txt
index 5bd9e1a1..5bd9e1a1 100755..100644
--- a/yaml-cpp/license.txt
+++ b/external_libs/yaml-cpp/license.txt
diff --git a/yaml-cpp/src/aliasmanager.cpp b/external_libs/yaml-cpp/src/aliasmanager.cpp
index ed4d3b5a..ed4d3b5a 100755..100644
--- a/yaml-cpp/src/aliasmanager.cpp
+++ b/external_libs/yaml-cpp/src/aliasmanager.cpp
diff --git a/yaml-cpp/src/binary.cpp b/external_libs/yaml-cpp/src/binary.cpp
index 589eb089..589eb089 100755..100644
--- a/yaml-cpp/src/binary.cpp
+++ b/external_libs/yaml-cpp/src/binary.cpp
diff --git a/yaml-cpp/src/collectionstack.h b/external_libs/yaml-cpp/src/collectionstack.h
index 4a986bc9..4a986bc9 100755..100644
--- a/yaml-cpp/src/collectionstack.h
+++ b/external_libs/yaml-cpp/src/collectionstack.h
diff --git a/yaml-cpp/src/contrib/graphbuilder.cpp b/external_libs/yaml-cpp/src/contrib/graphbuilder.cpp
index ab5159cc..ab5159cc 100755..100644
--- a/yaml-cpp/src/contrib/graphbuilder.cpp
+++ b/external_libs/yaml-cpp/src/contrib/graphbuilder.cpp
diff --git a/yaml-cpp/src/contrib/graphbuilderadapter.cpp b/external_libs/yaml-cpp/src/contrib/graphbuilderadapter.cpp
index 557e97c8..557e97c8 100755..100644
--- a/yaml-cpp/src/contrib/graphbuilderadapter.cpp
+++ b/external_libs/yaml-cpp/src/contrib/graphbuilderadapter.cpp
diff --git a/yaml-cpp/src/contrib/graphbuilderadapter.h b/external_libs/yaml-cpp/src/contrib/graphbuilderadapter.h
index 3ef8ab6c..3ef8ab6c 100755..100644
--- a/yaml-cpp/src/contrib/graphbuilderadapter.h
+++ b/external_libs/yaml-cpp/src/contrib/graphbuilderadapter.h
diff --git a/yaml-cpp/src/conversion.cpp b/external_libs/yaml-cpp/src/conversion.cpp
index f81e1a0b..f81e1a0b 100755..100644
--- a/yaml-cpp/src/conversion.cpp
+++ b/external_libs/yaml-cpp/src/conversion.cpp
diff --git a/yaml-cpp/src/directives.cpp b/external_libs/yaml-cpp/src/directives.cpp
index faf1483b..faf1483b 100755..100644
--- a/yaml-cpp/src/directives.cpp
+++ b/external_libs/yaml-cpp/src/directives.cpp
diff --git a/yaml-cpp/src/directives.h b/external_libs/yaml-cpp/src/directives.h
index a3308f72..a3308f72 100755..100644
--- a/yaml-cpp/src/directives.h
+++ b/external_libs/yaml-cpp/src/directives.h
diff --git a/yaml-cpp/src/emitfromevents.cpp b/external_libs/yaml-cpp/src/emitfromevents.cpp
index 49fc10b2..49fc10b2 100755..100644
--- a/yaml-cpp/src/emitfromevents.cpp
+++ b/external_libs/yaml-cpp/src/emitfromevents.cpp
diff --git a/yaml-cpp/src/emitter.cpp b/external_libs/yaml-cpp/src/emitter.cpp
index 91f48da7..91f48da7 100755..100644
--- a/yaml-cpp/src/emitter.cpp
+++ b/external_libs/yaml-cpp/src/emitter.cpp
diff --git a/yaml-cpp/src/emitterstate.cpp b/external_libs/yaml-cpp/src/emitterstate.cpp
index 562e82c9..562e82c9 100755..100644
--- a/yaml-cpp/src/emitterstate.cpp
+++ b/external_libs/yaml-cpp/src/emitterstate.cpp
diff --git a/yaml-cpp/src/emitterstate.h b/external_libs/yaml-cpp/src/emitterstate.h
index 5698e325..5698e325 100755..100644
--- a/yaml-cpp/src/emitterstate.h
+++ b/external_libs/yaml-cpp/src/emitterstate.h
diff --git a/yaml-cpp/src/emitterutils.cpp b/external_libs/yaml-cpp/src/emitterutils.cpp
index 3d184d6c..3d184d6c 100755..100644
---
a/yaml-cpp/src/emitterutils.cpp +++ b/external_libs/yaml-cpp/src/emitterutils.cpp diff --git a/yaml-cpp/src/emitterutils.h b/external_libs/yaml-cpp/src/emitterutils.h index 0e270d69..0e270d69 100755..100644 --- a/yaml-cpp/src/emitterutils.h +++ b/external_libs/yaml-cpp/src/emitterutils.h diff --git a/yaml-cpp/src/exp.cpp b/external_libs/yaml-cpp/src/exp.cpp index 7bc54546..7bc54546 100755..100644 --- a/yaml-cpp/src/exp.cpp +++ b/external_libs/yaml-cpp/src/exp.cpp diff --git a/yaml-cpp/src/exp.h b/external_libs/yaml-cpp/src/exp.h index 3e12aba4..3e12aba4 100755..100644 --- a/yaml-cpp/src/exp.h +++ b/external_libs/yaml-cpp/src/exp.h diff --git a/yaml-cpp/src/indentation.h b/external_libs/yaml-cpp/src/indentation.h index 25f684f8..25f684f8 100755..100644 --- a/yaml-cpp/src/indentation.h +++ b/external_libs/yaml-cpp/src/indentation.h diff --git a/yaml-cpp/src/iterator.cpp b/external_libs/yaml-cpp/src/iterator.cpp index f4159e32..f4159e32 100755..100644 --- a/yaml-cpp/src/iterator.cpp +++ b/external_libs/yaml-cpp/src/iterator.cpp diff --git a/yaml-cpp/src/iterpriv.h b/external_libs/yaml-cpp/src/iterpriv.h index c511e8ac..c511e8ac 100755..100644 --- a/yaml-cpp/src/iterpriv.h +++ b/external_libs/yaml-cpp/src/iterpriv.h diff --git a/yaml-cpp/src/node.cpp b/external_libs/yaml-cpp/src/node.cpp index 360b4ad9..360b4ad9 100755..100644 --- a/yaml-cpp/src/node.cpp +++ b/external_libs/yaml-cpp/src/node.cpp diff --git a/yaml-cpp/src/nodebuilder.cpp b/external_libs/yaml-cpp/src/nodebuilder.cpp index 13a70326..13a70326 100755..100644 --- a/yaml-cpp/src/nodebuilder.cpp +++ b/external_libs/yaml-cpp/src/nodebuilder.cpp diff --git a/yaml-cpp/src/nodebuilder.h b/external_libs/yaml-cpp/src/nodebuilder.h index 9c1d16a0..9c1d16a0 100755..100644 --- a/yaml-cpp/src/nodebuilder.h +++ b/external_libs/yaml-cpp/src/nodebuilder.h diff --git a/yaml-cpp/src/nodeownership.cpp b/external_libs/yaml-cpp/src/nodeownership.cpp index 118edbc8..118edbc8 100755..100644 --- a/yaml-cpp/src/nodeownership.cpp +++ b/external_libs/yaml-cpp/src/nodeownership.cpp diff --git a/yaml-cpp/src/nodeownership.h b/external_libs/yaml-cpp/src/nodeownership.h index 69870814..69870814 100755..100644 --- a/yaml-cpp/src/nodeownership.h +++ b/external_libs/yaml-cpp/src/nodeownership.h diff --git a/yaml-cpp/src/null.cpp b/external_libs/yaml-cpp/src/null.cpp index 08fa9aae..08fa9aae 100755..100644 --- a/yaml-cpp/src/null.cpp +++ b/external_libs/yaml-cpp/src/null.cpp diff --git a/yaml-cpp/src/ostream.cpp b/external_libs/yaml-cpp/src/ostream.cpp index a7f1e14b..a7f1e14b 100755..100644 --- a/yaml-cpp/src/ostream.cpp +++ b/external_libs/yaml-cpp/src/ostream.cpp diff --git a/yaml-cpp/src/parser.cpp b/external_libs/yaml-cpp/src/parser.cpp index b836823f..b836823f 100755..100644 --- a/yaml-cpp/src/parser.cpp +++ b/external_libs/yaml-cpp/src/parser.cpp diff --git a/yaml-cpp/src/ptr_stack.h b/external_libs/yaml-cpp/src/ptr_stack.h index bf454fb3..bf454fb3 100755..100644 --- a/yaml-cpp/src/ptr_stack.h +++ b/external_libs/yaml-cpp/src/ptr_stack.h diff --git a/yaml-cpp/src/ptr_vector.h b/external_libs/yaml-cpp/src/ptr_vector.h index 7b936cb5..7b936cb5 100755..100644 --- a/yaml-cpp/src/ptr_vector.h +++ b/external_libs/yaml-cpp/src/ptr_vector.h diff --git a/yaml-cpp/src/regex.cpp b/external_libs/yaml-cpp/src/regex.cpp index b35b1f43..b35b1f43 100755..100644 --- a/yaml-cpp/src/regex.cpp +++ b/external_libs/yaml-cpp/src/regex.cpp diff --git a/yaml-cpp/src/regex.h b/external_libs/yaml-cpp/src/regex.h index 8722e626..8722e626 100755..100644 --- a/yaml-cpp/src/regex.h +++ 
b/external_libs/yaml-cpp/src/regex.h diff --git a/yaml-cpp/src/regeximpl.h b/external_libs/yaml-cpp/src/regeximpl.h index d5c20d74..d5c20d74 100755..100644 --- a/yaml-cpp/src/regeximpl.h +++ b/external_libs/yaml-cpp/src/regeximpl.h diff --git a/yaml-cpp/src/scanner.cpp b/external_libs/yaml-cpp/src/scanner.cpp index 199ef25a..199ef25a 100755..100644 --- a/yaml-cpp/src/scanner.cpp +++ b/external_libs/yaml-cpp/src/scanner.cpp diff --git a/yaml-cpp/src/scanner.h b/external_libs/yaml-cpp/src/scanner.h index bc8dcbe5..bc8dcbe5 100755..100644 --- a/yaml-cpp/src/scanner.h +++ b/external_libs/yaml-cpp/src/scanner.h diff --git a/yaml-cpp/src/scanscalar.cpp b/external_libs/yaml-cpp/src/scanscalar.cpp index 064c0867..064c0867 100755..100644 --- a/yaml-cpp/src/scanscalar.cpp +++ b/external_libs/yaml-cpp/src/scanscalar.cpp diff --git a/yaml-cpp/src/scanscalar.h b/external_libs/yaml-cpp/src/scanscalar.h index c198cb18..c198cb18 100755..100644 --- a/yaml-cpp/src/scanscalar.h +++ b/external_libs/yaml-cpp/src/scanscalar.h diff --git a/yaml-cpp/src/scantag.cpp b/external_libs/yaml-cpp/src/scantag.cpp index b71cbcc4..b71cbcc4 100755..100644 --- a/yaml-cpp/src/scantag.cpp +++ b/external_libs/yaml-cpp/src/scantag.cpp diff --git a/yaml-cpp/src/scantag.h b/external_libs/yaml-cpp/src/scantag.h index 38437c03..38437c03 100755..100644 --- a/yaml-cpp/src/scantag.h +++ b/external_libs/yaml-cpp/src/scantag.h diff --git a/yaml-cpp/src/scantoken.cpp b/external_libs/yaml-cpp/src/scantoken.cpp index 06d9cd62..06d9cd62 100755..100644 --- a/yaml-cpp/src/scantoken.cpp +++ b/external_libs/yaml-cpp/src/scantoken.cpp diff --git a/yaml-cpp/src/setting.h b/external_libs/yaml-cpp/src/setting.h index 806ccdae..806ccdae 100755..100644 --- a/yaml-cpp/src/setting.h +++ b/external_libs/yaml-cpp/src/setting.h diff --git a/yaml-cpp/src/simplekey.cpp b/external_libs/yaml-cpp/src/simplekey.cpp index 857a9e0b..857a9e0b 100755..100644 --- a/yaml-cpp/src/simplekey.cpp +++ b/external_libs/yaml-cpp/src/simplekey.cpp diff --git a/yaml-cpp/src/singledocparser.cpp b/external_libs/yaml-cpp/src/singledocparser.cpp index 47759c32..47759c32 100755..100644 --- a/yaml-cpp/src/singledocparser.cpp +++ b/external_libs/yaml-cpp/src/singledocparser.cpp diff --git a/yaml-cpp/src/singledocparser.h b/external_libs/yaml-cpp/src/singledocparser.h index 3798dccf..3798dccf 100755..100644 --- a/yaml-cpp/src/singledocparser.h +++ b/external_libs/yaml-cpp/src/singledocparser.h diff --git a/yaml-cpp/src/stream.cpp b/external_libs/yaml-cpp/src/stream.cpp index 447b67c1..447b67c1 100755..100644 --- a/yaml-cpp/src/stream.cpp +++ b/external_libs/yaml-cpp/src/stream.cpp diff --git a/yaml-cpp/src/stream.h b/external_libs/yaml-cpp/src/stream.h index 87f48dc8..87f48dc8 100755..100644 --- a/yaml-cpp/src/stream.h +++ b/external_libs/yaml-cpp/src/stream.h diff --git a/yaml-cpp/src/streamcharsource.h b/external_libs/yaml-cpp/src/streamcharsource.h index 21fae4e1..21fae4e1 100755..100644 --- a/yaml-cpp/src/streamcharsource.h +++ b/external_libs/yaml-cpp/src/streamcharsource.h diff --git a/yaml-cpp/src/stringsource.h b/external_libs/yaml-cpp/src/stringsource.h index 21be3c9a..21be3c9a 100755..100644 --- a/yaml-cpp/src/stringsource.h +++ b/external_libs/yaml-cpp/src/stringsource.h diff --git a/yaml-cpp/src/tag.cpp b/external_libs/yaml-cpp/src/tag.cpp index 82a47047..82a47047 100755..100644 --- a/yaml-cpp/src/tag.cpp +++ b/external_libs/yaml-cpp/src/tag.cpp diff --git a/yaml-cpp/src/tag.h b/external_libs/yaml-cpp/src/tag.h index 5f77548d..5f77548d 100755..100644 --- 
a/yaml-cpp/src/tag.h +++ b/external_libs/yaml-cpp/src/tag.h diff --git a/yaml-cpp/src/token.h b/external_libs/yaml-cpp/src/token.h index 9807e258..9807e258 100755..100644 --- a/yaml-cpp/src/token.h +++ b/external_libs/yaml-cpp/src/token.h diff --git a/yaml-cpp/test/CMakeLists.txt b/external_libs/yaml-cpp/test/CMakeLists.txt index 241c19ef..241c19ef 100755..100644 --- a/yaml-cpp/test/CMakeLists.txt +++ b/external_libs/yaml-cpp/test/CMakeLists.txt diff --git a/yaml-cpp/test/emittertests.cpp b/external_libs/yaml-cpp/test/emittertests.cpp index a7fdab67..a7fdab67 100755..100644 --- a/yaml-cpp/test/emittertests.cpp +++ b/external_libs/yaml-cpp/test/emittertests.cpp diff --git a/yaml-cpp/test/emittertests.h b/external_libs/yaml-cpp/test/emittertests.h index e7c6ac50..e7c6ac50 100755..100644 --- a/yaml-cpp/test/emittertests.h +++ b/external_libs/yaml-cpp/test/emittertests.h diff --git a/yaml-cpp/test/main.cpp b/external_libs/yaml-cpp/test/main.cpp index 64c69f10..64c69f10 100755..100644 --- a/yaml-cpp/test/main.cpp +++ b/external_libs/yaml-cpp/test/main.cpp diff --git a/yaml-cpp/test/nodetests.h b/external_libs/yaml-cpp/test/nodetests.h index 733e782e..733e782e 100755..100644 --- a/yaml-cpp/test/nodetests.h +++ b/external_libs/yaml-cpp/test/nodetests.h diff --git a/yaml-cpp/test/old-api/parsertests.cpp b/external_libs/yaml-cpp/test/old-api/parsertests.cpp index de7f1238..de7f1238 100755..100644 --- a/yaml-cpp/test/old-api/parsertests.cpp +++ b/external_libs/yaml-cpp/test/old-api/parsertests.cpp diff --git a/yaml-cpp/test/old-api/spectests.cpp b/external_libs/yaml-cpp/test/old-api/spectests.cpp index fb5505be..fb5505be 100755..100644 --- a/yaml-cpp/test/old-api/spectests.cpp +++ b/external_libs/yaml-cpp/test/old-api/spectests.cpp diff --git a/yaml-cpp/test/parsertests.h b/external_libs/yaml-cpp/test/parsertests.h index f3de1b8c..f3de1b8c 100755..100644 --- a/yaml-cpp/test/parsertests.h +++ b/external_libs/yaml-cpp/test/parsertests.h diff --git a/yaml-cpp/test/specexamples.h b/external_libs/yaml-cpp/test/specexamples.h index 4688bdcf..4688bdcf 100755..100644 --- a/yaml-cpp/test/specexamples.h +++ b/external_libs/yaml-cpp/test/specexamples.h diff --git a/yaml-cpp/test/spectests.cpp b/external_libs/yaml-cpp/test/spectests.cpp index bffc5062..bffc5062 100755..100644 --- a/yaml-cpp/test/spectests.cpp +++ b/external_libs/yaml-cpp/test/spectests.cpp diff --git a/yaml-cpp/test/spectests.h b/external_libs/yaml-cpp/test/spectests.h index 5246df58..5246df58 100755..100644 --- a/yaml-cpp/test/spectests.h +++ b/external_libs/yaml-cpp/test/spectests.h diff --git a/yaml-cpp/test/tests.cpp b/external_libs/yaml-cpp/test/tests.cpp index 2dff6eeb..2dff6eeb 100755..100644 --- a/yaml-cpp/test/tests.cpp +++ b/external_libs/yaml-cpp/test/tests.cpp diff --git a/yaml-cpp/test/tests.h b/external_libs/yaml-cpp/test/tests.h index 757dbc53..757dbc53 100755..100644 --- a/yaml-cpp/test/tests.h +++ b/external_libs/yaml-cpp/test/tests.h diff --git a/yaml-cpp/util/CMakeLists.txt b/external_libs/yaml-cpp/util/CMakeLists.txt index 22339f02..22339f02 100755..100644 --- a/yaml-cpp/util/CMakeLists.txt +++ b/external_libs/yaml-cpp/util/CMakeLists.txt diff --git a/yaml-cpp/util/api.cpp b/external_libs/yaml-cpp/util/api.cpp index e5180a8a..e5180a8a 100755..100644 --- a/yaml-cpp/util/api.cpp +++ b/external_libs/yaml-cpp/util/api.cpp diff --git a/yaml-cpp/util/parse.cpp b/external_libs/yaml-cpp/util/parse.cpp index d02a76a7..d02a76a7 100755..100644 --- a/yaml-cpp/util/parse.cpp +++ b/external_libs/yaml-cpp/util/parse.cpp diff --git 
a/yaml-cpp/yaml-cpp.pc.cmake b/external_libs/yaml-cpp/yaml-cpp.pc.cmake index 04d343f6..04d343f6 100755..100644 --- a/yaml-cpp/yaml-cpp.pc.cmake +++ b/external_libs/yaml-cpp/yaml-cpp.pc.cmake diff --git a/src/zmq/include/zmq.h b/external_libs/zmq/include/zmq.h index f7b10db6..f7b10db6 100755..100644 --- a/src/zmq/include/zmq.h +++ b/external_libs/zmq/include/zmq.h diff --git a/src/zmq/include/zmq_utils.h b/external_libs/zmq/include/zmq_utils.h index 9b14aa72..9b14aa72 100755..100644 --- a/src/zmq/include/zmq_utils.h +++ b/external_libs/zmq/include/zmq_utils.h diff --git a/src/zmq/libzmq.a b/external_libs/zmq/libzmq.a Binary files differindex 8c994993..8c994993 100755..100644 --- a/src/zmq/libzmq.a +++ b/external_libs/zmq/libzmq.a diff --git a/src/zmq/libzmq.la b/external_libs/zmq/libzmq.la index 2e5f984d..2e5f984d 100755..100644 --- a/src/zmq/libzmq.la +++ b/external_libs/zmq/libzmq.la diff --git a/src/zmq/libzmq.lai b/external_libs/zmq/libzmq.lai index 126d3d5e..126d3d5e 100755..100644 --- a/src/zmq/libzmq.lai +++ b/external_libs/zmq/libzmq.lai diff --git a/src/zmq/libzmq.so b/external_libs/zmq/libzmq.so Binary files differindex 16980c27..16980c27 100755..100644 --- a/src/zmq/libzmq.so +++ b/external_libs/zmq/libzmq.so diff --git a/src/zmq/libzmq.so.3 b/external_libs/zmq/libzmq.so.3 Binary files differindex 16980c27..16980c27 100755..100644 --- a/src/zmq/libzmq.so.3 +++ b/external_libs/zmq/libzmq.so.3 diff --git a/src/zmq/libzmq.so.3.1.0 b/external_libs/zmq/libzmq.so.3.1.0 Binary files differindex 16980c27..16980c27 100755..100644 --- a/src/zmq/libzmq.so.3.1.0 +++ b/external_libs/zmq/libzmq.so.3.1.0 diff --git a/linux/ws_main.py b/linux/ws_main.py index 659d4921..e2364be4 100755 --- a/linux/ws_main.py +++ b/linux/ws_main.py @@ -172,7 +172,7 @@ rpc_server_mock = SrcGroups([cmn_src, json_src ]) -yaml_src = SrcGroup(dir='yaml-cpp/src/', +yaml_src = SrcGroup(dir='external_libs/yaml-cpp/src/', src_list=[ 'aliasmanager.cpp', 'binary.cpp', @@ -223,11 +223,11 @@ cxxflags_base =['-DWIN_UCODE_SIM', includes_path =''' ../src/pal/linux/ - ../src/zmq/include/ ../src/ ../src/rpc-server/ ../external_libs/json/ - ../yaml-cpp/include/ + ../external_libs/zmq/include/ + ../external_libs/yaml-cpp/include/ '''; @@ -359,7 +359,7 @@ build_types = [ def build_prog (bld, build_obj): - zmq_lib_path='src/zmq/' + zmq_lib_path='external_libs/zmq/' bld.read_shlib( name='zmq' , paths=[top + zmq_lib_path] ) bld.program(features='cxx cxxprogram', diff --git a/linux_dpdk/ws_main.py b/linux_dpdk/ws_main.py index aa270e69..24ffe18e 100755 --- a/linux_dpdk/ws_main.py +++ b/linux_dpdk/ws_main.py @@ -156,7 +156,7 @@ json_src = SrcGroup(dir='external_libs/json', 'jsoncpp.cpp' ]) -yaml_src = SrcGroup(dir='yaml-cpp/src/', +yaml_src = SrcGroup(dir='external_libs/yaml-cpp/src/', src_list=[ 'aliasmanager.cpp', 'binary.cpp', @@ -400,10 +400,13 @@ common_flags_old = common_flags + [ includes_path =''' ../src/pal/linux_dpdk/ ../src/ - ../external_libs/json/ + ../src/rpc-server/ - ../yaml-cpp/include/ - ../src/zmq/include/ + + ../external_libs/yaml-cpp/include/ + ../external_libs/zmq/include/ + ../external_libs/json/ + ../src/dpdk_lib18/librte_eal/linuxapp/eal/include/ ../src/dpdk_lib18/librte_eal/common/include/ ../src/dpdk_lib18/librte_eal/common/ @@ -592,7 +595,7 @@ build_types = [ def build_prog (bld, build_obj): - zmq_lib_path='src/zmq/' + zmq_lib_path='external_libs/zmq/' bld.read_shlib( name='zmq' , paths=[top+zmq_lib_path] ) #rte_libs =[ @@ -749,7 +752,7 @@ files_list=[ 'trex_daemon_server' ]; 
-files_dir=['cap2','avl','cfg','ko','automation','python-lib']
+files_dir=['cap2','avl','cfg','ko','automation', 'external_libs', 'python-lib']
 class Env(object):
diff --git a/scripts/automation/trex_control_plane/client/outer_packages.py b/scripts/automation/trex_control_plane/client/outer_packages.py
index a7c34e48..5facad20 100755
--- a/scripts/automation/trex_control_plane/client/outer_packages.py
+++ b/scripts/automation/trex_control_plane/client/outer_packages.py
@@ -1,29 +1,30 @@
 #!/router/bin/python
-import sys,site
-import platform,os
+import sys
+import site
+import os
-CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory
-PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, 'python_lib'))
-
+PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs'))
CLIENT_MODULES = ['enum34-1.0.4',
- # 'jsonrpclib-0.1.3',
- 'jsonrpclib-pelix-0.2.5',
- 'termstyle',
- 'rpc_exceptions-0.1'
- ]
+ 'jsonrpclib-pelix-0.2.5',
+ 'termstyle',
+ 'rpc_exceptions-0.1'
+ ]
+
-def import_client_modules ():
+def import_client_modules():
sys.path.append(ROOT_PATH)
import_module_list(CLIENT_MODULES)
-def import_module_list (modules_list):
+
+def import_module_list(modules_list):
assert(isinstance(modules_list, list))
for p in modules_list:
- full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
- fix_path = os.path.normcase(full_path) #CURRENT_PATH+p)
+ full_path = os.path.join(PATH_TO_PYTHON_LIB, p)
+ fix_path = os.path.normcase(full_path) # (CURRENT_PATH+p)
site.addsitedir(full_path)
import_client_modules()
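The hunk above repoints the client's bundled-package lookup from the old in-tree python_lib folder to the shared external_libs directory and registers each package directory with site.addsitedir(). For orientation, a minimal standalone sketch of the same resolution; the on-disk location of external_libs (two directory levels above trex_control_plane) is inferred from the patch, and the package directory names are copied from the hunk for illustration only:

    import os
    import site
    import sys

    # Assumed layout: this file lives in scripts/automation/trex_control_plane/client/
    CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
    ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir))              # .../trex_control_plane
    PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir,
                                                      os.pardir, 'external_libs'))  # .../scripts/external_libs

    sys.path.append(ROOT_PATH)
    for package_dir in ['enum34-1.0.4', 'jsonrpclib-pelix-0.2.5', 'termstyle', 'rpc_exceptions-0.1']:
        # addsitedir appends the directory to sys.path and honours any .pth files it contains
        site.addsitedir(os.path.join(PATH_TO_PYTHON_LIB, package_dir))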
diff --git a/scripts/automation/trex_control_plane/client/trex_client.py b/scripts/automation/trex_control_plane/client/trex_client.py index 1f297538..0fbb4719 100755 --- a/scripts/automation/trex_control_plane/client/trex_client.py +++ b/scripts/automation/trex_control_plane/client/trex_client.py @@ -35,7 +35,7 @@ class CTRexClient(object): def __init__(self, trex_host, max_history_size = 100, trex_daemon_port = 8090, trex_zmq_port = 4500, verbose = False): """ - Instatiate a T-Rex client object, and connecting it to listening deamon-server + Instantiate a T-Rex client object, and connecting it to listening daemon-server :parameters: trex_host : str @@ -45,15 +45,15 @@ class CTRexClient(object): default value : **100** trex_daemon_port : int - the port number on which the trex-deamon server can be reached + the port number on which the trex-daemon server can be reached default value: **8090** trex_zmq_port : int - the port number on which trex's zmq module will interact with deamon server + the port number on which trex's zmq module will interact with daemon server default value: **4500** verbose : bool - sets a verbose output on suported class method. + sets a verbose output on supported class method. default value : **False** @@ -153,7 +153,7 @@ class CTRexClient(object): """ Request to stop a T-Rex run on server. - The request is only valid if the stop intitiator is the same client as the T-Rex run intitiator. + The request is only valid if the stop initiator is the same client as the T-Rex run initiator. :parameters: None @@ -223,7 +223,7 @@ class CTRexClient(object): """ Block the client application until T-Rex changes state from 'Starting' to either 'Idle' or 'Running' - The request is only valid if the stop intitiator is the same client as the T-Rex run intitiator. + The request is only valid if the stop initiator is the same client as the T-Rex run initiator. :parameters: timeout : int diff --git a/scripts/automation/trex_control_plane/client_utils/general_utils.py b/scripts/automation/trex_control_plane/client_utils/general_utils.py index 5544eabc..b5912628 100755 --- a/scripts/automation/trex_control_plane/client_utils/general_utils.py +++ b/scripts/automation/trex_control_plane/client_utils/general_utils.py @@ -1,6 +1,9 @@ #!/router/bin/python -import sys,site +import sys +import site +import string +import random import os try: @@ -50,7 +53,27 @@ def find_path_to_pardir (pardir, base_path = os.getcwd() ): """ components = base_path.split(os.sep) return str.join(os.sep, components[:components.index(pardir)+1]) - + + +def random_id_gen(length=8): + """ + A generator for creating a random chars id of specific length + + :parameters: + length : int + the desired length of the generated id + + default: 8 + + :return: + a random id with each next() request. + """ + id_chars = string.ascii_lowercase + string.digits + while True: + return_id = '' + for i in range(length): + return_id += random.choice(id_chars) + yield return_id if __name__ == "__main__": diff --git a/src/console/trex_rpc_client.py b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py index 77d5fe1c..1631c494 100644 --- a/src/console/trex_rpc_client.py +++ b/scripts/automation/trex_control_plane/client_utils/jsonrpc_client.py @@ -1,180 +1,186 @@ -
-import zmq
-import json
-from time import sleep
-import random
-
-class RpcClient():
-
- def __init__ (self, default_server, default_port):
- self.verbose = False
- self.connected = False
-
- # default values
- self.port = default_port
- self.server = default_server
-
- def get_connection_details (self):
- rc = {}
- rc['server'] = self.server
- rc['port'] = self.port
-
- return rc
-
- def pretty_json (self, json_str):
- return json.dumps(json.loads(json_str), indent = 4, separators=(',', ': '), sort_keys = True)
-
- def verbose_msg (self, msg):
- if not self.verbose:
- return
-
- print "[verbose] " + msg
-
-
- def create_jsonrpc_v2 (self, method_name, params = {}, id = None):
- msg = {}
- msg["jsonrpc"] = "2.0"
- msg["method"] = method_name
-
- msg["params"] = params
-
- msg["id"] = id
-
- return json.dumps(msg)
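For reference while reading this hunk: create_jsonrpc_v2() is carried over essentially unchanged into the new JsonRpcClient further down, and simply serializes a JSON-RPC 2.0 request envelope. For a hypothetical get_status call with request id 17 it would produce something like:

    {"jsonrpc": "2.0", "method": "get_status", "params": {}, "id": 17}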
-
- def invoke_rpc_method (self, method_name, params = {}, block = False):
- rc, msg = self._invoke_rpc_method(method_name, params, block)
- if not rc:
- self.disconnect()
-
- return rc, msg
-
- def _invoke_rpc_method (self, method_name, params = {}, block = False):
- if not self.connected:
- return False, "Not connected to server"
-
- id = random.randint(1, 1000)
- msg = self.create_jsonrpc_v2(method_name, params, id = id)
-
- self.verbose_msg("Sending Request To Server:\n\n" + self.pretty_json(msg) + "\n")
-
- if block:
- self.socket.send(msg)
- else:
- try:
- self.socket.send(msg, flags = zmq.NOBLOCK)
- except zmq.error.ZMQError:
- return False, "Failed To Get Send Message"
-
- got_response = False
-
- if block:
- response = self.socket.recv()
- got_response = True
- else:
- for i in xrange(0 ,10):
- try:
- response = self.socket.recv(flags = zmq.NOBLOCK)
- got_response = True
- break
- except zmq.error.Again:
- sleep(0.2)
-
- if not got_response:
- return False, "Failed To Get Server Response"
-
- self.verbose_msg("Server Response:\n\n" + self.pretty_json(response) + "\n")
-
- # decode
- response_json = json.loads(response)
-
- if (response_json.get("jsonrpc") != "2.0"):
- return False, "Malfromed Response ({0})".format(str(response))
-
- if (response_json.get("id") != id):
- return False, "Server Replied With Bad ID ({0})".format(str(response))
-
- # error reported by server
- if ("error" in response_json):
- return True, response_json["error"]["message"]
-
- # if no error there should be a result
- if ("result" not in response_json):
- return False, "Malfromed Response ({0})".format(str(response))
-
- return True, response_json["result"]
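Every public call in this class funnels through invoke_rpc_method() and reports failures through an (rc, value) tuple rather than by raising; on a False rc the client also disconnects itself. A caller-side sketch of that convention (the helper name is made up for illustration):

    def show_status(client):
        # 'client' is assumed to be a connected instance of the class in this file
        rc, value = client.invoke_rpc_method("get_status")
        if rc:
            print("status: {0}".format(value))       # the JSON-RPC result, or the server-reported error text
        else:
            print("RPC failed: {0}".format(value))   # transport/format failure; the client has disconnected itself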
-
-
- def ping_rpc_server (self):
-
- return self.invoke_rpc_method("ping", block = False)
-
- def get_rpc_server_status (self):
- return self.invoke_rpc_method("get_status")
-
- def query_rpc_server (self):
- return self.invoke_rpc_method("get_reg_cmds")
-
-
- def set_verbose (self, mode):
- self.verbose = mode
-
- def disconnect (self):
- if self.connected:
- self.socket.close(linger = 0)
- self.context.destroy(linger = 0)
- self.connected = False
- return True, ""
- else:
- return False, "Not connected to server"
-
- def connect (self, server = None, port = None):
- if self.connected:
- self.disconnect()
-
- self.context = zmq.Context()
-
- self.server = (server if server else self.server)
- self.port = (port if port else self.port)
-
- # Socket to talk to server
- self.transport = "tcp://{0}:{1}".format(self.server, self.port)
-
- print "\nConnecting To RPC Server On {0}".format(self.transport)
-
- self.socket = self.context.socket(zmq.REQ)
- try:
- self.socket.connect(self.transport)
- except zmq.error.ZMQError as e:
- return False, "ZMQ Error: Bad server or port name: " + str(e)
-
-
- self.connected = True
-
- # ping the server
- rc, err = self.ping_rpc_server()
- if not rc:
- self.disconnect()
- return rc, err
-
- return True, ""
-
- def reconnect (self):
- # connect using current values
- return self.connect()
-
- if not self.connected:
- return False, "Not connected to server"
-
- # reconnect
- return self.connect(self.server, self.port)
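Note that everything in reconnect() after its first return statement is unreachable, and the replacement JsonRpcClient below keeps the same dead code. A behaviour-preserving cleanup (not part of this patch) would simply drop the unreachable lines:

    def reconnect(self):
        # reconnect using the currently stored server/port values
        return self.connect()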
-
- def is_connected (self):
- return self.connected
-
- def __del__ (self):
- print "Shutting down RPC client\n"
- self.context.destroy(linger = 0)
-
-
-
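The replacement class that follows keeps the same public interface, but draws its request ids from the random_id_gen() generator added to general_utils.py earlier in this commit instead of random.randint(1, 1000). A condensed, self-contained equivalent of that generator:

    import random
    import string

    def random_id_gen(length=8):
        # endlessly yields lowercase-alphanumeric ids of the requested length
        id_chars = string.ascii_lowercase + string.digits
        while True:
            yield ''.join(random.choice(id_chars) for _ in range(length))

    id_gen = random_id_gen()
    request_id = next(id_gen)   # e.g. 'k3j9x0ab'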
+#!/router/bin/python + +import outer_packages +import zmq +import json +import general_utils +from time import sleep + +class JsonRpcClient(object): + + def __init__ (self, default_server, default_port): + self.verbose = False + self.connected = False + + # default values + self.port = default_port + self.server = default_server + self.id_gen = general_utils.random_id_gen() + + def get_connection_details (self): + rc = {} + rc['server'] = self.server + rc['port'] = self.port + + return rc + + def pretty_json (self, json_str): + return json.dumps(json.loads(json_str), indent = 4, separators=(',', ': '), sort_keys = True) + + def verbose_msg (self, msg): + if not self.verbose: + return + + print "[verbose] " + msg + + + def create_jsonrpc_v2 (self, method_name, params = {}, id = None): + msg = {} + msg["jsonrpc"] = "2.0" + msg["method"] = method_name + + msg["params"] = params + + msg["id"] = id + + return json.dumps(msg) + + def invoke_rpc_method (self, method_name, params = {}, block = False): + rc, msg = self._invoke_rpc_method(method_name, params, block) + if not rc: + self.disconnect() + + return rc, msg + + def _invoke_rpc_method (self, method_name, params = {}, block = False): + if not self.connected: + return False, "Not connected to server" + + id = self.id_gen.next() + msg = self.create_jsonrpc_v2(method_name, params, id = id) + + self.verbose_msg("Sending Request To Server:\n\n" + self.pretty_json(msg) + "\n") + + if block: + self.socket.send(msg) + else: + try: + self.socket.send(msg, flags = zmq.NOBLOCK) + except zmq.error.ZMQError as e: + return False, "Failed To Get Send Message" + + got_response = False + + if block: + response = self.socket.recv() + got_response = True + else: + for i in xrange(0 ,10): + try: + response = self.socket.recv(flags = zmq.NOBLOCK) + got_response = True + break + except zmq.Again: + sleep(0.2) + + if not got_response: + return False, "Failed To Get Server Response" + + self.verbose_msg("Server Response:\n\n" + self.pretty_json(response) + "\n") + + # decode + response_json = json.loads(response) + + if (response_json.get("jsonrpc") != "2.0"): + return False, "Malfromed Response ({0})".format(str(response)) + + if (response_json.get("id") != id): + return False, "Server Replied With Bad ID ({0})".format(str(response)) + + # error reported by server + if ("error" in response_json): + return True, response_json["error"]["message"] + + # if no error there should be a result + if ("result" not in response_json): + return False, "Malfromed Response ({0})".format(str(response)) + + return True, response_json["result"] + + + def ping_rpc_server(self): + + return self.invoke_rpc_method("ping", block = False) + + def get_rpc_server_status (self): + return self.invoke_rpc_method("get_status") + + def query_rpc_server(self): + return self.invoke_rpc_method("get_reg_cmds") + + + def set_verbose(self, mode): + self.verbose = mode + + def disconnect (self): + if self.connected: + self.socket.close(linger = 0) + self.context.destroy(linger = 0) + self.connected = False + return True, "" + else: + return False, "Not connected to server" + + def connect(self, server = None, port = None): + if self.connected: + self.disconnect() + + self.context = zmq.Context() + + self.server = (server if server else self.server) + self.port = (port if port else self.port) + + # Socket to talk to server + self.transport = "tcp://{0}:{1}".format(self.server, self.port) + + print "\nConnecting To RPC Server On {0}".format(self.transport) + + self.socket = 
self.context.socket(zmq.REQ) + try: + self.socket.connect(self.transport) + except zmq.error.ZMQError as e: + return False, "ZMQ Error: Bad server or port name: " + str(e) + + + self.connected = True + + # ping the server + rc, err = self.ping_rpc_server() + if not rc: + self.disconnect() + return rc, err + + return True, "" + + + def reconnect(self): + # connect using current values + return self.connect() + + if not self.connected: + return False, "Not connected to server" + + # reconnect + return self.connect(self.server, self.port) + + + def is_connected(self): + return self.connected + + + def __del__(self): + print "Shutting down RPC client\n" + self.context.destroy(linger=0) + +if __name__ == "__main__": + pass diff --git a/scripts/automation/trex_control_plane/client_utils/outer_packages.py b/scripts/automation/trex_control_plane/client_utils/outer_packages.py new file mode 100644 index 00000000..c489fd3d --- /dev/null +++ b/scripts/automation/trex_control_plane/client_utils/outer_packages.py @@ -0,0 +1,29 @@ +#!/router/bin/python + +import sys +import site +import os + +CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) +ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory +PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs')) + +CLIENT_UTILS_MODULES = ['zmq'] + + +def import_client_utils_modules(): + # must be in a higher priority + sys.path.insert(0, PATH_TO_PYTHON_LIB) + sys.path.append(ROOT_PATH) + import_module_list(CLIENT_UTILS_MODULES) + + +def import_module_list(modules_list): + assert(isinstance(modules_list, list)) + for p in modules_list: + full_path = os.path.join(PATH_TO_PYTHON_LIB, p) + fix_path = os.path.normcase(full_path) + site.addsitedir(full_path) + +import_client_utils_modules() + diff --git a/src/console/trex_console.py b/scripts/automation/trex_control_plane/console/trex_console.py index 1cb8194d..6514a51c 100755..100644 --- a/src/console/trex_console.py +++ b/scripts/automation/trex_control_plane/console/trex_console.py @@ -5,8 +5,8 @@ import json import ast import argparse import sys - -from trex_rpc_client import RpcClient +import trex_root_path +from client_utils.jsonrpc_client import JsonRpcClient import trex_status class TrexConsole(cmd.Cmd): @@ -237,7 +237,7 @@ def main (): options = parser.parse_args(sys.argv[1:]) # RPC client - rpc_client = RpcClient(options.server, options.port) + rpc_client = JsonRpcClient(options.server, options.port) # console try: diff --git a/scripts/automation/trex_control_plane/console/trex_root_path.py b/scripts/automation/trex_control_plane/console/trex_root_path.py new file mode 100644 index 00000000..de4ec03b --- /dev/null +++ b/scripts/automation/trex_control_plane/console/trex_root_path.py @@ -0,0 +1,15 @@ +#!/router/bin/python + +import os +import sys + +def add_root_to_path (): + """adds trex_control_plane root dir to script path, up to `depth` parent dirs""" + root_dirname = 'trex_control_plane' + file_path = os.path.dirname(os.path.realpath(__file__)) + + components = file_path.split(os.sep) + sys.path.append( str.join(os.sep, components[:components.index(root_dirname)+1]) ) + return + +add_root_to_path() diff --git a/src/console/trex_status.py b/scripts/automation/trex_control_plane/console/trex_status.py index 8ee669b5..8ee669b5 100755..100644 --- a/src/console/trex_status.py +++ b/scripts/automation/trex_control_plane/console/trex_status.py diff --git 
a/scripts/automation/trex_control_plane/examples/client_interactive_example.py b/scripts/automation/trex_control_plane/examples/client_interactive_example.py index e8d358a9..10735221 100755 --- a/scripts/automation/trex_control_plane/examples/client_interactive_example.py +++ b/scripts/automation/trex_control_plane/examples/client_interactive_example.py @@ -4,7 +4,7 @@ import trex_root_path from client.trex_client import * from common.trex_exceptions import * import cmd -from python_lib.termstyle import termstyle +import termstyle import os from argparse import ArgumentParser from pprint import pprint @@ -23,14 +23,13 @@ class InteractiveTRexClient(cmd.Cmd): cmd.Cmd.__init__(self) self.verbose = verbose_mode self.trex = CTRexClient(trex_host, max_history_size, trex_daemon_port = trex_port, verbose = verbose_mode) - self.DEFAULT_RUN_PARAMS = dict(c = 4, - m = 1.5, + self.DEFAULT_RUN_PARAMS = dict( m = 1.5, nc = True, p = True, d = 100, f = 'avl/sfr_delay_10_1g.yaml', l = 1000) - self.run_params = self.DEFAULT_RUN_PARAMS + self.run_params = dict(self.DEFAULT_RUN_PARAMS) self.decoder = json.JSONDecoder() @@ -112,7 +111,7 @@ class InteractiveTRexClient(cmd.Cmd): def do_restore_run_default (self, line): """Restores original T-Rex running configuration""" - self.run_params = self.DEFAULT_RUN_PARAMS + self.run_params = dict(self.DEFAULT_RUN_PARAMS) print termstyle.green("*** End of restoring default run parameters ***") def do_run_until_finish (self, sample_rate): diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/PKG-INFO b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/PKG-INFO deleted file mode 100755 index 7082747b..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/PKG-INFO +++ /dev/null @@ -1,10 +0,0 @@ -Metadata-Version: 1.0 -Name: jsonrpclib -Version: 0.1.3 -Summary: This project is an implementation of the JSON-RPC v2.0 specification (backwards-compatible) as a client library. -Home-page: http://github.com/joshmarshall/jsonrpclib/ -Author: Josh Marshall -Author-email: catchjosh@gmail.com -License: http://www.apache.org/licenses/LICENSE-2.0 -Description: UNKNOWN -Platform: UNKNOWN diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/README.txt b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/README.txt deleted file mode 100755 index 9d431a48..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/README.txt +++ /dev/null @@ -1,203 +0,0 @@ -JSONRPClib -========== -This library is an implementation of the JSON-RPC specification. -It supports both the original 1.0 specification, as well as the -new (proposed) 2.0 spec, which includes batch submission, keyword -arguments, etc. - -It is licensed under the Apache License, Version 2.0 -(http://www.apache.org/licenses/LICENSE-2.0.html). - -Communication -------------- -Feel free to send any questions, comments, or patches to our Google Group -mailing list (you'll need to join to send a message): -http://groups.google.com/group/jsonrpclib - -Summary -------- -This library implements the JSON-RPC 2.0 proposed specification in pure Python. -It is designed to be as compatible with the syntax of xmlrpclib as possible -(it extends where possible), so that projects using xmlrpclib could easily be -modified to use JSON and experiment with the differences. 
- -It is backwards-compatible with the 1.0 specification, and supports all of the -new proposed features of 2.0, including: - -* Batch submission (via MultiCall) -* Keyword arguments -* Notifications (both in a batch and 'normal') -* Class translation using the 'jsonclass' key. - -I've added a "SimpleJSONRPCServer", which is intended to emulate the -"SimpleXMLRPCServer" from the default Python distribution. - -Requirements ------------- -It supports cjson and simplejson, and looks for the parsers in that order -(searching first for cjson, then for the "built-in" simplejson as json in 2.6+, -and then the simplejson external library). One of these must be installed to -use this library, although if you have a standard distribution of 2.6+, you -should already have one. Keep in mind that cjson is supposed to be the -quickest, I believe, so if you are going for full-on optimization you may -want to pick it up. - -Client Usage ------------- - -This is (obviously) taken from a console session. - - >>> import jsonrpclib - >>> server = jsonrpclib.Server('http://localhost:8080') - >>> server.add(5,6) - 11 - >>> print jsonrpclib.history.request - {"jsonrpc": "2.0", "params": [5, 6], "id": "gb3c9g37", "method": "add"} - >>> print jsonrpclib.history.response - {'jsonrpc': '2.0', 'result': 11, 'id': 'gb3c9g37'} - >>> server.add(x=5, y=10) - 15 - >>> server._notify.add(5,6) - # No result returned... - >>> batch = jsonrpclib.MultiCall(server) - >>> batch.add(5, 6) - >>> batch.ping({'key':'value'}) - >>> batch._notify.add(4, 30) - >>> results = batch() - >>> for result in results: - >>> ... print result - 11 - {'key': 'value'} - # Note that there are only two responses -- this is according to spec. - -If you need 1.0 functionality, there are a bunch of places you can pass that -in, although the best is just to change the value on -jsonrpclib.config.version: - - >>> import jsonrpclib - >>> jsonrpclib.config.version - 2.0 - >>> jsonrpclib.config.version = 1.0 - >>> server = jsonrpclib.Server('http://localhost:8080') - >>> server.add(7, 10) - 17 - >>> print jsonrpclib..history.request - {"params": [7, 10], "id": "thes7tl2", "method": "add"} - >>> print jsonrpclib.history.response - {'id': 'thes7tl2', 'result': 17, 'error': None} - >>> - -The equivalent loads and dumps functions also exist, although with minor -modifications. The dumps arguments are almost identical, but it adds three -arguments: rpcid for the 'id' key, version to specify the JSON-RPC -compatibility, and notify if it's a request that you want to be a -notification. - -Additionally, the loads method does not return the params and method like -xmlrpclib, but instead a.) parses for errors, raising ProtocolErrors, and -b.) returns the entire structure of the request / response for manual parsing. - -SimpleJSONRPCServer -------------------- -This is identical in usage (or should be) to the SimpleXMLRPCServer in the default Python install. Some of the differences in features are that it obviously supports notification, batch calls, class translation (if left on), etc. Note: The import line is slightly different from the regular SimpleXMLRPCServer, since the SimpleJSONRPCServer is distributed within the jsonrpclib library. 
- - from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer - - server = SimpleJSONRPCServer(('localhost', 8080)) - server.register_function(pow) - server.register_function(lambda x,y: x+y, 'add') - server.register_function(lambda x: x, 'ping') - server.serve_forever() - -Class Translation ------------------ -I've recently added "automatic" class translation support, although it is -turned off by default. This can be devastatingly slow if improperly used, so -the following is just a short list of things to keep in mind when using it. - -* Keep It (the object) Simple Stupid. (for exceptions, keep reading.) -* Do not require init params (for exceptions, keep reading) -* Getter properties without setters could be dangerous (read: not tested) - -If any of the above are issues, use the _serialize method. (see usage below) -The server and client must BOTH have use_jsonclass configuration item on and -they must both have access to the same libraries used by the objects for -this to work. - -If you have excessively nested arguments, it would be better to turn off the -translation and manually invoke it on specific objects using -jsonrpclib.jsonclass.dump / jsonrpclib.jsonclass.load (since the default -behavior recursively goes through attributes and lists / dicts / tuples). - -[test_obj.py] - - # This object is /very/ simple, and the system will look through the - # attributes and serialize what it can. - class TestObj(object): - foo = 'bar' - - # This object requires __init__ params, so it uses the _serialize method - # and returns a tuple of init params and attribute values (the init params - # can be a dict or a list, but the attribute values must be a dict.) - class TestSerial(object): - foo = 'bar' - def __init__(self, *args): - self.args = args - def _serialize(self): - return (self.args, {'foo':self.foo,}) - -[usage] - - import jsonrpclib - import test_obj - - jsonrpclib.config.use_jsonclass = True - - testobj1 = test_obj.TestObj() - testobj2 = test_obj.TestSerial() - server = jsonrpclib.Server('http://localhost:8080') - # The 'ping' just returns whatever is sent - ping1 = server.ping(testobj1) - ping2 = server.ping(testobj2) - print jsonrpclib.history.request - # {"jsonrpc": "2.0", "params": [{"__jsonclass__": ["test_obj.TestSerial", ["foo"]]}], "id": "a0l976iv", "method": "ping"} - print jsonrpclib.history.result - # {'jsonrpc': '2.0', 'result': <test_obj.TestSerial object at 0x2744590>, 'id': 'a0l976iv'} - -To turn on this behaviour, just set jsonrpclib.config.use_jsonclass to True. -If you want to use a different method for serialization, just set -jsonrpclib.config.serialize_method to the method name. Finally, if you are -using classes that you have defined in the implementation (as in, not a -separate library), you'll need to add those (on BOTH the server and the -client) using the jsonrpclib.config.classes.add() method. -(Examples forthcoming.) - -Feedback on this "feature" is very, VERY much appreciated. - -Why JSON-RPC? -------------- -In my opinion, there are several reasons to choose JSON over XML for RPC: - -* Much simpler to read (I suppose this is opinion, but I know I'm right. :) -* Size / Bandwidth - Main reason, a JSON object representation is just much smaller. -* Parsing - JSON should be much quicker to parse than XML. -* Easy class passing with jsonclass (when enabled) - -In the interest of being fair, there are also a few reasons to choose XML -over JSON: - -* Your server doesn't do JSON (rather obvious) -* Wider XML-RPC support across APIs (can we change this? 
:)) -* Libraries are more established, i.e. more stable (Let's change this too.) - -TESTS ------ -I've dropped almost-verbatim tests from the JSON-RPC spec 2.0 page. -You can run it with: - - python tests.py - -TODO ----- -* Use HTTP error codes on SimpleJSONRPCServer -* Test, test, test and optimize
\ No newline at end of file diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/SimpleJSONRPCServer.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/SimpleJSONRPCServer.py deleted file mode 100755 index d76da73e..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/SimpleJSONRPCServer.py +++ /dev/null @@ -1,229 +0,0 @@ -import jsonrpclib -from jsonrpclib import Fault -from jsonrpclib.jsonrpc import USE_UNIX_SOCKETS -import SimpleXMLRPCServer -import SocketServer -import socket -import logging -import os -import types -import traceback -import sys -try: - import fcntl -except ImportError: - # For Windows - fcntl = None - -def get_version(request): - # must be a dict - if 'jsonrpc' in request.keys(): - return 2.0 - if 'id' in request.keys(): - return 1.0 - return None - -def validate_request(request): - if type(request) is not types.DictType: - fault = Fault( - -32600, 'Request must be {}, not %s.' % type(request) - ) - return fault - rpcid = request.get('id', None) - version = get_version(request) - if not version: - fault = Fault(-32600, 'Request %s invalid.' % request, rpcid=rpcid) - return fault - request.setdefault('params', []) - method = request.get('method', None) - params = request.get('params') - param_types = (types.ListType, types.DictType, types.TupleType) - if not method or type(method) not in types.StringTypes or \ - type(params) not in param_types: - fault = Fault( - -32600, 'Invalid request parameters or method.', rpcid=rpcid - ) - return fault - return True - -class SimpleJSONRPCDispatcher(SimpleXMLRPCServer.SimpleXMLRPCDispatcher): - - def __init__(self, encoding=None): - SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__(self, - allow_none=True, - encoding=encoding) - - def _marshaled_dispatch(self, data, dispatch_method = None): - response = None - try: - request = jsonrpclib.loads(data) - except Exception, e: - fault = Fault(-32700, 'Request %s invalid. 
(%s)' % (data, e)) - response = fault.response() - return response - if not request: - fault = Fault(-32600, 'Request invalid -- no request data.') - return fault.response() - if type(request) is types.ListType: - # This SHOULD be a batch, by spec - responses = [] - for req_entry in request: - result = validate_request(req_entry) - if type(result) is Fault: - responses.append(result.response()) - continue - resp_entry = self._marshaled_single_dispatch(req_entry) - if resp_entry is not None: - responses.append(resp_entry) - if len(responses) > 0: - response = '[%s]' % ','.join(responses) - else: - response = '' - else: - result = validate_request(request) - if type(result) is Fault: - return result.response() - response = self._marshaled_single_dispatch(request) - return response - - def _marshaled_single_dispatch(self, request): - # TODO - Use the multiprocessing and skip the response if - # it is a notification - # Put in support for custom dispatcher here - # (See SimpleXMLRPCServer._marshaled_dispatch) - method = request.get('method') - params = request.get('params') - try: - response = self._dispatch(method, params) - except: - exc_type, exc_value, exc_tb = sys.exc_info() - fault = Fault(-32603, '%s:%s' % (exc_type, exc_value)) - return fault.response() - if 'id' not in request.keys() or request['id'] == None: - # It's a notification - return None - try: - response = jsonrpclib.dumps(response, - methodresponse=True, - rpcid=request['id'] - ) - return response - except: - exc_type, exc_value, exc_tb = sys.exc_info() - fault = Fault(-32603, '%s:%s' % (exc_type, exc_value)) - return fault.response() - - def _dispatch(self, method, params): - func = None - try: - func = self.funcs[method] - except KeyError: - if self.instance is not None: - if hasattr(self.instance, '_dispatch'): - return self.instance._dispatch(method, params) - else: - try: - func = SimpleXMLRPCServer.resolve_dotted_attribute( - self.instance, - method, - True - ) - except AttributeError: - pass - if func is not None: - try: - if type(params) is types.ListType: - response = func(*params) - else: - response = func(**params) - return response - except TypeError: - return Fault(-32602, 'Invalid parameters.') - except: - err_lines = traceback.format_exc().splitlines() - trace_string = '%s | %s' % (err_lines[-3], err_lines[-1]) - fault = jsonrpclib.Fault(-32603, 'Server error: %s' % - trace_string) - return fault - else: - return Fault(-32601, 'Method %s not supported.' 
% method) - -class SimpleJSONRPCRequestHandler( - SimpleXMLRPCServer.SimpleXMLRPCRequestHandler): - - def do_POST(self): - if not self.is_rpc_path_valid(): - self.report_404() - return - try: - max_chunk_size = 10*1024*1024 - size_remaining = int(self.headers["content-length"]) - L = [] - while size_remaining: - chunk_size = min(size_remaining, max_chunk_size) - L.append(self.rfile.read(chunk_size)) - size_remaining -= len(L[-1]) - data = ''.join(L) - response = self.server._marshaled_dispatch(data) - self.send_response(200) - except Exception, e: - self.send_response(500) - err_lines = traceback.format_exc().splitlines() - trace_string = '%s | %s' % (err_lines[-3], err_lines[-1]) - fault = jsonrpclib.Fault(-32603, 'Server error: %s' % trace_string) - response = fault.response() - if response == None: - response = '' - self.send_header("Content-type", "application/json-rpc") - self.send_header("Content-length", str(len(response))) - self.end_headers() - self.wfile.write(response) - self.wfile.flush() - self.connection.shutdown(1) - -class SimpleJSONRPCServer(SocketServer.TCPServer, SimpleJSONRPCDispatcher): - - allow_reuse_address = True - - def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler, - logRequests=True, encoding=None, bind_and_activate=True, - address_family=socket.AF_INET): - self.logRequests = logRequests - SimpleJSONRPCDispatcher.__init__(self, encoding) - # TCPServer.__init__ has an extra parameter on 2.6+, so - # check Python version and decide on how to call it - vi = sys.version_info - self.address_family = address_family - if USE_UNIX_SOCKETS and address_family == socket.AF_UNIX: - # Unix sockets can't be bound if they already exist in the - # filesystem. The convention of e.g. X11 is to unlink - # before binding again. 
- if os.path.exists(addr): - try: - os.unlink(addr) - except OSError: - logging.warning("Could not unlink socket %s", addr) - # if python 2.5 and lower - if vi[0] < 3 and vi[1] < 6: - SocketServer.TCPServer.__init__(self, addr, requestHandler) - else: - SocketServer.TCPServer.__init__(self, addr, requestHandler, - bind_and_activate) - if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): - flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) - flags |= fcntl.FD_CLOEXEC - fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) - -class CGIJSONRPCRequestHandler(SimpleJSONRPCDispatcher): - - def __init__(self, encoding=None): - SimpleJSONRPCDispatcher.__init__(self, encoding) - - def handle_jsonrpc(self, request_text): - response = self._marshaled_dispatch(request_text) - print 'Content-Type: application/json-rpc' - print 'Content-Length: %d' % len(response) - print - sys.stdout.write(response) - - handle_xmlrpc = handle_jsonrpc diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/__init__.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/__init__.py deleted file mode 100755 index 6e884b83..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from jsonrpclib.config import Config -config = Config.instance() -from jsonrpclib.history import History -history = History.instance() -from jsonrpclib.jsonrpc import Server, MultiCall, Fault -from jsonrpclib.jsonrpc import ProtocolError, loads, dumps diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/config.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/config.py deleted file mode 100755 index 4d28f1b1..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/config.py +++ /dev/null @@ -1,38 +0,0 @@ -import sys - -class LocalClasses(dict): - def add(self, cls): - self[cls.__name__] = cls - -class Config(object): - """ - This is pretty much used exclusively for the 'jsonclass' - functionality... set use_jsonclass to False to turn it off. - You can change serialize_method and ignore_attribute, or use - the local_classes.add(class) to include "local" classes. - """ - use_jsonclass = True - # Change to False to keep __jsonclass__ entries raw. - serialize_method = '_serialize' - # The serialize_method should be a string that references the - # method on a custom class object which is responsible for - # returning a tuple of the constructor arguments and a dict of - # attributes. - ignore_attribute = '_ignore' - # The ignore attribute should be a string that references the - # attribute on a custom class object which holds strings and / or - # references of the attributes the class translator should ignore. - classes = LocalClasses() - # The list of classes to use for jsonclass translation. - version = 2.0 - # Version of the JSON-RPC spec to support - user_agent = 'jsonrpclib/0.1 (Python %s)' % \ - '.'.join([str(ver) for ver in sys.version_info[0:3]]) - # User agent to use for calls. 
- _instance = None - - @classmethod - def instance(cls): - if not cls._instance: - cls._instance = cls() - return cls._instance diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/history.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/history.py deleted file mode 100755 index d11863dc..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/history.py +++ /dev/null @@ -1,40 +0,0 @@ -class History(object): - """ - This holds all the response and request objects for a - session. A server using this should call "clear" after - each request cycle in order to keep it from clogging - memory. - """ - requests = [] - responses = [] - _instance = None - - @classmethod - def instance(cls): - if not cls._instance: - cls._instance = cls() - return cls._instance - - def add_response(self, response_obj): - self.responses.append(response_obj) - - def add_request(self, request_obj): - self.requests.append(request_obj) - - @property - def request(self): - if len(self.requests) == 0: - return None - else: - return self.requests[-1] - - @property - def response(self): - if len(self.responses) == 0: - return None - else: - return self.responses[-1] - - def clear(self): - del self.requests[:] - del self.responses[:] diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonclass.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonclass.py deleted file mode 100755 index 298c3da3..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonclass.py +++ /dev/null @@ -1,145 +0,0 @@ -import types -import inspect -import re -import traceback - -from jsonrpclib import config - -iter_types = [ - types.DictType, - types.ListType, - types.TupleType -] - -string_types = [ - types.StringType, - types.UnicodeType -] - -numeric_types = [ - types.IntType, - types.LongType, - types.FloatType -] - -value_types = [ - types.BooleanType, - types.NoneType -] - -supported_types = iter_types+string_types+numeric_types+value_types -invalid_module_chars = r'[^a-zA-Z0-9\_\.]' - -class TranslationError(Exception): - pass - -def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]): - if not serialize_method: - serialize_method = config.serialize_method - if not ignore_attribute: - ignore_attribute = config.ignore_attribute - obj_type = type(obj) - # Parse / return default "types"... - if obj_type in numeric_types+string_types+value_types: - return obj - if obj_type in iter_types: - if obj_type in (types.ListType, types.TupleType): - new_obj = [] - for item in obj: - new_obj.append(dump(item, serialize_method, - ignore_attribute, ignore)) - if obj_type is types.TupleType: - new_obj = tuple(new_obj) - return new_obj - # It's a dict... - else: - new_obj = {} - for key, value in obj.iteritems(): - new_obj[key] = dump(value, serialize_method, - ignore_attribute, ignore) - return new_obj - # It's not a standard type, so it needs __jsonclass__ - module_name = inspect.getmodule(obj).__name__ - class_name = obj.__class__.__name__ - json_class = class_name - if module_name not in ['', '__main__']: - json_class = '%s.%s' % (module_name, json_class) - return_obj = {"__jsonclass__":[json_class,]} - # If a serialization method is defined.. - if serialize_method in dir(obj): - # Params can be a dict (keyword) or list (positional) - # Attrs MUST be a dict. 
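As a rough usage sketch of the Config and History singletons above (MyLocalClass is a placeholder for any user class the jsonclass translator should recognize):

    config = Config.instance()
    config.classes.add(MyLocalClass)      # MyLocalClass is a stand-in name
    # config.use_jsonclass = False        # or turn class translation off entirely

    history = History.instance()
    history.add_request('{"method": "add", "params": [5, 6], "id": "1"}')
    last = history.request                # most recent request, or None
    history.clear()                       # typically called once per request cycle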
- serialize = getattr(obj, serialize_method) - params, attrs = serialize() - return_obj['__jsonclass__'].append(params) - return_obj.update(attrs) - return return_obj - # Otherwise, try to figure it out - # Obviously, we can't assume to know anything about the - # parameters passed to __init__ - return_obj['__jsonclass__'].append([]) - attrs = {} - ignore_list = getattr(obj, ignore_attribute, [])+ignore - for attr_name, attr_value in obj.__dict__.iteritems(): - if type(attr_value) in supported_types and \ - attr_name not in ignore_list and \ - attr_value not in ignore_list: - attrs[attr_name] = dump(attr_value, serialize_method, - ignore_attribute, ignore) - return_obj.update(attrs) - return return_obj - -def load(obj): - if type(obj) in string_types+numeric_types+value_types: - return obj - if type(obj) is types.ListType: - return_list = [] - for entry in obj: - return_list.append(load(entry)) - return return_list - # Othewise, it's a dict type - if '__jsonclass__' not in obj.keys(): - return_dict = {} - for key, value in obj.iteritems(): - new_value = load(value) - return_dict[key] = new_value - return return_dict - # It's a dict, and it's a __jsonclass__ - orig_module_name = obj['__jsonclass__'][0] - params = obj['__jsonclass__'][1] - if orig_module_name == '': - raise TranslationError('Module name empty.') - json_module_clean = re.sub(invalid_module_chars, '', orig_module_name) - if json_module_clean != orig_module_name: - raise TranslationError('Module name %s has invalid characters.' % - orig_module_name) - json_module_parts = json_module_clean.split('.') - json_class = None - if len(json_module_parts) == 1: - # Local class name -- probably means it won't work - if json_module_parts[0] not in config.classes.keys(): - raise TranslationError('Unknown class or module %s.' % - json_module_parts[0]) - json_class = config.classes[json_module_parts[0]] - else: - json_class_name = json_module_parts.pop() - json_module_tree = '.'.join(json_module_parts) - try: - temp_module = __import__(json_module_tree) - except ImportError: - raise TranslationError('Could not import %s from module %s.' % - (json_class_name, json_module_tree)) - json_class = getattr(temp_module, json_class_name) - # Creating the object... - new_obj = None - if type(params) is types.ListType: - new_obj = json_class(*params) - elif type(params) is types.DictType: - new_obj = json_class(**params) - else: - raise TranslationError('Constructor args must be a dict or list.') - for key, value in obj.iteritems(): - if key == '__jsonclass__': - continue - setattr(new_obj, key, value) - return new_obj diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonrpc.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonrpc.py deleted file mode 100755 index e11939ae..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/jsonrpclib/jsonrpc.py +++ /dev/null @@ -1,556 +0,0 @@ -""" -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
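A rough illustration of the __jsonclass__ convention implemented by dump() and load() above; Point is a made-up class, config is the jsonrpclib.config module imported at the top of this file, and dict ordering will vary:

    class Point(object):                        # illustrative class
        def __init__(self, x, y):
            self.x = x
            self.y = y
        def _serialize(self):
            return ([self.x, self.y], {})       # (constructor args, extra attrs)

    dump(Point(1, 2))
    # -> {'__jsonclass__': ['Point', [1, 2]]}   (a module prefix is added outside __main__)
    config.classes.add(Point)                   # lets load() resolve the bare class name
    load({'__jsonclass__': ['Point', [1, 2]]})  # -> a reconstructed Point instance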
- -============================ -JSONRPC Library (jsonrpclib) -============================ - -This library is a JSON-RPC v.2 (proposed) implementation which -follows the xmlrpclib API for portability between clients. It -uses the same Server / ServerProxy, loads, dumps, etc. syntax, -while providing features not present in XML-RPC like: - -* Keyword arguments -* Notifications -* Versioning -* Batches and batch notifications - -Eventually, I'll add a SimpleXMLRPCServer compatible library, -and other things to tie the thing off nicely. :) - -For a quick-start, just open a console and type the following, -replacing the server address, method, and parameters -appropriately. ->>> import jsonrpclib ->>> server = jsonrpclib.Server('http://localhost:8181') ->>> server.add(5, 6) -11 ->>> server._notify.add(5, 6) ->>> batch = jsonrpclib.MultiCall(server) ->>> batch.add(3, 50) ->>> batch.add(2, 3) ->>> batch._notify.add(3, 5) ->>> batch() -[53, 5] - -See http://code.google.com/p/jsonrpclib/ for more info. -""" - -import types -import sys -from xmlrpclib import Transport as XMLTransport -from xmlrpclib import SafeTransport as XMLSafeTransport -from xmlrpclib import ServerProxy as XMLServerProxy -from xmlrpclib import _Method as XML_Method -import time -import string -import random - -# Library includes -import jsonrpclib -from jsonrpclib import config -from jsonrpclib import history - -# JSON library importing -cjson = None -json = None -try: - import cjson -except ImportError: - try: - import json - except ImportError: - try: - import simplejson as json - except ImportError: - raise ImportError( - 'You must have the cjson, json, or simplejson ' + - 'module(s) available.' - ) - -IDCHARS = string.ascii_lowercase+string.digits - -class UnixSocketMissing(Exception): - """ - Just a properly named Exception if Unix Sockets usage is - attempted on a platform that doesn't support them (Windows) - """ - pass - -#JSON Abstractions - -def jdumps(obj, encoding='utf-8'): - # Do 'serialize' test at some point for other classes - global cjson - if cjson: - return cjson.encode(obj) - else: - return json.dumps(obj, encoding=encoding) - -def jloads(json_string): - global cjson - if cjson: - return cjson.decode(json_string) - else: - return json.loads(json_string) - - -# XMLRPClib re-implementations - -class ProtocolError(Exception): - pass - -class TransportMixIn(object): - """ Just extends the XMLRPC transport where necessary. 
""" - user_agent = config.user_agent - # for Python 2.7 support - _connection = None - - def send_content(self, connection, request_body): - connection.putheader("Content-Type", "application/json-rpc") - connection.putheader("Content-Length", str(len(request_body))) - connection.endheaders() - if request_body: - connection.send(request_body) - - def getparser(self): - target = JSONTarget() - return JSONParser(target), target - -class JSONParser(object): - def __init__(self, target): - self.target = target - - def feed(self, data): - self.target.feed(data) - - def close(self): - pass - -class JSONTarget(object): - def __init__(self): - self.data = [] - - def feed(self, data): - self.data.append(data) - - def close(self): - return ''.join(self.data) - -class Transport(TransportMixIn, XMLTransport): - pass - -class SafeTransport(TransportMixIn, XMLSafeTransport): - pass -from httplib import HTTP, HTTPConnection -from socket import socket - -USE_UNIX_SOCKETS = False - -try: - from socket import AF_UNIX, SOCK_STREAM - USE_UNIX_SOCKETS = True -except ImportError: - pass - -if (USE_UNIX_SOCKETS): - - class UnixHTTPConnection(HTTPConnection): - def connect(self): - self.sock = socket(AF_UNIX, SOCK_STREAM) - self.sock.connect(self.host) - - class UnixHTTP(HTTP): - _connection_class = UnixHTTPConnection - - class UnixTransport(TransportMixIn, XMLTransport): - def make_connection(self, host): - import httplib - host, extra_headers, x509 = self.get_host_info(host) - return UnixHTTP(host) - - -class ServerProxy(XMLServerProxy): - """ - Unfortunately, much more of this class has to be copied since - so much of it does the serialization. - """ - - def __init__(self, uri, transport=None, encoding=None, - verbose=0, version=None): - import urllib - if not version: - version = config.version - self.__version = version - schema, uri = urllib.splittype(uri) - if schema not in ('http', 'https', 'unix'): - raise IOError('Unsupported JSON-RPC protocol.') - if schema == 'unix': - if not USE_UNIX_SOCKETS: - # Don't like the "generic" Exception... - raise UnixSocketMissing("Unix sockets not available.") - self.__host = uri - self.__handler = '/' - else: - self.__host, self.__handler = urllib.splithost(uri) - if not self.__handler: - # Not sure if this is in the JSON spec? 
- #self.__handler = '/' - self.__handler == '/' - if transport is None: - if schema == 'unix': - transport = UnixTransport() - elif schema == 'https': - transport = SafeTransport() - else: - transport = Transport() - self.__transport = transport - self.__encoding = encoding - self.__verbose = verbose - - def _request(self, methodname, params, rpcid=None): - request = dumps(params, methodname, encoding=self.__encoding, - rpcid=rpcid, version=self.__version) - response = self._run_request(request) - check_for_errors(response) - return response['result'] - - def _request_notify(self, methodname, params, rpcid=None): - request = dumps(params, methodname, encoding=self.__encoding, - rpcid=rpcid, version=self.__version, notify=True) - response = self._run_request(request, notify=True) - check_for_errors(response) - return - - def _run_request(self, request, notify=None): - history.add_request(request) - - response = self.__transport.request( - self.__host, - self.__handler, - request, - verbose=self.__verbose - ) - - # Here, the XMLRPC library translates a single list - # response to the single value -- should we do the - # same, and require a tuple / list to be passed to - # the response object, or expect the Server to be - # outputting the response appropriately? - - history.add_response(response) - if not response: - return None - return_obj = loads(response) - return return_obj - - def __getattr__(self, name): - # Same as original, just with new _Method reference - return _Method(self._request, name) - - @property - def _notify(self): - # Just like __getattr__, but with notify namespace. - return _Notify(self._request_notify) - - -class _Method(XML_Method): - - def __call__(self, *args, **kwargs): - if len(args) > 0 and len(kwargs) > 0: - raise ProtocolError('Cannot use both positional ' + - 'and keyword arguments (according to JSON-RPC spec.)') - if len(args) > 0: - return self.__send(self.__name, args) - else: - return self.__send(self.__name, kwargs) - - def __getattr__(self, name): - self.__name = '%s.%s' % (self.__name, name) - return self - # The old method returned a new instance, but this seemed wasteful. - # The only thing that changes is the name. 
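The ServerProxy above accepts three URL schemes; a short sketch, where the host, port, and socket path are illustrative, and the 'unix' form only works where AF_UNIX sockets exist (otherwise UnixSocketMissing is raised):

    http_proxy = ServerProxy('http://localhost:8181/')
    https_proxy = ServerProxy('https://localhost:8181/')     # uses SafeTransport
    unix_proxy = ServerProxy('unix:/tmp/trex_daemon.sock')   # path is an assumption
    http_proxy.add(5, 6)                                      # -> 11, per the module docstring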
- #return _Method(self.__send, "%s.%s" % (self.__name, name)) - -class _Notify(object): - def __init__(self, request): - self._request = request - - def __getattr__(self, name): - return _Method(self._request, name) - -# Batch implementation - -class MultiCallMethod(object): - - def __init__(self, method, notify=False): - self.method = method - self.params = [] - self.notify = notify - - def __call__(self, *args, **kwargs): - if len(kwargs) > 0 and len(args) > 0: - raise ProtocolError('JSON-RPC does not support both ' + - 'positional and keyword arguments.') - if len(kwargs) > 0: - self.params = kwargs - else: - self.params = args - - def request(self, encoding=None, rpcid=None): - return dumps(self.params, self.method, version=2.0, - encoding=encoding, rpcid=rpcid, notify=self.notify) - - def __repr__(self): - return '%s' % self.request() - - def __getattr__(self, method): - new_method = '%s.%s' % (self.method, method) - self.method = new_method - return self - -class MultiCallNotify(object): - - def __init__(self, multicall): - self.multicall = multicall - - def __getattr__(self, name): - new_job = MultiCallMethod(name, notify=True) - self.multicall._job_list.append(new_job) - return new_job - -class MultiCallIterator(object): - - def __init__(self, results): - self.results = results - - def __iter__(self): - for i in range(0, len(self.results)): - yield self[i] - raise StopIteration - - def __getitem__(self, i): - item = self.results[i] - check_for_errors(item) - return item['result'] - - def __len__(self): - return len(self.results) - -class MultiCall(object): - - def __init__(self, server): - self._server = server - self._job_list = [] - - def _request(self): - if len(self._job_list) < 1: - # Should we alert? This /is/ pretty obvious. - return - request_body = '[ %s ]' % ','.join([job.request() for - job in self._job_list]) - responses = self._server._run_request(request_body) - del self._job_list[:] - if not responses: - responses = [] - return MultiCallIterator(responses) - - @property - def _notify(self): - return MultiCallNotify(self) - - def __getattr__(self, name): - new_job = MultiCallMethod(name) - self._job_list.append(new_job) - return new_job - - __call__ = _request - -# These lines conform to xmlrpclib's "compatibility" line. -# Not really sure if we should include these, but oh well. 
-Server = ServerProxy - -class Fault(object): - # JSON-RPC error class - def __init__(self, code=-32000, message='Server error', rpcid=None): - self.faultCode = code - self.faultString = message - self.rpcid = rpcid - - def error(self): - return {'code':self.faultCode, 'message':self.faultString} - - def response(self, rpcid=None, version=None): - if not version: - version = config.version - if rpcid: - self.rpcid = rpcid - return dumps( - self, methodresponse=True, rpcid=self.rpcid, version=version - ) - - def __repr__(self): - return '<Fault %s: %s>' % (self.faultCode, self.faultString) - -def random_id(length=8): - return_id = '' - for i in range(length): - return_id += random.choice(IDCHARS) - return return_id - -class Payload(dict): - def __init__(self, rpcid=None, version=None): - if not version: - version = config.version - self.id = rpcid - self.version = float(version) - - def request(self, method, params=[]): - if type(method) not in types.StringTypes: - raise ValueError('Method name must be a string.') - if not self.id: - self.id = random_id() - request = { 'id':self.id, 'method':method } - if params: - request['params'] = params - if self.version >= 2: - request['jsonrpc'] = str(self.version) - return request - - def notify(self, method, params=[]): - request = self.request(method, params) - if self.version >= 2: - del request['id'] - else: - request['id'] = None - return request - - def response(self, result=None): - response = {'result':result, 'id':self.id} - if self.version >= 2: - response['jsonrpc'] = str(self.version) - else: - response['error'] = None - return response - - def error(self, code=-32000, message='Server error.'): - error = self.response() - if self.version >= 2: - del error['result'] - else: - error['result'] = None - error['error'] = {'code':code, 'message':message} - return error - -def dumps(params=[], methodname=None, methodresponse=None, - encoding=None, rpcid=None, version=None, notify=None): - """ - This differs from the Python implementation in that it implements - the rpcid argument since the 2.0 spec requires it for responses. - """ - if not version: - version = config.version - valid_params = (types.TupleType, types.ListType, types.DictType) - if methodname in types.StringTypes and \ - type(params) not in valid_params and \ - not isinstance(params, Fault): - """ - If a method, and params are not in a listish or a Fault, - error out. 
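For reference, roughly what the Payload helper above produces on the wire; the id is random unless supplied, and dict ordering is arbitrary:

    p = Payload(rpcid='1', version=2.0)
    p.request('add', [5, 6])
    # -> {'id': '1', 'method': 'add', 'params': [5, 6], 'jsonrpc': '2.0'}
    p.notify('add', [5, 6])
    # -> the same dict with the 'id' key removed (a 2.0 notification)
    p.error(-32601, 'Method not found.')
    # -> {'id': '1', 'jsonrpc': '2.0', 'error': {'code': -32601, 'message': 'Method not found.'}}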
- """ - raise TypeError('Params must be a dict, list, tuple or Fault ' + - 'instance.') - # Begin parsing object - payload = Payload(rpcid=rpcid, version=version) - if not encoding: - encoding = 'utf-8' - if type(params) is Fault: - response = payload.error(params.faultCode, params.faultString) - return jdumps(response, encoding=encoding) - if type(methodname) not in types.StringTypes and methodresponse != True: - raise ValueError('Method name must be a string, or methodresponse '+ - 'must be set to True.') - if config.use_jsonclass == True: - from jsonrpclib import jsonclass - params = jsonclass.dump(params) - if methodresponse is True: - if rpcid is None: - raise ValueError('A method response must have an rpcid.') - response = payload.response(params) - return jdumps(response, encoding=encoding) - request = None - if notify == True: - request = payload.notify(methodname, params) - else: - request = payload.request(methodname, params) - return jdumps(request, encoding=encoding) - -def loads(data): - """ - This differs from the Python implementation, in that it returns - the request structure in Dict format instead of the method, params. - It will return a list in the case of a batch request / response. - """ - if data == '': - # notification - return None - result = jloads(data) - # if the above raises an error, the implementing server code - # should return something like the following: - # { 'jsonrpc':'2.0', 'error': fault.error(), id: None } - if config.use_jsonclass == True: - from jsonrpclib import jsonclass - result = jsonclass.load(result) - return result - -def check_for_errors(result): - if not result: - # Notification - return result - if type(result) is not types.DictType: - raise TypeError('Response is not a dict.') - if 'jsonrpc' in result.keys() and float(result['jsonrpc']) > 2.0: - raise NotImplementedError('JSON-RPC version not yet supported.') - if 'result' not in result.keys() and 'error' not in result.keys(): - raise ValueError('Response does not have a result or error key.') - if 'error' in result.keys() and result['error'] != None: - code = result['error']['code'] - message = result['error']['message'] - raise ProtocolError((code, message)) - return result - -def isbatch(result): - if type(result) not in (types.ListType, types.TupleType): - return False - if len(result) < 1: - return False - if type(result[0]) is not types.DictType: - return False - if 'jsonrpc' not in result[0].keys(): - return False - try: - version = float(result[0]['jsonrpc']) - except ValueError: - raise ProtocolError('"jsonrpc" key must be a float(able) value.') - if version < 2: - return False - return True - -def isnotification(request): - if 'id' not in request.keys(): - # 2.0 notification - return True - if request['id'] == None: - # 1.0 notification - return True - return False diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/setup.py b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/setup.py deleted file mode 100755 index 569b6367..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/setup.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env/python -""" -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
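And a module-level round trip through dumps()/loads() as defined above; output is shown loosely, since key order and whitespace depend on the JSON backend in use:

    request_json = dumps([5, 6], methodname='add', rpcid='1')
    parsed = loads(request_json)
    # -> {'jsonrpc': '2.0', 'id': '1', 'method': 'add', 'params': [5, 6]}
    isnotification(parsed)                       # -> False, an 'id' is present

    response_json = dumps(11, methodresponse=True, rpcid='1')
    check_for_errors(loads(response_json))       # -> {'jsonrpc': '2.0', 'id': '1', 'result': 11}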
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -""" - -import distutils.core - -distutils.core.setup( - name = "jsonrpclib", - version = "0.1.3", - packages = ["jsonrpclib"], - author = "Josh Marshall", - author_email = "catchjosh@gmail.com", - url = "http://github.com/joshmarshall/jsonrpclib/", - license = "http://www.apache.org/licenses/LICENSE-2.0", - description = "This project is an implementation of the JSON-RPC v2.0 " + - "specification (backwards-compatible) as a client library.", -) diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/LICENSE.txt b/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/LICENSE.txt deleted file mode 100755 index eb0864bd..00000000 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/LICENSE.txt +++ /dev/null @@ -1,11 +0,0 @@ -Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/rednose.py b/scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/rednose.py deleted file mode 100755 index 1ff892ad..00000000 --- a/scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/rednose.py +++ /dev/null @@ -1,387 +0,0 @@ -# Copyright (c) 2009, Tim Cuthbertson # All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions -# are met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. -# * Neither the name of the organisation nor the names of its -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS -# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY -# WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. 
- -from __future__ import print_function -import os -import sys -import linecache -import re -import time - -import nose - -import termstyle - -failure = 'FAILED' -error = 'ERROR' -success = 'passed' -skip = 'skipped' -line_length = 77 - -PY3 = sys.version_info[0] >= 3 -if PY3: - to_unicode = str -else: - def to_unicode(s): - try: - return unicode(s) - except UnicodeDecodeError: - return unicode(repr(str(s))) - -BLACKLISTED_WRITERS = [ - 'nose[\\/]result\\.pyc?$', - 'unittest[\\/]runner\\.pyc?$' -] -REDNOSE_DEBUG = False - - -class RedNose(nose.plugins.Plugin): - env_opt = 'NOSE_REDNOSE' - env_opt_color = 'NOSE_REDNOSE_COLOR' - score = 199 # just under the `coverage` module - - def __init__(self, *args): - super(RedNose, self).__init__(*args) - self.reports = [] - self.error = self.success = self.failure = self.skip = 0 - self.total = 0 - self.stream = None - self.verbose = False - self.enabled = False - self.tree = False - - def options(self, parser, env=os.environ): - global REDNOSE_DEBUG - rednose_on = bool(env.get(self.env_opt, False)) - rednose_color = env.get(self.env_opt_color, 'auto') - REDNOSE_DEBUG = bool(env.get('REDNOSE_DEBUG', False)) - - parser.add_option( - "--rednose", - action="store_true", - default=rednose_on, - dest="rednose", - help="enable colour output (alternatively, set $%s=1)" % (self.env_opt,) - ) - parser.add_option( - "--no-color", - action="store_false", - dest="rednose", - help="disable colour output" - ) - parser.add_option( - "--force-color", - action="store_const", - dest='rednose_color', - default=rednose_color, - const='force', - help="force colour output when not using a TTY (alternatively, set $%s=force)" % (self.env_opt_color,) - ) - parser.add_option( - "--immediate", - action="store_true", - default=False, - help="print errors and failures as they happen, as well as at the end" - ) - - def configure(self, options, conf): - if options.rednose: - self.enabled = True - termstyle_init = { - 'force': termstyle.enable, - 'off': termstyle.disable - }.get(options.rednose_color, termstyle.auto) - termstyle_init() - - self.immediate = options.immediate - self.verbose = options.verbosity >= 2 - - def begin(self): - self.start_time = time.time() - self._in_test = False - - def _format_test_name(self, test): - return test.shortDescription() or to_unicode(test) - - def prepareTestResult(self, result): - result.stream = FilteringStream(self.stream, BLACKLISTED_WRITERS) - - def beforeTest(self, test): - self._in_test = True - if self.verbose: - self._out(self._format_test_name(test) + ' ... ') - - def afterTest(self, test): - if self._in_test: - self.addSkip() - - def _print_test(self, type_, color): - self.total += 1 - if self.verbose: - self._outln(color(type_)) - else: - if type_ == failure: - short_ = 'F' - elif type_ == error: - short_ = 'X' - elif type_ == skip: - short_ = '-' - else: - short_ = '.' 
- self._out(color(short_)) - if self.total % line_length == 0: - self._outln() - self._in_test = False - - def _add_report(self, report): - failure_type, test, err = report - self.reports.append(report) - if self.immediate: - self._outln() - self._report_test(len(self.reports), *report) - - def addFailure(self, test, err): - self.failure += 1 - self._add_report((failure, test, err)) - self._print_test(failure, termstyle.red) - - def addError(self, test, err): - if err[0].__name__ == 'SkipTest': - self.addSkip(test, err) - return - self.error += 1 - self._add_report((error, test, err)) - self._print_test(error, termstyle.yellow) - - def addSuccess(self, test): - self.success += 1 - self._print_test(success, termstyle.green) - - def addSkip(self, test=None, err=None): - self.skip += 1 - self._print_test(skip, termstyle.blue) - - def setOutputStream(self, stream): - self.stream = stream - - def report(self, stream): - """report on all registered failures and errors""" - self._outln() - if self.immediate: - for x in range(0, 5): - self._outln() - report_num = 0 - if len(self.reports) > 0: - for report_num, report in enumerate(self.reports): - self._report_test(report_num + 1, *report) - self._outln() - - self._summarize() - - def _summarize(self): - """summarize all tests - the number of failures, errors and successes""" - self._line(termstyle.black) - self._out("%s test%s run in %0.1f seconds" % ( - self.total, - self._plural(self.total), - time.time() - self.start_time)) - if self.total > self.success: - self._outln(". ") - additionals = [] - if self.failure > 0: - additionals.append(termstyle.red("%s FAILED" % ( - self.failure,))) - if self.error > 0: - additionals.append(termstyle.yellow("%s error%s" % ( - self.error, - self._plural(self.error) ))) - if self.skip > 0: - additionals.append(termstyle.blue("%s skipped" % ( - self.skip))) - self._out(', '.join(additionals)) - - self._out(termstyle.green(" (%s test%s passed)" % ( - self.success, - self._plural(self.success) ))) - self._outln() - - def _report_test(self, report_num, type_, test, err): - """report the results of a single (failing or errored) test""" - self._line(termstyle.black) - self._out("%s) " % (report_num)) - if type_ == failure: - color = termstyle.red - self._outln(color('FAIL: %s' % (self._format_test_name(test),))) - else: - color = termstyle.yellow - self._outln(color('ERROR: %s' % (self._format_test_name(test),))) - - exc_type, exc_instance, exc_trace = err - - self._outln() - self._outln(self._fmt_traceback(exc_trace)) - self._out(color(' ', termstyle.bold(color(exc_type.__name__)), ": ")) - self._outln(self._fmt_message(exc_instance, color)) - self._outln() - - def _relative_path(self, path): - """ - If path is a child of the current working directory, the relative - path is returned surrounded by bold xterm escape sequences. 
- If path is not a child of the working directory, path is returned - """ - try: - here = os.path.abspath(os.path.realpath(os.getcwd())) - fullpath = os.path.abspath(os.path.realpath(path)) - except OSError: - return path - if fullpath.startswith(here): - return termstyle.bold(fullpath[len(here)+1:]) - return path - - def _file_line(self, tb): - """formats the file / lineno / function line of a traceback element""" - prefix = "file://" - prefix = "" - - f = tb.tb_frame - if '__unittest' in f.f_globals: - # this is the magical flag that prevents unittest internal - # code from junking up the stacktrace - return None - - filename = f.f_code.co_filename - lineno = tb.tb_lineno - linecache.checkcache(filename) - function_name = f.f_code.co_name - - line_contents = linecache.getline(filename, lineno, f.f_globals).strip() - - return " %s line %s in %s\n %s" % ( - termstyle.blue(prefix, self._relative_path(filename)), - lineno, - termstyle.cyan(function_name), - line_contents) - - def _fmt_traceback(self, trace): - """format a traceback""" - ret = [] - ret.append(termstyle.default(" Traceback (most recent call last):")) - current_trace = trace - while current_trace is not None: - line = self._file_line(current_trace) - if line is not None: - ret.append(line) - current_trace = current_trace.tb_next - return '\n'.join(ret) - - def _fmt_message(self, exception, color): - orig_message_lines = to_unicode(exception).splitlines() - - if len(orig_message_lines) == 0: - return '' - message_lines = [color(orig_message_lines[0])] - for line in orig_message_lines[1:]: - match = re.match('^---.* begin captured stdout.*----$', line) - if match: - color = None - message_lines.append('') - line = ' ' + line - message_lines.append(color(line) if color is not None else line) - return '\n'.join(message_lines) - - def _out(self, msg='', newline=False): - self.stream.write(msg) - if newline: - self.stream.write('\n') - - def _outln(self, msg=''): - self._out(msg, True) - - def _plural(self, num): - return '' if num == 1 else 's' - - def _line(self, color=termstyle.reset, char='-'): - """ - print a line of separator characters (default '-') - in the given colour (default black) - """ - self._outln(color(char * line_length)) - - -import traceback -import sys - - -class FilteringStream(object): - """ - A wrapper for a stream that will filter - calls to `write` and `writeln` to ignore calls - from blacklisted callers - (implemented as a regex on their filename, according - to traceback.extract_stack()) - - It's super hacky, but there seems to be no other way - to suppress nose's default output - """ - def __init__(self, stream, excludes): - self.__stream = stream - self.__excludes = list(map(re.compile, excludes)) - - def __should_filter(self): - try: - stack = traceback.extract_stack(limit=3)[0] - filename = stack[0] - pattern_matches_filename = lambda pattern: pattern.search(filename) - should_filter = any(map(pattern_matches_filename, self.__excludes)) - if REDNOSE_DEBUG: - print >> sys.stderr, "REDNOSE_DEBUG: got write call from %s, should_filter = %s" % ( - filename, should_filter) - return should_filter - except StandardError as e: - if REDNOSE_DEBUG: - print("\nError in rednose filtering: %s" % (e,), file=sys.stderr) - traceback.print_exc(sys.stderr) - return False - - def write(self, *a): - if self.__should_filter(): - return - return self.__stream.write(*a) - - def writeln(self, *a): - if self.__should_filter(): - return - return self.__stream.writeln(*a) - - # pass non-known methods through to self.__stream - 
def __getattr__(self, name): - if REDNOSE_DEBUG: - print("REDNOSE_DEBUG: getting attr %s" % (name,), file=sys.stderr) - return getattr(self.__stream, name) diff --git a/scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/setup.py b/scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/setup.py deleted file mode 100755 index 34cded4b..00000000 --- a/scripts/automation/trex_control_plane/python_lib/rednose-0.4.1/setup.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -## NOTE: ## -## this setup.py was generated by zero2pypi: -## http://gfxmonk.net/dist/0install/zero2pypi.xml - -from setuptools import * -setup( - packages = find_packages(exclude=['test', 'test.*']), - description='coloured output for nosetests', - entry_points={'nose.plugins.0.10': ['NOSETESTS_PLUGINS = rednose:RedNose']}, - install_requires=['setuptools', 'python-termstyle >=0.1.7'], - long_description="\n**Note**: This package has been built automatically by\n`zero2pypi <http://gfxmonk.net/dist/0install/zero2pypi.xml>`_.\nIf possible, you should use the zero-install feed instead:\nhttp://gfxmonk.net/dist/0install/rednose.xml\n\n----------------\n\n=========\nrednose\n=========\n\nrednose is a `nosetests`_\nplugin for adding colour (and readability) to nosetest console results.\n\nInstallation:\n-------------\n::\n\n\teasy_install rednose\n\t\nor from the source::\n\n\t./setup.py develop\n\nUsage:\n------\n::\n\n\tnosetests --rednose\n\nor::\n\n\texport NOSE_REDNOSE=1\n\tnosetests\n\nRednose by default uses auto-colouring, which will only use\ncolour if you're running it on a terminal (i.e not piping it\nto a file). To control colouring, use one of::\n\n\tnosetests --rednose --force-color\n\tnosetests --no-color\n\n(you can also control this by setting the environment variable NOSE_REDNOSE_COLOR to 'force' or 'no')\n\n.. _nosetests: http://somethingaboutorange.com/mrl/projects/nose/\n", - name='rednose', - py_modules=['rednose'], - url='http://gfxmonk.net/dist/0install/rednose.xml', - version='0.4.1', -classifiers=[ - "License :: OSI Approved :: BSD License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: Software Development :: Testing", - ], - keywords='test nosetests nose nosetest output colour console', - license='BSD', -) diff --git a/scripts/automation/trex_control_plane/python_lib/zmq_fedora.tar.gz b/scripts/automation/trex_control_plane/python_lib/zmq_fedora.tar.gz Binary files differdeleted file mode 100755 index 4f36749b..00000000 --- a/scripts/automation/trex_control_plane/python_lib/zmq_fedora.tar.gz +++ /dev/null diff --git a/scripts/automation/trex_control_plane/server/extended_daemon_runner.py b/scripts/automation/trex_control_plane/server/extended_daemon_runner.py index 07eedd9f..2ce1eb06 100755 --- a/scripts/automation/trex_control_plane/server/extended_daemon_runner.py +++ b/scripts/automation/trex_control_plane/server/extended_daemon_runner.py @@ -8,18 +8,17 @@ import os, sys from argparse import ArgumentParser
from trex_server import trex_parser
try:
- from python_lib.termstyle import termstyle
+ from termstyle import termstyle
except ImportError:
import termstyle
-
-def daemonize_parser (parser_obj, action_funcs, help_menu):
+def daemonize_parser(parser_obj, action_funcs, help_menu):
"""Update the regular process parser to deal with daemon process options"""
parser_obj.description += " (as a daemon process)"
parser_obj.usage = None
- parser_obj.add_argument("action", choices = action_funcs,
- action="store", help = help_menu )
+ parser_obj.add_argument("action", choices=action_funcs,
+ action="store", help=help_menu)
return
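A quick sketch of how daemonize_parser() is meant to be called; the action list mirrors the actions documented on ExtendedDaemonRunner below, while the description text is illustrative:

    parser = ArgumentParser(description="T-Rex server")
    daemonize_parser(parser,
                     action_funcs=['start', 'stop', 'restart', 'start-live', 'show'],
                     help_menu="daemon action to perform")
    args = parser.parse_args(['start'])          # -> Namespace(action='start')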
@@ -42,7 +41,7 @@ class ExtendedDaemonRunner(runner.DaemonRunner): (*) start-live : start the application in live mode (no daemon process).
"""
- def __init__ (self, app, parser_obj):
+ def __init__(self, app, parser_obj):
""" Set up the parameters of a new runner.
THIS METHOD INTENTIONALLY DO NOT INVOKE SUPER __init__() METHOD
@@ -78,8 +77,8 @@ class ExtendedDaemonRunner(runner.DaemonRunner): self.daemon_context = daemon.DaemonContext()
self.daemon_context.stdin = open(app.stdin_path, 'rt')
self.daemon_context.stdout = open(app.stdout_path, 'w+t')
- self.daemon_context.stderr = open(
- app.stderr_path, 'a+t', buffering=0)
+ self.daemon_context.stderr = open(app.stderr_path,
+ 'a+t', buffering=0)
self.pidfile = None
if app.pidfile_path is not None:
@@ -87,23 +86,22 @@ class ExtendedDaemonRunner(runner.DaemonRunner): self.daemon_context.pidfile = self.pidfile
# mask out all arguments that aren't relevant to main app script
-
- def update_action_funcs (self):
+ def update_action_funcs(self):
self.action_funcs.update({u'start-live': self._start_live, u'show': self._show}) # add key (=action), value (=desired func)
@staticmethod
- def _start_live (self):
+ def _start_live(self):
self.app.run()
@staticmethod
- def _show (self):
+ def _show(self):
if self.pidfile.is_locked():
print termstyle.red("T-Rex server daemon is running")
else:
print termstyle.red("T-Rex server daemon is NOT running")
- def do_action (self):
+ def do_action(self):
self.__prevent_duplicate_runs()
self.__prompt_init_msg()
try:
@@ -117,7 +115,7 @@ class ExtendedDaemonRunner(runner.DaemonRunner): self.do_action()
- def __prevent_duplicate_runs (self):
+ def __prevent_duplicate_runs(self):
if self.action == 'start' and self.pidfile.is_locked():
print termstyle.green("Server daemon is already running")
exit(1)
@@ -125,13 +123,13 @@ class ExtendedDaemonRunner(runner.DaemonRunner): print termstyle.green("Server daemon is not running")
exit(1)
- def __prompt_init_msg (self):
+ def __prompt_init_msg(self):
if self.action == 'start':
print termstyle.green("Starting daemon server...")
elif self.action == 'stop':
print termstyle.green("Stopping daemon server...")
- def __verify_termination (self):
+ def __verify_termination(self):
pass
# import time
# while self.pidfile.is_locked():
diff --git a/scripts/automation/trex_control_plane/server/outer_packages.py b/scripts/automation/trex_control_plane/server/outer_packages.py index ab25ea68..976e478d 100755 --- a/scripts/automation/trex_control_plane/server/outer_packages.py +++ b/scripts/automation/trex_control_plane/server/outer_packages.py @@ -1,66 +1,34 @@ #!/router/bin/python -import sys,site -import platform,os -import tarfile -import errno -import pwd +import sys +import site +import os CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) -ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory -PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, 'python_lib')) +ROOT_PATH = os.path.abspath(os.path.join(CURRENT_PATH, os.pardir)) # path to trex_control_plane directory +PATH_TO_PYTHON_LIB = os.path.abspath(os.path.join(ROOT_PATH, os.pardir, os.pardir, 'external_libs')) SERVER_MODULES = ['enum34-1.0.4', - # 'jsonrpclib-0.1.3', - 'jsonrpclib-pelix-0.2.5', - 'zmq', - 'python-daemon-2.0.5', - 'lockfile-0.10.2', - 'termstyle' - ] + 'jsonrpclib-pelix-0.2.5', + 'zmq', + 'python-daemon-2.0.5', + 'lockfile-0.10.2', + 'termstyle' + ] -def extract_zmq_package (): - """make sure zmq package is available""" - os.chdir(PATH_TO_PYTHON_LIB) - if not os.path.exists('zmq'): - if os.path.exists('zmq_fedora.tar.gz'): # make sure tar file is available for extraction - try: - tar = tarfile.open("zmq_fedora.tar.gz") - # finally, extract the tarfile locally - tar.extractall() - except OSError as err: - if err.errno == errno.EACCES: - # fall back. try extracting using currently logged in user - stat_info = os.stat(PATH_TO_PYTHON_LIB) - uid = stat_info.st_uid - logged_user = pwd.getpwuid(uid).pw_name - if logged_user != 'root': - try: - os.system("sudo -u {user} tar -zxvf zmq_fedora.tar.gz".format(user = logged_user)) - except: - raise OSError(13, 'Permission denied: Please make sure that logged user have sudo access and writing privileges to `python_lib` directory.') - else: - raise OSError(13, 'Permission denied: Please make sure that logged user have sudo access and writing privileges to `python_lib` directory.') - finally: - tar.close() - else: - raise IOError("File 'zmq_fedora.tar.gz' couldn't be located at python_lib directory.") - os.chdir(CURRENT_PATH) - -def import_server_modules (): +def import_server_modules(): # must be in a higher priority sys.path.insert(0, PATH_TO_PYTHON_LIB) sys.path.append(ROOT_PATH) - extract_zmq_package() import_module_list(SERVER_MODULES) -def import_module_list (modules_list): + +def import_module_list(modules_list): assert(isinstance(modules_list, list)) for p in modules_list: - full_path = os.path.join(PATH_TO_PYTHON_LIB, p) - fix_path = os.path.normcase(full_path) + full_path = os.path.join(PATH_TO_PYTHON_LIB, p) + fix_path = os.path.normcase(full_path) site.addsitedir(full_path) - import_server_modules() diff --git a/scripts/automation/trex_control_plane/server/zmq_monitor_thread.py b/scripts/automation/trex_control_plane/server/zmq_monitor_thread.py index 28e154ee..7a278af8 100755 --- a/scripts/automation/trex_control_plane/server/zmq_monitor_thread.py +++ b/scripts/automation/trex_control_plane/server/zmq_monitor_thread.py @@ -13,25 +13,23 @@ from common.trex_status_e import TRexStatus CCustomLogger.setup_custom_logger('TRexServer')
logger = logging.getLogger('TRexServer')
+
class ZmqMonitorSession(threading.Thread):
def __init__(self, trexObj , zmq_port):
super(ZmqMonitorSession, self).__init__()
self.stoprequest = threading.Event()
-# self.terminateFlag = False
self.first_dump = True
self.zmq_port = zmq_port
- self.zmq_publisher = "tcp://localhost:{port}".format( port = self.zmq_port )
-# self.context = zmq.Context()
-# self.socket = self.context.socket(zmq.SUB)
+ self.zmq_publisher = "tcp://localhost:{port}".format(port=self.zmq_port)
self.trexObj = trexObj
self.expect_trex = self.trexObj.expect_trex # used to signal if T-Rex is expected to run and if data should be considered
self.decoder = JSONDecoder()
logger.info("ZMQ monitor initialization finished")
- def run (self):
+ def run(self):
self.context = zmq.Context()
self.socket = self.context.socket(zmq.SUB)
- logger.info("ZMQ monitor started listening @ {pub}".format( pub = self.zmq_publisher ) )
+ logger.info("ZMQ monitor started listening @ {pub}".format(pub=self.zmq_publisher))
self.socket.connect(self.zmq_publisher)
self.socket.setsockopt(zmq.SUBSCRIBE, '')
@@ -46,10 +44,10 @@ class ZmqMonitorSession(threading.Thread): # allow this exception since it comes from ZMQ monitor termination
pass
else:
- logger.error("ZMQ monitor thrown an exception. Received exception: {ex}".format(ex = e))
+ logger.error("ZMQ monitor thrown an exception. Received exception: {ex}".format(ex=e))
raise
- def join (self, timeout = None):
+ def join(self, timeout=None):
self.stoprequest.set()
logger.debug("Handling termination of ZMQ monitor thread")
self.socket.close()
@@ -57,15 +55,15 @@ class ZmqMonitorSession(threading.Thread): logger.info("ZMQ monitor resources has been freed.")
super(ZmqMonitorSession, self).join(timeout)
- def parse_and_update_zmq_dump (self, zmq_dump):
+ def parse_and_update_zmq_dump(self, zmq_dump):
try:
dict_obj = self.decoder.decode(zmq_dump)
except ValueError:
- logger.error("ZMQ dump failed JSON-RPC decode. Ignoring. Bad dump was: {dump}".format(dump = zmq_dump))
+ logger.error("ZMQ dump failed JSON-RPC decode. Ignoring. Bad dump was: {dump}".format(dump=zmq_dump))
dict_obj = None
# add to trex_obj zmq latest dump, based on its 'name' header
- if dict_obj is not None and dict_obj!={}:
+ if dict_obj is not None and dict_obj != {}:
self.trexObj.zmq_dump[dict_obj['name']] = dict_obj
if self.first_dump:
# change TRexStatus from starting to Running once the first ZMQ dump is obtained and parsed successfully
diff --git a/scripts/external_libs/PyYAML-3.01/LICENSE b/scripts/external_libs/PyYAML-3.01/LICENSE new file mode 100644 index 00000000..050ced23 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2006 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/scripts/external_libs/PyYAML-3.01/PKG-INFO b/scripts/external_libs/PyYAML-3.01/PKG-INFO new file mode 100644 index 00000000..6ec73b1f --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/PKG-INFO @@ -0,0 +1,28 @@ +Metadata-Version: 1.0 +Name: PyYAML +Version: 3.01 +Summary: YAML parser and emitter for Python +Home-page: http://pyyaml.org/wiki/PyYAML +Author: Kirill Simonov +Author-email: xi@resolvent.net +License: MIT +Download-URL: http://pyyaml.org/download/pyyaml/PyYAML-3.01.tar.gz +Description: YAML is a data serialization format designed for human readability and + interaction with scripting languages. PyYAML is a YAML parser and + emitter for Python. + + PyYAML features a complete YAML 1.1 parser, Unicode support, pickle + support, capable extension API, and sensible error messages. PyYAML + supports standard YAML tags and provides Python-specific tags that allow + to represent an arbitrary Python object. + + PyYAML is applicable for a broad range of tasks from complex + configuration files to object serialization and persistance. +Platform: Any +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup diff --git a/scripts/external_libs/PyYAML-3.01/README b/scripts/external_libs/PyYAML-3.01/README new file mode 100644 index 00000000..8a6dec77 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/README @@ -0,0 +1,18 @@ +PyYAML 3000 - The next generation YAML parser and emitter for Python. + +To install, type 'python setup.py install'. + +For more information, check the PyYAML homepage: +'http://pyyaml.org/wiki/PyYAML'. + +Documentation (rough and incomplete though): +'http://pyyaml.org/wiki/PyYAMLDocumentation'. + +Post your questions and opinions to the YAML-Core mailing list: +'http://lists.sourceforge.net/lists/listinfo/yaml-core'. + +Submit bug reports and feature requests to the PyYAML bug tracker: +'http://pyyaml.org/newticket?component=pyyaml'. 
+ +PyYAML 3000 is written by Kirill Simonov <xi@resolvent.net>. It is released +under the MIT license. See the file LICENSE for more details. diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/__init__.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/__init__.py new file mode 100644 index 00000000..c30973a3 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/__init__.py @@ -0,0 +1,284 @@ + +from error import * + +from tokens import * +from events import * +from nodes import * + +from loader import * +from dumper import * + +def scan(stream, Loader=Loader): + """ + Scan a YAML stream and produce scanning tokens. + """ + loader = Loader(stream) + while loader.check_token(): + yield loader.get_token() + +def parse(stream, Loader=Loader): + """ + Parse a YAML stream and produce parsing events. + """ + loader = Loader(stream) + while loader.check_event(): + yield loader.get_event() + +def compose(stream, Loader=Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding representation tree. + """ + loader = Loader(stream) + if loader.check_node(): + return loader.get_node() + +def compose_all(stream, Loader=Loader): + """ + Parse all YAML documents in a stream + and produce corresponsing representation trees. + """ + loader = Loader(stream) + while loader.check_node(): + yield loader.get_node() + +def load_all(stream, Loader=Loader): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + """ + loader = Loader(stream) + while loader.check_data(): + yield loader.get_data() + +def load(stream, Loader=Loader): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + """ + loader = Loader(stream) + if loader.check_data(): + return loader.get_data() + +def safe_load_all(stream): + """ + Parse all YAML documents in a stream + and produce corresponding Python objects. + Resolve only basic YAML tags. + """ + return load_all(stream, SafeLoader) + +def safe_load(stream): + """ + Parse the first YAML document in a stream + and produce the corresponding Python object. + Resolve only basic YAML tags. + """ + return load(stream, SafeLoader) + +def emit(events, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + """ + Emit YAML parsing events into a stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + try: + from cStringIO import StringIO + except ImportError: + from StringIO import StringIO + stream = StringIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + for event in events: + dumper.emit(event) + if getvalue: + return getvalue() + +def serialize_all(nodes, stream=None, Dumper=Dumper, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding='utf-8', explicit_start=None, explicit_end=None, + version=None, tags=None): + """ + Serialize a sequence of representation trees into a YAML stream. + If stream is None, return the produced string instead. 
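To make the API sketched above concrete, a minimal (hypothetical) use of the load helpers: safe_load for a single document restricted to basic tags, load_all for a multi-document stream:

    import yaml

    data = yaml.safe_load("ports: [0, 1]\nlimit: 1000\n")
    # -> {'ports': [0, 1], 'limit': 1000}
    for doc in yaml.load_all("--- 1\n--- 2\n"):
        print doc                                # prints 1, then 2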
+ """ + getvalue = None + if stream is None: + try: + from cStringIO import StringIO + except ImportError: + from StringIO import StringIO + stream = StringIO() + getvalue = stream.getvalue + dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end) + dumper.open() + for node in nodes: + dumper.serialize(node) + dumper.close() + if getvalue: + return getvalue() + +def serialize(node, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a representation tree into a YAML stream. + If stream is None, return the produced string instead. + """ + return serialize_all([node], stream, Dumper=Dumper, **kwds) + +def dump_all(documents, stream=None, Dumper=Dumper, + default_style=None, default_flow_style=None, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding='utf-8', explicit_start=None, explicit_end=None, + version=None, tags=None): + """ + Serialize a sequence of Python objects into a YAML stream. + If stream is None, return the produced string instead. + """ + getvalue = None + if stream is None: + try: + from cStringIO import StringIO + except ImportError: + from StringIO import StringIO + stream = StringIO() + getvalue = stream.getvalue + dumper = Dumper(stream, default_style=default_style, + default_flow_style=default_flow_style, + canonical=canonical, indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break, + encoding=encoding, version=version, tags=tags, + explicit_start=explicit_start, explicit_end=explicit_end) + dumper.open() + for data in documents: + dumper.represent(data) + dumper.close() + if getvalue: + return getvalue() + +def dump(data, stream=None, Dumper=Dumper, **kwds): + """ + Serialize a Python object into a YAML stream. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=Dumper, **kwds) + +def safe_dump_all(documents, stream=None, **kwds): + """ + Serialize a sequence of Python objects into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all(documents, stream, Dumper=SafeDumper, **kwds) + +def safe_dump(data, stream=None, **kwds): + """ + Serialize a Python object into a YAML stream. + Produce only basic YAML tags. + If stream is None, return the produced string instead. + """ + return dump_all([data], stream, Dumper=SafeDumper, **kwds) + +def add_implicit_resolver(tag, regexp, first=None, + Loader=Loader, Dumper=Dumper): + """ + Add an implicit scalar detector. + If an implicit scalar value matches the given regexp, + the corresponding tag is assigned to the scalar. + first is a sequence of possible initial characters or None. + """ + Loader.add_implicit_resolver(tag, regexp, first) + Dumper.add_implicit_resolver(tag, regexp, first) + +def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper): + """ + Add a path based resolver for the given tag. + A path is a list of keys that forms a path + to a node in the representation tree. + Keys can be string values, integers, or None. + """ + Loader.add_path_resolver(tag, path, kind) + Dumper.add_path_resolver(tag, path, kind) + +def add_constructor(tag, constructor, Loader=Loader): + """ + Add a constructor for the given tag. + Constructor is a function that accepts a Loader instance + and a node object and produces the corresponding Python object. 
+ """ + Loader.add_constructor(tag, constructor) + +def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): + """ + Add a multi-constructor for the given tag prefix. + Multi-constructor is called for a node if its tag starts with tag_prefix. + Multi-constructor accepts a Loader instance, a tag suffix, + and a node object and produces the corresponding Python object. + """ + Loader.add_multi_constructor(tag_prefix, multi_constructor) + +def add_representer(data_type, representer, Dumper=Dumper): + """ + Add a representer for the given type. + Representer is a function accepting a Dumper instance + and an instance of the given data type + and producing the corresponding representation node. + """ + Dumper.add_representer(data_type, representer) + +def add_multi_representer(data_type, multi_representer, Dumper=Dumper): + """ + Add a representer for the given type. + Multi-representer is a function accepting a Dumper instance + and an instance of the given data type or subtype + and producing the corresponding representation node. + """ + Dumper.add_multi_representer(data_type, multi_representer) + +class YAMLObjectMetaclass(type): + """ + The metaclass for YAMLObject. + """ + def __init__(cls, name, bases, kwds): + super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) + if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: + cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) + cls.yaml_dumper.add_representer(cls, cls.to_yaml) + +class YAMLObject(object): + """ + An object that can dump itself to a YAML stream + and load itself from a YAML stream. + """ + + __metaclass__ = YAMLObjectMetaclass + + yaml_loader = Loader + yaml_dumper = Dumper + + yaml_tag = None + yaml_flow_style = None + + def from_yaml(cls, loader, node): + """ + Convert a representation node to a Python object. + """ + return loader.construct_yaml_object(node, cls) + from_yaml = classmethod(from_yaml) + + def to_yaml(cls, dumper, data): + """ + Convert a Python object to a representation node. + """ + return dumper.represent_yaml_object(cls.yaml_tag, data, cls, + flow_style=cls.yaml_flow_style) + to_yaml = classmethod(to_yaml) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/composer.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/composer.py new file mode 100644 index 00000000..d256b054 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/composer.py @@ -0,0 +1,123 @@ + +__all__ = ['Composer', 'ComposerError'] + +from error import MarkedYAMLError +from events import * +from nodes import * + +class ComposerError(MarkedYAMLError): + pass + +class Composer: + + def __init__(self): + self.anchors = {} + + def check_node(self): + # If there are more documents available? + return not self.check_event(StreamEndEvent) + + def get_node(self): + # Get the root node of the next document. + if not self.check_event(StreamEndEvent): + return self.compose_document() + + def __iter__(self): + # Iterator protocol. + while not self.check_event(StreamEndEvent): + yield self.compose_document() + + def compose_document(self): + + # Drop the STREAM-START event. + if self.check_event(StreamStartEvent): + self.get_event() + + # Drop the DOCUMENT-START event. + self.get_event() + + # Compose the root node. + node = self.compose_node(None, None) + + # Drop the DOCUMENT-END event. 
+ self.get_event() + + self.complete_anchors = {} + return node + + def compose_node(self, parent, index): + if self.check_event(AliasEvent): + event = self.get_event() + anchor = event.anchor + if anchor not in self.anchors: + raise ComposerError(None, None, "found undefined alias %r" + % anchor.encode('utf-8'), event.start_mark) + return self.anchors[anchor] + event = self.peek_event() + anchor = event.anchor + if anchor is not None: + if anchor in self.anchors: + raise ComposerError("found duplicate anchor %r; first occurence" + % anchor.encode('utf-8'), self.anchors[anchor].start_mark, + "second occurence", event.start_mark) + self.descend_resolver(parent, index) + if self.check_event(ScalarEvent): + node = self.compose_scalar_node(anchor) + elif self.check_event(SequenceStartEvent): + node = self.compose_sequence_node(anchor) + elif self.check_event(MappingStartEvent): + node = self.compose_mapping_node(anchor) + self.ascend_resolver() + return node + + def compose_scalar_node(self, anchor): + event = self.get_event() + tag = event.tag + if tag is None or tag == u'!': + tag = self.resolve(ScalarNode, event.value, event.implicit) + node = ScalarNode(tag, event.value, + event.start_mark, event.end_mark, style=event.style) + if anchor is not None: + self.anchors[anchor] = node + return node + + def compose_sequence_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == u'!': + tag = self.resolve(SequenceNode, None, start_event.implicit) + node = SequenceNode(tag, [], + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + index = 0 + while not self.check_event(SequenceEndEvent): + node.value.append(self.compose_node(node, index)) + index += 1 + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + + def compose_mapping_node(self, anchor): + start_event = self.get_event() + tag = start_event.tag + if tag is None or tag == u'!': + tag = self.resolve(MappingNode, None, start_event.implicit) + node = MappingNode(tag, {}, + start_event.start_mark, None, + flow_style=start_event.flow_style) + if anchor is not None: + self.anchors[anchor] = node + while not self.check_event(MappingEndEvent): + key_event = self.peek_event() + item_key = self.compose_node(node, None) + if item_key in node.value: + raise ComposerError("while composing a mapping", start_event.start_mark, + "found duplicate key", key_event.start_mark) + item_value = self.compose_node(node, item_key) + node.value[item_key] = item_value + end_event = self.get_event() + node.end_mark = end_event.end_mark + return node + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/constructor.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/constructor.py new file mode 100644 index 00000000..57ad53d1 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/constructor.py @@ -0,0 +1,638 @@ + +__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', + 'ConstructorError'] + +from error import * +from nodes import * +from composer import * + +try: + import datetime + datetime_available = True +except ImportError: + datetime_available = False + +try: + set +except NameError: + from sets import Set as set + +import binascii, re, sys + +class ConstructorError(MarkedYAMLError): + pass + +class BaseConstructor(Composer): + + yaml_constructors = {} + yaml_multi_constructors = {} + + def __init__(self): + self.constructed_objects = {} + self.recursive_objects = {} + + def check_data(self): + # If 
there are more documents available? + return self.check_node() + + def get_data(self): + # Construct and return the next document. + if self.check_node(): + return self.construct_document(self.get_node()) + + def __iter__(self): + # Iterator protocol. + while self.check_node(): + yield self.construct_document(self.get_node()) + + def construct_document(self, node): + data = self.construct_object(node) + self.constructed_objects = {} + self.recursive_objects = {} + return data + + def construct_object(self, node): + if node in self.constructed_objects: + return self.constructed_objects[node] + if node in self.recursive_objects: + raise ConstructorError(None, None, + "found recursive node", node.start_mark) + self.recursive_objects[node] = None + constructor = None + if node.tag in self.yaml_constructors: + constructor = lambda node: self.yaml_constructors[node.tag](self, node) + else: + for tag_prefix in self.yaml_multi_constructors: + if node.tag.startswith(tag_prefix): + tag_suffix = node.tag[len(tag_prefix):] + constructor = lambda node: \ + self.yaml_multi_constructors[tag_prefix](self, tag_suffix, node) + break + else: + if None in self.yaml_multi_constructors: + constructor = lambda node: \ + self.yaml_multi_constructors[None](self, node.tag, node) + elif None in self.yaml_constructors: + constructor = lambda node: \ + self.yaml_constructors[None](self, node) + elif isinstance(node, ScalarNode): + constructor = self.construct_scalar + elif isinstance(node, SequenceNode): + constructor = self.construct_sequence + elif isinstance(node, MappingNode): + constructor = self.construct_mapping + else: + print node.tag + data = constructor(node) + self.constructed_objects[node] = data + del self.recursive_objects[node] + return data + + def construct_scalar(self, node): + if not isinstance(node, ScalarNode): + if isinstance(node, MappingNode): + for key_node in node.value: + if key_node.tag == u'tag:yaml.org,2002:value': + return self.construct_scalar(node.value[key_node]) + raise ConstructorError(None, None, + "expected a scalar node, but found %s" % node.id, + node.start_mark) + return node.value + + def construct_sequence(self, node): + if not isinstance(node, SequenceNode): + raise ConstructorError(None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark) + return [self.construct_object(child) for child in node.value] + + def construct_mapping(self, node): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + mapping = {} + merge = None + for key_node in node.value: + if key_node.tag == u'tag:yaml.org,2002:merge': + if merge is not None: + raise ConstructorError("while constructing a mapping", node.start_mark, + "found duplicate merge key", key_node.start_mark) + value_node = node.value[key_node] + if isinstance(value_node, MappingNode): + merge = [self.construct_mapping(value_node)] + elif isinstance(value_node, SequenceNode): + merge = [] + for subnode in value_node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing a mapping", + node.start_mark, + "expected a mapping for merging, but found %s" + % subnode.id, subnode.start_mark) + merge.append(self.construct_mapping(subnode)) + merge.reverse() + else: + raise ConstructorError("while constructing a mapping", node.start_mark, + "expected a mapping or list of mappings for merging, but found %s" + % value_node.id, value_node.start_mark) + elif key_node.tag == 
u'tag:yaml.org,2002:value': + if '=' in mapping: + raise ConstructorError("while construction a mapping", node.start_mark, + "found duplicate value key", key_node.start_mark) + value = self.construct_object(node.value[key_node]) + mapping['='] = value + else: + key = self.construct_object(key_node) + try: + duplicate_key = key in mapping + except TypeError, exc: + raise ConstructorError("while constructing a mapping", node.start_mark, + "found unacceptable key (%s)" % exc, key_node.start_mark) + if duplicate_key: + raise ConstructorError("while constructing a mapping", node.start_mark, + "found duplicate key", key_node.start_mark) + value = self.construct_object(node.value[key_node]) + mapping[key] = value + if merge is not None: + merge.append(mapping) + mapping = {} + for submapping in merge: + mapping.update(submapping) + return mapping + + def construct_pairs(self, node): + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + pairs = [] + for key_node in node.value: + key = self.construct_object(key_node) + value = self.construct_object(node.value[key_node]) + pairs.append((key, value)) + return pairs + + def add_constructor(cls, tag, constructor): + if not 'yaml_constructors' in cls.__dict__: + cls.yaml_constructors = cls.yaml_constructors.copy() + cls.yaml_constructors[tag] = constructor + add_constructor = classmethod(add_constructor) + + def add_multi_constructor(cls, tag_prefix, multi_constructor): + if not 'yaml_multi_constructors' in cls.__dict__: + cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() + cls.yaml_multi_constructors[tag_prefix] = multi_constructor + add_multi_constructor = classmethod(add_multi_constructor) + +class SafeConstructor(BaseConstructor): + + def construct_yaml_null(self, node): + self.construct_scalar(node) + return None + + bool_values = { + u'yes': True, + u'no': False, + u'true': True, + u'false': False, + u'on': True, + u'off': False, + } + + def construct_yaml_bool(self, node): + value = self.construct_scalar(node) + return self.bool_values[value.lower()] + + def construct_yaml_int(self, node): + value = str(self.construct_scalar(node)) + value = value.replace('_', '') + sign = +1 + if value[0] == '-': + sign = -1 + if value[0] in '+-': + value = value[1:] + if value == '0': + return 0 + elif value.startswith('0b'): + return sign*int(value[2:], 2) + elif value.startswith('0x'): + return sign*int(value[2:], 16) + elif value[0] == '0': + return sign*int(value, 8) + elif ':' in value: + digits = [int(part) for part in value.split(':')] + digits.reverse() + base = 1 + value = 0 + for digit in digits: + value += digit*base + base *= 60 + return sign*value + else: + return sign*int(value) + + inf_value = 1e300000 + nan_value = inf_value/inf_value + + def construct_yaml_float(self, node): + value = str(self.construct_scalar(node)) + value = value.replace('_', '') + sign = +1 + if value[0] == '-': + sign = -1 + if value[0] in '+-': + value = value[1:] + if value.lower() == '.inf': + return sign*self.inf_value + elif value.lower() == '.nan': + return self.nan_value + elif ':' in value: + digits = [float(part) for part in value.split(':')] + digits.reverse() + base = 1 + value = 0.0 + for digit in digits: + value += digit*base + base *= 60 + return sign*value + else: + return float(value) + + def construct_yaml_binary(self, node): + value = self.construct_scalar(node) + try: + return str(value).decode('base64') + except (binascii.Error, 
UnicodeEncodeError), exc: + raise ConstructorError(None, None, + "failed to decode base64 data: %s" % exc, node.start_mark) + + timestamp_regexp = re.compile( + ur'''^(?P<year>[0-9][0-9][0-9][0-9]) + -(?P<month>[0-9][0-9]?) + -(?P<day>[0-9][0-9]?) + (?:(?:[Tt]|[ \t]+) + (?P<hour>[0-9][0-9]?) + :(?P<minute>[0-9][0-9]) + :(?P<second>[0-9][0-9]) + (?:\.(?P<fraction>[0-9]*))? + (?:[ \t]*(?:Z|(?P<tz_hour>[-+][0-9][0-9]?) + (?::(?P<tz_minute>[0-9][0-9])?)?))?)?$''', re.X) + + def construct_yaml_timestamp(self, node): + value = self.construct_scalar(node) + match = self.timestamp_regexp.match(node.value) + values = match.groupdict() + for key in values: + if values[key]: + values[key] = int(values[key]) + else: + values[key] = 0 + fraction = values['fraction'] + if fraction: + while 10*fraction < 1000000: + fraction *= 10 + values['fraction'] = fraction + stamp = datetime.datetime(values['year'], values['month'], values['day'], + values['hour'], values['minute'], values['second'], values['fraction']) + diff = datetime.timedelta(hours=values['tz_hour'], minutes=values['tz_minute']) + return stamp-diff + + def construct_yaml_omap(self, node): + # Note: we do not check for duplicate keys, because it's too + # CPU-expensive. + if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + omap = [] + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing an ordered map", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node = subnode.value.keys()[0] + key = self.construct_object(key_node) + value = self.construct_object(subnode.value[key_node]) + omap.append((key, value)) + return omap + + def construct_yaml_pairs(self, node): + # Note: the same code as `construct_yaml_omap`. 
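As a side note on the scalar constructors above: construct_yaml_int and construct_yaml_float accept underscores, base prefixes and sexagesimal notation. A hedged sketch, assuming the default resolver rules tag these scalars as int/float:

    import yaml

    assert yaml.safe_load('12_345') == 12345   # underscores are stripped
    assert yaml.safe_load('0x1A') == 26        # hexadecimal prefix
    assert yaml.safe_load('010') == 8          # leading zero means octal
    assert yaml.safe_load('1:30') == 90        # base-60 (sexagesimal) value
    assert yaml.safe_load('.inf') > 1e308      # resolves to float infinity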
+ if not isinstance(node, SequenceNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a sequence, but found %s" % node.id, node.start_mark) + pairs = [] + for subnode in node.value: + if not isinstance(subnode, MappingNode): + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a mapping of length 1, but found %s" % subnode.id, + subnode.start_mark) + if len(subnode.value) != 1: + raise ConstructorError("while constructing pairs", node.start_mark, + "expected a single mapping item, but found %d items" % len(subnode.value), + subnode.start_mark) + key_node = subnode.value.keys()[0] + key = self.construct_object(key_node) + value = self.construct_object(subnode.value[key_node]) + pairs.append((key, value)) + return pairs + + def construct_yaml_set(self, node): + value = self.construct_mapping(node) + return set(value) + + def construct_yaml_str(self, node): + value = self.construct_scalar(node) + try: + return str(value) + except UnicodeEncodeError: + return value + + def construct_yaml_seq(self, node): + return self.construct_sequence(node) + + def construct_yaml_map(self, node): + return self.construct_mapping(node) + + def construct_yaml_object(self, node, cls): + state = self.construct_mapping(node) + data = cls.__new__(cls) + if hasattr(data, '__setstate__'): + data.__setstate__(state) + else: + data.__dict__.update(state) + return data + + def construct_undefined(self, node): + raise ConstructorError(None, None, + "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'), + node.start_mark) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:null', + SafeConstructor.construct_yaml_null) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:bool', + SafeConstructor.construct_yaml_bool) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:int', + SafeConstructor.construct_yaml_int) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:float', + SafeConstructor.construct_yaml_float) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:binary', + SafeConstructor.construct_yaml_binary) + +if datetime_available: + SafeConstructor.add_constructor( + u'tag:yaml.org,2002:timestamp', + SafeConstructor.construct_yaml_timestamp) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:omap', + SafeConstructor.construct_yaml_omap) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:pairs', + SafeConstructor.construct_yaml_pairs) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:set', + SafeConstructor.construct_yaml_set) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:str', + SafeConstructor.construct_yaml_str) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:seq', + SafeConstructor.construct_yaml_seq) + +SafeConstructor.add_constructor( + u'tag:yaml.org,2002:map', + SafeConstructor.construct_yaml_map) + +SafeConstructor.add_constructor(None, + SafeConstructor.construct_undefined) + +class Constructor(SafeConstructor): + + def construct_python_str(self, node): + return self.construct_scalar(node).encode('utf-8') + + def construct_python_unicode(self, node): + return self.construct_scalar(node) + + def construct_python_long(self, node): + return long(self.construct_yaml_int(node)) + + def construct_python_complex(self, node): + return complex(self.construct_scalar(node)) + + def construct_python_tuple(self, node): + return tuple(self.construct_yaml_seq(node)) + + def find_python_module(self, name, mark): + if not name: + raise ConstructorError("while 
constructing a Python module", mark, + "expected non-empty name appended to the tag", mark) + try: + __import__(name) + except ImportError, exc: + raise ConstructorError("while constructing a Python module", mark, + "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark) + return sys.modules[name] + + def find_python_name(self, name, mark): + if not name: + raise ConstructorError("while constructing a Python object", mark, + "expected non-empty name appended to the tag", mark) + if u'.' in name: + # Python 2.4 only + #module_name, object_name = name.rsplit('.', 1) + items = name.split('.') + object_name = items.pop() + module_name = '.'.join(items) + else: + module_name = '__builtin__' + object_name = name + try: + __import__(module_name) + except ImportError, exc: + raise ConstructorError("while constructing a Python object", mark, + "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark) + module = sys.modules[module_name] + if not hasattr(module, object_name): + raise ConstructorError("while constructing a Python object", mark, + "cannot find %r in the module %r" % (object_name.encode('utf-8'), + module.__name__), mark) + return getattr(module, object_name) + + def construct_python_name(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python name", node.start_mark, + "expected the empty value, but found %r" % value.encode('utf-8'), + node.start_mark) + return self.find_python_name(suffix, node.start_mark) + + def construct_python_module(self, suffix, node): + value = self.construct_scalar(node) + if value: + raise ConstructorError("while constructing a Python module", node.start_mark, + "expected the empty value, but found %r" % value.encode('utf-8'), + node.start_mark) + return self.find_python_module(suffix, node.start_mark) + + class classobj: pass + + def make_python_instance(self, suffix, node, + args=None, kwds=None, newobj=False): + if not args: + args = [] + if not kwds: + kwds = {} + cls = self.find_python_name(suffix, node.start_mark) + if newobj and isinstance(cls, type(self.classobj)) \ + and not args and not kwds: + instance = self.classobj() + instance.__class__ = cls + return instance + elif newobj and isinstance(cls, type): + return cls.__new__(cls, *args, **kwds) + else: + return cls(*args, **kwds) + + def set_python_instance_state(self, instance, state): + if hasattr(instance, '__setstate__'): + instance.__setstate__(state) + else: + slotstate = {} + if isinstance(state, tuple) and len(state) == 2: + state, slotstate = state + if hasattr(instance, '__dict__'): + instance.__dict__.update(state) + elif state: + slotstate.update(state) + for key, value in slotstate.items(): + setattr(object, key, value) + + def construct_python_object(self, suffix, node): + # Format: + # !!python/object:module.name { ... state ... } + instance = self.make_python_instance(suffix, node, newobj=True) + state = self.construct_mapping(node) + self.set_python_instance_state(instance, state) + return instance + + def construct_python_object_apply(self, suffix, node, newobj=False): + # Format: + # !!python/object/apply # (or !!python/object/new) + # args: [ ... arguments ... ] + # kwds: { ... keywords ... } + # state: ... state ... + # listitems: [ ... listitems ... ] + # dictitems: { ... dictitems ... } + # or short format: + # !!python/object/apply [ ... arguments ... 
] + # The difference between !!python/object/apply and !!python/object/new + # is how an object is created, check make_python_instance for details. + if isinstance(node, SequenceNode): + args = self.construct_sequence(node) + kwds = {} + state = {} + listitems = [] + dictitems = {} + else: + value = self.construct_mapping(node) + args = value.get('args', []) + kwds = value.get('kwds', {}) + state = value.get('state', {}) + listitems = value.get('listitems', []) + dictitems = value.get('dictitems', {}) + instance = self.make_python_instance(suffix, node, args, kwds, newobj) + if state: + self.set_python_instance_state(instance, state) + if listitems: + instance.extend(listitems) + if dictitems: + for key in dictitems: + instance[key] = dictitems[key] + return instance + + def construct_python_object_new(self, suffix, node): + return self.construct_python_object_apply(suffix, node, newobj=True) + + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/none', + Constructor.construct_yaml_null) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/bool', + Constructor.construct_yaml_bool) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/str', + Constructor.construct_python_str) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/unicode', + Constructor.construct_python_unicode) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/int', + Constructor.construct_yaml_int) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/long', + Constructor.construct_python_long) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/float', + Constructor.construct_yaml_float) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/complex', + Constructor.construct_python_complex) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/list', + Constructor.construct_yaml_seq) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/tuple', + Constructor.construct_python_tuple) + +Constructor.add_constructor( + u'tag:yaml.org,2002:python/dict', + Constructor.construct_yaml_map) + +Constructor.add_multi_constructor( + u'tag:yaml.org,2002:python/name:', + Constructor.construct_python_name) + +Constructor.add_multi_constructor( + u'tag:yaml.org,2002:python/module:', + Constructor.construct_python_module) + +Constructor.add_multi_constructor( + u'tag:yaml.org,2002:python/object:', + Constructor.construct_python_object) + +Constructor.add_multi_constructor( + u'tag:yaml.org,2002:python/object/apply:', + Constructor.construct_python_object_apply) + +Constructor.add_multi_constructor( + u'tag:yaml.org,2002:python/object/new:', + Constructor.construct_python_object_new) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/dumper.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/dumper.py new file mode 100644 index 00000000..355c1e2f --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/dumper.py @@ -0,0 +1,62 @@ + +__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] + +from emitter import * +from serializer import * +from representer import * +from resolver import * + +class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): + + def __init__(self, stream, + default_style=None, default_flow_style=None, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_uncode=allow_unicode, line_break=line_break) + Serializer.__init__(self, 
encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style) + Resolver.__init__(self) + +class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=None, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + SafeRepresenter.__init__(self, default_style=default_style, + default_flow_style=default_flow_style) + Resolver.__init__(self) + +class Dumper(Emitter, Serializer, Representer, Resolver): + + def __init__(self, stream, + default_style=None, default_flow_style=None, + canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None, + encoding=None, explicit_start=None, explicit_end=None, + version=None, tags=None): + Emitter.__init__(self, stream, canonical=canonical, + indent=indent, width=width, + allow_unicode=allow_unicode, line_break=line_break) + Serializer.__init__(self, encoding=encoding, + explicit_start=explicit_start, explicit_end=explicit_end, + version=version, tags=tags) + Representer.__init__(self, default_style=default_style, + default_flow_style=default_flow_style) + Resolver.__init__(self) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/emitter.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/emitter.py new file mode 100644 index 00000000..a34c4526 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/emitter.py @@ -0,0 +1,1162 @@ + +# Emitter expects events obeying the following grammar: +# stream ::= STREAM-START document* STREAM-END +# document ::= DOCUMENT-START node DOCUMENT-END +# node ::= SCALAR | sequence | mapping +# sequence ::= SEQUENCE-START node* SEQUENCE-END +# mapping ::= MAPPING-START (node node)* MAPPING-END + +__all__ = ['Emitter', 'EmitterError'] + +from error import YAMLError +from events import * + +import re + +class EmitterError(YAMLError): + pass + +class ScalarAnalysis: + def __init__(self, scalar, empty, multiline, + allow_flow_plain, allow_block_plain, + allow_single_quoted, allow_double_quoted, + allow_block): + self.scalar = scalar + self.empty = empty + self.multiline = multiline + self.allow_flow_plain = allow_flow_plain + self.allow_block_plain = allow_block_plain + self.allow_single_quoted = allow_single_quoted + self.allow_double_quoted = allow_double_quoted + self.allow_block = allow_block + +class Emitter: + + DEFAULT_TAG_PREFIXES = { + u'!' : u'!', + u'tag:yaml.org,2002:' : u'!!', + } + + def __init__(self, stream, canonical=None, indent=None, width=None, + allow_unicode=None, line_break=None): + + # The stream should have the methods `write` and possibly `flush`. + self.stream = stream + + # Encoding can be overriden by STREAM-START. + self.encoding = None + + # Emitter is a state machine with a stack of states to handle nested + # structures. + self.states = [] + self.state = self.expect_stream_start + + # Current event and the event queue. + self.events = [] + self.event = None + + # The current indentation level and the stack of previous indents. 
+ self.indents = [] + self.indent = None + + # Flow level. + self.flow_level = 0 + + # Contexts. + self.root_context = False + self.sequence_context = False + self.mapping_context = False + self.simple_key_context = False + + # Characteristics of the last emitted character: + # - current position. + # - is it a whitespace? + # - is it an indention character + # (indentation space, '-', '?', or ':')? + self.line = 0 + self.column = 0 + self.whitespace = True + self.indention = True + + # Formatting details. + self.canonical = canonical + self.allow_unicode = allow_unicode + self.best_indent = 2 + if indent and 1 < indent < 10: + self.best_indent = indent + self.best_width = 80 + if width and width > self.best_indent*2: + self.best_width = width + self.best_line_break = u'\n' + if line_break in [u'\r', u'\n', u'\r\n']: + self.best_line_break = line_break + + # Tag prefixes. + self.tag_prefixes = None + + # Prepared anchor and tag. + self.prepared_anchor = None + self.prepared_tag = None + + # Scalar analysis and style. + self.analysis = None + self.style = None + + def emit(self, event): + self.events.append(event) + while not self.need_more_events(): + self.event = self.events.pop(0) + self.state() + self.event = None + + # In some cases, we wait for a few next events before emitting. + + def need_more_events(self): + if not self.events: + return True + event = self.events[0] + if isinstance(event, DocumentStartEvent): + return self.need_events(1) + elif isinstance(event, SequenceStartEvent): + return self.need_events(2) + elif isinstance(event, MappingStartEvent): + return self.need_events(3) + else: + return False + + def need_events(self, count): + level = 0 + for event in self.events[1:]: + if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): + level += 1 + elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): + level -= 1 + elif isinstance(event, StreamEndEvent): + level = -1 + if level < 0: + return False + return (len(self.events) < count+1) + + def increase_indent(self, flow=False, indentless=False): + self.indents.append(self.indent) + if self.indent is None: + if flow: + self.indent = self.best_indent + else: + self.indent = 0 + elif not indentless: + self.indent += self.best_indent + + # States. + + # Stream handlers. + + def expect_stream_start(self): + if isinstance(self.event, StreamStartEvent): + if self.event.encoding: + self.encoding = self.event.encoding + self.write_stream_start() + self.state = self.expect_first_document_start + else: + raise EmitterError("expected StreamStartEvent, but got %s" + % self.event) + + def expect_nothing(self): + raise EmitterError("expected nothing, but got %s" % self.event) + + # Document handlers. 
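The event grammar quoted at the top of emitter.py can also be exercised directly through the module-level emit() helper; a small illustrative sketch, assuming the standard event classes from yaml.events:

    import yaml
    from yaml.events import (StreamStartEvent, DocumentStartEvent, ScalarEvent,
                             DocumentEndEvent, StreamEndEvent)

    events = [
        StreamStartEvent(),
        DocumentStartEvent(),
        ScalarEvent(anchor=None, tag=None, implicit=(True, True), value=u'hello'),
        DocumentEndEvent(),
        StreamEndEvent(),
    ]
    print yaml.emit(events)    # expected to emit a single plain-scalar document: hello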
+ + def expect_first_document_start(self): + return self.expect_document_start(first=True) + + def expect_document_start(self, first=False): + if isinstance(self.event, DocumentStartEvent): + if self.event.version: + version_text = self.prepare_version(self.event.version) + self.write_version_directive(version_text) + self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() + if self.event.tags: + handles = self.event.tags.keys() + handles.sort() + for handle in handles: + prefix = self.event.tags[handle] + self.tag_prefixes[prefix] = handle + handle_text = self.prepare_tag_handle(handle) + prefix_text = self.prepare_tag_prefix(prefix) + self.write_tag_directive(handle_text, prefix_text) + implicit = (first and not self.event.explicit and not self.canonical + and not self.event.version and not self.event.tags + and not self.check_empty_document()) + if not implicit: + self.write_indent() + self.write_indicator(u'---', True) + if self.canonical: + self.write_indent() + self.state = self.expect_document_root + elif isinstance(self.event, StreamEndEvent): + self.write_stream_end() + self.state = self.expect_nothing + else: + raise EmitterError("expected DocumentStartEvent, but got %s" + % self.event) + + def expect_document_end(self): + if isinstance(self.event, DocumentEndEvent): + self.write_indent() + if self.event.explicit: + self.write_indicator(u'...', True) + self.write_indent() + self.flush_stream() + self.state = self.expect_document_start + else: + raise EmitterError("expected DocumentEndEvent, but got %s" + % self.event) + + def expect_document_root(self): + self.states.append(self.expect_document_end) + self.expect_node(root=True) + + # Node handlers. + + def expect_node(self, root=False, sequence=False, mapping=False, + simple_key=False): + self.root_context = root + self.sequence_context = sequence + self.mapping_context = mapping + self.simple_key_context = simple_key + if isinstance(self.event, AliasEvent): + self.expect_alias() + elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): + self.process_anchor(u'&') + self.process_tag() + if isinstance(self.event, ScalarEvent): + self.expect_scalar() + elif isinstance(self.event, SequenceStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_sequence(): + self.expect_flow_sequence() + else: + self.expect_block_sequence() + elif isinstance(self.event, MappingStartEvent): + if self.flow_level or self.canonical or self.event.flow_style \ + or self.check_empty_mapping(): + self.expect_flow_mapping() + else: + self.expect_block_mapping() + else: + raise EmitterError("expected NodeEvent, but got %s" % self.event) + + def expect_alias(self): + if self.event.anchor is None: + raise EmitterError("anchor is not specified for alias") + self.process_anchor(u'*') + self.state = self.states.pop() + + def expect_scalar(self): + self.increase_indent(flow=True) + self.process_scalar() + self.indent = self.indents.pop() + self.state = self.states.pop() + + # Flow sequence handlers. 
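The document handlers above are what turn the explicit_start/explicit_end options into '---' and '...' markers; a hedged sketch of the effect:

    import yaml

    text = yaml.dump_all([{'a': 1}, {'b': 2}],
                         explicit_start=True, default_flow_style=False)
    # each document is introduced by its own '---' line:
    # ---
    # a: 1
    # ---
    # b: 2
    print text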
+ + def expect_flow_sequence(self): + self.write_indicator(u'[', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_sequence_item + + def expect_first_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator(u']', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + def expect_flow_sequence_item(self): + if isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(u',', False) + self.write_indent() + self.write_indicator(u']', False) + self.state = self.states.pop() + else: + self.write_indicator(u',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + self.states.append(self.expect_flow_sequence_item) + self.expect_node(sequence=True) + + # Flow mapping handlers. + + def expect_flow_mapping(self): + self.write_indicator(u'{', True, whitespace=True) + self.flow_level += 1 + self.increase_indent(flow=True) + self.state = self.expect_first_flow_mapping_key + + def expect_first_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + self.write_indicator(u'}', False) + self.state = self.states.pop() + else: + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator(u'?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_key(self): + if isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.flow_level -= 1 + if self.canonical: + self.write_indicator(u',', False) + self.write_indent() + self.write_indicator(u'}', False) + self.state = self.states.pop() + else: + self.write_indicator(u',', False) + if self.canonical or self.column > self.best_width: + self.write_indent() + if not self.canonical and self.check_simple_key(): + self.states.append(self.expect_flow_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator(u'?', True) + self.states.append(self.expect_flow_mapping_value) + self.expect_node(mapping=True) + + def expect_flow_mapping_simple_value(self): + self.write_indicator(u':', False) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + def expect_flow_mapping_value(self): + if self.canonical or self.column > self.best_width: + self.write_indent() + self.write_indicator(u':', True) + self.states.append(self.expect_flow_mapping_key) + self.expect_node(mapping=True) + + # Block sequence handlers. 
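The flow handlers above and the block handlers below are chosen per node, driven by default_flow_style (or an explicit flow_style hint on the event); for example, with illustrative data:

    import yaml

    data = {'ports': [80, 443]}
    print yaml.dump(data, default_flow_style=True)   # flow style: {ports: [80, 443]}
    print yaml.dump(data, default_flow_style=False)  # block style:
    # ports:
    # - 80
    # - 443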
+ + def expect_block_sequence(self): + indentless = (self.mapping_context and not self.indention) + self.increase_indent(flow=False, indentless=indentless) + self.state = self.expect_first_block_sequence_item + + def expect_first_block_sequence_item(self): + return self.expect_block_sequence_item(first=True) + + def expect_block_sequence_item(self, first=False): + if not first and isinstance(self.event, SequenceEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + self.write_indicator(u'-', True, indention=True) + self.states.append(self.expect_block_sequence_item) + self.expect_node(sequence=True) + + # Block mapping handlers. + + def expect_block_mapping(self): + self.increase_indent(flow=False) + self.state = self.expect_first_block_mapping_key + + def expect_first_block_mapping_key(self): + return self.expect_block_mapping_key(first=True) + + def expect_block_mapping_key(self, first=False): + if not first and isinstance(self.event, MappingEndEvent): + self.indent = self.indents.pop() + self.state = self.states.pop() + else: + self.write_indent() + if self.check_simple_key(): + self.states.append(self.expect_block_mapping_simple_value) + self.expect_node(mapping=True, simple_key=True) + else: + self.write_indicator(u'?', True, indention=True) + self.states.append(self.expect_block_mapping_value) + self.expect_node(mapping=True) + + def expect_block_mapping_simple_value(self): + self.write_indicator(u':', False) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + def expect_block_mapping_value(self): + self.write_indent() + self.write_indicator(u':', True, indention=True) + self.states.append(self.expect_block_mapping_key) + self.expect_node(mapping=True) + + # Checkers. + + def check_empty_sequence(self): + return (isinstance(self.event, SequenceStartEvent) and self.events + and isinstance(self.events[0], SequenceEndEvent)) + + def check_empty_mapping(self): + return (isinstance(self.event, MappingStartEvent) and self.events + and isinstance(self.events[0], MappingEndEvent)) + + def check_empty_document(self): + if not isinstance(self.event, DocumentStartEvent) or not self.events: + return False + event = self.events[0] + return (isinstance(event, ScalarEvent) and event.anchor is None + and event.tag is None and event.implicit and event.value == u'') + + def check_simple_key(self): + length = 0 + if isinstance(self.event, NodeEvent) and self.event.anchor is not None: + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + length += len(self.prepared_anchor) + if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ + and self.event.tag is not None: + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(self.event.tag) + length += len(self.prepared_tag) + if isinstance(self.event, ScalarEvent): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + length += len(self.analysis.scalar) + return (length < 128 and (isinstance(self.event, AliasEvent) + or (isinstance(self.event, ScalarEvent) + and not self.analysis.empty and not self.analysis.multiline) + or self.check_empty_sequence() or self.check_empty_mapping())) + + # Anchor, Tag, and Scalar processors. 
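check_simple_key() above refuses keys longer than 128 characters (and empty or multi-line scalars), so such keys are emitted with the explicit '?' indicator by the block mapping handlers. A sketch with a hypothetical oversized key:

    import yaml

    long_key = 'k' * 200
    print yaml.dump({long_key: 1}, default_flow_style=False)
    # the key no longer qualifies as a simple key, so the output takes the form:
    # ? kkkk...k
    # : 1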
+ + def process_anchor(self, indicator): + if self.event.anchor is None: + self.prepared_anchor = None + return + if self.prepared_anchor is None: + self.prepared_anchor = self.prepare_anchor(self.event.anchor) + if self.prepared_anchor: + self.write_indicator(indicator+self.prepared_anchor, True) + self.prepared_anchor = None + + def process_tag(self): + tag = self.event.tag + if isinstance(self.event, ScalarEvent): + if self.style is None: + self.style = self.choose_scalar_style() + if ((not self.canonical or tag is None) and + ((self.style == '' and self.event.implicit[0]) + or (self.style != '' and self.event.implicit[1]))): + self.prepared_tag = None + return + if self.event.implicit[0] and tag is None: + tag = u'!' + self.prepared_tag = None + else: + if (not self.canonical or tag is None) and self.event.implicit: + self.prepared_tag = None + return + if tag is None: + raise EmitterError("tag is not specified") + if self.prepared_tag is None: + self.prepared_tag = self.prepare_tag(tag) + if self.prepared_tag: + self.write_indicator(self.prepared_tag, True) + self.prepared_tag = None + + def choose_scalar_style(self): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + if self.event.style == '"' or self.canonical: + return '"' + if not self.event.style and self.event.implicit[0]: + if (not (self.simple_key_context and + (self.analysis.empty or self.analysis.multiline)) + and (self.flow_level and self.analysis.allow_flow_plain + or (not self.flow_level and self.analysis.allow_block_plain))): + return '' + if self.event.style and self.event.style in '|>': + if not self.flow_level and self.analysis.allow_block: + return self.event.style + if not self.event.style or self.event.style == '\'': + if (self.analysis.allow_single_quoted and + not (self.simple_key_context and self.analysis.multiline)): + return '\'' + return '"' + + def process_scalar(self): + if self.analysis is None: + self.analysis = self.analyze_scalar(self.event.value) + if self.style is None: + self.style = self.choose_scalar_style() + split = (not self.simple_key_context) + #if self.analysis.multiline and split \ + # and (not self.style or self.style in '\'\"'): + # self.write_indent() + if self.style == '"': + self.write_double_quoted(self.analysis.scalar, split) + elif self.style == '\'': + self.write_single_quoted(self.analysis.scalar, split) + elif self.style == '>': + self.write_folded(self.analysis.scalar) + elif self.style == '|': + self.write_literal(self.analysis.scalar) + else: + self.write_plain(self.analysis.scalar, split) + self.analysis = None + self.style = None + + # Analyzers. + + def prepare_version(self, version): + major, minor = version + if major != 1: + raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) + return u'%d.%d' % (major, minor) + + def prepare_tag_handle(self, handle): + if not handle: + raise EmitterError("tag handle must not be empty") + if handle[0] != u'!' 
or handle[-1] != u'!': + raise EmitterError("tag handle must start and end with '!': %r" + % (handle.encode('utf-8'))) + for ch in handle[1:-1]: + if not (u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-_'): + raise EmitterError("invalid character %r in the tag handle: %r" + % (ch.encode('utf-8'), handle.encode('utf-8'))) + return handle + + def prepare_tag_prefix(self, prefix): + if not prefix: + raise EmitterError("tag prefix must not be empty") + chunks = [] + start = end = 0 + if prefix[0] == u'!': + end = 1 + while end < len(prefix): + ch = prefix[end] + if u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-;/?!:@&=+$,_.~*\'()[]': + end += 1 + else: + if start < end: + chunks.append(prefix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append(u'%%%02X' % ord(ch)) + if start < end: + chunks.append(prefix[start:end]) + return u''.join(chunks) + + def prepare_tag(self, tag): + if not tag: + raise EmitterError("tag must not be empty") + if tag == u'!': + return tag + handle = None + suffix = tag + for prefix in self.tag_prefixes: + if tag.startswith(prefix) \ + and (prefix == u'!' or len(prefix) < len(tag)): + handle = self.tag_prefixes[prefix] + suffix = tag[len(prefix):] + chunks = [] + start = end = 0 + while end < len(suffix): + ch = suffix[end] + if u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-;/?:@&=+$,_.~*\'()[]' \ + or (ch == u'!' and handle != u'!'): + end += 1 + else: + if start < end: + chunks.append(suffix[start:end]) + start = end = end+1 + data = ch.encode('utf-8') + for ch in data: + chunks.append(u'%%%02X' % ord(ch)) + if start < end: + chunks.append(suffix[start:end]) + suffix_text = u''.join(chunks) + if handle: + return u'%s%s' % (handle, suffix_text) + else: + return u'!<%s>' % suffix_text + + def prepare_anchor(self, anchor): + if not anchor: + raise EmitterError("anchor must not be empty") + for ch in anchor: + if not (u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-_'): + raise EmitterError("invalid character %r in the anchor: %r" + % (ch.encode('utf-8'), anchor.encode('utf-8'))) + return anchor + + def analyze_scalar(self, scalar): + + # Empty scalar is a special case. + if not scalar: + return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, + allow_flow_plain=False, allow_block_plain=True, + allow_single_quoted=True, allow_double_quoted=True, + allow_block=False) + + # Indicators and special characters. + block_indicators = False + flow_indicators = False + line_breaks = False + special_characters = False + + # Whitespaces. + inline_spaces = False # non-space space+ non-space + inline_breaks = False # non-space break+ non-space + leading_spaces = False # ^ space+ (non-space | $) + leading_breaks = False # ^ break+ (non-space | $) + trailing_spaces = False # (^ | non-space) space+ $ + trailing_breaks = False # (^ | non-space) break+ $ + inline_breaks_spaces = False # non-space break+ space+ non-space + mixed_breaks_spaces = False # anything else + + # Check document indicators. + if scalar.startswith(u'---') or scalar.startswith(u'...'): + block_indicators = True + flow_indicators = True + + # First character or preceded by a whitespace. + preceeded_by_space = True + + # Last character or followed by a whitespace. + followed_by_space = (len(scalar) == 1 or + scalar[1] in u'\0 \t\r\n\x85\u2028\u2029') + + # The current series of whitespaces contain plain spaces. 
+ spaces = False + + # The current series of whitespaces contain line breaks. + breaks = False + + # The current series of whitespaces contain a space followed by a + # break. + mixed = False + + # The current series of whitespaces start at the beginning of the + # scalar. + leading = False + + index = 0 + while index < len(scalar): + ch = scalar[index] + + # Check for indicators. + + if index == 0: + # Leading indicators are special characters. + if ch in u'#,[]{}#&*!|>\'\"%@`': + flow_indicators = True + block_indicators = True + if ch in u'?:': + flow_indicators = True + if followed_by_space: + block_indicators = True + if ch == u'-' and followed_by_space: + flow_indicators = True + block_indicators = True + else: + # Some indicators cannot appear within a scalar as well. + if ch in u',?[]{}': + flow_indicators = True + if ch == u':': + flow_indicators = True + if followed_by_space: + block_indicators = True + if ch == u'#' and preceeded_by_space: + flow_indicators = True + block_indicators = True + + # Check for line breaks, special, and unicode characters. + + if ch in u'\n\x85\u2028\u2029': + line_breaks = True + if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'): + if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF' + or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF': + unicode_characters = True + if not self.allow_unicode: + special_characters = True + else: + special_characters = True + + # Spaces, line breaks, and how they are mixed. State machine. + + # Start or continue series of whitespaces. + if ch in u' \n\x85\u2028\u2029': + if spaces and breaks: + if ch != u' ': # break+ (space+ break+) => mixed + mixed = True + elif spaces: + if ch != u' ': # (space+ break+) => mixed + breaks = True + mixed = True + elif breaks: + if ch == u' ': # break+ space+ + spaces = True + else: + leading = (index == 0) + if ch == u' ': # space+ + spaces = True + else: # break+ + breaks = True + + # Series of whitespaces ended with a non-space. + elif spaces or breaks: + if leading: + if spaces and breaks: + mixed_breaks_spaces = True + elif spaces: + leading_spaces = True + elif breaks: + leading_breaks = True + else: + if mixed: + mixed_breaks_spaces = True + elif spaces and breaks: + inline_breaks_spaces = True + elif spaces: + inline_spaces = True + elif breaks: + inline_breaks = True + spaces = breaks = mixed = leading = False + + # Series of whitespaces reach the end. + if (spaces or breaks) and (index == len(scalar)-1): + if spaces and breaks: + mixed_breaks_spaces = True + elif spaces: + trailing_spaces = True + if leading: + leading_spaces = True + elif breaks: + trailing_breaks = True + if leading: + leading_breaks = True + spaces = breaks = mixed = leading = False + + # Prepare for the next character. + index += 1 + preceeded_by_space = (ch in u'\0 \t\r\n\x85\u2028\u2029') + followed_by_space = (index+1 >= len(scalar) or + scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029') + + # Let's decide what styles are allowed. + allow_flow_plain = True + allow_block_plain = True + allow_single_quoted = True + allow_double_quoted = True + allow_block = True + + # Leading and trailing whitespace are bad for plain scalars. We also + # do not want to mess with leading whitespaces for block scalars. + if leading_spaces or leading_breaks or trailing_spaces: + allow_flow_plain = allow_block_plain = allow_block = False + + # Trailing breaks are fine for block scalars, but unacceptable for + # plain scalars. 
+ if trailing_breaks: + allow_flow_plain = allow_block_plain = False + + # The combination of (space+ break+) is only acceptable for block + # scalars. + if inline_breaks_spaces: + allow_flow_plain = allow_block_plain = allow_single_quoted = False + + # Mixed spaces and breaks, as well as special character are only + # allowed for double quoted scalars. + if mixed_breaks_spaces or special_characters: + allow_flow_plain = allow_block_plain = \ + allow_single_quoted = allow_block = False + + # We don't emit multiline plain scalars. + if line_breaks: + allow_flow_plain = allow_block_plain = False + + # Flow indicators are forbidden for flow plain scalars. + if flow_indicators: + allow_flow_plain = False + + # Block indicators are forbidden for block plain scalars. + if block_indicators: + allow_block_plain = False + + return ScalarAnalysis(scalar=scalar, + empty=False, multiline=line_breaks, + allow_flow_plain=allow_flow_plain, + allow_block_plain=allow_block_plain, + allow_single_quoted=allow_single_quoted, + allow_double_quoted=allow_double_quoted, + allow_block=allow_block) + + # Writers. + + def flush_stream(self): + if hasattr(self.stream, 'flush'): + self.stream.flush() + + def write_stream_start(self): + # Write BOM if needed. + if self.encoding and self.encoding.startswith('utf-16'): + self.stream.write(u'\xFF\xFE'.encode(self.encoding)) + + def write_stream_end(self): + self.flush_stream() + + def write_indicator(self, indicator, need_whitespace, + whitespace=False, indention=False): + if self.whitespace or not need_whitespace: + data = indicator + else: + data = u' '+indicator + self.whitespace = whitespace + self.indention = self.indention and indention + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_indent(self): + indent = self.indent or 0 + if not self.indention or self.column > indent \ + or (self.column == indent and not self.whitespace): + self.write_line_break() + if self.column < indent: + self.whitespace = True + data = u' '*(indent-self.column) + self.column = indent + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_line_break(self, data=None): + if data is None: + data = self.best_line_break + self.whitespace = True + self.indention = True + self.line += 1 + self.column = 0 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + + def write_version_directive(self, version_text): + data = u'%%YAML %s' % version_text + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + def write_tag_directive(self, handle_text, prefix_text): + data = u'%%TAG %s %s' % (handle_text, prefix_text) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_line_break() + + # Scalar streams. 
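The scalar writers that follow honour the analysis above; in particular, whether non-ASCII characters are escaped or written through depends on allow_unicode. A hedged sketch:

    import yaml

    print yaml.dump(u'r\xe9sum\xe9')                      # accented characters escaped inside a double-quoted scalar
    print yaml.dump(u'r\xe9sum\xe9', allow_unicode=True)  # characters written as-is (UTF-8 encoded by default)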
+ + def write_single_quoted(self, text, split=True): + self.write_indicator(u'\'', True) + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch is None or ch != u' ': + if start+1 == end and self.column > self.best_width and split \ + and start != 0 and end != len(text): + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch is None or ch not in u'\n\x85\u2028\u2029': + if text[start] == u'\n': + self.write_line_break() + for br in text[start:end]: + if br == u'\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + start = end + else: + if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'': + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch == u'\'': + data = u'\'\'' + self.column += 2 + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + 1 + if ch is not None: + spaces = (ch == u' ') + breaks = (ch in u'\n\x85\u2028\u2029') + end += 1 + self.write_indicator(u'\'', False) + + ESCAPE_REPLACEMENTS = { + u'\0': u'0', + u'\x07': u'a', + u'\x08': u'b', + u'\x09': u't', + u'\x0A': u'n', + u'\x0B': u'v', + u'\x0C': u'f', + u'\x0D': u'r', + u'\x1B': u'e', + u'\"': u'\"', + u'\\': u'\\', + u'\x85': u'N', + u'\xA0': u'_', + u'\u2028': u'L', + u'\u2029': u'P', + } + + def write_double_quoted(self, text, split=True): + self.write_indicator(u'"', True) + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \ + or not (u'\x20' <= ch <= u'\x7E' + or (self.allow_unicode + and (u'\xA0' <= ch <= u'\uD7FF' + or u'\uE000' <= ch <= u'\uFFFD'))): + if start < end: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + if ch in self.ESCAPE_REPLACEMENTS: + data = u'\\'+self.ESCAPE_REPLACEMENTS[ch] + elif ch <= u'\xFF': + data = u'\\x%02X' % ord(ch) + elif ch <= u'\uFFFF': + data = u'\\u%04X' % ord(ch) + else: + data = u'\\U%08X' % ord(ch) + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end+1 + if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \ + and self.column+(end-start) > self.best_width and split: + data = text[start:end]+u'\\' + if start < end: + start = end + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.write_indent() + self.whitespace = False + self.indention = False + if text[start] == u' ': + data = u'\\' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + end += 1 + self.write_indicator(u'"', False) + + def determine_chomp(self, text): + tail = text[-2:] + while len(tail) < 2: + tail = u' '+tail + if tail[-1] in u'\n\x85\u2028\u2029': + if tail[-2] in u'\n\x85\u2028\u2029': + return u'+' + else: + return u'' + else: + return u'-' + + def write_folded(self, text): + chomp = self.determine_chomp(text) + self.write_indicator(u'>'+chomp, True) + self.write_indent() + leading_space = False + spaces = False + breaks = False + start = end = 0 + while end <= 
len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in u'\n\x85\u2028\u2029': + if not leading_space and ch is not None and ch != u' ' \ + and text[start] == u'\n': + self.write_line_break() + leading_space = (ch == u' ') + for br in text[start:end]: + if br == u'\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + elif spaces: + if ch != u' ': + if start+1 == end and self.column > self.best_width: + self.write_indent() + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + else: + if ch is None or ch in u' \n\x85\u2028\u2029': + data = text[start:end] + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in u'\n\x85\u2028\u2029') + spaces = (ch == u' ') + end += 1 + + def write_literal(self, text): + chomp = self.determine_chomp(text) + self.write_indicator(u'|'+chomp, True) + self.write_indent() + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if breaks: + if ch is None or ch not in u'\n\x85\u2028\u2029': + for br in text[start:end]: + if br == u'\n': + self.write_line_break() + else: + self.write_line_break(br) + if ch is not None: + self.write_indent() + start = end + else: + if ch is None or ch in u'\n\x85\u2028\u2029': + data = text[start:end] + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + if ch is None: + self.write_line_break() + start = end + if ch is not None: + breaks = (ch in u'\n\x85\u2028\u2029') + end += 1 + + def write_plain(self, text, split=True): + if not text: + return + if not self.whitespace: + data = u' ' + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + self.writespace = False + self.indention = False + spaces = False + breaks = False + start = end = 0 + while end <= len(text): + ch = None + if end < len(text): + ch = text[end] + if spaces: + if ch != u' ': + if start+1 == end and self.column > self.best_width and split: + self.write_indent() + self.writespace = False + self.indention = False + else: + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + elif breaks: + if ch not in u'\n\x85\u2028\u2029': + if text[start] == u'\n': + self.write_line_break() + for br in text[start:end]: + if br == u'\n': + self.write_line_break() + else: + self.write_line_break(br) + self.write_indent() + self.whitespace = False + self.indention = False + start = end + else: + if ch is None or ch in u' \n\x85\u2028\u2029': + data = text[start:end] + self.column += len(data) + if self.encoding: + data = data.encode(self.encoding) + self.stream.write(data) + start = end + if ch is not None: + spaces = (ch == u' ') + breaks = (ch in u'\n\x85\u2028\u2029') + end += 1 + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/error.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/error.py new file mode 100644 index 00000000..8fa916b2 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/error.py @@ -0,0 +1,75 @@ + +__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] + +class Mark: + + def __init__(self, name, index, line, column, buffer, pointer): + self.name = name + self.index = index + 
self.line = line + self.column = column + self.buffer = buffer + self.pointer = pointer + + def get_snippet(self, indent=4, max_length=75): + if self.buffer is None: + return None + head = '' + start = self.pointer + while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029': + start -= 1 + if self.pointer-start > max_length/2-1: + head = ' ... ' + start += 5 + break + tail = '' + end = self.pointer + while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029': + end += 1 + if end-self.pointer > max_length/2-1: + tail = ' ... ' + end -= 5 + break + snippet = self.buffer[start:end].encode('utf-8') + return ' '*indent + head + snippet + tail + '\n' \ + + ' '*(indent+self.pointer-start+len(head)) + '^' + + def __str__(self): + snippet = self.get_snippet() + where = " in \"%s\", line %d, column %d" \ + % (self.name, self.line+1, self.column+1) + if snippet is not None: + where += ":\n"+snippet + return where + +class YAMLError(Exception): + pass + +class MarkedYAMLError(YAMLError): + + def __init__(self, context=None, context_mark=None, + problem=None, problem_mark=None, note=None): + self.context = context + self.context_mark = context_mark + self.problem = problem + self.problem_mark = problem_mark + self.note = note + + def __str__(self): + lines = [] + if self.context is not None: + lines.append(self.context) + if self.context_mark is not None \ + and (self.problem is None or self.problem_mark is None + or self.context_mark.name != self.problem_mark.name + or self.context_mark.line != self.problem_mark.line + or self.context_mark.column != self.problem_mark.column): + lines.append(str(self.context_mark)) + if self.problem is not None: + lines.append(self.problem) + if self.problem_mark is not None: + lines.append(str(self.problem_mark)) + if self.note is not None: + lines.append(self.note) + return '\n'.join(lines) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/events.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/events.py new file mode 100644 index 00000000..3f244fa0 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/events.py @@ -0,0 +1,86 @@ + +# Abstract classes. + +class Event: + def __init__(self, start_mark=None, end_mark=None): + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] + if hasattr(self, key)] + arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) + for key in attributes]) + return '%s(%s)' % (self.__class__.__name__, arguments) + +class NodeEvent(Event): + def __init__(self, anchor, start_mark=None, end_mark=None): + self.anchor = anchor + self.start_mark = start_mark + self.end_mark = end_mark + +class CollectionStartEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, + flow_style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class CollectionEndEvent(Event): + pass + +# Implementations. 
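# A sketch of the event stream these classes describe, for a small document
# such as "- 1\n- 2" (no explicit '---'):
#   StreamStartEvent, DocumentStartEvent(explicit=False), SequenceStartEvent,
#   ScalarEvent(u'1'), ScalarEvent(u'2'), SequenceEndEvent,
#   DocumentEndEvent(explicit=False), StreamEndEvent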
+ +class StreamStartEvent(Event): + def __init__(self, start_mark=None, end_mark=None, encoding=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.encoding = encoding + +class StreamEndEvent(Event): + pass + +class DocumentStartEvent(Event): + def __init__(self, start_mark=None, end_mark=None, + explicit=None, version=None, tags=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.explicit = explicit + self.version = version + self.tags = tags + +class DocumentEndEvent(Event): + def __init__(self, start_mark=None, end_mark=None, + explicit=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.explicit = explicit + +class AliasEvent(NodeEvent): + pass + +class ScalarEvent(NodeEvent): + def __init__(self, anchor, tag, implicit, value, + start_mark=None, end_mark=None, style=None): + self.anchor = anchor + self.tag = tag + self.implicit = implicit + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class SequenceStartEvent(CollectionStartEvent): + pass + +class SequenceEndEvent(CollectionEndEvent): + pass + +class MappingStartEvent(CollectionStartEvent): + pass + +class MappingEndEvent(CollectionEndEvent): + pass + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/loader.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/loader.py new file mode 100644 index 00000000..293ff467 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/loader.py @@ -0,0 +1,40 @@ + +__all__ = ['BaseLoader', 'SafeLoader', 'Loader'] + +from reader import * +from scanner import * +from parser import * +from composer import * +from constructor import * +from resolver import * + +class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + +class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + SafeConstructor.__init__(self) + Resolver.__init__(self) + +class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): + + def __init__(self, stream): + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + Constructor.__init__(self) + Resolver.__init__(self) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/nodes.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/nodes.py new file mode 100644 index 00000000..cb8c1cba --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/nodes.py @@ -0,0 +1,49 @@ + +class Node: + def __init__(self, tag, value, start_mark, end_mark): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + value = self.value + #if isinstance(value, list): + # if len(value) == 0: + # value = '<empty>' + # elif len(value) == 1: + # value = '<1 item>' + # else: + # value = '<%d items>' % len(value) + #else: + # if len(value) > 75: + # value = repr(value[:70]+u' ... 
') + # else: + # value = repr(value) + value = repr(value) + return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) + +class ScalarNode(Node): + id = 'scalar' + def __init__(self, tag, value, + start_mark=None, end_mark=None, style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + +class CollectionNode(Node): + def __init__(self, tag, value, + start_mark=None, end_mark=None, flow_style=None): + self.tag = tag + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + self.flow_style = flow_style + +class SequenceNode(CollectionNode): + id = 'sequence' + +class MappingNode(CollectionNode): + id = 'mapping' + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/parser.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/parser.py new file mode 100644 index 00000000..2aec0fe3 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/parser.py @@ -0,0 +1,484 @@ + +# YAML can be parsed by an LL(1) parser! +# +# We use the following production rules: +# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END +# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END? +# implicit_document ::= block_node DOCUMENT-END? +# block_node ::= ALIAS | properties? block_content +# flow_node ::= ALIAS | properties? flow_content +# properties ::= TAG ANCHOR? | ANCHOR TAG? +# block_content ::= block_collection | flow_collection | SCALAR +# flow_content ::= flow_collection | SCALAR +# block_collection ::= block_sequence | block_mapping +# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +# block_mapping ::= BLOCK-MAPPING_START ((KEY block_node_or_indentless_sequence?)? (VALUE block_node_or_indentless_sequence?)?)* BLOCK-END +# block_node_or_indentless_sequence ::= ALIAS | properties? (block_content | indentless_block_sequence) +# indentless_block_sequence ::= (BLOCK-ENTRY block_node?)+ +# flow_collection ::= flow_sequence | flow_mapping +# flow_sequence ::= FLOW-SEQUENCE-START (flow_sequence_entry FLOW-ENTRY)* flow_sequence_entry? FLOW-SEQUENCE-END +# flow_mapping ::= FLOW-MAPPING-START (flow_mapping_entry FLOW-ENTRY)* flow_mapping_entry? FLOW-MAPPING-END +# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + +# TODO: support for BOM within a stream. +# stream ::= (BOM? implicit_document)? (BOM? 
explicit_document)* STREAM-END + +# FIRST sets: +# stream: { STREAM-START } +# explicit_document: { DIRECTIVE DOCUMENT-START } +# implicit_document: FIRST(block_node) +# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } +# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } +# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } +# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } +# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } +# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } +# block_sequence: { BLOCK-SEQUENCE-START } +# block_mapping: { BLOCK-MAPPING-START } +# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } +# indentless_sequence: { ENTRY } +# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } +# flow_sequence: { FLOW-SEQUENCE-START } +# flow_mapping: { FLOW-MAPPING-START } +# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } +# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } + +__all__ = ['Parser', 'ParserError'] + +from error import MarkedYAMLError +from tokens import * +from events import * +from scanner import * + +class ParserError(MarkedYAMLError): + pass + +class Parser: + # Since writing a recursive-descendant parser is a straightforward task, we + # do not give many comments here. + # Note that we use Python generators. If you rewrite the parser in another + # language, you may replace all 'yield'-s with event handler calls. + + DEFAULT_TAGS = { + u'!': u'!', + u'!!': u'tag:yaml.org,2002:', + } + + def __init__(self): + self.current_event = None + self.yaml_version = None + self.tag_handles = {} + self.event_generator = self.parse_stream() + + def check_event(self, *choices): + # Check the type of the next event. + if self.current_event is None: + try: + self.current_event = self.event_generator.next() + except StopIteration: + pass + if self.current_event is not None: + if not choices: + return True + for choice in choices: + if isinstance(self.current_event, choice): + return True + return False + + def peek_event(self): + # Get the next event. + if self.current_event is None: + try: + self.current_event = self.event_generator.next() + except StopIteration: + pass + return self.current_event + + def get_event(self): + # Get the next event. + if self.current_event is None: + try: + self.current_event = self.event_generator.next() + except StopIteration: + pass + value = self.current_event + self.current_event = None + return value + + def __iter__(self): + # Iterator protocol. + return self.event_generator + + def parse_stream(self): + # STREAM-START implicit_document? explicit_document* STREAM-END + + # Parse start of stream. + token = self.get_token() + yield StreamStartEvent(token.start_mark, token.end_mark, + encoding=token.encoding) + + # Parse implicit document. 
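        # (an "implicit document" here is one that is not introduced by an
        # explicit '---' marker)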
+ if not self.check_token(DirectiveToken, DocumentStartToken, + StreamEndToken): + self.tag_handles = self.DEFAULT_TAGS + token = self.peek_token() + start_mark = end_mark = token.start_mark + yield DocumentStartEvent(start_mark, end_mark, + explicit=False) + for event in self.parse_block_node(): + yield event + token = self.peek_token() + start_mark = end_mark = token.start_mark + explicit = False + while self.check_token(DocumentEndToken): + token = self.get_token() + end_mark = token.end_mark + explicit = True + yield DocumentEndEvent(start_mark, end_mark, + explicit=explicit) + + # Parse explicit documents. + while not self.check_token(StreamEndToken): + token = self.peek_token() + start_mark = token.start_mark + version, tags = self.process_directives() + if not self.check_token(DocumentStartToken): + raise ParserError(None, None, + "expected '<document start>', but found %r" + % self.peek_token().id, + self.peek_token().start_mark) + token = self.get_token() + end_mark = token.end_mark + yield DocumentStartEvent(start_mark, end_mark, + explicit=True, version=version, tags=tags) + if self.check_token(DirectiveToken, + DocumentStartToken, DocumentEndToken, StreamEndToken): + yield self.process_empty_scalar(token.end_mark) + else: + for event in self.parse_block_node(): + yield event + token = self.peek_token() + start_mark = end_mark = token.start_mark + explicit = False + while self.check_token(DocumentEndToken): + token = self.get_token() + end_mark = token.end_mark + explicit=True + yield DocumentEndEvent(start_mark, end_mark, + explicit=explicit) + + # Parse end of stream. + token = self.get_token() + yield StreamEndEvent(token.start_mark, token.end_mark) + + def process_directives(self): + # DIRECTIVE* + self.yaml_version = None + self.tag_handles = {} + while self.check_token(DirectiveToken): + token = self.get_token() + if token.name == u'YAML': + if self.yaml_version is not None: + raise ParserError(None, None, + "found duplicate YAML directive", token.start_mark) + major, minor = token.value + if major != 1: + raise ParserError(None, None, + "found incompatible YAML document (version 1.* is required)", + token.start_mark) + self.yaml_version = token.value + elif token.name == u'TAG': + handle, prefix = token.value + if handle in self.tag_handles: + raise ParserError(None, None, + "duplicate tag handle %r" % handle.encode('utf-8'), + token.start_mark) + self.tag_handles[handle] = prefix + if self.tag_handles: + value = self.yaml_version, self.tag_handles.copy() + else: + value = self.yaml_version, None + for key in self.DEFAULT_TAGS: + if key not in self.tag_handles: + self.tag_handles[key] = self.DEFAULT_TAGS[key] + return value + + def parse_block_node(self): + return self.parse_node(block=True) + + def parse_flow_node(self): + return self.parse_node() + + def parse_block_node_or_indentless_sequence(self): + return self.parse_node(block=True, indentless_sequence=True) + + def parse_node(self, block=False, indentless_sequence=False): + # block_node ::= ALIAS | properties? block_content + # flow_node ::= ALIAS | properties? flow_content + # properties ::= TAG ANCHOR? | ANCHOR TAG? + # block_content ::= block_collection | flow_collection | SCALAR + # flow_content ::= flow_collection | SCALAR + # block_collection ::= block_sequence | block_mapping + # block_node_or_indentless_sequence ::= ALIAS | properties? 
+ # (block_content | indentless_block_sequence) + if self.check_token(AliasToken): + token = self.get_token() + yield AliasEvent(token.value, token.start_mark, token.end_mark) + else: + anchor = None + tag = None + start_mark = end_mark = tag_mark = None + if self.check_token(AnchorToken): + token = self.get_token() + start_mark = token.start_mark + end_mark = token.end_mark + anchor = token.value + if self.check_token(TagToken): + token = self.get_token() + tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + elif self.check_token(TagToken): + token = self.get_token() + start_mark = tag_mark = token.start_mark + end_mark = token.end_mark + tag = token.value + if self.check_token(AnchorToken): + token = self.get_token() + end_mark = token.end_mark + anchor = token.value + if tag is not None and tag != u'!': + handle, suffix = tag + if handle is not None: + if handle not in self.tag_handles: + raise ParserError("while parsing a node", start_mark, + "found undefined tag handle %r" % handle.encode('utf-8'), + tag_mark) + tag = self.tag_handles[handle]+suffix + else: + tag = suffix + #if tag == u'!': + # raise ParserError("while parsing a node", start_mark, + # "found non-specific tag '!'", tag_mark, + # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") + if start_mark is None: + start_mark = end_mark = self.peek_token().start_mark + event = None + collection_events = None + implicit = (tag is None or tag == u'!') + if indentless_sequence and self.check_token(BlockEntryToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark) + collection_events = self.parse_indentless_sequence() + else: + if self.check_token(ScalarToken): + token = self.get_token() + end_mark = token.end_mark + if (token.plain and tag is None) or tag == u'!': + implicit = (True, False) + elif tag is None: + implicit = (False, True) + else: + implicit = (False, False) + event = ScalarEvent(anchor, tag, implicit, token.value, + start_mark, end_mark, style=token.style) + elif self.check_token(FlowSequenceStartToken): + end_mark = self.peek_token().end_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + collection_events = self.parse_flow_sequence() + elif self.check_token(FlowMappingStartToken): + end_mark = self.peek_token().end_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=True) + collection_events = self.parse_flow_mapping() + elif block and self.check_token(BlockSequenceStartToken): + end_mark = self.peek_token().start_mark + event = SequenceStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + collection_events = self.parse_block_sequence() + elif block and self.check_token(BlockMappingStartToken): + end_mark = self.peek_token().start_mark + event = MappingStartEvent(anchor, tag, implicit, + start_mark, end_mark, flow_style=False) + collection_events = self.parse_block_mapping() + elif anchor is not None or tag is not None: + # Empty scalars are allowed even if a tag or an anchor is + # specified. 
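                # For example, "key: !!str" or "key: &a" with nothing after
                # the tag/anchor still yields a (zero-length) ScalarEvent.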
+ event = ScalarEvent(anchor, tag, (implicit, False), u'', + start_mark, end_mark) + else: + if block: + node = 'block' + else: + node = 'flow' + token = self.peek_token() + raise ParserError("while scanning a %s node" % node, start_mark, + "expected the node content, but found %r" % token.id, + token.start_mark) + yield event + if collection_events is not None: + for event in collection_events: + yield event + + def parse_block_sequence(self): + # BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END + token = self.get_token() + start_mark = token.start_mark + while self.check_token(BlockEntryToken): + token = self.get_token() + if not self.check_token(BlockEntryToken, BlockEndToken): + for event in self.parse_block_node(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + if not self.check_token(BlockEndToken): + token = self.peek_token() + raise ParserError("while scanning a block collection", start_mark, + "expected <block end>, but found %r" % token.id, token.start_mark) + token = self.get_token() + yield SequenceEndEvent(token.start_mark, token.end_mark) + + def parse_indentless_sequence(self): + # (BLOCK-ENTRY block_node?)+ + while self.check_token(BlockEntryToken): + token = self.get_token() + if not self.check_token(BlockEntryToken, + KeyToken, ValueToken, BlockEndToken): + for event in self.parse_block_node(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + token = self.peek_token() + yield SequenceEndEvent(token.start_mark, token.start_mark) + + def parse_block_mapping(self): + # BLOCK-MAPPING_START + # ((KEY block_node_or_indentless_sequence?)? + # (VALUE block_node_or_indentless_sequence?)?)* + # BLOCK-END + token = self.get_token() + start_mark = token.start_mark + while self.check_token(KeyToken, ValueToken): + if self.check_token(KeyToken): + token = self.get_token() + if not self.check_token(KeyToken, ValueToken, BlockEndToken): + for event in self.parse_block_node_or_indentless_sequence(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(KeyToken, ValueToken, BlockEndToken): + for event in self.parse_block_node_or_indentless_sequence(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + else: + token = self.peek_token() + yield self.process_empty_scalar(token.start_mark) + if not self.check_token(BlockEndToken): + token = self.peek_token() + raise ParserError("while scanning a block mapping", start_mark, + "expected <block end>, but found %r" % token.id, token.start_mark) + token = self.get_token() + yield MappingEndEvent(token.start_mark, token.end_mark) + + def parse_flow_sequence(self): + # flow_sequence ::= FLOW-SEQUENCE-START + # (flow_sequence_entry FLOW-ENTRY)* + # flow_sequence_entry? + # FLOW-SEQUENCE-END + # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + # + # Note that while production rules for both flow_sequence_entry and + # flow_mapping_entry are equal, their interpretations are different. + # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` + # generate an inline mapping (set syntax). 
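        # For example, the flow sequence "[one: two, three]" is parsed as a
        # two-item sequence whose first item is the single-pair mapping
        # {one: two}.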
+ token = self.get_token() + start_mark = token.start_mark + while not self.check_token(FlowSequenceEndToken): + if self.check_token(KeyToken): + token = self.get_token() + yield MappingStartEvent(None, None, True, + token.start_mark, token.end_mark, + flow_style=True) + if not self.check_token(ValueToken, + FlowEntryToken, FlowSequenceEndToken): + for event in self.parse_flow_node(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowSequenceEndToken): + for event in self.parse_flow_node(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + else: + token = self.peek_token() + yield self.process_empty_scalar(token.start_mark) + token = self.peek_token() + yield MappingEndEvent(token.start_mark, token.start_mark) + else: + for event in self.parse_flow_node(): + yield event + if not self.check_token(FlowEntryToken, FlowSequenceEndToken): + token = self.peek_token() + raise ParserError("while scanning a flow sequence", start_mark, + "expected ',' or ']', but got %r" % token.id, token.start_mark) + if self.check_token(FlowEntryToken): + self.get_token() + token = self.get_token() + yield SequenceEndEvent(token.start_mark, token.end_mark) + + def parse_flow_mapping(self): + # flow_mapping ::= FLOW-MAPPING-START + # (flow_mapping_entry FLOW-ENTRY)* + # flow_mapping_entry? + # FLOW-MAPPING-END + # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + token = self.get_token() + start_mark = token.start_mark + while not self.check_token(FlowMappingEndToken): + if self.check_token(KeyToken): + token = self.get_token() + if not self.check_token(ValueToken, + FlowEntryToken, FlowMappingEndToken): + for event in self.parse_flow_node(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + if self.check_token(ValueToken): + token = self.get_token() + if not self.check_token(FlowEntryToken, FlowMappingEndToken): + for event in self.parse_flow_node(): + yield event + else: + yield self.process_empty_scalar(token.end_mark) + else: + token = self.peek_token() + yield self.process_empty_scalar(token.start_mark) + else: + for event in self.parse_flow_node(): + yield event + yield self.process_empty_scalar(self.peek_token().start_mark) + if not self.check_token(FlowEntryToken, FlowMappingEndToken): + token = self.peek_token() + raise ParserError("while scanning a flow mapping", start_mark, + "expected ',' or '}', but got %r" % token.id, token.start_mark) + if self.check_token(FlowEntryToken): + self.get_token() + if not self.check_token(FlowMappingEndToken): + token = self.peek_token() + raise ParserError("while scanning a flow mapping", start_mark, + "expected '}', but found %r" % token.id, token.start_mark) + token = self.get_token() + yield MappingEndEvent(token.start_mark, token.end_mark) + + def process_empty_scalar(self, mark): + return ScalarEvent(None, None, (True, False), u'', mark, mark) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/reader.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/reader.py new file mode 100644 index 00000000..beb76d0a --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/reader.py @@ -0,0 +1,222 @@ +# This module contains abstractions for the input stream. You don't have to +# looks further, there are no pretty code. +# +# We define two classes here. +# +# Mark(source, line, column) +# It's just a record and its only use is producing nice error messages. 
+# Parser does not use it for any other purposes. +# +# Reader(source, data) +# Reader determines the encoding of `data` and converts it to unicode. +# Reader provides the following methods and attributes: +# reader.peek(length=1) - return the next `length` characters +# reader.forward(length=1) - move the current position to `length` characters. +# reader.index - the number of the current character. +# reader.line, stream.column - the line and the column of the current character. + +__all__ = ['Reader', 'ReaderError'] + +from error import YAMLError, Mark + +import codecs, re + +# Unfortunately, codec functions in Python 2.3 does not support the `finish` +# arguments, so we have to write our own wrappers. + +try: + codecs.utf_8_decode('', 'strict', False) + from codecs import utf_8_decode, utf_16_le_decode, utf_16_be_decode + +except TypeError: + + def utf_16_le_decode(data, errors, finish=False): + if not finish and len(data) % 2 == 1: + data = data[:-1] + return codecs.utf_16_le_decode(data, errors) + + def utf_16_be_decode(data, errors, finish=False): + if not finish and len(data) % 2 == 1: + data = data[:-1] + return codecs.utf_16_be_decode(data, errors) + + def utf_8_decode(data, errors, finish=False): + if not finish: + # We are trying to remove a possible incomplete multibyte character + # from the suffix of the data. + # The first byte of a multi-byte sequence is in the range 0xc0 to 0xfd. + # All further bytes are in the range 0x80 to 0xbf. + # UTF-8 encoded UCS characters may be up to six bytes long. + count = 0 + while count < 5 and count < len(data) \ + and '\x80' <= data[-count-1] <= '\xBF': + count -= 1 + if count < 5 and count < len(data) \ + and '\xC0' <= data[-count-1] <= '\xFD': + data = data[:-count-1] + return codecs.utf_8_decode(data, errors) + +class ReaderError(YAMLError): + + def __init__(self, name, position, character, encoding, reason): + self.name = name + self.character = character + self.position = position + self.encoding = encoding + self.reason = reason + + def __str__(self): + if isinstance(self.character, str): + return "'%s' codec can't decode byte #x%02x: %s\n" \ + " in \"%s\", position %d" \ + % (self.encoding, ord(self.character), self.reason, + self.name, self.position) + else: + return "unacceptable character #x%04x: %s\n" \ + " in \"%s\", position %d" \ + % (ord(self.character), self.reason, + self.name, self.position) + +class Reader: + # Reader: + # - determines the data encoding and converts it to unicode, + # - checks if characters are in allowed range, + # - adds '\0' to the end. + + # Reader accepts + # - a `str` object, + # - a `unicode` object, + # - a file-like object with its `read` method returning `str`, + # - a file-like object with its `read` method returning `unicode`. + + # Yeah, it's ugly and slow. 
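    # A rough usage sketch, using only the methods defined below:
    #     reader = Reader(open('document.yaml'))
    #     reader.peek()       # next character, without advancing
    #     reader.prefix(3)    # next three characters, without advancing
    #     reader.forward(3)   # advance; index, line and column are updated
    #     reader.get_mark()   # a Mark suitable for error messages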
+ + def __init__(self, stream): + self.name = None + self.stream = None + self.stream_pointer = 0 + self.eof = True + self.buffer = u'' + self.pointer = 0 + self.raw_buffer = None + self.raw_decode = None + self.encoding = None + self.index = 0 + self.line = 0 + self.column = 0 + if isinstance(stream, unicode): + self.name = "<unicode string>" + self.check_printable(stream) + self.buffer = stream+u'\0' + elif isinstance(stream, str): + self.name = "<string>" + self.raw_buffer = stream + self.determine_encoding() + else: + self.stream = stream + self.name = getattr(stream, 'name', "<file>") + self.eof = False + self.raw_buffer = '' + self.determine_encoding() + + def peek(self, index=0): + if self.pointer+index+1 >= len(self.buffer): + self.update(index+1) + return self.buffer[self.pointer+index] + + def prefix(self, length=1): + if self.pointer+length >= len(self.buffer): + self.update(length) + return self.buffer[self.pointer:self.pointer+length] + + def forward(self, length=1): + if self.pointer+length+1 >= len(self.buffer): + self.update(length+1) + for k in range(length): + ch = self.buffer[self.pointer] + self.pointer += 1 + self.index += 1 + if ch in u'\n\x85\u2028\u2029' \ + or (ch == u'\r' and self.buffer[self.pointer+1] != u'\n'): + self.line += 1 + self.column = 0 + elif ch != u'\uFEFF': + self.column += 1 + + def get_mark(self): + if self.stream is None: + return Mark(self.name, self.index, self.line, self.column, + self.buffer, self.pointer) + else: + return Mark(self.name, self.index, self.line, self.column, + None, None) + + def determine_encoding(self): + while not self.eof and len(self.raw_buffer) < 2: + self.update_raw() + if not isinstance(self.raw_buffer, unicode): + if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): + self.raw_decode = utf_16_le_decode + self.encoding = 'utf-16-le' + elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): + self.raw_decode = utf_16_be_decode + self.encoding = 'utf-16-be' + else: + self.raw_decode = utf_8_decode + self.encoding = 'utf-8' + self.update(1) + + NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]') + def check_printable(self, data): + match = self.NON_PRINTABLE.search(data) + if match: + character = match.group() + position = self.index+(len(self.buffer)-self.pointer)+match.start() + raise ReaderError(self.name, position, character, + 'unicode', "special characters are not allowed") + + def update(self, length): + if self.raw_buffer is None: + return + self.buffer = self.buffer[self.pointer:] + self.pointer = 0 + while len(self.buffer) < length: + if not self.eof: + self.update_raw() + if self.raw_decode is not None: + try: + data, converted = self.raw_decode(self.raw_buffer, + 'strict', self.eof) + except UnicodeDecodeError, exc: + character = exc.object[exc.start] + if self.stream is not None: + position = self.stream_pointer-len(self.raw_buffer)+exc.start + else: + position = exc.start + raise ReaderError(self.name, position, character, + exc.encoding, exc.reason) + else: + data = self.raw_buffer + converted = len(data) + self.check_printable(data) + self.buffer += data + self.raw_buffer = self.raw_buffer[converted:] + if self.eof: + self.buffer += u'\0' + self.raw_buffer = None + break + + def update_raw(self, size=1024): + data = self.stream.read(size) + if data: + self.raw_buffer += data + self.stream_pointer += len(data) + else: + self.eof = True + +#try: +# import psyco +# psyco.bind(Reader) +#except ImportError: +# pass + diff --git 
a/scripts/external_libs/PyYAML-3.01/lib/yaml/representer.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/representer.py new file mode 100644 index 00000000..cb37169d --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/representer.py @@ -0,0 +1,501 @@ + +__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', + 'RepresenterError'] + +from error import * +from nodes import * + +try: + import datetime + datetime_available = True +except ImportError: + datetime_available = False + +try: + set +except NameError: + from sets import Set as set + +import sys, copy_reg + +class RepresenterError(YAMLError): + pass + +class BaseRepresenter: + + yaml_representers = {} + yaml_multi_representers = {} + + def __init__(self, default_style=None, default_flow_style=None): + self.default_style = default_style + self.default_flow_style = default_flow_style + self.represented_objects = {} + + def represent(self, data): + node = self.represent_data(data) + self.serialize(node) + self.represented_objects = {} + + class C: pass + c = C() + def f(): pass + classobj_type = type(C) + instance_type = type(c) + function_type = type(f) + builtin_function_type = type(abs) + module_type = type(sys) + del C, c, f + + def get_classobj_bases(self, cls): + bases = [cls] + for base in cls.__bases__: + bases.extend(self.get_classobj_bases(base)) + return bases + + def represent_data(self, data): + if self.ignore_aliases(data): + alias_key = None + else: + alias_key = id(data) + if alias_key is not None: + if alias_key in self.represented_objects: + node = self.represented_objects[alias_key] + if node is None: + raise RepresenterError("recursive objects are not allowed: %r" % data) + return node + self.represented_objects[alias_key] = None + data_types = type(data).__mro__ + if type(data) is self.instance_type: + data_types = self.get_classobj_bases(data.__class__)+list(data_types) + if data_types[0] in self.yaml_representers: + node = self.yaml_representers[data_types[0]](self, data) + else: + for data_type in data_types: + if data_type in self.yaml_multi_representers: + node = self.yaml_multi_representers[data_type](self, data) + break + else: + if None in self.yaml_multi_representers: + node = self.yaml_multi_representers[None](self, data) + elif None in self.yaml_representers: + node = self.yaml_representers[None](self, data) + else: + node = ScalarNode(None, unicode(data)) + if alias_key is not None: + self.represented_objects[alias_key] = node + return node + + def add_representer(cls, data_type, representer): + if not 'yaml_representers' in cls.__dict__: + cls.yaml_representers = cls.yaml_representers.copy() + cls.yaml_representers[data_type] = representer + add_representer = classmethod(add_representer) + + def add_multi_representer(cls, data_type, representer): + if not 'yaml_multi_representers' in cls.__dict__: + cls.yaml_multi_representers = cls.yaml_multi_representers.copy() + cls.yaml_multi_representers[data_type] = representer + add_multi_representer = classmethod(add_multi_representer) + + def represent_scalar(self, tag, value, style=None): + if style is None: + style = self.default_style + return ScalarNode(tag, value, style=style) + + def represent_sequence(self, tag, sequence, flow_style=None): + best_style = True + value = [] + for item in sequence: + node_item = self.represent_data(item) + if not (isinstance(node_item, ScalarNode) and not node_item.style): + best_style = False + value.append(self.represent_data(item)) + if flow_style is None: + flow_style = self.default_flow_style + if 
flow_style is None: + flow_style = best_style + return SequenceNode(tag, value, flow_style=flow_style) + + def represent_mapping(self, tag, mapping, flow_style=None): + best_style = True + if hasattr(mapping, 'keys'): + value = {} + for item_key in mapping.keys(): + item_value = mapping[item_key] + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, ScalarNode) and not node_value.style): + best_style = False + value[node_key] = node_value + else: + value = [] + for item_key, item_value in mapping: + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, ScalarNode) and not node_value.style): + best_style = False + value.append((node_key, node_value)) + if flow_style is None: + flow_style = self.default_flow_style + if flow_style is None: + flow_style = best_style + return MappingNode(tag, value, flow_style=flow_style) + + def ignore_aliases(self, data): + return False + +class SafeRepresenter(BaseRepresenter): + + def ignore_aliases(self, data): + if data in [None, ()]: + return True + if isinstance(data, (str, unicode, bool, int, float)): + return True + + def represent_none(self, data): + return self.represent_scalar(u'tag:yaml.org,2002:null', + u'null') + + def represent_str(self, data): + tag = None + style = None + try: + data = unicode(data, 'ascii') + tag = u'tag:yaml.org,2002:str' + except UnicodeDecodeError: + try: + data = unicode(data, 'utf-8') + tag = u'tag:yaml.org,2002:str' + except UnicodeDecodeError: + data = data.encode('base64') + tag = u'tag:yaml.org,2002:binary' + style = '|' + return self.represent_scalar(tag, data, style=style) + + def represent_unicode(self, data): + return self.represent_scalar(u'tag:yaml.org,2002:str', data) + + def represent_bool(self, data): + if data: + value = u'true' + else: + value = u'false' + return self.represent_scalar(u'tag:yaml.org,2002:bool', value) + + def represent_int(self, data): + return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) + + def represent_long(self, data): + return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) + + repr_pos_inf = repr(1e300000) + repr_neg_inf = repr(-1e300000) + repr_nan = repr(1e300000/1e300000) + + def represent_float(self, data): + repr_data = repr(data) + if repr_data == self.repr_pos_inf: + value = u'.inf' + elif repr_data == self.repr_neg_inf: + value = u'-.inf' + elif repr_data == self.repr_nan: + value = u'.nan' + else: + value = unicode(repr_data) + return self.represent_scalar(u'tag:yaml.org,2002:float', value) + + def represent_list(self, data): + pairs = (len(data) > 0 and isinstance(data, list)) + if pairs: + for item in data: + if not isinstance(item, tuple) or len(item) != 2: + pairs = False + break + if not pairs: + return self.represent_sequence(u'tag:yaml.org,2002:seq', data) + value = [] + for item_key, item_value in data: + value.append(self.represent_mapping(u'tag:yaml.org,2002:map', + [(item_key, item_value)])) + return SequenceNode(u'tag:yaml.org,2002:pairs', value) + + def represent_dict(self, data): + return self.represent_mapping(u'tag:yaml.org,2002:map', data) + + def represent_set(self, data): + value = {} + for key in data: + value[key] = None + return self.represent_mapping(u'tag:yaml.org,2002:set', value) + + def 
represent_date(self, data): + value = u'%04d-%02d-%02d' % (data.year, data.month, data.day) + return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) + + def represent_datetime(self, data): + value = u'%04d-%02d-%02d %02d:%02d:%02d' \ + % (data.year, data.month, data.day, + data.hour, data.minute, data.second) + if data.microsecond: + value += u'.' + unicode(data.microsecond/1000000.0).split(u'.')[1] + if data.utcoffset(): + value += unicode(data.utcoffset()) + return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) + + def represent_yaml_object(self, tag, data, cls, flow_style=None): + if hasattr(data, '__getstate__'): + state = data.__getstate__() + else: + state = data.__dict__.copy() + if isinstance(state, dict): + state = state.items() + state.sort() + return self.represent_mapping(tag, state, flow_style=flow_style) + + def represent_undefined(self, data): + raise RepresenterError("cannot represent an object: %s" % data) + +SafeRepresenter.add_representer(type(None), + SafeRepresenter.represent_none) + +SafeRepresenter.add_representer(str, + SafeRepresenter.represent_str) + +SafeRepresenter.add_representer(unicode, + SafeRepresenter.represent_unicode) + +SafeRepresenter.add_representer(bool, + SafeRepresenter.represent_bool) + +SafeRepresenter.add_representer(int, + SafeRepresenter.represent_int) + +SafeRepresenter.add_representer(long, + SafeRepresenter.represent_long) + +SafeRepresenter.add_representer(float, + SafeRepresenter.represent_float) + +SafeRepresenter.add_representer(list, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(tuple, + SafeRepresenter.represent_list) + +SafeRepresenter.add_representer(dict, + SafeRepresenter.represent_dict) + +SafeRepresenter.add_representer(set, + SafeRepresenter.represent_set) + +if datetime_available: + SafeRepresenter.add_representer(datetime.date, + SafeRepresenter.represent_date) + SafeRepresenter.add_representer(datetime.datetime, + SafeRepresenter.represent_datetime) + +SafeRepresenter.add_representer(None, + SafeRepresenter.represent_undefined) + +class Representer(SafeRepresenter): + + def represent_str(self, data): + tag = None + style = None + try: + data = unicode(data, 'ascii') + tag = u'tag:yaml.org,2002:str' + except UnicodeDecodeError: + try: + data = unicode(data, 'utf-8') + tag = u'tag:yaml.org,2002:python/str' + except UnicodeDecodeError: + data = data.encode('base64') + tag = u'tag:yaml.org,2002:binary' + style = '|' + return self.represent_scalar(tag, data, style=style) + + def represent_unicode(self, data): + tag = None + try: + data.encode('ascii') + tag = u'tag:yaml.org,2002:python/unicode' + except UnicodeEncodeError: + tag = u'tag:yaml.org,2002:str' + return self.represent_scalar(tag, data) + + def represent_long(self, data): + tag = u'tag:yaml.org,2002:int' + if int(data) is not data: + tag = u'tag:yaml.org,2002:python/long' + return self.represent_scalar(tag, unicode(data)) + + def represent_complex(self, data): + if data.imag == 0.0: + data = u'%r' % data.real + elif data.real == 0.0: + data = u'%rj' % data.imag + elif data.imag > 0: + data = u'%r+%rj' % (data.real, data.imag) + else: + data = u'%r%rj' % (data.real, data.imag) + return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data) + + def represent_tuple(self, data): + return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data) + + def represent_name(self, data): + name = u'%s.%s' % (data.__module__, data.__name__) + return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'') + 
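    # How a representer gets hooked in (a sketch with hypothetical names;
    # the actual registrations for the built-in types follow below):
    #     def point_representer(dumper, data):
    #         return dumper.represent_sequence(u'!point', [data.x, data.y])
    #     Representer.add_representer(Point, point_representer)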
+ def represent_module(self, data): + return self.represent_scalar( + u'tag:yaml.org,2002:python/module:'+data.__name__, u'') + + def represent_instance(self, data): + # For instances of classic classes, we use __getinitargs__ and + # __getstate__ to serialize the data. + + # If data.__getinitargs__ exists, the object must be reconstructed by + # calling cls(**args), where args is a tuple returned by + # __getinitargs__. Otherwise, the cls.__init__ method should never be + # called and the class instance is created by instantiating a trivial + # class and assigning to the instance's __class__ variable. + + # If data.__getstate__ exists, it returns the state of the object. + # Otherwise, the state of the object is data.__dict__. + + # We produce either a !!python/object or !!python/object/new node. + # If data.__getinitargs__ does not exist and state is a dictionary, we + # produce a !!python/object node . Otherwise we produce a + # !!python/object/new node. + + cls = data.__class__ + class_name = u'%s.%s' % (cls.__module__, cls.__name__) + args = None + state = None + if hasattr(data, '__getinitargs__'): + args = list(data.__getinitargs__()) + if hasattr(data, '__getstate__'): + state = data.__getstate__() + else: + state = data.__dict__ + if args is None and isinstance(state, dict): + state = state.items() + state.sort() + return self.represent_mapping( + u'tag:yaml.org,2002:python/object:'+class_name, state) + if isinstance(state, dict) and not state: + return self.represent_sequence( + u'tag:yaml.org,2002:python/object/new:'+class_name, args) + value = {} + if args: + value['args'] = args + value['state'] = state + return self.represent_mapping( + u'tag:yaml.org,2002:python/object/new:'+class_name, value) + + def represent_object(self, data): + # We use __reduce__ API to save the data. data.__reduce__ returns + # a tuple of length 2-5: + # (function, args, state, listitems, dictitems) + + # For reconstructing, we calls function(*args), then set its state, + # listitems, and dictitems if they are not None. + + # A special case is when function.__name__ == '__newobj__'. In this + # case we create the object with args[0].__new__(*args). + + # Another special case is when __reduce__ returns a string - we don't + # support it. + + # We produce a !!python/object, !!python/object/new or + # !!python/object/apply node. 
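        # For instance, a new-style object whose __reduce_ex__(2) comes back
        # as (copy_reg.__newobj__, (SomeClass,), {'x': 1}) would, in the
        # common case handled below, be emitted as a
        # "!!python/object:module.SomeClass {x: 1}" mapping node
        # (SomeClass being a hypothetical name).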
+ + cls = type(data) + if cls in copy_reg.dispatch_table: + reduce = copy_reg.dispatch_table[cls] + elif hasattr(data, '__reduce_ex__'): + reduce = data.__reduce_ex__(2) + elif hasattr(data, '__reduce__'): + reduce = data.__reduce__() + else: + raise RepresenterError("cannot represent object: %r" % data) + reduce = (list(reduce)+[None]*5)[:5] + function, args, state, listitems, dictitems = reduce + args = list(args) + if state is None: + state = {} + if listitems is not None: + listitems = list(listitems) + if dictitems is not None: + dictitems = dict(dictitems) + if function.__name__ == '__newobj__': + function = args[0] + args = args[1:] + tag = u'tag:yaml.org,2002:python/object/new:' + newobj = True + else: + tag = u'tag:yaml.org,2002:python/object/apply:' + newobj = False + function_name = u'%s.%s' % (function.__module__, function.__name__) + if not args and not listitems and not dictitems \ + and isinstance(state, dict) and newobj: + state = state.items() + state.sort() + return self.represent_mapping( + u'tag:yaml.org,2002:python/object:'+function_name, state) + if not listitems and not dictitems \ + and isinstance(state, dict) and not state: + return self.represent_sequence(tag+function_name, args) + value = {} + if args: + value['args'] = args + if state or not isinstance(state, dict): + value['state'] = state + if listitems: + value['listitems'] = listitems + if dictitems: + value['dictitems'] = dictitems + return self.represent_mapping(tag+function_name, value) + +Representer.add_representer(str, + Representer.represent_str) + +Representer.add_representer(unicode, + Representer.represent_unicode) + +Representer.add_representer(long, + Representer.represent_long) + +Representer.add_representer(complex, + Representer.represent_complex) + +Representer.add_representer(tuple, + Representer.represent_tuple) + +Representer.add_representer(type, + Representer.represent_name) + +Representer.add_representer(Representer.classobj_type, + Representer.represent_name) + +Representer.add_representer(Representer.function_type, + Representer.represent_name) + +Representer.add_representer(Representer.builtin_function_type, + Representer.represent_name) + +Representer.add_representer(Representer.module_type, + Representer.represent_module) + +Representer.add_multi_representer(Representer.instance_type, + Representer.represent_instance) + +Representer.add_multi_representer(object, + Representer.represent_object) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/resolver.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/resolver.py new file mode 100644 index 00000000..7e580e98 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/resolver.py @@ -0,0 +1,205 @@ + +__all__ = ['BaseResolver', 'Resolver'] + +from error import * +from nodes import * + +import re + +class ResolverError(YAMLError): + pass + +class BaseResolver: + + DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str' + DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq' + DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map' + + yaml_implicit_resolvers = {} + yaml_path_resolvers = {} + + def __init__(self): + self.resolver_exact_paths = [] + self.resolver_prefix_paths = [] + + def add_implicit_resolver(cls, tag, regexp, first): + if not 'yaml_implicit_resolvers' in cls.__dict__: + cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() + if first is None: + first = [None] + for ch in first: + cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) + add_implicit_resolver = classmethod(add_implicit_resolver) + + def 
add_path_resolver(cls, tag, path, kind=None): + if not 'yaml_path_resolvers' in cls.__dict__: + cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() + new_path = [] + for element in path: + if isinstance(element, (list, tuple)): + if len(element) == 2: + node_check, index_check = element + elif len(element) == 1: + node_check = element[0] + index_check = True + else: + raise ResolverError("Invalid path element: %s" % element) + else: + node_check = None + index_check = element + if node_check is str: + node_check = ScalarNode + elif node_check is list: + node_check = SequenceNode + elif node_check is map: + node_check = MappingNode + elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ + and not isinstance(node_check, basestring) \ + and node_check is not None: + raise ResolverError("Invalid node checker: %s" % node_check) + if not isinstance(index_check, (basestring, int)) \ + and index_check is not None: + raise ResolverError("Invalid index checker: %s" % index_check) + new_path.append((node_check, index_check)) + if kind is str: + kind = ScalarNode + elif kind is list: + kind = SequenceNode + elif kind is map: + kind = MappingNode + elif kind not in [ScalarNode, SequenceNode, MappingNode] \ + and kind is not None: + raise ResolverError("Invalid node kind: %s" % kind) + cls.yaml_path_resolvers[tuple(new_path), kind] = tag + add_path_resolver = classmethod(add_path_resolver) + + def descend_resolver(self, current_node, current_index): + exact_paths = {} + prefix_paths = [] + if current_node: + depth = len(self.resolver_prefix_paths) + for path, kind in self.resolver_prefix_paths[-1]: + if self.check_resolver_prefix(depth, path, kind, + current_node, current_index): + if len(path) > depth: + prefix_paths.append((path, kind)) + else: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + for path, kind in self.yaml_path_resolvers: + if not path: + exact_paths[kind] = self.yaml_path_resolvers[path, kind] + else: + prefix_paths.append((path, kind)) + self.resolver_exact_paths.append(exact_paths) + self.resolver_prefix_paths.append(prefix_paths) + + def ascend_resolver(self): + self.resolver_exact_paths.pop() + self.resolver_prefix_paths.pop() + + def check_resolver_prefix(self, depth, path, kind, + current_node, current_index): + node_check, index_check = path[depth-1] + if isinstance(node_check, basestring): + if current_node.tag != node_check: + return + elif node_check is not None: + if not isinstance(current_node, node_check): + return + if index_check is True and current_index is not None: + return + if index_check in [False, None] and current_index is None: + return + if isinstance(index_check, basestring): + if not (isinstance(current_index, ScalarNode) + and index_check == current_index.value): + return + elif isinstance(index_check, int): + if index_check != current_index: + return + return True + + def resolve(self, kind, value, implicit): + if kind is ScalarNode and implicit[0]: + if value == u'': + resolvers = self.yaml_implicit_resolvers.get(u'', []) + else: + resolvers = self.yaml_implicit_resolvers.get(value[0], []) + resolvers += self.yaml_implicit_resolvers.get(None, []) + for tag, regexp in resolvers: + if regexp.match(value): + return tag + implicit = implicit[1] + exact_paths = self.resolver_exact_paths[-1] + if kind in exact_paths: + return exact_paths[kind] + if None in exact_paths: + return exact_paths[None] + if kind is ScalarNode: + return self.DEFAULT_SCALAR_TAG + elif kind is SequenceNode: + return self.DEFAULT_SEQUENCE_TAG + elif kind 
is MappingNode: + return self.DEFAULT_MAPPING_TAG + +class Resolver(BaseResolver): + pass + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:bool', + re.compile(ur'''^(?:yes|Yes|YES|n|N|no|No|NO + |true|True|TRUE|false|False|FALSE + |on|On|ON|off|Off|OFF)$''', re.X), + list(u'yYnNtTfFoO')) + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:float', + re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*(?:[eE][-+][0-9]+)? + |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* + |[-+]?\.(?:inf|Inf|INF) + |\.(?:nan|NaN|NAN))$''', re.X), + list(u'-+0123456789.')) + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:int', + re.compile(ur'''^(?:[-+]?0b[0-1_]+ + |[-+]?0[0-7_]+ + |[-+]?(?:0|[1-9][0-9_]*) + |[-+]?0x[0-9a-fA-F_]+ + |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), + list(u'-+0123456789')) + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:merge', + re.compile(ur'^(?:<<)$'), + ['<']) + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:null', + re.compile(ur'''^(?: ~ + |null|Null|NULL + | )$''', re.X), + [u'~', u'n', u'N', u'']) + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:timestamp', + re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] + |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? + (?:[Tt]|[ \t]+)[0-9][0-9]? + :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? + (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), + list(u'0123456789')) + +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:value', + re.compile(ur'^(?:=)$'), + ['=']) + +# The following resolver is only for documentation purposes. It cannot work +# because plain scalars cannot start with '!', '&', or '*'. +Resolver.add_implicit_resolver( + u'tag:yaml.org,2002:yaml', + re.compile(ur'^(?:!|&|\*)$'), + list(u'!&*')) + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/scanner.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/scanner.py new file mode 100644 index 00000000..cf2478f9 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/scanner.py @@ -0,0 +1,1458 @@ + +# Scanner produces tokens of the following types: +# STREAM-START +# STREAM-END +# DIRECTIVE(name, value) +# DOCUMENT-START +# DOCUMENT-END +# BLOCK-SEQUENCE-START +# BLOCK-MAPPING-START +# BLOCK-END +# FLOW-SEQUENCE-START +# FLOW-MAPPING-START +# FLOW-SEQUENCE-END +# FLOW-MAPPING-END +# BLOCK-ENTRY +# FLOW-ENTRY +# KEY +# VALUE +# ALIAS(value) +# ANCHOR(value) +# TAG(value) +# SCALAR(value, plain) +# +# Read comments in the Scanner code for more details. +# + +__all__ = ['Scanner', 'ScannerError'] + +from error import MarkedYAMLError +from tokens import * + +class ScannerError(MarkedYAMLError): + pass + +class SimpleKey: + # See below simple keys treatment. + + def __init__(self, token_number, required, index, line, column, mark): + self.token_number = token_number + self.required = required + self.index = index + self.line = line + self.column = column + self.mark = mark + +class Scanner: + + def __init__(self): + """Initialize the scanner.""" + # It is assumed that Scanner and Reader will have a common descendant. + # Reader do the dirty work of checking for BOM and converting the + # input data to Unicode. It also adds NUL to the end. + # + # Reader supports the following methods + # self.peek(i=0) # peek the next i-th character + # self.prefix(l=1) # peek the next l characters + # self.forward(l=1) # read the next l characters and move the pointer. + + # Had we reached the end of the stream? + self.done = False + + # The number of unclosed '{' and '['. `flow_level == 0` means block + # context. 
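        # For example, while scanning "{key: [a, b]}" the flow level is 2
        # inside the inner sequence and drops back to 0 after the closing '}'.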
+ self.flow_level = 0 + + # List of processed tokens that are not yet emitted. + self.tokens = [] + + # Add the STREAM-START token. + self.fetch_stream_start() + + # Number of tokens that were emitted through the `get_token` method. + self.tokens_taken = 0 + + # The current indentation level. + self.indent = -1 + + # Past indentation levels. + self.indents = [] + + # Variables related to simple keys treatment. + + # A simple key is a key that is not denoted by the '?' indicator. + # Example of simple keys: + # --- + # block simple key: value + # ? not a simple key: + # : { flow simple key: value } + # We emit the KEY token before all keys, so when we find a potential + # simple key, we try to locate the corresponding ':' indicator. + # Simple keys should be limited to a single line and 1024 characters. + + # Can a simple key start at the current position? A simple key may + # start: + # - at the beginning of the line, not counting indentation spaces + # (in block context), + # - after '{', '[', ',' (in the flow context), + # - after '?', ':', '-' (in the block context). + # In the block context, this flag also signifies if a block collection + # may start at the current position. + self.allow_simple_key = True + + # Keep track of possible simple keys. This is a dictionary. The key + # is `flow_level`; there can be no more that one possible simple key + # for each level. The value is a SimpleKey record: + # (token_number, required, index, line, column, mark) + # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), + # '[', or '{' tokens. + self.possible_simple_keys = {} + + # Public methods. + + def check_token(self, *choices): + # Check if the next token is one of the given types. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + if not choices: + return True + for choice in choices: + if isinstance(self.tokens[0], choice): + return True + return False + + def peek_token(self): + # Return the next token, but do not delete if from the queue. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + return self.tokens[0] + + def get_token(self): + # Return the next token. + while self.need_more_tokens(): + self.fetch_more_tokens() + if self.tokens: + self.tokens_taken += 1 + return self.tokens.pop(0) + + def __iter__(self): + # Iterator protocol. + while self.need_more_tokens(): + self.fetch_more_tokens() + while self.tokens: + self.tokens_taken += 1 + yield self.tokens.pop(0) + while self.need_more_tokens(): + self.fetch_more_tokens() + + # Private methods. + + def need_more_tokens(self): + if self.done: + return False + if not self.tokens: + return True + # The current token may be a potential simple key, so we + # need to look further. + self.stale_possible_simple_keys() + if self.next_possible_simple_key() == self.tokens_taken: + return True + + def fetch_more_tokens(self): + + # Eat whitespaces and comments until we reach the next token. + self.scan_to_next_token() + + # Remove obsolete possible simple keys. + self.stale_possible_simple_keys() + + # Compare the current indentation and column. It may add some tokens + # and decrease the current indentation level. + self.unwind_indent(self.column) + + # Peek the next character. + ch = self.peek() + + # Is it the end of stream? + if ch == u'\0': + return self.fetch_stream_end() + + # Is it a directive? + if ch == u'%' and self.check_directive(): + return self.fetch_directive() + + # Is it the document start? 
+ if ch == u'-' and self.check_document_start(): + return self.fetch_document_start() + + # Is it the document end? + if ch == u'.' and self.check_document_end(): + return self.fetch_document_end() + + # TODO: support for BOM within a stream. + #if ch == u'\uFEFF': + # return self.fetch_bom() <-- issue BOMToken + + # Note: the order of the following checks is NOT significant. + + # Is it the flow sequence start indicator? + if ch == u'[': + return self.fetch_flow_sequence_start() + + # Is it the flow mapping start indicator? + if ch == u'{': + return self.fetch_flow_mapping_start() + + # Is it the flow sequence end indicator? + if ch == u']': + return self.fetch_flow_sequence_end() + + # Is it the flow mapping end indicator? + if ch == u'}': + return self.fetch_flow_mapping_end() + + # Is it the flow entry indicator? + if ch in u',': + return self.fetch_flow_entry() + + # Is it the block entry indicator? + if ch in u'-' and self.check_block_entry(): + return self.fetch_block_entry() + + # Is it the key indicator? + if ch == u'?' and self.check_key(): + return self.fetch_key() + + # Is it the value indicator? + if ch == u':' and self.check_value(): + return self.fetch_value() + + # Is it an alias? + if ch == u'*': + return self.fetch_alias() + + # Is it an anchor? + if ch == u'&': + return self.fetch_anchor() + + # Is it a tag? + if ch == u'!': + return self.fetch_tag() + + # Is it a literal scalar? + if ch == u'|' and not self.flow_level: + return self.fetch_literal() + + # Is it a folded scalar? + if ch == u'>' and not self.flow_level: + return self.fetch_folded() + + # Is it a single quoted scalar? + if ch == u'\'': + return self.fetch_single() + + # Is it a double quoted scalar? + if ch == u'\"': + return self.fetch_double() + + # It must be a plain scalar then. + if self.check_plain(): + return self.fetch_plain() + + # No? It's an error. Let's produce a nice error message. + raise ScannerError("while scanning for the next token", None, + "found character %r that cannot start any token" + % ch.encode('utf-8'), self.get_mark()) + + # Simple keys treatment. + + def next_possible_simple_key(self): + # Return the number of the nearest possible simple key. Actually we + # don't need to loop through the whole dictionary. We may replace it + # with the following code: + # if not self.possible_simple_keys: + # return None + # return self.possible_simple_keys[ + # min(self.possible_simple_keys.keys())].token_number + min_token_number = None + for level in self.possible_simple_keys: + key = self.possible_simple_keys[level] + if min_token_number is None or key.token_number < min_token_number: + min_token_number = key.token_number + return min_token_number + + def stale_possible_simple_keys(self): + # Remove entries that are no longer possible simple keys. According to + # the YAML specification, simple keys + # - should be limited to a single line, + # - should be no longer than 1024 characters. + # Disabling this procedure will allow simple keys of any length and + # height (may cause problems if indentation is broken though). + for level in self.possible_simple_keys.keys(): + key = self.possible_simple_keys[level] + if key.line != self.line \ + or self.index-key.index > 1024: + if key.required: + raise ScannerError("while scanning a simple key", key.mark, + "could not found expected ':'", self.get_mark()) + del self.possible_simple_keys[level] + + def save_possible_simple_key(self): + # The next token may start a simple key. We check if it's possible + # and save its position. 
This function is called for + # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. + + # Check if a simple key is required at the current position. + required = not self.flow_level and self.indent == self.column + + # A simple key is required only if it is the first token in the current + # line. Therefore it is always allowed. + assert self.allow_simple_key or not required + + # The next token might be a simple key. Let's save it's number and + # position. + if self.allow_simple_key: + self.remove_possible_simple_key() + token_number = self.tokens_taken+len(self.tokens) + key = SimpleKey(token_number, required, + self.index, self.line, self.column, self.get_mark()) + self.possible_simple_keys[self.flow_level] = key + + def remove_possible_simple_key(self): + # Remove the saved possible key position at the current flow level. + if self.flow_level in self.possible_simple_keys: + key = self.possible_simple_keys[self.flow_level] + + # I don't think it's possible, but I could be wrong. + assert not key.required + #if key.required: + # raise ScannerError("while scanning a simple key", key.mark, + # "could not found expected ':'", self.get_mark()) + + # Indentation functions. + + def unwind_indent(self, column): + + ## In flow context, tokens should respect indentation. + ## Actually the condition should be `self.indent >= column` according to + ## the spec. But this condition will prohibit intuitively correct + ## constructions such as + ## key : { + ## } + #if self.flow_level and self.indent > column: + # raise ScannerError(None, None, + # "invalid intendation or unclosed '[' or '{'", + # self.get_mark()) + + # In the flow context, indentation is ignored. We make the scanner less + # restrictive then specification requires. + if self.flow_level: + return + + # In block context, we may need to issue the BLOCK-END tokens. + while self.indent > column: + mark = self.get_mark() + self.indent = self.indents.pop() + self.tokens.append(BlockEndToken(mark, mark)) + + def add_indent(self, column): + # Check if we need to increase indentation. + if self.indent < column: + self.indents.append(self.indent) + self.indent = column + return True + return False + + # Fetchers. + + def fetch_stream_start(self): + # We always add STREAM-START as the first token and STREAM-END as the + # last token. + + # Read the token. + mark = self.get_mark() + + # Add STREAM-START. + self.tokens.append(StreamStartToken(mark, mark, + encoding=self.encoding)) + + + def fetch_stream_end(self): + + # Set the current intendation to -1. + self.unwind_indent(-1) + + # Reset everything (not really needed). + self.allow_simple_key = False + self.possible_simple_keys = {} + + # Read the token. + mark = self.get_mark() + + # Add STREAM-END. + self.tokens.append(StreamEndToken(mark, mark)) + + # The steam is finished. + self.done = True + + def fetch_directive(self): + + # Set the current intendation to -1. + self.unwind_indent(-1) + + # Reset simple keys. + self.remove_possible_simple_key() + self.allow_simple_key = False + + # Scan and add DIRECTIVE. + self.tokens.append(self.scan_directive()) + + def fetch_document_start(self): + self.fetch_document_indicator(DocumentStartToken) + + def fetch_document_end(self): + self.fetch_document_indicator(DocumentEndToken) + + def fetch_document_indicator(self, TokenClass): + + # Set the current intendation to -1. + self.unwind_indent(-1) + + # Reset simple keys. Note that there could not be a block collection + # after '---'. 
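
An aside on the simple-key machinery above: the scanner cannot know that "key" in "key: value" is a mapping key until it reaches the ':', so save_possible_simple_key() records the position and fetch_value() later inserts the KEY token (and, via add_indent(), BLOCK-MAPPING-START) retroactively into self.tokens. A minimal sketch of the resulting token stream, assuming the patched package is importable as yaml; the expected list is illustrative.

    import yaml

    # Illustrative sketch, not part of the patched sources.
    loader = yaml.Loader("key: value\n")
    names = []
    while loader.check_token():
        names.append(loader.get_token().__class__.__name__)
    print(names)
    # Expected:
    # ['StreamStartToken', 'BlockMappingStartToken', 'KeyToken', 'ScalarToken',
    #  'ValueToken', 'ScalarToken', 'BlockEndToken', 'StreamEndToken']
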
+ self.remove_possible_simple_key() + self.allow_simple_key = False + + # Add DOCUMENT-START or DOCUMENT-END. + start_mark = self.get_mark() + self.forward(3) + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_start(self): + self.fetch_flow_collection_start(FlowSequenceStartToken) + + def fetch_flow_mapping_start(self): + self.fetch_flow_collection_start(FlowMappingStartToken) + + def fetch_flow_collection_start(self, TokenClass): + + # '[' and '{' may start a simple key. + self.save_possible_simple_key() + + # Increase the flow level. + self.flow_level += 1 + + # Simple keys are allowed after '[' and '{'. + self.allow_simple_key = True + + # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_sequence_end(self): + self.fetch_flow_collection_end(FlowSequenceEndToken) + + def fetch_flow_mapping_end(self): + self.fetch_flow_collection_end(FlowMappingEndToken) + + def fetch_flow_collection_end(self, TokenClass): + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Decrease the flow level. + self.flow_level -= 1 + + # No simple keys after ']' or '}'. + self.allow_simple_key = False + + # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(TokenClass(start_mark, end_mark)) + + def fetch_flow_entry(self): + + # Simple keys are allowed after ','. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add FLOW-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(FlowEntryToken(start_mark, end_mark)) + + def fetch_block_entry(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a new entry? + if not self.allow_simple_key: + raise ScannerError(None, None, + "sequence entries are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-SEQUENCE-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockSequenceStartToken(mark, mark)) + + # It's an error for the block entry to occur in the flow context, + # but we let the parser detect this. + else: + pass + + # Simple keys are allowed after '-'. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add BLOCK-ENTRY. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(BlockEntryToken(start_mark, end_mark)) + + def fetch_key(self): + + # Block context needs additional checks. + if not self.flow_level: + + # Are we allowed to start a key (not nessesary a simple)? + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping keys are not allowed here", + self.get_mark()) + + # We may need to add BLOCK-MAPPING-START. + if self.add_indent(self.column): + mark = self.get_mark() + self.tokens.append(BlockMappingStartToken(mark, mark)) + + # Simple keys are allowed after '?' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add KEY. 
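
Similarly, fetch_block_entry() above only opens a BLOCK-SEQUENCE-START when add_indent() reports a new indentation level, and unwind_indent() later closes it with BLOCK-END. A short sketch of the stream for a two-item block sequence, again assuming the package is importable as yaml; the expected list is illustrative.

    import yaml

    # Illustrative sketch, not part of the patched sources.
    loader = yaml.Loader("- a\n- b\n")
    names = []
    while loader.check_token():
        names.append(loader.get_token().__class__.__name__)
    print(names)
    # Expected:
    # ['StreamStartToken', 'BlockSequenceStartToken',
    #  'BlockEntryToken', 'ScalarToken',
    #  'BlockEntryToken', 'ScalarToken',
    #  'BlockEndToken', 'StreamEndToken']
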
+ start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(KeyToken(start_mark, end_mark)) + + def fetch_value(self): + + # Do we determine a simple key? + if self.flow_level in self.possible_simple_keys: + + # Add KEY. + key = self.possible_simple_keys[self.flow_level] + del self.possible_simple_keys[self.flow_level] + self.tokens.insert(key.token_number-self.tokens_taken, + KeyToken(key.mark, key.mark)) + + # If this key starts a new block mapping, we need to add + # BLOCK-MAPPING-START. + if not self.flow_level: + if self.add_indent(key.column): + self.tokens.insert(key.token_number-self.tokens_taken, + BlockMappingStartToken(key.mark, key.mark)) + + # There cannot be two simple keys one after another. + self.allow_simple_key = False + + # It must be a part of a complex key. + else: + + # Block context needs additional checks. + # (Do we really need them? They will be catched by the parser + # anyway.) + if not self.flow_level: + + # We are allowed to start a complex value if and only if + # we can start a simple key. + if not self.allow_simple_key: + raise ScannerError(None, None, + "mapping values are not allowed here", + self.get_mark()) + + # Simple keys are allowed after ':' in the block context. + self.allow_simple_key = not self.flow_level + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Add VALUE. + start_mark = self.get_mark() + self.forward() + end_mark = self.get_mark() + self.tokens.append(ValueToken(start_mark, end_mark)) + + def fetch_alias(self): + + # ALIAS could be a simple key. + self.save_possible_simple_key() + + # No simple keys after ALIAS. + self.allow_simple_key = False + + # Scan and add ALIAS. + self.tokens.append(self.scan_anchor(AliasToken)) + + def fetch_anchor(self): + + # ANCHOR could start a simple key. + self.save_possible_simple_key() + + # No simple keys after ANCHOR. + self.allow_simple_key = False + + # Scan and add ANCHOR. + self.tokens.append(self.scan_anchor(AnchorToken)) + + def fetch_tag(self): + + # TAG could start a simple key. + self.save_possible_simple_key() + + # No simple keys after TAG. + self.allow_simple_key = False + + # Scan and add TAG. + self.tokens.append(self.scan_tag()) + + def fetch_literal(self): + self.fetch_block_scalar(style='|') + + def fetch_folded(self): + self.fetch_block_scalar(style='>') + + def fetch_block_scalar(self, style): + + # A simple key may follow a block scalar. + self.allow_simple_key = True + + # Reset possible simple key on the current level. + self.remove_possible_simple_key() + + # Scan and add SCALAR. + self.tokens.append(self.scan_block_scalar(style)) + + def fetch_single(self): + self.fetch_flow_scalar(style='\'') + + def fetch_double(self): + self.fetch_flow_scalar(style='"') + + def fetch_flow_scalar(self, style): + + # A flow scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after flow scalars. + self.allow_simple_key = False + + # Scan and add SCALAR. + self.tokens.append(self.scan_flow_scalar(style)) + + def fetch_plain(self): + + # A plain scalar could be a simple key. + self.save_possible_simple_key() + + # No simple keys after plain scalars. But note that `scan_plain` will + # change this flag if the scan is finished at the beginning of the + # line. + self.allow_simple_key = False + + # Scan and add SCALAR. May change `allow_simple_key`. + self.tokens.append(self.scan_plain()) + + # Checkers. + + def check_directive(self): + + # DIRECTIVE: ^ '%' ... 
+ # The '%' indicator is already checked. + if self.column == 0: + return True + + def check_document_start(self): + + # DOCUMENT-START: ^ '---' (' '|'\n') + if self.column == 0: + if self.prefix(3) == u'---' \ + and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': + return True + + def check_document_end(self): + + # DOCUMENT-END: ^ '...' (' '|'\n') + if self.column == 0: + if self.prefix(3) == u'...' \ + and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': + return True + + def check_block_entry(self): + + # BLOCK-ENTRY: '-' (' '|'\n') + return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' + + def check_key(self): + + # KEY(flow context): '?' + if self.flow_level: + return True + + # KEY(block context): '?' (' '|'\n') + else: + return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' + + def check_value(self): + + # VALUE(flow context): ':' + if self.flow_level: + return True + + # VALUE(block context): ':' (' '|'\n') + else: + return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' + + def check_plain(self): + + # A plain scalar may start with any non-space character except: + # '-', '?', ':', ',', '[', ']', '{', '}', + # '#', '&', '*', '!', '|', '>', '\'', '\"', + # '%', '@', '`'. + # + # It may also start with + # '-', '?', ':' + # if it is followed by a non-space character. + # + # Note that we limit the last rule to the block context (except the + # '-' character) because we want the flow context to be space + # independent. + ch = self.peek() + return ch not in u'\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ + or (self.peek(1) not in u'\0 \t\r\n\x85\u2028\u2029' + and (ch == u'-' or (not self.flow_level and ch in u'?:'))) + + # Scanners. + + def scan_to_next_token(self): + # We ignore spaces, line breaks and comments. + # If we find a line break in the block context, we set the flag + # `allow_simple_key` on. + # The byte order mark is stripped if it's the first character in the + # stream. We do not yet support BOM inside the stream as the + # specification requires. Any such mark will be considered as a part + # of the document. + # + # TODO: We need to make tab handling rules more sane. A good rule is + # Tabs cannot precede tokens + # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, + # KEY(block), VALUE(block), BLOCK-ENTRY + # So the checking code is + # if <TAB>: + # self.allow_simple_keys = False + # We also need to add the check for `allow_simple_keys == True` to + # `unwind_indent` before issuing BLOCK-END. + # Scanners for block, flow, and plain scalars need to be modified. + + if self.index == 0 and self.peek() == u'\uFEFF': + self.forward() + found = False + while not found: + while self.peek() == u' ': + self.forward() + if self.peek() == u'#': + while self.peek() not in u'\0\r\n\x85\u2028\u2029': + self.forward() + if self.scan_line_break(): + if not self.flow_level: + self.allow_simple_key = True + else: + found = True + + def scan_directive(self): + # See the specification for details. 
+ start_mark = self.get_mark() + self.forward() + name = self.scan_directive_name(start_mark) + value = None + if name == u'YAML': + value = self.scan_yaml_directive_value(start_mark) + end_mark = self.get_mark() + elif name == u'TAG': + value = self.scan_tag_directive_value(start_mark) + end_mark = self.get_mark() + else: + end_mark = self.get_mark() + while self.peek() not in u'\0\r\n\x85\u2028\u2029': + self.forward() + self.scan_directive_ignored_line(start_mark) + return DirectiveToken(name, value, start_mark, end_mark) + + def scan_directive_name(self, start_mark): + # See the specification for details. + length = 0 + ch = self.peek(length) + while u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch.encode('utf-8'), self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in u'\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected alphabetic or numeric character, but found %r" + % ch.encode('utf-8'), self.get_mark()) + return value + + def scan_yaml_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == u' ': + self.forward() + major = self.scan_yaml_directive_number(start_mark) + if self.peek() != '.': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or '.', but found %r" + % self.peek().encode('utf-8'), + self.get_mark()) + self.forward() + minor = self.scan_yaml_directive_number(start_mark) + if self.peek() not in u'\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a digit or ' ', but found %r" + % self.peek().encode('utf-8'), + self.get_mark()) + return (major, minor) + + def scan_yaml_directive_number(self, start_mark): + # See the specification for details. + ch = self.peek() + if not (u'0' <= ch <= '9'): + raise ScannerError("while scanning a directive", start_mark, + "expected a digit, but found %r" % ch.encode('utf-8'), + self.get_mark()) + length = 0 + while u'0' <= self.peek(length) <= u'9': + length += 1 + value = int(self.prefix(length)) + self.forward(length) + return value + + def scan_tag_directive_value(self, start_mark): + # See the specification for details. + while self.peek() == u' ': + self.forward() + handle = self.scan_tag_directive_handle(start_mark) + while self.peek() == u' ': + self.forward() + prefix = self.scan_tag_directive_prefix(start_mark) + return (handle, prefix) + + def scan_tag_directive_handle(self, start_mark): + # See the specification for details. + value = self.scan_tag_handle('directive', start_mark) + ch = self.peek() + if ch != u' ': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch.encode('utf-8'), + self.get_mark()) + return value + + def scan_tag_directive_prefix(self, start_mark): + # See the specification for details. + value = self.scan_tag_uri('directive', start_mark) + ch = self.peek() + if ch not in u'\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected ' ', but found %r" % ch.encode('utf-8'), + self.get_mark()) + return value + + def scan_directive_ignored_line(self, start_mark): + # See the specification for details. 
+ while self.peek() == u' ': + self.forward() + if self.peek() == u'#': + while self.peek() not in u'\0\r\n\x85\u2028\u2029': + self.forward() + ch = self.peek() + if ch not in u'\0\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a directive", start_mark, + "expected a comment or a line break, but found %r" + % ch.encode('utf-8'), self.get_mark()) + self.scan_line_break() + + def scan_anchor(self, TokenClass): + # The specification does not restrict characters for anchors and + # aliases. This may lead to problems, for instance, the document: + # [ *alias, value ] + # can be interpteted in two ways, as + # [ "value" ] + # and + # [ *alias , "value" ] + # Therefore we restrict aliases to numbers and ASCII letters. + start_mark = self.get_mark() + indicator = self.peek() + if indicator == '*': + name = 'alias' + else: + name = 'anchor' + self.forward() + length = 0 + ch = self.peek(length) + while u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-_': + length += 1 + ch = self.peek(length) + if not length: + raise ScannerError("while scanning an %s" % name, start_mark, + "expected alphabetic or numeric character, but found %r" + % ch.encode('utf-8'), self.get_mark()) + value = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`': + raise ScannerError("while scanning an %s" % name, start_mark, + "expected alphabetic or numeric character, but found %r" + % ch.encode('utf-8'), self.get_mark()) + end_mark = self.get_mark() + return TokenClass(value, start_mark, end_mark) + + def scan_tag(self): + # See the specification for details. + start_mark = self.get_mark() + ch = self.peek(1) + if ch == u'<': + handle = None + self.forward(2) + suffix = self.scan_tag_uri('tag', start_mark) + if self.peek() != u'>': + raise ScannerError("while parsing a tag", start_mark, + "expected '>', but found %r" % self.peek().encode('utf-8'), + self.get_mark()) + self.forward() + elif ch in u'\0 \t\r\n\x85\u2028\u2029': + handle = None + suffix = u'!' + self.forward() + else: + length = 1 + use_handle = False + while ch not in u'\0 \r\n\x85\u2028\u2029': + if ch == u'!': + use_handle = True + break + length += 1 + ch = self.peek(length) + handle = u'!' + if use_handle: + handle = self.scan_tag_handle('tag', start_mark) + else: + handle = u'!' + self.forward() + suffix = self.scan_tag_uri('tag', start_mark) + ch = self.peek() + if ch not in u'\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a tag", start_mark, + "expected ' ', but found %r" % ch.encode('utf-8'), + self.get_mark()) + value = (handle, suffix) + end_mark = self.get_mark() + return TagToken(value, start_mark, end_mark) + + def scan_block_scalar(self, style): + # See the specification for details. + + if style == '>': + folded = True + else: + folded = False + + chunks = [] + start_mark = self.get_mark() + + # Scan the header. + self.forward() + chomping, increment = self.scan_block_scalar_indicators(start_mark) + self.scan_block_scalar_ignored_line(start_mark) + + # Determine the indentation level and go to the first non-empty line. + min_indent = self.indent+1 + if min_indent < 1: + min_indent = 1 + if increment is None: + breaks, max_indent, end_mark = self.scan_block_scalar_indentation() + indent = max(min_indent, max_indent) + else: + indent = min_indent+increment-1 + breaks, end_mark = self.scan_block_scalar_breaks(indent) + line_break = u'' + + # Scan the inner part of the block scalar. 
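
As noted in the scan_anchor() comment above, anchor and alias names are restricted to ASCII letters, digits, '-' and '_' to avoid the "[ *alias, value ]" ambiguity. A minimal sketch of an anchor being reused through an alias, assuming the package is importable as yaml; the document and key names are illustrative.

    import yaml

    # Illustrative sketch, not part of the patched sources.
    doc = "first: &shared {retries: 3}\nsecond: *shared\n"
    data = yaml.load(doc)
    assert data['first'] == {'retries': 3}
    assert data['first'] is data['second']   # the alias resolves to the same object
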
+ while self.column == indent and self.peek() != u'\0': + chunks.extend(breaks) + leading_non_space = self.peek() not in u' \t' + length = 0 + while self.peek(length) not in u'\0\r\n\x85\u2028\u2029': + length += 1 + chunks.append(self.prefix(length)) + self.forward(length) + line_break = self.scan_line_break() + breaks, end_mark = self.scan_block_scalar_breaks(indent) + if self.column == indent and self.peek() != u'\0': + + # Unfortunately, folding rules are ambiguous. + # + # This is the folding according to the specification: + + if folded and line_break == u'\n' \ + and leading_non_space and self.peek() not in u' \t': + if not breaks: + chunks.append(u' ') + else: + chunks.append(line_break) + + # This is Clark Evans's interpretation (also in the spec + # examples): + # + #if folded and line_break == u'\n': + # if not breaks: + # if self.peek() not in ' \t': + # chunks.append(u' ') + # else: + # chunks.append(line_break) + #else: + # chunks.append(line_break) + else: + break + + # Chomp the tail. + if chomping is not False: + chunks.append(line_break) + if chomping is True: + chunks.extend(breaks) + + # We are done. + return ScalarToken(u''.join(chunks), False, start_mark, end_mark, + style) + + def scan_block_scalar_indicators(self, start_mark): + # See the specification for details. + chomping = None + increment = None + ch = self.peek() + if ch in u'+-': + if ch == '+': + chomping = True + else: + chomping = False + self.forward() + ch = self.peek() + if ch in u'0123456789': + increment = int(ch) + if increment == 0: + raise ScannerError("while scanning a block scalar", start_mark, + "expected indentation indicator in the range 1-9, but found 0", + self.get_mark()) + self.forward() + elif ch in u'0123456789': + increment = int(ch) + if increment == 0: + raise ScannerError("while scanning a block scalar", start_mark, + "expected indentation indicator in the range 1-9, but found 0", + self.get_mark()) + self.forward() + ch = self.peek() + if ch in u'+-': + if ch == '+': + chomping = True + else: + chomping = False + self.forward() + ch = self.peek() + if ch not in u'\0 \r\n\x85\u2028\u2029': + raise ScannerError("while scanning a block scalar", start_mark, + "expected chomping or indentation indicators, but found %r" + % ch.encode('utf-8'), self.get_mark()) + return chomping, increment + + def scan_block_scalar_ignored_line(self, start_mark): + # See the specification for details. + while self.peek() == u' ': + self.forward() + if self.peek() == u'#': + while self.peek() not in u'\0\r\n\x85\u2028\u2029': + self.forward() + ch = self.peek() + if ch not in u'\0\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a block scalar", start_mark, + "expected a comment or a line break, but found %r" + % ch.encode('utf-8'), self.get_mark()) + self.scan_line_break() + + def scan_block_scalar_indentation(self): + # See the specification for details. + chunks = [] + max_indent = 0 + end_mark = self.get_mark() + while self.peek() in u' \r\n\x85\u2028\u2029': + if self.peek() != u' ': + chunks.append(self.scan_line_break()) + end_mark = self.get_mark() + else: + self.forward() + if self.column > max_indent: + max_indent = self.column + return chunks, max_indent, end_mark + + def scan_block_scalar_breaks(self, indent): + # See the specification for details. 
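
scan_block_scalar() and scan_block_scalar_indicators() above implement the '|' (literal) and '>' (folded) block styles together with the '+'/'-' chomping and explicit indentation indicators. A minimal sketch of the resulting values, assuming the package is importable as yaml.

    import yaml

    # Illustrative sketch, not part of the patched sources.
    assert yaml.load("t: |\n  one\n  two\n")['t'] == "one\ntwo\n"   # literal keeps line breaks
    assert yaml.load("t: >\n  one\n  two\n")['t'] == "one two\n"    # folded joins with a space
    assert yaml.load("t: |-\n  one\n  two\n")['t'] == "one\ntwo"    # '-' chomps the final break
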
+ chunks = [] + end_mark = self.get_mark() + while self.column < indent and self.peek() == u' ': + self.forward() + while self.peek() in u'\r\n\x85\u2028\u2029': + chunks.append(self.scan_line_break()) + end_mark = self.get_mark() + while self.column < indent and self.peek() == u' ': + self.forward() + return chunks, end_mark + + def scan_flow_scalar(self, style): + # See the specification for details. + # Note that we loose indentation rules for quoted scalars. Quoted + # scalars don't need to adhere indentation because " and ' clearly + # mark the beginning and the end of them. Therefore we are less + # restrictive then the specification requires. We only need to check + # that document separators are not included in scalars. + if style == '"': + double = True + else: + double = False + chunks = [] + start_mark = self.get_mark() + quote = self.peek() + self.forward() + chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) + while self.peek() != quote: + chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) + chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) + self.forward() + end_mark = self.get_mark() + return ScalarToken(u''.join(chunks), False, start_mark, end_mark, + style) + + ESCAPE_REPLACEMENTS = { + u'0': u'\0', + u'a': u'\x07', + u'b': u'\x08', + u't': u'\x09', + u'\t': u'\x09', + u'n': u'\x0A', + u'v': u'\x0B', + u'f': u'\x0C', + u'r': u'\x0D', + u'e': u'\x1B', + u' ': u'\x20', + u'\"': u'\"', + u'\\': u'\\', + u'N': u'\x85', + u'_': u'\xA0', + u'L': u'\u2028', + u'P': u'\u2029', + } + + ESCAPE_CODES = { + u'x': 2, + u'u': 4, + u'U': 8, + } + + def scan_flow_scalar_non_spaces(self, double, start_mark): + # See the specification for details. + chunks = [] + while True: + length = 0 + while self.peek(length) not in u'\'\"\\\0 \t\r\n\x85\u2028\u2029': + length += 1 + if length: + chunks.append(self.prefix(length)) + self.forward(length) + ch = self.peek() + if not double and ch == u'\'' and self.peek(1) == u'\'': + chunks.append(u'\'') + self.forward(2) + elif (double and ch == u'\'') or (not double and ch in u'\"\\'): + chunks.append(ch) + self.forward() + elif double and ch == u'\\': + self.forward() + ch = self.peek() + if ch in self.ESCAPE_REPLACEMENTS: + chunks.append(self.ESCAPE_REPLACEMENTS[ch]) + self.forward() + elif ch in self.ESCAPE_CODES: + length = self.ESCAPE_CODES[ch] + self.forward() + for k in range(length): + if self.peek(k) not in u'0123456789ABCDEFabcdef': + raise ScannerError("while scanning a double-quoted scalar", start_mark, + "expected escape sequence of %d hexdecimal numbers, but found %r" % + (length, self.peek(k).encode('utf-8')), self.get_mark()) + code = int(self.prefix(length), 16) + chunks.append(unichr(code)) + self.forward(length) + elif ch in u'\r\n\x85\u2028\u2029': + self.scan_line_break() + chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) + else: + raise ScannerError("while scanning a double-quoted scalar", start_mark, + "found unknown escape character %r" % ch.encode('utf-8'), self.get_mark()) + else: + return chunks + + def scan_flow_scalar_spaces(self, double, start_mark): + # See the specification for details. 
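
The ESCAPE_REPLACEMENTS and ESCAPE_CODES tables above drive scan_flow_scalar_non_spaces() whenever it meets a backslash in a double-quoted scalar; single-quoted scalars only know the doubled quote. A minimal sketch, assuming the package is importable as yaml.

    import yaml

    # Illustrative sketch, not part of the patched sources.
    assert yaml.load(r'"a\tb"') == "a\tb"        # named escape (ESCAPE_REPLACEMENTS)
    assert yaml.load(r'"\x41\u0042"') == "AB"    # \xNN / \uNNNN escapes (ESCAPE_CODES)
    assert yaml.load("'it''s'") == "it's"        # '' is the only single-quote escape
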
+ chunks = [] + length = 0 + while self.peek(length) in u' \t': + length += 1 + whitespaces = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch == u'\0': + raise ScannerError("while scanning a quoted scalar", start_mark, + "found unexpected end of stream", self.get_mark()) + elif ch in u'\r\n\x85\u2028\u2029': + line_break = self.scan_line_break() + breaks = self.scan_flow_scalar_breaks(double, start_mark) + if line_break != u'\n': + chunks.append(line_break) + elif not breaks: + chunks.append(u' ') + chunks.extend(breaks) + else: + chunks.append(whitespaces) + return chunks + + def scan_flow_scalar_breaks(self, double, start_mark): + # See the specification for details. + chunks = [] + while True: + # Instead of checking indentation, we check for document + # separators. + prefix = self.prefix(3) + if (prefix == u'---' or prefix == u'...') \ + and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': + raise ScannerError("while scanning a quoted scalar", start_mark, + "found unexpected document separator", self.get_mark()) + while self.peek() in u' \t': + self.forward() + if self.peek() in u'\r\n\x85\u2028\u2029': + chunks.append(self.scan_line_break()) + else: + return chunks + + def scan_plain(self): + # See the specification for details. + # We add an additional restriction for the flow context: + # plain scalars in the flow context cannot contain ',', ':' and '?'. + # We also keep track of the `allow_simple_key` flag here. + # Indentation rules are loosed for the flow context. + chunks = [] + start_mark = self.get_mark() + end_mark = start_mark + indent = self.indent+1 + # We allow zero indentation for scalars, but then we need to check for + # document separators at the beginning of the line. + #if indent == 0: + # indent = 1 + spaces = [] + while True: + length = 0 + if self.peek() == u'#': + break + while True: + ch = self.peek(length) + if ch in u'\0 \t\r\n\x85\u2028\u2029' \ + or (not self.flow_level and ch == u':' and + self.peek(length+1) in u'\0 \t\r\n\x28\u2028\u2029') \ + or (self.flow_level and ch in u',:?[]{}'): + break + length += 1 + # It's not clear what we should do with ':' in the flow context. + if (self.flow_level and ch == u':' + and self.peek(length+1) not in u'\0 \t\r\n\x28\u2028\u2029,[]{}'): + self.forward(length) + raise ScannerError("while scanning a plain scalar", start_mark, + "found unexpected ':'", self.get_mark(), + "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.") + if length == 0: + break + self.allow_simple_key = False + chunks.extend(spaces) + chunks.append(self.prefix(length)) + self.forward(length) + end_mark = self.get_mark() + spaces = self.scan_plain_spaces(indent, start_mark) + if not spaces or self.peek() == u'#' \ + or (not self.flow_level and self.column < indent): + break + return ScalarToken(u''.join(chunks), True, start_mark, end_mark) + + def scan_plain_spaces(self, indent, start_mark): + # See the specification for details. + # The specification is really confusing about tabs in plain scalars. + # We just forbid them completely. Do not use tabs in YAML! 
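
scan_plain() above deliberately loosens the indentation rules but, in flow context, stops a plain scalar at ',', ':', '?' and the bracket characters; across lines it folds the line break into a single space. A minimal sketch, assuming the package is importable as yaml.

    import yaml

    # Illustrative sketch, not part of the patched sources.
    assert yaml.load("[one two, three]") == ["one two", "three"]   # ',' ends the scalar
    assert yaml.load("{a: 1, b: 2}") == {"a": 1, "b": 2}           # ':' ends the key
    assert yaml.load("spans\nlines") == "spans lines"              # the break folds to a space
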
+ chunks = [] + length = 0 + while self.peek(length) in u' ': + length += 1 + whitespaces = self.prefix(length) + self.forward(length) + ch = self.peek() + if ch in u'\r\n\x85\u2028\u2029': + line_break = self.scan_line_break() + self.allow_simple_key = True + prefix = self.prefix(3) + if (prefix == u'---' or prefix == u'...') \ + and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': + return + breaks = [] + while self.peek() in u' \r\n\x85\u2028\u2029': + if self.peek() == ' ': + self.forward() + else: + breaks.append(self.scan_line_break()) + prefix = self.prefix(3) + if (prefix == u'---' or prefix == u'...') \ + and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': + return + if line_break != u'\n': + chunks.append(line_break) + elif not breaks: + chunks.append(u' ') + chunks.extend(breaks) + elif whitespaces: + chunks.append(whitespaces) + return chunks + + def scan_tag_handle(self, name, start_mark): + # See the specification for details. + # For some strange reasons, the specification does not allow '_' in + # tag handles. I have allowed it anyway. + ch = self.peek() + if ch != u'!': + raise ScannerError("while scanning a %s" % name, start_mark, + "expected '!', but found %r" % ch.encode('utf-8'), + self.get_mark()) + length = 1 + ch = self.peek(length) + if ch != u' ': + while u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-_': + length += 1 + ch = self.peek(length) + if ch != u'!': + self.forward(length) + raise ScannerError("while scanning a %s" % name, start_mark, + "expected '!', but found %r" % ch.encode('utf-8'), + self.get_mark()) + length += 1 + value = self.prefix(length) + self.forward(length) + return value + + def scan_tag_uri(self, name, start_mark): + # See the specification for details. + # Note: we do not check if URI is well-formed. + chunks = [] + length = 0 + ch = self.peek(length) + while u'0' <= ch <= u'9' or u'A' <= ch <= 'Z' or u'a' <= ch <= 'z' \ + or ch in u'-;/?:@&=+$,_.!~*\'()[]%': + if ch == u'%': + chunks.append(self.prefix(length)) + self.forward(length) + length = 0 + chunks.append(self.scan_uri_escapes(name, start_mark)) + else: + length += 1 + ch = self.peek(length) + if length: + chunks.append(self.prefix(length)) + self.forward(length) + length = 0 + if not chunks: + raise ScannerError("while parsing a %s" % name, start_mark, + "expected URI, but found %r" % ch.encode('utf-8'), + self.get_mark()) + return u''.join(chunks) + + def scan_uri_escapes(self, name, start_mark): + # See the specification for details. 
+ bytes = [] + mark = self.get_mark() + while self.peek() == u'%': + self.forward() + for k in range(2): + if self.peek(k) not in u'0123456789ABCDEFabcdef': + raise ScannerError("while scanning a %s" % name, start_mark, + "expected URI escape sequence of 2 hexdecimal numbers, but found %r" % + (self.peek(k).encode('utf-8')), self.get_mark()) + bytes.append(chr(int(self.prefix(2), 16))) + self.forward(2) + try: + value = unicode(''.join(bytes), 'utf-8') + except UnicodeDecodeError, exc: + raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) + return value + + def scan_line_break(self): + # Transforms: + # '\r\n' : '\n' + # '\r' : '\n' + # '\n' : '\n' + # '\x85' : '\n' + # '\u2028' : '\u2028' + # '\u2029 : '\u2029' + # default : '' + ch = self.peek() + if ch in u'\r\n\x85': + if self.prefix(2) == u'\r\n': + self.forward(2) + else: + self.forward() + return u'\n' + elif ch in u'\u2028\u2029': + self.forward() + return ch + return u'' + +#try: +# import psyco +# psyco.bind(Scanner) +#except ImportError: +# pass + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/serializer.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/serializer.py new file mode 100644 index 00000000..937be9a9 --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/serializer.py @@ -0,0 +1,121 @@ + +__all__ = ['Serializer', 'SerializerError'] + +from error import YAMLError +from events import * +from nodes import * + +class SerializerError(YAMLError): + pass + +class Serializer: + + ANCHOR_TEMPLATE = u'id%03d' + + def __init__(self, encoding=None, + explicit_start=None, explicit_end=None, version=None, tags=None): + self.use_encoding = encoding + self.use_explicit_start = explicit_start + self.use_explicit_end = explicit_end + self.use_version = version + self.use_tags = tags + self.serialized_nodes = {} + self.anchors = {} + self.last_anchor_id = 0 + self.closed = None + + def open(self): + if self.closed is None: + self.emit(StreamStartEvent(encoding=self.use_encoding)) + self.closed = False + elif self.closed: + raise SerializerError("serializer is closed") + else: + raise SerializerError("serializer is already opened") + + def close(self): + if self.closed is None: + raise SerializerError("serializer is not opened") + elif not self.closed: + self.emit(StreamEndEvent()) + self.closed = True + + #def __del__(self): + # self.close() + + def serialize(self, node): + if self.closed is None: + raise SerializerError("serializer is not opened") + elif self.closed: + raise SerializerError("serializer is closed") + self.emit(DocumentStartEvent(explicit=self.use_explicit_start, + version=self.use_version, tags=self.use_tags)) + self.anchor_node(node) + self.serialize_node(node, None, None) + self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) + self.serialized_nodes = {} + self.anchors = {} + self.last_alias_id = 0 + + def anchor_node(self, node): + if node in self.anchors: + if self.anchors[node] is None: + self.anchors[node] = self.generate_anchor(node) + else: + self.anchors[node] = None + if isinstance(node, SequenceNode): + for item in node.value: + self.anchor_node(item) + elif isinstance(node, MappingNode): + if hasattr(node.value, 'keys'): + for key in node.value.keys(): + self.anchor_node(key) + self.anchor_node(node.value[key]) + else: + for key, value in node.value: + self.anchor_node(key) + self.anchor_node(value) + + def generate_anchor(self, node): + self.last_anchor_id += 1 + return self.ANCHOR_TEMPLATE % self.last_anchor_id + + def serialize_node(self, node, parent, 
index): + alias = self.anchors[node] + if node in self.serialized_nodes: + self.emit(AliasEvent(alias)) + else: + self.serialized_nodes[node] = True + self.descend_resolver(parent, index) + if isinstance(node, ScalarNode): + detected_tag = self.resolve(ScalarNode, node.value, (True, False)) + default_tag = self.resolve(ScalarNode, node.value, (False, True)) + implicit = (node.tag == detected_tag), (node.tag == default_tag) + self.emit(ScalarEvent(alias, node.tag, implicit, node.value, + style=node.style)) + elif isinstance(node, SequenceNode): + implicit = (node.tag + == self.resolve(SequenceNode, node.value, True)) + self.emit(SequenceStartEvent(alias, node.tag, implicit, + flow_style=node.flow_style)) + index = 0 + for item in node.value: + self.serialize_node(item, node, index) + index += 1 + self.emit(SequenceEndEvent()) + elif isinstance(node, MappingNode): + implicit = (node.tag + == self.resolve(MappingNode, node.value, True)) + self.emit(MappingStartEvent(alias, node.tag, implicit, + flow_style=node.flow_style)) + if hasattr(node.value, 'keys'): + for key in node.value.keys(): + self.serialize_node(key, node, None) + self.serialize_node(node.value[key], node, key) + else: + for key, value in node.value: + self.serialize_node(key, node, None) + self.serialize_node(value, node, key) + self.emit(MappingEndEvent()) + self.ascend_resolver() + diff --git a/scripts/external_libs/PyYAML-3.01/lib/yaml/tokens.py b/scripts/external_libs/PyYAML-3.01/lib/yaml/tokens.py new file mode 100644 index 00000000..4fe4522e --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/lib/yaml/tokens.py @@ -0,0 +1,104 @@ + +class Token: + def __init__(self, start_mark, end_mark): + self.start_mark = start_mark + self.end_mark = end_mark + def __repr__(self): + attributes = [key for key in self.__dict__ + if not key.endswith('_mark')] + attributes.sort() + arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) + for key in attributes]) + return '%s(%s)' % (self.__class__.__name__, arguments) + +#class BOMToken(Token): +# id = '<byte order mark>' + +class DirectiveToken(Token): + id = '<directive>' + def __init__(self, name, value, start_mark, end_mark): + self.name = name + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class DocumentStartToken(Token): + id = '<document start>' + +class DocumentEndToken(Token): + id = '<document end>' + +class StreamStartToken(Token): + id = '<stream start>' + def __init__(self, start_mark=None, end_mark=None, + encoding=None): + self.start_mark = start_mark + self.end_mark = end_mark + self.encoding = encoding + +class StreamEndToken(Token): + id = '<stream end>' + +class BlockSequenceStartToken(Token): + id = '<block sequence start>' + +class BlockMappingStartToken(Token): + id = '<block mapping start>' + +class BlockEndToken(Token): + id = '<block end>' + +class FlowSequenceStartToken(Token): + id = '[' + +class FlowMappingStartToken(Token): + id = '{' + +class FlowSequenceEndToken(Token): + id = ']' + +class FlowMappingEndToken(Token): + id = '}' + +class KeyToken(Token): + id = '?' 
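
On the dump side, Serializer.anchor_node() above walks the node graph first and marks every node it reaches twice, generate_anchor() then names those nodes with ANCHOR_TEMPLATE ('id%03d'), and serialize_node() emits an AliasEvent for each repeat visit. A minimal sketch of the effect, assuming the package is importable as yaml; the expected output is illustrative (key order and flow style follow the emitter defaults).

    import yaml

    # Illustrative sketch, not part of the patched sources.
    shared = {"retries": 3}
    print(yaml.dump({"first": shared, "second": shared}))
    # Expected output:
    #   first: &id001 {retries: 3}
    #   second: *id001
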
+ +class ValueToken(Token): + id = ':' + +class BlockEntryToken(Token): + id = '-' + +class FlowEntryToken(Token): + id = ',' + +class AliasToken(Token): + id = '<alias>' + def __init__(self, value, start_mark, end_mark): + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class AnchorToken(Token): + id = '<anchor>' + def __init__(self, value, start_mark, end_mark): + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class TagToken(Token): + id = '<tag>' + def __init__(self, value, start_mark, end_mark): + self.value = value + self.start_mark = start_mark + self.end_mark = end_mark + +class ScalarToken(Token): + id = '<scalar>' + def __init__(self, value, plain, start_mark, end_mark, style=None): + self.value = value + self.plain = plain + self.start_mark = start_mark + self.end_mark = end_mark + self.style = style + diff --git a/scripts/external_libs/PyYAML-3.01/setup.py b/scripts/external_libs/PyYAML-3.01/setup.py new file mode 100644 index 00000000..23c1efac --- /dev/null +++ b/scripts/external_libs/PyYAML-3.01/setup.py @@ -0,0 +1,52 @@ + +NAME = 'PyYAML' +VERSION = '3.01' +DESCRIPTION = "YAML parser and emitter for Python" +LONG_DESCRIPTION = """\ +YAML is a data serialization format designed for human readability and +interaction with scripting languages. PyYAML is a YAML parser and +emitter for Python. + +PyYAML features a complete YAML 1.1 parser, Unicode support, pickle +support, capable extension API, and sensible error messages. PyYAML +supports standard YAML tags and provides Python-specific tags that allow +to represent an arbitrary Python object. + +PyYAML is applicable for a broad range of tasks from complex +configuration files to object serialization and persistance.""" +AUTHOR = "Kirill Simonov" +AUTHOR_EMAIL = 'xi@resolvent.net' +LICENSE = "MIT" +PLATFORMS = "Any" +URL = "http://pyyaml.org/wiki/PyYAML" +DOWNLOAD_URL = "http://pyyaml.org/download/pyyaml/%s-%s.tar.gz" % (NAME, VERSION) +CLASSIFIERS = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Topic :: Software Development :: Libraries :: Python Modules", + "Topic :: Text Processing :: Markup", +] + + +from distutils.core import setup + +setup( + name=NAME, + version=VERSION, + description=DESCRIPTION, + long_description=LONG_DESCRIPTION, + author=AUTHOR, + author_email=AUTHOR_EMAIL, + license=LICENSE, + platforms=PLATFORMS, + url=URL, + download_url=DOWNLOAD_URL, + classifiers=CLASSIFIERS, + + package_dir={'': 'lib'}, + packages=['yaml'], +) + diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/dependency_links.txt b/scripts/external_libs/__init__.py index 8b137891..8b137891 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/dependency_links.txt +++ b/scripts/external_libs/__init__.py diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/PKG-INFO b/scripts/external_libs/enum34-1.0.4/PKG-INFO index 428ce0e3..428ce0e3 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/PKG-INFO +++ b/scripts/external_libs/enum34-1.0.4/PKG-INFO diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/LICENSE b/scripts/external_libs/enum34-1.0.4/enum/LICENSE index 9003b885..9003b885 100755..100644 --- 
a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/LICENSE +++ b/scripts/external_libs/enum34-1.0.4/enum/LICENSE diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/README b/scripts/external_libs/enum34-1.0.4/enum/README index 511af984..511af984 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/README +++ b/scripts/external_libs/enum34-1.0.4/enum/README diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/__init__.py b/scripts/external_libs/enum34-1.0.4/enum/__init__.py index 6a327a8a..6a327a8a 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/__init__.py +++ b/scripts/external_libs/enum34-1.0.4/enum/__init__.py diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/doc/enum.rst b/scripts/external_libs/enum34-1.0.4/enum/doc/enum.rst index 0d429bfc..0d429bfc 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/doc/enum.rst +++ b/scripts/external_libs/enum34-1.0.4/enum/doc/enum.rst diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/enum.py b/scripts/external_libs/enum34-1.0.4/enum/enum.py index 6a327a8a..6a327a8a 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/enum.py +++ b/scripts/external_libs/enum34-1.0.4/enum/enum.py diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/test_enum.py b/scripts/external_libs/enum34-1.0.4/enum/test_enum.py index d7a97942..d7a97942 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/enum/test_enum.py +++ b/scripts/external_libs/enum34-1.0.4/enum/test_enum.py diff --git a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/setup.py b/scripts/external_libs/enum34-1.0.4/setup.py index ecb4944f..4cb9c691 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/enum34-1.0.4/setup.py +++ b/scripts/external_libs/enum34-1.0.4/setup.py @@ -1,44 +1,44 @@ -import os
-import sys
-from distutils.core import setup
-
-if sys.version_info[:2] < (2, 7):
- required = ['ordereddict']
-else:
- required = []
-
-long_desc = open('enum/doc/enum.rst').read()
-
-setup( name='enum34',
- version='1.0.4',
- url='https://pypi.python.org/pypi/enum34',
- packages=['enum'],
- package_data={
- 'enum' : [
- 'LICENSE',
- 'README',
- 'doc/enum.rst',
- 'doc/enum.pdf',
- 'test_enum.py',
- ]
- },
- license='BSD License',
- description='Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4',
- long_description=long_desc,
- provides=['enum'],
- install_requires=required,
- author='Ethan Furman',
- author_email='ethan@stoneleaf.us',
- classifiers=[
- 'Development Status :: 5 - Production/Stable',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: BSD License',
- 'Programming Language :: Python',
- 'Topic :: Software Development',
- 'Programming Language :: Python :: 2.4',
- 'Programming Language :: Python :: 2.5',
- 'Programming Language :: Python :: 2.6',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- ],
- )
+import os +import sys +from distutils.core import setup + +if sys.version_info[:2] < (2, 7): + required = ['ordereddict'] +else: + required = [] + +long_desc = open('enum/doc/enum.rst').read() + +setup( name='enum34', + version='1.0.4', + url='https://pypi.python.org/pypi/enum34', + packages=['enum'], + package_data={ + 'enum' : [ + 'LICENSE', + 'README', + 'doc/enum.rst', + 'doc/enum.pdf', + 'test_enum.py', + ] + }, + license='BSD License', + description='Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4', + long_description=long_desc, + provides=['enum'], + install_requires=required, + author='Ethan Furman', + author_email='ethan@stoneleaf.us', + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: BSD License', + 'Programming Language :: Python', + 'Topic :: Software Development', + 'Programming Language :: Python :: 2.4', + 'Programming Language :: Python :: 2.5', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + ], + ) diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/LICENSE.txt b/scripts/external_libs/jsonrpclib-pelix-0.2.5/LICENSE.txt index 51fca54c..51fca54c 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-0.1.3/LICENSE.txt +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/LICENSE.txt diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/MANIFEST.in b/scripts/external_libs/jsonrpclib-pelix-0.2.5/MANIFEST.in index 42f4acf5..eb0014ad 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/MANIFEST.in +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/MANIFEST.in @@ -1,2 +1,2 @@ -include *.txt
-include README.rst
+include *.txt +include README.rst diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/PKG-INFO b/scripts/external_libs/jsonrpclib-pelix-0.2.5/PKG-INFO index 9d0f3fca..5dce6b1c 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/PKG-INFO +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/PKG-INFO @@ -1,460 +1,460 @@ -Metadata-Version: 1.1
-Name: jsonrpclib-pelix
-Version: 0.2.5
-Summary: This project is an implementation of the JSON-RPC v2.0 specification (backwards-compatible) as a client library, for Python 2.6+ and Python 3.This version is a fork of jsonrpclib by Josh Marshall, usable with Pelix remote services.
-Home-page: http://github.com/tcalmant/jsonrpclib/
-Author: Thomas Calmant
-Author-email: thomas.calmant+github@gmail.com
-License: Apache License 2.0
-Description: JSONRPClib (patched for Pelix)
- ##############################
-
- .. image:: https://pypip.in/license/jsonrpclib-pelix/badge.svg
- :target: https://pypi.python.org/pypi/jsonrpclib-pelix/
-
- .. image:: https://travis-ci.org/tcalmant/jsonrpclib.svg?branch=master
- :target: https://travis-ci.org/tcalmant/jsonrpclib
-
- .. image:: https://coveralls.io/repos/tcalmant/jsonrpclib/badge.svg?branch=master
- :target: https://coveralls.io/r/tcalmant/jsonrpclib?branch=master
-
-
- This library is an implementation of the JSON-RPC specification.
- It supports both the original 1.0 specification, as well as the
- new (proposed) 2.0 specification, which includes batch submission, keyword
- arguments, etc.
-
- It is licensed under the Apache License, Version 2.0
- (http://www.apache.org/licenses/LICENSE-2.0.html).
-
-
- About this version
- ******************
-
- This is a patched version of the original ``jsonrpclib`` project by
- Josh Marshall, available at https://github.com/joshmarshall/jsonrpclib.
-
- The suffix *-pelix* only indicates that this version works with Pelix Remote
- Services, but it is **not** a Pelix specific implementation.
-
- * This version adds support for Python 3, staying compatible with Python 2.
- * It is now possible to use the dispatch_method argument while extending
- the SimpleJSONRPCDispatcher, to use a custom dispatcher.
- This allows this package to be used by Pelix Remote Services.
- * It can use thread pools to control the number of threads spawned to handle
- notification requests and client connections.
- * The modifications added in other forks of this project have been added:
-
- * From https://github.com/drdaeman/jsonrpclib:
-
- * Improved JSON-RPC 1.0 support
- * Less strict error response handling
-
- * From https://github.com/tuomassalo/jsonrpclib:
-
- * In case of a non-pre-defined error, raise an AppError and give access to
- *error.data*
-
- * From https://github.com/dejw/jsonrpclib:
-
- * Custom headers can be sent with request and associated tests
-
- * The support for Unix sockets has been removed, as it is not trivial to convert
- to Python 3 (and I don't use them)
- * This version cannot be installed with the original ``jsonrpclib``, as it uses
- the same package name.
-
-
- Summary
- *******
-
- This library implements the JSON-RPC 2.0 proposed specification in pure Python.
- It is designed to be as compatible with the syntax of ``xmlrpclib`` as possible
- (it extends where possible), so that projects using ``xmlrpclib`` could easily
- be modified to use JSON and experiment with the differences.
-
- It is backwards-compatible with the 1.0 specification, and supports all of the
- new proposed features of 2.0, including:
-
- * Batch submission (via MultiCall)
- * Keyword arguments
- * Notifications (both in a batch and 'normal')
- * Class translation using the ``__jsonclass__`` key.
-
- I've added a "SimpleJSONRPCServer", which is intended to emulate the
- "SimpleXMLRPCServer" from the default Python distribution.
-
-
- Requirements
- ************
-
- It supports ``cjson`` and ``simplejson``, and looks for the parsers in that
- order (searching first for ``cjson``, then for the *built-in* ``json`` in 2.6+,
- and then the ``simplejson`` external library).
- One of these must be installed to use this library, although if you have a
- standard distribution of 2.6+, you should already have one.
- Keep in mind that ``cjson`` is supposed to be the quickest, I believe, so if
- you are going for full-on optimization you may want to pick it up.
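-
- The lookup is roughly equivalent to this sketch (illustrative only, not the
- library's actual code):
-
- .. code-block:: python
-
-     try:
-         import cjson                  # fastest option, if installed
-     except ImportError:
-         try:
-             import json               # built-in since Python 2.6
-         except ImportError:
-             import simplejson as json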
-
- Since the library uses the ``contextlib`` module, you should have at least
- Python 2.5 installed.
-
-
- Installation
- ************
-
- You can install this from PyPI with one of the following commands (sudo
- may be required):
-
- .. code-block:: console
-
- easy_install jsonrpclib-pelix
- pip install jsonrpclib-pelix
-
- Alternatively, you can download the source from the GitHub repository
- at http://github.com/tcalmant/jsonrpclib and manually install it
- with the following commands:
-
- .. code-block:: console
-
- git clone git://github.com/tcalmant/jsonrpclib.git
- cd jsonrpclib
- python setup.py install
-
-
- SimpleJSONRPCServer
- *******************
-
- This is identical in usage (or should be) to the SimpleXMLRPCServer in the
- Python standard library. Some of the differences in features are that it
- obviously supports notification, batch calls, class translation (if left on),
- etc.
- Note: The import line is slightly different from the regular SimpleXMLRPCServer,
- since the SimpleJSONRPCServer is distributed within the ``jsonrpclib`` library.
-
- .. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
-
- server = SimpleJSONRPCServer(('localhost', 8080))
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
- server.serve_forever()
-
- To protect the server with SSL, use the following snippet:
-
- .. code-block:: python
-
- import ssl
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
-
- # Setup the SSL socket
- server = SimpleJSONRPCServer(('localhost', 8080), bind_and_activate=False)
- server.socket = ssl.wrap_socket(server.socket, certfile='server.pem',
- server_side=True)
- server.server_bind()
- server.server_activate()
-
- # ... register functions
- # Start the server
- server.serve_forever()
-
-
- Notification Thread Pool
- ========================
-
- By default, notification calls are handled in the request handling thread.
- It is possible to use a thread pool to handle them, by giving it to the server
- using the ``set_notification_pool()`` method:
-
- .. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
- from jsonrpclib.threadpool import ThreadPool
-
- # Setup the thread pool: between 0 and 10 threads
- pool = ThreadPool(max_threads=10, min_threads=0)
-
- # Don't forget to start it
- pool.start()
-
- # Setup the server
- server = SimpleJSONRPCServer(('localhost', 8080))
- server.set_notification_pool(pool)
-
- # Register methods
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
-
- try:
- server.serve_forever()
- finally:
- # Stop the thread pool (let threads finish their current task)
- pool.stop()
- server.set_notification_pool(None)
-
-
- Threaded server
- ===============
-
- It is also possible to use a thread pool to handle clients requests, using the
- ``PooledJSONRPCServer`` class.
- By default, this class uses a pool of 0 to 30 threads. A custom pool can be given
- with the ``thread_pool`` parameter of the class constructor.
-
- The notification pool and the request pool are different: by default, a server
- with a request pool doesn't have a notification pool.
-
- .. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import PooledJSONRPCServer
- from jsonrpclib.threadpool import ThreadPool
-
- # Setup the notification and request pools
- notif_pool = ThreadPool(max_threads=10, min_threads=0)
- request_pool = ThreadPool(max_threads=50, min_threads=10)
-
- # Don't forget to start them
- notif_pool.start()
- request_pool.start()
-
- # Setup the server
- server = PooledJSONRPCServer(('localhost', 8080),
- thread_pool=request_pool)
- server.set_notification_pool(notif_pool)
-
- # Register methods
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
-
- try:
- server.serve_forever()
- finally:
- # Stop the thread pools (let threads finish their current task)
- request_pool.stop()
- notif_pool.stop()
- server.set_notification_pool(None)
-
- Client Usage
- ************
-
- This is (obviously) taken from a console session.
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080')
- >>> server.add(5,6)
- 11
- >>> server.add(x=5, y=10)
- 15
- >>> server._notify.add(5,6)
- # No result returned...
- >>> batch = jsonrpclib.MultiCall(server)
- >>> batch.add(5, 6)
- >>> batch.ping({'key':'value'})
- >>> batch._notify.add(4, 30)
- >>> results = batch()
- >>> for result in results:
- ...     print(result)
- 11
- {'key': 'value'}
- # Note that there are only two responses -- this is according to spec.
-
- # Clean up
- >>> server('close')()
-
- # Using client history
- >>> history = jsonrpclib.history.History()
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', history=history)
- >>> server.add(5,6)
- 11
- >>> print(history.request)
- {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0",
- "method": "add", "params": [5, 6]}
- >>> print(history.response)
- {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0",
- "result": 11}
-
- # Clean up
- >>> server('close')()
-
- If you need 1.0 functionality, there are a bunch of places you can pass that in,
- although the best is just to give a specific configuration to
- ``jsonrpclib.ServerProxy``:
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> jsonrpclib.config.DEFAULT.version
- 2.0
- >>> config = jsonrpclib.config.Config(version=1.0)
- >>> history = jsonrpclib.history.History()
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', config=config,
- history=history)
- >>> server.add(7, 10)
- 17
- >>> print(history.request)
- {"id": "827b2923-5b37-49a5-8b36-e73920a16d32",
- "method": "add", "params": [7, 10]}
- >>> print(history.response)
- {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", "error": null, "result": 17}
- >>> server('close')()
-
- The equivalent ``loads`` and ``dumps`` functions also exist, although with minor
- modifications. The ``dumps`` arguments are almost identical, but it adds three
- arguments: ``rpcid`` for the 'id' key, ``version`` to specify the JSON-RPC
- compatibility, and ``notify`` if it's a request that you want to be a
- notification.
-
- Additionally, the ``loads`` method does not return the params and method like
- ``xmlrpclib``, but instead a.) parses for errors, raising ProtocolErrors, and
- b.) returns the entire structure of the request / response for manual parsing.
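-
- A rough sketch of what that looks like (the key order of the generated JSON
- and the literal response string here are illustrative only):
-
- .. code-block:: python
-
-     >>> import jsonrpclib
-     >>> # Build a JSON-RPC 2.0 request for add(5, 6); notify=True would make it a notification
-     >>> request = jsonrpclib.dumps([5, 6], 'add', rpcid='1', version=2.0)
-     >>> # loads() checks for errors, then hands back the whole structure
-     >>> parsed = jsonrpclib.loads('{"jsonrpc": "2.0", "id": "1", "result": 11}')
-     >>> parsed['result']
-     11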
-
-
- Additional headers
- ******************
-
- If your remote service requires custom headers in requests, you can pass them
- as a ``headers`` keyword argument when creating the ``ServerProxy``:
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.ServerProxy("http://localhost:8080",
- headers={'X-Test' : 'Test'})
-
- You can also send additional request headers only for certain method invocations:
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.Server("http://localhost:8080")
- >>> with server._additional_headers({'X-Test' : 'Test'}) as test_server:
- ... test_server.ping(42)
- ...
- >>> # The X-Test header will no longer be sent in requests
-
- Of course ``_additional_headers`` contexts can be nested as well.
-
-
- Class Translation
- *****************
-
- I've recently added "automatic" class translation support, although it is
- turned off by default. This can be devastatingly slow if improperly used, so
- the following is just a short list of things to keep in mind when using it.
-
- * Keep It (the object) Simple Stupid. (for exceptions, keep reading.)
- * Do not require init params (for exceptions, keep reading)
- * Getter properties without setters could be dangerous (read: not tested)
-
- If any of the above are issues, use the _serialize method. (see usage below)
- The server and client must BOTH have the ``use_jsonclass`` configuration item on and
- they must both have access to the same libraries used by the objects for
- this to work.
-
- If you have excessively nested arguments, it would be better to turn off the
- translation and manually invoke it on specific objects using
- ``jsonrpclib.jsonclass.dump`` / ``jsonrpclib.jsonclass.load`` (since the default
- behavior recursively goes through attributes and lists / dicts / tuples).
-
- Sample file: *test_obj.py*
-
- .. code-block:: python
-
- # This object is /very/ simple, and the system will look through the
- # attributes and serialize what it can.
- class TestObj(object):
- foo = 'bar'
-
- # This object requires __init__ params, so it uses the _serialize method
- # and returns a tuple of init params and attribute values (the init params
- # can be a dict or a list, but the attribute values must be a dict.)
- class TestSerial(object):
- foo = 'bar'
- def __init__(self, *args):
- self.args = args
- def _serialize(self):
- return (self.args, {'foo':self.foo,})
-
- * Sample usage
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> import test_obj
-
- # History is used only to print the serialized form of beans
- >>> history = jsonrpclib.history.History()
- >>> testobj1 = test_obj.TestObj()
- >>> testobj2 = test_obj.TestSerial()
- >>> server = jsonrpclib.Server('http://localhost:8080', history=history)
-
- # The 'ping' just returns whatever is sent
- >>> ping1 = server.ping(testobj1)
- >>> ping2 = server.ping(testobj2)
-
- >>> print(history.request)
- {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0",
- "method": "ping", "params": [{"__jsonclass__":
- ["test_obj.TestSerial", []], "foo": "bar"}
- ]}
- >>> print(history.response)
- {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0",
- "result": {"__jsonclass__": ["test_obj.TestSerial", []], "foo": "bar"}}
-
- This behavior is turned on by default. To deactivate it, just set the
- ``use_jsonclass`` member of a server ``Config`` to False.
- If you want to use a per-class serialization method, set its name in the
- ``serialize_method`` member of a server ``Config``.
- Finally, if you are using classes that you have defined in the implementation
- (as in, not a separate library), you'll need to add those (on BOTH the server
- and the client) using the ``config.classes.add()`` method.
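-
- A minimal sketch of such a setup on the client side (``LocalBean`` is just a
- placeholder for one of your own classes; the server needs the same calls on
- its own ``Config``):
-
- .. code-block:: python
-
-     import jsonrpclib
-
-     class LocalBean(object):
-         foo = 'bar'
-
-     config = jsonrpclib.config.Config()
-     config.use_jsonclass = True             # already the default
-     config.serialize_method = '_serialize'  # per-class serialization hook
-     config.classes.add(LocalBean)           # register the locally-defined class
-
-     server = jsonrpclib.ServerProxy('http://localhost:8080', config=config)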
-
- Feedback on this "feature" is very, VERY much appreciated.
-
- Why JSON-RPC?
- *************
-
- In my opinion, there are several reasons to choose JSON over XML for RPC:
-
- * Much simpler to read (I suppose this is opinion, but I know I'm right. :)
- * Size / Bandwidth - Main reason, a JSON object representation is just much smaller.
- * Parsing - JSON should be much quicker to parse than XML.
- * Easy class passing with ``jsonclass`` (when enabled)
-
- In the interest of being fair, there are also a few reasons to choose XML
- over JSON:
-
- * Your server doesn't do JSON (rather obvious)
- * Wider XML-RPC support across APIs (can we change this? :))
- * Libraries are more established, i.e. more stable (Let's change this too.)
-
- Tests
- *****
-
- Tests are an almost-verbatim drop from the JSON-RPC specification 2.0 page.
- They can be run using *unittest* or *nosetests*:
-
- .. code-block:: console
-
- python -m unittest discover tests
- python3 -m unittest discover tests
- nosetests tests
-
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.0
-Classifier: Programming Language :: Python :: 3.1
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
+Metadata-Version: 1.1 +Name: jsonrpclib-pelix +Version: 0.2.5 +Summary: This project is an implementation of the JSON-RPC v2.0 specification (backwards-compatible) as a client library, for Python 2.6+ and Python 3.This version is a fork of jsonrpclib by Josh Marshall, usable with Pelix remote services. +Home-page: http://github.com/tcalmant/jsonrpclib/ +Author: Thomas Calmant +Author-email: thomas.calmant+github@gmail.com +License: Apache License 2.0 +Description: JSONRPClib (patched for Pelix) + ############################## + + .. image:: https://pypip.in/license/jsonrpclib-pelix/badge.svg + :target: https://pypi.python.org/pypi/jsonrpclib-pelix/ + + .. image:: https://travis-ci.org/tcalmant/jsonrpclib.svg?branch=master + :target: https://travis-ci.org/tcalmant/jsonrpclib + + .. image:: https://coveralls.io/repos/tcalmant/jsonrpclib/badge.svg?branch=master + :target: https://coveralls.io/r/tcalmant/jsonrpclib?branch=master + + + This library is an implementation of the JSON-RPC specification. + It supports both the original 1.0 specification, as well as the + new (proposed) 2.0 specification, which includes batch submission, keyword + arguments, etc. + + It is licensed under the Apache License, Version 2.0 + (http://www.apache.org/licenses/LICENSE-2.0.html). + + + About this version + ****************** + + This is a patched version of the original ``jsonrpclib`` project by + Josh Marshall, available at https://github.com/joshmarshall/jsonrpclib. + + The suffix *-pelix* only indicates that this version works with Pelix Remote + Services, but it is **not** a Pelix specific implementation. + + * This version adds support for Python 3, staying compatible with Python 2. + * It is now possible to use the dispatch_method argument while extending + the SimpleJSONRPCDispatcher, to use a custom dispatcher. + This allows to use this package by Pelix Remote Services. + * It can use thread pools to control the number of threads spawned to handle + notification requests and clients connections. + * The modifications added in other forks of this project have been added: + + * From https://github.com/drdaeman/jsonrpclib: + + * Improved JSON-RPC 1.0 support + * Less strict error response handling + + * From https://github.com/tuomassalo/jsonrpclib: + + * In case of a non-pre-defined error, raise an AppError and give access to + *error.data* + + * From https://github.com/dejw/jsonrpclib: + + * Custom headers can be sent with request and associated tests + + * The support for Unix sockets has been removed, as it is not trivial to convert + to Python 3 (and I don't use them) + * This version cannot be installed with the original ``jsonrpclib``, as it uses + the same package name. + + + Summary + ******* + + This library implements the JSON-RPC 2.0 proposed specification in pure Python. + It is designed to be as compatible with the syntax of ``xmlrpclib`` as possible + (it extends where possible), so that projects using ``xmlrpclib`` could easily + be modified to use JSON and experiment with the differences. + + It is backwards-compatible with the 1.0 specification, and supports all of the + new proposed features of 2.0, including: + + * Batch submission (via MultiCall) + * Keyword arguments + * Notifications (both in a batch and 'normal') + * Class translation using the ``__jsonclass__`` key. + + I've added a "SimpleJSONRPCServer", which is intended to emulate the + "SimpleXMLRPCServer" from the default Python distribution. 
+ + + Requirements + ************ + + It supports ``cjson`` and ``simplejson``, and looks for the parsers in that + order (searching first for ``cjson``, then for the *built-in* ``json`` in 2.6+, + and then the ``simplejson`` external library). + One of these must be installed to use this library, although if you have a + standard distribution of 2.6+, you should already have one. + Keep in mind that ``cjson`` is supposed to be the quickest, I believe, so if + you are going for full-on optimization you may want to pick it up. + + Since library uses ``contextlib`` module, you should have at least Python 2.5 + installed. + + + Installation + ************ + + You can install this from PyPI with one of the following commands (sudo + may be required): + + .. code-block:: console + + easy_install jsonrpclib-pelix + pip install jsonrpclib-pelix + + Alternatively, you can download the source from the GitHub repository + at http://github.com/tcalmant/jsonrpclib and manually install it + with the following commands: + + .. code-block:: console + + git clone git://github.com/tcalmant/jsonrpclib.git + cd jsonrpclib + python setup.py install + + + SimpleJSONRPCServer + ******************* + + This is identical in usage (or should be) to the SimpleXMLRPCServer in the + Python standard library. Some of the differences in features are that it + obviously supports notification, batch calls, class translation (if left on), + etc. + Note: The import line is slightly different from the regular SimpleXMLRPCServer, + since the SimpleJSONRPCServer is distributed within the ``jsonrpclib`` library. + + .. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer + + server = SimpleJSONRPCServer(('localhost', 8080)) + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_function(lambda x: x, 'ping') + server.serve_forever() + + To start protect the server with SSL, use the following snippet: + + .. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer + + # Setup the SSL socket + server = SimpleJSONRPCServer(('localhost', 8080), bind_and_activate=False) + server.socket = ssl.wrap_socket(server.socket, certfile='server.pem', + server_side=True) + server.server_bind() + server.server_activate() + + # ... register functions + # Start the server + server.serve_forever() + + + Notification Thread Pool + ======================== + + By default, notification calls are handled in the request handling thread. + It is possible to use a thread pool to handle them, by giving it to the server + using the ``set_notification_pool()`` method: + + .. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer + from jsonrpclib.threadpool import ThreadPool + + # Setup the thread pool: between 0 and 10 threads + pool = ThreadPool(max_threads=10, min_threads=0) + + # Don't forget to start it + pool.start() + + # Setup the server + server = SimpleJSONRPCServer(('localhost', 8080), config) + server.set_notification_pool(pool) + + # Register methods + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_function(lambda x: x, 'ping') + + try: + server.serve_forever() + finally: + # Stop the thread pool (let threads finish their current task) + pool.stop() + server.set_notification_pool(None) + + + Threaded server + =============== + + It is also possible to use a thread pool to handle clients requests, using the + ``PooledJSONRPCServer`` class. 
+ By default, this class uses pool of 0 to 30 threads. A custom pool can be given + with the ``thread_pool`` parameter of the class constructor. + + The notification pool and the request pool are different: by default, a server + with a request pool doesn't have a notification pool. + + .. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import PooledJSONRPCServer + from jsonrpclib.threadpool import ThreadPool + + # Setup the notification and request pools + nofif_pool = ThreadPool(max_threads=10, min_threads=0) + request_pool = ThreadPool(max_threads=50, min_threads=10) + + # Don't forget to start them + nofif_pool.start() + request_pool.start() + + # Setup the server + server = PooledJSONRPCServer(('localhost', 8080), config, + thread_pool=request_pool) + server.set_notification_pool(nofif_pool) + + # Register methods + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_function(lambda x: x, 'ping') + + try: + server.serve_forever() + finally: + # Stop the thread pools (let threads finish their current task) + request_pool.stop() + nofif_pool.stop() + server.set_notification_pool(None) + + Client Usage + ************ + + This is (obviously) taken from a console session. + + .. code-block:: python + + >>> import jsonrpclib + >>> server = jsonrpclib.ServerProxy('http://localhost:8080') + >>> server.add(5,6) + 11 + >>> server.add(x=5, y=10) + 15 + >>> server._notify.add(5,6) + # No result returned... + >>> batch = jsonrpclib.MultiCall(server) + >>> batch.add(5, 6) + >>> batch.ping({'key':'value'}) + >>> batch._notify.add(4, 30) + >>> results = batch() + >>> for result in results: + >>> ... print(result) + 11 + {'key': 'value'} + # Note that there are only two responses -- this is according to spec. + + # Clean up + >>> server('close')() + + # Using client history + >>> history = jsonrpclib.history.History() + >>> server = jsonrpclib.ServerProxy('http://localhost:8080', history=history) + >>> server.add(5,6) + 11 + >>> print(history.request) + {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0", + "method": "add", "params": [5, 6]} + >>> print(history.response) + {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0", + "result": 11} + + # Clean up + >>> server('close')() + + If you need 1.0 functionality, there are a bunch of places you can pass that in, + although the best is just to give a specific configuration to + ``jsonrpclib.ServerProxy``: + + .. code-block:: python + + >>> import jsonrpclib + >>> jsonrpclib.config.DEFAULT.version + 2.0 + >>> config = jsonrpclib.config.Config(version=1.0) + >>> history = jsonrpclib.history.History() + >>> server = jsonrpclib.ServerProxy('http://localhost:8080', config=config, + history=history) + >>> server.add(7, 10) + 17 + >>> print(history.request) + {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", + "method": "add", "params": [7, 10]} + >>> print(history.response) + {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", "error": null, "result": 17} + >>> server('close')() + + The equivalent ``loads`` and ``dumps`` functions also exist, although with minor + modifications. The ``dumps`` arguments are almost identical, but it adds three + arguments: ``rpcid`` for the 'id' key, ``version`` to specify the JSON-RPC + compatibility, and ``notify`` if it's a request that you want to be a + notification. + + Additionally, the ``loads`` method does not return the params and method like + ``xmlrpclib``, but instead a.) parses for errors, raising ProtocolErrors, and + b.) 
returns the entire structure of the request / response for manual parsing. + + + Additional headers + ****************** + + If your remote service requires custom headers in request, you can pass them + as as a ``headers`` keyword argument, when creating the ``ServerProxy``: + + .. code-block:: python + + >>> import jsonrpclib + >>> server = jsonrpclib.ServerProxy("http://localhost:8080", + headers={'X-Test' : 'Test'}) + + You can also put additional request headers only for certain method invocation: + + .. code-block:: python + + >>> import jsonrpclib + >>> server = jsonrpclib.Server("http://localhost:8080") + >>> with server._additional_headers({'X-Test' : 'Test'}) as test_server: + ... test_server.ping(42) + ... + >>> # X-Test header will be no longer sent in requests + + Of course ``_additional_headers`` contexts can be nested as well. + + + Class Translation + ***************** + + I've recently added "automatic" class translation support, although it is + turned off by default. This can be devastatingly slow if improperly used, so + the following is just a short list of things to keep in mind when using it. + + * Keep It (the object) Simple Stupid. (for exceptions, keep reading.) + * Do not require init params (for exceptions, keep reading) + * Getter properties without setters could be dangerous (read: not tested) + + If any of the above are issues, use the _serialize method. (see usage below) + The server and client must BOTH have use_jsonclass configuration item on and + they must both have access to the same libraries used by the objects for + this to work. + + If you have excessively nested arguments, it would be better to turn off the + translation and manually invoke it on specific objects using + ``jsonrpclib.jsonclass.dump`` / ``jsonrpclib.jsonclass.load`` (since the default + behavior recursively goes through attributes and lists / dicts / tuples). + + Sample file: *test_obj.py* + + .. code-block:: python + + # This object is /very/ simple, and the system will look through the + # attributes and serialize what it can. + class TestObj(object): + foo = 'bar' + + # This object requires __init__ params, so it uses the _serialize method + # and returns a tuple of init params and attribute values (the init params + # can be a dict or a list, but the attribute values must be a dict.) + class TestSerial(object): + foo = 'bar' + def __init__(self, *args): + self.args = args + def _serialize(self): + return (self.args, {'foo':self.foo,}) + + * Sample usage + + .. code-block:: python + + >>> import jsonrpclib + >>> import test_obj + + # History is used only to print the serialized form of beans + >>> history = jsonrpclib.history.History() + >>> testobj1 = test_obj.TestObj() + >>> testobj2 = test_obj.TestSerial() + >>> server = jsonrpclib.Server('http://localhost:8080', history=history) + + # The 'ping' just returns whatever is sent + >>> ping1 = server.ping(testobj1) + >>> ping2 = server.ping(testobj2) + + >>> print(history.request) + {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0", + "method": "ping", "params": [{"__jsonclass__": + ["test_obj.TestSerial", []], "foo": "bar"} + ]} + >>> print(history.response) + {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0", + "result": {"__jsonclass__": ["test_obj.TestSerial", []], "foo": "bar"}} + + This behavior is turned by default. To deactivate it, just set the + ``use_jsonclass`` member of a server ``Config`` to False. 
+ If you want to use a per-class serialization method, set its name in the + ``serialize_method`` member of a server ``Config``. + Finally, if you are using classes that you have defined in the implementation + (as in, not a separate library), you'll need to add those (on BOTH the server + and the client) using the ``config.classes.add()`` method. + + Feedback on this "feature" is very, VERY much appreciated. + + Why JSON-RPC? + ************* + + In my opinion, there are several reasons to choose JSON over XML for RPC: + + * Much simpler to read (I suppose this is opinion, but I know I'm right. :) + * Size / Bandwidth - Main reason, a JSON object representation is just much smaller. + * Parsing - JSON should be much quicker to parse than XML. + * Easy class passing with ``jsonclass`` (when enabled) + + In the interest of being fair, there are also a few reasons to choose XML + over JSON: + + * Your server doesn't do JSON (rather obvious) + * Wider XML-RPC support across APIs (can we change this? :)) + * Libraries are more established, i.e. more stable (Let's change this too.) + + Tests + ***** + + Tests are an almost-verbatim drop from the JSON-RPC specification 2.0 page. + They can be run using *unittest* or *nosetest*: + + .. code-block:: console + + python -m unittest discover tests + python3 -m unittest discover tests + nosetests tests + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.0 +Classifier: Programming Language :: Python :: 3.1 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/README.rst b/scripts/external_libs/jsonrpclib-pelix-0.2.5/README.rst index 29da2708..19001933 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/README.rst +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/README.rst @@ -1,438 +1,438 @@ -JSONRPClib (patched for Pelix)
-##############################
-
-.. image:: https://pypip.in/license/jsonrpclib-pelix/badge.svg
- :target: https://pypi.python.org/pypi/jsonrpclib-pelix/
-
-.. image:: https://travis-ci.org/tcalmant/jsonrpclib.svg?branch=master
- :target: https://travis-ci.org/tcalmant/jsonrpclib
-
-.. image:: https://coveralls.io/repos/tcalmant/jsonrpclib/badge.svg?branch=master
- :target: https://coveralls.io/r/tcalmant/jsonrpclib?branch=master
-
-
-This library is an implementation of the JSON-RPC specification.
-It supports both the original 1.0 specification, as well as the
-new (proposed) 2.0 specification, which includes batch submission, keyword
-arguments, etc.
-
-It is licensed under the Apache License, Version 2.0
-(http://www.apache.org/licenses/LICENSE-2.0.html).
-
-
-About this version
-******************
-
-This is a patched version of the original ``jsonrpclib`` project by
-Josh Marshall, available at https://github.com/joshmarshall/jsonrpclib.
-
-The suffix *-pelix* only indicates that this version works with Pelix Remote
-Services, but it is **not** a Pelix specific implementation.
-
-* This version adds support for Python 3, staying compatible with Python 2.
-* It is now possible to use the dispatch_method argument while extending
- the SimpleJSONRPCDispatcher, to use a custom dispatcher.
- This allows this package to be used by Pelix Remote Services.
-* It can use thread pools to control the number of threads spawned to handle
- notification requests and client connections.
-* The modifications added in other forks of this project have been added:
-
- * From https://github.com/drdaeman/jsonrpclib:
-
- * Improved JSON-RPC 1.0 support
- * Less strict error response handling
-
- * From https://github.com/tuomassalo/jsonrpclib:
-
- * In case of a non-pre-defined error, raise an AppError and give access to
- *error.data*
-
- * From https://github.com/dejw/jsonrpclib:
-
- * Custom headers can be sent with request and associated tests
-
-* The support for Unix sockets has been removed, as it is not trivial to convert
- to Python 3 (and I don't use them)
-* This version cannot be installed with the original ``jsonrpclib``, as it uses
- the same package name.
-
-
-Summary
-*******
-
-This library implements the JSON-RPC 2.0 proposed specification in pure Python.
-It is designed to be as compatible with the syntax of ``xmlrpclib`` as possible
-(it extends where possible), so that projects using ``xmlrpclib`` could easily
-be modified to use JSON and experiment with the differences.
-
-It is backwards-compatible with the 1.0 specification, and supports all of the
-new proposed features of 2.0, including:
-
-* Batch submission (via MultiCall)
-* Keyword arguments
-* Notifications (both in a batch and 'normal')
-* Class translation using the ``__jsonclass__`` key.
-
-I've added a "SimpleJSONRPCServer", which is intended to emulate the
-"SimpleXMLRPCServer" from the default Python distribution.
-
-
-Requirements
-************
-
-It supports ``cjson`` and ``simplejson``, and looks for the parsers in that
-order (searching first for ``cjson``, then for the *built-in* ``json`` in 2.6+,
-and then the ``simplejson`` external library).
-One of these must be installed to use this library, although if you have a
-standard distribution of 2.6+, you should already have one.
-Keep in mind that ``cjson`` is supposed to be the quickest, I believe, so if
-you are going for full-on optimization you may want to pick it up.
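-
-The lookup is roughly equivalent to this sketch (illustrative only, not the
-library's actual code):
-
-.. code-block:: python
-
-    try:
-        import cjson                  # fastest option, if installed
-    except ImportError:
-        try:
-            import json               # built-in since Python 2.6
-        except ImportError:
-            import simplejson as json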
-
-Since the library uses the ``contextlib`` module, you should have at least
-Python 2.5 installed.
-
-
-Installation
-************
-
-You can install this from PyPI with one of the following commands (sudo
-may be required):
-
-.. code-block:: console
-
- easy_install jsonrpclib-pelix
- pip install jsonrpclib-pelix
-
-Alternatively, you can download the source from the GitHub repository
-at http://github.com/tcalmant/jsonrpclib and manually install it
-with the following commands:
-
-.. code-block:: console
-
- git clone git://github.com/tcalmant/jsonrpclib.git
- cd jsonrpclib
- python setup.py install
-
-
-SimpleJSONRPCServer
-*******************
-
-This is identical in usage (or should be) to the SimpleXMLRPCServer in the
-Python standard library. Some of the differences in features are that it
-obviously supports notification, batch calls, class translation (if left on),
-etc.
-Note: The import line is slightly different from the regular SimpleXMLRPCServer,
-since the SimpleJSONRPCServer is distributed within the ``jsonrpclib`` library.
-
-.. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
-
- server = SimpleJSONRPCServer(('localhost', 8080))
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
- server.serve_forever()
-
-To protect the server with SSL, use the following snippet:
-
-.. code-block:: python
-
- import ssl
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
-
- # Setup the SSL socket
- server = SimpleJSONRPCServer(('localhost', 8080), bind_and_activate=False)
- server.socket = ssl.wrap_socket(server.socket, certfile='server.pem',
- server_side=True)
- server.server_bind()
- server.server_activate()
-
- # ... register functions
- # Start the server
- server.serve_forever()
-
-
-Notification Thread Pool
-========================
-
-By default, notification calls are handled in the request handling thread.
-It is possible to use a thread pool to handle them, by giving it to the server
-using the ``set_notification_pool()`` method:
-
-.. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
- from jsonrpclib.threadpool import ThreadPool
-
- # Setup the thread pool: between 0 and 10 threads
- pool = ThreadPool(max_threads=10, min_threads=0)
-
- # Don't forget to start it
- pool.start()
-
- # Setup the server
- server = SimpleJSONRPCServer(('localhost', 8080))
- server.set_notification_pool(pool)
-
- # Register methods
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
-
- try:
- server.serve_forever()
- finally:
- # Stop the thread pool (let threads finish their current task)
- pool.stop()
- server.set_notification_pool(None)
-
-
-Threaded server
-===============
-
-It is also possible to use a thread pool to handle clients requests, using the
-``PooledJSONRPCServer`` class.
-By default, this class uses a pool of 0 to 30 threads. A custom pool can be given
-with the ``thread_pool`` parameter of the class constructor.
-
-The notification pool and the request pool are different: by default, a server
-with a request pool doesn't have a notification pool.
-
-.. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import PooledJSONRPCServer
- from jsonrpclib.threadpool import ThreadPool
-
- # Setup the notification and request pools
- notif_pool = ThreadPool(max_threads=10, min_threads=0)
- request_pool = ThreadPool(max_threads=50, min_threads=10)
-
- # Don't forget to start them
- notif_pool.start()
- request_pool.start()
-
- # Setup the server
- server = PooledJSONRPCServer(('localhost', 8080),
- thread_pool=request_pool)
- server.set_notification_pool(notif_pool)
-
- # Register methods
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
-
- try:
- server.serve_forever()
- finally:
- # Stop the thread pools (let threads finish their current task)
- request_pool.stop()
- notif_pool.stop()
- server.set_notification_pool(None)
-
-Client Usage
-************
-
-This is (obviously) taken from a console session.
-
-.. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080')
- >>> server.add(5,6)
- 11
- >>> server.add(x=5, y=10)
- 15
- >>> server._notify.add(5,6)
- # No result returned...
- >>> batch = jsonrpclib.MultiCall(server)
- >>> batch.add(5, 6)
- >>> batch.ping({'key':'value'})
- >>> batch._notify.add(4, 30)
- >>> results = batch()
- >>> for result in results:
- ...     print(result)
- 11
- {'key': 'value'}
- # Note that there are only two responses -- this is according to spec.
-
- # Clean up
- >>> server('close')()
-
- # Using client history
- >>> history = jsonrpclib.history.History()
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', history=history)
- >>> server.add(5,6)
- 11
- >>> print(history.request)
- {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0",
- "method": "add", "params": [5, 6]}
- >>> print(history.response)
- {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0",
- "result": 11}
-
- # Clean up
- >>> server('close')()
-
-If you need 1.0 functionality, there are a bunch of places you can pass that in,
-although the best is just to give a specific configuration to
-``jsonrpclib.ServerProxy``:
-
-.. code-block:: python
-
- >>> import jsonrpclib
- >>> jsonrpclib.config.DEFAULT.version
- 2.0
- >>> config = jsonrpclib.config.Config(version=1.0)
- >>> history = jsonrpclib.history.History()
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', config=config,
- history=history)
- >>> server.add(7, 10)
- 17
- >>> print(history.request)
- {"id": "827b2923-5b37-49a5-8b36-e73920a16d32",
- "method": "add", "params": [7, 10]}
- >>> print(history.response)
- {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", "error": null, "result": 17}
- >>> server('close')()
-
-The equivalent ``loads`` and ``dumps`` functions also exist, although with minor
-modifications. The ``dumps`` arguments are almost identical, but it adds three
-arguments: ``rpcid`` for the 'id' key, ``version`` to specify the JSON-RPC
-compatibility, and ``notify`` if it's a request that you want to be a
-notification.
-
-Additionally, the ``loads`` method does not return the params and method like
-``xmlrpclib``, but instead a.) parses for errors, raising ProtocolErrors, and
-b.) returns the entire structure of the request / response for manual parsing.
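-
-A rough sketch of what that looks like (the key order of the generated JSON
-and the literal response string here are illustrative only):
-
-.. code-block:: python
-
-    >>> import jsonrpclib
-    >>> # Build a JSON-RPC 2.0 request for add(5, 6); notify=True would make it a notification
-    >>> request = jsonrpclib.dumps([5, 6], 'add', rpcid='1', version=2.0)
-    >>> # loads() checks for errors, then hands back the whole structure
-    >>> parsed = jsonrpclib.loads('{"jsonrpc": "2.0", "id": "1", "result": 11}')
-    >>> parsed['result']
-    11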
-
-
-Additional headers
-******************
-
-If your remote service requires custom headers in requests, you can pass them
-as a ``headers`` keyword argument when creating the ``ServerProxy``:
-
-.. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.ServerProxy("http://localhost:8080",
- headers={'X-Test' : 'Test'})
-
-You can also send additional request headers only for certain method invocations:
-
-.. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.Server("http://localhost:8080")
- >>> with server._additional_headers({'X-Test' : 'Test'}) as test_server:
- ... test_server.ping(42)
- ...
- >>> # The X-Test header will no longer be sent in requests
-
-Of course ``_additional_headers`` contexts can be nested as well.
-
-
-Class Translation
-*****************
-
-I've recently added "automatic" class translation support, although it is
-turned off by default. This can be devastatingly slow if improperly used, so
-the following is just a short list of things to keep in mind when using it.
-
-* Keep It (the object) Simple Stupid. (for exceptions, keep reading.)
-* Do not require init params (for exceptions, keep reading)
-* Getter properties without setters could be dangerous (read: not tested)
-
-If any of the above are issues, use the _serialize method. (see usage below)
-The server and client must BOTH have the ``use_jsonclass`` configuration item on and
-they must both have access to the same libraries used by the objects for
-this to work.
-
-If you have excessively nested arguments, it would be better to turn off the
-translation and manually invoke it on specific objects using
-``jsonrpclib.jsonclass.dump`` / ``jsonrpclib.jsonclass.load`` (since the default
-behavior recursively goes through attributes and lists / dicts / tuples).
-
- Sample file: *test_obj.py*
-
-.. code-block:: python
-
- # This object is /very/ simple, and the system will look through the
- # attributes and serialize what it can.
- class TestObj(object):
- foo = 'bar'
-
- # This object requires __init__ params, so it uses the _serialize method
- # and returns a tuple of init params and attribute values (the init params
- # can be a dict or a list, but the attribute values must be a dict.)
- class TestSerial(object):
- foo = 'bar'
- def __init__(self, *args):
- self.args = args
- def _serialize(self):
- return (self.args, {'foo':self.foo,})
-
-* Sample usage
-
-.. code-block:: python
-
- >>> import jsonrpclib
- >>> import test_obj
-
- # History is used only to print the serialized form of beans
- >>> history = jsonrpclib.history.History()
- >>> testobj1 = test_obj.TestObj()
- >>> testobj2 = test_obj.TestSerial()
- >>> server = jsonrpclib.Server('http://localhost:8080', history=history)
-
- # The 'ping' just returns whatever is sent
- >>> ping1 = server.ping(testobj1)
- >>> ping2 = server.ping(testobj2)
-
- >>> print(history.request)
- {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0",
- "method": "ping", "params": [{"__jsonclass__":
- ["test_obj.TestSerial", []], "foo": "bar"}
- ]}
- >>> print(history.response)
- {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0",
- "result": {"__jsonclass__": ["test_obj.TestSerial", []], "foo": "bar"}}
-
-This behavior is turned on by default. To deactivate it, just set the
-``use_jsonclass`` member of a server ``Config`` to False.
-If you want to use a per-class serialization method, set its name in the
-``serialize_method`` member of a server ``Config``.
-Finally, if you are using classes that you have defined in the implementation
-(as in, not a separate library), you'll need to add those (on BOTH the server
-and the client) using the ``config.classes.add()`` method.
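-
-A minimal sketch of such a setup on the client side (``LocalBean`` is just a
-placeholder for one of your own classes; the server needs the same calls on
-its own ``Config``):
-
-.. code-block:: python
-
-    import jsonrpclib
-
-    class LocalBean(object):
-        foo = 'bar'
-
-    config = jsonrpclib.config.Config()
-    config.use_jsonclass = True             # already the default
-    config.serialize_method = '_serialize'  # per-class serialization hook
-    config.classes.add(LocalBean)           # register the locally-defined class
-
-    server = jsonrpclib.ServerProxy('http://localhost:8080', config=config)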
-
-Feedback on this "feature" is very, VERY much appreciated.
-
-Why JSON-RPC?
-*************
-
-In my opinion, there are several reasons to choose JSON over XML for RPC:
-
-* Much simpler to read (I suppose this is opinion, but I know I'm right. :)
-* Size / Bandwidth - Main reason, a JSON object representation is just much smaller.
-* Parsing - JSON should be much quicker to parse than XML.
-* Easy class passing with ``jsonclass`` (when enabled)
-
-In the interest of being fair, there are also a few reasons to choose XML
-over JSON:
-
-* Your server doesn't do JSON (rather obvious)
-* Wider XML-RPC support across APIs (can we change this? :))
-* Libraries are more established, i.e. more stable (Let's change this too.)
-
-Tests
-*****
-
-Tests are an almost-verbatim drop from the JSON-RPC specification 2.0 page.
-They can be run using *unittest* or *nosetests*:
-
-.. code-block:: console
-
- python -m unittest discover tests
- python3 -m unittest discover tests
- nosetests tests
+JSONRPClib (patched for Pelix) +############################## + +.. image:: https://pypip.in/license/jsonrpclib-pelix/badge.svg + :target: https://pypi.python.org/pypi/jsonrpclib-pelix/ + +.. image:: https://travis-ci.org/tcalmant/jsonrpclib.svg?branch=master + :target: https://travis-ci.org/tcalmant/jsonrpclib + +.. image:: https://coveralls.io/repos/tcalmant/jsonrpclib/badge.svg?branch=master + :target: https://coveralls.io/r/tcalmant/jsonrpclib?branch=master + + +This library is an implementation of the JSON-RPC specification. +It supports both the original 1.0 specification, as well as the +new (proposed) 2.0 specification, which includes batch submission, keyword +arguments, etc. + +It is licensed under the Apache License, Version 2.0 +(http://www.apache.org/licenses/LICENSE-2.0.html). + + +About this version +****************** + +This is a patched version of the original ``jsonrpclib`` project by +Josh Marshall, available at https://github.com/joshmarshall/jsonrpclib. + +The suffix *-pelix* only indicates that this version works with Pelix Remote +Services, but it is **not** a Pelix specific implementation. + +* This version adds support for Python 3, staying compatible with Python 2. +* It is now possible to use the dispatch_method argument while extending + the SimpleJSONRPCDispatcher, to use a custom dispatcher. + This allows to use this package by Pelix Remote Services. +* It can use thread pools to control the number of threads spawned to handle + notification requests and clients connections. +* The modifications added in other forks of this project have been added: + + * From https://github.com/drdaeman/jsonrpclib: + + * Improved JSON-RPC 1.0 support + * Less strict error response handling + + * From https://github.com/tuomassalo/jsonrpclib: + + * In case of a non-pre-defined error, raise an AppError and give access to + *error.data* + + * From https://github.com/dejw/jsonrpclib: + + * Custom headers can be sent with request and associated tests + +* The support for Unix sockets has been removed, as it is not trivial to convert + to Python 3 (and I don't use them) +* This version cannot be installed with the original ``jsonrpclib``, as it uses + the same package name. + + +Summary +******* + +This library implements the JSON-RPC 2.0 proposed specification in pure Python. +It is designed to be as compatible with the syntax of ``xmlrpclib`` as possible +(it extends where possible), so that projects using ``xmlrpclib`` could easily +be modified to use JSON and experiment with the differences. + +It is backwards-compatible with the 1.0 specification, and supports all of the +new proposed features of 2.0, including: + +* Batch submission (via MultiCall) +* Keyword arguments +* Notifications (both in a batch and 'normal') +* Class translation using the ``__jsonclass__`` key. + +I've added a "SimpleJSONRPCServer", which is intended to emulate the +"SimpleXMLRPCServer" from the default Python distribution. + + +Requirements +************ + +It supports ``cjson`` and ``simplejson``, and looks for the parsers in that +order (searching first for ``cjson``, then for the *built-in* ``json`` in 2.6+, +and then the ``simplejson`` external library). +One of these must be installed to use this library, although if you have a +standard distribution of 2.6+, you should already have one. +Keep in mind that ``cjson`` is supposed to be the quickest, I believe, so if +you are going for full-on optimization you may want to pick it up. 
+ +Since library uses ``contextlib`` module, you should have at least Python 2.5 +installed. + + +Installation +************ + +You can install this from PyPI with one of the following commands (sudo +may be required): + +.. code-block:: console + + easy_install jsonrpclib-pelix + pip install jsonrpclib-pelix + +Alternatively, you can download the source from the GitHub repository +at http://github.com/tcalmant/jsonrpclib and manually install it +with the following commands: + +.. code-block:: console + + git clone git://github.com/tcalmant/jsonrpclib.git + cd jsonrpclib + python setup.py install + + +SimpleJSONRPCServer +******************* + +This is identical in usage (or should be) to the SimpleXMLRPCServer in the +Python standard library. Some of the differences in features are that it +obviously supports notification, batch calls, class translation (if left on), +etc. +Note: The import line is slightly different from the regular SimpleXMLRPCServer, +since the SimpleJSONRPCServer is distributed within the ``jsonrpclib`` library. + +.. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer + + server = SimpleJSONRPCServer(('localhost', 8080)) + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_function(lambda x: x, 'ping') + server.serve_forever() + +To start protect the server with SSL, use the following snippet: + +.. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer + + # Setup the SSL socket + server = SimpleJSONRPCServer(('localhost', 8080), bind_and_activate=False) + server.socket = ssl.wrap_socket(server.socket, certfile='server.pem', + server_side=True) + server.server_bind() + server.server_activate() + + # ... register functions + # Start the server + server.serve_forever() + + +Notification Thread Pool +======================== + +By default, notification calls are handled in the request handling thread. +It is possible to use a thread pool to handle them, by giving it to the server +using the ``set_notification_pool()`` method: + +.. code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer + from jsonrpclib.threadpool import ThreadPool + + # Setup the thread pool: between 0 and 10 threads + pool = ThreadPool(max_threads=10, min_threads=0) + + # Don't forget to start it + pool.start() + + # Setup the server + server = SimpleJSONRPCServer(('localhost', 8080), config) + server.set_notification_pool(pool) + + # Register methods + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_function(lambda x: x, 'ping') + + try: + server.serve_forever() + finally: + # Stop the thread pool (let threads finish their current task) + pool.stop() + server.set_notification_pool(None) + + +Threaded server +=============== + +It is also possible to use a thread pool to handle clients requests, using the +``PooledJSONRPCServer`` class. +By default, this class uses pool of 0 to 30 threads. A custom pool can be given +with the ``thread_pool`` parameter of the class constructor. + +The notification pool and the request pool are different: by default, a server +with a request pool doesn't have a notification pool. + +.. 
code-block:: python + + from jsonrpclib.SimpleJSONRPCServer import PooledJSONRPCServer + from jsonrpclib.threadpool import ThreadPool + + # Setup the notification and request pools + nofif_pool = ThreadPool(max_threads=10, min_threads=0) + request_pool = ThreadPool(max_threads=50, min_threads=10) + + # Don't forget to start them + nofif_pool.start() + request_pool.start() + + # Setup the server + server = PooledJSONRPCServer(('localhost', 8080), config, + thread_pool=request_pool) + server.set_notification_pool(nofif_pool) + + # Register methods + server.register_function(pow) + server.register_function(lambda x,y: x+y, 'add') + server.register_function(lambda x: x, 'ping') + + try: + server.serve_forever() + finally: + # Stop the thread pools (let threads finish their current task) + request_pool.stop() + nofif_pool.stop() + server.set_notification_pool(None) + +Client Usage +************ + +This is (obviously) taken from a console session. + +.. code-block:: python + + >>> import jsonrpclib + >>> server = jsonrpclib.ServerProxy('http://localhost:8080') + >>> server.add(5,6) + 11 + >>> server.add(x=5, y=10) + 15 + >>> server._notify.add(5,6) + # No result returned... + >>> batch = jsonrpclib.MultiCall(server) + >>> batch.add(5, 6) + >>> batch.ping({'key':'value'}) + >>> batch._notify.add(4, 30) + >>> results = batch() + >>> for result in results: + >>> ... print(result) + 11 + {'key': 'value'} + # Note that there are only two responses -- this is according to spec. + + # Clean up + >>> server('close')() + + # Using client history + >>> history = jsonrpclib.history.History() + >>> server = jsonrpclib.ServerProxy('http://localhost:8080', history=history) + >>> server.add(5,6) + 11 + >>> print(history.request) + {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0", + "method": "add", "params": [5, 6]} + >>> print(history.response) + {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0", + "result": 11} + + # Clean up + >>> server('close')() + +If you need 1.0 functionality, there are a bunch of places you can pass that in, +although the best is just to give a specific configuration to +``jsonrpclib.ServerProxy``: + +.. code-block:: python + + >>> import jsonrpclib + >>> jsonrpclib.config.DEFAULT.version + 2.0 + >>> config = jsonrpclib.config.Config(version=1.0) + >>> history = jsonrpclib.history.History() + >>> server = jsonrpclib.ServerProxy('http://localhost:8080', config=config, + history=history) + >>> server.add(7, 10) + 17 + >>> print(history.request) + {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", + "method": "add", "params": [7, 10]} + >>> print(history.response) + {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", "error": null, "result": 17} + >>> server('close')() + +The equivalent ``loads`` and ``dumps`` functions also exist, although with minor +modifications. The ``dumps`` arguments are almost identical, but it adds three +arguments: ``rpcid`` for the 'id' key, ``version`` to specify the JSON-RPC +compatibility, and ``notify`` if it's a request that you want to be a +notification. + +Additionally, the ``loads`` method does not return the params and method like +``xmlrpclib``, but instead a.) parses for errors, raising ProtocolErrors, and +b.) returns the entire structure of the request / response for manual parsing. + + +Additional headers +****************** + +If your remote service requires custom headers in request, you can pass them +as as a ``headers`` keyword argument, when creating the ``ServerProxy``: + +.. 
code-block:: python + + >>> import jsonrpclib + >>> server = jsonrpclib.ServerProxy("http://localhost:8080", + headers={'X-Test' : 'Test'}) + +You can also put additional request headers only for certain method invocation: + +.. code-block:: python + + >>> import jsonrpclib + >>> server = jsonrpclib.Server("http://localhost:8080") + >>> with server._additional_headers({'X-Test' : 'Test'}) as test_server: + ... test_server.ping(42) + ... + >>> # X-Test header will be no longer sent in requests + +Of course ``_additional_headers`` contexts can be nested as well. + + +Class Translation +***************** + +I've recently added "automatic" class translation support, although it is +turned off by default. This can be devastatingly slow if improperly used, so +the following is just a short list of things to keep in mind when using it. + +* Keep It (the object) Simple Stupid. (for exceptions, keep reading.) +* Do not require init params (for exceptions, keep reading) +* Getter properties without setters could be dangerous (read: not tested) + +If any of the above are issues, use the _serialize method. (see usage below) +The server and client must BOTH have use_jsonclass configuration item on and +they must both have access to the same libraries used by the objects for +this to work. + +If you have excessively nested arguments, it would be better to turn off the +translation and manually invoke it on specific objects using +``jsonrpclib.jsonclass.dump`` / ``jsonrpclib.jsonclass.load`` (since the default +behavior recursively goes through attributes and lists / dicts / tuples). + + Sample file: *test_obj.py* + +.. code-block:: python + + # This object is /very/ simple, and the system will look through the + # attributes and serialize what it can. + class TestObj(object): + foo = 'bar' + + # This object requires __init__ params, so it uses the _serialize method + # and returns a tuple of init params and attribute values (the init params + # can be a dict or a list, but the attribute values must be a dict.) + class TestSerial(object): + foo = 'bar' + def __init__(self, *args): + self.args = args + def _serialize(self): + return (self.args, {'foo':self.foo,}) + +* Sample usage + +.. code-block:: python + + >>> import jsonrpclib + >>> import test_obj + + # History is used only to print the serialized form of beans + >>> history = jsonrpclib.history.History() + >>> testobj1 = test_obj.TestObj() + >>> testobj2 = test_obj.TestSerial() + >>> server = jsonrpclib.Server('http://localhost:8080', history=history) + + # The 'ping' just returns whatever is sent + >>> ping1 = server.ping(testobj1) + >>> ping2 = server.ping(testobj2) + + >>> print(history.request) + {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0", + "method": "ping", "params": [{"__jsonclass__": + ["test_obj.TestSerial", []], "foo": "bar"} + ]} + >>> print(history.response) + {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0", + "result": {"__jsonclass__": ["test_obj.TestSerial", []], "foo": "bar"}} + +This behavior is turned by default. To deactivate it, just set the +``use_jsonclass`` member of a server ``Config`` to False. +If you want to use a per-class serialization method, set its name in the +``serialize_method`` member of a server ``Config``. +Finally, if you are using classes that you have defined in the implementation +(as in, not a separate library), you'll need to add those (on BOTH the server +and the client) using the ``config.classes.add()`` method. 
Feedback on this "feature" is very, VERY much appreciated.

Why JSON-RPC?
*************

In my opinion, there are several reasons to choose JSON over XML for RPC:

* Much simpler to read (I suppose this is opinion, but I know I'm right. :)
* Size / bandwidth - the main reason: a JSON object representation is just
  much smaller.
* Parsing - JSON should be much quicker to parse than XML.
* Easy class passing with ``jsonclass`` (when enabled).

In the interest of being fair, there are also a few reasons to choose XML
over JSON:

* Your server doesn't do JSON (rather obvious).
* Wider XML-RPC support across APIs (can we change this? :)
* XML-RPC libraries are more established, i.e. more stable (let's change this
  too).

Tests
*****

Tests are an almost-verbatim drop from the JSON-RPC specification 2.0 page.
They can be run using *unittest* or *nosetests*:

.. code-block:: console

    python -m unittest discover tests
    python3 -m unittest discover tests
    nosetests tests

diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/SimpleJSONRPCServer.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/SimpleJSONRPCServer.py
index f7a7b652..e9fe4e68 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/SimpleJSONRPCServer.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/SimpleJSONRPCServer.py
@@ -1,602 +1,602 @@
-#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-Defines a request dispatcher, a HTTP request handler, a HTTP server and a
-CGI request handler.
-
-:authors: Josh Marshall, Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-# Local modules
-from jsonrpclib import Fault
-import jsonrpclib.config
-import jsonrpclib.utils as utils
-import jsonrpclib.threadpool
-
-# Standard library
-import logging
-import socket
-import sys
-import traceback
-
-# Prepare the logger
-_logger = logging.getLogger(__name__)
-
-try:
- # Python 3
- # pylint: disable=F0401,E0611
- import xmlrpc.server as xmlrpcserver
- import socketserver
-except (ImportError, AttributeError):
- # Python 2 or IronPython
- # pylint: disable=F0401,E0611
- import SimpleXMLRPCServer as xmlrpcserver
- import SocketServer as socketserver
-
-try:
- # Windows
- import fcntl
-except ImportError:
- # Other systems
- # pylint: disable=C0103
- fcntl = None
-
-# ------------------------------------------------------------------------------
-
-
-def get_version(request):
- """
- Computes the JSON-RPC version
-
- :param request: A request dictionary
- :return: The JSON-RPC version or None
- """
- if 'jsonrpc' in request:
- return 2.0
- elif 'id' in request:
- return 1.0
-
- return None
-
-
-def validate_request(request, json_config):
- """
- Validates the format of a request dictionary
-
- :param request: A request dictionary
- :param json_config: A JSONRPClib Config instance
- :return: True if the dictionary is valid, else a Fault object
- """
- if not isinstance(request, utils.DictType):
- # Invalid request type
- fault = Fault(-32600, 'Request must be a dict, not {0}'
- .format(type(request).__name__),
- config=json_config)
- _logger.warning("Invalid request content: %s", fault)
- return fault
-
- # Get the request ID
- rpcid = request.get('id', None)
-
- # Check request version
- version = get_version(request)
- if not version:
- fault = Fault(-32600, 'Request {0} invalid.'.format(request),
- rpcid=rpcid, config=json_config)
- _logger.warning("No version in request: %s", fault)
- return fault
-
- # Default parameters: empty list
- request.setdefault('params', [])
-
- # Check parameters
- method = request.get('method', None)
- params = request.get('params')
- param_types = (utils.ListType, utils.DictType, utils.TupleType)
-
- if not method or not isinstance(method, utils.string_types) or \
- not isinstance(params, param_types):
- # Invalid type of method name or parameters
- fault = Fault(-32600, 'Invalid request parameters or method.',
- rpcid=rpcid, config=json_config)
- _logger.warning("Invalid request content: %s", fault)
- return fault
-
- # Valid request
- return True
-
-# ------------------------------------------------------------------------------
-
-
-class NoMulticallResult(Exception):
- """
- No result in multicall
- """
- pass
-
-
-class SimpleJSONRPCDispatcher(xmlrpcserver.SimpleXMLRPCDispatcher, object):
- """
- Mix-in class that dispatches JSON-RPC requests.
-
- This class is used to register JSON-RPC method handlers
- and then to dispatch them. This class doesn't need to be
- instanced directly when used by SimpleJSONRPCServer.
- """
- def __init__(self, encoding=None, config=jsonrpclib.config.DEFAULT):
- """
- Sets up the dispatcher with the given encoding.
- None values are allowed.
- """
- xmlrpcserver.SimpleXMLRPCDispatcher.__init__(
- self, allow_none=True, encoding=encoding or "UTF-8")
- self.json_config = config
-
- # Notification thread pool
- self.__notification_pool = None
-
- def set_notification_pool(self, thread_pool):
- """
- Sets the thread pool to use to handle notifications
- """
- self.__notification_pool = thread_pool
-
- def _unmarshaled_dispatch(self, request, dispatch_method=None):
- """
- Loads the request dictionary (unmarshaled), calls the method(s)
- accordingly and returns a JSON-RPC dictionary (not marshaled)
-
- :param request: JSON-RPC request dictionary (or list of)
- :param dispatch_method: Custom dispatch method (for method resolution)
- :return: A JSON-RPC dictionary (or an array of) or None if the request
- was a notification
- :raise NoMulticallResult: No result in batch
- """
- if not request:
- # Invalid request dictionary
- fault = Fault(-32600, 'Request invalid -- no request data.',
- config=self.json_config)
- _logger.warning("Invalid request: %s", fault)
- return fault.dump()
-
- if isinstance(request, utils.ListType):
- # This SHOULD be a batch, by spec
- responses = []
- for req_entry in request:
- # Validate the request
- result = validate_request(req_entry, self.json_config)
- if isinstance(result, Fault):
- responses.append(result.dump())
- continue
-
- # Call the method
- resp_entry = self._marshaled_single_dispatch(req_entry,
- dispatch_method)
-
- # Store its result
- if isinstance(resp_entry, Fault):
- # pylint: disable=E1103
- responses.append(resp_entry.dump())
- elif resp_entry is not None:
- responses.append(resp_entry)
-
- if not responses:
- # No non-None result
- _logger.error("No result in Multicall")
- raise NoMulticallResult("No result")
-
- return responses
-
- else:
- # Single call
- result = validate_request(request, self.json_config)
- if isinstance(result, Fault):
- return result.dump()
-
- # Call the method
- response = self._marshaled_single_dispatch(request,
- dispatch_method)
- if isinstance(response, Fault):
- # pylint: disable=E1103
- return response.dump()
-
- return response
-
- def _marshaled_dispatch(self, data, dispatch_method=None, path=None):
- """
- Parses the request data (marshaled), calls method(s) and returns a
- JSON string (marshaled)
-
- :param data: A JSON request string
- :param dispatch_method: Custom dispatch method (for method resolution)
- :param path: Unused parameter, to keep compatibility with xmlrpclib
- :return: A JSON-RPC response string (marshaled)
- """
- # Parse the request
- try:
- request = jsonrpclib.loads(data, self.json_config)
- except Exception as ex:
- # Parsing/loading error
- fault = Fault(-32700, 'Request {0} invalid. ({1}:{2})'
- .format(data, type(ex).__name__, ex),
- config=self.json_config)
- _logger.warning("Error parsing request: %s", fault)
- return fault.response()
-
- # Get the response dictionary
- try:
- response = self._unmarshaled_dispatch(request, dispatch_method)
- if response is not None:
- # Compute the string representation of the dictionary/list
- return jsonrpclib.jdumps(response, self.encoding)
- else:
- # No result (notification)
- return ''
- except NoMulticallResult:
- # Return an empty string (jsonrpclib internal behaviour)
- return ''
-
- def _marshaled_single_dispatch(self, request, dispatch_method=None):
- """
- Dispatches a single method call
-
- :param request: A validated request dictionary
- :param dispatch_method: Custom dispatch method (for method resolution)
- :return: A JSON-RPC response dictionary, or None if it was a
- notification request
- """
- method = request.get('method')
- params = request.get('params')
-
- # Prepare a request-specific configuration
- if 'jsonrpc' not in request and self.json_config.version >= 2:
- # JSON-RPC 1.0 request on a JSON-RPC 2.0
- # => compatibility needed
- config = self.json_config.copy()
- config.version = 1.0
- else:
- # Keep server configuration as is
- config = self.json_config
-
- # Test if this is a notification request
- is_notification = 'id' not in request or request['id'] in (None, '')
- if is_notification and self.__notification_pool is not None:
- # Use the thread pool for notifications
- if dispatch_method is not None:
- self.__notification_pool.enqueue(dispatch_method,
- method, params)
- else:
- self.__notification_pool.enqueue(self._dispatch,
- method, params, config)
-
- # Return immediately
- return None
- else:
- # Synchronous call
- try:
- # Call the method
- if dispatch_method is not None:
- response = dispatch_method(method, params)
- else:
- response = self._dispatch(method, params, config)
- except Exception as ex:
- # Return a fault
- fault = Fault(-32603, '{0}:{1}'.format(type(ex).__name__, ex),
- config=config)
- _logger.error("Error calling method %s: %s", method, fault)
- return fault.dump()
-
- if is_notification:
- # It's a notification, no result needed
- # Do not use 'not id' as it might be the integer 0
- return None
-
- # Prepare a JSON-RPC dictionary
- try:
- return jsonrpclib.dump(response, rpcid=request['id'],
- is_response=True, config=config)
- except Exception as ex:
- # JSON conversion exception
- fault = Fault(-32603, '{0}:{1}'.format(type(ex).__name__, ex),
- config=config)
- _logger.error("Error preparing JSON-RPC result: %s", fault)
- return fault.dump()
-
- def _dispatch(self, method, params, config=None):
- """
- Default method resolver and caller
-
- :param method: Name of the method to call
- :param params: List of arguments to give to the method
- :param config: Request-specific configuration
- :return: The result of the method
- """
- config = config or self.json_config
-
- func = None
- try:
- # Look into registered methods
- func = self.funcs[method]
- except KeyError:
- if self.instance is not None:
- # Try with the registered instance
- try:
- # Instance has a custom dispatcher
- return getattr(self.instance, '_dispatch')(method, params)
- except AttributeError:
- # Resolve the method name in the instance
- try:
- func = xmlrpcserver.resolve_dotted_attribute(
- self.instance, method, True)
- except AttributeError:
- # Unknown method
- pass
-
- if func is not None:
- try:
- # Call the method
- if isinstance(params, utils.ListType):
- return func(*params)
- else:
- return func(**params)
- except TypeError as ex:
- # Maybe the parameters are wrong
- fault = Fault(-32602, 'Invalid parameters: {0}'.format(ex),
- config=config)
- _logger.warning("Invalid call parameters: %s", fault)
- return fault
- except:
- # Method exception
- err_lines = traceback.format_exc().splitlines()
- trace_string = '{0} | {1}'.format(err_lines[-3], err_lines[-1])
- fault = Fault(-32603, 'Server error: {0}'.format(trace_string),
- config=config)
- _logger.exception("Server-side exception: %s", fault)
- return fault
- else:
- # Unknown method
- fault = Fault(-32601, 'Method {0} not supported.'.format(method),
- config=config)
- _logger.warning("Unknown method: %s", fault)
- return fault
-
-# ------------------------------------------------------------------------------
-
-
-class SimpleJSONRPCRequestHandler(xmlrpcserver.SimpleXMLRPCRequestHandler):
- """
- HTTP request handler.
-
- The server that receives the requests must have a json_config member,
- containing a JSONRPClib Config instance
- """
- def do_POST(self):
- """
- Handles POST requests
- """
- if not self.is_rpc_path_valid():
- self.report_404()
- return
-
- # Retrieve the configuration
- config = getattr(self.server, 'json_config', jsonrpclib.config.DEFAULT)
-
- try:
- # Read the request body
- max_chunk_size = 10 * 1024 * 1024
- size_remaining = int(self.headers["content-length"])
- chunks = []
- while size_remaining:
- chunk_size = min(size_remaining, max_chunk_size)
- raw_chunk = self.rfile.read(chunk_size)
- if not raw_chunk:
- break
- chunks.append(utils.from_bytes(raw_chunk))
- size_remaining -= len(chunks[-1])
- data = ''.join(chunks)
-
- try:
- # Decode content
- data = self.decode_request_content(data)
- if data is None:
- # Unknown encoding, response has been sent
- return
- except AttributeError:
- # Available since Python 2.7
- pass
-
- # Execute the method
- response = self.server._marshaled_dispatch(
- data, getattr(self, '_dispatch', None), self.path)
-
- # No exception: send a 200 OK
- self.send_response(200)
- except:
- # Exception: send 500 Server Error
- self.send_response(500)
- err_lines = traceback.format_exc().splitlines()
- trace_string = '{0} | {1}'.format(err_lines[-3], err_lines[-1])
- fault = jsonrpclib.Fault(-32603, 'Server error: {0}'
- .format(trace_string), config=config)
- _logger.exception("Server-side error: %s", fault)
- response = fault.response()
-
- if response is None:
- # Avoid to send None
- response = ''
-
- # Convert the response to the valid string format
- response = utils.to_bytes(response)
-
- # Send it
- self.send_header("Content-type", config.content_type)
- self.send_header("Content-length", str(len(response)))
- self.end_headers()
- if response:
- self.wfile.write(response)
-
-# ------------------------------------------------------------------------------
-
-
-class SimpleJSONRPCServer(socketserver.TCPServer, SimpleJSONRPCDispatcher):
- """
- JSON-RPC server (and dispatcher)
- """
- # This simplifies server restart after error
- allow_reuse_address = True
-
- # pylint: disable=C0103
- def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler,
- logRequests=True, encoding=None, bind_and_activate=True,
- address_family=socket.AF_INET,
- config=jsonrpclib.config.DEFAULT):
- """
- Sets up the server and the dispatcher
-
- :param addr: The server listening address
- :param requestHandler: Custom request handler
- :param logRequests: Flag to(de)activate requests logging
- :param encoding: The dispatcher request encoding
- :param bind_and_activate: If True, starts the server immediately
- :param address_family: The server listening address family
- :param config: A JSONRPClib Config instance
- """
- # Set up the dispatcher fields
- SimpleJSONRPCDispatcher.__init__(self, encoding, config)
-
- # Prepare the server configuration
- # logRequests is used by SimpleXMLRPCRequestHandler
- self.logRequests = logRequests
- self.address_family = address_family
- self.json_config = config
-
- # Work on the request handler
- class RequestHandlerWrapper(requestHandler, object):
- """
- Wraps the request handle to have access to the configuration
- """
- def __init__(self, *args, **kwargs):
- """
- Constructs the wrapper after having stored the configuration
- """
- self.config = config
- super(RequestHandlerWrapper, self).__init__(*args, **kwargs)
-
- # Set up the server
- socketserver.TCPServer.__init__(self, addr, requestHandler,
- bind_and_activate)
-
- # Windows-specific
- if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
- flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
- flags |= fcntl.FD_CLOEXEC
- fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
-
-# ------------------------------------------------------------------------------
-
-
-class PooledJSONRPCServer(SimpleJSONRPCServer, socketserver.ThreadingMixIn):
- """
- JSON-RPC server based on a thread pool
- """
- def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler,
- logRequests=True, encoding=None, bind_and_activate=True,
- address_family=socket.AF_INET,
- config=jsonrpclib.config.DEFAULT, thread_pool=None):
- """
- Sets up the server and the dispatcher
-
- :param addr: The server listening address
- :param requestHandler: Custom request handler
- :param logRequests: Flag to(de)activate requests logging
- :param encoding: The dispatcher request encoding
- :param bind_and_activate: If True, starts the server immediately
- :param address_family: The server listening address family
- :param config: A JSONRPClib Config instance
- :param thread_pool: A ThreadPool object. The pool must be started.
- """
- # Normalize the thread pool
- if thread_pool is None:
- # Start a thread pool with 30 threads max, 0 thread min
- thread_pool = jsonrpclib.threadpool.ThreadPool(
- 30, 0, logname="PooledJSONRPCServer")
- thread_pool.start()
-
- # Store the thread pool
- self.__request_pool = thread_pool
-
- # Prepare the server
- SimpleJSONRPCServer.__init__(self, addr, requestHandler, logRequests,
- encoding, bind_and_activate,
- address_family, config)
-
- def process_request(self, request, client_address):
- """
- Handle a client request: queue it in the thread pool
- """
- self.__request_pool.enqueue(self.process_request_thread,
- request, client_address)
-
- def server_close(self):
- """
- Clean up the server
- """
- SimpleJSONRPCServer.server_close(self)
- self.__request_pool.stop()
-
-# ------------------------------------------------------------------------------
-
-
-class CGIJSONRPCRequestHandler(SimpleJSONRPCDispatcher):
- """
- JSON-RPC CGI handler (and dispatcher)
- """
- def __init__(self, encoding=None, config=jsonrpclib.config.DEFAULT):
- """
- Sets up the dispatcher
-
- :param encoding: Dispatcher encoding
- :param config: A JSONRPClib Config instance
- """
- SimpleJSONRPCDispatcher.__init__(self, encoding, config)
-
- def handle_jsonrpc(self, request_text):
- """
- Handle a JSON-RPC request
- """
- response = self._marshaled_dispatch(request_text)
- sys.stdout.write('Content-Type: {0}\r\n'
- .format(self.json_config.content_type))
- sys.stdout.write('Content-Length: {0:d}\r\n'.format(len(response)))
- sys.stdout.write('\r\n')
- sys.stdout.write(response)
-
- # XML-RPC alias
- handle_xmlrpc = handle_jsonrpc
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/__init__.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/__init__.py
index 2c7dc1c5..a92774ab 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/__init__.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/__init__.py
@@ -1,34 +1,34 @@
-#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-Aliases to ease access to jsonrpclib classes
-
-:authors: Josh Marshall, Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Easy access to utility methods and classes
-from jsonrpclib.jsonrpc import Server, ServerProxy
-from jsonrpclib.jsonrpc import MultiCall, Fault, ProtocolError, AppError
-from jsonrpclib.jsonrpc import loads, dumps, load, dump
-from jsonrpclib.jsonrpc import jloads, jdumps
-import jsonrpclib.history as history
-import jsonrpclib.utils as utils
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/config.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/config.py
index d2c5a811..77838d4e 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/config.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/config.py
@@ -1,141 +1,141 @@
-#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-The configuration module.
-
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-
-import sys
-
-# ------------------------------------------------------------------------------
-
-
-class LocalClasses(dict):
- """
- Associates local classes with their names (used in the jsonclass module)
- """
- def add(self, cls, name=None):
- """
- Stores a local class
-
- :param cls: A class
- :param name: Custom name used in the __jsonclass__ attribute
- """
- if not name:
- name = cls.__name__
- self[name] = cls
-
-# ------------------------------------------------------------------------------
-
-
-class Config(object):
- """
- This is pretty much used exclusively for the 'jsonclass'
- functionality... set use_jsonclass to False to turn it off.
- You can change serialize_method and ignore_attribute, or use
- the local_classes.add(class) to include "local" classes.
- """
- def __init__(self, version=2.0, content_type="application/json-rpc",
- user_agent=None, use_jsonclass=True,
- serialize_method='_serialize',
- ignore_attribute='_ignore',
- serialize_handlers=None):
- """
- Sets up a configuration of JSONRPClib
-
- :param version: JSON-RPC specification version
- :param content_type: HTTP content type header value
- :param user_agent: The HTTP request user agent
- :param use_jsonclass: Allow bean marshalling
- :param serialize_method: A string that references the method on a
- custom class object which is responsible for
- returning a tuple of the arguments and a dict
- of attributes.
- :param ignore_attribute: A string that references the attribute on a
- custom class object which holds strings and/or
- references of the attributes the class
- translator should ignore.
- :param serialize_handlers: A dictionary of dump handler functions by
- type for additional type support and for
- overriding dump of built-in types in utils
- """
- # JSON-RPC specification
- self.version = version
-
- # Change to False to keep __jsonclass__ entries raw.
- self.use_jsonclass = use_jsonclass
-
- # it SHOULD be 'application/json-rpc'
- # but MAY be 'application/json' or 'application/jsonrequest'
- self.content_type = content_type
-
- # Default user agent
- if user_agent is None:
- user_agent = 'jsonrpclib/{0} (Python {1})'.format(
- __version__, '.'.join(str(ver)
- for ver in sys.version_info[0:3]))
- self.user_agent = user_agent
-
- # The list of classes to use for jsonclass translation.
- self.classes = LocalClasses()
-
- # The serialize_method should be a string that references the
- # method on a custom class object which is responsible for
- # returning a tuple of the constructor arguments and a dict of
- # attributes.
- self.serialize_method = serialize_method
-
- # The ignore attribute should be a string that references the
- # attribute on a custom class object which holds strings and / or
- # references of the attributes the class translator should ignore.
- self.ignore_attribute = ignore_attribute
-
- # The list of serialize handler functions for jsonclass dump.
- # Used for handling additional types and overriding built-in types.
- # Functions are expected to have the same parameters as jsonclass dump
- # (possibility to call standard jsonclass dump function within).
- self.serialize_handlers = serialize_handlers or {}
-
- def copy(self):
- """
- Returns a shallow copy of this configuration bean
-
- :return: A shallow copy of this configuration
- """
- new_config = Config(self.version, self.content_type, self.user_agent,
- self.use_jsonclass, self.serialize_method,
- self.ignore_attribute, None)
- new_config.classes = self.classes.copy()
- new_config.serialize_handlers = self.serialize_handlers.copy()
- return new_config
-
-# Default configuration
-DEFAULT = Config()
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/history.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/history.py
index 7062ab66..288d9539 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/history.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/history.py
@@ -1,95 +1,95 @@
-#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-The history module.
-
-:authors: Josh Marshall, Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-
-
-class History(object):
- """
- This holds all the response and request objects for a
- session. A server using this should call "clear" after
- each request cycle in order to keep it from clogging
- memory.
- """
- def __init__(self):
- """
- Sets up members
- """
- self.requests = []
- self.responses = []
-
- def add_response(self, response_obj):
- """
- Adds a response to the history
-
- :param response_obj: Response content
- """
- self.responses.append(response_obj)
-
- def add_request(self, request_obj):
- """
- Adds a request to the history
-
- :param request_obj: A request object
- """
- self.requests.append(request_obj)
-
- @property
- def request(self):
- """
- Returns the latest stored request or None
- """
- try:
- return self.requests[-1]
-
- except IndexError:
- return None
-
- @property
- def response(self):
- """
- Returns the latest stored response or None
- """
- try:
- return self.responses[-1]
-
- except IndexError:
- return None
-
- def clear(self):
- """
- Clears the history lists
- """
- del self.requests[:]
- del self.responses[:]
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonclass.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonclass.py
index c7cc4c35..6bcbeab7 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonclass.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonclass.py
@@ -1,295 +1,295 @@
-#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-The serialization module
-
-:authors: Josh Marshall, Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-
-# Local package
-import jsonrpclib.config
-import jsonrpclib.utils as utils
-
-# Standard library
-import inspect
-import re
-
-# ------------------------------------------------------------------------------
-
-# Supported transmitted code
-SUPPORTED_TYPES = (utils.DictType,) + utils.iterable_types \
- + utils.primitive_types
-
-# Regex of invalid module characters
-INVALID_MODULE_CHARS = r'[^a-zA-Z0-9\_\.]'
-
-# ------------------------------------------------------------------------------
-
-
-class TranslationError(Exception):
- """
- Unmarshaling exception
- """
- pass
-
-
-def _slots_finder(clazz, fields_set):
- """
- Recursively visits the class hierarchy to find all slots
-
- :param clazz: Class to analyze
- :param fields_set: Set where to store __slots___ content
- """
- # ... class level
- try:
- fields_set.update(clazz.__slots__)
- except AttributeError:
- pass
-
- # ... parent classes level
- for base_class in clazz.__bases__:
- _slots_finder(base_class, fields_set)
-
-
-def _find_fields(obj):
- """
- Returns the names of the fields of the given object
-
- :param obj: An object to analyze
- :return: A set of field names
- """
- # Find fields...
- fields = set()
-
- # ... using __dict__
- try:
- fields.update(obj.__dict__)
- except AttributeError:
- pass
-
- # ... using __slots__
- _slots_finder(obj.__class__, fields)
- return fields
-
-
-def dump(obj, serialize_method=None, ignore_attribute=None, ignore=None,
- config=jsonrpclib.config.DEFAULT):
- """
- Transforms the given object into a JSON-RPC compliant form.
- Converts beans into dictionaries with a __jsonclass__ entry.
- Doesn't change primitive types.
-
- :param obj: An object to convert
- :param serialize_method: Custom serialization method
- :param ignore_attribute: Name of the object attribute containing the names
- of members to ignore
- :param ignore: A list of members to ignore
- :param config: A JSONRPClib Config instance
- :return: A JSON-RPC compliant object
- """
- # Normalize arguments
- serialize_method = serialize_method or config.serialize_method
- ignore_attribute = ignore_attribute or config.ignore_attribute
- ignore = ignore or []
-
- # Parse / return default "types"...
- # Apply additional types, override built-in types
- # (reminder: config.serialize_handlers is a dict)
- try:
- serializer = config.serialize_handlers[type(obj)]
- except KeyError:
- # Not a serializer
- pass
- else:
- if serializer is not None:
- return serializer(obj, serialize_method, ignore_attribute,
- ignore, config)
-
- # Primitive
- if isinstance(obj, utils.primitive_types):
- return obj
-
- # Iterative
- elif isinstance(obj, utils.iterable_types):
- # List, set or tuple
- return [dump(item, serialize_method, ignore_attribute, ignore, config)
- for item in obj]
-
- elif isinstance(obj, utils.DictType):
- # Dictionary
- return dict((key, dump(value, serialize_method,
- ignore_attribute, ignore, config))
- for key, value in obj.items())
-
- # It's not a standard type, so it needs __jsonclass__
- module_name = inspect.getmodule(type(obj)).__name__
- json_class = obj.__class__.__name__
-
- if module_name not in ('', '__main__'):
- json_class = '{0}.{1}'.format(module_name, json_class)
-
- # Keep the class name in the returned object
- return_obj = {"__jsonclass__": [json_class]}
-
- # If a serialization method is defined..
- if hasattr(obj, serialize_method):
- # Params can be a dict (keyword) or list (positional)
- # Attrs MUST be a dict.
- serialize = getattr(obj, serialize_method)
- params, attrs = serialize()
- return_obj['__jsonclass__'].append(params)
- return_obj.update(attrs)
- return return_obj
-
- else:
- # Otherwise, try to figure it out
- # Obviously, we can't assume to know anything about the
- # parameters passed to __init__
- return_obj['__jsonclass__'].append([])
-
- # Prepare filtering lists
- known_types = SUPPORTED_TYPES + tuple(config.serialize_handlers)
- ignore_list = getattr(obj, ignore_attribute, []) + ignore
-
- # Find fields and filter them by name
- fields = _find_fields(obj)
- fields.difference_update(ignore_list)
-
- # Dump field values
- attrs = {}
- for attr_name in fields:
- attr_value = getattr(obj, attr_name)
- if isinstance(attr_value, known_types) and \
- attr_value not in ignore_list:
- attrs[attr_name] = dump(attr_value, serialize_method,
- ignore_attribute, ignore, config)
- return_obj.update(attrs)
- return return_obj
-
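A minimal sketch of what dump() produces, assuming the jsonrpclib-pelix 0.2.5 package moved by this diff is importable; the Point bean below is a made-up example:

from jsonrpclib import jsonclass

class Point(object):
    """Hypothetical bean used only for illustration."""
    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

serialized = jsonclass.dump(Point(1, 2))
# serialized == {'__jsonclass__': ['Point', []], 'x': 1, 'y': 2}
# The class name stays unqualified when this runs as a script (__main__);
# a class from a module named 'shapes' would appear as 'shapes.Point'.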
-# ------------------------------------------------------------------------------
-
-
-def load(obj, classes=None):
- """
- If 'obj' is a dictionary containing a __jsonclass__ entry, converts the
- dictionary item into a bean of this class.
-
- :param obj: An object from a JSON-RPC dictionary
- :param classes: A custom {name: class} dictionary
- :return: The loaded object
- """
- # Primitive
- if isinstance(obj, utils.primitive_types):
- return obj
-
- # List, set or tuple
- elif isinstance(obj, utils.iterable_types):
- # This comes from a JSON parser, so it can only be a list...
- return [load(entry) for entry in obj]
-
- # Otherwise, it's a dict type
- elif '__jsonclass__' not in obj:
- return dict((key, load(value)) for key, value in obj.items())
-
- # It's a dictionary, and it has a __jsonclass__
- orig_module_name = obj['__jsonclass__'][0]
- params = obj['__jsonclass__'][1]
-
- # Validate the module name
- if not orig_module_name:
- raise TranslationError('Module name empty.')
-
- json_module_clean = re.sub(INVALID_MODULE_CHARS, '', orig_module_name)
- if json_module_clean != orig_module_name:
- raise TranslationError('Module name {0} has invalid characters.'
- .format(orig_module_name))
-
- # Load the class
- json_module_parts = json_module_clean.split('.')
- json_class = None
- if classes and len(json_module_parts) == 1:
- # Local class name -- probably means it won't work
- try:
- json_class = classes[json_module_parts[0]]
- except KeyError:
- raise TranslationError('Unknown class or module {0}.'
- .format(json_module_parts[0]))
-
- else:
- # Module + class
- json_class_name = json_module_parts.pop()
- json_module_tree = '.'.join(json_module_parts)
- try:
- # Use fromlist to load the module itself, not the package
- temp_module = __import__(json_module_tree,
- fromlist=[json_class_name])
- except ImportError:
- raise TranslationError('Could not import {0} from module {1}.'
- .format(json_class_name, json_module_tree))
-
- try:
- json_class = getattr(temp_module, json_class_name)
- except AttributeError:
- raise TranslationError("Unknown class {0}.{1}."
- .format(json_module_tree, json_class_name))
-
- # Create the object
- new_obj = None
- if isinstance(params, utils.ListType):
- try:
- new_obj = json_class(*params)
- except TypeError as ex:
- raise TranslationError("Error instantiating {0}: {1}"
- .format(json_class.__name__, ex))
-
- elif isinstance(params, utils.DictType):
- try:
- new_obj = json_class(**params)
- except TypeError as ex:
- raise TranslationError("Error instantiating {0}: {1}"
- .format(json_class.__name__, ex))
-
- else:
- raise TranslationError("Constructor args must be a dict or a list, "
- "not {0}".format(type(params).__name__))
-
- # Remove the class information, as it must be ignored during the
- # reconstruction of the object
- raw_jsonclass = obj.pop('__jsonclass__')
-
- for key, value in obj.items():
- # Recursive loading
- setattr(new_obj, key, load(value, classes))
-
- # Restore the class information for further usage
- obj['__jsonclass__'] = raw_jsonclass
-
- return new_obj
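And the reverse direction, again as a hedged sketch: for a class that cannot be re-imported by module path (such as one defined in __main__), load() needs the classes mapping:

from jsonrpclib import jsonclass

class Point(object):
    """Hypothetical bean, matching the dump() sketch above."""
    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

envelope = {'__jsonclass__': ['Point', []], 'x': 1, 'y': 2}
rebuilt = jsonclass.load(envelope, classes={'Point': Point})
assert isinstance(rebuilt, Point) and (rebuilt.x, rebuilt.y) == (1, 2)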
[The "+" side of this hunk re-adds jsonrpclib/jsonclass.py unchanged under scripts/external_libs/jsonrpclib-pelix-0.2.5/; it duplicates the removed lines above and is omitted here.]
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonrpc.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonrpc.py
index 8ee902b0..8ea3a9c8 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonrpc.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/jsonrpc.py
@@ -1,1192 +1,1192 @@
-#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-============================
-JSONRPC Library (jsonrpclib)
-============================
-
-This library is a JSON-RPC v.2 (proposed) implementation which
-follows the xmlrpclib API for portability between clients. It
-uses the same Server / ServerProxy, loads, dumps, etc. syntax,
-while providing features not present in XML-RPC like:
-
-* Keyword arguments
-* Notifications
-* Versioning
-* Batches and batch notifications
-
-Eventually, I'll add a SimpleXMLRPCServer compatible library,
-and other things to tie the thing off nicely. :)
-
-For a quick-start, just open a console and type the following,
-replacing the server address, method, and parameters
-appropriately.
->>> import jsonrpclib
->>> server = jsonrpclib.Server('http://localhost:8181')
->>> server.add(5, 6)
-11
->>> server._notify.add(5, 6)
->>> batch = jsonrpclib.MultiCall(server)
->>> batch.add(3, 50)
->>> batch.add(2, 3)
->>> batch._notify.add(3, 5)
->>> batch()
-[53, 5]
-
-See https://github.com/tcalmant/jsonrpclib for more info.
-
-:authors: Josh Marshall, Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-
-# Library includes
-import jsonrpclib.config
-import jsonrpclib.utils as utils
-
-# Standard library
-import contextlib
-import logging
-import sys
-import uuid
-
-# Create the logger
-_logger = logging.getLogger(__name__)
-
-try:
- # Python 3
- # pylint: disable=F0401,E0611
- from urllib.parse import splittype
- from urllib.parse import splithost
- from xmlrpc.client import Transport as XMLTransport
- from xmlrpc.client import SafeTransport as XMLSafeTransport
- from xmlrpc.client import ServerProxy as XMLServerProxy
- from xmlrpc.client import _Method as XML_Method
-
-except ImportError:
- # Python 2
- # pylint: disable=F0401,E0611
- from urllib import splittype
- from urllib import splithost
- from xmlrpclib import Transport as XMLTransport
- from xmlrpclib import SafeTransport as XMLSafeTransport
- from xmlrpclib import ServerProxy as XMLServerProxy
- from xmlrpclib import _Method as XML_Method
-
-# ------------------------------------------------------------------------------
-# JSON library import
-
-# JSON class serialization
-from jsonrpclib import jsonclass
-
-try:
- # pylint: disable=F0401,E0611
- # Using cjson
- import cjson
- _logger.debug("Using cjson as JSON library")
-
- # Declare cjson methods
- def jdumps(obj, encoding='utf-8'):
- """
- Serializes ``obj`` to a JSON formatted string, using cjson.
- """
- return cjson.encode(obj)
-
- def jloads(json_string):
- """
- Deserializes ``json_string`` (a string containing a JSON document)
- to a Python object, using cjson.
- """
- return cjson.decode(json_string)
-
-except ImportError:
- # pylint: disable=F0401,E0611
- # Use json or simplejson
- try:
- import json
- _logger.debug("Using json as JSON library")
-
- except ImportError:
- try:
- import simplejson as json
- _logger.debug("Using simplejson as JSON library")
- except ImportError:
- _logger.error("No supported JSON library found")
- raise ImportError('You must have the cjson, json, or simplejson '
- 'module(s) available.')
-
- # Declare json methods
- if sys.version_info[0] < 3:
- def jdumps(obj, encoding='utf-8'):
- """
- Serializes ``obj`` to a JSON formatted string.
- """
- # Python 2 (explicit encoding)
- return json.dumps(obj, encoding=encoding)
-
- else:
- # Python 3
- def jdumps(obj, encoding='utf-8'):
- """
- Serializes ``obj`` to a JSON formatted string.
- """
- # Python 3 (the encoding parameter has been removed)
- return json.dumps(obj)
-
- def jloads(json_string):
- """
- Deserializes ``json_string`` (a string containing a JSON document)
- to a Python object.
- """
- return json.loads(json_string)
-
-# ------------------------------------------------------------------------------
-# XMLRPClib re-implementations
-
-
-class ProtocolError(Exception):
- """
- JSON-RPC error
-
- ProtocolError.args[0] can be:
- * an error message (string)
- * a (code, message) tuple
- """
- pass
-
-
-class AppError(ProtocolError):
- """
- Application error: the error code is not in the pre-defined ones
-
- AppError.args[0][0]: Error code
- AppError.args[0][1]: Error message or trace
- AppError.args[0][2]: Associated data
- """
- def data(self):
- """
- Retrieves the value found in the 'data' entry of the error, or None
-
- :return: The data associated to the error, or None
- """
- return self.args[0][2]
-
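An illustrative sketch of how these exceptions carry their payload (the error tuple below is made up):

from jsonrpclib.jsonrpc import AppError, ProtocolError

err = AppError((-32001, "custom failure", {"hint": "check the input"}))
assert isinstance(err, ProtocolError)
code, message = err.args[0][0], err.args[0][1]
assert (code, message) == (-32001, "custom failure")
assert err.data() == {"hint": "check the input"}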
-
-class JSONParser(object):
- """
- Default JSON parser
- """
- def __init__(self, target):
- """
- Associates the target loader to the parser
-
- :param target: a JSONTarget instance
- """
- self.target = target
-
- def feed(self, data):
- """
- Feeds the associated target with the given data
- """
- self.target.feed(data)
-
- def close(self):
- """
- Does nothing
- """
- pass
-
-
-class JSONTarget(object):
- """
- Unmarshalls stream data to a string
- """
- def __init__(self):
- """
- Sets up the unmarshaller
- """
- self.data = []
-
- def feed(self, data):
- """
- Stores the given raw data into a buffer
- """
-        # Store raw data, as a chunk might not contain a whole wide character
- self.data.append(data)
-
- def close(self):
- """
- Unmarshalls the buffered data
- """
- if not self.data:
- return ''
- else:
- # Use type to have a valid join (str vs. bytes)
- data = type(self.data[0])().join(self.data)
- try:
- # Convert the whole final string
- data = utils.from_bytes(data)
- except:
- # Try a pass-through
- pass
-
- return data
-
-
-class TransportMixIn(object):
- """ Just extends the XMLRPC transport where necessary. """
- # for Python 2.7 support
- _connection = None
-
- # List of non-overridable headers
- # Use the configuration to change the content-type
- readonly_headers = ('content-length', 'content-type')
-
- def __init__(self, config=jsonrpclib.config.DEFAULT, context=None):
- """
- Sets up the transport
-
- :param config: A JSONRPClib Config instance
- """
- # Store the configuration
- self._config = config
-
- # Store the SSL context
- self.context = context
-
- # Set up the user agent
- self.user_agent = config.user_agent
-
- # Additional headers: list of dictionaries
- self.additional_headers = []
-
- def push_headers(self, headers):
- """
- Adds a dictionary of headers to the additional headers list
-
- :param headers: A dictionary
- """
- self.additional_headers.append(headers)
-
- def pop_headers(self, headers):
- """
- Removes the given dictionary from the additional headers list.
- Also validates that given headers are on top of the stack
-
- :param headers: Headers to remove
-        :raise AssertionError: The given dictionary is not the latest stored
- in the additional headers list
- """
- assert self.additional_headers[-1] == headers
- self.additional_headers.pop()
-
- def emit_additional_headers(self, connection):
- """
-        Puts the headers as-is in the request, filtering out read-only headers
-
- :param connection: The request connection
- """
- additional_headers = {}
-
- # Prepare the merged dictionary
- for headers in self.additional_headers:
- additional_headers.update(headers)
-
- # Remove forbidden keys
- for forbidden in self.readonly_headers:
- additional_headers.pop(forbidden, None)
-
-        # If a header was defined multiple times, the value pushed last
-        # has priority (later dictionaries overwrote earlier ones above)
- for key, value in additional_headers.items():
- key = str(key)
- if key.lower() not in self.readonly_headers:
- # Only accept replaceable headers
- connection.putheader(str(key), str(value))
-
- def send_content(self, connection, request_body):
- """
- Completes the request headers and sends the request body of a JSON-RPC
- request over a HTTPConnection
-
- :param connection: An HTTPConnection object
- :param request_body: JSON-RPC request body
- """
- # Convert the body first
- request_body = utils.to_bytes(request_body)
-
- # "static" headers
- connection.putheader("Content-Type", self._config.content_type)
- connection.putheader("Content-Length", str(len(request_body)))
-
- # Emit additional headers here in order not to override content-length
- self.emit_additional_headers(connection)
-
- connection.endheaders()
- if request_body:
- connection.send(request_body)
-
- def getparser(self):
- """
- Create an instance of the parser, and attach it to an unmarshalling
- object. Return both objects.
-
- :return: The parser and unmarshaller instances
- """
- target = JSONTarget()
- return JSONParser(target), target
-
-
-class Transport(TransportMixIn, XMLTransport):
- """
- Mixed-in HTTP transport
- """
- pass
-
-
-class SafeTransport(TransportMixIn, XMLSafeTransport):
- """
- Mixed-in HTTPS transport
- """
- pass
-
-# ------------------------------------------------------------------------------
-
-
-class ServerProxy(XMLServerProxy):
- """
- Unfortunately, much more of this class has to be copied since
- so much of it does the serialization.
- """
- def __init__(self, uri, transport=None, encoding=None,
- verbose=0, version=None, headers=None, history=None,
- config=jsonrpclib.config.DEFAULT, context=None):
- """
- Sets up the server proxy
-
- :param uri: Request URI
- :param transport: Custom transport handler
- :param encoding: Specified encoding
- :param verbose: Log verbosity level
- :param version: JSON-RPC specification version
- :param headers: Custom additional headers for each request
- :param history: History object (for tests)
- :param config: A JSONRPClib Config instance
- :param context: The optional SSLContext to use
- """
- # Store the configuration
- self._config = config
- self.__version = version or config.version
-
- schema, uri = splittype(uri)
- if schema not in ('http', 'https'):
-            _logger.error("jsonrpclib only supports http(s) URIs, not %s",
- schema)
- raise IOError('Unsupported JSON-RPC protocol.')
-
- self.__host, self.__handler = splithost(uri)
- if not self.__handler:
- # Not sure if this is in the JSON spec?
- self.__handler = '/'
-
- if transport is None:
- if schema == 'https':
- transport = SafeTransport(config=config, context=context)
- else:
- transport = Transport(config=config)
- self.__transport = transport
-
- self.__encoding = encoding
- self.__verbose = verbose
- self.__history = history
-
- # Global custom headers are injected into Transport
- self.__transport.push_headers(headers or {})
-
- def _request(self, methodname, params, rpcid=None):
- """
- Calls a method on the remote server
-
- :param methodname: Name of the method to call
- :param params: Method parameters
- :param rpcid: ID of the remote call
- :return: The parsed result of the call
- """
- request = dumps(params, methodname, encoding=self.__encoding,
- rpcid=rpcid, version=self.__version,
- config=self._config)
- response = self._run_request(request)
- check_for_errors(response)
- return response['result']
-
- def _request_notify(self, methodname, params, rpcid=None):
- """
- Calls a method as a notification
-
- :param methodname: Name of the method to call
- :param params: Method parameters
- :param rpcid: ID of the remote call
- """
- request = dumps(params, methodname, encoding=self.__encoding,
- rpcid=rpcid, version=self.__version, notify=True,
- config=self._config)
- response = self._run_request(request, notify=True)
- check_for_errors(response)
-
- def _run_request(self, request, notify=False):
- """
- Sends the given request to the remote server
-
- :param request: The request to send
- :param notify: Notification request flag (unused)
- :return: The response as a parsed JSON object
- """
- if self.__history is not None:
- self.__history.add_request(request)
-
- response = self.__transport.request(
- self.__host,
- self.__handler,
- request,
- verbose=self.__verbose
- )
-
- # Here, the XMLRPC library translates a single list
- # response to the single value -- should we do the
- # same, and require a tuple / list to be passed to
- # the response object, or expect the Server to be
- # outputting the response appropriately?
-
- if self.__history is not None:
- self.__history.add_response(response)
-
- if not response:
- return None
- else:
- return_obj = loads(response, self._config)
- return return_obj
-
- def __getattr__(self, name):
- """
- Returns a callable object to call the remote service
- """
- # Same as original, just with new _Method reference
- return _Method(self._request, name)
-
- def __close(self):
- """
- Closes the transport layer
- """
- try:
- self.__transport.close()
- except AttributeError:
- # Not available in Python 2.6
- pass
-
- def __call__(self, attr):
- """
- A workaround to get special attributes on the ServerProxy
- without interfering with the magic __getattr__
-
- (code from xmlrpclib in Python 2.7)
- """
- if attr == "close":
- return self.__close
-
- elif attr == "transport":
- return self.__transport
-
- raise AttributeError("Attribute {0} not found".format(attr))
-
- @property
- def _notify(self):
- """
- Like __getattr__, but sending a notification request instead of a call
- """
- return _Notify(self._request_notify)
-
- @contextlib.contextmanager
- def _additional_headers(self, headers):
- """
-        Allows specifying additional headers, to be added inside the with
- block.
- Example of usage:
-
- >>> with client._additional_headers({'X-Test' : 'Test'}) as new_client:
- ... new_client.method()
- ...
- >>> # Here old headers are restored
- """
- self.__transport.push_headers(headers)
- yield self
- self.__transport.pop_headers(headers)
-
-# ------------------------------------------------------------------------------
-
-
-class _Method(XML_Method):
- """
-    Some magic to bind a JSON-RPC method to an RPC server.
- """
- def __call__(self, *args, **kwargs):
- """
- Sends an RPC request and returns the unmarshalled result
- """
- if args and kwargs:
- raise ProtocolError("Cannot use both positional and keyword "
- "arguments (according to JSON-RPC spec.)")
- if args:
- return self.__send(self.__name, args)
- else:
- return self.__send(self.__name, kwargs)
-
- def __getattr__(self, name):
- """
- Returns a Method object for nested calls
- """
- if name == "__name__":
- return self.__name
- return _Method(self.__send, "{0}.{1}".format(self.__name, name))
-
-
-class _Notify(object):
- """
- Same as _Method, but to send notifications
- """
- def __init__(self, request):
- """
- Sets the method to call to send a request to the server
- """
- self._request = request
-
- def __getattr__(self, name):
- """
- Returns a Method object, to be called as a notification
- """
- return _Method(self._request, name)
-
-# ------------------------------------------------------------------------------
-# Batch implementation
-
-
-class MultiCallMethod(object):
- """
- Stores calls made to a MultiCall object for batch execution
- """
- def __init__(self, method, notify=False, config=jsonrpclib.config.DEFAULT):
- """
- Sets up the store
-
- :param method: Name of the method to call
- :param notify: Notification flag
- :param config: Request configuration
- """
- self.method = method
- self.params = []
- self.notify = notify
- self._config = config
-
- def __call__(self, *args, **kwargs):
- """
- Normalizes call parameters
- """
- if kwargs and args:
- raise ProtocolError('JSON-RPC does not support both ' +
- 'positional and keyword arguments.')
- if kwargs:
- self.params = kwargs
- else:
- self.params = args
-
- def request(self, encoding=None, rpcid=None):
- """
- Returns the request object as JSON-formatted string
- """
- return dumps(self.params, self.method, version=2.0,
- encoding=encoding, rpcid=rpcid, notify=self.notify,
- config=self._config)
-
- def __repr__(self):
- """
- String representation
- """
- return str(self.request())
-
- def __getattr__(self, method):
- """
- Updates the object for a nested call
- """
- self.method = "{0}.{1}".format(self.method, method)
- return self
-
-
-class MultiCallNotify(object):
- """
- Same as MultiCallMethod but for notifications
- """
- def __init__(self, multicall, config=jsonrpclib.config.DEFAULT):
- """
-        Sets up the store
-
- :param multicall: The parent MultiCall instance
- :param config: Request configuration
- """
- self.multicall = multicall
- self._config = config
-
- def __getattr__(self, name):
- """
- Returns the MultiCallMethod to use as a notification
- """
- new_job = MultiCallMethod(name, notify=True, config=self._config)
- self.multicall._job_list.append(new_job)
- return new_job
-
-
-class MultiCallIterator(object):
- """
- Iterates over the results of a MultiCall.
- Exceptions are raised in response to JSON-RPC faults
- """
- def __init__(self, results):
- """
- Sets up the results store
- """
- self.results = results
-
- def __get_result(self, item):
- """
- Checks for error and returns the "real" result stored in a MultiCall
- result.
- """
- check_for_errors(item)
- return item['result']
-
- def __iter__(self):
- """
- Iterates over all results
- """
- for item in self.results:
- yield self.__get_result(item)
-        return  # let the generator end (raising StopIteration here breaks on Python 3.7+, PEP 479)
-
- def __getitem__(self, i):
- """
- Returns the i-th object of the results
- """
- return self.__get_result(self.results[i])
-
- def __len__(self):
- """
- Returns the number of results stored
- """
- return len(self.results)
-
-
-class MultiCall(object):
- """
-    server -> an object used to boxcar method calls, where server should be a
- ServerProxy object.
-
- Methods can be added to the MultiCall using normal
- method call syntax e.g.:
-
- multicall = MultiCall(server_proxy)
- multicall.add(2,3)
- multicall.get_address("Guido")
-
- To execute the multicall, call the MultiCall object e.g.:
-
- add_result, address = multicall()
- """
- def __init__(self, server, config=jsonrpclib.config.DEFAULT):
- """
- Sets up the multicall
-
- :param server: A ServerProxy object
- :param config: Request configuration
- """
- self._server = server
- self._job_list = []
- self._config = config
-
- def _request(self):
- """
- Sends the request to the server and returns the responses
-
- :return: A MultiCallIterator object
- """
- if len(self._job_list) < 1:
- # Should we alert? This /is/ pretty obvious.
- return
- request_body = "[ {0} ]".format(
- ','.join(job.request() for job in self._job_list))
- responses = self._server._run_request(request_body)
- del self._job_list[:]
- if not responses:
- responses = []
- return MultiCallIterator(responses)
-
- @property
- def _notify(self):
- """
- Prepares a notification call
- """
- return MultiCallNotify(self, self._config)
-
- def __getattr__(self, name):
- """
- Registers a method call
- """
- new_job = MultiCallMethod(name, config=self._config)
- self._job_list.append(new_job)
- return new_job
-
- __call__ = _request
-
-# These lines conform to xmlrpclib's "compatibility" line.
-# Not really sure if we should include these, but oh well.
-Server = ServerProxy
-
-# ------------------------------------------------------------------------------
-
-
-class Fault(object):
- """
- JSON-RPC error class
- """
- def __init__(self, code=-32000, message='Server error', rpcid=None,
- config=jsonrpclib.config.DEFAULT, data=None):
- """
- Sets up the error description
-
- :param code: Fault code
- :param message: Associated message
- :param rpcid: Request ID
- :param config: A JSONRPClib Config instance
- :param data: Extra information added to an error description
- """
- self.faultCode = code
- self.faultString = message
- self.rpcid = rpcid
- self.config = config
- self.data = data
-
- def error(self):
- """
- Returns the error as a dictionary
-
- :returns: A {'code', 'message'} dictionary
- """
- return {'code': self.faultCode, 'message': self.faultString,
- 'data': self.data}
-
- def response(self, rpcid=None, version=None):
- """
- Returns the error as a JSON-RPC response string
-
- :param rpcid: Forced request ID
- :param version: JSON-RPC version
- :return: A JSON-RPC response string
- """
- if not version:
- version = self.config.version
-
- if rpcid:
- self.rpcid = rpcid
-
- return dumps(self, methodresponse=True, rpcid=self.rpcid,
- version=version, config=self.config)
-
- def dump(self, rpcid=None, version=None):
- """
- Returns the error as a JSON-RPC response dictionary
-
- :param rpcid: Forced request ID
- :param version: JSON-RPC version
- :return: A JSON-RPC response dictionary
- """
- if not version:
- version = self.config.version
-
- if rpcid:
- self.rpcid = rpcid
-
- return dump(self, is_response=True, rpcid=self.rpcid,
- version=version, config=self.config)
-
- def __repr__(self):
- """
- String representation
- """
- return '<Fault {0}: {1}>'.format(self.faultCode, self.faultString)
-
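A short sketch of the structures a Fault produces, assuming the package is importable (the fault values are illustrative):

from jsonrpclib.jsonrpc import Fault

fault = Fault(-32601, "Method not found", rpcid="1")
print(fault.error())
# {'code': -32601, 'message': 'Method not found', 'data': None}
print(fault.dump())
# {'id': '1', 'jsonrpc': '2.0', 'error': {'code': -32601, 'message': 'Method not found'}}
print(fault.response())  # the same response, serialized to a JSON string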
-
-class Payload(object):
- """
- JSON-RPC content handler
- """
- def __init__(self, rpcid=None, version=None,
- config=jsonrpclib.config.DEFAULT):
- """
- Sets up the JSON-RPC handler
-
- :param rpcid: Request ID
- :param version: JSON-RPC version
- :param config: A JSONRPClib Config instance
- """
- if not version:
- version = config.version
-
- self.id = rpcid
- self.version = float(version)
-
- def request(self, method, params=None):
- """
- Prepares a method call request
-
- :param method: Method name
- :param params: Method parameters
- :return: A JSON-RPC request dictionary
- """
- if not isinstance(method, utils.string_types):
- raise ValueError('Method name must be a string.')
-
- if not self.id:
- # Generate a request ID
- self.id = str(uuid.uuid4())
-
- request = {'id': self.id, 'method': method}
- if params or self.version < 1.1:
- request['params'] = params or []
-
- if self.version >= 2:
- request['jsonrpc'] = str(self.version)
-
- return request
-
- def notify(self, method, params=None):
- """
- Prepares a notification request
-
- :param method: Notification name
- :param params: Notification parameters
- :return: A JSON-RPC notification dictionary
- """
- # Prepare the request dictionary
- request = self.request(method, params)
-
- # Remove the request ID, as it's a notification
- if self.version >= 2:
- del request['id']
- else:
- request['id'] = None
-
- return request
-
- def response(self, result=None):
- """
- Prepares a response dictionary
-
- :param result: The result of method call
- :return: A JSON-RPC response dictionary
- """
- response = {'result': result, 'id': self.id}
-
- if self.version >= 2:
- response['jsonrpc'] = str(self.version)
- else:
- response['error'] = None
-
- return response
-
- def error(self, code=-32000, message='Server error.', data=None):
- """
- Prepares an error dictionary
-
- :param code: Error code
- :param message: Error message
- :return: A JSON-RPC error dictionary
- """
- error = self.response()
- if self.version >= 2:
- del error['result']
- else:
- error['result'] = None
- error['error'] = {'code': code, 'message': message}
- if data is not None:
- error['error']['data'] = data
- return error
-
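A sketch of the dictionaries Payload builds for a request, a notification and an error response (values are illustrative):

from jsonrpclib.jsonrpc import Payload

payload = Payload(rpcid="42", version=2.0)
print(payload.request("add", [2, 3]))
# {'id': '42', 'method': 'add', 'params': [2, 3], 'jsonrpc': '2.0'}
print(payload.notify("log", ["hello"]))
# {'method': 'log', 'params': ['hello'], 'jsonrpc': '2.0'}  (no 'id' for a 2.0 notification)
print(payload.error(-32601, "Method not found"))
# {'id': '42', 'jsonrpc': '2.0', 'error': {'code': -32601, 'message': 'Method not found'}}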
-# ------------------------------------------------------------------------------
-
-
-def dump(params=None, methodname=None, rpcid=None, version=None,
- is_response=None, is_notify=None, config=jsonrpclib.config.DEFAULT):
- """
- Prepares a JSON-RPC dictionary (request, notification, response or error)
-
- :param params: Method parameters (if a method name is given) or a Fault
- :param methodname: Method name
- :param rpcid: Request ID
- :param version: JSON-RPC version
- :param is_response: If True, this is a response dictionary
- :param is_notify: If True, this is a notification request
- :param config: A JSONRPClib Config instance
- :return: A JSON-RPC dictionary
- """
- # Default version
- if not version:
- version = config.version
-
- if not is_response and params is None:
- params = []
-
- # Validate method name and parameters
- valid_params = [utils.TupleType, utils.ListType, utils.DictType, Fault]
- if is_response:
- valid_params.append(type(None))
-
- if isinstance(methodname, utils.string_types) and \
- not isinstance(params, tuple(valid_params)):
-        # If we have a method name but params are not list-like or a Fault,
-        # error out.
- raise TypeError("Params must be a dict, list, tuple "
- "or Fault instance.")
-
- # Prepares the JSON-RPC content
- payload = Payload(rpcid=rpcid, version=version)
-
- if isinstance(params, Fault):
- # Prepare an error dictionary
- # pylint: disable=E1103
- return payload.error(params.faultCode, params.faultString, params.data)
-
- if not isinstance(methodname, utils.string_types) and not is_response:
- # Neither a request nor a response
- raise ValueError('Method name must be a string, or is_response '
- 'must be set to True.')
-
- if config.use_jsonclass:
- # Use jsonclass to convert the parameters
- params = jsonclass.dump(params, config=config)
-
- if is_response:
- # Prepare a response dictionary
- if rpcid is None:
- # A response must have a request ID
- raise ValueError('A method response must have an rpcid.')
- return payload.response(params)
-
- if is_notify:
- # Prepare a notification dictionary
- return payload.notify(methodname, params)
- else:
- # Prepare a method call dictionary
- return payload.request(methodname, params)
-
-
-def dumps(params=None, methodname=None, methodresponse=None,
- encoding=None, rpcid=None, version=None, notify=None,
- config=jsonrpclib.config.DEFAULT):
- """
- Prepares a JSON-RPC request/response string
-
- :param params: Method parameters (if a method name is given) or a Fault
- :param methodname: Method name
- :param methodresponse: If True, this is a response dictionary
- :param encoding: Result string encoding
- :param rpcid: Request ID
- :param version: JSON-RPC version
- :param notify: If True, this is a notification request
- :param config: A JSONRPClib Config instance
-    :return: A JSON-RPC request/response string
- """
- # Prepare the dictionary
- request = dump(params, methodname, rpcid, version, methodresponse, notify,
- config)
-
- # Returns it as a JSON string
- return jdumps(request, encoding=encoding or "UTF-8")
-
-
-def load(data, config=jsonrpclib.config.DEFAULT):
- """
- Loads a JSON-RPC request/response dictionary. Calls jsonclass to load beans
-
- :param data: A JSON-RPC dictionary
- :param config: A JSONRPClib Config instance (or None for default values)
- :return: A parsed dictionary or None
- """
- if data is None:
- # Notification
- return None
-
- # if the above raises an error, the implementing server code
- # should return something like the following:
- # { 'jsonrpc':'2.0', 'error': fault.error(), id: None }
- if config.use_jsonclass:
- # Convert beans
- data = jsonclass.load(data, config.classes)
-
- return data
-
-
-def loads(data, config=jsonrpclib.config.DEFAULT):
- """
- Loads a JSON-RPC request/response string. Calls jsonclass to load beans
-
- :param data: A JSON-RPC string
- :param config: A JSONRPClib Config instance (or None for default values)
- :return: A parsed dictionary or None
- """
- if data == '':
- # Notification
- return None
-
- # Parse the JSON dictionary
- result = jloads(data)
-
- # Load the beans
- return load(result, config)
-
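A round-trip sketch with dumps() and loads(), assuming the package is importable; the method name and parameters are made up:

from jsonrpclib.jsonrpc import dumps, loads

request_string = dumps([2, 3], "add", rpcid="1", version=2.0)
# e.g. '{"id": "1", "method": "add", "params": [2, 3], "jsonrpc": "2.0"}' (key order may vary)
parsed = loads(request_string)
assert parsed["method"] == "add" and parsed["params"] == [2, 3]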
-# ------------------------------------------------------------------------------
-
-
-def check_for_errors(result):
- """
- Checks if a result dictionary signals an error
-
- :param result: A result dictionary
- :raise TypeError: Invalid parameter
- :raise NotImplementedError: Unknown JSON-RPC version
- :raise ValueError: Invalid dictionary content
- :raise ProtocolError: An error occurred on the server side
- :return: The result parameter
- """
- if not result:
- # Notification
- return result
-
- if not isinstance(result, utils.DictType):
- # Invalid argument
- raise TypeError('Response is not a dict.')
-
- if 'jsonrpc' in result and float(result['jsonrpc']) > 2.0:
- # Unknown JSON-RPC version
- raise NotImplementedError('JSON-RPC version not yet supported.')
-
- if 'result' not in result and 'error' not in result:
- # Invalid dictionary content
- raise ValueError('Response does not have a result or error key.')
-
- if 'error' in result and result['error']:
- # Server-side error
- if 'code' in result['error']:
- # Code + Message
- code = result['error']['code']
- try:
- # Get the message (jsonrpclib)
- message = result['error']['message']
- except KeyError:
- # Get the trace (jabsorb)
- message = result['error'].get('trace', '<no error message>')
-
- if -32700 <= code <= -32000:
- # Pre-defined errors
- # See http://www.jsonrpc.org/specification#error_object
- raise ProtocolError((code, message))
- else:
- # Application error
- data = result['error'].get('data', None)
- raise AppError((code, message, data))
-
- elif isinstance(result['error'], dict) and len(result['error']) == 1:
- # Error with a single entry ('reason', ...): use its content
-            error_key = next(iter(result['error']))  # dict.keys() is not indexable on Python 3
- raise ProtocolError(result['error'][error_key])
-
- else:
- # Use the raw error content
- raise ProtocolError(result['error'])
-
- return result
-
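A small sketch of check_for_errors() on a well-formed result and on an error response (both dictionaries are made up):

from jsonrpclib.jsonrpc import check_for_errors, ProtocolError

ok = {"jsonrpc": "2.0", "id": "1", "result": 5}
assert check_for_errors(ok) is ok  # passes through untouched

bad = {"jsonrpc": "2.0", "id": "1",
       "error": {"code": -32601, "message": "Method not found"}}
try:
    check_for_errors(bad)
except ProtocolError as ex:
    assert ex.args[0] == (-32601, "Method not found")  # pre-defined error range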
-
-def isbatch(request):
- """
- Tests if the given request is a batch call, i.e. a list of multiple calls
- :param request: a JSON-RPC request object
- :return: True if the request is a batch call
- """
- if not isinstance(request, (utils.ListType, utils.TupleType)):
- # Not a list: not a batch call
- return False
- elif len(request) < 1:
-        # Empty list: not a batch call
- return False
- elif not isinstance(request[0], utils.DictType):
- # One of the requests is not a dictionary, i.e. a JSON Object
- # therefore it is not a valid JSON-RPC request
- return False
- elif 'jsonrpc' not in request[0].keys():
- # No "jsonrpc" version in the JSON object: not a request
- return False
-
- try:
- version = float(request[0]['jsonrpc'])
- except ValueError:
- # Bad version of JSON-RPC
- raise ProtocolError('"jsonrpc" key must be a float(able) value.')
-
- if version < 2:
-        # Batch calls were not supported before JSON-RPC 2.0
- return False
-
- return True
-
-
-def isnotification(request):
- """
- Tests if the given request is a notification
-
- :param request: A request dictionary
- :return: True if the request is a notification
- """
- if 'id' not in request:
- # 2.0 notification
- return True
-
- if request['id'] is None:
- # 1.0 notification
- return True
-
- return False
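Finally, a sketch of isbatch() and isnotification() on hand-written request objects:

from jsonrpclib.jsonrpc import isbatch, isnotification

single = {"jsonrpc": "2.0", "id": "1", "method": "add", "params": [2, 3]}
batch = [single, {"jsonrpc": "2.0", "method": "log", "params": ["hi"]}]

assert not isbatch(single)         # a lone dict is not a batch
assert isbatch(batch)              # a list of JSON-RPC 2.0 requests is
assert not isnotification(single)  # it carries a non-None id
assert isnotification(batch[1])    # no 'id' key: a 2.0 notification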
[The "+" side of this hunk re-adds jsonrpclib/jsonrpc.py unchanged under scripts/external_libs/jsonrpclib-pelix-0.2.5/; it duplicates the removed lines above and is truncated here.]
+Server = ServerProxy + +# ------------------------------------------------------------------------------ + + +class Fault(object): + """ + JSON-RPC error class + """ + def __init__(self, code=-32000, message='Server error', rpcid=None, + config=jsonrpclib.config.DEFAULT, data=None): + """ + Sets up the error description + + :param code: Fault code + :param message: Associated message + :param rpcid: Request ID + :param config: A JSONRPClib Config instance + :param data: Extra information added to an error description + """ + self.faultCode = code + self.faultString = message + self.rpcid = rpcid + self.config = config + self.data = data + + def error(self): + """ + Returns the error as a dictionary + + :returns: A {'code', 'message'} dictionary + """ + return {'code': self.faultCode, 'message': self.faultString, + 'data': self.data} + + def response(self, rpcid=None, version=None): + """ + Returns the error as a JSON-RPC response string + + :param rpcid: Forced request ID + :param version: JSON-RPC version + :return: A JSON-RPC response string + """ + if not version: + version = self.config.version + + if rpcid: + self.rpcid = rpcid + + return dumps(self, methodresponse=True, rpcid=self.rpcid, + version=version, config=self.config) + + def dump(self, rpcid=None, version=None): + """ + Returns the error as a JSON-RPC response dictionary + + :param rpcid: Forced request ID + :param version: JSON-RPC version + :return: A JSON-RPC response dictionary + """ + if not version: + version = self.config.version + + if rpcid: + self.rpcid = rpcid + + return dump(self, is_response=True, rpcid=self.rpcid, + version=version, config=self.config) + + def __repr__(self): + """ + String representation + """ + return '<Fault {0}: {1}>'.format(self.faultCode, self.faultString) + + +class Payload(object): + """ + JSON-RPC content handler + """ + def __init__(self, rpcid=None, version=None, + config=jsonrpclib.config.DEFAULT): + """ + Sets up the JSON-RPC handler + + :param rpcid: Request ID + :param version: JSON-RPC version + :param config: A JSONRPClib Config instance + """ + if not version: + version = config.version + + self.id = rpcid + self.version = float(version) + + def request(self, method, params=None): + """ + Prepares a method call request + + :param method: Method name + :param params: Method parameters + :return: A JSON-RPC request dictionary + """ + if not isinstance(method, utils.string_types): + raise ValueError('Method name must be a string.') + + if not self.id: + # Generate a request ID + self.id = str(uuid.uuid4()) + + request = {'id': self.id, 'method': method} + if params or self.version < 1.1: + request['params'] = params or [] + + if self.version >= 2: + request['jsonrpc'] = str(self.version) + + return request + + def notify(self, method, params=None): + """ + Prepares a notification request + + :param method: Notification name + :param params: Notification parameters + :return: A JSON-RPC notification dictionary + """ + # Prepare the request dictionary + request = self.request(method, params) + + # Remove the request ID, as it's a notification + if self.version >= 2: + del request['id'] + else: + request['id'] = None + + return request + + def response(self, result=None): + """ + Prepares a response dictionary + + :param result: The result of method call + :return: A JSON-RPC response dictionary + """ + response = {'result': result, 'id': self.id} + + if self.version >= 2: + response['jsonrpc'] = str(self.version) + else: + response['error'] = None + + return response + + def 
error(self, code=-32000, message='Server error.', data=None): + """ + Prepares an error dictionary + + :param code: Error code + :param message: Error message + :return: A JSON-RPC error dictionary + """ + error = self.response() + if self.version >= 2: + del error['result'] + else: + error['result'] = None + error['error'] = {'code': code, 'message': message} + if data is not None: + error['error']['data'] = data + return error + +# ------------------------------------------------------------------------------ + + +def dump(params=None, methodname=None, rpcid=None, version=None, + is_response=None, is_notify=None, config=jsonrpclib.config.DEFAULT): + """ + Prepares a JSON-RPC dictionary (request, notification, response or error) + + :param params: Method parameters (if a method name is given) or a Fault + :param methodname: Method name + :param rpcid: Request ID + :param version: JSON-RPC version + :param is_response: If True, this is a response dictionary + :param is_notify: If True, this is a notification request + :param config: A JSONRPClib Config instance + :return: A JSON-RPC dictionary + """ + # Default version + if not version: + version = config.version + + if not is_response and params is None: + params = [] + + # Validate method name and parameters + valid_params = [utils.TupleType, utils.ListType, utils.DictType, Fault] + if is_response: + valid_params.append(type(None)) + + if isinstance(methodname, utils.string_types) and \ + not isinstance(params, tuple(valid_params)): + """ + If a method, and params are not in a listish or a Fault, + error out. + """ + raise TypeError("Params must be a dict, list, tuple " + "or Fault instance.") + + # Prepares the JSON-RPC content + payload = Payload(rpcid=rpcid, version=version) + + if isinstance(params, Fault): + # Prepare an error dictionary + # pylint: disable=E1103 + return payload.error(params.faultCode, params.faultString, params.data) + + if not isinstance(methodname, utils.string_types) and not is_response: + # Neither a request nor a response + raise ValueError('Method name must be a string, or is_response ' + 'must be set to True.') + + if config.use_jsonclass: + # Use jsonclass to convert the parameters + params = jsonclass.dump(params, config=config) + + if is_response: + # Prepare a response dictionary + if rpcid is None: + # A response must have a request ID + raise ValueError('A method response must have an rpcid.') + return payload.response(params) + + if is_notify: + # Prepare a notification dictionary + return payload.notify(methodname, params) + else: + # Prepare a method call dictionary + return payload.request(methodname, params) + + +def dumps(params=None, methodname=None, methodresponse=None, + encoding=None, rpcid=None, version=None, notify=None, + config=jsonrpclib.config.DEFAULT): + """ + Prepares a JSON-RPC request/response string + + :param params: Method parameters (if a method name is given) or a Fault + :param methodname: Method name + :param methodresponse: If True, this is a response dictionary + :param encoding: Result string encoding + :param rpcid: Request ID + :param version: JSON-RPC version + :param notify: If True, this is a notification request + :param config: A JSONRPClib Config instance + :return: A JSON-RPC dictionary + """ + # Prepare the dictionary + request = dump(params, methodname, rpcid, version, methodresponse, notify, + config) + + # Returns it as a JSON string + return jdumps(request, encoding=encoding or "UTF-8") + + +def load(data, config=jsonrpclib.config.DEFAULT): + """ + Loads a 
JSON-RPC request/response dictionary. Calls jsonclass to load beans + + :param data: A JSON-RPC dictionary + :param config: A JSONRPClib Config instance (or None for default values) + :return: A parsed dictionary or None + """ + if data is None: + # Notification + return None + + # if the above raises an error, the implementing server code + # should return something like the following: + # { 'jsonrpc':'2.0', 'error': fault.error(), id: None } + if config.use_jsonclass: + # Convert beans + data = jsonclass.load(data, config.classes) + + return data + + +def loads(data, config=jsonrpclib.config.DEFAULT): + """ + Loads a JSON-RPC request/response string. Calls jsonclass to load beans + + :param data: A JSON-RPC string + :param config: A JSONRPClib Config instance (or None for default values) + :return: A parsed dictionary or None + """ + if data == '': + # Notification + return None + + # Parse the JSON dictionary + result = jloads(data) + + # Load the beans + return load(result, config) + +# ------------------------------------------------------------------------------ + + +def check_for_errors(result): + """ + Checks if a result dictionary signals an error + + :param result: A result dictionary + :raise TypeError: Invalid parameter + :raise NotImplementedError: Unknown JSON-RPC version + :raise ValueError: Invalid dictionary content + :raise ProtocolError: An error occurred on the server side + :return: The result parameter + """ + if not result: + # Notification + return result + + if not isinstance(result, utils.DictType): + # Invalid argument + raise TypeError('Response is not a dict.') + + if 'jsonrpc' in result and float(result['jsonrpc']) > 2.0: + # Unknown JSON-RPC version + raise NotImplementedError('JSON-RPC version not yet supported.') + + if 'result' not in result and 'error' not in result: + # Invalid dictionary content + raise ValueError('Response does not have a result or error key.') + + if 'error' in result and result['error']: + # Server-side error + if 'code' in result['error']: + # Code + Message + code = result['error']['code'] + try: + # Get the message (jsonrpclib) + message = result['error']['message'] + except KeyError: + # Get the trace (jabsorb) + message = result['error'].get('trace', '<no error message>') + + if -32700 <= code <= -32000: + # Pre-defined errors + # See http://www.jsonrpc.org/specification#error_object + raise ProtocolError((code, message)) + else: + # Application error + data = result['error'].get('data', None) + raise AppError((code, message, data)) + + elif isinstance(result['error'], dict) and len(result['error']) == 1: + # Error with a single entry ('reason', ...): use its content + error_key = result['error'].keys()[0] + raise ProtocolError(result['error'][error_key]) + + else: + # Use the raw error content + raise ProtocolError(result['error']) + + return result + + +def isbatch(request): + """ + Tests if the given request is a batch call, i.e. a list of multiple calls + :param request: a JSON-RPC request object + :return: True if the request is a batch call + """ + if not isinstance(request, (utils.ListType, utils.TupleType)): + # Not a list: not a batch call + return False + elif len(request) < 1: + # Only one request: not a batch call + return False + elif not isinstance(request[0], utils.DictType): + # One of the requests is not a dictionary, i.e. 
a JSON Object + # therefore it is not a valid JSON-RPC request + return False + elif 'jsonrpc' not in request[0].keys(): + # No "jsonrpc" version in the JSON object: not a request + return False + + try: + version = float(request[0]['jsonrpc']) + except ValueError: + # Bad version of JSON-RPC + raise ProtocolError('"jsonrpc" key must be a float(able) value.') + + if version < 2: + # Batch call were not supported before JSON-RPC 2.0 + return False + + return True + + +def isnotification(request): + """ + Tests if the given request is a notification + + :param request: A request dictionary + :return: True if the request is a notification + """ + if 'id' not in request: + # 2.0 notification + return True + + if request['id'] is None: + # 1.0 notification + return True + + return False diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/threadpool.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/threadpool.py index 3919c105..a38b5b83 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/threadpool.py +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/threadpool.py @@ -1,490 +1,490 @@ -#!/usr/bin/env python
-# -- Content-Encoding: UTF-8 --
-"""
-Cached thread pool, inspired by the Pelix/iPOPO Thread Pool
-
-:author: Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# ------------------------------------------------------------------------------
-
-# Standard library
-import logging
-import threading
-
-try:
- # Python 3
- # pylint: disable=F0401
- import queue
-except ImportError:
- # Python 2
- # pylint: disable=F0401
- import Queue as queue
-
-# ------------------------------------------------------------------------------
-
-
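-# Illustrative sketch (not from the original module): EventData couples a
-# threading event with a payload, so a waiter either gets the data or the
-# exception passed to raise_exception().
-#
-#   evt = EventData()
-#   evt.set("payload")
-#   evt.wait(1)             # True; evt.data == "payload"
-#   evt.raise_exception(IOError("boom"))
-#   evt.wait(1)             # raises the IOError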
-class EventData(object):
- """
- A threading event with some associated data
- """
- def __init__(self):
- """
- Sets up the event
- """
- self.__event = threading.Event()
- self.__data = None
- self.__exception = None
-
- @property
- def data(self):
- """
- Returns the associated value
- """
- return self.__data
-
- @property
- def exception(self):
- """
- Returns the exception used to stop the wait() method
- """
- return self.__exception
-
- def clear(self):
- """
- Clears the event
- """
- self.__event.clear()
- self.__data = None
- self.__exception = None
-
- def is_set(self):
- """
- Checks if the event is set
- """
- return self.__event.is_set()
-
- def set(self, data=None):
- """
- Sets the event
- """
- self.__data = data
- self.__exception = None
- self.__event.set()
-
- def raise_exception(self, exception):
- """
- Raises an exception in wait()
-
- :param exception: An Exception object
- """
- self.__data = None
- self.__exception = exception
- self.__event.set()
-
- def wait(self, timeout=None):
- """
- Waits for the event or for the timeout
-
- :param timeout: Wait timeout (in seconds)
- :return: True if the event as been set, else False
- """
- # The 'or' part is for Python 2.6
- result = self.__event.wait(timeout) or self.__event.is_set()
- # pylint: disable=E0702
- # Pylint seems to miss the "is None" check below
- if self.__exception is None:
- return result
- else:
- raise self.__exception
-
-
-class FutureResult(object):
- """
- An object to wait for the result of a threaded execution
- """
- def __init__(self, logger=None):
- """
- Sets up the FutureResult object
-
- :param logger: The Logger to use in case of error (optional)
- """
- self._logger = logger or logging.getLogger(__name__)
- self._done_event = EventData()
- self.__callback = None
- self.__extra = None
-
- def __notify(self):
- """
- Notify the given callback about the result of the execution
- """
- if self.__callback is not None:
- try:
- self.__callback(self._done_event.data,
- self._done_event.exception,
- self.__extra)
- except Exception as ex:
- self._logger.exception("Error calling back method: %s", ex)
-
- def set_callback(self, method, extra=None):
- """
- Sets a callback method, called once the result has been computed or in
- case of exception.
-
- The callback method must have the following signature:
- ``callback(result, exception, extra)``.
-
- :param method: The method to call back at the end of the execution
- :param extra: Extra parameter to be given to the callback method
- """
- self.__callback = method
- self.__extra = extra
- if self._done_event.is_set():
- # The execution has already finished
- self.__notify()
-
- def execute(self, method, args, kwargs):
- """
- Executes the given method and stores its result.
- The result is considered "done" even if the method raises an exception
-
- :param method: The method to execute
- :param args: Method positional arguments
- :param kwargs: Method keyword arguments
- :raise Exception: The exception raised by the method
- """
- # Normalize arguments
- if args is None:
- args = []
-
- if kwargs is None:
- kwargs = {}
-
- try:
- # Call the method
- result = method(*args, **kwargs)
- except Exception as ex:
- # Something went wrong: propagate to the event and to the caller
- self._done_event.raise_exception(ex)
- raise
- else:
- # Store the result
- self._done_event.set(result)
- finally:
- # In any case: notify the callback (if any)
- self.__notify()
-
- def done(self):
- """
- Returns True if the job has finished, else False
- """
- return self._done_event.is_set()
-
- def result(self, timeout=None):
- """
- Waits up to timeout for the result of the threaded job.
- Returns the result immediately if the job has already finished.
-
- :param timeout: The maximum time to wait for a result (in seconds)
- :raise OSError: The timeout was reached before the job finished
- :raise Exception: The exception encountered during the call, if any
- """
- if self._done_event.wait(timeout):
- return self._done_event.data
- else:
- raise OSError("Timeout raised")
-
-# ------------------------------------------------------------------------------
-
-
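-# Illustrative usage sketch (not from the original module): enqueue() returns
-# a FutureResult whose result() blocks for the value, and set_callback()
-# registers a callback(result, exception, extra) hook.
-#
-#   def on_done(result, exception, extra):
-#       print("%s -> %r (error: %r)" % (extra, result, exception))
-#
-#   pool = ThreadPool(max_threads=4, min_threads=1)
-#   pool.start()
-#   future = pool.enqueue(pow, 2, 10)
-#   future.set_callback(on_done, extra="job-1")
-#   print(future.result(timeout=5))    # 1024
-#   pool.stop()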
-class ThreadPool(object):
- """
- Executes the tasks stored in a FIFO in a thread pool
- """
- def __init__(self, max_threads, min_threads=1, queue_size=0, timeout=60,
- logname=None):
- """
- Sets up the thread pool.
-
- Threads are kept alive for 60 seconds by default (timeout argument).
-
- :param max_threads: Maximum size of the thread pool
- :param min_threads: Minimum size of the thread pool
- :param queue_size: Size of the task queue (0 for infinite)
- :param timeout: Queue timeout (in seconds, 60s by default)
- :param logname: Name of the logger
- :raise ValueError: Invalid number of threads
- """
- # Validate parameters
- try:
- max_threads = int(max_threads)
- if max_threads < 1:
- raise ValueError("Pool size must be greater than 0")
- except (TypeError, ValueError) as ex:
- raise ValueError("Invalid pool size: {0}".format(ex))
-
- try:
- min_threads = int(min_threads)
- if min_threads < 0:
- min_threads = 0
- elif min_threads > max_threads:
- min_threads = max_threads
- except (TypeError, ValueError) as ex:
- raise ValueError("Invalid pool size: {0}".format(ex))
-
- # The logger
- self._logger = logging.getLogger(logname or __name__)
-
- # The loop control event
- self._done_event = threading.Event()
- self._done_event.set()
-
- # The task queue
- try:
- queue_size = int(queue_size)
- except (TypeError, ValueError):
- # Not a valid integer
- queue_size = 0
-
- self._queue = queue.Queue(queue_size)
- self._timeout = timeout
- self.__lock = threading.RLock()
-
- # The thread pool
- self._min_threads = min_threads
- self._max_threads = max_threads
- self._threads = []
-
- # Thread count
- self._thread_id = 0
-
- # Current number of threads, active and alive
- self.__nb_threads = 0
- self.__nb_active_threads = 0
-
- def start(self):
- """
- Starts the thread pool. Does nothing if the pool is already started.
- """
- if not self._done_event.is_set():
- # Stop event not set: we're running
- return
-
- # Clear the stop event
- self._done_event.clear()
-
- # Compute the number of threads to start to handle pending tasks
- nb_pending_tasks = self._queue.qsize()
- if nb_pending_tasks > self._max_threads:
- nb_threads = self._max_threads
- elif nb_pending_tasks < self._min_threads:
- nb_threads = self._min_threads
- else:
- nb_threads = nb_pending_tasks
-
- # Create the threads
- for _ in range(nb_threads):
- self.__start_thread()
-
- def __start_thread(self):
- """
- Starts a new thread, if possible
- """
- with self.__lock:
- if self.__nb_threads >= self._max_threads:
- # Can't create more threads
- return False
-
- if self._done_event.is_set():
- # We're stopped: do nothing
- return False
-
- # Prepare thread and start it
- name = "{0}-{1}".format(self._logger.name, self._thread_id)
- self._thread_id += 1
-
- thread = threading.Thread(target=self.__run, name=name)
- thread.daemon = True
- self._threads.append(thread)
- thread.start()
- return True
-
- def stop(self):
- """
- Stops the thread pool. Does nothing if the pool is already stopped.
- """
- if self._done_event.is_set():
- # Stop event set: we're stopped
- return
-
- # Set the stop event
- self._done_event.set()
-
- with self.__lock:
- # Add something in the queue (to unlock the join())
- try:
- for _ in self._threads:
- self._queue.put(self._done_event, True, self._timeout)
- except queue.Full:
- # There is already something in the queue
- pass
-
- # Copy the list of threads to wait for
- threads = self._threads[:]
-
- # Join threads outside the lock
- for thread in threads:
- while thread.is_alive():
- # Wait 3 seconds
- thread.join(3)
- if thread.is_alive():
- # Thread is still alive: something might be wrong
- self._logger.warning("Thread %s is still alive...",
- thread.name)
-
- # Clear storage
- del self._threads[:]
- self.clear()
-
- def enqueue(self, method, *args, **kwargs):
- """
- Queues a task in the pool
-
- :param method: Method to call
- :return: A FutureResult object, to get the result of the task
- :raise ValueError: Invalid method
- :raise Full: The task queue is full
- """
- if not hasattr(method, '__call__'):
- raise ValueError("{0} has no __call__ member."
- .format(method.__name__))
-
- # Prepare the future result object
- future = FutureResult(self._logger)
-
- # Use a lock, as we might be "resetting" the queue
- with self.__lock:
- # Add the task to the queue
- self._queue.put((method, args, kwargs, future), True,
- self._timeout)
-
- if self.__nb_active_threads == self.__nb_threads:
- # All threads are taken: start a new one
- self.__start_thread()
-
- return future
-
- def clear(self):
- """
- Empties the current queue content.
- Returns once the queue has been emptied.
- """
- with self.__lock:
- # Empty the current queue
- try:
- while True:
- self._queue.get_nowait()
- self._queue.task_done()
- except queue.Empty:
- # Queue is now empty
- pass
-
- # Wait for the tasks currently executed
- self.join()
-
- def join(self, timeout=None):
- """
- Waits for all the tasks to be executed
-
- :param timeout: Maximum time to wait (in seconds)
- :return: True if the queue has been emptied, else False
- """
- if self._queue.empty():
- # Nothing to wait for...
- return True
- elif timeout is None:
- # Use the original join
- self._queue.join()
- return True
- else:
- # Wait for the condition
- with self._queue.all_tasks_done:
- self._queue.all_tasks_done.wait(timeout)
- return not bool(self._queue.unfinished_tasks)
-
- def __run(self):
- """
- The main loop
- """
- with self.__lock:
- self.__nb_threads += 1
-
- while not self._done_event.is_set():
- try:
- # Wait for an action (blocking)
- task = self._queue.get(True, self._timeout)
- if task is self._done_event:
- # Stop event in the queue: get out
- self._queue.task_done()
- with self.__lock:
- self.__nb_threads -= 1
- return
- except queue.Empty:
- # Nothing to do yet
- pass
- else:
- with self.__lock:
- self.__nb_active_threads += 1
-
- # Extract elements
- method, args, kwargs, future = task
- try:
- # Call the method
- future.execute(method, args, kwargs)
- except Exception as ex:
- self._logger.exception("Error executing %s: %s",
- method.__name__, ex)
- finally:
- # Mark the action as executed
- self._queue.task_done()
-
- # Thread is not active anymore
- self.__nb_active_threads -= 1
-
- # Clean up thread if necessary
- with self.__lock:
- if self.__nb_threads > self._min_threads:
- # No more work for this thread, and we're above the
- # minimum number of threads: stop this one
- self.__nb_threads -= 1
- return
-
- with self.__lock:
- # Thread stops
- self.__nb_threads -= 1
+#!/usr/bin/env python +# -- Content-Encoding: UTF-8 -- +""" +Cached thread pool, inspired from Pelix/iPOPO Thread Pool + +:author: Thomas Calmant +:copyright: Copyright 2015, isandlaTech +:license: Apache License 2.0 +:version: 0.2.5 + +.. + + Copyright 2015 isandlaTech + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +""" + +# Documentation strings format +__docformat__ = "restructuredtext en" + +# Module version +__version_info__ = (0, 2, 5) +__version__ = ".".join(str(x) for x in __version_info__) + +# ------------------------------------------------------------------------------ + +# Standard library +import logging +import threading + +try: + # Python 3 + # pylint: disable=F0401 + import queue +except ImportError: + # Python 2 + # pylint: disable=F0401 + import Queue as queue + +# ------------------------------------------------------------------------------ + + +class EventData(object): + """ + A threading event with some associated data + """ + def __init__(self): + """ + Sets up the event + """ + self.__event = threading.Event() + self.__data = None + self.__exception = None + + @property + def data(self): + """ + Returns the associated value + """ + return self.__data + + @property + def exception(self): + """ + Returns the exception used to stop the wait() method + """ + return self.__exception + + def clear(self): + """ + Clears the event + """ + self.__event.clear() + self.__data = None + self.__exception = None + + def is_set(self): + """ + Checks if the event is set + """ + return self.__event.is_set() + + def set(self, data=None): + """ + Sets the event + """ + self.__data = data + self.__exception = None + self.__event.set() + + def raise_exception(self, exception): + """ + Raises an exception in wait() + + :param exception: An Exception object + """ + self.__data = None + self.__exception = exception + self.__event.set() + + def wait(self, timeout=None): + """ + Waits for the event or for the timeout + + :param timeout: Wait timeout (in seconds) + :return: True if the event as been set, else False + """ + # The 'or' part is for Python 2.6 + result = self.__event.wait(timeout) or self.__event.is_set() + # pylint: disable=E0702 + # Pylint seems to miss the "is None" check below + if self.__exception is None: + return result + else: + raise self.__exception + + +class FutureResult(object): + """ + An object to wait for the result of a threaded execution + """ + def __init__(self, logger=None): + """ + Sets up the FutureResult object + + :param logger: The Logger to use in case of error (optional) + """ + self._logger = logger or logging.getLogger(__name__) + self._done_event = EventData() + self.__callback = None + self.__extra = None + + def __notify(self): + """ + Notify the given callback about the result of the execution + """ + if self.__callback is not None: + try: + self.__callback(self._done_event.data, + self._done_event.exception, + self.__extra) + except Exception as ex: + self._logger.exception("Error calling back method: %s", ex) + + def set_callback(self, method, extra=None): + 
""" + Sets a callback method, called once the result has been computed or in + case of exception. + + The callback method must have the following signature: + ``callback(result, exception, extra)``. + + :param method: The method to call back in the end of the execution + :param extra: Extra parameter to be given to the callback method + """ + self.__callback = method + self.__extra = extra + if self._done_event.is_set(): + # The execution has already finished + self.__notify() + + def execute(self, method, args, kwargs): + """ + Execute the given method and stores its result. + The result is considered "done" even if the method raises an exception + + :param method: The method to execute + :param args: Method positional arguments + :param kwargs: Method keyword arguments + :raise Exception: The exception raised by the method + """ + # Normalize arguments + if args is None: + args = [] + + if kwargs is None: + kwargs = {} + + try: + # Call the method + result = method(*args, **kwargs) + except Exception as ex: + # Something went wrong: propagate to the event and to the caller + self._done_event.raise_exception(ex) + raise + else: + # Store the result + self._done_event.set(result) + finally: + # In any case: notify the call back (if any) + self.__notify() + + def done(self): + """ + Returns True if the job has finished, else False + """ + return self._done_event.is_set() + + def result(self, timeout=None): + """ + Waits up to timeout for the result the threaded job. + Returns immediately the result if the job has already been done. + + :param timeout: The maximum time to wait for a result (in seconds) + :raise OSError: The timeout raised before the job finished + :raise Exception: The exception encountered during the call, if any + """ + if self._done_event.wait(timeout): + return self._done_event.data + else: + raise OSError("Timeout raised") + +# ------------------------------------------------------------------------------ + + +class ThreadPool(object): + """ + Executes the tasks stored in a FIFO in a thread pool + """ + def __init__(self, max_threads, min_threads=1, queue_size=0, timeout=60, + logname=None): + """ + Sets up the thread pool. + + Threads are kept alive 60 seconds (timeout argument). 
+ + :param max_threads: Maximum size of the thread pool + :param min_threads: Minimum size of the thread pool + :param queue_size: Size of the task queue (0 for infinite) + :param timeout: Queue timeout (in seconds, 60s by default) + :param logname: Name of the logger + :raise ValueError: Invalid number of threads + """ + # Validate parameters + try: + max_threads = int(max_threads) + if max_threads < 1: + raise ValueError("Pool size must be greater than 0") + except (TypeError, ValueError) as ex: + raise ValueError("Invalid pool size: {0}".format(ex)) + + try: + min_threads = int(min_threads) + if min_threads < 0: + min_threads = 0 + elif min_threads > max_threads: + min_threads = max_threads + except (TypeError, ValueError) as ex: + raise ValueError("Invalid pool size: {0}".format(ex)) + + # The logger + self._logger = logging.getLogger(logname or __name__) + + # The loop control event + self._done_event = threading.Event() + self._done_event.set() + + # The task queue + try: + queue_size = int(queue_size) + except (TypeError, ValueError): + # Not a valid integer + queue_size = 0 + + self._queue = queue.Queue(queue_size) + self._timeout = timeout + self.__lock = threading.RLock() + + # The thread pool + self._min_threads = min_threads + self._max_threads = max_threads + self._threads = [] + + # Thread count + self._thread_id = 0 + + # Current number of threads, active and alive + self.__nb_threads = 0 + self.__nb_active_threads = 0 + + def start(self): + """ + Starts the thread pool. Does nothing if the pool is already started. + """ + if not self._done_event.is_set(): + # Stop event not set: we're running + return + + # Clear the stop event + self._done_event.clear() + + # Compute the number of threads to start to handle pending tasks + nb_pending_tasks = self._queue.qsize() + if nb_pending_tasks > self._max_threads: + nb_threads = self._max_threads + elif nb_pending_tasks < self._min_threads: + nb_threads = self._min_threads + else: + nb_threads = nb_pending_tasks + + # Create the threads + for _ in range(nb_threads): + self.__start_thread() + + def __start_thread(self): + """ + Starts a new thread, if possible + """ + with self.__lock: + if self.__nb_threads >= self._max_threads: + # Can't create more threads + return False + + if self._done_event.is_set(): + # We're stopped: do nothing + return False + + # Prepare thread and start it + name = "{0}-{1}".format(self._logger.name, self._thread_id) + self._thread_id += 1 + + thread = threading.Thread(target=self.__run, name=name) + thread.daemon = True + self._threads.append(thread) + thread.start() + return True + + def stop(self): + """ + Stops the thread pool. Does nothing if the pool is already stopped. 
+ """ + if self._done_event.is_set(): + # Stop event set: we're stopped + return + + # Set the stop event + self._done_event.set() + + with self.__lock: + # Add something in the queue (to unlock the join()) + try: + for _ in self._threads: + self._queue.put(self._done_event, True, self._timeout) + except queue.Full: + # There is already something in the queue + pass + + # Copy the list of threads to wait for + threads = self._threads[:] + + # Join threads outside the lock + for thread in threads: + while thread.is_alive(): + # Wait 3 seconds + thread.join(3) + if thread.is_alive(): + # Thread is still alive: something might be wrong + self._logger.warning("Thread %s is still alive...", + thread.name) + + # Clear storage + del self._threads[:] + self.clear() + + def enqueue(self, method, *args, **kwargs): + """ + Queues a task in the pool + + :param method: Method to call + :return: A FutureResult object, to get the result of the task + :raise ValueError: Invalid method + :raise Full: The task queue is full + """ + if not hasattr(method, '__call__'): + raise ValueError("{0} has no __call__ member." + .format(method.__name__)) + + # Prepare the future result object + future = FutureResult(self._logger) + + # Use a lock, as we might be "resetting" the queue + with self.__lock: + # Add the task to the queue + self._queue.put((method, args, kwargs, future), True, + self._timeout) + + if self.__nb_active_threads == self.__nb_threads: + # All threads are taken: start a new one + self.__start_thread() + + return future + + def clear(self): + """ + Empties the current queue content. + Returns once the queue have been emptied. + """ + with self.__lock: + # Empty the current queue + try: + while True: + self._queue.get_nowait() + self._queue.task_done() + except queue.Empty: + # Queue is now empty + pass + + # Wait for the tasks currently executed + self.join() + + def join(self, timeout=None): + """ + Waits for all the tasks to be executed + + :param timeout: Maximum time to wait (in seconds) + :return: True if the queue has been emptied, else False + """ + if self._queue.empty(): + # Nothing to wait for... 
+ return True + elif timeout is None: + # Use the original join + self._queue.join() + return True + else: + # Wait for the condition + with self._queue.all_tasks_done: + self._queue.all_tasks_done.wait(timeout) + return not bool(self._queue.unfinished_tasks) + + def __run(self): + """ + The main loop + """ + with self.__lock: + self.__nb_threads += 1 + + while not self._done_event.is_set(): + try: + # Wait for an action (blocking) + task = self._queue.get(True, self._timeout) + if task is self._done_event: + # Stop event in the queue: get out + self._queue.task_done() + with self.__lock: + self.__nb_threads -= 1 + return + except queue.Empty: + # Nothing to do yet + pass + else: + with self.__lock: + self.__nb_active_threads += 1 + + # Extract elements + method, args, kwargs, future = task + try: + # Call the method + future.execute(method, args, kwargs) + except Exception as ex: + self._logger.exception("Error executing %s: %s", + method.__name__, ex) + finally: + # Mark the action as executed + self._queue.task_done() + + # Thread is not active anymore + self.__nb_active_threads -= 1 + + # Clean up thread if necessary + with self.__lock: + if self.__nb_threads > self._min_threads: + # No more work for this thread, and we're above the + # minimum number of threads: stop this one + self.__nb_threads -= 1 + return + + with self.__lock: + # Thread stops + self.__nb_threads -= 1 diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/utils.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/utils.py index 01b71fce..31183742 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib/utils.py +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib/utils.py @@ -1,122 +1,122 @@ -#!/usr/bin/python
-# -- Content-Encoding: UTF-8 --
-"""
-Utility methods, for compatibility between Python versions
-
-:author: Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-
-import sys
-
-# ------------------------------------------------------------------------------
-
-if sys.version_info[0] < 3:
- # Python 2
- import types
- try:
- string_types = (
- types.StringType,
- types.UnicodeType
- )
- except NameError:
- # Python built without unicode support
- string_types = (types.StringType,)
-
- numeric_types = (
- types.IntType,
- types.LongType,
- types.FloatType
- )
-
- def to_bytes(string):
- """
- Converts the given string into bytes
- """
- if type(string) is unicode:
- return str(string)
- return string
-
- def from_bytes(data):
- """
- Converts the given bytes into a string
- """
- if type(data) is str:
- return data
- return str(data)
-
-else:
- # Python 3
- string_types = (
- bytes,
- str
- )
-
- numeric_types = (
- int,
- float
- )
-
- def to_bytes(string):
- """
- Converts the given string into bytes
- """
- if type(string) is bytes:
- return string
- return bytes(string, "UTF-8")
-
- def from_bytes(data):
- """
- Converts the given bytes into a string
- """
- if type(data) is str:
- return data
- return str(data, "UTF-8")
-
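-# Illustrative sketch (not from the original module): to_bytes()/from_bytes()
-# normalize payloads for the HTTP layer on both major Python versions.
-#
-#   to_bytes("ping")       # b"ping" on Python 3, "ping" unchanged on Python 2
-#   from_bytes(b"pong")    # "pong" on both versions
-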
-# ------------------------------------------------------------------------------
-# Common
-
-DictType = dict
-
-ListType = list
-TupleType = tuple
-
-iterable_types = (
- list,
- set, frozenset,
- tuple
-)
-
-value_types = (
- bool,
- type(None)
-)
-
-primitive_types = string_types + numeric_types + value_types
+#!/usr/bin/python +# -- Content-Encoding: UTF-8 -- +""" +Utility methods, for compatibility between Python version + +:author: Thomas Calmant +:copyright: Copyright 2015, isandlaTech +:license: Apache License 2.0 +:version: 0.2.5 + +.. + + Copyright 2015 isandlaTech + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +""" + +# Module version +__version_info__ = (0, 2, 5) +__version__ = ".".join(str(x) for x in __version_info__) + +# Documentation strings format +__docformat__ = "restructuredtext en" + +# ------------------------------------------------------------------------------ + +import sys + +# ------------------------------------------------------------------------------ + +if sys.version_info[0] < 3: + # Python 2 + import types + try: + string_types = ( + types.StringType, + types.UnicodeType + ) + except NameError: + # Python built without unicode support + string_types = (types.StringType,) + + numeric_types = ( + types.IntType, + types.LongType, + types.FloatType + ) + + def to_bytes(string): + """ + Converts the given string into bytes + """ + if type(string) is unicode: + return str(string) + return string + + def from_bytes(data): + """ + Converts the given bytes into a string + """ + if type(data) is str: + return data + return str(data) + +else: + # Python 3 + string_types = ( + bytes, + str + ) + + numeric_types = ( + int, + float + ) + + def to_bytes(string): + """ + Converts the given string into bytes + """ + if type(string) is bytes: + return string + return bytes(string, "UTF-8") + + def from_bytes(data): + """ + Converts the given bytes into a string + """ + if type(data) is str: + return data + return str(data, "UTF-8") + +# ------------------------------------------------------------------------------ +# Common + +DictType = dict + +ListType = list +TupleType = tuple + +iterable_types = ( + list, + set, frozenset, + tuple +) + +value_types = ( + bool, + type(None) +) + +primitive_types = string_types + numeric_types + value_types diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/PKG-INFO b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/PKG-INFO index 9d0f3fca..5dce6b1c 100755..100644 --- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/PKG-INFO +++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/PKG-INFO @@ -1,460 +1,460 @@ -Metadata-Version: 1.1
-Name: jsonrpclib-pelix
-Version: 0.2.5
-Summary: This project is an implementation of the JSON-RPC v2.0 specification (backwards-compatible) as a client library, for Python 2.6+ and Python 3. This version is a fork of jsonrpclib by Josh Marshall, usable with Pelix remote services.
-Home-page: http://github.com/tcalmant/jsonrpclib/
-Author: Thomas Calmant
-Author-email: thomas.calmant+github@gmail.com
-License: Apache License 2.0
-Description: JSONRPClib (patched for Pelix)
- ##############################
-
- .. image:: https://pypip.in/license/jsonrpclib-pelix/badge.svg
- :target: https://pypi.python.org/pypi/jsonrpclib-pelix/
-
- .. image:: https://travis-ci.org/tcalmant/jsonrpclib.svg?branch=master
- :target: https://travis-ci.org/tcalmant/jsonrpclib
-
- .. image:: https://coveralls.io/repos/tcalmant/jsonrpclib/badge.svg?branch=master
- :target: https://coveralls.io/r/tcalmant/jsonrpclib?branch=master
-
-
- This library is an implementation of the JSON-RPC specification.
- It supports both the original 1.0 specification and the new (proposed)
- 2.0 specification, which includes batch submission, keyword
- arguments, etc.
-
- It is licensed under the Apache License, Version 2.0
- (http://www.apache.org/licenses/LICENSE-2.0.html).
-
-
- About this version
- ******************
-
- This is a patched version of the original ``jsonrpclib`` project by
- Josh Marshall, available at https://github.com/joshmarshall/jsonrpclib.
-
- The suffix *-pelix* only indicates that this version works with Pelix Remote
- Services, but it is **not** a Pelix specific implementation.
-
- * This version adds support for Python 3, staying compatible with Python 2.
- * It is now possible to use the dispatch_method argument while extending
- the SimpleJSONRPCDispatcher, to use a custom dispatcher.
- This allows Pelix Remote Services to use this package.
- * It can use thread pools to control the number of threads spawned to handle
- notification requests and client connections.
- * The modifications added in other forks of this project have been added:
-
- * From https://github.com/drdaeman/jsonrpclib:
-
- * Improved JSON-RPC 1.0 support
- * Less strict error response handling
-
- * From https://github.com/tuomassalo/jsonrpclib:
-
- * In case of a non-pre-defined error, raise an AppError and give access to
- *error.data*
-
- * From https://github.com/dejw/jsonrpclib:
-
- * Custom headers can be sent with requests (with associated tests)
-
- * The support for Unix sockets has been removed, as it is not trivial to convert
- to Python 3 (and I don't use them)
- * This version cannot be installed alongside the original ``jsonrpclib``, as it uses
- the same package name.
-
-
- Summary
- *******
-
- This library implements the JSON-RPC 2.0 proposed specification in pure Python.
- It is designed to be as compatible with the syntax of ``xmlrpclib`` as possible
- (it extends where possible), so that projects using ``xmlrpclib`` could easily
- be modified to use JSON and experiment with the differences.
-
- It is backwards-compatible with the 1.0 specification, and supports all of the
- new proposed features of 2.0, including:
-
- * Batch submission (via MultiCall)
- * Keyword arguments
- * Notifications (both in a batch and 'normal')
- * Class translation using the ``__jsonclass__`` key.
-
- I've added a "SimpleJSONRPCServer", which is intended to emulate the
- "SimpleXMLRPCServer" from the default Python distribution.
-
-
- Requirements
- ************
-
- It supports ``cjson`` and ``simplejson``, and looks for the parsers in that
- order (searching first for ``cjson``, then for the *built-in* ``json`` in 2.6+,
- and then the ``simplejson`` external library).
- One of these must be installed to use this library, although if you have a
- standard distribution of 2.6+, you should already have one.
- Keep in mind that ``cjson`` is reportedly the quickest, so if
- you are going for full-on optimization you may want to pick it up.
-
- Since the library uses the ``contextlib`` module, you should have at least Python 2.5
- installed.
-
-
- Installation
- ************
-
- You can install this from PyPI with one of the following commands (sudo
- may be required):
-
- .. code-block:: console
-
- easy_install jsonrpclib-pelix
- pip install jsonrpclib-pelix
-
- Alternatively, you can download the source from the GitHub repository
- at http://github.com/tcalmant/jsonrpclib and manually install it
- with the following commands:
-
- .. code-block:: console
-
- git clone git://github.com/tcalmant/jsonrpclib.git
- cd jsonrpclib
- python setup.py install
-
-
- SimpleJSONRPCServer
- *******************
-
- This is identical in usage (or should be) to the SimpleXMLRPCServer in the
- Python standard library. The main differences are that it also supports
- notifications, batch calls and class translation (if enabled).
- Note: The import line is slightly different from the regular SimpleXMLRPCServer,
- since the SimpleJSONRPCServer is distributed within the ``jsonrpclib`` library.
-
- .. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
-
- server = SimpleJSONRPCServer(('localhost', 8080))
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
- server.serve_forever()
-
- To protect the server with SSL, use the following snippet:
-
- .. code-block:: python
-
- import ssl
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
-
- # Setup the SSL socket
- server = SimpleJSONRPCServer(('localhost', 8080), bind_and_activate=False)
- server.socket = ssl.wrap_socket(server.socket, certfile='server.pem',
- server_side=True)
- server.server_bind()
- server.server_activate()
-
- # ... register functions
- # Start the server
- server.serve_forever()
-
-
- Notification Thread Pool
- ========================
-
- By default, notification calls are handled in the request handling thread.
- It is possible to use a thread pool to handle them, by giving it to the server
- using the ``set_notification_pool()`` method:
-
- .. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import SimpleJSONRPCServer
- from jsonrpclib.threadpool import ThreadPool
-
- # Setup the thread pool: between 0 and 10 threads
- pool = ThreadPool(max_threads=10, min_threads=0)
-
- # Don't forget to start it
- pool.start()
-
- # Setup the server
- server = SimpleJSONRPCServer(('localhost', 8080))
- server.set_notification_pool(pool)
-
- # Register methods
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
-
- try:
- server.serve_forever()
- finally:
- # Stop the thread pool (let threads finish their current task)
- pool.stop()
- server.set_notification_pool(None)
-
-
- Threaded server
- ===============
-
- It is also possible to use a thread pool to handle client requests, using the
- ``PooledJSONRPCServer`` class.
- By default, this class uses a pool of 0 to 30 threads. A custom pool can be given
- with the ``thread_pool`` parameter of the class constructor.
-
- The notification pool and the request pool are different: by default, a server
- with a request pool doesn't have a notification pool.
-
- .. code-block:: python
-
- from jsonrpclib.SimpleJSONRPCServer import PooledJSONRPCServer
- from jsonrpclib.threadpool import ThreadPool
-
- # Setup the notification and request pools
- notif_pool = ThreadPool(max_threads=10, min_threads=0)
- request_pool = ThreadPool(max_threads=50, min_threads=10)
-
- # Don't forget to start them
- notif_pool.start()
- request_pool.start()
-
- # Setup the server
- server = PooledJSONRPCServer(('localhost', 8080),
- thread_pool=request_pool)
- server.set_notification_pool(notif_pool)
-
- # Register methods
- server.register_function(pow)
- server.register_function(lambda x,y: x+y, 'add')
- server.register_function(lambda x: x, 'ping')
-
- try:
- server.serve_forever()
- finally:
- # Stop the thread pools (let threads finish their current task)
- request_pool.stop()
- notif_pool.stop()
- server.set_notification_pool(None)
-
- Client Usage
- ************
-
- This is (obviously) taken from a console session.
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080')
- >>> server.add(5,6)
- 11
- >>> server.add(x=5, y=10)
- 15
- >>> server._notify.add(5,6)
- # No result returned...
- >>> batch = jsonrpclib.MultiCall(server)
- >>> batch.add(5, 6)
- >>> batch.ping({'key':'value'})
- >>> batch._notify.add(4, 30)
- >>> results = batch()
- >>> for result in results:
- ...     print(result)
- 11
- {'key': 'value'}
- # Note that there are only two responses -- this is according to spec.
-
- # Clean up
- >>> server('close')()
-
- # Using client history
- >>> history = jsonrpclib.history.History()
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', history=history)
- >>> server.add(5,6)
- 11
- >>> print(history.request)
- {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0",
- "method": "add", "params": [5, 6]}
- >>> print(history.response)
- {"id": "f682b956-c8e1-4506-9db4-29fe8bc9fcaa", "jsonrpc": "2.0",
- "result": 11}
-
- # Clean up
- >>> server('close')()
-
- If you need 1.0 functionality, there are a bunch of places you can pass that in,
- although the best approach is simply to give a specific configuration to
- ``jsonrpclib.ServerProxy``:
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> jsonrpclib.config.DEFAULT.version
- 2.0
- >>> config = jsonrpclib.config.Config(version=1.0)
- >>> history = jsonrpclib.history.History()
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', config=config,
- history=history)
- >>> server.add(7, 10)
- 17
- >>> print(history.request)
- {"id": "827b2923-5b37-49a5-8b36-e73920a16d32",
- "method": "add", "params": [7, 10]}
- >>> print(history.response)
- {"id": "827b2923-5b37-49a5-8b36-e73920a16d32", "error": null, "result": 17}
- >>> server('close')()
-
- The equivalent ``loads`` and ``dumps`` functions also exist, with minor
- modifications. The ``dumps`` arguments are almost identical to those of
- ``xmlrpclib``, but it adds three: ``rpcid`` for the 'id' key, ``version`` to
- specify the JSON-RPC compatibility level, and ``notify`` to mark the request
- as a notification.
-
- Additionally, the ``loads`` function does not return the params and method like
- ``xmlrpclib`` does; instead it a.) parses for errors, raising ``ProtocolError``
- exceptions, and b.) returns the entire structure of the request / response for
- manual parsing.
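-
- As a rough sketch of what this could look like (based only on the description
- above; the ``rpcid`` value is arbitrary and the exact JSON layout may differ
- between versions):
-
- .. code-block:: python
-
- >>> import jsonrpclib
- # Build a 2.0 request by hand
- >>> request = jsonrpclib.dumps((5, 6), methodname='add', rpcid='req-1', version=2.0)
- # loads() gives back the whole request structure, not just method/params
- >>> parsed = jsonrpclib.loads(request)
- >>> parsed['method'], parsed['params']
- ('add', [5, 6])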
-
-
- Additional headers
- ******************
-
- If your remote service requires custom headers in requests, you can pass them
- as a ``headers`` keyword argument when creating the ``ServerProxy``:
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.ServerProxy("http://localhost:8080",
- headers={'X-Test' : 'Test'})
-
- You can also add request headers only for certain method invocations:
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> server = jsonrpclib.Server("http://localhost:8080")
- >>> with server._additional_headers({'X-Test' : 'Test'}) as test_server:
- ... test_server.ping(42)
- ...
- >>> # The X-Test header will no longer be sent in requests
-
- Of course, ``_additional_headers`` contexts can be nested as well.
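-
- For example (a sketch only; the ``X-Outer`` / ``X-Inner`` header names are made
- up for illustration):
-
- .. code-block:: python
-
- >>> with server._additional_headers({'X-Outer': '1'}) as outer:
- ...     outer.ping(42)      # sent with X-Outer
- ...     with outer._additional_headers({'X-Inner': '2'}) as inner:
- ...         inner.ping(42)  # sent with both X-Outer and X-Inner
- ...     outer.ping(42)      # X-Inner is dropped again
- ...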
-
-
- Class Translation
- *****************
-
- I've recently added "automatic" class translation support; it is turned on by
- default (see below for how to deactivate it). This can be devastatingly slow
- if improperly used, so the following is just a short list of things to keep in
- mind when using it.
-
- * Keep It (the object) Simple Stupid. (for exceptions, keep reading.)
- * Do not require init params (for exceptions, keep reading)
- * Getter properties without setters could be dangerous (read: not tested)
-
- If any of the above are issues, use the ``_serialize`` method (see usage below).
- For this to work, the server and the client must BOTH have the ``use_jsonclass``
- configuration item turned on, and they must both have access to the same
- libraries used by the objects.
-
- If you have excessively nested arguments, it is usually better to turn off the
- translation and manually invoke it on specific objects using
- ``jsonrpclib.jsonclass.dump`` / ``jsonrpclib.jsonclass.load`` (since the default
- behavior recursively goes through attributes and lists / dicts / tuples); a
- short sketch of such a manual call follows the sample file below.
-
- Sample file: *test_obj.py*
-
- .. code-block:: python
-
- # This object is /very/ simple, and the system will look through the
- # attributes and serialize what it can.
- class TestObj(object):
- foo = 'bar'
-
- # This object requires __init__ params, so it uses the _serialize method
- # and returns a tuple of init params and attribute values (the init params
- # can be a dict or a list, but the attribute values must be a dict.)
- class TestSerial(object):
- foo = 'bar'
- def __init__(self, *args):
- self.args = args
- def _serialize(self):
- return (self.args, {'foo':self.foo,})
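-
- Manual translation of such an object could then look like this (a sketch only,
- based on the behaviour described above):
-
- .. code-block:: python
-
- >>> from jsonrpclib import jsonclass
- >>> import test_obj
-
- # Serialize a single object by hand...
- >>> as_dict = jsonclass.dump(test_obj.TestSerial())
- # ... and rebuild an instance from its dictionary form
- >>> obj = jsonclass.load(as_dict)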
-
- * Sample usage
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> import test_obj
-
- # History is used only to print the serialized form of beans
- >>> history = jsonrpclib.history.History()
- >>> testobj1 = test_obj.TestObj()
- >>> testobj2 = test_obj.TestSerial()
- >>> server = jsonrpclib.Server('http://localhost:8080', history=history)
-
- # The 'ping' just returns whatever is sent
- >>> ping1 = server.ping(testobj1)
- >>> ping2 = server.ping(testobj2)
-
- >>> print(history.request)
- {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0",
- "method": "ping", "params": [{"__jsonclass__":
- ["test_obj.TestSerial", []], "foo": "bar"}
- ]}
- >>> print(history.response)
- {"id": "7805f1f9-9abd-49c6-81dc-dbd47229fe13", "jsonrpc": "2.0",
- "result": {"__jsonclass__": ["test_obj.TestSerial", []], "foo": "bar"}}
-
- This behavior is turned on by default. To deactivate it, just set the
- ``use_jsonclass`` member of a server ``Config`` to False.
- If you want to use a per-class serialization method, set its name in the
- ``serialize_method`` member of a server ``Config``.
- Finally, if you are using classes that you have defined in the implementation
- (as in, not a separate library), you'll need to add those (on BOTH the server
- and the client) using the ``config.classes.add()`` method.
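-
- Put together, such a configuration could look like this (a sketch following the
- description above; ``test_obj.TestSerial`` stands in for any class defined in
- your own code):
-
- .. code-block:: python
-
- >>> import jsonrpclib
- >>> import test_obj
-
- >>> config = jsonrpclib.config.Config()
- >>> config.use_jsonclass = True              # the default
- >>> config.serialize_method = '_serialize'   # per-class serialization hook
- >>> config.classes.add(test_obj.TestSerial)  # needed on BOTH server and client
- >>> server = jsonrpclib.ServerProxy('http://localhost:8080', config=config)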
-
- Feedback on this "feature" is very, VERY much appreciated.
-
- Why JSON-RPC?
- *************
-
- In my opinion, there are several reasons to choose JSON over XML for RPC:
-
- * Much simpler to read (I suppose this is opinion, but I know I'm right. :)
- * Size / Bandwidth - Main reason, a JSON object representation is just much smaller.
- * Parsing - JSON should be much quicker to parse than XML.
- * Easy class passing with ``jsonclass`` (when enabled)
-
- In the interest of being fair, there are also a few reasons to choose XML
- over JSON:
-
- * Your server doesn't do JSON (rather obvious)
- * Wider XML-RPC support across APIs (can we change this? :))
- * Libraries are more established, i.e. more stable (Let's change this too.)
-
- Tests
- *****
-
- Tests are an almost-verbatim drop from the JSON-RPC specification 2.0 page.
- They can be run using *unittest* or *nosetests*:
-
- .. code-block:: console
-
- python -m unittest discover tests
- python3 -m unittest discover tests
- nosetests tests
-
-Platform: UNKNOWN
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python :: 2.6
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.0
-Classifier: Programming Language :: Python :: 3.1
-Classifier: Programming Language :: Python :: 3.2
-Classifier: Programming Language :: Python :: 3.3
-Classifier: Programming Language :: Python :: 3.4
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/SOURCES.txt b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/SOURCES.txt
index f5714032..f5714032 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/SOURCES.txt
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/SOURCES.txt
diff --git a/scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/dependency_links.txt b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/dependency_links.txt
index 8b137891..8b137891 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/lockfile-0.10.2/lockfile.egg-info/dependency_links.txt
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/dependency_links.txt
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/top_level.txt b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/top_level.txt
index 1410b2ff..1410b2ff 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/top_level.txt
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/jsonrpclib_pelix.egg-info/top_level.txt
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/setup.cfg b/scripts/external_libs/jsonrpclib-pelix-0.2.5/setup.cfg
index 26c67942..7633f817 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/setup.cfg
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/setup.cfg
@@ -1,8 +1,8 @@
-[bdist_wheel]
-universal = 1
-
-[egg_info]
-tag_date = 0
-tag_svn_revision = 0
-tag_build =
-
+[bdist_wheel]
+universal = 1
+
+[egg_info]
+tag_date = 0
+tag_svn_revision = 0
+tag_build =
+
diff --git a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/setup.py b/scripts/external_libs/jsonrpclib-pelix-0.2.5/setup.py
index a64f2fb0..fb28d630 100755..100644
--- a/scripts/automation/trex_control_plane/python_lib/jsonrpclib-pelix-0.2.5/setup.py
+++ b/scripts/external_libs/jsonrpclib-pelix-0.2.5/setup.py
@@ -1,74 +1,74 @@
-#!/usr/bin/env python
-# -- Content-Encoding: UTF-8 --
-"""
-Installation script
-
-:authors: Josh Marshall, Thomas Calmant
-:copyright: Copyright 2015, isandlaTech
-:license: Apache License 2.0
-:version: 0.2.5
-
-..
-
- Copyright 2015 isandlaTech
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-"""
-
-# Module version
-__version_info__ = (0, 2, 5)
-__version__ = ".".join(str(x) for x in __version_info__)
-
-# Documentation strings format
-__docformat__ = "restructuredtext en"
-
-# ------------------------------------------------------------------------------
-
-import sys
-
-try:
- from setuptools import setup
-except ImportError:
- from distutils.core import setup
-
-# ------------------------------------------------------------------------------
-
-setup(
- name="jsonrpclib-pelix",
- version=__version__,
- license="Apache License 2.0",
- author="Thomas Calmant",
- author_email="thomas.calmant+github@gmail.com",
- url="http://github.com/tcalmant/jsonrpclib/",
- description=
- "This project is an implementation of the JSON-RPC v2.0 specification "
- "(backwards-compatible) as a client library, for Python 2.6+ and Python 3."
- "This version is a fork of jsonrpclib by Josh Marshall, "
- "usable with Pelix remote services.",
- long_description=open("README.rst").read(),
- packages=["jsonrpclib"],
- classifiers=[
- 'Development Status :: 5 - Production/Stable',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: Apache Software License',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python :: 2.6',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.0',
- 'Programming Language :: Python :: 3.1',
- 'Programming Language :: Python :: 3.2',
- 'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4'],
- tests_require=['unittest2'] if sys.version_info < (2, 7) else []
-)
--- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/_version.so diff --git a/src/console/zmq/backend/cython/checkrc.pxd b/scripts/external_libs/zmq/backend/cython/checkrc.pxd index 3bf69fc3..3bf69fc3 100644 --- a/src/console/zmq/backend/cython/checkrc.pxd +++ b/scripts/external_libs/zmq/backend/cython/checkrc.pxd diff --git a/src/console/zmq/backend/cython/constants.py b/scripts/external_libs/zmq/backend/cython/constants.py index ea772ac0..ea772ac0 100755..100644 --- a/src/console/zmq/backend/cython/constants.py +++ b/scripts/external_libs/zmq/backend/cython/constants.py diff --git a/scripts/external_libs/zmq/backend/cython/constants.so b/scripts/external_libs/zmq/backend/cython/constants.so Binary files differnew file mode 100644 index 00000000..cf44c07e --- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/constants.so diff --git a/src/console/zmq/backend/cython/context.pxd b/scripts/external_libs/zmq/backend/cython/context.pxd index 9c9267a5..9c9267a5 100644 --- a/src/console/zmq/backend/cython/context.pxd +++ b/scripts/external_libs/zmq/backend/cython/context.pxd diff --git a/src/console/zmq/backend/cython/context.py b/scripts/external_libs/zmq/backend/cython/context.py index 19f8ec7c..19f8ec7c 100755..100644 --- a/src/console/zmq/backend/cython/context.py +++ b/scripts/external_libs/zmq/backend/cython/context.py diff --git a/scripts/external_libs/zmq/backend/cython/context.so b/scripts/external_libs/zmq/backend/cython/context.so Binary files differnew file mode 100644 index 00000000..ef9b9699 --- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/context.so diff --git a/src/console/zmq/backend/cython/error.py b/scripts/external_libs/zmq/backend/cython/error.py index d3a4ea0e..d3a4ea0e 100755..100644 --- a/src/console/zmq/backend/cython/error.py +++ b/scripts/external_libs/zmq/backend/cython/error.py diff --git a/scripts/external_libs/zmq/backend/cython/error.so b/scripts/external_libs/zmq/backend/cython/error.so Binary files differnew file mode 100644 index 00000000..360da9dd --- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/error.so diff --git a/src/console/zmq/backend/cython/libzmq.pxd b/scripts/external_libs/zmq/backend/cython/libzmq.pxd index e42f6d6b..e42f6d6b 100644 --- a/src/console/zmq/backend/cython/libzmq.pxd +++ b/scripts/external_libs/zmq/backend/cython/libzmq.pxd diff --git a/src/console/zmq/backend/cython/message.pxd b/scripts/external_libs/zmq/backend/cython/message.pxd index 4781195f..4781195f 100644 --- a/src/console/zmq/backend/cython/message.pxd +++ b/scripts/external_libs/zmq/backend/cython/message.pxd diff --git a/src/console/zmq/backend/cython/message.py b/scripts/external_libs/zmq/backend/cython/message.py index 5e423b62..5e423b62 100755..100644 --- a/src/console/zmq/backend/cython/message.py +++ b/scripts/external_libs/zmq/backend/cython/message.py diff --git a/scripts/external_libs/zmq/backend/cython/message.so b/scripts/external_libs/zmq/backend/cython/message.so Binary files differnew file mode 100644 index 00000000..f674489f --- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/message.so diff --git a/src/console/zmq/backend/cython/socket.pxd b/scripts/external_libs/zmq/backend/cython/socket.pxd index b8a331e2..b8a331e2 100644 --- a/src/console/zmq/backend/cython/socket.pxd +++ b/scripts/external_libs/zmq/backend/cython/socket.pxd diff --git a/src/console/zmq/backend/cython/socket.py b/scripts/external_libs/zmq/backend/cython/socket.py index faef8bee..faef8bee 100755..100644 --- 
a/src/console/zmq/backend/cython/socket.py +++ b/scripts/external_libs/zmq/backend/cython/socket.py diff --git a/scripts/external_libs/zmq/backend/cython/socket.so b/scripts/external_libs/zmq/backend/cython/socket.so Binary files differnew file mode 100644 index 00000000..1c927042 --- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/socket.so diff --git a/src/console/zmq/backend/cython/utils.pxd b/scripts/external_libs/zmq/backend/cython/utils.pxd index 1d7117f1..1d7117f1 100644 --- a/src/console/zmq/backend/cython/utils.pxd +++ b/scripts/external_libs/zmq/backend/cython/utils.pxd diff --git a/src/console/zmq/backend/cython/utils.py b/scripts/external_libs/zmq/backend/cython/utils.py index fe928300..fe928300 100755..100644 --- a/src/console/zmq/backend/cython/utils.py +++ b/scripts/external_libs/zmq/backend/cython/utils.py diff --git a/scripts/external_libs/zmq/backend/cython/utils.so b/scripts/external_libs/zmq/backend/cython/utils.so Binary files differnew file mode 100644 index 00000000..b4e5b283 --- /dev/null +++ b/scripts/external_libs/zmq/backend/cython/utils.so diff --git a/src/console/zmq/backend/select.py b/scripts/external_libs/zmq/backend/select.py index 0a2e09a2..0a2e09a2 100755..100644 --- a/src/console/zmq/backend/select.py +++ b/scripts/external_libs/zmq/backend/select.py diff --git a/src/console/zmq/devices/__init__.py b/scripts/external_libs/zmq/devices/__init__.py index 23715963..23715963 100755..100644 --- a/src/console/zmq/devices/__init__.py +++ b/scripts/external_libs/zmq/devices/__init__.py diff --git a/src/console/zmq/devices/basedevice.py b/scripts/external_libs/zmq/devices/basedevice.py index 7ba1b7ac..7ba1b7ac 100755..100644 --- a/src/console/zmq/devices/basedevice.py +++ b/scripts/external_libs/zmq/devices/basedevice.py diff --git a/src/console/zmq/devices/monitoredqueue.pxd b/scripts/external_libs/zmq/devices/monitoredqueue.pxd index 1e26ed86..1e26ed86 100644 --- a/src/console/zmq/devices/monitoredqueue.pxd +++ b/scripts/external_libs/zmq/devices/monitoredqueue.pxd diff --git a/src/console/zmq/devices/monitoredqueue.py b/scripts/external_libs/zmq/devices/monitoredqueue.py index 6d714e51..6d714e51 100755..100644 --- a/src/console/zmq/devices/monitoredqueue.py +++ b/scripts/external_libs/zmq/devices/monitoredqueue.py diff --git a/scripts/external_libs/zmq/devices/monitoredqueue.so b/scripts/external_libs/zmq/devices/monitoredqueue.so Binary files differnew file mode 100644 index 00000000..edca8a4b --- /dev/null +++ b/scripts/external_libs/zmq/devices/monitoredqueue.so diff --git a/src/console/zmq/devices/monitoredqueuedevice.py b/scripts/external_libs/zmq/devices/monitoredqueuedevice.py index 9723f866..9723f866 100755..100644 --- a/src/console/zmq/devices/monitoredqueuedevice.py +++ b/scripts/external_libs/zmq/devices/monitoredqueuedevice.py diff --git a/src/console/zmq/devices/proxydevice.py b/scripts/external_libs/zmq/devices/proxydevice.py index 68be3f15..68be3f15 100755..100644 --- a/src/console/zmq/devices/proxydevice.py +++ b/scripts/external_libs/zmq/devices/proxydevice.py diff --git a/src/console/zmq/error.py b/scripts/external_libs/zmq/error.py index 48cdaafa..48cdaafa 100755..100644 --- a/src/console/zmq/error.py +++ b/scripts/external_libs/zmq/error.py diff --git a/src/console/zmq/eventloop/__init__.py b/scripts/external_libs/zmq/eventloop/__init__.py index 568e8e8d..568e8e8d 100755..100644 --- a/src/console/zmq/eventloop/__init__.py +++ b/scripts/external_libs/zmq/eventloop/__init__.py diff --git a/src/console/zmq/eventloop/ioloop.py 
b/scripts/external_libs/zmq/eventloop/ioloop.py index 35f4c418..35f4c418 100755..100644 --- a/src/console/zmq/eventloop/ioloop.py +++ b/scripts/external_libs/zmq/eventloop/ioloop.py diff --git a/src/console/zmq/eventloop/minitornado/platform/__init__.py b/scripts/external_libs/zmq/eventloop/minitornado/__init__.py index e69de29b..e69de29b 100644 --- a/src/console/zmq/eventloop/minitornado/platform/__init__.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/__init__.py diff --git a/src/console/zmq/eventloop/minitornado/concurrent.py b/scripts/external_libs/zmq/eventloop/minitornado/concurrent.py index 519b23d5..519b23d5 100755..100644 --- a/src/console/zmq/eventloop/minitornado/concurrent.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/concurrent.py diff --git a/src/console/zmq/eventloop/minitornado/ioloop.py b/scripts/external_libs/zmq/eventloop/minitornado/ioloop.py index 710a3ecb..710a3ecb 100755..100644 --- a/src/console/zmq/eventloop/minitornado/ioloop.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/ioloop.py diff --git a/src/console/zmq/eventloop/minitornado/log.py b/scripts/external_libs/zmq/eventloop/minitornado/log.py index 49051e89..49051e89 100755..100644 --- a/src/console/zmq/eventloop/minitornado/log.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/log.py diff --git a/src/console/zmq/log/__init__.py b/scripts/external_libs/zmq/eventloop/minitornado/platform/__init__.py index e69de29b..e69de29b 100644 --- a/src/console/zmq/log/__init__.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/platform/__init__.py diff --git a/src/console/zmq/eventloop/minitornado/platform/auto.py b/scripts/external_libs/zmq/eventloop/minitornado/platform/auto.py index b40ccd94..b40ccd94 100755..100644 --- a/src/console/zmq/eventloop/minitornado/platform/auto.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/platform/auto.py diff --git a/src/console/zmq/eventloop/minitornado/platform/common.py b/scripts/external_libs/zmq/eventloop/minitornado/platform/common.py index 2d75dc1e..2d75dc1e 100755..100644 --- a/src/console/zmq/eventloop/minitornado/platform/common.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/platform/common.py diff --git a/src/console/zmq/eventloop/minitornado/platform/interface.py b/scripts/external_libs/zmq/eventloop/minitornado/platform/interface.py index 07da6bab..07da6bab 100755..100644 --- a/src/console/zmq/eventloop/minitornado/platform/interface.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/platform/interface.py diff --git a/src/console/zmq/eventloop/minitornado/platform/posix.py b/scripts/external_libs/zmq/eventloop/minitornado/platform/posix.py index ccffbb66..ccffbb66 100755..100644 --- a/src/console/zmq/eventloop/minitornado/platform/posix.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/platform/posix.py diff --git a/src/console/zmq/eventloop/minitornado/platform/windows.py b/scripts/external_libs/zmq/eventloop/minitornado/platform/windows.py index 817bdca1..817bdca1 100755..100644 --- a/src/console/zmq/eventloop/minitornado/platform/windows.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/platform/windows.py diff --git a/src/console/zmq/eventloop/minitornado/stack_context.py b/scripts/external_libs/zmq/eventloop/minitornado/stack_context.py index 226d8042..226d8042 100755..100644 --- a/src/console/zmq/eventloop/minitornado/stack_context.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/stack_context.py diff --git a/src/console/zmq/eventloop/minitornado/util.py 
b/scripts/external_libs/zmq/eventloop/minitornado/util.py index c1e2eb95..c1e2eb95 100755..100644 --- a/src/console/zmq/eventloop/minitornado/util.py +++ b/scripts/external_libs/zmq/eventloop/minitornado/util.py diff --git a/src/console/zmq/eventloop/zmqstream.py b/scripts/external_libs/zmq/eventloop/zmqstream.py index 86a97e44..86a97e44 100755..100644 --- a/src/console/zmq/eventloop/zmqstream.py +++ b/scripts/external_libs/zmq/eventloop/zmqstream.py diff --git a/src/console/zmq/green/__init__.py b/scripts/external_libs/zmq/green/__init__.py index ff7e5965..ff7e5965 100755..100644 --- a/src/console/zmq/green/__init__.py +++ b/scripts/external_libs/zmq/green/__init__.py diff --git a/src/console/zmq/green/core.py b/scripts/external_libs/zmq/green/core.py index 9fc73e32..9fc73e32 100755..100644 --- a/src/console/zmq/green/core.py +++ b/scripts/external_libs/zmq/green/core.py diff --git a/src/console/zmq/green/device.py b/scripts/external_libs/zmq/green/device.py index 4b070237..4b070237 100755..100644 --- a/src/console/zmq/green/device.py +++ b/scripts/external_libs/zmq/green/device.py diff --git a/src/console/zmq/green/eventloop/__init__.py b/scripts/external_libs/zmq/green/eventloop/__init__.py index c5150efe..c5150efe 100755..100644 --- a/src/console/zmq/green/eventloop/__init__.py +++ b/scripts/external_libs/zmq/green/eventloop/__init__.py diff --git a/src/console/zmq/green/eventloop/ioloop.py b/scripts/external_libs/zmq/green/eventloop/ioloop.py index e12fd5e9..e12fd5e9 100755..100644 --- a/src/console/zmq/green/eventloop/ioloop.py +++ b/scripts/external_libs/zmq/green/eventloop/ioloop.py diff --git a/src/console/zmq/green/eventloop/zmqstream.py b/scripts/external_libs/zmq/green/eventloop/zmqstream.py index 90fbd1f5..90fbd1f5 100755..100644 --- a/src/console/zmq/green/eventloop/zmqstream.py +++ b/scripts/external_libs/zmq/green/eventloop/zmqstream.py diff --git a/src/console/zmq/green/poll.py b/scripts/external_libs/zmq/green/poll.py index 8f016129..8f016129 100755..100644 --- a/src/console/zmq/green/poll.py +++ b/scripts/external_libs/zmq/green/poll.py diff --git a/scripts/external_libs/zmq/libzmq.so b/scripts/external_libs/zmq/libzmq.so Binary files differnew file mode 100644 index 00000000..16980c27 --- /dev/null +++ b/scripts/external_libs/zmq/libzmq.so diff --git a/src/console/zmq/utils/__init__.py b/scripts/external_libs/zmq/log/__init__.py index e69de29b..e69de29b 100644 --- a/src/console/zmq/utils/__init__.py +++ b/scripts/external_libs/zmq/log/__init__.py diff --git a/src/console/zmq/log/handlers.py b/scripts/external_libs/zmq/log/handlers.py index 5ff21bf3..5ff21bf3 100755..100644 --- a/src/console/zmq/log/handlers.py +++ b/scripts/external_libs/zmq/log/handlers.py diff --git a/src/console/zmq/ssh/__init__.py b/scripts/external_libs/zmq/ssh/__init__.py index 57f09568..57f09568 100755..100644 --- a/src/console/zmq/ssh/__init__.py +++ b/scripts/external_libs/zmq/ssh/__init__.py diff --git a/src/console/zmq/ssh/forward.py b/scripts/external_libs/zmq/ssh/forward.py index 2d619462..2d619462 100755..100644 --- a/src/console/zmq/ssh/forward.py +++ b/scripts/external_libs/zmq/ssh/forward.py diff --git a/src/console/zmq/ssh/tunnel.py b/scripts/external_libs/zmq/ssh/tunnel.py index 5a0c5433..5a0c5433 100755..100644 --- a/src/console/zmq/ssh/tunnel.py +++ b/scripts/external_libs/zmq/ssh/tunnel.py diff --git a/src/console/zmq/sugar/__init__.py b/scripts/external_libs/zmq/sugar/__init__.py index d0510a44..d0510a44 100755..100644 --- a/src/console/zmq/sugar/__init__.py +++ 
b/scripts/external_libs/zmq/sugar/__init__.py diff --git a/src/console/zmq/sugar/attrsettr.py b/scripts/external_libs/zmq/sugar/attrsettr.py index 4bbd36d6..4bbd36d6 100755..100644 --- a/src/console/zmq/sugar/attrsettr.py +++ b/scripts/external_libs/zmq/sugar/attrsettr.py diff --git a/src/console/zmq/sugar/constants.py b/scripts/external_libs/zmq/sugar/constants.py index 88281176..88281176 100755..100644 --- a/src/console/zmq/sugar/constants.py +++ b/scripts/external_libs/zmq/sugar/constants.py diff --git a/src/console/zmq/sugar/context.py b/scripts/external_libs/zmq/sugar/context.py index 86a9c5dc..86a9c5dc 100755..100644 --- a/src/console/zmq/sugar/context.py +++ b/scripts/external_libs/zmq/sugar/context.py diff --git a/src/console/zmq/sugar/frame.py b/scripts/external_libs/zmq/sugar/frame.py index 9f556c86..9f556c86 100755..100644 --- a/src/console/zmq/sugar/frame.py +++ b/scripts/external_libs/zmq/sugar/frame.py diff --git a/src/console/zmq/sugar/poll.py b/scripts/external_libs/zmq/sugar/poll.py index c7b1d1bb..c7b1d1bb 100755..100644 --- a/src/console/zmq/sugar/poll.py +++ b/scripts/external_libs/zmq/sugar/poll.py diff --git a/src/console/zmq/sugar/socket.py b/scripts/external_libs/zmq/sugar/socket.py index c91589d7..c91589d7 100755..100644 --- a/src/console/zmq/sugar/socket.py +++ b/scripts/external_libs/zmq/sugar/socket.py diff --git a/src/console/zmq/sugar/tracker.py b/scripts/external_libs/zmq/sugar/tracker.py index fb8c007f..fb8c007f 100755..100644 --- a/src/console/zmq/sugar/tracker.py +++ b/scripts/external_libs/zmq/sugar/tracker.py diff --git a/src/console/zmq/sugar/version.py b/scripts/external_libs/zmq/sugar/version.py index ea8fbbc4..ea8fbbc4 100755..100644 --- a/src/console/zmq/sugar/version.py +++ b/scripts/external_libs/zmq/sugar/version.py diff --git a/src/console/zmq/tests/__init__.py b/scripts/external_libs/zmq/tests/__init__.py index 325a3f19..325a3f19 100755..100644 --- a/src/console/zmq/tests/__init__.py +++ b/scripts/external_libs/zmq/tests/__init__.py diff --git a/src/console/zmq/tests/test_auth.py b/scripts/external_libs/zmq/tests/test_auth.py index d350f61f..d350f61f 100755..100644 --- a/src/console/zmq/tests/test_auth.py +++ b/scripts/external_libs/zmq/tests/test_auth.py diff --git a/src/console/zmq/tests/test_cffi_backend.py b/scripts/external_libs/zmq/tests/test_cffi_backend.py index 1f85eebf..1f85eebf 100755..100644 --- a/src/console/zmq/tests/test_cffi_backend.py +++ b/scripts/external_libs/zmq/tests/test_cffi_backend.py diff --git a/src/console/zmq/tests/test_constants.py b/scripts/external_libs/zmq/tests/test_constants.py index d32b2b48..d32b2b48 100755..100644 --- a/src/console/zmq/tests/test_constants.py +++ b/scripts/external_libs/zmq/tests/test_constants.py diff --git a/src/console/zmq/tests/test_context.py b/scripts/external_libs/zmq/tests/test_context.py index e3280778..e3280778 100755..100644 --- a/src/console/zmq/tests/test_context.py +++ b/scripts/external_libs/zmq/tests/test_context.py diff --git a/src/console/zmq/tests/test_device.py b/scripts/external_libs/zmq/tests/test_device.py index f8305074..f8305074 100755..100644 --- a/src/console/zmq/tests/test_device.py +++ b/scripts/external_libs/zmq/tests/test_device.py diff --git a/src/console/zmq/tests/test_error.py b/scripts/external_libs/zmq/tests/test_error.py index a2eee14a..a2eee14a 100755..100644 --- a/src/console/zmq/tests/test_error.py +++ b/scripts/external_libs/zmq/tests/test_error.py diff --git a/src/console/zmq/tests/test_etc.py b/scripts/external_libs/zmq/tests/test_etc.py index 
ad224064..ad224064 100755..100644 --- a/src/console/zmq/tests/test_etc.py +++ b/scripts/external_libs/zmq/tests/test_etc.py diff --git a/src/console/zmq/tests/test_imports.py b/scripts/external_libs/zmq/tests/test_imports.py index c0ddfaac..c0ddfaac 100755..100644 --- a/src/console/zmq/tests/test_imports.py +++ b/scripts/external_libs/zmq/tests/test_imports.py diff --git a/src/console/zmq/tests/test_ioloop.py b/scripts/external_libs/zmq/tests/test_ioloop.py index 2a8b1153..2a8b1153 100755..100644 --- a/src/console/zmq/tests/test_ioloop.py +++ b/scripts/external_libs/zmq/tests/test_ioloop.py diff --git a/src/console/zmq/tests/test_log.py b/scripts/external_libs/zmq/tests/test_log.py index 9206f095..9206f095 100755..100644 --- a/src/console/zmq/tests/test_log.py +++ b/scripts/external_libs/zmq/tests/test_log.py diff --git a/src/console/zmq/tests/test_message.py b/scripts/external_libs/zmq/tests/test_message.py index d8770bdf..d8770bdf 100755..100644 --- a/src/console/zmq/tests/test_message.py +++ b/scripts/external_libs/zmq/tests/test_message.py diff --git a/src/console/zmq/tests/test_monitor.py b/scripts/external_libs/zmq/tests/test_monitor.py index 4f035388..4f035388 100755..100644 --- a/src/console/zmq/tests/test_monitor.py +++ b/scripts/external_libs/zmq/tests/test_monitor.py diff --git a/src/console/zmq/tests/test_monqueue.py b/scripts/external_libs/zmq/tests/test_monqueue.py index e855602e..e855602e 100755..100644 --- a/src/console/zmq/tests/test_monqueue.py +++ b/scripts/external_libs/zmq/tests/test_monqueue.py diff --git a/src/console/zmq/tests/test_multipart.py b/scripts/external_libs/zmq/tests/test_multipart.py index 24d41be0..24d41be0 100755..100644 --- a/src/console/zmq/tests/test_multipart.py +++ b/scripts/external_libs/zmq/tests/test_multipart.py diff --git a/src/console/zmq/tests/test_pair.py b/scripts/external_libs/zmq/tests/test_pair.py index e88c1e8b..e88c1e8b 100755..100644 --- a/src/console/zmq/tests/test_pair.py +++ b/scripts/external_libs/zmq/tests/test_pair.py diff --git a/src/console/zmq/tests/test_poll.py b/scripts/external_libs/zmq/tests/test_poll.py index 57346c89..57346c89 100755..100644 --- a/src/console/zmq/tests/test_poll.py +++ b/scripts/external_libs/zmq/tests/test_poll.py diff --git a/src/console/zmq/tests/test_pubsub.py b/scripts/external_libs/zmq/tests/test_pubsub.py index a3ee22aa..a3ee22aa 100755..100644 --- a/src/console/zmq/tests/test_pubsub.py +++ b/scripts/external_libs/zmq/tests/test_pubsub.py diff --git a/src/console/zmq/tests/test_reqrep.py b/scripts/external_libs/zmq/tests/test_reqrep.py index de17f2b3..de17f2b3 100755..100644 --- a/src/console/zmq/tests/test_reqrep.py +++ b/scripts/external_libs/zmq/tests/test_reqrep.py diff --git a/src/console/zmq/tests/test_security.py b/scripts/external_libs/zmq/tests/test_security.py index 687b7e0f..687b7e0f 100755..100644 --- a/src/console/zmq/tests/test_security.py +++ b/scripts/external_libs/zmq/tests/test_security.py diff --git a/src/console/zmq/tests/test_socket.py b/scripts/external_libs/zmq/tests/test_socket.py index 5c842edc..5c842edc 100755..100644 --- a/src/console/zmq/tests/test_socket.py +++ b/scripts/external_libs/zmq/tests/test_socket.py diff --git a/src/console/zmq/tests/test_stopwatch.py b/scripts/external_libs/zmq/tests/test_stopwatch.py index 49fb79f2..49fb79f2 100755..100644 --- a/src/console/zmq/tests/test_stopwatch.py +++ b/scripts/external_libs/zmq/tests/test_stopwatch.py diff --git a/src/console/zmq/tests/test_version.py b/scripts/external_libs/zmq/tests/test_version.py index 
6ebebf30..6ebebf30 100755..100644 --- a/src/console/zmq/tests/test_version.py +++ b/scripts/external_libs/zmq/tests/test_version.py diff --git a/src/console/zmq/tests/test_win32_shim.py b/scripts/external_libs/zmq/tests/test_win32_shim.py index 55657bda..55657bda 100755..100644 --- a/src/console/zmq/tests/test_win32_shim.py +++ b/scripts/external_libs/zmq/tests/test_win32_shim.py diff --git a/src/console/zmq/tests/test_z85.py b/scripts/external_libs/zmq/tests/test_z85.py index 8a73cb4d..8a73cb4d 100755..100644 --- a/src/console/zmq/tests/test_z85.py +++ b/scripts/external_libs/zmq/tests/test_z85.py diff --git a/src/console/zmq/tests/test_zmqstream.py b/scripts/external_libs/zmq/tests/test_zmqstream.py index cdb3a171..cdb3a171 100755..100644 --- a/src/console/zmq/tests/test_zmqstream.py +++ b/scripts/external_libs/zmq/tests/test_zmqstream.py diff --git a/scripts/external_libs/zmq/utils/__init__.py b/scripts/external_libs/zmq/utils/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/scripts/external_libs/zmq/utils/__init__.py diff --git a/src/console/zmq/utils/buffers.pxd b/scripts/external_libs/zmq/utils/buffers.pxd index 998aa551..998aa551 100644 --- a/src/console/zmq/utils/buffers.pxd +++ b/scripts/external_libs/zmq/utils/buffers.pxd diff --git a/src/console/zmq/utils/compiler.json b/scripts/external_libs/zmq/utils/compiler.json index e58fc130..e58fc130 100644 --- a/src/console/zmq/utils/compiler.json +++ b/scripts/external_libs/zmq/utils/compiler.json diff --git a/src/console/zmq/utils/config.json b/scripts/external_libs/zmq/utils/config.json index 1e4611f9..1e4611f9 100644 --- a/src/console/zmq/utils/config.json +++ b/scripts/external_libs/zmq/utils/config.json diff --git a/src/console/zmq/utils/constant_names.py b/scripts/external_libs/zmq/utils/constant_names.py index 47da9dc2..47da9dc2 100755..100644 --- a/src/console/zmq/utils/constant_names.py +++ b/scripts/external_libs/zmq/utils/constant_names.py diff --git a/src/console/zmq/utils/garbage.py b/scripts/external_libs/zmq/utils/garbage.py index 80a8725a..80a8725a 100755..100644 --- a/src/console/zmq/utils/garbage.py +++ b/scripts/external_libs/zmq/utils/garbage.py diff --git a/src/console/zmq/utils/getpid_compat.h b/scripts/external_libs/zmq/utils/getpid_compat.h index 47ce90fa..47ce90fa 100644 --- a/src/console/zmq/utils/getpid_compat.h +++ b/scripts/external_libs/zmq/utils/getpid_compat.h diff --git a/src/console/zmq/utils/interop.py b/scripts/external_libs/zmq/utils/interop.py index 26c01969..26c01969 100755..100644 --- a/src/console/zmq/utils/interop.py +++ b/scripts/external_libs/zmq/utils/interop.py diff --git a/src/console/zmq/utils/ipcmaxlen.h b/scripts/external_libs/zmq/utils/ipcmaxlen.h index 7218db78..7218db78 100644 --- a/src/console/zmq/utils/ipcmaxlen.h +++ b/scripts/external_libs/zmq/utils/ipcmaxlen.h diff --git a/src/console/zmq/utils/jsonapi.py b/scripts/external_libs/zmq/utils/jsonapi.py index 865ca6d5..865ca6d5 100755..100644 --- a/src/console/zmq/utils/jsonapi.py +++ b/scripts/external_libs/zmq/utils/jsonapi.py diff --git a/src/console/zmq/utils/monitor.py b/scripts/external_libs/zmq/utils/monitor.py index 734d54b1..734d54b1 100755..100644 --- a/src/console/zmq/utils/monitor.py +++ b/scripts/external_libs/zmq/utils/monitor.py diff --git a/src/console/zmq/utils/pyversion_compat.h b/scripts/external_libs/zmq/utils/pyversion_compat.h index fac09046..fac09046 100644 --- a/src/console/zmq/utils/pyversion_compat.h +++ b/scripts/external_libs/zmq/utils/pyversion_compat.h diff --git 
a/src/console/zmq/utils/sixcerpt.py b/scripts/external_libs/zmq/utils/sixcerpt.py index 5492fd59..5492fd59 100755..100644 --- a/src/console/zmq/utils/sixcerpt.py +++ b/scripts/external_libs/zmq/utils/sixcerpt.py diff --git a/src/console/zmq/utils/strtypes.py b/scripts/external_libs/zmq/utils/strtypes.py index 548410dc..548410dc 100755..100644 --- a/src/console/zmq/utils/strtypes.py +++ b/scripts/external_libs/zmq/utils/strtypes.py diff --git a/src/console/zmq/utils/win32.py b/scripts/external_libs/zmq/utils/win32.py index ea758299..ea758299 100755..100644 --- a/src/console/zmq/utils/win32.py +++ b/scripts/external_libs/zmq/utils/win32.py diff --git a/src/console/zmq/utils/z85.py b/scripts/external_libs/zmq/utils/z85.py index 1bb1784e..1bb1784e 100755..100644 --- a/src/console/zmq/utils/z85.py +++ b/scripts/external_libs/zmq/utils/z85.py diff --git a/src/console/zmq/utils/zmq_compat.h b/scripts/external_libs/zmq/utils/zmq_compat.h index 81c57b69..81c57b69 100644 --- a/src/console/zmq/utils/zmq_compat.h +++ b/scripts/external_libs/zmq/utils/zmq_compat.h diff --git a/src/console/zmq/utils/zmq_constants.h b/scripts/external_libs/zmq/utils/zmq_constants.h index 97683022..97683022 100644 --- a/src/console/zmq/utils/zmq_constants.h +++ b/scripts/external_libs/zmq/utils/zmq_constants.h diff --git a/scripts/libzmq.so.3 b/scripts/libzmq.so.3 Binary files differindex 16980c27..16980c27 100755..100644 --- a/scripts/libzmq.so.3 +++ b/scripts/libzmq.so.3 diff --git a/scripts/libzmq.so.3.1.0 b/scripts/libzmq.so.3.1.0 Binary files differindex 16980c27..16980c27 100755..100644 --- a/scripts/libzmq.so.3.1.0 +++ b/scripts/libzmq.so.3.1.0 diff --git a/scripts/trex-console b/scripts/trex-console index 50e097e7..6eab77dd 100755 --- a/scripts/trex-console +++ b/scripts/trex-console @@ -1,2 +1,2 @@ #!/bin/bash -../src/console/trex_console.py $@ +../scripts/automation/trex_control_plane/console/trex_console.py $@ |