diff --git a/.gitmodules b/.gitmodules index 8dd9a046..e69de29b 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +0,0 @@ -[submodule "extern/wrapt"] - path = vendor/wrapt - url = https://github.com/applandinc/wrapt.git diff --git a/.travis.yml b/.travis.yml index b890997a..07a266a0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,7 @@ os: linux dist: jammy language: python python: +- "3.11" - "3.10" - "3.9.14" - "3.8" diff --git a/CHANGELOG.md b/CHANGELOG.md index 5b65be65..e14f84a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ +# [1.16.0](https://github.com/getappmap/appmap-python/compare/v1.15.2...v1.16.0) (2023-05-25) + + +### Bug Fixes + +* handle 3.11 find_spec implementation ([c62e64e](https://github.com/getappmap/appmap-python/commit/c62e64e04d3911e37e17a7254ed44f4975b4dd11)) +* relax restriction on packaging to >=19.0 ([affdbda](https://github.com/getappmap/appmap-python/commit/affdbda3e3e492eeaadc9abe4803e0728a663f2b)) +* update unittest integration for python 3.11 ([bd9598b](https://github.com/getappmap/appmap-python/commit/bd9598bea8576641b923d157690937bc2e07d3df)) + + +### Features + +* support python 3.11 ([15b0ddc](https://github.com/getappmap/appmap-python/commit/15b0ddc94397b78912bca2ad5a1bc68d4f3f2942)) + ## [1.15.2](https://github.com/getappmap/appmap-python/compare/v1.15.1...v1.15.2) (2023-05-23) diff --git a/README.md b/README.md index 959bc5f6..eafd83f4 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,7 @@ - [Getting the code](#getting-the-code) - [Python version support](#python-version-support) - [Dependency management](#dependency-management) + - [wrapt](#wrapt) - [Linting](#linting) - [Testing](#testing) - [pytest](#pytest) @@ -71,6 +72,14 @@ oldest version currently supported (see the % poetry install ``` +### wrapt +The one dependency that is not managed using `poetry` is `wrapt`. Because it's possible that +projects that use `appmap` may also need an unmodified version of `wrapt` (e.g. `pylint` depends on +`astroid`, which in turn depends on `wrapt`), we use +[vendoring](https://github.com/pradyunsg/vendoring) to vendor `wrapt`. + +To update `wrapt`, use `tox` (described below) to run the `vendoring` environment. + ## Linting [pylint](https://www.pylint.org/) for linting: @@ -83,9 +92,9 @@ Your code has been rated at 10.00/10 (previous run: 10.00/10, +0.00) ``` -[Note that the current configuration requires a 10.0 for the Travis build to pass. To make -this easier to achieve, convention and refactoring checks have both been disabled. They -should be reenabled as soon as possible.] +[Note that the current configuration has a threshold set which must be met for the Travis build to +pass. To make this easier to achieve, a number of checks have both been disabled. They should be +reenabled as soon as possible.] 
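To tie the wrapt section above to the import changes later in this change set, here is a minimal sketch (not part of the diff) of what vendoring buys: the instrumentation imports the vendored copy as `_appmap.wrapt`, so a host project that also depends on the unmodified upstream `wrapt` (e.g. `pylint` depends on `astroid`, which depends on `wrapt`) can import both without collision. The variable names and the upstream-import scenario below are illustrative.

```python
# Sketch only: the vendored wrapt and an (optional) upstream wrapt are distinct
# modules. `_appmap.wrapt` is the import path used throughout this change set;
# the upstream import is hypothetical and only succeeds if the host project
# happens to depend on wrapt itself.
from _appmap import wrapt as vendored_wrapt

try:
    import wrapt as upstream_wrapt
except ImportError:  # host project has no wrapt dependency of its own
    upstream_wrapt = None

# Distinct module objects: appmap's patched copy never shadows the upstream one.
if upstream_wrapt is not None:
    assert vendored_wrapt is not upstream_wrapt
```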
## Testing diff --git a/_appmap/importer.py b/_appmap/importer.py index 8a97e2b6..ab33b427 100644 --- a/_appmap/importer.py +++ b/_appmap/importer.py @@ -7,7 +7,7 @@ from collections.abc import MutableSequence from functools import reduce -from appmap import wrapt +from _appmap import wrapt from .env import Env from .utils import FnType @@ -165,9 +165,7 @@ def do_import(cls, *args, **kwargs): logger.debug("do_import, mod %s args %s kwargs %s", mod, args, kwargs) if not cls.filter_chain: - cls.filter_chain = reduce( - lambda acc, e: e(acc), cls.filter_stack, NullFilter(None) - ) + cls.filter_chain = reduce(lambda acc, e: e(acc), cls.filter_stack, NullFilter(None)) def instrument_functions(filterable, selected_functions=None): logger.debug(" looking for members of %s", filterable.obj) @@ -264,12 +262,26 @@ def wrapped_find_spec(find_spec, _, args, kwargs): def wrap_finder_find_spec(finder): - find_spec = getattr(finder, "find_spec", None) - if find_spec is None: - logger.debug("no find_spec for finder %r", finder) - return + # Prior to 3.11, it worked fine to just grab find_spec from the finder and wrap it. The + # implementation of builtin finders must have changed with 3.11, because we now need the same + # kind of workaround we use above for exec_module. + if sys.version_info[1] < 11: + find_spec = getattr(finder, "find_spec", None) + if find_spec is None: + logger.debug("no find_spec for finder %r", finder) + return - finder.find_spec = wrap_finder_function(find_spec, wrapped_find_spec) + finder.find_spec = wrap_finder_function(find_spec, wrapped_find_spec) + else: + find_spec = inspect.getattr_static(finder, "find_spec", None) + if find_spec is None: + logger.debug("no find_spec for finder %r", finder) + return + + if isinstance(find_spec, (classmethod, staticmethod)): + finder.find_spec = wrap_finder_function(find_spec, wrapped_find_spec) + else: + finder.find_spec = wrap_finder_function(finder.find_spec, wrapped_find_spec) class MetapathObserver(MutableSequence): diff --git a/_appmap/test/data/unittest/expected/pytest.appmap.json b/_appmap/test/data/unittest/expected/pytest.appmap.json index b389f4a9..a3fa8b72 100644 --- a/_appmap/test/data/unittest/expected/pytest.appmap.json +++ b/_appmap/test/data/unittest/expected/pytest.appmap.json @@ -12,7 +12,7 @@ "recording": { "defined_class": "simple.test_simple.UnitTestTest", "method_id": "test_hello_world", - "source_location": "simple/test_simple.py:13" + "source_location": "simple/test_simple.py:12" }, "name": "Unit test test hello world", "feature": "Hello world", @@ -28,7 +28,7 @@ "defined_class": "simple.test_simple.UnitTestTest", "method_id": "test_hello_world", "path": "simple/test_simple.py", - "lineno": 14, + "lineno": 13, "static": false, "receiver": { "name": "self", @@ -178,7 +178,7 @@ { "name": "test_hello_world", "type": "function", - "location": "simple/test_simple.py:14", + "location": "simple/test_simple.py:13", "static": false } ] diff --git a/_appmap/test/data/unittest/expected/unittest.appmap.json b/_appmap/test/data/unittest/expected/unittest.appmap.json index 555a46b7..f524fe0c 100644 --- a/_appmap/test/data/unittest/expected/unittest.appmap.json +++ b/_appmap/test/data/unittest/expected/unittest.appmap.json @@ -12,7 +12,7 @@ "recording": { "defined_class": "simple.test_simple.UnitTestTest", "method_id": "test_hello_world", - "source_location": "simple/test_simple.py:14" + "source_location": "simple/test_simple.py:13" }, "name": "Unit test test hello world", "feature": "Hello world", @@ -28,7 +28,7 @@ "defined_class": 
"simple.test_simple.UnitTestTest", "method_id": "test_hello_world", "path": "simple/test_simple.py", - "lineno": 14, + "lineno": 13, "static": false, "receiver": { "name": "self", @@ -178,7 +178,7 @@ { "name": "test_hello_world", "type": "function", - "location": "simple/test_simple.py:14", + "location": "simple/test_simple.py:13", "static": false } ] diff --git a/_appmap/test/data/unittest/simple/test_simple.py b/_appmap/test/data/unittest/simple/test_simple.py index 988ce882..0c895645 100644 --- a/_appmap/test/data/unittest/simple/test_simple.py +++ b/_appmap/test/data/unittest/simple/test_simple.py @@ -7,7 +7,6 @@ # finders correctly. from decouple import config -import appmap.unittest class UnitTestTest(unittest.TestCase): diff --git a/_appmap/test/test_labels.py b/_appmap/test/test_labels.py index f83762ac..624217e1 100644 --- a/_appmap/test/test_labels.py +++ b/_appmap/test/test_labels.py @@ -1,6 +1,6 @@ import pytest -from appmap.wrapt import BoundFunctionWrapper, FunctionWrapper +from _appmap.wrapt import BoundFunctionWrapper, FunctionWrapper @pytest.mark.appmap_enabled diff --git a/_appmap/test/test_params.py b/_appmap/test/test_params.py index 3b3c0339..cbe572bc 100644 --- a/_appmap/test/test_params.py +++ b/_appmap/test/test_params.py @@ -7,9 +7,9 @@ import pytest +from _appmap import wrapt from _appmap.event import CallEvent from _appmap.importer import FilterableCls, FilterableFn -from appmap import wrapt empty_args = {"name": "args", "class": "builtins.tuple", "kind": "rest", "value": "()"} diff --git a/_appmap/test/test_recording.py b/_appmap/test/test_recording.py index bae89b43..ad4fc76d 100644 --- a/_appmap/test/test_recording.py +++ b/_appmap/test/test_recording.py @@ -10,7 +10,7 @@ import appmap from _appmap.event import Event from _appmap.recorder import Recorder, ThreadRecorder -from appmap.wrapt import FunctionWrapper +from _appmap.wrapt import FunctionWrapper from .normalize import normalize_appmap, remove_line_numbers diff --git a/_appmap/test/test_test_frameworks.py b/_appmap/test/test_test_frameworks.py index 6bd1968a..52a5bf50 100644 --- a/_appmap/test/test_test_frameworks.py +++ b/_appmap/test/test_test_frameworks.py @@ -136,6 +136,12 @@ def test_write_appmap(tmp_path): @pytest.fixture(name="testdir") def fixture_runner_testdir(request, data_dir, pytester, monkeypatch): + # We need to set environment variables to control how tests are run. This will only work + # properly if pytester runs pytest in a subprocess. + assert ( + pytester._method == "subprocess" # pylint:disable=protected-access + ), "must run pytest in a subprocess" + # The init subdirectory contains a sitecustomize.py file that # imports the appmap module. This simulates the way a real # installation works, performing the same function as the the diff --git a/_appmap/unittest.py b/_appmap/unittest.py new file mode 100644 index 00000000..93ca5184 --- /dev/null +++ b/_appmap/unittest.py @@ -0,0 +1,69 @@ +import sys +import unittest +from contextlib import contextmanager + +from _appmap import testing_framework, wrapt +from _appmap.utils import get_function_location + +_session = testing_framework.session("unittest", "tests") + + +def _get_test_location(cls, method_name): + fn = getattr(cls, method_name) + return get_function_location(fn) + + +if sys.version_info[1] < 8: + # Prior to 3.8, unittest called the test case's test method directly, which left us without an + # opportunity to hook it. So, instead, instrument unittest.case._Outcome.testPartExecutor, a + # method used to run test cases. 
`isTest` will be True when the part is the actual test method, + # False when it's setUp or teardown. + @wrapt.patch_function_wrapper("unittest.case", "_Outcome.testPartExecutor") + @contextmanager + def testPartExecutor(wrapped, _, args, kwargs): + def _args(test_case, *_, isTest=False, **__): + return (test_case, isTest) + + test_case, is_test = _args(*args, **kwargs) + already_recording = getattr(test_case, "_appmap_pytest_recording", None) + # fmt: off + if ( + (not is_test) + or isinstance(test_case, unittest.case._SubTest) # pylint: disable=protected-access + or already_recording + ): + # fmt: on + with wrapped(*args, **kwargs): + yield + return + + method_name = test_case.id().split(".")[-1] + location = _get_test_location(test_case.__class__, method_name) + with _session.record( + test_case.__class__, method_name, location=location + ) as metadata: + if metadata: + with wrapped( + *args, **kwargs + ), testing_framework.collect_result_metadata(metadata): + yield + else: + # session.record may return None + yield + +else: + # As of 3.8, unittest.case.TestCase now calls the test's method indirectly, through + # TestCase._callTestMethod. Hook that to manage a recording session. + @wrapt.patch_function_wrapper("unittest.case", "TestCase._callTestMethod") + def callTestMethod(wrapped, test_case, args, kwargs): + already_recording = getattr(test_case, "_appmap_pytest_recording", None) + if already_recording: + wrapped(*args, **kwargs) + return + + method_name = test_case.id().split(".")[-1] + location = _get_test_location(test_case.__class__, method_name) + with _session.record(test_case.__class__, method_name, location=location) as metadata: + if metadata: + with testing_framework.collect_result_metadata(metadata): + wrapped(*args, **kwargs) diff --git a/_appmap/wrapt b/_appmap/wrapt new file mode 120000 index 00000000..5a0704ee --- /dev/null +++ b/_appmap/wrapt @@ -0,0 +1 @@ +../vendor/_appmap/wrapt \ No newline at end of file diff --git a/appmap/flask.py b/appmap/flask.py index eb343b8d..2fb5e037 100644 --- a/appmap/flask.py +++ b/appmap/flask.py @@ -9,6 +9,7 @@ from werkzeug.exceptions import BadRequest, UnsupportedMediaType from werkzeug.middleware.dispatcher import DispatcherMiddleware +from _appmap import wrapt from _appmap.env import Env from _appmap.event import HttpServerRequestEvent, HttpServerResponseEvent from _appmap.flask import app as remote_recording_app @@ -16,7 +17,6 @@ from _appmap.utils import patch_class, values_dict from _appmap.web_framework import JSON_ERRORS, AppmapMiddleware, MiddlewareInserter from _appmap.web_framework import TemplateHandler as BaseTemplateHandler -from appmap import wrapt try: # pylint: disable=unused-import diff --git a/appmap/pytest.py b/appmap/pytest.py index 8b9d4c80..703e0e88 100644 --- a/appmap/pytest.py +++ b/appmap/pytest.py @@ -1,8 +1,7 @@ import pytest -from _appmap import testing_framework +from _appmap import testing_framework, wrapt from _appmap.env import Env -from appmap import wrapt logger = Env.current.getLogger(__name__) diff --git a/appmap/unittest.py b/appmap/unittest.py index 44d38a98..368b3089 100644 --- a/appmap/unittest.py +++ b/appmap/unittest.py @@ -1,60 +1,7 @@ -import unittest -from contextlib import contextmanager - -from _appmap import testing_framework from _appmap.env import Env -from _appmap.utils import get_function_location -from appmap import wrapt logger = Env.current.getLogger(__name__) - -def setup_unittest(): - session = testing_framework.session("unittest", "tests") - - def get_test_location(cls, 
method_name): - - fn = getattr(cls, method_name) - return get_function_location(fn) - - # unittest.case._Outcome.testPartExecutor is used by all supported - # versions of unittest to run test cases. `isTest` will be True when - # the part is the actual test method, False when it's setUp or - # teardown. - @wrapt.patch_function_wrapper("unittest.case", "_Outcome.testPartExecutor") - @contextmanager - def testPartExecutor(wrapped, _, args, kwargs): - def _args(test_case, *_, isTest=False, **__): - return (test_case, isTest) - - test_case, is_test = _args(*args, **kwargs) - already_recording = getattr(test_case, "_appmap_pytest_recording", None) - # fmt: off - if ( - (not is_test) - or isinstance(test_case, unittest.case._SubTest) # pylint: disable=protected-access - or already_recording - ): - # fmt: on - with wrapped(*args, **kwargs): - yield - return - - method_name = test_case.id().split(".")[-1] - location = get_test_location(test_case.__class__, method_name) - with session.record( - test_case.__class__, method_name, location=location - ) as metadata: - if metadata: - with wrapped( - *args, **kwargs - ), testing_framework.collect_result_metadata(metadata): - yield - else: - # session.record may return None - yield - - if not Env.current.is_appmap_repo and Env.current.enables("unittest"): logger.debug("Test recording is enabled (unittest)") - setup_unittest() + import _appmap.unittest # pyright: ignore pylint: disable=unused-import diff --git a/appmap/wrapt b/appmap/wrapt deleted file mode 120000 index 8ee9c092..00000000 --- a/appmap/wrapt +++ /dev/null @@ -1 +0,0 @@ -../vendor/wrapt/src/appmap/wrapt \ No newline at end of file diff --git a/ci/run_tests.sh b/ci/run_tests.sh index 472d0352..cec80e9e 100755 --- a/ci/run_tests.sh +++ b/ci/run_tests.sh @@ -6,4 +6,4 @@ docker run -i${t} --rm\ -v $PWD/dist:/dist -v $PWD/_appmap/test/data/unittest:/_appmap/test/data/unittest\ -v $PWD/ci:/ci\ -w /tmp\ - python:3.9 bash -ce "${@:-/ci/smoketest.sh; /ci/test_pipenv.sh; /ci/test_poetry.sh}" + python:3.11 bash -ce "${@:-/ci/smoketest.sh; /ci/test_pipenv.sh; /ci/test_poetry.sh}" diff --git a/ci/smoketest.sh b/ci/smoketest.sh index 5568fcfc..028aba37 100755 --- a/ci/smoketest.sh +++ b/ci/smoketest.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -set -e +set -ex pip install -U pip pytest "flask<2" python-decouple pip install /dist/appmap-*-py3-none-any.whl diff --git a/pyproject.toml b/pyproject.toml index 4d6c87ed..c6777499 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "appmap" -version = "1.15.2" +version = "1.16.0" description = "Create AppMap files by recording a Python application." readme = "README.md" authors = [ @@ -22,10 +22,10 @@ classifiers = [ 'Topic :: Software Development :: Documentation' ] include = ['appmap.pth'] +exclude = ['_appmap/wrapt'] packages = [ - { include = "appmap"}, {include = "_appmap" }, - { include = "appmap/wrapt/*.py", from="vendor/wrapt/src" } + { include = "appmap"}, {include = "_appmap" }, {include = "_appmap/wrapt", from = "vendor"} ] [tool.poetry.dependencies] @@ -36,7 +36,7 @@ PyYAML = ">=5.3.0" inflection = ">=0.3.0" importlib-metadata = ">=0.8" importlib-resources = "^5.4.0" -packaging = ">=21.3" +packaging = ">=19.0" # If you include "Django" as an optional dependency here, you'll be able to use poetry to install it # in your dev environment. However, doing so causes poetry v1.2.0 to remove it from the virtualenv # *created and managed by tox*, i.e. not your dev environment. 
@@ -87,7 +87,7 @@ line-length = 100 extend-exclude = ''' /( | vendor - | appmap/wrapt + | _appmap/wrapt )/ ''' @@ -95,6 +95,18 @@ extend-exclude = ''' profile = "black" extend_skip = [ "vendor", - "appmap/wrapt" + "_appmap/wrapt" ] +[tool.vendoring] +destination = "vendor/_appmap/" +requirements = "vendor/vendor.txt" +namespace = "" + +protected-files = ["vendor.txt"] +patches-dir = "vendor/patches" + +[tool.vendoring.transformations] +drop = [ + "**/*.so", +] diff --git a/tox.ini b/tox.ini index 9a685c10..a2934df7 100644 --- a/tox.ini +++ b/tox.ini @@ -2,7 +2,7 @@ isolated_build = true # The *-web environments test the latest versions of Django and Flask with the full test suite. For # older version of the web frameworks, just run the tests that are specific to them. -envlist = py3{8,9,10}-web, py3{7,8,9,10}-flask1-django3 +envlist = py3{8,9,10,11}-web, py3{7,8,9,10,11}-flask1-django3 [testenv] allowlist_externals = @@ -25,3 +25,11 @@ commands = web: poetry run {posargs:pytest -vv} flask1: poetry run pytest _appmap/test/test_flask.py django3: poetry run pytest _appmap/test/test_django.py + +[testenv:vendoring] +skip_install = True +deps = vendoring +commands = + poetry run vendoring {posargs:sync} + # We don't need the .pyi files vendoring generates + python -c 'from pathlib import Path; all(map(Path.unlink, Path("vendor").rglob("*.pyi")))' \ No newline at end of file diff --git a/vendor/_appmap/wrapt/LICENSE b/vendor/_appmap/wrapt/LICENSE new file mode 100644 index 00000000..bd8c7124 --- /dev/null +++ b/vendor/_appmap/wrapt/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2013-2023, Graham Dumpleton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
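One editorial note on the cleanup step in the `vendoring` tox environment above, with a standalone sketch (not part of the change set): `Path.unlink()` returns `None`, and `all()` stops at the first falsy value, so the one-liner as written deletes at most one `.pyi` stub. If the intent is to remove them all, an explicit loop (or fully consuming the iterator, e.g. with `list(...)`) expresses it unambiguously. The `vendor` directory name below is taken from that one-liner.

```python
# Equivalent of the intended cleanup: remove every .pyi stub the vendoring
# tool generated under vendor/. Path.unlink performs the deletion; the loop
# ensures the whole rglob iterator is consumed rather than short-circuited.
from pathlib import Path

for stub in Path("vendor").rglob("*.pyi"):
    stub.unlink()
```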
diff --git a/vendor/_appmap/wrapt/__init__.py b/vendor/_appmap/wrapt/__init__.py new file mode 100644 index 00000000..c5363524 --- /dev/null +++ b/vendor/_appmap/wrapt/__init__.py @@ -0,0 +1,27 @@ +__version_info__ = ('1', '15', '0') +__version__ = '.'.join(__version_info__) + +from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper, + BoundFunctionWrapper, WeakFunctionProxy, PartialCallableObjectProxy, + resolve_path, apply_patch, wrap_object, wrap_object_attribute, + function_wrapper, wrap_function_wrapper, patch_function_wrapper, + transient_function_wrapper) + +from .decorators import (adapter_factory, AdapterFactory, decorator, + synchronized) + +from .importer import (register_post_import_hook, when_imported, + notify_module_loaded, discover_post_import_hooks) + +# Import of inspect.getcallargs() included for backward compatibility. An +# implementation of this was previously bundled and made available here for +# Python <2.7. Avoid using this in future. + +from inspect import getcallargs + +# Variant of inspect.formatargspec() included here for forward compatibility. +# This is being done because Python 3.11 dropped inspect.formatargspec() but +# code for handling signature changing decorators relied on it. Exposing the +# bundled implementation here in case any user of wrapt was also needing it. + +from .arguments import formatargspec diff --git a/vendor/_appmap/wrapt/arguments.py b/vendor/_appmap/wrapt/arguments.py new file mode 100644 index 00000000..032bc059 --- /dev/null +++ b/vendor/_appmap/wrapt/arguments.py @@ -0,0 +1,38 @@ +# The inspect.formatargspec() function was dropped in Python 3.11 but we need +# need it for when constructing signature changing decorators based on result of +# inspect.getargspec() or inspect.getfullargspec(). The code here implements +# inspect.formatargspec() base on Parameter and Signature from inspect module, +# which were added in Python 3.6. Thanks to Cyril Jouve for the implementation. + +try: + from inspect import Parameter, Signature +except ImportError: + from inspect import formatargspec +else: + def formatargspec(args, varargs=None, varkw=None, defaults=None, + kwonlyargs=(), kwonlydefaults={}, annotations={}): + if kwonlydefaults is None: + kwonlydefaults = {} + ndefaults = len(defaults) if defaults else 0 + parameters = [ + Parameter( + arg, + Parameter.POSITIONAL_OR_KEYWORD, + default=defaults[i] if i >= 0 else Parameter.empty, + annotation=annotations.get(arg, Parameter.empty), + ) for i, arg in enumerate(args, ndefaults - len(args)) + ] + if varargs: + parameters.append(Parameter(varargs, Parameter.VAR_POSITIONAL)) + parameters.extend( + Parameter( + kwonlyarg, + Parameter.KEYWORD_ONLY, + default=kwonlydefaults.get(kwonlyarg, Parameter.empty), + annotation=annotations.get(kwonlyarg, Parameter.empty), + ) for kwonlyarg in kwonlyargs + ) + if varkw: + parameters.append(Parameter(varkw, Parameter.VAR_KEYWORD)) + return_annotation = annotations.get('return', Signature.empty) + return str(Signature(parameters, return_annotation=return_annotation)) \ No newline at end of file diff --git a/vendor/_appmap/wrapt/decorators.py b/vendor/_appmap/wrapt/decorators.py new file mode 100644 index 00000000..c3f25472 --- /dev/null +++ b/vendor/_appmap/wrapt/decorators.py @@ -0,0 +1,541 @@ +"""This module implements decorators for implementing other decorators +as well as some commonly used decorators. 
+ +""" + +import sys + +PY2 = sys.version_info[0] == 2 + +if PY2: + string_types = basestring, + + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + +else: + string_types = str, + + import builtins + + exec_ = getattr(builtins, "exec") + del builtins + +from functools import partial +from inspect import isclass +from threading import Lock, RLock + +from .arguments import formatargspec + +try: + from inspect import signature +except ImportError: + pass + +from .wrappers import (FunctionWrapper, BoundFunctionWrapper, ObjectProxy, + CallableObjectProxy) + +# Adapter wrapper for the wrapped function which will overlay certain +# properties from the adapter function onto the wrapped function so that +# functions such as inspect.getargspec(), inspect.getfullargspec(), +# inspect.signature() and inspect.getsource() return the correct results +# one would expect. + +class _AdapterFunctionCode(CallableObjectProxy): + + def __init__(self, wrapped_code, adapter_code): + super(_AdapterFunctionCode, self).__init__(wrapped_code) + self._self_adapter_code = adapter_code + + @property + def co_argcount(self): + return self._self_adapter_code.co_argcount + + @property + def co_code(self): + return self._self_adapter_code.co_code + + @property + def co_flags(self): + return self._self_adapter_code.co_flags + + @property + def co_kwonlyargcount(self): + return self._self_adapter_code.co_kwonlyargcount + + @property + def co_varnames(self): + return self._self_adapter_code.co_varnames + +class _AdapterFunctionSurrogate(CallableObjectProxy): + + def __init__(self, wrapped, adapter): + super(_AdapterFunctionSurrogate, self).__init__(wrapped) + self._self_adapter = adapter + + @property + def __code__(self): + return _AdapterFunctionCode(self.__wrapped__.__code__, + self._self_adapter.__code__) + + @property + def __defaults__(self): + return self._self_adapter.__defaults__ + + @property + def __kwdefaults__(self): + return self._self_adapter.__kwdefaults__ + + @property + def __signature__(self): + if 'signature' not in globals(): + return self._self_adapter.__signature__ + else: + return signature(self._self_adapter) + + if PY2: + func_code = __code__ + func_defaults = __defaults__ + +class _BoundAdapterWrapper(BoundFunctionWrapper): + + @property + def __func__(self): + return _AdapterFunctionSurrogate(self.__wrapped__.__func__, + self._self_parent._self_adapter) + + @property + def __signature__(self): + if 'signature' not in globals(): + return self.__wrapped__.__signature__ + else: + return signature(self._self_parent._self_adapter) + + if PY2: + im_func = __func__ + +class AdapterWrapper(FunctionWrapper): + + __bound_function_wrapper__ = _BoundAdapterWrapper + + def __init__(self, *args, **kwargs): + adapter = kwargs.pop('adapter') + super(AdapterWrapper, self).__init__(*args, **kwargs) + self._self_surrogate = _AdapterFunctionSurrogate( + self.__wrapped__, adapter) + self._self_adapter = adapter + + @property + def __code__(self): + return self._self_surrogate.__code__ + + @property + def __defaults__(self): + return self._self_surrogate.__defaults__ + + @property + def __kwdefaults__(self): + return self._self_surrogate.__kwdefaults__ + + if PY2: + func_code = __code__ + func_defaults = __defaults__ + + @property + def __signature__(self): + return 
self._self_surrogate.__signature__ + +class AdapterFactory(object): + def __call__(self, wrapped): + raise NotImplementedError() + +class DelegatedAdapterFactory(AdapterFactory): + def __init__(self, factory): + super(DelegatedAdapterFactory, self).__init__() + self.factory = factory + def __call__(self, wrapped): + return self.factory(wrapped) + +adapter_factory = DelegatedAdapterFactory + +# Decorator for creating other decorators. This decorator and the +# wrappers which they use are designed to properly preserve any name +# attributes, function signatures etc, in addition to the wrappers +# themselves acting like a transparent proxy for the original wrapped +# function so the wrapper is effectively indistinguishable from the +# original wrapped function. + +def decorator(wrapper=None, enabled=None, adapter=None, proxy=FunctionWrapper): + # The decorator should be supplied with a single positional argument + # which is the wrapper function to be used to implement the + # decorator. This may be preceded by a step whereby the keyword + # arguments are supplied to customise the behaviour of the + # decorator. The 'adapter' argument is used to optionally denote a + # separate function which is notionally used by an adapter + # decorator. In that case parts of the function '__code__' and + # '__defaults__' attributes are used from the adapter function + # rather than those of the wrapped function. This allows for the + # argument specification from inspect.getfullargspec() and similar + # functions to be overridden with a prototype for a different + # function than what was wrapped. The 'enabled' argument provides a + # way to enable/disable the use of the decorator. If the type of + # 'enabled' is a boolean, then it is evaluated immediately and the + # wrapper not even applied if it is False. If not a boolean, it will + # be evaluated when the wrapper is called for an unbound wrapper, + # and when binding occurs for a bound wrapper. When being evaluated, + # if 'enabled' is callable it will be called to obtain the value to + # be checked. If False, the wrapper will not be called and instead + # the original wrapped function will be called directly instead. + # The 'proxy' argument provides a way of passing a custom version of + # the FunctionWrapper class used in decorating the function. + + if wrapper is not None: + # Helper function for creating wrapper of the appropriate + # time when we need it down below. + + def _build(wrapped, wrapper, enabled=None, adapter=None): + if adapter: + if isinstance(adapter, AdapterFactory): + adapter = adapter(wrapped) + + if not callable(adapter): + ns = {} + + # Check if the signature argument specification has + # annotations. If it does then we need to remember + # it but also drop it when attempting to manufacture + # a standin adapter function. This is necessary else + # it will try and look up any types referenced in + # the annotations in the empty namespace we use, + # which will fail. + + annotations = {} + + if not isinstance(adapter, string_types): + if len(adapter) == 7: + annotations = adapter[-1] + adapter = adapter[:-1] + adapter = formatargspec(*adapter) + + exec_('def adapter{}: pass'.format(adapter), ns, ns) + adapter = ns['adapter'] + + # Override the annotations for the manufactured + # adapter function so they match the original + # adapter signature argument specification. 
+ + if annotations: + adapter.__annotations__ = annotations + + return AdapterWrapper(wrapped=wrapped, wrapper=wrapper, + enabled=enabled, adapter=adapter) + + return proxy(wrapped=wrapped, wrapper=wrapper, enabled=enabled) + + # The wrapper has been provided so return the final decorator. + # The decorator is itself one of our function wrappers so we + # can determine when it is applied to functions, instance methods + # or class methods. This allows us to bind the instance or class + # method so the appropriate self or cls attribute is supplied + # when it is finally called. + + def _wrapper(wrapped, instance, args, kwargs): + # We first check for the case where the decorator was applied + # to a class type. + # + # @decorator + # class mydecoratorclass(object): + # def __init__(self, arg=None): + # self.arg = arg + # def __call__(self, wrapped, instance, args, kwargs): + # return wrapped(*args, **kwargs) + # + # @mydecoratorclass(arg=1) + # def function(): + # pass + # + # In this case an instance of the class is to be used as the + # decorator wrapper function. If args was empty at this point, + # then it means that there were optional keyword arguments + # supplied to be used when creating an instance of the class + # to be used as the wrapper function. + + if instance is None and isclass(wrapped) and not args: + # We still need to be passed the target function to be + # wrapped as yet, so we need to return a further function + # to be able to capture it. + + def _capture(target_wrapped): + # Now have the target function to be wrapped and need + # to create an instance of the class which is to act + # as the decorator wrapper function. Before we do that, + # we need to first check that use of the decorator + # hadn't been disabled by a simple boolean. If it was, + # the target function to be wrapped is returned instead. + + _enabled = enabled + if type(_enabled) is bool: + if not _enabled: + return target_wrapped + _enabled = None + + # Now create an instance of the class which is to act + # as the decorator wrapper function. Any arguments had + # to be supplied as keyword only arguments so that is + # all we pass when creating it. + + target_wrapper = wrapped(**kwargs) + + # Finally build the wrapper itself and return it. + + return _build(target_wrapped, target_wrapper, + _enabled, adapter) + + return _capture + + # We should always have the target function to be wrapped at + # this point as the first (and only) value in args. + + target_wrapped = args[0] + + # Need to now check that use of the decorator hadn't been + # disabled by a simple boolean. If it was, then target + # function to be wrapped is returned instead. + + _enabled = enabled + if type(_enabled) is bool: + if not _enabled: + return target_wrapped + _enabled = None + + # We now need to build the wrapper, but there are a couple of + # different cases we need to consider. + + if instance is None: + if isclass(wrapped): + # In this case the decorator was applied to a class + # type but optional keyword arguments were not supplied + # for initialising an instance of the class to be used + # as the decorator wrapper function. + # + # @decorator + # class mydecoratorclass(object): + # def __init__(self, arg=None): + # self.arg = arg + # def __call__(self, wrapped, instance, + # args, kwargs): + # return wrapped(*args, **kwargs) + # + # @mydecoratorclass + # def function(): + # pass + # + # We still need to create an instance of the class to + # be used as the decorator wrapper function, but no + # arguments are pass. 
+ + target_wrapper = wrapped() + + else: + # In this case the decorator was applied to a normal + # function, or possibly a static method of a class. + # + # @decorator + # def mydecoratorfuntion(wrapped, instance, + # args, kwargs): + # return wrapped(*args, **kwargs) + # + # @mydecoratorfunction + # def function(): + # pass + # + # That normal function becomes the decorator wrapper + # function. + + target_wrapper = wrapper + + else: + if isclass(instance): + # In this case the decorator was applied to a class + # method. + # + # class myclass(object): + # @decorator + # @classmethod + # def decoratorclassmethod(cls, wrapped, + # instance, args, kwargs): + # return wrapped(*args, **kwargs) + # + # instance = myclass() + # + # @instance.decoratorclassmethod + # def function(): + # pass + # + # This one is a bit strange because binding was actually + # performed on the wrapper created by our decorator + # factory. We need to apply that binding to the decorator + # wrapper function that the decorator factory + # was applied to. + + target_wrapper = wrapper.__get__(None, instance) + + else: + # In this case the decorator was applied to an instance + # method. + # + # class myclass(object): + # @decorator + # def decoratorclassmethod(self, wrapped, + # instance, args, kwargs): + # return wrapped(*args, **kwargs) + # + # instance = myclass() + # + # @instance.decoratorclassmethod + # def function(): + # pass + # + # This one is a bit strange because binding was actually + # performed on the wrapper created by our decorator + # factory. We need to apply that binding to the decorator + # wrapper function that the decorator factory + # was applied to. + + target_wrapper = wrapper.__get__(instance, type(instance)) + + # Finally build the wrapper itself and return it. + + return _build(target_wrapped, target_wrapper, _enabled, adapter) + + # We first return our magic function wrapper here so we can + # determine in what context the decorator factory was used. In + # other words, it is itself a universal decorator. The decorator + # function is used as the adapter so that linters see a signature + # corresponding to the decorator and not the wrapper it is being + # applied to. + + return _build(wrapper, _wrapper, adapter=decorator) + + else: + # The wrapper still has not been provided, so we are just + # collecting the optional keyword arguments. Return the + # decorator again wrapped in a partial using the collected + # arguments. + + return partial(decorator, enabled=enabled, adapter=adapter, + proxy=proxy) + +# Decorator for implementing thread synchronization. It can be used as a +# decorator, in which case the synchronization context is determined by +# what type of function is wrapped, or it can also be used as a context +# manager, where the user needs to supply the correct synchronization +# context. It is also possible to supply an object which appears to be a +# synchronization primitive of some sort, by virtue of having release() +# and acquire() methods. In that case that will be used directly as the +# synchronization primitive without creating a separate lock against the +# derived or supplied context. + +def synchronized(wrapped): + # Determine if being passed an object which is a synchronization + # primitive. We can't check by type for Lock, RLock, Semaphore etc, + # as the means of creating them isn't the type. Therefore use the + # existence of acquire() and release() methods. This is more + # extensible anyway as it allows custom synchronization mechanisms. 
+ + if hasattr(wrapped, 'acquire') and hasattr(wrapped, 'release'): + # We remember what the original lock is and then return a new + # decorator which accesses and locks it. When returning the new + # decorator we wrap it with an object proxy so we can override + # the context manager methods in case it is being used to wrap + # synchronized statements with a 'with' statement. + + lock = wrapped + + @decorator + def _synchronized(wrapped, instance, args, kwargs): + # Execute the wrapped function while the original supplied + # lock is held. + + with lock: + return wrapped(*args, **kwargs) + + class _PartialDecorator(CallableObjectProxy): + + def __enter__(self): + lock.acquire() + return lock + + def __exit__(self, *args): + lock.release() + + return _PartialDecorator(wrapped=_synchronized) + + # Following only apply when the lock is being created automatically + # based on the context of what was supplied. In this case we supply + # a final decorator, but need to use FunctionWrapper directly as we + # want to derive from it to add context manager methods in case it is + # being used to wrap synchronized statements with a 'with' statement. + + def _synchronized_lock(context): + # Attempt to retrieve the lock for the specific context. + + lock = vars(context).get('_synchronized_lock', None) + + if lock is None: + # There is no existing lock defined for the context we + # are dealing with so we need to create one. This needs + # to be done in a way to guarantee there is only one + # created, even if multiple threads try and create it at + # the same time. We can't always use the setdefault() + # method on the __dict__ for the context. This is the + # case where the context is a class, as __dict__ is + # actually a dictproxy. What we therefore do is use a + # meta lock on this wrapper itself, to control the + # creation and assignment of the lock attribute against + # the context. + + with synchronized._synchronized_meta_lock: + # We need to check again for whether the lock we want + # exists in case two threads were trying to create it + # at the same time and were competing to create the + # meta lock. + + lock = vars(context).get('_synchronized_lock', None) + + if lock is None: + lock = RLock() + setattr(context, '_synchronized_lock', lock) + + return lock + + def _synchronized_wrapper(wrapped, instance, args, kwargs): + # Execute the wrapped function while the lock for the + # desired context is held. If instance is None then the + # wrapped function is used as the context. + + with _synchronized_lock(instance if instance is not None else wrapped): + return wrapped(*args, **kwargs) + + class _FinalDecorator(FunctionWrapper): + + def __enter__(self): + self._self_lock = _synchronized_lock(self.__wrapped__) + self._self_lock.acquire() + return self._self_lock + + def __exit__(self, *args): + self._self_lock.release() + + return _FinalDecorator(wrapped=wrapped, wrapper=_synchronized_wrapper) + +synchronized._synchronized_meta_lock = Lock() diff --git a/vendor/_appmap/wrapt/importer.py b/vendor/_appmap/wrapt/importer.py new file mode 100644 index 00000000..1e5e6886 --- /dev/null +++ b/vendor/_appmap/wrapt/importer.py @@ -0,0 +1,293 @@ +"""This module implements a post import hook mechanism styled after what is +described in PEP-369. Note that it doesn't cope with modules being reloaded. 
+ +""" + +import sys +import threading + +PY2 = sys.version_info[0] == 2 + +if PY2: + string_types = basestring, + find_spec = None +else: + string_types = str, + from importlib.util import find_spec + +# The dictionary registering any post import hooks to be triggered once +# the target module has been imported. Once a module has been imported +# and the hooks fired, the list of hooks recorded against the target +# module will be truncated but the list left in the dictionary. This +# acts as a flag to indicate that the module had already been imported. + +_post_import_hooks = {} +_post_import_hooks_init = False +_post_import_hooks_lock = threading.RLock() + +# Register a new post import hook for the target module name. This +# differs from the PEP-369 implementation in that it also allows the +# hook function to be specified as a string consisting of the name of +# the callback in the form 'module:function'. This will result in a +# proxy callback being registered which will defer loading of the +# specified module containing the callback function until required. + +def _create_import_hook_from_string(name): + def import_hook(module): + module_name, function = name.split(':') + attrs = function.split('.') + __import__(module_name) + callback = sys.modules[module_name] + for attr in attrs: + callback = getattr(callback, attr) + return callback(module) + return import_hook + +def register_post_import_hook(hook, name): + # Create a deferred import hook if hook is a string name rather than + # a callable function. + + if isinstance(hook, string_types): + hook = _create_import_hook_from_string(hook) + + with _post_import_hooks_lock: + # Automatically install the import hook finder if it has not already + # been installed. + + global _post_import_hooks_init + + if not _post_import_hooks_init: + _post_import_hooks_init = True + sys.meta_path.insert(0, ImportHookFinder()) + + # Check if the module is already imported. If not, register the hook + # to be called after import. + + module = sys.modules.get(name, None) + + if module is None: + _post_import_hooks.setdefault(name, []).append(hook) + + # If the module is already imported, we fire the hook right away. Note that + # the hook is called outside of the lock to avoid deadlocks if code run as a + # consequence of calling the module import hook in turn triggers a separate + # thread which tries to register an import hook. + + if module is not None: + hook(module) + +# Register post import hooks defined as package entry points. + +def _create_import_hook_from_entrypoint(entrypoint): + def import_hook(module): + __import__(entrypoint.module_name) + callback = sys.modules[entrypoint.module_name] + for attr in entrypoint.attrs: + callback = getattr(callback, attr) + return callback(module) + return import_hook + +def discover_post_import_hooks(group): + try: + import pkg_resources + except ImportError: + return + + for entrypoint in pkg_resources.iter_entry_points(group=group): + callback = _create_import_hook_from_entrypoint(entrypoint) + register_post_import_hook(callback, entrypoint.name) + +# Indicate that a module has been loaded. Any post import hooks which +# were registered against the target module will be invoked. If an +# exception is raised in any of the post import hooks, that will cause +# the import of the target module to fail. 
+ +def notify_module_loaded(module): + name = getattr(module, '__name__', None) + + with _post_import_hooks_lock: + hooks = _post_import_hooks.pop(name, ()) + + # Note that the hook is called outside of the lock to avoid deadlocks if + # code run as a consequence of calling the module import hook in turn + # triggers a separate thread which tries to register an import hook. + + for hook in hooks: + hook(module) + +# A custom module import finder. This intercepts attempts to import +# modules and watches out for attempts to import target modules of +# interest. When a module of interest is imported, then any post import +# hooks which are registered will be invoked. + +class _ImportHookLoader: + + def load_module(self, fullname): + module = sys.modules[fullname] + notify_module_loaded(module) + + return module + +class _ImportHookChainedLoader: + + def __init__(self, loader): + self.loader = loader + + if hasattr(loader, "load_module"): + self.load_module = self._load_module + if hasattr(loader, "create_module"): + self.create_module = self._create_module + if hasattr(loader, "exec_module"): + self.exec_module = self._exec_module + + def _set_loader(self, module): + # Set module's loader to self.loader unless it's already set to + # something else. Import machinery will set it to spec.loader if it is + # None, so handle None as well. The module may not support attribute + # assignment, in which case we simply skip it. Note that we also deal + # with __loader__ not existing at all. This is to future proof things + # due to proposal to remove the attribue as described in the GitHub + # issue at https://github.com/python/cpython/issues/77458. Also prior + # to Python 3.3, the __loader__ attribute was only set if a custom + # module loader was used. It isn't clear whether the attribute still + # existed in that case or was set to None. + + class UNDEFINED: pass + + if getattr(module, "__loader__", UNDEFINED) in (None, self): + try: + module.__loader__ = self.loader + except AttributeError: + pass + + if (getattr(module, "__spec__", None) is not None + and getattr(module.__spec__, "loader", None) is self): + module.__spec__.loader = self.loader + + def _load_module(self, fullname): + module = self.loader.load_module(fullname) + self._set_loader(module) + notify_module_loaded(module) + + return module + + # Python 3.4 introduced create_module() and exec_module() instead of + # load_module() alone. Splitting the two steps. + + def _create_module(self, spec): + return self.loader.create_module(spec) + + def _exec_module(self, module): + self._set_loader(module) + self.loader.exec_module(module) + notify_module_loaded(module) + +class ImportHookFinder: + + def __init__(self): + self.in_progress = {} + + def find_module(self, fullname, path=None): + # If the module being imported is not one we have registered + # post import hooks for, we can return immediately. We will + # take no further part in the importing of this module. + + with _post_import_hooks_lock: + if fullname not in _post_import_hooks: + return None + + # When we are interested in a specific module, we will call back + # into the import system a second time to defer to the import + # finder that is supposed to handle the importing of the module. + # We set an in progress flag for the target module so that on + # the second time through we don't trigger another call back + # into the import system and cause a infinite loop. 
+ + if fullname in self.in_progress: + return None + + self.in_progress[fullname] = True + + # Now call back into the import system again. + + try: + if not find_spec: + # For Python 2 we don't have much choice but to + # call back in to __import__(). This will + # actually cause the module to be imported. If no + # module could be found then ImportError will be + # raised. Otherwise we return a loader which + # returns the already loaded module and invokes + # the post import hooks. + + __import__(fullname) + + return _ImportHookLoader() + + else: + # For Python 3 we need to use find_spec().loader + # from the importlib.util module. It doesn't actually + # import the target module and only finds the + # loader. If a loader is found, we need to return + # our own loader which will then in turn call the + # real loader to import the module and invoke the + # post import hooks. + + loader = getattr(find_spec(fullname), "loader", None) + + if loader and not isinstance(loader, _ImportHookChainedLoader): + return _ImportHookChainedLoader(loader) + + finally: + del self.in_progress[fullname] + + def find_spec(self, fullname, path=None, target=None): + # Since Python 3.4, you are meant to implement find_spec() method + # instead of find_module() and since Python 3.10 you get deprecation + # warnings if you don't define find_spec(). + + # If the module being imported is not one we have registered + # post import hooks for, we can return immediately. We will + # take no further part in the importing of this module. + + with _post_import_hooks_lock: + if fullname not in _post_import_hooks: + return None + + # When we are interested in a specific module, we will call back + # into the import system a second time to defer to the import + # finder that is supposed to handle the importing of the module. + # We set an in progress flag for the target module so that on + # the second time through we don't trigger another call back + # into the import system and cause a infinite loop. + + if fullname in self.in_progress: + return None + + self.in_progress[fullname] = True + + # Now call back into the import system again. + + try: + # This should only be Python 3 so find_spec() should always + # exist so don't need to check. + + spec = find_spec(fullname) + loader = getattr(spec, "loader", None) + + if loader and not isinstance(loader, _ImportHookChainedLoader): + spec.loader = _ImportHookChainedLoader(loader) + + return spec + + finally: + del self.in_progress[fullname] + +# Decorator for marking that a function should be called as a post +# import hook when the target module is imported. + +def when_imported(name): + def register(hook): + register_post_import_hook(hook, name) + return hook + return register diff --git a/vendor/_appmap/wrapt/wrappers.py b/vendor/_appmap/wrapt/wrappers.py new file mode 100644 index 00000000..a7e9a3d6 --- /dev/null +++ b/vendor/_appmap/wrapt/wrappers.py @@ -0,0 +1,1077 @@ +import os +import sys +import functools +import operator +import weakref +import inspect +from itertools import chain +import types + +PY2 = sys.version_info[0] == 2 + +if PY2: + string_types = basestring, +else: + string_types = str, + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + return meta("NewBase", bases, {}) + +def slots(obj): + return chain.from_iterable(getattr(cls, '__slots__', []) + for cls in type(obj).__mro__) + +class _ObjectProxyMethods(object): + + # We use properties to override the values of __module__ and + # __doc__. 
If we add these in ObjectProxy, the derived class + # __dict__ will still be setup to have string variants of these + # attributes and the rules of descriptors means that they appear to + # take precedence over the properties in the base class. To avoid + # that, we copy the properties into the derived class type itself + # via a meta class. In that way the properties will always take + # precedence. + + @property + def __module__(self): + return self.__wrapped__.__module__ + + @__module__.setter + def __module__(self, value): + self.__wrapped__.__module__ = value + + @property + def __doc__(self): + return self.__wrapped__.__doc__ + + @__doc__.setter + def __doc__(self, value): + self.__wrapped__.__doc__ = value + + # We similar use a property for __dict__. We need __dict__ to be + # explicit to ensure that vars() works as expected. + + @property + def __dict__(self): + return self.__wrapped__.__dict__ + + # Need to also propagate the special __weakref__ attribute for case + # where decorating classes which will define this. If do not define + # it and use a function like inspect.getmembers() on a decorator + # class it will fail. This can't be in the derived classes. + + @property + def __weakref__(self): + return self.__wrapped__.__weakref__ + +class _ObjectProxyMetaType(type): + def __new__(cls, name, bases, dictionary): + # Copy our special properties into the class so that they + # always take precedence over attributes of the same name added + # during construction of a derived class. This is to save + # duplicating the implementation for them in all derived classes. + + dictionary.update(vars(_ObjectProxyMethods)) + + return type.__new__(cls, name, bases, dictionary) + +class ObjectProxy(with_metaclass(_ObjectProxyMetaType)): + + __slots__ = ('__wrapped__',) + + def __init__(self, wrapped): + object.__setattr__(self, '__wrapped__', wrapped) + + # Python 3.2+ has the __qualname__ attribute, but it does not + # allow it to be overridden using a property and it must instead + # be an actual string object instead. + + try: + object.__setattr__(self, '__qualname__', wrapped.__qualname__) + except AttributeError: + pass + + # Python 3.10 onwards also does not allow itself to be overridden + # using a property and it must instead be set explicitly. 
+ + try: + object.__setattr__(self, '__annotations__', wrapped.__annotations__) + except AttributeError: + pass + + @property + def __name__(self): + return self.__wrapped__.__name__ + + @__name__.setter + def __name__(self, value): + self.__wrapped__.__name__ = value + + @property + def __class__(self): + return self.__wrapped__.__class__ + + @__class__.setter + def __class__(self, value): + self.__wrapped__.__class__ = value + + def __dir__(self): + return dir(self.__wrapped__) + + def __str__(self): + return str(self.__wrapped__) + + if not PY2: + def __bytes__(self): + return bytes(self.__wrapped__) + + def __repr__(self): + return '<{} at 0x{:x} for {} at 0x{:x}>'.format( + type(self).__name__, id(self), + type(self.__wrapped__).__name__, + id(self.__wrapped__)) + + def __reversed__(self): + return reversed(self.__wrapped__) + + if not PY2: + def __round__(self): + return round(self.__wrapped__) + + if sys.hexversion >= 0x03070000: + def __mro_entries__(self, bases): + return (self.__wrapped__,) + + def __lt__(self, other): + return self.__wrapped__ < other + + def __le__(self, other): + return self.__wrapped__ <= other + + def __eq__(self, other): + return self.__wrapped__ == other + + def __ne__(self, other): + return self.__wrapped__ != other + + def __gt__(self, other): + return self.__wrapped__ > other + + def __ge__(self, other): + return self.__wrapped__ >= other + + def __hash__(self): + return hash(self.__wrapped__) + + def __nonzero__(self): + return bool(self.__wrapped__) + + def __bool__(self): + return bool(self.__wrapped__) + + def __setattr__(self, name, value): + if name.startswith('_self_'): + object.__setattr__(self, name, value) + + elif name == '__wrapped__': + object.__setattr__(self, name, value) + try: + object.__delattr__(self, '__qualname__') + except AttributeError: + pass + try: + object.__setattr__(self, '__qualname__', value.__qualname__) + except AttributeError: + pass + try: + object.__delattr__(self, '__annotations__') + except AttributeError: + pass + try: + object.__setattr__(self, '__annotations__', value.__annotations__) + except AttributeError: + pass + + elif name == '__qualname__': + setattr(self.__wrapped__, name, value) + object.__setattr__(self, name, value) + + elif name == '__annotations__': + setattr(self.__wrapped__, name, value) + object.__setattr__(self, name, value) + + elif hasattr(type(self), name): + object.__setattr__(self, name, value) + + else: + setattr(self.__wrapped__, name, value) + + def __getattribute__(self, name): + try: + if name in chain(slots(self), ["__reduce__", "__reduce_ex__"]): + return super().__getattribute__(name) + except AttributeError: + pass + + return self.__wrapped__.__getattribute__(name) + + def __getattr__(self, name): + # If we are being asked to lookup '__wrapped__' then the + # '__init__()' method cannot have been called. 
+ + if name == '__wrapped__': + raise ValueError('wrapper has not been initialised') + + if name in slots(self): + return super().__getattr__(name) + + return getattr(self.__wrapped__, name) + + def __delattr__(self, name): + if name.startswith('_self_'): + object.__delattr__(self, name) + + elif name == '__wrapped__': + raise TypeError('__wrapped__ must be an object') + + elif name == '__qualname__': + object.__delattr__(self, name) + delattr(self.__wrapped__, name) + + elif hasattr(type(self), name): + object.__delattr__(self, name) + + else: + delattr(self.__wrapped__, name) + + def __add__(self, other): + return self.__wrapped__ + other + + def __sub__(self, other): + return self.__wrapped__ - other + + def __mul__(self, other): + return self.__wrapped__ * other + + def __div__(self, other): + return operator.div(self.__wrapped__, other) + + def __truediv__(self, other): + return operator.truediv(self.__wrapped__, other) + + def __floordiv__(self, other): + return self.__wrapped__ // other + + def __mod__(self, other): + return self.__wrapped__ % other + + def __divmod__(self, other): + return divmod(self.__wrapped__, other) + + def __pow__(self, other, *args): + return pow(self.__wrapped__, other, *args) + + def __lshift__(self, other): + return self.__wrapped__ << other + + def __rshift__(self, other): + return self.__wrapped__ >> other + + def __and__(self, other): + return self.__wrapped__ & other + + def __xor__(self, other): + return self.__wrapped__ ^ other + + def __or__(self, other): + return self.__wrapped__ | other + + def __radd__(self, other): + return other + self.__wrapped__ + + def __rsub__(self, other): + return other - self.__wrapped__ + + def __rmul__(self, other): + return other * self.__wrapped__ + + def __rdiv__(self, other): + return operator.div(other, self.__wrapped__) + + def __rtruediv__(self, other): + return operator.truediv(other, self.__wrapped__) + + def __rfloordiv__(self, other): + return other // self.__wrapped__ + + def __rmod__(self, other): + return other % self.__wrapped__ + + def __rdivmod__(self, other): + return divmod(other, self.__wrapped__) + + def __rpow__(self, other, *args): + return pow(other, self.__wrapped__, *args) + + def __rlshift__(self, other): + return other << self.__wrapped__ + + def __rrshift__(self, other): + return other >> self.__wrapped__ + + def __rand__(self, other): + return other & self.__wrapped__ + + def __rxor__(self, other): + return other ^ self.__wrapped__ + + def __ror__(self, other): + return other | self.__wrapped__ + + def __iadd__(self, other): + self.__wrapped__ += other + return self + + def __isub__(self, other): + self.__wrapped__ -= other + return self + + def __imul__(self, other): + self.__wrapped__ *= other + return self + + def __idiv__(self, other): + self.__wrapped__ = operator.idiv(self.__wrapped__, other) + return self + + def __itruediv__(self, other): + self.__wrapped__ = operator.itruediv(self.__wrapped__, other) + return self + + def __ifloordiv__(self, other): + self.__wrapped__ //= other + return self + + def __imod__(self, other): + self.__wrapped__ %= other + return self + + def __ipow__(self, other): + self.__wrapped__ **= other + return self + + def __ilshift__(self, other): + self.__wrapped__ <<= other + return self + + def __irshift__(self, other): + self.__wrapped__ >>= other + return self + + def __iand__(self, other): + self.__wrapped__ &= other + return self + + def __ixor__(self, other): + self.__wrapped__ ^= other + return self + + def __ior__(self, other): + self.__wrapped__ 
|= other + return self + + def __neg__(self): + return -self.__wrapped__ + + def __pos__(self): + return +self.__wrapped__ + + def __abs__(self): + return abs(self.__wrapped__) + + def __invert__(self): + return ~self.__wrapped__ + + def __int__(self): + return int(self.__wrapped__) + + def __long__(self): + return long(self.__wrapped__) + + def __float__(self): + return float(self.__wrapped__) + + def __complex__(self): + return complex(self.__wrapped__) + + def __oct__(self): + return oct(self.__wrapped__) + + def __hex__(self): + return hex(self.__wrapped__) + + def __index__(self): + return operator.index(self.__wrapped__) + + def __len__(self): + return len(self.__wrapped__) + + def __contains__(self, value): + return value in self.__wrapped__ + + def __getitem__(self, key): + return self.__wrapped__[key] + + def __setitem__(self, key, value): + self.__wrapped__[key] = value + + def __delitem__(self, key): + del self.__wrapped__[key] + + def __getslice__(self, i, j): + return self.__wrapped__[i:j] + + def __setslice__(self, i, j, value): + self.__wrapped__[i:j] = value + + def __delslice__(self, i, j): + del self.__wrapped__[i:j] + + def __enter__(self): + return self.__wrapped__.__enter__() + + def __exit__(self, *args, **kwargs): + return self.__wrapped__.__exit__(*args, **kwargs) + + def __iter__(self): + return iter(self.__wrapped__) + + def __copy__(self): + raise NotImplementedError('object proxy must define __copy__()') + + def __deepcopy__(self, memo): + raise NotImplementedError('object proxy must define __deepcopy__()') + + def __reduce__(self): + raise NotImplementedError( + 'object proxy must define __reduce_ex__()') + + def __reduce_ex__(self, protocol): + raise NotImplementedError( + 'object proxy must define __reduce_ex__()') + +class CallableObjectProxy(ObjectProxy): + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + return self.__wrapped__(*args, **kwargs) + +class PartialCallableObjectProxy(ObjectProxy): + __slots__ = ('_self_args', '_self_kwargs') + + def __init__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + if len(args) < 1: + raise TypeError('partial type takes at least one argument') + + wrapped, args = args[0], args[1:] + + if not callable(wrapped): + raise TypeError('the first argument must be callable') + + super(PartialCallableObjectProxy, self).__init__(wrapped) + + object.__setattr__(self, '_self_args', args) + object.__setattr__(self, '_self_kwargs', kwargs) + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + _args = self._self_args + args + + _kwargs = dict(self._self_kwargs) + _kwargs.update(kwargs) + + return self.__wrapped__(*_args, **_kwargs) + +class _FunctionWrapperBase(ObjectProxy): + + __slots__ = ('_self_instance', '_self_wrapper', '_self_enabled', + '_self_binding', '_self_parent', '_bfws') + + def __init__(self, wrapped, instance, wrapper, enabled=None, + binding='function', parent=None): + + super(_FunctionWrapperBase, self).__init__(wrapped) + + object.__setattr__(self, '_self_instance', instance) + object.__setattr__(self, '_self_wrapper', wrapper) + object.__setattr__(self, '_self_enabled', enabled) + object.__setattr__(self, '_self_binding', binding) + object.__setattr__(self, '_self_parent', parent) + object.__setattr__(self, '_bfws', list()) + + def __get__(self, instance, owner): + # This method is actually doing 
double duty for both unbound and + # bound derived wrapper classes. It should possibly be broken up + # and the distinct functionality moved into the derived classes. + # Can't do that straight away due to some legacy code which is + # relying on it being here in this base class. + # + # The distinguishing attribute which determines whether we are + # being called in an unbound or bound wrapper is the parent + # attribute. If binding has never occurred, then the parent will + # be None. + # + # First therefore, is if we are called in an unbound wrapper. In + # this case we perform the binding. + # + # We have one special case to worry about here. This is where we + # are decorating a nested class. In this case the wrapped class + # would not have a __get__() method to call. In that case we + # simply return self. + # + # Note that we otherwise still do binding even if instance is + # None and accessing an unbound instance method from a class. + # This is because we need to be able to later detect that + # specific case as we will need to extract the instance from the + # first argument of those passed in. + + if self._self_parent is None: + if not inspect.isclass(self.__wrapped__): + descriptor = self.__wrapped__.__get__(instance, owner) + ret = BoundFunctionWrapper( + descriptor, instance, + self._self_wrapper, self._self_enabled, + self._self_binding, self) + self._bfws.append(ret) + return ret + + return self + + # Now we have the case of binding occurring a second time on what + # was already a bound function. In this case we would usually + # return ourselves again. This mirrors what Python does. + # + # The special case this time is where we were originally bound + # with an instance of None and we were likely an instance + # method. In that case we rebind against the original wrapped + # function from the parent again. + + if self._self_instance is None and self._self_binding == 'function': + descriptor = self._self_parent.__wrapped__.__get__( + instance, owner) + + ret = BoundFunctionWrapper( + descriptor, instance, self._self_wrapper, + self._self_enabled, self._self_binding, + self._self_parent) + self._bfws.append(ret) + return ret + + return self + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + # If enabled has been specified, then evaluate it at this point + # and if the wrapper is not to be executed, then simply return + # the bound function rather than a bound wrapper for the bound + # function. When evaluating enabled, if it is callable we call + # it, otherwise we evaluate it as a boolean. + + if self._self_enabled is not None: + if callable(self._self_enabled): + if not self._self_enabled(): + return self.__wrapped__(*args, **kwargs) + elif not self._self_enabled: + return self.__wrapped__(*args, **kwargs) + + # This can occur where initial function wrapper was applied to + # a function that was already bound to an instance. In that case + # we want to extract the instance from the function and use it. + + if self._self_binding in ('function', 'classmethod'): + if self._self_instance is None: + instance = getattr(self.__wrapped__, '__self__', None) + if instance is not None: + return self._self_wrapper(self.__wrapped__, instance, + args, kwargs) + + # This is generally invoked when the wrapped function is being + # called as a normal function and is not bound to a class as an + # instance method. 
This is also invoked in the case where the + # wrapped function was a method, but this wrapper was in turn + # wrapped using the staticmethod decorator. + + return self._self_wrapper(self.__wrapped__, self._self_instance, + args, kwargs) + + def __set_name__(self, owner, name): + # This is a special method use to supply information to + # descriptors about what the name of variable in a class + # definition is. Not wanting to add this to ObjectProxy as not + # sure of broader implications of doing that. Thus restrict to + # FunctionWrapper used by decorators. + + if hasattr(self.__wrapped__, "__set_name__"): + self.__wrapped__.__set_name__(owner, name) + + def __instancecheck__(self, instance): + # This is a special method used by isinstance() to make checks + # instance of the `__wrapped__`. + return isinstance(instance, self.__wrapped__) + + def __subclasscheck__(self, subclass): + # This is a special method used by issubclass() to make checks + # about inheritance of classes. We need to upwrap any object + # proxy. Not wanting to add this to ObjectProxy as not sure of + # broader implications of doing that. Thus restrict to + # FunctionWrapper used by decorators. + + if hasattr(subclass, "__wrapped__"): + return issubclass(subclass.__wrapped__, self.__wrapped__) + else: + return issubclass(subclass, self.__wrapped__) + +class BoundFunctionWrapper(_FunctionWrapperBase): + + def __new__(cls, *args, **kwargs): + # In addition to constructing a BoundFoundWrapper internally, + # we need to be able to handle being created as if we were an + # instance of types.BoundMethod. Creating and using a weakref + # to a bound function relies on being able to do this. Since a + # BFW stands in for just such a function, it must support it. + # + # Maybe there's a better way of distinguishing these two + # cases, but for now, use the number of arguments. When we're + # constructed with 2, return a types.BoundMethod. When we're + # constructed with more than 2, return an instance of + # BoundFunctionWrapper. + if len(args) == 2: + func = args[0] # the function + obj = args[1] # the object it's bound to + return types.MethodType(func, obj) + + return super(BoundFunctionWrapper, cls).__new__(cls) + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + # If enabled has been specified, then evaluate it at this point + # and if the wrapper is not to be executed, then simply return + # the bound function rather than a bound wrapper for the bound + # function. When evaluating enabled, if it is callable we call + # it, otherwise we evaluate it as a boolean. + + if self._self_enabled is not None: + if callable(self._self_enabled): + if not self._self_enabled(): + return self.__wrapped__(*args, **kwargs) + elif not self._self_enabled: + return self.__wrapped__(*args, **kwargs) + + # We need to do things different depending on whether we are + # likely wrapping an instance method vs a static method or class + # method. + + if self._self_binding == 'function': + if self._self_instance is None: + # This situation can occur where someone is calling the + # instancemethod via the class type and passing the instance + # as the first argument. We need to shift the args before + # making the call to the wrapper and effectively bind the + # instance to the wrapped function using a partial so the + # wrapper doesn't see anything as being different. 
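A minimal sketch of the two call styles this branch distinguishes (assuming the vendored copy is importable as `_appmap.wrapt`; `Greeter`, `hello` and `audit` are made-up names):

from _appmap import wrapt

def audit(wrapped, instance, args, kwargs):
    # 'instance' is the receiver in both call styles below
    return wrapped(*args, **kwargs)

class Greeter:
    def hello(self, name):
        return "hello, " + name

Greeter.hello = wrapt.FunctionWrapper(Greeter.hello, audit)

g = Greeter()
g.hello("bound")             # bound call: the instance arrives via __get__
Greeter.hello(g, "unbound")  # call through the class: args[0] is shifted out and
                             # re-bound with PartialCallableObjectProxy, as above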
+ + if not args: + raise TypeError('missing 1 required positional argument') + + instance, args = args[0], args[1:] + wrapped = PartialCallableObjectProxy(self.__wrapped__, instance) + return self._self_wrapper(wrapped, instance, args, kwargs) + + return self._self_wrapper(self.__wrapped__, self._self_instance, + args, kwargs) + + else: + # As in this case we would be dealing with a classmethod or + # staticmethod, then _self_instance will only tell us whether + # when calling the classmethod or staticmethod they did it via an + # instance of the class it is bound to and not the case where + # done by the class type itself. We thus ignore _self_instance + # and use the __self__ attribute of the bound function instead. + # For a classmethod, this means instance will be the class type + # and for a staticmethod it will be None. This is probably the + # more useful thing we can pass through even though we loose + # knowledge of whether they were called on the instance vs the + # class type, as it reflects what they have available in the + # decoratored function. + + instance = getattr(self.__wrapped__, '__self__', None) + + return self._self_wrapper(self.__wrapped__, instance, args, + kwargs) + +class FunctionWrapper(_FunctionWrapperBase): + + __bound_function_wrapper__ = BoundFunctionWrapper + + # The code here is pretty complicated (see the comment below), and it's not completely clear to + # me whether it actually keeps any state. If it does, __reduce_ex__ needs to return a tuple so a + # new FunctionWrapper will be created. If it doesn't, then __reduce_ex__ can simply return a + # string, which would cause deepcopy to return the original FunctionWrapper. + # + # Update: We'll return the qualname of the wrapped function instead of a tuple allows a + # FunctionWrapper to be pickled (as the function it wraps). This seems to be adequate for + # generating AppMaps, so go with that. + + def __reduce_ex__(self, protocol): + return self.__wrapped__.__qualname__ + + # return FunctionWrapper, ( + # self.__wrapped__, + # self._self_wrapper, + # self._self_enabled, + # ) + + def __init__(self, wrapped, wrapper, enabled=None): + # What it is we are wrapping here could be anything. We need to + # try and detect specific cases though. In particular, we need + # to detect when we are given something that is a method of a + # class. Further, we need to know when it is likely an instance + # method, as opposed to a class or static method. This can + # become problematic though as there isn't strictly a fool proof + # method of knowing. + # + # The situations we could encounter when wrapping a method are: + # + # 1. The wrapper is being applied as part of a decorator which + # is a part of the class definition. In this case what we are + # given is the raw unbound function, classmethod or staticmethod + # wrapper objects. + # + # The problem here is that we will not know we are being applied + # in the context of the class being set up. This becomes + # important later for the case of an instance method, because in + # that case we just see it as a raw function and can't + # distinguish it from wrapping a normal function outside of + # a class context. + # + # 2. The wrapper is being applied when performing monkey + # patching of the class type afterwards and the method to be + # wrapped was retrieved direct from the __dict__ of the class + # type. This is effectively the same as (1) above. + # + # 3. 
The wrapper is being applied when performing monkey + # patching of the class type afterwards and the method to be + # wrapped was retrieved from the class type. In this case + # binding will have been performed where the instance against + # which the method is bound will be None at that point. + # + # This case is a problem because we can no longer tell if the + # method was a static method, plus if using Python3, we cannot + # tell if it was an instance method as the concept of an + # unnbound method no longer exists. + # + # 4. The wrapper is being applied when performing monkey + # patching of an instance of a class. In this case binding will + # have been perfomed where the instance was not None. + # + # This case is a problem because we can no longer tell if the + # method was a static method. + # + # Overall, the best we can do is look at the original type of the + # object which was wrapped prior to any binding being done and + # see if it is an instance of classmethod or staticmethod. In + # the case where other decorators are between us and them, if + # they do not propagate the __class__ attribute so that the + # isinstance() checks works, then likely this will do the wrong + # thing where classmethod and staticmethod are used. + # + # Since it is likely to be very rare that anyone even puts + # decorators around classmethod and staticmethod, likelihood of + # that being an issue is very small, so we accept it and suggest + # that those other decorators be fixed. It is also only an issue + # if a decorator wants to actually do things with the arguments. + # + # As to not being able to identify static methods properly, we + # just hope that that isn't something people are going to want + # to wrap, or if they do suggest they do it the correct way by + # ensuring that it is decorated in the class definition itself, + # or patch it in the __dict__ of the class type. + # + # So to get the best outcome we can, whenever we aren't sure what + # it is, we label it as a 'function'. If it was already bound and + # that is rebound later, we assume that it will be an instance + # method and try an cope with the possibility that the 'self' + # argument it being passed as an explicit argument and shuffle + # the arguments around to extract 'self' for use as the instance. + + if isinstance(wrapped, classmethod): + binding = 'classmethod' + + elif isinstance(wrapped, staticmethod): + binding = 'staticmethod' + + elif hasattr(wrapped, '__self__'): + if inspect.isclass(wrapped.__self__): + binding = 'classmethod' + else: + binding = 'function' + + else: + binding = 'function' + + super(FunctionWrapper, self).__init__(wrapped, None, wrapper, + enabled, binding) + +try: + if not os.environ.get('WRAPT_DISABLE_EXTENSIONS'): + from ._wrappers import (ObjectProxy, CallableObjectProxy, + PartialCallableObjectProxy, FunctionWrapper, + BoundFunctionWrapper, _FunctionWrapperBase) +except ImportError: + pass + +# Helper functions for applying wrappers to existing functions. + +def resolve_path(module, name): + if isinstance(module, string_types): + __import__(module) + module = sys.modules[module] + + parent = module + + path = name.split('.') + attribute = path[0] + + # We can't just always use getattr() because in doing + # that on a class it will cause binding to occur which + # will complicate things later and cause some things not + # to work. For the case of a class we therefore access + # the __dict__ directly. 
To cope though with the wrong + # class being given to us, or a method being moved into + # a base class, we need to walk the class hierarchy to + # work out exactly which __dict__ the method was defined + # in, as accessing it from __dict__ will fail if it was + # not actually on the class given. Fallback to using + # getattr() if we can't find it. If it truly doesn't + # exist, then that will fail. + + def lookup_attribute(parent, attribute): + if inspect.isclass(parent): + for cls in inspect.getmro(parent): + if attribute in vars(cls): + return vars(cls)[attribute] + else: + return getattr(parent, attribute) + else: + return getattr(parent, attribute) + + original = lookup_attribute(parent, attribute) + + for attribute in path[1:]: + parent = original + original = lookup_attribute(parent, attribute) + + return (parent, attribute, original) + +def apply_patch(parent, attribute, replacement): + setattr(parent, attribute, replacement) + +def wrap_object(module, name, factory, args=(), kwargs={}): + (parent, attribute, original) = resolve_path(module, name) + wrapper = factory(original, *args, **kwargs) + apply_patch(parent, attribute, wrapper) + return wrapper + +# Function for applying a proxy object to an attribute of a class +# instance. The wrapper works by defining an attribute of the same name +# on the class which is a descriptor and which intercepts access to the +# instance attribute. Note that this cannot be used on attributes which +# are themselves defined by a property object. + +class AttributeWrapper(object): + + def __init__(self, attribute, factory, args, kwargs): + self.attribute = attribute + self.factory = factory + self.args = args + self.kwargs = kwargs + + def __get__(self, instance, owner): + value = instance.__dict__[self.attribute] + return self.factory(value, *self.args, **self.kwargs) + + def __set__(self, instance, value): + instance.__dict__[self.attribute] = value + + def __delete__(self, instance): + del instance.__dict__[self.attribute] + +def wrap_object_attribute(module, name, factory, args=(), kwargs={}): + path, attribute = name.rsplit('.', 1) + parent = resolve_path(module, path)[2] + wrapper = AttributeWrapper(attribute, factory, args, kwargs) + apply_patch(parent, attribute, wrapper) + return wrapper + +# Functions for creating a simple decorator using a FunctionWrapper, +# plus short cut functions for applying wrappers to functions. These are +# for use when doing monkey patching. For a more featured way of +# creating decorators see the decorator decorator instead. 
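For orientation, a usage sketch of the short cuts described above (they are defined just below; `json.dumps` is an arbitrary real target, `log_dumps` is a made-up wrapper, and the vendored copy is assumed to be importable as `_appmap.wrapt`):

import json
from _appmap import wrapt

@wrapt.patch_function_wrapper('json', 'dumps')
def log_dumps(wrapped, instance, args, kwargs):
    # runs around every call to json.dumps once this module has been imported
    return wrapped(*args, **kwargs)

json.dumps({"ok": True})   # goes through log_dumps transparently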
+ +def function_wrapper(wrapper): + def _wrapper(wrapped, instance, args, kwargs): + target_wrapped = args[0] + if instance is None: + target_wrapper = wrapper + elif inspect.isclass(instance): + target_wrapper = wrapper.__get__(None, instance) + else: + target_wrapper = wrapper.__get__(instance, type(instance)) + return FunctionWrapper(target_wrapped, target_wrapper) + return FunctionWrapper(wrapper, _wrapper) + +def wrap_function_wrapper(module, name, wrapper): + return wrap_object(module, name, FunctionWrapper, (wrapper,)) + +def patch_function_wrapper(module, name): + def _wrapper(wrapper): + return wrap_object(module, name, FunctionWrapper, (wrapper,)) + return _wrapper + +def transient_function_wrapper(module, name): + def _decorator(wrapper): + def _wrapper(wrapped, instance, args, kwargs): + target_wrapped = args[0] + if instance is None: + target_wrapper = wrapper + elif inspect.isclass(instance): + target_wrapper = wrapper.__get__(None, instance) + else: + target_wrapper = wrapper.__get__(instance, type(instance)) + def _execute(wrapped, instance, args, kwargs): + (parent, attribute, original) = resolve_path(module, name) + replacement = FunctionWrapper(original, target_wrapper) + setattr(parent, attribute, replacement) + try: + return wrapped(*args, **kwargs) + finally: + setattr(parent, attribute, original) + return FunctionWrapper(target_wrapped, _execute) + return FunctionWrapper(wrapper, _wrapper) + return _decorator + +# A weak function proxy. This will work on instance methods, class +# methods, static methods and regular functions. Special treatment is +# needed for the method types because the bound method is effectively a +# transient object and applying a weak reference to one will immediately +# result in it being destroyed and the weakref callback called. The weak +# reference is therefore applied to the instance the method is bound to +# and the original function. The function is then rebound at the point +# of a call via the weak function proxy. + +def _weak_function_proxy_callback(ref, proxy, callback): + if proxy._self_expired: + return + + proxy._self_expired = True + + # This could raise an exception. We let it propagate back and let + # the weakref.proxy() deal with it, at which point it generally + # prints out a short error message direct to stderr and keeps going. + + if callback is not None: + callback(proxy) + +class WeakFunctionProxy(ObjectProxy): + + __slots__ = ('_self_expired', '_self_instance') + + def __init__(self, wrapped, callback=None): + # We need to determine if the wrapped function is actually a + # bound method. In the case of a bound method, we need to keep a + # reference to the original unbound function and the instance. + # This is necessary because if we hold a reference to the bound + # function, it will be the only reference and given it is a + # temporary object, it will almost immediately expire and + # the weakref callback triggered. So what is done is that we + # hold a reference to the instance and unbound function and + # when called bind the function to the instance once again and + # then call it. Note that we avoid using a nested function for + # the callback here so as not to cause any odd reference cycles. 
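A rough usage sketch of the class described above (`Service`, `ping` and `expired` are made-up names; the `_appmap.wrapt` import path is an assumption):

from _appmap import wrapt

class Service:
    def ping(self):
        return "pong"

def expired(proxy):
    print("referent collected")

svc = Service()
proxy = wrapt.WeakFunctionProxy(svc.ping, expired)
proxy()   # "pong": the function is re-bound to svc at call time, as described above
del svc   # with CPython's reference counting the instance is collected here,
          # so the weakref callback fires and expired(proxy) runs once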
+ + _callback = callback and functools.partial( + _weak_function_proxy_callback, proxy=self, + callback=callback) + + self._self_expired = False + + if isinstance(wrapped, _FunctionWrapperBase): + self._self_instance = weakref.ref(wrapped._self_instance, + _callback) + + if wrapped._self_parent is not None: + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped._self_parent, _callback)) + + else: + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped, _callback)) + + return + + try: + self._self_instance = weakref.ref(wrapped.__self__, _callback) + + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped.__func__, _callback)) + + except AttributeError: + self._self_instance = None + + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped, _callback)) + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + # We perform a boolean check here on the instance and wrapped + # function as that will trigger the reference error prior to + # calling if the reference had expired. + + instance = self._self_instance and self._self_instance() + function = self.__wrapped__ and self.__wrapped__ + + # If the wrapped function was originally a bound function, for + # which we retained a reference to the instance and the unbound + # function we need to rebind the function and then call it. If + # not just called the wrapped function. + + if instance is None: + return self.__wrapped__(*args, **kwargs) + + return function.__get__(instance, type(instance))(*args, **kwargs) diff --git a/vendor/patches/wrapt.patch b/vendor/patches/wrapt.patch new file mode 100644 index 00000000..0cacb335 --- /dev/null +++ b/vendor/patches/wrapt.patch @@ -0,0 +1,297 @@ +From 03c559f90ee502dc6425d41be75f561e8ac7e85b Mon Sep 17 00:00:00 2001 +From: Alan Potter +Date: Tue, 13 Apr 2021 06:43:00 -0400 +Subject: [PATCH 1/4] Add ObjectProxy.__getattribute__ + +This allows access to attributes set on ObjectProxy itself, rather than +always return the attribute of the proxied object. 
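A sketch of what this change enables (assuming the pure-Python wrappers are in use and the vendored copy is importable as `_appmap.wrapt`; `Tagged` and `_self_tag` are made-up names):

from _appmap import wrapt

class Tagged(wrapt.ObjectProxy):
    __slots__ = ('_self_tag',)

    def __init__(self, wrapped, tag):
        super().__init__(wrapped)
        self._self_tag = tag   # stored on the proxy itself, not on the wrapped object

p = Tagged([1, 2, 3], 'instrumented')
p.append(4)    # list behaviour is still forwarded to the wrapped object
p._self_tag    # 'instrumented': served from the proxy's own slots by __getattribute__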
+--- + wrapt/wrappers.py | 50 +++++++++++++++++++++++++++++++++++++++++------ + 1 file changed, 44 insertions(+), 6 deletions(-) + +diff --git a/vendor/_appmap/wrapt/wrappers.py b/vendor/_appmap/wrapt/wrappers.py +index 48f334e..ffafedf 100644 +--- a/vendor/_appmap/wrapt/wrappers.py ++++ b/vendor/_appmap/wrapt/wrappers.py +@@ -4,6 +4,8 @@ import functools + import operator + import weakref + import inspect ++from itertools import chain ++import types + + PY2 = sys.version_info[0] == 2 + +@@ -16,6 +18,10 @@ def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + return meta("NewBase", bases, {}) + ++def slots(obj): ++ return chain.from_iterable(getattr(cls, '__slots__', []) ++ for cls in type(obj).__mro__) ++ + class _ObjectProxyMethods(object): + + # We use properties to override the values of __module__ and +@@ -72,7 +78,7 @@ class _ObjectProxyMetaType(type): + + class ObjectProxy(with_metaclass(_ObjectProxyMetaType)): + +- __slots__ = '__wrapped__' ++ __slots__ = ('__wrapped__',) + + def __init__(self, wrapped): + object.__setattr__(self, '__wrapped__', wrapped) +@@ -201,13 +207,25 @@ class ObjectProxy(with_metaclass(_ObjectProxyMetaType)): + else: + setattr(self.__wrapped__, name, value) + ++ def __getattribute__(self, name): ++ try: ++ if name in slots(self): ++ return super().__getattribute__(name) ++ except AttributeError: ++ pass ++ ++ return self.__wrapped__.__getattribute__(name) ++ + def __getattr__(self, name): +- # If we are being to lookup '__wrapped__' then the ++ # If we are being asked to lookup '__wrapped__' then the + # '__init__()' method cannot have been called. + + if name == '__wrapped__': + raise ValueError('wrapper has not been initialised') + ++ if name in slots(self): ++ return super().__getattr__(name) ++ + return getattr(self.__wrapped__, name) + + def __delattr__(self, name): +@@ -454,6 +472,7 @@ class CallableObjectProxy(ObjectProxy): + return self.__wrapped__(*args, **kwargs) + + class PartialCallableObjectProxy(ObjectProxy): ++ __slots__ = ('_self_args', '_self_kwargs') + + def __init__(*args, **kwargs): + def _unpack_self(self, *args): +@@ -471,8 +490,8 @@ class PartialCallableObjectProxy(ObjectProxy): + + super(PartialCallableObjectProxy, self).__init__(wrapped) + +- self._self_args = args +- self._self_kwargs = kwargs ++ object.__setattr__(self, '_self_args', args) ++ object.__setattr__(self, '_self_kwargs', kwargs) + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): +@@ -533,7 +552,7 @@ class _FunctionWrapperBase(ObjectProxy): + if not inspect.isclass(self.__wrapped__): + descriptor = self.__wrapped__.__get__(instance, owner) + +- return self.__bound_function_wrapper__(descriptor, instance, ++ return BoundFunctionWrapper(descriptor, instance, + self._self_wrapper, self._self_enabled, + self._self_binding, self) + +@@ -552,7 +571,7 @@ class _FunctionWrapperBase(ObjectProxy): + descriptor = self._self_parent.__wrapped__.__get__( + instance, owner) + +- return self._self_parent.__bound_function_wrapper__( ++ return BoundFunctionWrapper( + descriptor, instance, self._self_wrapper, + self._self_enabled, self._self_binding, + self._self_parent) +@@ -627,6 +646,25 @@ class _FunctionWrapperBase(ObjectProxy): + + class BoundFunctionWrapper(_FunctionWrapperBase): + ++ def __new__(cls, *args, **kwargs): ++ # In addition to constructing a BoundFoundWrapper internally, ++ # we need to be able to handle being created as if we were an ++ # instance of types.BoundMethod. 
Creating and using a weakref ++ # to a bound function relies on being able to do this. Since a ++ # BFW stands in for just such a function, it must support it. ++ # ++ # Maybe there's a better way of distinguishing these two ++ # cases, but for now, use the number of arguments. When we're ++ # constructed with 2, return a types.BoundMethod. When we're ++ # constructed with more than 2, return an instance of ++ # BoundFunctionWrapper. ++ if len(args) == 2: ++ func = args[0] # the function ++ obj = args[1] # the object it's bound to ++ return types.MethodType(func, obj) ++ ++ return super(BoundFunctionWrapper, cls).__new__(cls) ++ + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args +-- +2.40.0 + + +From 5d888631313636563889572aa44c7ff033b52452 Mon Sep 17 00:00:00 2001 +From: Alan Potter +Date: Thu, 29 Apr 2021 07:26:14 -0400 +Subject: [PATCH 2/4] Keep a reference to new BoundFunctionWrappers + +Keep a reference to any new BoundFunctionWrappers created by +_FunctionWrapperBase. This keeps them from being immediately eligible +for collection. +--- + wrapt/wrappers.py | 23 ++++++++++++++--------- + 1 file changed, 14 insertions(+), 9 deletions(-) + +diff --git a/vendor/_appmap/wrapt/wrappers.py b/vendor/_appmap/wrapt/wrappers.py +index ffafedf..b4152e6 100644 +--- a/vendor/_appmap/wrapt/wrappers.py ++++ b/vendor/_appmap/wrapt/wrappers.py +@@ -509,7 +509,7 @@ class PartialCallableObjectProxy(ObjectProxy): + class _FunctionWrapperBase(ObjectProxy): + + __slots__ = ('_self_instance', '_self_wrapper', '_self_enabled', +- '_self_binding', '_self_parent') ++ '_self_binding', '_self_parent', '_bfws') + + def __init__(self, wrapped, instance, wrapper, enabled=None, + binding='function', parent=None): +@@ -521,6 +521,7 @@ class _FunctionWrapperBase(ObjectProxy): + object.__setattr__(self, '_self_enabled', enabled) + object.__setattr__(self, '_self_binding', binding) + object.__setattr__(self, '_self_parent', parent) ++ object.__setattr__(self, '_bfws', list()) + + def __get__(self, instance, owner): + # This method is actually doing double duty for both unbound and +@@ -551,10 +552,12 @@ class _FunctionWrapperBase(ObjectProxy): + if self._self_parent is None: + if not inspect.isclass(self.__wrapped__): + descriptor = self.__wrapped__.__get__(instance, owner) +- +- return BoundFunctionWrapper(descriptor, instance, +- self._self_wrapper, self._self_enabled, +- self._self_binding, self) ++ ret = BoundFunctionWrapper( ++ descriptor, instance, ++ self._self_wrapper, self._self_enabled, ++ self._self_binding, self) ++ self._bfws.append(ret) ++ return ret + + return self + +@@ -571,10 +574,12 @@ class _FunctionWrapperBase(ObjectProxy): + descriptor = self._self_parent.__wrapped__.__get__( + instance, owner) + +- return BoundFunctionWrapper( +- descriptor, instance, self._self_wrapper, +- self._self_enabled, self._self_binding, +- self._self_parent) ++ ret = BoundFunctionWrapper( ++ descriptor, instance, self._self_wrapper, ++ self._self_enabled, self._self_binding, ++ self._self_parent) ++ self._bfws.append(ret) ++ return ret + + return self + +-- +2.40.0 + + +From 0b98ff82a051e886daf2d018bc6f657522a5b5b8 Mon Sep 17 00:00:00 2001 +From: Alan Potter +Date: Thu, 22 Sep 2022 04:55:25 -0400 +Subject: [PATCH 3/4] Add FunctionWrapper.__reduce_ex__ + +Implement __reduce_ex__ in FunctionWrapper. Ensure that it will be +called, rather than forwarded to the wrapped function. This allows a +function that has been wrapped to be deepcopy'ed. 
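A sketch of the behaviour this buys, together with the follow-up patch below that makes `__reduce_ex__` return the wrapped function's qualname (`greet` and `noop` are made-up names; the `_appmap.wrapt` import path is an assumption):

import copy
from _appmap import wrapt

def noop(wrapped, instance, args, kwargs):
    return wrapped(*args, **kwargs)

def greet():
    return "hi"

wrapper = wrapt.FunctionWrapper(greet, noop)

# __reduce_ex__ is found on the wrapper itself instead of being forwarded to
# greet; because it returns a string, deepcopy hands back the wrapper unchanged.
copy.deepcopy(wrapper) is wrapper   # True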
+--- + wrapt/wrappers.py | 9 ++++++++- + 1 file changed, 8 insertions(+), 1 deletion(-) + +diff --git a/vendor/_appmap/wrapt/wrappers.py b/vendor/_appmap/wrapt/wrappers.py +index b4152e6..137220b 100644 +--- a/vendor/_appmap/wrapt/wrappers.py ++++ b/vendor/_appmap/wrapt/wrappers.py +@@ -209,7 +209,7 @@ class ObjectProxy(with_metaclass(_ObjectProxyMetaType)): + + def __getattribute__(self, name): + try: +- if name in slots(self): ++ if name in chain(slots(self), ["__reduce__", "__reduce_ex__"]): + return super().__getattribute__(name) + except AttributeError: + pass +@@ -735,6 +735,13 @@ class FunctionWrapper(_FunctionWrapperBase): + + __bound_function_wrapper__ = BoundFunctionWrapper + ++ # The code here is pretty complicated (see the comment below), and it's not completely clear to ++ # me whether it actually keeps any state. If it does, __reduce_ex__ needs to return a tuple so a ++ # new FunctionWrapper will be created. If it doesn't, then __reduce_ex__ could simply return a ++ # string, which would cause deepcopy to return the original FunctionWrapper. ++ def __reduce_ex__(self, protocol): ++ return FunctionWrapper, (self.__wrapped__, self._self_wrapper, self._self_enabled) ++ + def __init__(self, wrapped, wrapper, enabled=None): + # What it is we are wrapping here could be anything. We need to + # try and detect specific cases though. In particular, we need +-- +2.40.0 + + +From dd1c07569dea572e3fee6afade0b9649cf31335b Mon Sep 17 00:00:00 2001 +From: Alan Potter +Date: Tue, 6 Dec 2022 18:48:13 -0500 +Subject: [PATCH 4/4] fix: have __reduce_ex__ return qualname + +Have FunctionWrapper.__reduce_ex__ return the wrapped function's +qualname, instead of return a tuple (to create a new FunctionWrapper). + +This is fine for the purposes of creating AppMaps, and may even be +correct in general. +--- + wrapt/wrappers.py | 15 +++++++++++++-- + 1 file changed, 13 insertions(+), 2 deletions(-) + +diff --git a/vendor/_appmap/wrapt/wrappers.py b/vendor/_appmap/wrapt/wrappers.py +index 137220b..a7e9a3d 100644 +--- a/vendor/_appmap/wrapt/wrappers.py ++++ b/vendor/_appmap/wrapt/wrappers.py +@@ -737,10 +737,21 @@ class FunctionWrapper(_FunctionWrapperBase): + + # The code here is pretty complicated (see the comment below), and it's not completely clear to + # me whether it actually keeps any state. If it does, __reduce_ex__ needs to return a tuple so a +- # new FunctionWrapper will be created. If it doesn't, then __reduce_ex__ could simply return a ++ # new FunctionWrapper will be created. If it doesn't, then __reduce_ex__ can simply return a + # string, which would cause deepcopy to return the original FunctionWrapper. ++ # ++ # Update: We'll return the qualname of the wrapped function instead of a tuple allows a ++ # FunctionWrapper to be pickled (as the function it wraps). This seems to be adequate for ++ # generating AppMaps, so go with that. ++ + def __reduce_ex__(self, protocol): +- return FunctionWrapper, (self.__wrapped__, self._self_wrapper, self._self_enabled) ++ return self.__wrapped__.__qualname__ ++ ++ # return FunctionWrapper, ( ++ # self.__wrapped__, ++ # self._self_wrapper, ++ # self._self_enabled, ++ # ) + + def __init__(self, wrapped, wrapper, enabled=None): + # What it is we are wrapping here could be anything. 
We need to +-- +2.40.0 + diff --git a/vendor/vendor.txt b/vendor/vendor.txt new file mode 100644 index 00000000..79b49d28 --- /dev/null +++ b/vendor/vendor.txt @@ -0,0 +1 @@ +wrapt==1.15.0 ; python_version >= "3.7" and python_version < "4.0" \ No newline at end of file diff --git a/vendor/wrapt b/vendor/wrapt deleted file mode 160000 index 7dcec811..00000000 --- a/vendor/wrapt +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 7dcec8118747b8a480c1664cad3c16ab9e86f538