diff --git a/CHANGELOG.md b/CHANGELOG.md index 86a849d203..aaf317cc81 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,119 @@ # Changelog +## 1.45.0 + +This is the final 1.x release for the foreseeable future. Development will continue on the 2.x release line. The first 2.x version will be available in the next few weeks. + +### Various fixes & improvements + +- Allow upserting monitors (#2929) by @sentrivana + + It's now possible to provide `monitor_config` to the `monitor` decorator/context manager directly: + + ```python + from sentry_sdk.crons import monitor + + # All keys except `schedule` are optional + monitor_config = { + "schedule": {"type": "crontab", "value": "0 0 * * *"}, + "timezone": "Europe/Vienna", + "checkin_margin": 10, + "max_runtime": 10, + "failure_issue_threshold": 5, + "recovery_threshold": 5, + } + + @monitor(monitor_slug='my-fancy-slug', monitor_config=monitor_config) + def tell_the_world(): + print('My scheduled task...') + ``` + + Check out [the cron docs](https://docs.sentry.io/platforms/python/crons/) for details. + +- Add Django `signals_denylist` to exclude signals from `signals_spans` instrumentation (#2758) by @lieryan + + If you want to exclude some Django signals from performance tracking, you can use the new `signals_denylist` Django option: + + ```python + import django.db.models.signals + import sentry_sdk + + sentry_sdk.init( + ... + integrations=[ + DjangoIntegration( + ... + signals_denylist=[ + django.db.models.signals.pre_init, + django.db.models.signals.post_init, + ], + ), + ], + ) + ``` + +- `increment` for metrics (#2588) by @mitsuhiko + + `increment` and `incr` are equivalent, so you can pick whichever you prefer (see the usage sketch under the "Remove experimental metric summary options" entry below). + +- Add `value`, `unit` to `before_emit_metric` (#2958) by @sentrivana + + If you use a custom `before_emit_metric`, it will now receive four arguments (`key`, `value`, `unit` and `tags`) instead of just `key` and `tags`. + + ```python + def before_emit(key, value, unit, tags): + if key == "removed-metric": + return False + tags["extra"] = "foo" + del tags["release"] + return True + + sentry_sdk.init( + ... + _experiments={ + "before_emit_metric": before_emit, + } + ) + ``` + +- Remove experimental metric summary options (#2957) by @sentrivana + + The `_experiments` options `metrics_summary_sample_rate` and `should_summarize_metric` have been removed.
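+
+  If you had either option set, simply delete it from `_experiments`; emitting metrics itself is unchanged. Below is a minimal sketch (the DSN, metric name, tags, and `my_callback` are illustrative; `enable_metrics` is shown only for completeness and may already be on by default in your version) that also demonstrates the `increment`/`incr` alias mentioned above:
+
+  ```python
+  import sentry_sdk
+  from sentry_sdk import metrics
+
+  sentry_sdk.init(
+      dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # illustrative DSN
+      _experiments={
+          "enable_metrics": True,
+          # "metrics_summary_sample_rate": 1.0,      # removed in 1.45.0; delete
+          # "should_summarize_metric": my_callback,  # removed in 1.45.0; delete
+      },
+  )
+
+  # `incr` is an alias of `increment`; both emit the same counter metric.
+  metrics.increment("jobs.processed", value=1, tags={"queue": "default"})
+  metrics.incr("jobs.processed")  # same thing, shorter name
+  ```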
+ +- New normalization rules for metric keys, names, units, tags (#2946) by @sentrivana +- Change `data_category` from `statsd` to `metric_bucket` (#2954) by @cleptric +- Accessing `__mro__` might throw a `ValueError` (#2952) by @sentrivana +- Suppress prompt spawned by subprocess when using `pythonw` (#2936) by @collinbanko +- Handle `None` in GraphQL query #2715 (#2762) by @czyber +- Do not send "quiet" Sanic exceptions to Sentry (#2821) by @hamedsh +- Implement `metric_bucket` rate limits (#2933) by @cleptric +- Fix type hints for `monitor` decorator (#2944) by @szokeasaurusrex +- Remove deprecated `typing` imports in crons (#2945) by @szokeasaurusrex +- Make `monitor_config` a `TypedDict` (#2931) by @sentrivana +- Add `devenv-requirements.txt` and update env setup instructions (#2761) by @arr-ee +- Bump `types-protobuf` from `4.24.0.20240311` to `4.24.0.20240408` (#2941) by @dependabot +- Disable Codecov check run annotations (#2537) by @eliatcodecov + +## 1.44.1 + +### Various fixes & improvements + +- Make `monitor` async friendly (#2912) by @sentrivana + + You can now decorate your async functions with the `monitor` + decorator and they will correctly report their duration + and completion status. + +- Fixed `Event | None` runtime `TypeError` (#2928) by @szokeasaurusrex + +## 1.44.0 + +### Various fixes & improvements + +- ref: Define types at runtime (#2914) by @szokeasaurusrex +- Explicit reexport of types (#2866) (#2913) by @szokeasaurusrex +- feat(profiling): Add thread data to spans (#2843) by @Zylphrex + ## 1.43.0 ### Various fixes & improvements diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cf972cfd6c..05b642c502 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,6 @@ This file outlines the process to contribute to the SDK itself. For contributing Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There are a ton of great people in our Discord community ready to help you! - ## Submitting Changes - Fork the `sentry-python` repo and prepare your changes. @@ -64,7 +63,7 @@ This will make sure that your commits will have the correct coding style. ```bash cd sentry-python -pip install -r linter-requirements.txt +pip install -r devenv-requirements.txt pip install pre-commit @@ -75,12 +74,8 @@ That's it. You should be ready to make changes, run tests, and make commits! If ## Running Tests -To run the tests, first setup your development environment according to the instructions above. 
Then, install the required packages for running tests with the following command: -```bash -pip install -r test-requirements.txt -``` +You can run all tests with the following command: -Once the requirements are installed, you can run all tests with the following command: ```bash pytest tests/ ``` diff --git a/checkouts/data-schemas b/checkouts/data-schemas index 8232f178ae..1e17eb5472 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit 8232f178ae709232907b783d709f5fba80b26201 +Subproject commit 1e17eb54727a77681a1b9e845c9a5d55b52d35a1 diff --git a/codecov.yml b/codecov.yml index 93a5b687e4..6e4467b675 100644 --- a/codecov.yml +++ b/codecov.yml @@ -9,3 +9,5 @@ coverage: ignore: - "tests" - "sentry_sdk/_types.py" +github_checks: + annotations: false \ No newline at end of file diff --git a/devenv-requirements.txt b/devenv-requirements.txt new file mode 100644 index 0000000000..2b7abae3c2 --- /dev/null +++ b/devenv-requirements.txt @@ -0,0 +1,5 @@ +-r linter-requirements.txt +-r test-requirements.txt +mockupdb # required by the `pymongo` tests, which run because `pymongo` is installed via the linter requirements +pytest<7.0.0 # https://github.com/pytest-dev/pytest/issues/9621; see tox.ini +pytest-asyncio<=0.21.1 # https://github.com/pytest-dev/pytest-asyncio/issues/706 diff --git a/docs/conf.py b/docs/conf.py index 2cd901f5fa..5383a64224 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -30,7 +30,7 @@ copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) author = "Sentry Team and Contributors" -release = "1.43.0" +release = "1.45.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. diff --git a/linter-requirements.txt b/linter-requirements.txt index c390f5fe70..e86ffd506b 100644 --- a/linter-requirements.txt +++ b/linter-requirements.txt @@ -2,7 +2,7 @@ mypy black flake8==5.0.4 # flake8 depends on pyflakes>=3.0.0 and this dropped support for Python 2 "# type:" comments types-certifi -types-protobuf==4.24.0.20240311 # newer raises an error on mypy sentry_sdk +types-protobuf==4.24.0.20240408 # newer versions raise an error when running mypy on sentry_sdk types-redis types-setuptools pymongo # There is no separate types module.
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py index 49bffb3416..368db17138 100644 --- a/sentry_sdk/_types.py +++ b/sentry_sdk/_types.py @@ -113,7 +113,7 @@ "session", "internal", "profile", - "statsd", + "metric_bucket", "monitor", ] SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] @@ -178,3 +178,37 @@ BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal] MetricMetaKey = Tuple[MetricType, str, MeasurementUnit] + + MonitorConfigScheduleType = Literal["crontab", "interval"] + MonitorConfigScheduleUnit = Literal[ + "year", + "month", + "week", + "day", + "hour", + "minute", + "second", # not supported in Sentry and will result in a warning + ] + + MonitorConfigSchedule = TypedDict( + "MonitorConfigSchedule", + { + "type": MonitorConfigScheduleType, + "value": Union[int, str], + "unit": MonitorConfigScheduleUnit, + }, + total=False, + ) + + MonitorConfig = TypedDict( + "MonitorConfig", + { + "schedule": MonitorConfigSchedule, + "timezone": str, + "checkin_margin": int, + "max_runtime": int, + "failure_issue_threshold": int, + "recovery_threshold": int, + }, + total=False, + ) diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 738ca2e1c0..1cf37211e1 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -24,10 +24,12 @@ Event, EventProcessor, Hint, + MeasurementUnit, ProfilerMode, TracesSampler, TransactionProcessor, MetricTags, + MetricValue, ) # Experiments are feature flags to enable and disable certain unstable SDK @@ -47,9 +49,9 @@ "transport_zlib_compression_level": Optional[int], "transport_num_pools": Optional[int], "enable_metrics": Optional[bool], - "metrics_summary_sample_rate": Optional[float], - "should_summarize_metric": Optional[Callable[[str, MetricTags], bool]], - "before_emit_metric": Optional[Callable[[str, MetricTags], bool]], + "before_emit_metric": Optional[ + Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool] + ], "metric_code_locations": Optional[bool], }, total=False, @@ -191,6 +193,18 @@ class SPANDATA: Example: "http.handler" """ + THREAD_ID = "thread.id" + """ + Identifier of a thread from where the span originated. This should be a string. + Example: "7972576320" + """ + + THREAD_NAME = "thread.name" + """ + Label identifying a thread from where the span originated. This should be a string. + Example: "MainThread" + """ + class OP: CACHE_GET_ITEM = "cache.get_item" @@ -321,4 +335,4 @@ def _get_default_options(): del _get_default_options -VERSION = "1.43.0" +VERSION = "1.45.0" diff --git a/sentry_sdk/crons/_decorator.py b/sentry_sdk/crons/_decorator.py new file mode 100644 index 0000000000..2d0612f681 --- /dev/null +++ b/sentry_sdk/crons/_decorator.py @@ -0,0 +1,61 @@ +from functools import wraps +from inspect import iscoroutinefunction + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + from typing import Any, cast, overload, ParamSpec, TypeVar, Union + + P = ParamSpec("P") + R = TypeVar("R") + + +class MonitorMixin: + if TYPE_CHECKING: + + @overload + def __call__(self, fn): + # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]] + # Unfortunately, mypy does not give us any reliable way to type check the + # return value of an Awaitable (i.e. async function) for this overload, + # since calling iscoroutinefunction narrows the type to Callable[P, Awaitable[Any]]. + ... + + @overload + def __call__(self, fn): + # type: (Callable[P, R]) -> Callable[P, R] + ...
+ + def __call__( + self, + fn, # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]] + ): + # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]] + if iscoroutinefunction(fn): + return self._async_wrapper(fn) + + else: + if TYPE_CHECKING: + fn = cast("Callable[P, R]", fn) + return self._sync_wrapper(fn) + + def _async_wrapper(self, fn): + # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]] + @wraps(fn) + async def inner(*args: "P.args", **kwargs: "P.kwargs"): + # type: (...) -> R + with self: # type: ignore[attr-defined] + return await fn(*args, **kwargs) + + return inner + + def _sync_wrapper(self, fn): + # type: (Callable[P, R]) -> Callable[P, R] + @wraps(fn) + def inner(*args: "P.args", **kwargs: "P.kwargs"): + # type: (...) -> R + with self: # type: ignore[attr-defined] + return fn(*args, **kwargs) + + return inner diff --git a/sentry_sdk/crons/_decorator_py2.py b/sentry_sdk/crons/_decorator_py2.py new file mode 100644 index 0000000000..9e1da797e2 --- /dev/null +++ b/sentry_sdk/crons/_decorator_py2.py @@ -0,0 +1,21 @@ +from functools import wraps + +from sentry_sdk._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, ParamSpec, TypeVar + + P = ParamSpec("P") + R = TypeVar("R") + + +class MonitorMixin: + def __call__(self, fn): + # type: (Callable[P, R]) -> Callable[P, R] + @wraps(fn) + def inner(*args, **kwargs): + # type: (Any, Any) -> Any + with self: # type: ignore[attr-defined] + return fn(*args, **kwargs) + + return inner diff --git a/sentry_sdk/crons/api.py b/sentry_sdk/crons/api.py index 92d113a924..1a95583301 100644 --- a/sentry_sdk/crons/api.py +++ b/sentry_sdk/crons/api.py @@ -5,18 +5,18 @@ if TYPE_CHECKING: - from typing import Any, Dict, Optional - from sentry_sdk._types import Event + from typing import Optional + from sentry_sdk._types import Event, MonitorConfig def _create_check_in_event( - monitor_slug=None, - check_in_id=None, - status=None, - duration_s=None, - monitor_config=None, + monitor_slug=None, # type: Optional[str] + check_in_id=None, # type: Optional[str] + status=None, # type: Optional[str] + duration_s=None, # type: Optional[float] + monitor_config=None, # type: Optional[MonitorConfig] ): - # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> Event + # type: (...) -> Event options = Hub.current.client.options if Hub.current.client else {} check_in_id = check_in_id or uuid.uuid4().hex # type: str @@ -37,13 +37,13 @@ def _create_check_in_event( def capture_checkin( - monitor_slug=None, - check_in_id=None, - status=None, - duration=None, - monitor_config=None, + monitor_slug=None, # type: Optional[str] + check_in_id=None, # type: Optional[str] + status=None, # type: Optional[str] + duration=None, # type: Optional[float] + monitor_config=None, # type: Optional[MonitorConfig] ): - # type: (Optional[str], Optional[str], Optional[str], Optional[float], Optional[Dict[str, Any]]) -> str + # type: (...) 
-> str check_in_event = _create_check_in_event( monitor_slug=monitor_slug, check_in_id=check_in_id, diff --git a/sentry_sdk/crons/decorator.py b/sentry_sdk/crons/decorator.py index 34f4d0ac95..6c5f747b97 100644 --- a/sentry_sdk/crons/decorator.py +++ b/sentry_sdk/crons/decorator.py @@ -1,18 +1,25 @@ -import sys - -from sentry_sdk._compat import contextmanager, reraise +from sentry_sdk._compat import PY2 from sentry_sdk._types import TYPE_CHECKING from sentry_sdk.crons import capture_checkin from sentry_sdk.crons.consts import MonitorStatus from sentry_sdk.utils import now if TYPE_CHECKING: - from typing import Generator, Optional + from typing import Optional, Type + from types import TracebackType + from sentry_sdk._types import MonitorConfig + +if PY2: + from sentry_sdk.crons._decorator_py2 import MonitorMixin +else: + # This is in its own module so that we don't make Python 2 + # angry over `async def`s. + # Once we drop Python 2, remove the mixin and merge it + # into the main monitor class. + from sentry_sdk.crons._decorator import MonitorMixin -@contextmanager -def monitor(monitor_slug=None): - # type: (Optional[str]) -> Generator[None, None, None] +class monitor(MonitorMixin): # noqa: N801 """ Decorator/context manager to capture checkin events for a monitor. @@ -39,32 +46,35 @@ def test(arg): with sentry_sdk.monitor(monitor_slug='my-fancy-slug'): print(arg) ``` + """ + def __init__(self, monitor_slug=None, monitor_config=None): + # type: (Optional[str], Optional[MonitorConfig]) -> None + self.monitor_slug = monitor_slug + self.monitor_config = monitor_config - """ + def __enter__(self): + # type: () -> None + self.start_timestamp = now() + self.check_in_id = capture_checkin( + monitor_slug=self.monitor_slug, + status=MonitorStatus.IN_PROGRESS, + monitor_config=self.monitor_config, + ) + + def __exit__(self, exc_type, exc_value, traceback): + # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None + duration_s = now() - self.start_timestamp - start_timestamp = now() - check_in_id = capture_checkin( - monitor_slug=monitor_slug, status=MonitorStatus.IN_PROGRESS - ) + if exc_type is None and exc_value is None and traceback is None: + status = MonitorStatus.OK + else: + status = MonitorStatus.ERROR - try: - yield - except Exception: - duration_s = now() - start_timestamp capture_checkin( - monitor_slug=monitor_slug, - check_in_id=check_in_id, - status=MonitorStatus.ERROR, + monitor_slug=self.monitor_slug, + check_in_id=self.check_in_id, + status=status, duration=duration_s, + monitor_config=self.monitor_config, ) - exc_info = sys.exc_info() - reraise(*exc_info) - - duration_s = now() - start_timestamp - capture_checkin( - monitor_slug=monitor_slug, - check_in_id=check_in_id, - status=MonitorStatus.OK, - duration=duration_s, - ) diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py index 8f89bda238..fb214a45f4 100644 --- a/sentry_sdk/envelope.py +++ b/sentry_sdk/envelope.py @@ -261,7 +261,7 @@ def data_category(self): elif ty == "profile": return "profile" elif ty == "statsd": - return "statsd" + return "metric_bucket" elif ty == "check_in": return "monitor" else: diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py index f2e1aff48a..984197316f 100644 --- a/sentry_sdk/integrations/celery.py +++ b/sentry_sdk/integrations/celery.py @@ -3,6 +3,11 @@ import sys import time +try: + from typing import cast +except ImportError: + cast = lambda _, o: o + from sentry_sdk.api import continue_trace from
sentry_sdk.consts import OP from sentry_sdk._compat import reraise @@ -31,7 +36,15 @@ from typing import Union from sentry_sdk.tracing import Span - from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo + from sentry_sdk._types import ( + EventProcessor, + Event, + Hint, + ExcInfo, + MonitorConfig, + MonitorConfigScheduleType, + MonitorConfigScheduleUnit, + ) F = TypeVar("F", bound=Callable[..., Any]) @@ -416,7 +429,7 @@ def _get_headers(task): def _get_humanized_interval(seconds): - # type: (float) -> Tuple[int, str] + # type: (float) -> Tuple[int, MonitorConfigScheduleUnit] TIME_UNITS = ( # noqa: N806 ("day", 60 * 60 * 24.0), ("hour", 60 * 60.0), @@ -427,17 +440,17 @@ def _get_humanized_interval(seconds): for unit, divider in TIME_UNITS: if seconds >= divider: interval = int(seconds / divider) - return (interval, unit) + return (interval, cast("MonitorConfigScheduleUnit", unit)) return (int(seconds), "second") def _get_monitor_config(celery_schedule, app, monitor_name): - # type: (Any, Celery, str) -> Dict[str, Any] - monitor_config = {} # type: Dict[str, Any] - schedule_type = None # type: Optional[str] + # type: (Any, Celery, str) -> MonitorConfig + monitor_config = {} # type: MonitorConfig + schedule_type = None # type: Optional[MonitorConfigScheduleType] schedule_value = None # type: Optional[Union[str, int]] - schedule_unit = None # type: Optional[str] + schedule_unit = None # type: Optional[MonitorConfigScheduleUnit] if isinstance(celery_schedule, crontab): schedule_type = "crontab" diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py index 98834a4693..a38674f09d 100644 --- a/sentry_sdk/integrations/django/__init__.py +++ b/sentry_sdk/integrations/django/__init__.py @@ -114,6 +114,7 @@ class DjangoIntegration(Integration): middleware_spans = None signals_spans = None cache_spans = None + signals_denylist = [] # type: list[signals.Signal] def __init__( self, @@ -121,8 +122,9 @@ def __init__( middleware_spans=True, signals_spans=True, cache_spans=False, + signals_denylist=None, ): - # type: (str, bool, bool, bool) -> None + # type: (str, bool, bool, bool, Optional[list[signals.Signal]]) -> None if transaction_style not in TRANSACTION_STYLE_VALUES: raise ValueError( "Invalid value for transaction_style: %s (must be in %s)" @@ -132,6 +134,7 @@ def __init__( self.middleware_spans = middleware_spans self.signals_spans = signals_spans self.cache_spans = cache_spans + self.signals_denylist = signals_denylist or [] @staticmethod def setup_once(): diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py index 097a56c8aa..3d1aadab1f 100644 --- a/sentry_sdk/integrations/django/signals_handlers.py +++ b/sentry_sdk/integrations/django/signals_handlers.py @@ -78,7 +78,11 @@ def wrapper(*args, **kwargs): return wrapper integration = hub.get_integration(DjangoIntegration) - if integration and integration.signals_spans: + if ( + integration + and integration.signals_spans + and self not in integration.signals_denylist + ): for idx, receiver in enumerate(sync_receivers): sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver) diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py index 53d3cb6c07..7e0c690da0 100644 --- a/sentry_sdk/integrations/sanic.py +++ b/sentry_sdk/integrations/sanic.py @@ -342,6 +342,8 @@ def _capture_exception(exception): client_options=client.options, mechanism={"type": "sanic", "handled": False}, ) + if hint and 
hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet: + return hub.capture_event(event, hint=hint) diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py index 3d450e0692..5bc4184bee 100644 --- a/sentry_sdk/integrations/strawberry.py +++ b/sentry_sdk/integrations/strawberry.py @@ -145,6 +145,9 @@ def on_operation(self): operation_type = "query" op = OP.GRAPHQL_QUERY + if self.execution_context.query is None: + self.execution_context.query = "" + if self.execution_context.query.strip().startswith("mutation"): operation_type = "mutation" op = OP.GRAPHQL_MUTATION diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py index b59cf033ec..1e4f5a532e 100644 --- a/sentry_sdk/metrics.py +++ b/sentry_sdk/metrics.py @@ -54,8 +54,6 @@ _in_metrics = ContextVar("in_metrics", default=False) -_sanitize_key = partial(re.compile(r"[^a-zA-Z0-9_/.-]+").sub, "_") -_sanitize_value = partial(re.compile(r"[^\w\d\s_:/@\.{}\[\]$-]+", re.UNICODE).sub, "") _set = set # set is shadowed below GOOD_TRANSACTION_SOURCES = frozenset( @@ -67,6 +65,32 @@ ] ) +_sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "") +_sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_") +_sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "") +_TAG_VALUE_SANITIZATION_TABLE = { + "\n": "\\n", + "\r": "\\r", + "\t": "\\t", + "\\": "\\\\", + "|": "\\u{7c}", + ",": "\\u{2c}", +} + + +def _sanitize_tag_value(value): + # type: (str) -> str + return "".join( + [ + ( + _TAG_VALUE_SANITIZATION_TABLE[char] + if char in _TAG_VALUE_SANITIZATION_TABLE + else char + ) + for char in value + ] + ) + def get_code_location(stacklevel): # type: (int) -> Optional[Dict[str, Any]] @@ -269,7 +293,8 @@ def _encode_metrics(flushable_buckets): for timestamp, buckets in flushable_buckets: for bucket_key, metric in iteritems(buckets): metric_type, metric_name, metric_unit, metric_tags = bucket_key - metric_name = _sanitize_key(metric_name) + metric_name = _sanitize_metric_key(metric_name) + metric_unit = _sanitize_unit(metric_unit) _write(metric_name.encode("utf-8")) _write(b"@") _write(metric_unit.encode("utf-8")) @@ -285,7 +310,7 @@ def _encode_metrics(flushable_buckets): _write(b"|#") first = True for tag_key, tag_value in metric_tags: - tag_key = _sanitize_key(tag_key) + tag_key = _sanitize_tag_key(tag_key) if not tag_key: continue if first: @@ -294,7 +319,7 @@ def _encode_metrics(flushable_buckets): _write(b",") _write(tag_key.encode("utf-8")) _write(b":") - _write(_sanitize_value(tag_value).encode("utf-8")) + _write(_sanitize_tag_value(tag_value).encode("utf-8")) _write(b"|T") _write(str(timestamp).encode("ascii")) @@ -309,7 +334,9 @@ def _encode_locations(timestamp, code_locations): for key, loc in code_locations: metric_type, name, unit = key - mri = "{}:{}@{}".format(metric_type, _sanitize_key(name), unit) + mri = "{}:{}@{}".format( + metric_type, _sanitize_metric_key(name), _sanitize_unit(unit) + ) loc["type"] = "location" mapping.setdefault(mri, []).append(loc) @@ -557,6 +584,8 @@ def add( # Given the new weight we consider whether we want to force flush. self._consider_force_flush() + # For sets, we only record that a value has been added to the set but not which one. 
+ # See develop docs: https://develop.sentry.dev/sdk/metrics/#sets if local_aggregator is not None: local_value = float(added if ty == "s" else value) local_aggregator.add(ty, key, local_value, unit, serialized_tags) @@ -701,15 +730,13 @@ def _get_aggregator(): ) -def _get_aggregator_and_update_tags(key, tags): - # type: (str, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] +def _get_aggregator_and_update_tags(key, value, unit, tags): + # type: (str, Optional[MetricValue], MeasurementUnit, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]] hub = sentry_sdk.Hub.current client = hub.client if client is None or client.metrics_aggregator is None: return None, None, tags - experiments = client.options.get("_experiments", {}) - updated_tags = dict(tags or ()) # type: Dict[str, MetricTagValue] updated_tags.setdefault("release", client.options["release"]) updated_tags.setdefault("environment", client.options["environment"]) @@ -725,31 +752,20 @@ def _get_aggregator_and_update_tags(key, tags): if transaction_name: updated_tags.setdefault("transaction", transaction_name) if scope._span is not None: - sample_rate = experiments.get("metrics_summary_sample_rate") - # We default the sample rate of metrics summaries to 1.0 only when the sample rate is `None` since we - # want to honor the user's decision if they pass a valid float. - if sample_rate is None: - sample_rate = 1.0 - should_summarize_metric_callback = experiments.get( - "should_summarize_metric" - ) - if random.random() < sample_rate and ( - should_summarize_metric_callback is None - or should_summarize_metric_callback(key, updated_tags) - ): - local_aggregator = scope._span._get_local_aggregator() + local_aggregator = scope._span._get_local_aggregator() + experiments = client.options.get("_experiments", {}) before_emit_callback = experiments.get("before_emit_metric") if before_emit_callback is not None: with recursion_protection() as in_metrics: if not in_metrics: - if not before_emit_callback(key, updated_tags): + if not before_emit_callback(key, value, unit, updated_tags): return None, None, updated_tags return client.metrics_aggregator, local_aggregator, updated_tags -def incr( +def increment( key, # type: str value=1.0, # type: float unit="none", # type: MeasurementUnit @@ -759,13 +775,19 @@ def incr( ): # type: (...) 
-> None """Increments a counter.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) + aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( + key, value, unit, tags + ) if aggregator is not None: aggregator.add( "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel ) +# alias as incr is relatively common in python +incr = increment + + class _Timing(object): def __init__( self, @@ -816,7 +838,10 @@ def __exit__(self, exc_type, exc_value, tb): # type: (Any, Any, Any) -> None assert self._span, "did not enter" aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( - self.key, self.tags + self.key, + self.value, + self.unit, + self.tags, ) if aggregator is not None: elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered # type: ignore @@ -871,7 +896,9 @@ def timing( - it can be used as a decorator """ if value is not None: - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) + aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( + key, value, unit, tags + ) if aggregator is not None: aggregator.add( "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel @@ -889,7 +916,9 @@ def distribution( ): # type: (...) -> None """Emits a distribution.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) + aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( + key, value, unit, tags + ) if aggregator is not None: aggregator.add( "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel @@ -906,7 +935,9 @@ def set( ): # type: (...) -> None """Emits a set.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) + aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( + key, value, unit, tags + ) if aggregator is not None: aggregator.add( "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel @@ -923,7 +954,9 @@ def gauge( ): # type: (...) -> None """Emits a gauge.""" - aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(key, tags) + aggregator, local_aggregator, tags = _get_aggregator_and_update_tags( + key, value, unit, tags + ) if aggregator is not None: aggregator.add( "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py index ef4868f745..da5a4a8228 100644 --- a/sentry_sdk/profiler.py +++ b/sentry_sdk/profiler.py @@ -42,6 +42,8 @@ from sentry_sdk.utils import ( capture_internal_exception, filename_for_module, + get_current_thread_meta, + is_gevent, is_valid_sample_rate, logger, nanosecond_time, @@ -126,32 +128,16 @@ try: - from gevent import get_hub as get_gevent_hub # type: ignore - from gevent.monkey import get_original, is_module_patched # type: ignore + from gevent.monkey import get_original # type: ignore from gevent.threadpool import ThreadPool # type: ignore thread_sleep = get_original("time", "sleep") except ImportError: - - def get_gevent_hub(): - # type: () -> Any - return None - thread_sleep = time.sleep - def is_module_patched(*args, **kwargs): - # type: (*Any, **Any) -> bool - # unable to import from gevent means no modules have been patched - return False - ThreadPool = None -def is_gevent(): - # type: () -> bool - return is_module_patched("threading") or is_module_patched("_thread") - - _scheduler = None # type: Optional[Scheduler] # The default sampling frequency to use. 
This is set at 101 in order to @@ -361,7 +347,7 @@ def get_frame_name(frame): for cls in frame.f_locals["self"].__class__.__mro__: if name in cls.__dict__: return "{}.{}".format(cls.__name__, name) - except AttributeError: + except (AttributeError, ValueError): pass # if it was a class method, (decorated with `@classmethod`) @@ -377,7 +363,7 @@ def get_frame_name(frame): for cls in frame.f_locals["cls"].__mro__: if name in cls.__dict__: return "{}.{}".format(cls.__name__, name) - except AttributeError: + except (AttributeError, ValueError): pass # nothing we can do if it is a staticmethod (decorated with @staticmethod) @@ -389,52 +375,6 @@ def get_frame_name(frame): MAX_PROFILE_DURATION_NS = int(3e10) # 30 seconds -def get_current_thread_id(thread=None): - # type: (Optional[threading.Thread]) -> Optional[int] - """ - Try to get the id of the current thread, with various fall backs. - """ - - # if a thread is specified, that takes priority - if thread is not None: - try: - thread_id = thread.ident - if thread_id is not None: - return thread_id - except AttributeError: - pass - - # if the app is using gevent, we should look at the gevent hub first - # as the id there differs from what the threading module reports - if is_gevent(): - gevent_hub = get_gevent_hub() - if gevent_hub is not None: - try: - # this is undocumented, so wrap it in try except to be safe - return gevent_hub.thread_ident - except AttributeError: - pass - - # use the current thread's id if possible - try: - current_thread_id = threading.current_thread().ident - if current_thread_id is not None: - return current_thread_id - except AttributeError: - pass - - # if we can't get the current thread id, fall back to the main thread id - try: - main_thread_id = threading.main_thread().ident - if main_thread_id is not None: - return main_thread_id - except AttributeError: - pass - - # we've tried everything, time to give up - return None - - class Profile(object): def __init__( self, @@ -456,7 +396,7 @@ def __init__( # Various framework integrations are capable of overwriting the active thread id. # If it is set to `None` at the end of the profile, we fall back to the default. 
- self._default_active_thread_id = get_current_thread_id() or 0 # type: int + self._default_active_thread_id = get_current_thread_meta()[0] or 0 # type: int self.active_thread_id = None # type: Optional[int] try: @@ -479,7 +419,7 @@ def __init__( def update_active_thread_id(self): # type: () -> None - self.active_thread_id = get_current_thread_id() + self.active_thread_id = get_current_thread_meta()[0] logger.debug( "[Profiling] updating active thread id to {tid}".format( tid=self.active_thread_id diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index bac1ceaa60..7afe7e0944 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -5,7 +5,12 @@ import sentry_sdk from sentry_sdk.consts import INSTRUMENTER -from sentry_sdk.utils import is_valid_sample_rate, logger, nanosecond_time +from sentry_sdk.utils import ( + get_current_thread_meta, + is_valid_sample_rate, + logger, + nanosecond_time, +) from sentry_sdk._compat import datetime_utcnow, utc_from_timestamp, PY2 from sentry_sdk.consts import SPANDATA from sentry_sdk._types import TYPE_CHECKING @@ -172,6 +177,9 @@ def __init__( self._span_recorder = None # type: Optional[_SpanRecorder] self._local_aggregator = None # type: Optional[LocalAggregator] + thread_id, thread_name = get_current_thread_meta() + self.set_thread(thread_id, thread_name) + # TODO this should really live on the Transaction class rather than the Span # class def init_span_recorder(self, maxlen): @@ -418,6 +426,15 @@ def set_status(self, value): # type: (str) -> None self.status = value + def set_thread(self, thread_id, thread_name): + # type: (Optional[int], Optional[str]) -> None + + if thread_id is not None: + self.set_data(SPANDATA.THREAD_ID, str(thread_id)) + + if thread_name is not None: + self.set_data(SPANDATA.THREAD_NAME, thread_name) + def set_http_status(self, http_status): # type: (int) -> None self.set_tag( diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py index 9ea9cd0c98..d2fc734f7c 100644 --- a/sentry_sdk/transport.py +++ b/sentry_sdk/transport.py @@ -144,10 +144,22 @@ def _parse_rate_limits(header, now=None): for limit in header.split(","): try: - retry_after, categories, _ = limit.strip().split(":", 2) + parameters = limit.strip().split(":") + retry_after, categories = parameters[:2] + retry_after = now + timedelta(seconds=int(retry_after)) for category in categories and categories.split(";") or (None,): - yield category, retry_after + if category == "metric_bucket": + try: + namespaces = parameters[4].split(";") + except IndexError: + namespaces = [] + + if not namespaces or "custom" in namespaces: + yield category, retry_after + + else: + yield category, retry_after except (LookupError, ValueError): continue @@ -210,6 +222,7 @@ def record_lost_event( # quantity of 0 is actually 1 as we do not want to count # empty attachments as actually empty. 
quantity = len(item.get_bytes()) or 1 + elif data_category is None: raise TypeError("data category not provided") @@ -336,7 +349,14 @@ def _check_disabled(self, category): # type: (str) -> bool def _disabled(bucket): # type: (Any) -> bool + + # The envelope item type used for metrics is statsd + # whereas the rate limit category is metric_bucket + if bucket == "statsd": + bucket = "metric_bucket" + ts = self._disabled_until.get(bucket) + return ts is not None and ts > datetime_utcnow() return _disabled(category) or _disabled(None) @@ -402,7 +422,7 @@ def _send_envelope( new_items = [] for item in envelope.items: if self._check_disabled(item.data_category): - if item.data_category in ("transaction", "error", "default"): + if item.data_category in ("transaction", "error", "default", "statsd"): self.on_dropped_event("self_rate_limits") self.record_lost_event("ratelimit_backoff", item=item) else: diff --git a/sentry_sdk/types.py b/sentry_sdk/types.py index 5c46de7f88..16c57ceea4 100644 --- a/sentry_sdk/types.py +++ b/sentry_sdk/types.py @@ -11,4 +11,13 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from sentry_sdk._types import Event, Hint # noqa: F401 + from sentry_sdk._types import Event, Hint +else: + from typing import Any + + # The lines below allow the types to be imported from outside `if TYPE_CHECKING` + # guards. The types in this module are only intended to be used for type hints. + Event = Any + Hint = Any + +__all__ = ("Event", "Hint") diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index 150130a057..efacd6161b 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -106,9 +106,16 @@ def get_git_revision(): # type: () -> Optional[str] try: with open(os.path.devnull, "w+") as null: + # prevent command prompt windows from popping up on Windows + startupinfo = None + if sys.platform == "win32" or sys.platform == "cygwin": + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + revision = ( subprocess.Popen( ["git", "rev-parse", "HEAD"], + startupinfo=startupinfo, stdout=subprocess.PIPE, stderr=null, stdin=null, @@ -1746,9 +1753,14 @@ def now(): try: + from gevent import get_hub as get_gevent_hub from gevent.monkey import is_module_patched except ImportError: + def get_gevent_hub(): + # type: () -> Any + return None + def is_module_patched(*args, **kwargs): # type: (*Any, **Any) -> bool # unable to import from gevent means no modules have been patched @@ -1758,3 +1770,54 @@ def is_module_patched(*args, **kwargs): def is_gevent(): # type: () -> bool return is_module_patched("threading") or is_module_patched("_thread") + + +def get_current_thread_meta(thread=None): + # type: (Optional[threading.Thread]) -> Tuple[Optional[int], Optional[str]] + """ + Try to get the id and name of the current thread, with various fallbacks.
+ """ + + # if a thread is specified, that takes priority + if thread is not None: + try: + thread_id = thread.ident + thread_name = thread.name + if thread_id is not None: + return thread_id, thread_name + except AttributeError: + pass + + # if the app is using gevent, we should look at the gevent hub first + # as the id there differs from what the threading module reports + if is_gevent(): + gevent_hub = get_gevent_hub() + if gevent_hub is not None: + try: + # this is undocumented, so wrap it in try except to be safe + return gevent_hub.thread_ident, None + except AttributeError: + pass + + # use the current thread's id if possible + try: + thread = threading.current_thread() + thread_id = thread.ident + thread_name = thread.name + if thread_id is not None: + return thread_id, thread_name + except AttributeError: + pass + + # if we can't get the current thread id, fall back to the main thread id + try: + thread = threading.main_thread() + thread_id = thread.ident + thread_name = thread.name + if thread_id is not None: + return thread_id, thread_name + except AttributeError: + pass + + # we've tried everything, time to give up + return None, None diff --git a/setup.py b/setup.py index 9f4155cad4..14da2fc74c 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.43.0", + version="1.45.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", diff --git a/tests/conftest.py b/tests/conftest.py index 85c65462cb..c87111cbf7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -652,3 +652,15 @@ def patch_start_tracing_child(fake_transaction_is_none=False): return_value=fake_transaction, ): yield fake_start_child + + +class ApproxDict(dict): + def __eq__(self, other): + # For an ApproxDict to equal another dict, the other dict just needs to contain + # all the keys from the ApproxDict with the same values. + # + # The other dict may contain additional keys with any value. 
+ return all(key in other and other[key] == value for key, value in self.items()) + + def __ne__(self, other): + return not self.__eq__(other) diff --git a/tests/crons/__init__.py b/tests/crons/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_crons.py b/tests/crons/test_crons.py similarity index 66% rename from tests/test_crons.py rename to tests/crons/test_crons.py index 39d02a5d47..1f50a33751 100644 --- a/tests/test_crons.py +++ b/tests/crons/test_crons.py @@ -2,9 +2,8 @@ import uuid import sentry_sdk -from sentry_sdk.crons import capture_checkin - from sentry_sdk import Hub, configure_scope, set_level +from sentry_sdk.crons import capture_checkin try: from unittest import mock # python 3.3 and above @@ -34,27 +33,45 @@ def _break_world_contextmanager(name): return "Hello, {}".format(name) +@sentry_sdk.monitor(monitor_slug="ghi789", monitor_config=None) +def _no_monitor_config(): + return + + +@sentry_sdk.monitor( + monitor_slug="ghi789", + monitor_config={ + "schedule": {"type": "crontab", "value": "0 0 * * *"}, + "failure_issue_threshold": 5, + }, +) +def _with_monitor_config(): + return + + def test_decorator(sentry_init): sentry_init() with mock.patch( "sentry_sdk.crons.decorator.capture_checkin" - ) as fake_capture_checking: + ) as fake_capture_checkin: result = _hello_world("Grace") assert result == "Hello, Grace" # Check for initial checkin - fake_capture_checking.assert_has_calls( + fake_capture_checkin.assert_has_calls( [ - mock.call(monitor_slug="abc123", status="in_progress"), + mock.call( + monitor_slug="abc123", status="in_progress", monitor_config=None + ), ] ) # Check for final checkin - assert fake_capture_checking.call_args[1]["monitor_slug"] == "abc123" - assert fake_capture_checking.call_args[1]["status"] == "ok" - assert fake_capture_checking.call_args[1]["duration"] - assert fake_capture_checking.call_args[1]["check_in_id"] + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123" + assert fake_capture_checkin.call_args[1]["status"] == "ok" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] def test_decorator_error(sentry_init): @@ -62,24 +79,26 @@ def test_decorator_error(sentry_init): with mock.patch( "sentry_sdk.crons.decorator.capture_checkin" - ) as fake_capture_checking: + ) as fake_capture_checkin: with pytest.raises(ZeroDivisionError): result = _break_world("Grace") assert "result" not in locals() # Check for initial checkin - fake_capture_checking.assert_has_calls( + fake_capture_checkin.assert_has_calls( [ - mock.call(monitor_slug="def456", status="in_progress"), + mock.call( + monitor_slug="def456", status="in_progress", monitor_config=None + ), ] ) # Check for final checkin - assert fake_capture_checking.call_args[1]["monitor_slug"] == "def456" - assert fake_capture_checking.call_args[1]["status"] == "error" - assert fake_capture_checking.call_args[1]["duration"] - assert fake_capture_checking.call_args[1]["check_in_id"] + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456" + assert fake_capture_checkin.call_args[1]["status"] == "error" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] def test_contextmanager(sentry_init): @@ -87,22 +106,24 @@ def test_contextmanager(sentry_init): with mock.patch( "sentry_sdk.crons.decorator.capture_checkin" - ) as fake_capture_checking: + ) as fake_capture_checkin: result = _hello_world_contextmanager("Grace") assert result == 
"Hello, Grace" # Check for initial checkin - fake_capture_checking.assert_has_calls( + fake_capture_checkin.assert_has_calls( [ - mock.call(monitor_slug="abc123", status="in_progress"), + mock.call( + monitor_slug="abc123", status="in_progress", monitor_config=None + ), ] ) # Check for final checkin - assert fake_capture_checking.call_args[1]["monitor_slug"] == "abc123" - assert fake_capture_checking.call_args[1]["status"] == "ok" - assert fake_capture_checking.call_args[1]["duration"] - assert fake_capture_checking.call_args[1]["check_in_id"] + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123" + assert fake_capture_checkin.call_args[1]["status"] == "ok" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] def test_contextmanager_error(sentry_init): @@ -110,24 +131,26 @@ def test_contextmanager_error(sentry_init): with mock.patch( "sentry_sdk.crons.decorator.capture_checkin" - ) as fake_capture_checking: + ) as fake_capture_checkin: with pytest.raises(ZeroDivisionError): result = _break_world_contextmanager("Grace") assert "result" not in locals() # Check for initial checkin - fake_capture_checking.assert_has_calls( + fake_capture_checkin.assert_has_calls( [ - mock.call(monitor_slug="def456", status="in_progress"), + mock.call( + monitor_slug="def456", status="in_progress", monitor_config=None + ), ] ) # Check for final checkin - assert fake_capture_checking.call_args[1]["monitor_slug"] == "def456" - assert fake_capture_checking.call_args[1]["status"] == "error" - assert fake_capture_checking.call_args[1]["duration"] - assert fake_capture_checking.call_args[1]["check_in_id"] + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456" + assert fake_capture_checkin.call_args[1]["status"] == "error" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] def test_capture_checkin_simple(sentry_init): @@ -195,6 +218,8 @@ def test_monitor_config(sentry_init, capture_envelopes): monitor_config = { "schedule": {"type": "crontab", "value": "0 0 * * *"}, + "failure_issue_threshold": 5, + "recovery_threshold": 5, } capture_checkin(monitor_slug="abc123", monitor_config=monitor_config) @@ -212,6 +237,41 @@ def test_monitor_config(sentry_init, capture_envelopes): assert "monitor_config" not in check_in +def test_decorator_monitor_config(sentry_init, capture_envelopes): + sentry_init() + envelopes = capture_envelopes() + + _with_monitor_config() + + assert len(envelopes) == 2 + + for check_in_envelope in envelopes: + assert len(check_in_envelope.items) == 1 + check_in = check_in_envelope.items[0].payload.json + + assert check_in["monitor_slug"] == "ghi789" + assert check_in["monitor_config"] == { + "schedule": {"type": "crontab", "value": "0 0 * * *"}, + "failure_issue_threshold": 5, + } + + +def test_decorator_no_monitor_config(sentry_init, capture_envelopes): + sentry_init() + envelopes = capture_envelopes() + + _no_monitor_config() + + assert len(envelopes) == 2 + + for check_in_envelope in envelopes: + assert len(check_in_envelope.items) == 1 + check_in = check_in_envelope.items[0].payload.json + + assert check_in["monitor_slug"] == "ghi789" + assert "monitor_config" not in check_in + + def test_capture_checkin_sdk_not_initialized(): # Tests that the capture_checkin does not raise an error when Sentry SDK is not initialized. # sentry_init() is intentionally omitted. 
diff --git a/tests/crons/test_crons_async_py3.py b/tests/crons/test_crons_async_py3.py new file mode 100644 index 0000000000..53ec96d713 --- /dev/null +++ b/tests/crons/test_crons_async_py3.py @@ -0,0 +1,144 @@ +import pytest + +import sentry_sdk + +try: + from unittest import mock # python 3.3 and above +except ImportError: + import mock # python < 3.3 + + +@sentry_sdk.monitor(monitor_slug="abc123") +async def _hello_world(name): + return "Hello, {}".format(name) + + +@sentry_sdk.monitor(monitor_slug="def456") +async def _break_world(name): + 1 / 0 + return "Hello, {}".format(name) + + +async def my_coroutine(): + return + + +async def _hello_world_contextmanager(name): + with sentry_sdk.monitor(monitor_slug="abc123"): + await my_coroutine() + return "Hello, {}".format(name) + + +async def _break_world_contextmanager(name): + with sentry_sdk.monitor(monitor_slug="def456"): + await my_coroutine() + 1 / 0 + return "Hello, {}".format(name) + + +@pytest.mark.asyncio +async def test_decorator(sentry_init): + sentry_init() + + with mock.patch( + "sentry_sdk.crons.decorator.capture_checkin" + ) as fake_capture_checkin: + result = await _hello_world("Grace") + assert result == "Hello, Grace" + + # Check for initial checkin + fake_capture_checkin.assert_has_calls( + [ + mock.call( + monitor_slug="abc123", status="in_progress", monitor_config=None + ), + ] + ) + + # Check for final checkin + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123" + assert fake_capture_checkin.call_args[1]["status"] == "ok" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] + + +@pytest.mark.asyncio +async def test_decorator_error(sentry_init): + sentry_init() + + with mock.patch( + "sentry_sdk.crons.decorator.capture_checkin" + ) as fake_capture_checkin: + with pytest.raises(ZeroDivisionError): + result = await _break_world("Grace") + + assert "result" not in locals() + + # Check for initial checkin + fake_capture_checkin.assert_has_calls( + [ + mock.call( + monitor_slug="def456", status="in_progress", monitor_config=None + ), + ] + ) + + # Check for final checkin + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456" + assert fake_capture_checkin.call_args[1]["status"] == "error" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] + + +@pytest.mark.asyncio +async def test_contextmanager(sentry_init): + sentry_init() + + with mock.patch( + "sentry_sdk.crons.decorator.capture_checkin" + ) as fake_capture_checkin: + result = await _hello_world_contextmanager("Grace") + assert result == "Hello, Grace" + + # Check for initial checkin + fake_capture_checkin.assert_has_calls( + [ + mock.call( + monitor_slug="abc123", status="in_progress", monitor_config=None + ), + ] + ) + + # Check for final checkin + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123" + assert fake_capture_checkin.call_args[1]["status"] == "ok" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] + + +@pytest.mark.asyncio +async def test_contextmanager_error(sentry_init): + sentry_init() + + with mock.patch( + "sentry_sdk.crons.decorator.capture_checkin" + ) as fake_capture_checkin: + with pytest.raises(ZeroDivisionError): + result = await _break_world_contextmanager("Grace") + + assert "result" not in locals() + + # Check for initial checkin + fake_capture_checkin.assert_has_calls( + [ + mock.call( + monitor_slug="def456", 
status="in_progress", monitor_config=None + ), + ] + ) + + # Check for final checkin + assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456" + assert fake_capture_checkin.call_args[1]["status"] == "error" + assert fake_capture_checkin.call_args[1]["duration"] + assert fake_capture_checkin.call_args[1]["check_in_id"] diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py index de5cf19f44..90ca466175 100644 --- a/tests/integrations/aiohttp/test_aiohttp.py +++ b/tests/integrations/aiohttp/test_aiohttp.py @@ -9,6 +9,7 @@ from sentry_sdk import capture_message, start_transaction from sentry_sdk.integrations.aiohttp import AioHttpIntegration +from tests.conftest import ApproxDict try: from unittest import mock # python 3.3 and above @@ -495,15 +496,17 @@ async def handler(request): crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - assert crumb["data"] == { - "url": "http://127.0.0.1:{}/".format(raw_server.port), - "http.fragment": "", - "http.method": "GET", - "http.query": "", - "http.response.status_code": 200, - "reason": "OK", - "extra": "foo", - } + assert crumb["data"] == ApproxDict( + { + "url": "http://127.0.0.1:{}/".format(raw_server.port), + "http.fragment": "", + "http.method": "GET", + "http.query": "", + "http.response.status_code": 200, + "reason": "OK", + "extra": "foo", + } + ) @pytest.mark.asyncio diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py index a839031c3b..611d8ea9d9 100644 --- a/tests/integrations/asyncpg/test_asyncpg.py +++ b/tests/integrations/asyncpg/test_asyncpg.py @@ -34,6 +34,7 @@ from sentry_sdk.consts import SPANDATA from sentry_sdk.tracing_utils import record_sql_queries from sentry_sdk._compat import contextmanager +from tests.conftest import ApproxDict try: from unittest import mock @@ -46,13 +47,15 @@ ) CRUMBS_CONNECT = { "category": "query", - "data": { - "db.name": PG_NAME, - "db.system": "postgresql", - "db.user": PG_USER, - "server.address": PG_HOST, - "server.port": PG_PORT, - }, + "data": ApproxDict( + { + "db.name": PG_NAME, + "db.system": "postgresql", + "db.user": PG_USER, + "server.address": PG_HOST, + "server.port": PG_PORT, + } + ), "message": "connect", "type": "default", } diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py index 5812c2c1bb..8c05b72a3e 100644 --- a/tests/integrations/boto3/test_s3.py +++ b/tests/integrations/boto3/test_s3.py @@ -4,6 +4,7 @@ from sentry_sdk import Hub from sentry_sdk.integrations.boto3 import Boto3Integration +from tests.conftest import ApproxDict from tests.integrations.boto3.aws_mock import MockResponse from tests.integrations.boto3 import read_fixture @@ -65,12 +66,14 @@ def test_streaming(sentry_init, capture_events): span1 = event["spans"][0] assert span1["op"] == "http.client" assert span1["description"] == "aws.s3.GetObject" - assert span1["data"] == { - "http.method": "GET", - "aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf", - "http.fragment": "", - "http.query": "", - } + assert span1["data"] == ApproxDict( + { + "http.method": "GET", + "aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf", + "http.fragment": "", + "http.query": "", + } + ) span2 = event["spans"][1] assert span2["op"] == "http.client.stream" @@ -123,7 +126,13 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): transaction.finish() (event,) = events - assert event["spans"][0]["data"] == { - 
"http.method": "GET", - # no url data - } + assert event["spans"][0]["data"] == ApproxDict( + { + "http.method": "GET", + # no url data + } + ) + + assert "aws.request.url" not in event["spans"][0]["data"] + assert "http.fragment" not in event["spans"][0]["data"] + assert "http.query" not in event["spans"][0]["data"] diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py index 0d44ee992e..c6eb55536c 100644 --- a/tests/integrations/celery/test_celery.py +++ b/tests/integrations/celery/test_celery.py @@ -10,6 +10,7 @@ ) from sentry_sdk._compat import text_type +from tests.conftest import ApproxDict from celery import Celery, VERSION from celery.bin import worker @@ -218,6 +219,7 @@ def dummy_task(x, y): assert execution_event["spans"] == [] assert submission_event["spans"] == [ { + "data": ApproxDict(), "description": "dummy_task", "op": "queue.submit.celery", "parent_span_id": submission_event["contexts"]["trace"]["span_id"], diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py index 74a04fac44..b39f722c52 100644 --- a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py +++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py @@ -10,6 +10,7 @@ from sentry_sdk import start_transaction, capture_message from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration +from tests.conftest import ApproxDict EXPECT_PARAMS_IN_SELECT = True if clickhouse_driver.VERSION < (0, 2, 6): @@ -102,6 +103,9 @@ def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None: if not EXPECT_PARAMS_IN_SELECT: expected_breadcrumbs[-1]["data"].pop("db.params", None) + for crumb in expected_breadcrumbs: + crumb["data"] = ApproxDict(crumb["data"]) + for crumb in event["breadcrumbs"]["values"]: crumb.pop("timestamp", None) @@ -201,6 +205,9 @@ def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> if not EXPECT_PARAMS_IN_SELECT: expected_breadcrumbs[-1]["data"].pop("db.params", None) + for crumb in expected_breadcrumbs: + crumb["data"] = ApproxDict(crumb["data"]) + for crumb in event["breadcrumbs"]["values"]: crumb.pop("timestamp", None) @@ -313,6 +320,9 @@ def test_clickhouse_client_spans( if not EXPECT_PARAMS_IN_SELECT: expected_spans[-1]["data"].pop("db.params", None) + for span in expected_spans: + span["data"] = ApproxDict(span["data"]) + for span in event["spans"]: span.pop("span_id", None) span.pop("start_timestamp", None) @@ -434,6 +444,9 @@ def test_clickhouse_client_spans_with_pii( if not EXPECT_PARAMS_IN_SELECT: expected_spans[-1]["data"].pop("db.params", None) + for span in expected_spans: + span["data"] = ApproxDict(span["data"]) + for span in event["spans"]: span.pop("span_id", None) span.pop("start_timestamp", None) @@ -529,6 +542,9 @@ def test_clickhouse_dbapi_breadcrumbs(sentry_init, capture_events) -> None: if not EXPECT_PARAMS_IN_SELECT: expected_breadcrumbs[-1]["data"].pop("db.params", None) + for crumb in expected_breadcrumbs: + crumb["data"] = ApproxDict(crumb["data"]) + for crumb in event["breadcrumbs"]["values"]: crumb.pop("timestamp", None) @@ -629,6 +645,9 @@ def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> N if not EXPECT_PARAMS_IN_SELECT: expected_breadcrumbs[-1]["data"].pop("db.params", None) + for crumb in expected_breadcrumbs: + crumb["data"] = ApproxDict(crumb["data"]) + for crumb in event["breadcrumbs"]["values"]: crumb.pop("timestamp", None) @@ -739,6 
+758,9 @@ def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) if not EXPECT_PARAMS_IN_SELECT: expected_spans[-1]["data"].pop("db.params", None) + for span in expected_spans: + span["data"] = ApproxDict(span["data"]) + for span in event["spans"]: span.pop("span_id", None) span.pop("start_timestamp", None) @@ -860,6 +882,9 @@ def test_clickhouse_dbapi_spans_with_pii( if not EXPECT_PARAMS_IN_SELECT: expected_spans[-1]["data"].pop("db.params", None) + for span in expected_spans: + span["data"] = ApproxDict(span["data"]) + for span in event["spans"]: span.pop("span_id", None) span.pop("start_timestamp", None) diff --git a/tests/integrations/django/myapp/signals.py b/tests/integrations/django/myapp/signals.py new file mode 100644 index 0000000000..3dab92b8d9 --- /dev/null +++ b/tests/integrations/django/myapp/signals.py @@ -0,0 +1,15 @@ +from django.core import signals +from django.dispatch import receiver + +myapp_custom_signal = signals.Signal() +myapp_custom_signal_silenced = signals.Signal() + + +@receiver(myapp_custom_signal) +def signal_handler(sender, **kwargs): + assert sender == "hello" + + +@receiver(myapp_custom_signal_silenced) +def signal_handler_silenced(sender, **kwargs): + assert sender == "hello" diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py index 92621b07a2..672a9b15ae 100644 --- a/tests/integrations/django/myapp/urls.py +++ b/tests/integrations/django/myapp/urls.py @@ -76,6 +76,11 @@ def path(path, *args, **kwargs): name="csrf_hello_not_exempt", ), path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"), + path( + "send-myapp-custom-signal", + views.send_myapp_custom_signal, + name="send_myapp_custom_signal", + ), ] # async views diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py index 193147003b..294895430b 100644 --- a/tests/integrations/django/myapp/views.py +++ b/tests/integrations/django/myapp/views.py @@ -14,6 +14,11 @@ from django.views.decorators.csrf import csrf_exempt from django.views.generic import ListView +from tests.integrations.django.myapp.signals import ( + myapp_custom_signal, + myapp_custom_signal_silenced, +) + try: from rest_framework.decorators import api_view from rest_framework.response import Response @@ -253,3 +258,10 @@ def thread_ids_sync(*args, **kwargs): my_async_view = None thread_ids_async = None post_echo_async = None + + +@csrf_exempt +def send_myapp_custom_signal(request): + myapp_custom_signal.send(sender="hello") + myapp_custom_signal_silenced.send(sender="hello") + return HttpResponse("ok") diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py index 095657fd8a..1efe4be278 100644 --- a/tests/integrations/django/test_basic.py +++ b/tests/integrations/django/test_basic.py @@ -27,8 +27,9 @@ from sentry_sdk.integrations.django.caching import _get_span_description from sentry_sdk.integrations.executing import ExecutingIntegration from sentry_sdk.tracing import Span -from tests.conftest import unpack_werkzeug_response +from tests.conftest import ApproxDict, unpack_werkzeug_response from tests.integrations.django.myapp.wsgi import application +from tests.integrations.django.myapp.signals import myapp_custom_signal_silenced from tests.integrations.django.utils import pytest_mark_django_db_decorator DJANGO_VERSION = DJANGO_VERSION[:2] @@ -1035,6 +1036,47 @@ def test_signals_spans_disabled(sentry_init, client, capture_events): assert not transaction["spans"] 
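The Django changes that follow wire up an end-to-end test for the new `signals_denylist` option: `signals.py` defines two custom signals with receivers, a view fires both, and the test below expects an `event.django` span only for the non-silenced one. Conceptually, the integration just has to skip wrapping receivers of denylisted signals; a sketch of that idea (not the SDK's actual implementation; `start_span` stands in for the SDK's span API):

```python
def wrap_receiver_for_spans(signal, receiver, signals_denylist, start_span):
    # Receivers of denylisted signals are left untouched, so no
    # "event.django" span is ever recorded for them.
    if signal in signals_denylist:
        return receiver

    def wrapped(*args, **kwargs):
        description = "{}.{}".format(receiver.__module__, receiver.__name__)
        with start_span(op="event.django", description=description):
            return receiver(*args, **kwargs)

    return wrapped
```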
+EXPECTED_SIGNALS_SPANS_FILTERED = """\ +- op="http.server": description=null + - op="event.django": description="django.db.reset_queries" + - op="event.django": description="django.db.close_old_connections" + - op="event.django": description="tests.integrations.django.myapp.signals.signal_handler"\ +""" + + +def test_signals_spans_filtering(sentry_init, client, capture_events, render_span_tree): + sentry_init( + integrations=[ + DjangoIntegration( + middleware_spans=False, + signals_denylist=[ + myapp_custom_signal_silenced, + ], + ), + ], + traces_sample_rate=1.0, + ) + events = capture_events() + + client.get(reverse("send_myapp_custom_signal")) + + (transaction,) = events + + assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS_FILTERED + + assert transaction["spans"][0]["op"] == "event.django" + assert transaction["spans"][0]["description"] == "django.db.reset_queries" + + assert transaction["spans"][1]["op"] == "event.django" + assert transaction["spans"][1]["description"] == "django.db.close_old_connections" + + assert transaction["spans"][2]["op"] == "event.django" + assert ( + transaction["spans"][2]["description"] + == "tests.integrations.django.myapp.signals.signal_handler" + ) + + def test_csrf(sentry_init, client): """ Assert that CSRF view decorator works even with the view wrapped in our own @@ -1237,14 +1279,14 @@ def test_cache_spans_middleware( assert first_event["spans"][0]["description"].startswith( "get views.decorators.cache.cache_header." ) - assert first_event["spans"][0]["data"] == {"cache.hit": False} + assert first_event["spans"][0]["data"] == ApproxDict({"cache.hit": False}) assert len(second_event["spans"]) == 2 assert second_event["spans"][0]["op"] == "cache.get_item" assert second_event["spans"][0]["description"].startswith( "get views.decorators.cache.cache_header." ) - assert second_event["spans"][0]["data"] == {"cache.hit": False} + assert second_event["spans"][0]["data"] == ApproxDict({"cache.hit": False}) assert second_event["spans"][1]["op"] == "cache.get_item" assert second_event["spans"][1]["description"].startswith( @@ -1279,14 +1321,14 @@ def test_cache_spans_decorator(sentry_init, client, capture_events, use_django_c assert first_event["spans"][0]["description"].startswith( "get views.decorators.cache.cache_header." ) - assert first_event["spans"][0]["data"] == {"cache.hit": False} + assert first_event["spans"][0]["data"] == ApproxDict({"cache.hit": False}) assert len(second_event["spans"]) == 2 assert second_event["spans"][0]["op"] == "cache.get_item" assert second_event["spans"][0]["description"].startswith( "get views.decorators.cache.cache_header." ) - assert second_event["spans"][0]["data"] == {"cache.hit": False} + assert second_event["spans"][0]["data"] == ApproxDict({"cache.hit": False}) assert second_event["spans"][1]["op"] == "cache.get_item" assert second_event["spans"][1]["description"].startswith( @@ -1323,7 +1365,7 @@ def test_cache_spans_templatetag( assert first_event["spans"][0]["description"].startswith( "get template.cache.some_identifier." 
) - assert first_event["spans"][0]["data"] == {"cache.hit": False} + assert first_event["spans"][0]["data"] == ApproxDict({"cache.hit": False}) assert len(second_event["spans"]) == 1 assert second_event["spans"][0]["op"] == "cache.get_item" diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py index 0813d655ae..3f49c0a0f4 100644 --- a/tests/integrations/grpc/test_grpc.py +++ b/tests/integrations/grpc/test_grpc.py @@ -11,6 +11,7 @@ from sentry_sdk import Hub, start_transaction from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration +from tests.conftest import ApproxDict from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage from tests.integrations.grpc.grpc_test_service_pb2_grpc import ( gRPCTestServiceServicer, @@ -151,11 +152,13 @@ def test_grpc_client_starts_span(sentry_init, capture_events_forksafe): span["description"] == "unary unary call to /grpc_test_server.gRPCTestService/TestServe" ) - assert span["data"] == { - "type": "unary unary", - "method": "/grpc_test_server.gRPCTestService/TestServe", - "code": "OK", - } + assert span["data"] == ApproxDict( + { + "type": "unary unary", + "method": "/grpc_test_server.gRPCTestService/TestServe", + "code": "OK", + } + ) @pytest.mark.forked @@ -183,10 +186,12 @@ def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksa span["description"] == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream" ) - assert span["data"] == { - "type": "unary stream", - "method": "/grpc_test_server.gRPCTestService/TestUnaryStream", - } + assert span["data"] == ApproxDict( + { + "type": "unary stream", + "method": "/grpc_test_server.gRPCTestService/TestUnaryStream", + } + ) # using unittest.mock.Mock not possible because grpc verifies @@ -229,11 +234,13 @@ def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe): span["description"] == "unary unary call to /grpc_test_server.gRPCTestService/TestServe" ) - assert span["data"] == { - "type": "unary unary", - "method": "/grpc_test_server.gRPCTestService/TestServe", - "code": "OK", - } + assert span["data"] == ApproxDict( + { + "type": "unary unary", + "method": "/grpc_test_server.gRPCTestService/TestServe", + "code": "OK", + } + ) @pytest.mark.forked diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py index 0b8571adca..3e21188ec8 100644 --- a/tests/integrations/grpc/test_grpc_aio.py +++ b/tests/integrations/grpc/test_grpc_aio.py @@ -11,6 +11,7 @@ from sentry_sdk import Hub, start_transaction from sentry_sdk.consts import OP from sentry_sdk.integrations.grpc import GRPCIntegration +from tests.conftest import ApproxDict from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage from tests.integrations.grpc.grpc_test_service_pb2_grpc import ( gRPCTestServiceServicer, @@ -161,11 +162,13 @@ async def test_grpc_client_starts_span( span["description"] == "unary unary call to /grpc_test_server.gRPCTestService/TestServe" ) - assert span["data"] == { - "type": "unary unary", - "method": "/grpc_test_server.gRPCTestService/TestServe", - "code": "OK", - } + assert span["data"] == ApproxDict( + { + "type": "unary unary", + "method": "/grpc_test_server.gRPCTestService/TestServe", + "code": "OK", + } + ) @pytest.mark.asyncio @@ -190,10 +193,12 @@ async def test_grpc_client_unary_stream_starts_span( span["description"] == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream" ) - assert span["data"] == { - 
"type": "unary stream", - "method": "/grpc_test_server.gRPCTestService/TestUnaryStream", - } + assert span["data"] == ApproxDict( + { + "type": "unary stream", + "method": "/grpc_test_server.gRPCTestService/TestUnaryStream", + } + ) @pytest.mark.asyncio diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py index e141faa282..c4ca97321c 100644 --- a/tests/integrations/httpx/test_httpx.py +++ b/tests/integrations/httpx/test_httpx.py @@ -7,6 +7,7 @@ from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import MATCH_ALL, SPANDATA from sentry_sdk.integrations.httpx import HttpxIntegration +from tests.conftest import ApproxDict try: from unittest import mock # python 3.3 and above @@ -46,15 +47,17 @@ def before_breadcrumb(crumb, hint): crumb = event["breadcrumbs"]["values"][0] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - assert crumb["data"] == { - "url": url, - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_FRAGMENT: "", - SPANDATA.HTTP_QUERY: "", - SPANDATA.HTTP_STATUS_CODE: 200, - "reason": "OK", - "extra": "foo", - } + assert crumb["data"] == ApproxDict( + { + "url": url, + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_FRAGMENT: "", + SPANDATA.HTTP_QUERY: "", + SPANDATA.HTTP_STATUS_CODE: 200, + "reason": "OK", + "extra": "foo", + } + ) @pytest.mark.parametrize( @@ -291,9 +294,15 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): capture_message("Testing!") (event,) = events - assert event["breadcrumbs"]["values"][0]["data"] == { - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_STATUS_CODE: 200, - "reason": "OK", - # no url related data - } + assert event["breadcrumbs"]["values"][0]["data"] == ApproxDict( + { + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_STATUS_CODE: 200, + "reason": "OK", + # no url related data + } + ) + + assert "url" not in event["breadcrumbs"]["values"][0]["data"] + assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"] + assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"] diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py index 7233b8f908..4f024a2824 100644 --- a/tests/integrations/redis/asyncio/test_redis_asyncio.py +++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py @@ -3,6 +3,7 @@ from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration +from tests.conftest import ApproxDict from fakeredis.aioredis import FakeRedis @@ -64,18 +65,20 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" - assert span["data"] == { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, - SPANDATA.DB_SYSTEM: "redis", - SPANDATA.DB_NAME: "0", - SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get( - "host" - ), - SPANDATA.SERVER_PORT: 6379, - } + assert span["data"] == ApproxDict( + { + "redis.commands": { + "count": 3, + "first_ten": expected_first_ten, + }, + SPANDATA.DB_SYSTEM: "redis", + SPANDATA.DB_NAME: "0", + SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get( + "host" + ), + SPANDATA.SERVER_PORT: 6379, + } + ) assert span["tags"] == { "redis.transaction": is_transaction, "redis.is_cluster": False, diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py 
b/tests/integrations/redis/cluster/test_redis_cluster.py index 1e1e59e254..a16d66588c 100644 --- a/tests/integrations/redis/cluster/test_redis_cluster.py +++ b/tests/integrations/redis/cluster/test_redis_cluster.py @@ -3,6 +3,7 @@ from sentry_sdk.consts import SPANDATA from sentry_sdk.api import start_transaction from sentry_sdk.integrations.redis import RedisIntegration +from tests.conftest import ApproxDict import redis @@ -82,12 +83,14 @@ def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, descr span = spans[-1] assert span["op"] == "db.redis" assert span["description"] == description - assert span["data"] == { - SPANDATA.DB_SYSTEM: "redis", - # ClusterNode converts localhost to 127.0.0.1 - SPANDATA.SERVER_ADDRESS: "127.0.0.1", - SPANDATA.SERVER_PORT: 6379, - } + assert span["data"] == ApproxDict( + { + SPANDATA.DB_SYSTEM: "redis", + # ClusterNode converts localhost to 127.0.0.1 + SPANDATA.SERVER_ADDRESS: "127.0.0.1", + SPANDATA.SERVER_PORT: 6379, + } + ) assert span["tags"] == { "db.operation": "SET", "redis.command": "SET", @@ -125,16 +128,18 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" - assert span["data"] == { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, - SPANDATA.DB_SYSTEM: "redis", - # ClusterNode converts localhost to 127.0.0.1 - SPANDATA.SERVER_ADDRESS: "127.0.0.1", - SPANDATA.SERVER_PORT: 6379, - } + assert span["data"] == ApproxDict( + { + "redis.commands": { + "count": 3, + "first_ten": expected_first_ten, + }, + SPANDATA.DB_SYSTEM: "redis", + # ClusterNode converts localhost to 127.0.0.1 + SPANDATA.SERVER_ADDRESS: "127.0.0.1", + SPANDATA.SERVER_PORT: 6379, + } + ) assert span["tags"] == { "redis.transaction": False, # For Cluster, this is always False "redis.is_cluster": True, diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py index ad78b79e27..a6d8962afe 100644 --- a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py +++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py @@ -3,6 +3,7 @@ from sentry_sdk import capture_message, start_transaction from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration +from tests.conftest import ApproxDict from redis.asyncio import cluster @@ -47,12 +48,14 @@ async def test_async_breadcrumb(sentry_init, capture_events): assert crumb == { "category": "redis", "message": "GET 'foobar'", - "data": { - "db.operation": "GET", - "redis.key": "foobar", - "redis.command": "GET", - "redis.is_cluster": True, - }, + "data": ApproxDict( + { + "db.operation": "GET", + "redis.key": "foobar", + "redis.command": "GET", + "redis.is_cluster": True, + } + ), "timestamp": crumb["timestamp"], "type": "redis", } @@ -82,12 +85,14 @@ async def test_async_basic(sentry_init, capture_events, send_default_pii, descri (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == description - assert span["data"] == { - SPANDATA.DB_SYSTEM: "redis", - # ClusterNode converts localhost to 127.0.0.1 - SPANDATA.SERVER_ADDRESS: "127.0.0.1", - SPANDATA.SERVER_PORT: 6379, - } + assert span["data"] == ApproxDict( + { + SPANDATA.DB_SYSTEM: "redis", + # ClusterNode converts localhost to 127.0.0.1 + SPANDATA.SERVER_ADDRESS: "127.0.0.1", + SPANDATA.SERVER_PORT: 6379, + } + ) assert span["tags"] == { "redis.is_cluster": 
True, "db.operation": "SET", @@ -126,16 +131,18 @@ async def test_async_redis_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" - assert span["data"] == { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, - SPANDATA.DB_SYSTEM: "redis", - # ClusterNode converts localhost to 127.0.0.1 - SPANDATA.SERVER_ADDRESS: "127.0.0.1", - SPANDATA.SERVER_PORT: 6379, - } + assert span["data"] == ApproxDict( + { + "redis.commands": { + "count": 3, + "first_ten": expected_first_ten, + }, + SPANDATA.DB_SYSTEM: "redis", + # ClusterNode converts localhost to 127.0.0.1 + SPANDATA.SERVER_ADDRESS: "127.0.0.1", + SPANDATA.SERVER_PORT: 6379, + } + ) assert span["tags"] == { "redis.transaction": False, "redis.is_cluster": True, diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py index 14d831a647..88f987758b 100644 --- a/tests/integrations/rediscluster/test_rediscluster.py +++ b/tests/integrations/rediscluster/test_rediscluster.py @@ -4,6 +4,7 @@ from sentry_sdk.api import start_transaction from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.redis import RedisIntegration +from tests.conftest import ApproxDict try: from unittest import mock @@ -56,12 +57,14 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events): assert crumb == { "category": "redis", "message": "GET 'foobar'", - "data": { - "db.operation": "GET", - "redis.key": "foobar", - "redis.command": "GET", - "redis.is_cluster": True, - }, + "data": ApproxDict( + { + "db.operation": "GET", + "redis.key": "foobar", + "redis.command": "GET", + "redis.is_cluster": True, + } + ), "timestamp": crumb["timestamp"], "type": "redis", } @@ -96,16 +99,18 @@ def test_rediscluster_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" - assert span["data"] == { - "redis.commands": { - "count": 3, - "first_ten": expected_first_ten, - }, - SPANDATA.DB_SYSTEM: "redis", - SPANDATA.DB_NAME: "1", - SPANDATA.SERVER_ADDRESS: "localhost", - SPANDATA.SERVER_PORT: 63791, - } + assert span["data"] == ApproxDict( + { + "redis.commands": { + "count": 3, + "first_ten": expected_first_ten, + }, + SPANDATA.DB_SYSTEM: "redis", + SPANDATA.DB_NAME: "1", + SPANDATA.SERVER_ADDRESS: "localhost", + SPANDATA.SERVER_PORT: 63791, + } + ) assert span["tags"] == { "redis.transaction": False, # For Cluster, this is always False "redis.is_cluster": True, @@ -127,12 +132,14 @@ def test_db_connection_attributes_client(sentry_init, capture_events, redisclust (event,) = events (span,) = event["spans"] - assert span["data"] == { - SPANDATA.DB_SYSTEM: "redis", - SPANDATA.DB_NAME: "1", - SPANDATA.SERVER_ADDRESS: "localhost", - SPANDATA.SERVER_PORT: 63791, - } + assert span["data"] == ApproxDict( + { + SPANDATA.DB_SYSTEM: "redis", + SPANDATA.DB_NAME: "1", + SPANDATA.SERVER_ADDRESS: "localhost", + SPANDATA.SERVER_PORT: 63791, + } + ) @pytest.mark.parametrize("rediscluster_cls", rediscluster_classes) @@ -155,13 +162,15 @@ def test_db_connection_attributes_pipeline( (span,) = event["spans"] assert span["op"] == "db.redis" assert span["description"] == "redis.pipeline.execute" - assert span["data"] == { - "redis.commands": { - "count": 1, - "first_ten": ["GET 'foo'"], - }, - SPANDATA.DB_SYSTEM: "redis", - SPANDATA.DB_NAME: "1", - SPANDATA.SERVER_ADDRESS: "localhost", - SPANDATA.SERVER_PORT: 63791, - } + assert span["data"] == ApproxDict( + { + 
"redis.commands": { + "count": 1, + "first_ten": ["GET 'foo'"], + }, + SPANDATA.DB_SYSTEM: "redis", + SPANDATA.DB_NAME: "1", + SPANDATA.SERVER_ADDRESS: "localhost", + SPANDATA.SERVER_PORT: 63791, + } + ) diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py index ed5b273712..1f4dd412d7 100644 --- a/tests/integrations/requests/test_requests.py +++ b/tests/integrations/requests/test_requests.py @@ -6,6 +6,7 @@ from sentry_sdk import capture_message from sentry_sdk.consts import SPANDATA from sentry_sdk.integrations.stdlib import StdlibIntegration +from tests.conftest import ApproxDict try: from unittest import mock # python 3.3 and above @@ -28,14 +29,16 @@ def test_crumb_capture(sentry_init, capture_events): (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - assert crumb["data"] == { - "url": url, - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_FRAGMENT: "", - SPANDATA.HTTP_QUERY: "", - SPANDATA.HTTP_STATUS_CODE: response.status_code, - "reason": response.reason, - } + assert crumb["data"] == ApproxDict( + { + "url": url, + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_FRAGMENT: "", + SPANDATA.HTTP_QUERY: "", + SPANDATA.HTTP_STATUS_CODE: response.status_code, + "reason": response.reason, + } + ) @pytest.mark.tests_internal_exceptions @@ -56,9 +59,15 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events): capture_message("Testing!") (event,) = events - assert event["breadcrumbs"]["values"][0]["data"] == { - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_STATUS_CODE: response.status_code, - "reason": response.reason, - # no url related data - } + assert event["breadcrumbs"]["values"][0]["data"] == ApproxDict( + { + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_STATUS_CODE: response.status_code, + "reason": response.reason, + # no url related data + } + ) + + assert "url" not in event["breadcrumbs"]["values"][0]["data"] + assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"] + assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"] diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py index 914ba0bf84..4f93c1f2a5 100644 --- a/tests/integrations/socket/test_socket.py +++ b/tests/integrations/socket/test_socket.py @@ -2,6 +2,7 @@ from sentry_sdk import start_transaction from sentry_sdk.integrations.socket import SocketIntegration +from tests.conftest import ApproxDict def test_getaddrinfo_trace(sentry_init, capture_events): @@ -16,10 +17,12 @@ def test_getaddrinfo_trace(sentry_init, capture_events): assert span["op"] == "socket.dns" assert span["description"] == "example.com:443" - assert span["data"] == { - "host": "example.com", - "port": 443, - } + assert span["data"] == ApproxDict( + { + "host": "example.com", + "port": 443, + } + ) def test_create_connection_trace(sentry_init, capture_events): @@ -37,15 +40,19 @@ def test_create_connection_trace(sentry_init, capture_events): assert connect_span["op"] == "socket.connection" assert connect_span["description"] == "example.com:443" - assert connect_span["data"] == { - "address": ["example.com", 443], - "timeout": timeout, - "source_address": None, - } + assert connect_span["data"] == ApproxDict( + { + "address": ["example.com", 443], + "timeout": timeout, + "source_address": None, + } + ) assert dns_span["op"] == "socket.dns" assert dns_span["description"] == "example.com:443" - assert dns_span["data"] == { - "host": "example.com", - 
"port": 443, - } + assert dns_span["data"] == ApproxDict( + { + "host": "example.com", + "port": 443, + } + ) diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py index d50bf42e21..6055b86ab8 100644 --- a/tests/integrations/stdlib/test_httplib.py +++ b/tests/integrations/stdlib/test_httplib.py @@ -27,7 +27,7 @@ from sentry_sdk.tracing import Transaction from sentry_sdk.integrations.stdlib import StdlibIntegration -from tests.conftest import create_mock_http_server +from tests.conftest import ApproxDict, create_mock_http_server PORT = create_mock_http_server() @@ -46,14 +46,16 @@ def test_crumb_capture(sentry_init, capture_events): assert crumb["type"] == "http" assert crumb["category"] == "httplib" - assert crumb["data"] == { - "url": url, - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_STATUS_CODE: 200, - "reason": "OK", - SPANDATA.HTTP_FRAGMENT: "", - SPANDATA.HTTP_QUERY: "", - } + assert crumb["data"] == ApproxDict( + { + "url": url, + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_STATUS_CODE: 200, + "reason": "OK", + SPANDATA.HTTP_FRAGMENT: "", + SPANDATA.HTTP_QUERY: "", + } + ) def test_crumb_capture_hint(sentry_init, capture_events): @@ -73,15 +75,17 @@ def before_breadcrumb(crumb, hint): (crumb,) = event["breadcrumbs"]["values"] assert crumb["type"] == "http" assert crumb["category"] == "httplib" - assert crumb["data"] == { - "url": url, - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_STATUS_CODE: 200, - "reason": "OK", - "extra": "foo", - SPANDATA.HTTP_FRAGMENT: "", - SPANDATA.HTTP_QUERY: "", - } + assert crumb["data"] == ApproxDict( + { + "url": url, + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_STATUS_CODE: 200, + "reason": "OK", + "extra": "foo", + SPANDATA.HTTP_FRAGMENT: "", + SPANDATA.HTTP_QUERY: "", + } + ) def test_empty_realurl(sentry_init): @@ -131,14 +135,16 @@ def test_httplib_misuse(sentry_init, capture_events, request): assert crumb["type"] == "http" assert crumb["category"] == "httplib" - assert crumb["data"] == { - "url": "http://localhost:{}/200".format(PORT), - SPANDATA.HTTP_METHOD: "GET", - SPANDATA.HTTP_STATUS_CODE: 200, - "reason": "OK", - SPANDATA.HTTP_FRAGMENT: "", - SPANDATA.HTTP_QUERY: "", - } + assert crumb["data"] == ApproxDict( + { + "url": "http://localhost:{}/200".format(PORT), + SPANDATA.HTTP_METHOD: "GET", + SPANDATA.HTTP_STATUS_CODE: 200, + "reason": "OK", + SPANDATA.HTTP_FRAGMENT: "", + SPANDATA.HTTP_QUERY: "", + } + ) def test_outgoing_trace_headers(sentry_init, monkeypatch): diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py index 31da043ac3..d61be35fd2 100644 --- a/tests/integrations/stdlib/test_subprocess.py +++ b/tests/integrations/stdlib/test_subprocess.py @@ -8,6 +8,7 @@ from sentry_sdk import capture_message, start_transaction from sentry_sdk._compat import PY2 from sentry_sdk.integrations.stdlib import StdlibIntegration +from tests.conftest import ApproxDict if PY2: @@ -125,7 +126,7 @@ def test_subprocess_basic( assert message_event["message"] == "hi" - data = {"subprocess.cwd": os.getcwd()} if with_cwd else {} + data = ApproxDict({"subprocess.cwd": os.getcwd()} if with_cwd else {}) (crumb,) = message_event["breadcrumbs"]["values"] assert crumb == { diff --git a/tests/integrations/strawberry/test_strawberry_py3.py b/tests/integrations/strawberry/test_strawberry_py3.py index b357779461..e84c5f6fa5 100644 --- a/tests/integrations/strawberry/test_strawberry_py3.py +++ b/tests/integrations/strawberry/test_strawberry_py3.py @@ -25,6 +25,7 @@ 
SentryAsyncExtension, SentrySyncExtension, ) +from tests.conftest import ApproxDict parameterize_strawberry_test = pytest.mark.parametrize( @@ -351,12 +352,14 @@ def test_capture_transaction_on_error( resolve_span = resolve_spans[0] assert resolve_span["parent_span_id"] == query_span["span_id"] assert resolve_span["description"] == "resolving Query.error" - assert resolve_span["data"] == { - "graphql.field_name": "error", - "graphql.parent_type": "Query", - "graphql.field_path": "Query.error", - "graphql.path": "error", - } + assert resolve_span["data"] == ApproxDict( + { + "graphql.field_name": "error", + "graphql.parent_type": "Query", + "graphql.field_path": "Query.error", + "graphql.path": "error", + } + ) @parameterize_strawberry_test @@ -429,12 +432,14 @@ def test_capture_transaction_on_success( resolve_span = resolve_spans[0] assert resolve_span["parent_span_id"] == query_span["span_id"] assert resolve_span["description"] == "resolving Query.hello" - assert resolve_span["data"] == { - "graphql.field_name": "hello", - "graphql.parent_type": "Query", - "graphql.field_path": "Query.hello", - "graphql.path": "hello", - } + assert resolve_span["data"] == ApproxDict( + { + "graphql.field_name": "hello", + "graphql.parent_type": "Query", + "graphql.field_path": "Query.hello", + "graphql.path": "hello", + } + ) @parameterize_strawberry_test @@ -507,12 +512,14 @@ def test_transaction_no_operation_name( resolve_span = resolve_spans[0] assert resolve_span["parent_span_id"] == query_span["span_id"] assert resolve_span["description"] == "resolving Query.hello" - assert resolve_span["data"] == { - "graphql.field_name": "hello", - "graphql.parent_type": "Query", - "graphql.field_path": "Query.hello", - "graphql.path": "hello", - } + assert resolve_span["data"] == ApproxDict( + { + "graphql.field_name": "hello", + "graphql.parent_type": "Query", + "graphql.field_path": "Query.hello", + "graphql.path": "hello", + } + ) @parameterize_strawberry_test @@ -585,9 +592,38 @@ def test_transaction_mutation( resolve_span = resolve_spans[0] assert resolve_span["parent_span_id"] == query_span["span_id"] assert resolve_span["description"] == "resolving Mutation.change" - assert resolve_span["data"] == { - "graphql.field_name": "change", - "graphql.parent_type": "Mutation", - "graphql.field_path": "Mutation.change", - "graphql.path": "change", - } + assert resolve_span["data"] == ApproxDict( + { + "graphql.field_name": "change", + "graphql.parent_type": "Mutation", + "graphql.field_path": "Mutation.change", + "graphql.path": "change", + } + ) + + +@parameterize_strawberry_test +def test_handle_none_query_gracefully( + request, + sentry_init, + capture_events, + client_factory, + async_execution, + framework_integrations, +): + sentry_init( + integrations=[ + StrawberryIntegration(async_execution=async_execution), + ] + + framework_integrations, + ) + events = capture_events() + + schema = strawberry.Schema(Query) + + client_factory = request.getfixturevalue(client_factory) + client = client_factory(schema) + + client.post("/graphql", json={}) + + assert len(events) == 0, "expected no events to be sent to Sentry" diff --git a/tests/test_metrics.py b/tests/test_metrics.py index 1d4a49fcb2..741935615d 100644 --- a/tests/test_metrics.py +++ b/tests/test_metrics.py @@ -58,7 +58,7 @@ def parse_metrics(bytes): @minimum_python_37_with_gevent @pytest.mark.forked -def test_incr(sentry_init, capture_envelopes, maybe_monkeypatched_threading): +def test_increment(sentry_init, capture_envelopes, 
maybe_monkeypatched_threading): sentry_init( release="fun-release", environment="not-fun-env", @@ -67,7 +67,8 @@ def test_incr(sentry_init, capture_envelopes, maybe_monkeypatched_threading): ts = time.time() envelopes = capture_envelopes() - metrics.incr("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) + metrics.increment("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) + # python specific alias metrics.incr("foobar", 2.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts) Hub.current.flush() @@ -487,8 +488,8 @@ def test_multiple(sentry_init, capture_envelopes): metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts) metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts) for _ in range(10): - metrics.incr("counter-1", 1.0, timestamp=ts) - metrics.incr("counter-2", 1.0, timestamp=ts) + metrics.increment("counter-1", 1.0, timestamp=ts) + metrics.increment("counter-2", 1.0, timestamp=ts) Hub.current.flush() @@ -570,18 +571,13 @@ def test_transaction_name( @minimum_python_37_with_gevent @pytest.mark.forked -@pytest.mark.parametrize("sample_rate", [1.0, None]) def test_metric_summaries( - sentry_init, capture_envelopes, sample_rate, maybe_monkeypatched_threading + sentry_init, capture_envelopes, maybe_monkeypatched_threading ): sentry_init( release="fun-release@1.0.0", environment="not-fun-env", enable_tracing=True, - _experiments={ - "enable_metrics": True, - "metrics_summary_sample_rate": sample_rate, - }, ) ts = time.time() envelopes = capture_envelopes() @@ -589,7 +585,7 @@ def test_metric_summaries( with start_transaction( op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE ) as transaction: - metrics.incr("root-counter", timestamp=ts) + metrics.increment("root-counter", timestamp=ts) with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts): for x in range(10): metrics.distribution("my-dist", float(x), timestamp=ts) @@ -681,171 +677,99 @@ def test_metric_summaries( @minimum_python_37_with_gevent @pytest.mark.forked -def test_metrics_summary_disabled( - sentry_init, capture_envelopes, maybe_monkeypatched_threading -): - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - enable_tracing=True, - _experiments={"enable_metrics": True, "metrics_summary_sample_rate": 0.0}, - ) - ts = time.time() - envelopes = capture_envelopes() - - with start_transaction( - op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE - ) as transaction: - with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts): - pass - - Hub.current.flush() - - (transaction, envelope) = envelopes - - # Metrics Emission - assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - - assert len(m) == 1 - assert m[0][1] == "my-timer-metric@second" - assert m[0][2] == "d" - assert len(m[0][3]) == 1 - assert m[0][4] == { - "a": "b", - "transaction": "/foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - - # Measurement Attachment - t = transaction.items[0].get_transaction_event() - assert "_metrics_summary" not in t - assert "_metrics_summary" not in t["spans"][0] - - -@minimum_python_37_with_gevent -@pytest.mark.forked -def test_metrics_summary_filtered( - sentry_init, capture_envelopes, maybe_monkeypatched_threading +@pytest.mark.parametrize( + "metric_name,metric_unit,expected_name", + [ + ("first-metric", "nano-second", "first-metric@nanosecond"), + ("another_metric?", "nano second", "another_metric_@nanosecond"), + ( + "metric", + "nanosecond", + 
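The renamed `test_increment` exercises both spellings, so `metrics.incr` must survive as an alias of the new `metrics.increment`. A module-level rebind is all it takes; the signature below is inferred from the call sites in these tests, not taken from the SDK:

```python
def increment(key, value=1.0, unit="none", tags=None, timestamp=None):
    """Record a counter metric (signature inferred from the test call sites)."""
    ...


incr = increment  # Python-specific, backwards-compatible alias
```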
"metric@nanosecond", + ), + ( + "my.amaze.metric I guess", + "nano|\nsecond", + "my.amaze.metric_I_guess@nanosecond", + ), + # fmt: off + (u"métríc", u"nanöseconď", u"m_tr_c@nansecon"), + # fmt: on + ], +) +def test_metric_name_normalization( + sentry_init, + capture_envelopes, + metric_name, + metric_unit, + expected_name, + maybe_monkeypatched_threading, ): - def should_summarize_metric(key, tags): - return key == "foo" - sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", - enable_tracing=True, - _experiments={ - "enable_metrics": True, - "metrics_summary_sample_rate": 1.0, - "should_summarize_metric": should_summarize_metric, - }, + _experiments={"enable_metrics": True, "metric_code_locations": False}, ) - ts = time.time() envelopes = capture_envelopes() - with start_transaction( - op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE - ) as transaction: - metrics.timing("foo", value=3.0, tags={"a": "b"}, timestamp=ts) - metrics.timing("foo", value=2.0, tags={"b": "c"}, timestamp=ts) - metrics.timing("bar", value=1.0, tags={"a": "b"}, timestamp=ts) + metrics.distribution(metric_name, 1.0, unit=metric_unit) Hub.current.flush() - (transaction, envelope) = envelopes + (envelope,) = envelopes - # Metrics Emission + assert len(envelope.items) == 1 assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 3 - assert m[0][1] == "bar@second" - assert m[1][1] == "foo@second" - assert m[2][1] == "foo@second" + parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) + assert len(parsed_metrics) == 1 - # Measurement Attachment - t = transaction.items[0].get_transaction_event()["_metrics_summary"] - assert len(t["d:foo@second"]) == 2 - assert { - "tags": { - "a": "b", - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - }, - "min": 3.0, - "max": 3.0, - "count": 1, - "sum": 3.0, - } in t["d:foo@second"] - assert { - "tags": { - "b": "c", - "environment": "not-fun-env", - "release": "fun-release@1.0.0", - "transaction": "/foo", - }, - "min": 2.0, - "max": 2.0, - "count": 1, - "sum": 2.0, - } in t["d:foo@second"] + name = parsed_metrics[0][1] + assert name == expected_name @minimum_python_37_with_gevent @pytest.mark.forked -def test_tag_normalization( - sentry_init, capture_envelopes, maybe_monkeypatched_threading +@pytest.mark.parametrize( + "metric_tag,expected_tag", + [ + ({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}), + ({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}), + # fmt: off + ({u"foö-bar": u"snöwmän"}, {u"fo-bar": u"snöwmän"},), + # fmt: on + ({"route": "GET /foo"}, {"route": "GET /foo"}), + ({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}), + ({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}), + ], +) +def test_metric_tag_normalization( + sentry_init, + capture_envelopes, + metric_tag, + expected_tag, + maybe_monkeypatched_threading, ): sentry_init( - release="fun-release@1.0.0", - environment="not-fun-env", _experiments={"enable_metrics": True, "metric_code_locations": False}, ) - ts = time.time() envelopes = capture_envelopes() - # fmt: off - metrics.distribution("a", 1.0, tags={"foo-bar": "%$foo"}, timestamp=ts) - metrics.distribution("b", 1.0, tags={"foo$$$bar": "blah{}"}, timestamp=ts) - metrics.distribution("c", 1.0, tags={u"foö-bar": u"snöwmän"}, timestamp=ts) - metrics.distribution("d", 1.0, tags={"route": "GET /foo"}, timestamp=ts) - # fmt: on + metrics.distribution("a", 1.0, 
tags=metric_tag) + Hub.current.flush() (envelope,) = envelopes assert len(envelope.items) == 1 assert envelope.items[0].headers["type"] == "statsd" - m = parse_metrics(envelope.items[0].payload.get_bytes()) - assert len(m) == 4 - assert m[0][4] == { - "foo-bar": "$foo", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } + parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes()) + assert len(parsed_metrics) == 1 - assert m[1][4] == { - "foo_bar": "blah{}", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } + tags = parsed_metrics[0][4] - # fmt: off - assert m[2][4] == { - "fo_-bar": u"snöwmän", - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - } - assert m[3][4] == { - "release": "fun-release@1.0.0", - "environment": "not-fun-env", - "route": "GET /foo", - } - # fmt: on + expected_tag_key, expected_tag_value = expected_tag.popitem() + assert expected_tag_key in tags + assert tags[expected_tag_key] == expected_tag_value @minimum_python_37_with_gevent @@ -853,13 +777,14 @@ def test_tag_normalization( def test_before_emit_metric( sentry_init, capture_envelopes, maybe_monkeypatched_threading ): - def before_emit(key, tags): - if key == "removed-metric": + def before_emit(key, value, unit, tags): + if key == "removed-metric" or value == 47 or unit == "unsupported": return False + tags["extra"] = "foo" del tags["release"] # this better be a noop! - metrics.incr("shitty-recursion") + metrics.increment("shitty-recursion") return True sentry_init( @@ -873,8 +798,10 @@ def before_emit(key, tags): ) envelopes = capture_envelopes() - metrics.incr("removed-metric", 1.0) - metrics.incr("actual-metric", 1.0) + metrics.increment("removed-metric", 1.0) + metrics.increment("another-removed-metric", 47) + metrics.increment("yet-another-removed-metric", 1.0, unit="unsupported") + metrics.increment("actual-metric", 1.0) Hub.current.flush() (envelope,) = envelopes @@ -906,7 +833,7 @@ def test_aggregator_flush( ) envelopes = capture_envelopes() - metrics.incr("a-metric", 1.0) + metrics.increment("a-metric", 1.0) Hub.current.flush() assert len(envelopes) == 1 @@ -925,7 +852,7 @@ def test_tag_serialization( ) envelopes = capture_envelopes() - metrics.incr( + metrics.increment( "counter", tags={ "no-value": None, @@ -970,12 +897,12 @@ def test_flush_recursion_protection( real_capture_envelope = test_client.transport.capture_envelope def bad_capture_envelope(*args, **kwargs): - metrics.incr("bad-metric") + metrics.increment("bad-metric") return real_capture_envelope(*args, **kwargs) monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - metrics.incr("counter") + metrics.increment("counter") # flush twice to see the inner metric Hub.current.flush() @@ -1004,12 +931,12 @@ def test_flush_recursion_protection_background_flush( real_capture_envelope = test_client.transport.capture_envelope def bad_capture_envelope(*args, **kwargs): - metrics.incr("bad-metric") + metrics.increment("bad-metric") return real_capture_envelope(*args, **kwargs) monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope) - metrics.incr("counter") + metrics.increment("counter") # flush via sleep and flag Hub.current.client.metrics_aggregator._force_flush = True diff --git a/tests/test_profiler.py b/tests/test_profiler.py index 94659ff02f..495dd3f300 100644 --- a/tests/test_profiler.py +++ b/tests/test_profiler.py @@ -16,13 +16,11 @@ extract_frame, extract_stack, frame_id, - get_current_thread_id, get_frame_name, 
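The tag cases above imply the same split for tags: invalid characters are stripped from tag keys, while tag values are preserved but escaped for the statsd payload. A sketch consistent with the expected values (again, the character set and escape table are inferred from the cases, not copied from the SDK):

```python
import re

_TAG_VALUE_ESCAPES = {
    "\\": "\\\\",
    "\n": "\\n",
    "\r": "\\r",
    "\t": "\\t",
    "|": "\\u{7c}",
    ",": "\\u{2c}",
}


def normalize_tag_key(key):
    return re.sub(r"[^a-zA-Z0-9_/.-]+", "", key)


def serialize_tag_value(value):
    return "".join(_TAG_VALUE_ESCAPES.get(char, char) for char in value)


assert normalize_tag_key("foo$.$.$bar") == "foo..bar"
assert serialize_tag_value("this | or , that") == "this \\u{7c} or \\u{2c} that"
```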
setup_profiler, ) from sentry_sdk.tracing import Transaction from sentry_sdk._lru_cache import LRUCache -from sentry_sdk._queue import Queue try: from unittest import mock # python 3.3 and above @@ -556,74 +554,6 @@ def test_extract_stack_with_cache(frame, depth): assert frame1 is frame2, i -@requires_python_version(3, 3) -def test_get_current_thread_id_explicit_thread(): - results = Queue(maxsize=1) - - def target1(): - pass - - def target2(): - results.put(get_current_thread_id(thread1)) - - thread1 = threading.Thread(target=target1) - thread1.start() - - thread2 = threading.Thread(target=target2) - thread2.start() - - thread2.join() - thread1.join() - - assert thread1.ident == results.get(timeout=1) - - -@requires_python_version(3, 3) -@requires_gevent -def test_get_current_thread_id_gevent_in_thread(): - results = Queue(maxsize=1) - - def target(): - job = gevent.spawn(get_current_thread_id) - job.join() - results.put(job.value) - - thread = threading.Thread(target=target) - thread.start() - thread.join() - assert thread.ident == results.get(timeout=1) - - -@requires_python_version(3, 3) -def test_get_current_thread_id_running_thread(): - results = Queue(maxsize=1) - - def target(): - results.put(get_current_thread_id()) - - thread = threading.Thread(target=target) - thread.start() - thread.join() - assert thread.ident == results.get(timeout=1) - - -@requires_python_version(3, 3) -def test_get_current_thread_id_main_thread(): - results = Queue(maxsize=1) - - def target(): - # mock that somehow the current thread doesn't exist - with mock.patch("threading.current_thread", side_effect=[None]): - results.put(get_current_thread_id()) - - thread_id = threading.main_thread().ident if sys.version_info >= (3, 4) else None - - thread = threading.Thread(target=target) - thread.start() - thread.join() - assert thread_id == results.get(timeout=1) - - def get_scheduler_threads(scheduler): return [thread for thread in threading.enumerate() if thread.name == scheduler.name] diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py index 126bf158d8..2c4bd3aa90 100644 --- a/tests/test_scrubber.py +++ b/tests/test_scrubber.py @@ -4,6 +4,7 @@ from sentry_sdk import capture_exception, capture_event, start_transaction, start_span from sentry_sdk.utils import event_from_exception from sentry_sdk.scrubber import EventScrubber +from tests.conftest import ApproxDict logger = logging.getLogger(__name__) @@ -121,7 +122,9 @@ def test_span_data_scrubbing(sentry_init, capture_events): span.set_data("datafoo", "databar") (event,) = events - assert event["spans"][0]["data"] == {"password": "[Filtered]", "datafoo": "databar"} + assert event["spans"][0]["data"] == ApproxDict( + {"password": "[Filtered]", "datafoo": "databar"} + ) assert event["_meta"]["spans"] == { "0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}} } diff --git a/tests/test_transport.py b/tests/test_transport.py index c1f70b0108..8848ad471e 100644 --- a/tests/test_transport.py +++ b/tests/test_transport.py @@ -14,7 +14,7 @@ from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope from sentry_sdk._compat import datetime_utcnow from sentry_sdk.transport import KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits -from sentry_sdk.envelope import Envelope, parse_json +from sentry_sdk.envelope import Envelope, Item, parse_json from sentry_sdk.integrations.logging import LoggingIntegration try: @@ -466,3 +466,114 @@ def test_complex_limits_without_data_category( client.flush() assert len(capturing_server.captured) == 0 + + 
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_metric_bucket_limits(capturing_server, response_code, make_client):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:custom"
+        },
+    )
+
+    envelope = Envelope()
+    envelope.add_item(Item(payload=b"{}", type="statsd"))
+    client.transport.capture_envelope(envelope)
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set(["metric_bucket"])
+
+    client.transport.capture_envelope(envelope)
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "transaction"
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "client_report"
+    report = parse_json(envelope.items[0].get_bytes())
+    assert report["discarded_events"] == [
+        {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1},
+    ]
+
+
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_metric_bucket_limits_with_namespace(
+    capturing_server, response_code, make_client
+):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:foo"
+        },
+    )
+
+    envelope = Envelope()
+    envelope.add_item(Item(payload=b"{}", type="statsd"))
+    client.transport.capture_envelope(envelope)
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set([])
+
+    client.transport.capture_envelope(envelope)
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "statsd"
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "transaction"
+
+
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_metric_bucket_limits_with_all_namespaces(
+    capturing_server, response_code, make_client
+):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded"
+        },
+    )
+
+    envelope = Envelope()
+    envelope.add_item(Item(payload=b"{}", type="statsd"))
+    client.transport.capture_envelope(envelope)
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set(["metric_bucket"])
+
+    client.transport.capture_envelope(envelope)
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "transaction"
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "client_report"
+    report = parse_json(envelope.items[0].get_bytes())
+    assert report["discarded_events"] == [
+        {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1},
+    ]
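Together, the three tests above define the namespace semantics of the new `metric_bucket` category: an `X-Sentry-Rate-Limits` entry only throttles metric buckets when its namespace segment is empty or lists `custom`. A simplified decision helper capturing just that rule (the SDK's real header parser also handles retry-after bookkeeping and the other data categories):

```python
def applies_to_metric_buckets(rate_limit_entry):
    # Entry format (simplified): retry_after:categories:scope:reason:namespaces
    parts = rate_limit_entry.split(":")
    categories = parts[1].split(";") if len(parts) > 1 else []
    namespaces = parts[4].split(";") if len(parts) > 4 and parts[4] else []
    if "metric_bucket" not in categories:
        return False
    # Only the "custom" namespace is limited; no namespaces means all of them.
    return not namespaces or "custom" in namespaces


assert applies_to_metric_buckets("4711:metric_bucket:organization:quota_exceeded:custom")
assert not applies_to_metric_buckets("4711:metric_bucket:organization:quota_exceeded:foo")
assert applies_to_metric_buckets("4711:metric_bucket:organization:quota_exceeded")
```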
diff --git a/tests/test_types.py b/tests/test_types.py
new file mode 100644
index 0000000000..bef6aaa59e
--- /dev/null
+++ b/tests/test_types.py
@@ -0,0 +1,28 @@
+import sys
+
+import pytest
+from sentry_sdk.types import Event, Hint
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 10),
+    reason="Type hinting with `|` is available in Python 3.10+",
+)
+def test_event_or_none_runtime():
+    """
+    Ensures that the `Event` type's runtime value supports the `|` operation with `None`.
+    This test is needed to ensure that using an `Event | None` type hint (e.g. for
+    `before_send`'s return value) does not raise a TypeError at runtime.
+    """
+    Event | None
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 10),
+    reason="Type hinting with `|` is available in Python 3.10+",
+)
+def test_hint_or_none_runtime():
+    """
+    Analogue to `test_event_or_none_runtime`, but for the `Hint` type.
+    """
+    Hint | None
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 147064b541..4b8e9087cc 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,12 +1,15 @@
 import pytest
 import re
 import sys
+import threading
 from datetime import timedelta
 
 from sentry_sdk._compat import duration_in_milliseconds
+from sentry_sdk._queue import Queue
 from sentry_sdk.utils import (
     Components,
     Dsn,
+    get_current_thread_meta,
     get_default_release,
     get_error_message,
     get_git_revision,
@@ -29,6 +32,11 @@
 except ImportError:
     import mock  # python < 3.3
 
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
 try:
     # Python 3
     FileNotFoundError
@@ -607,3 +615,138 @@ def test_default_release_empty_string():
 )
 def test_duration_in_milliseconds(timedelta, expected_milliseconds):
     assert duration_in_milliseconds(timedelta) == expected_milliseconds
+
+
+def test_get_current_thread_meta_explicit_thread():
+    results = Queue(maxsize=1)
+
+    def target1():
+        pass
+
+    def target2():
+        results.put(get_current_thread_meta(thread1))
+
+    thread1 = threading.Thread(target=target1)
+    thread1.start()
+
+    thread2 = threading.Thread(target=target2)
+    thread2.start()
+
+    thread2.join()
+    thread1.join()
+
+    assert (thread1.ident, thread1.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() Not available"
+)
+def test_get_current_thread_meta_bad_explicit_thread():
+    thread = "fake thread"
+
+    main_thread = threading.main_thread()
+
+    assert (main_thread.ident, main_thread.name) == get_current_thread_meta(thread)
+
+
+@pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+def test_get_current_thread_meta_gevent_in_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("sentry_sdk.utils.is_gevent", side_effect=[True]):
+            job = gevent.spawn(get_current_thread_meta)
+            job.join()
+            results.put(job.value)
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (thread.ident, None) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+def test_get_current_thread_meta_gevent_in_thread_failed_to_get_hub():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("sentry_sdk.utils.is_gevent", side_effect=[True]):
+            with mock.patch(
+                "sentry_sdk.utils.get_gevent_hub", side_effect=["fake hub"]
+            ):
+                job = gevent.spawn(get_current_thread_meta)
+                job.join()
+                results.put(job.value)
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (thread.ident, thread.name) == results.get(timeout=1)
+
+
+def test_get_current_thread_meta_running_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        results.put(get_current_thread_meta())
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (thread.ident, thread.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() Not available"
+)
+def test_get_current_thread_meta_bad_running_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("threading.current_thread", side_effect=["fake thread"]):
+            results.put(get_current_thread_meta())
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+
+    main_thread = threading.main_thread()
+    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() Not available"
+)
+def test_get_current_thread_meta_main_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        # mock that somehow the current thread doesn't exist
+        with mock.patch("threading.current_thread", side_effect=[None]):
+            results.put(get_current_thread_meta())
+
+    main_thread = threading.main_thread()
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() Not available"
+)
+def test_get_current_thread_meta_failed_to_get_main_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("threading.current_thread", side_effect=["fake thread"]):
+            with mock.patch("threading.current_thread", side_effect=["fake thread"]):
+                results.put(get_current_thread_meta())
+
+    main_thread = threading.main_thread()
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
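As a summary of what these tests pin down: `get_current_thread_meta` returns an `(ident, name)` tuple and walks a fallback chain from the explicit thread to the running thread to the main thread. A behavioral sketch (not the SDK's implementation; it omits the gevent branch, which the gevent tests show yielding `(ident, None)`):

```python
import threading


def get_current_thread_meta_sketch(thread=None):
    # First candidate with a usable ident wins; a bogus thread object (or a
    # mocked-away current_thread) simply falls through to the next candidate.
    for candidate in (thread, threading.current_thread(), threading.main_thread()):
        ident = getattr(candidate, "ident", None)
        if ident is not None:
            return ident, getattr(candidate, "name", None)
    return None, None
```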