diff --git a/fixtures/integrations/jira/stub_client.py b/fixtures/integrations/jira/stub_client.py
index 9cebf81245c2bb..9544d0ea9908c6 100644
--- a/fixtures/integrations/jira/stub_client.py
+++ b/fixtures/integrations/jira/stub_client.py
@@ -44,7 +44,7 @@ def get_transitions(self, issue_key):
def transition_issue(self, issue_key, transition_id):
pass
- def user_id_field(self):
+ def user_id_field(self) -> str:
return "accountId"
def get_user(self, user_id):
diff --git a/pyproject.toml b/pyproject.toml
index 2dee5af8731868..2ef8beb370ae44 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -307,9 +307,6 @@ module = [
"sentry.api.endpoints.organization_releases",
"sentry.api.paginator",
"sentry.db.postgres.base",
- "sentry.integrations.pagerduty.actions.form",
- "sentry.integrations.slack.message_builder.notifications.issues",
- "sentry.integrations.slack.webhooks.event",
"sentry.issues.search",
"sentry.middleware.auth",
"sentry.middleware.ratelimit",
@@ -349,32 +346,26 @@ module = [
"fixtures.safe_migrations_apps.*",
"fixtures.schema_validation",
"sentry.analytics.*",
+ "sentry.api.bases.*",
"sentry.api.decorators",
"sentry.api.endpoints.integrations.sentry_apps.installation.external_issue.*",
"sentry.api.endpoints.organization_events_spans_performance",
+ "sentry.api.endpoints.organization_member.*",
"sentry.api.endpoints.project_repo_path_parsing",
"sentry.api.endpoints.project_rules_configuration",
- "sentry.api.endpoints.release_thresholds.health_checks.*",
+ "sentry.api.endpoints.release_thresholds.*",
"sentry.api.event_search",
- "sentry.api.helpers.deprecation",
- "sentry.api.helpers.environments",
- "sentry.api.helpers.error_upsampling",
- "sentry.api.helpers.group_index.delete",
- "sentry.api.helpers.group_index.update",
- "sentry.api.helpers.source_map_helper",
+ "sentry.api.helpers.*",
"sentry.api.permissions",
"sentry.api.serializers.models.organization_member.*",
"sentry.api.serializers.rest_framework.group_notes",
"sentry.audit_log.services.*",
- "sentry.auth.access",
- "sentry.auth.authenticators.recovery_code",
- "sentry.auth.manager",
- "sentry.auth.services.*",
- "sentry.auth.view",
+ "sentry.auth.*",
"sentry.bgtasks.*",
"sentry.buffer.*",
"sentry.build.*",
- "sentry.data_export.processors.issues_by_tag",
+ "sentry.dashboards.*",
+ "sentry.data_export.*",
"sentry.data_secrecy.models.*",
"sentry.data_secrecy.service.*",
"sentry.db.models.fields.citext",
@@ -386,12 +377,9 @@ module = [
"sentry.db.models.utils",
"sentry.db.pending_deletion",
"sentry.deletions.*",
+ "sentry.demo_mode.*",
"sentry.digests.*",
- "sentry.dynamic_sampling.models.*",
- "sentry.dynamic_sampling.rules.biases.*",
- "sentry.dynamic_sampling.rules.combinators.*",
- "sentry.dynamic_sampling.rules.helpers.*",
- "sentry.dynamic_sampling.tasks.helpers.*",
+ "sentry.dynamic_sampling.*",
"sentry.eventstream.*",
"sentry.eventtypes.error",
"sentry.feedback.migrations.*",
@@ -427,11 +415,14 @@ module = [
"sentry.integrations.jira_server.actions.*",
"sentry.integrations.jira_server.utils.*",
"sentry.integrations.models.integration_feature",
+ "sentry.integrations.pagerduty.*",
"sentry.integrations.project_management.*",
"sentry.integrations.repository.*",
"sentry.integrations.services.*",
+ "sentry.integrations.slack.message_builder.notifications.issues",
"sentry.integrations.slack.threads.*",
"sentry.integrations.slack.views.*",
+ "sentry.integrations.slack.webhooks.event",
"sentry.integrations.source_code_management.repository",
"sentry.integrations.utils.sync",
"sentry.integrations.vsts.actions.*",
@@ -503,6 +494,8 @@ module = [
"sentry.issues.update_inbox",
"sentry.lang.java.processing",
"sentry.llm.*",
+ "sentry.mail.*",
+ "sentry.middleware.integrations.*",
"sentry.middleware.reporting_endpoint",
"sentry.migrations.*",
"sentry.models.activity",
@@ -655,6 +648,8 @@ module = [
"social_auth.admin",
"social_auth.migrations.*",
"sudo.*",
+ "tests.acceptance.*",
+ "tests.apidocs.*",
"tests.sentry.api.endpoints.issues.*",
"tests.sentry.api.endpoints.release_thresholds.utils.*",
"tests.sentry.api.endpoints.secret_scanning.*",
@@ -795,6 +790,7 @@ module = [
"tests.sentry.workflow_engine.endpoints.utils.*",
"tests.sentry.workflow_engine.handlers.action.*",
"tests.sentry.workflow_engine.models.*",
+ "tests.sentry_plugins.*",
"tools.*",
]
disallow_any_generics = true
diff --git a/src/sentry/api/bases/avatar.py b/src/sentry/api/bases/avatar.py
index ad7f823529a1ff..34dbcc7e51b432 100644
--- a/src/sentry/api/bases/avatar.py
+++ b/src/sentry/api/bases/avatar.py
@@ -8,19 +8,20 @@
from sentry.api.fields import AvatarField
from sentry.api.serializers import serialize
+from sentry.db.models.base import Model
from sentry.models.avatars.base import AvatarBase
from sentry.models.avatars.control_base import ControlAvatarBase
AvatarT = TypeVar("AvatarT", bound=AvatarBase)
-class AvatarSerializer(serializers.Serializer):
+class AvatarSerializer(serializers.Serializer[dict[str, Any]]):
avatar_photo = AvatarField(required=False)
avatar_type = serializers.ChoiceField(
choices=(("upload", "upload"), ("gravatar", "gravatar"), ("letter_avatar", "letter_avatar"))
)
- def validate(self, attrs):
+ def validate(self, attrs: dict[str, Any]) -> dict[str, Any]:
attrs = super().validate(attrs)
if attrs.get("avatar_type") == "upload":
model_type = self.context["type"]
@@ -46,7 +47,7 @@ def validate(self, attrs):
class AvatarMixin(Generic[AvatarT]):
object_type: ClassVar[str]
- serializer_cls: ClassVar[type[serializers.Serializer]] = AvatarSerializer
+ serializer_cls: ClassVar[type[serializers.Serializer[dict[str, Any]]]] = AvatarSerializer
@property
def model(self) -> type[AvatarT]:
@@ -56,13 +57,15 @@ def get(self, request: Request, **kwargs: Any) -> Response:
obj = kwargs.pop(self.object_type, None)
return Response(serialize(obj, request.user, **kwargs))
- def get_serializer_context(self, obj, **kwargs: Any):
+ def get_serializer_context(self, obj: Model, **kwargs: Any) -> dict[str, Any]:
return {"type": self.model, "kwargs": {self.object_type: obj}}
- def get_avatar_filename(self, obj):
+ def get_avatar_filename(self, obj: Model) -> str:
return f"{obj.id}.png"
- def parse(self, request: Request, **kwargs: Any) -> tuple[Any, serializers.Serializer]:
+ def parse(
+ self, request: Request, **kwargs: Any
+ ) -> tuple[Model, serializers.Serializer[dict[str, Any]]]:
obj = kwargs.pop(self.object_type, None)
serializer = self.serializer_cls(
@@ -70,7 +73,9 @@ def parse(self, request: Request, **kwargs: Any) -> tuple[Any, serializers.Seria
)
return (obj, serializer)
- def save_avatar(self, obj: Any, serializer: serializers.Serializer, **kwargs: Any) -> AvatarT:
+ def save_avatar(
+ self, obj: Model, serializer: serializers.Serializer[dict[str, Any]], **kwargs: Any
+ ) -> AvatarT:
result = serializer.validated_data
return self.model.save_avatar(
diff --git a/src/sentry/api/bases/group.py b/src/sentry/api/bases/group.py
index 48d5eee887acf9..b3efea5ae6f785 100644
--- a/src/sentry/api/bases/group.py
+++ b/src/sentry/api/bases/group.py
@@ -1,10 +1,13 @@
from __future__ import annotations
import logging
+from typing import Any
import sentry_sdk
+from django.db.models import QuerySet
from rest_framework.permissions import SAFE_METHODS
from rest_framework.request import Request
+from rest_framework.views import APIView
from sentry.api.api_owners import ApiOwner
from sentry.api.base import Endpoint
@@ -12,6 +15,7 @@
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.demo_mode.utils import is_demo_mode_enabled, is_demo_user
from sentry.integrations.tasks import create_comment, update_comment
+from sentry.models.activity import Activity
from sentry.models.group import Group, GroupStatus, get_group_with_redirect
from sentry.models.grouplink import GroupLink
from sentry.models.organization import Organization
@@ -34,7 +38,8 @@ class GroupPermission(ProjectPermission):
"DELETE": ["event:admin"],
}
- def has_object_permission(self, request: Request, view, group):
+ def has_object_permission(self, request: Request, view: APIView, group: Any) -> bool:
+ assert isinstance(group, Group)
return super().has_object_permission(request, view, group.project)
@@ -43,8 +48,13 @@ class GroupEndpoint(Endpoint):
permission_classes = (GroupPermission,)
def convert_args(
- self, request: Request, issue_id, organization_id_or_slug=None, *args, **kwargs
- ):
+ self,
+ request: Request,
+ issue_id: str,
+ organization_id_or_slug: str | None = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
# TODO(tkaemming): Ideally, this would return a 302 response, rather
# than just returning the data that is bound to the new group. (It
# technically shouldn't be a 301, since the response could change again
@@ -96,12 +106,12 @@ def convert_args(
return (args, kwargs)
- def get_external_issue_ids(self, group):
+ def get_external_issue_ids(self, group: Group) -> QuerySet[Any]:
return GroupLink.objects.filter(
project_id=group.project_id, group_id=group.id, linked_type=GroupLink.LinkedType.issue
).values_list("linked_id", flat=True)
- def create_external_comment(self, request: Request, group, group_note):
+ def create_external_comment(self, request: Request, group: Group, group_note: Activity) -> None:
for external_issue_id in self.get_external_issue_ids(group):
create_comment.apply_async(
kwargs={
@@ -111,7 +121,7 @@ def create_external_comment(self, request: Request, group, group_note):
}
)
- def update_external_comment(self, request: Request, group, group_note):
+ def update_external_comment(self, request: Request, group: Group, group_note: Activity) -> None:
for external_issue_id in self.get_external_issue_ids(group):
update_comment.apply_async(
kwargs={
@@ -133,7 +143,7 @@ class GroupAiPermission(GroupPermission):
# We want to allow POST requests in order to showcase AI features in demo mode
ALLOWED_METHODS = tuple(list(SAFE_METHODS) + ["POST"])
- def has_permission(self, request: Request, view) -> bool:
+ def has_permission(self, request: Request, view: APIView) -> bool:
if is_demo_user(request.user):
if not is_demo_mode_enabled() or request.method not in self.ALLOWED_METHODS:
return False
@@ -141,7 +151,8 @@ def has_permission(self, request: Request, view) -> bool:
return True
return super().has_permission(request, view)
- def has_object_permission(self, request: Request, view, group) -> bool:
+ def has_object_permission(self, request: Request, view: APIView, group: Any) -> bool:
+ assert isinstance(group, Group)
if is_demo_user(request.user):
if not is_demo_mode_enabled() or request.method not in self.ALLOWED_METHODS:
return False
diff --git a/src/sentry/api/bases/incident.py b/src/sentry/api/bases/incident.py
index aa18754573d9b3..27288d2f06f61e 100644
--- a/src/sentry/api/bases/incident.py
+++ b/src/sentry/api/bases/incident.py
@@ -1,3 +1,5 @@
+from typing import Any
+
from rest_framework.exceptions import PermissionDenied
from rest_framework.request import Request
@@ -24,7 +26,13 @@ class IncidentPermission(OrganizationPermission):
class IncidentEndpoint(OrganizationEndpoint):
- def convert_args(self, request: Request, incident_identifier, *args, **kwargs):
+ def convert_args(
+ self,
+ request: Request,
+ incident_identifier: str,
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
args, kwargs = super().convert_args(request, *args, **kwargs)
organization = kwargs["organization"]
diff --git a/src/sentry/api/bases/organization_events.py b/src/sentry/api/bases/organization_events.py
index e2dbe93ee6dceb..d00155618e4e2c 100644
--- a/src/sentry/api/bases/organization_events.py
+++ b/src/sentry/api/bases/organization_events.py
@@ -1,12 +1,13 @@
from __future__ import annotations
import itertools
-from collections.abc import Callable, Sequence
+from collections.abc import Callable, Iterable, Sequence
from datetime import timedelta
from typing import Any, cast
from urllib.parse import quote as urlquote
import sentry_sdk
+from django.contrib.auth.models import AnonymousUser
from django.http.request import HttpRequest
from django.utils import timezone
from rest_framework.exceptions import ParseError, ValidationError
@@ -27,9 +28,9 @@
from sentry.api.serializers.snuba import SnubaTSResultSerializer
from sentry.api.utils import handle_query_errors
from sentry.discover.arithmetic import is_equation, strip_equation
-from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQueryTypes
+from sentry.discover.models import DatasetSourcesTypes, DiscoverSavedQuery, DiscoverSavedQueryTypes
from sentry.exceptions import InvalidSearchQuery
-from sentry.models.dashboard_widget import DashboardWidgetTypes
+from sentry.models.dashboard_widget import DashboardWidget, DashboardWidgetTypes
from sentry.models.dashboard_widget import DatasetSourcesTypes as DashboardDatasetSourcesTypes
from sentry.models.group import Group
from sentry.models.organization import Organization
@@ -43,6 +44,7 @@
from sentry.snuba.dataset import Dataset
from sentry.snuba.metrics.extraction import MetricSpecType
from sentry.snuba.utils import DATASET_LABELS, DATASET_OPTIONS, get_dataset
+from sentry.users.models.user import User
from sentry.users.services.user.serial import serialize_generic_user
from sentry.utils import snuba
from sentry.utils.cursors import Cursor
@@ -51,7 +53,7 @@
from sentry.utils.snuba import MAX_FIELDS, SnubaTSResult
-def get_query_columns(columns, rollup):
+def get_query_columns(columns: list[str], rollup: int) -> list[str]:
"""
Backwards compatibility for incidents which uses the old
column aliases as it straddles both versions of events/discover.
@@ -113,7 +115,7 @@ def get_teams(self, request: Request, organization: Organization) -> list[Team]:
if not request.user:
return []
- teams = get_teams(request, organization)
+ teams: Iterable[Team] = get_teams(request, organization)
if not teams:
teams = Team.objects.get_for_user(organization, request.user)
@@ -249,7 +251,14 @@ def handle_on_demand(self, request: Request) -> tuple[bool, MetricSpecType]:
return use_on_demand_metrics, on_demand_metric_type
- def save_split_decision(self, widget, has_errors, has_transactions_data, organization, user):
+ def save_split_decision(
+ self,
+ widget: DashboardWidget,
+ has_errors: bool,
+ has_transactions_data: bool,
+ organization: Organization,
+ user: User | AnonymousUser,
+ ) -> int | None:
"""This can be removed once the discover dataset has been fully split"""
source = DashboardDatasetSourcesTypes.INFERRED.value
if has_errors and not has_transactions_data:
@@ -273,15 +282,19 @@ def save_split_decision(self, widget, has_errors, has_transactions_data, organiz
return decision
def save_discover_saved_query_split_decision(
- self, query, dataset_inferred_from_query, has_errors, has_transactions_data
- ):
+ self,
+ query: DiscoverSavedQuery,
+ dataset_inferred_from_query: int | None,
+ has_errors: bool,
+ has_transactions_data: bool,
+ ) -> int | None:
"""
This can be removed once the discover dataset has been fully split.
If dataset is ambiguous (i.e., could be either transactions or errors),
default to errors.
"""
dataset_source = DatasetSourcesTypes.INFERRED.value
- if dataset_inferred_from_query:
+ if dataset_inferred_from_query is not None:
decision = dataset_inferred_from_query
sentry_sdk.set_tag("discover.split_reason", "inferred_from_query")
elif has_errors and not has_transactions_data:
@@ -314,7 +327,7 @@ def handle_unit_meta(
units[key], meta[key] = self.get_unit_and_type(key, value)
return meta, units
- def get_unit_and_type(self, field, field_type):
+ def get_unit_and_type(self, field: str, field_type: str) -> tuple[str | None, str]:
if field_type in SIZE_UNITS:
return field_type, "size"
elif field_type in DURATION_UNITS:
@@ -427,7 +440,7 @@ def handle_data(
return results
- def handle_error_upsampling(self, project_ids: Sequence[int], results: dict[str, Any]):
+ def handle_error_upsampling(self, project_ids: Sequence[int], results: dict[str, Any]) -> None:
"""
If the query is for error upsampled projects, we convert various functions under the hood.
We need to rename these fields before returning the results to the client, to hide the conversion.
@@ -704,7 +717,9 @@ def serialize_multiple_axis(
return result
- def update_meta_with_accuracy(self, meta, event_result, query_column) -> None:
+ def update_meta_with_accuracy(
+ self, meta: dict[str, Any], event_result: SnubaTSResult, query_column: str
+ ) -> None:
if "processed_timeseries" in event_result.data:
processed_timeseries = event_result.data["processed_timeseries"]
meta["accuracy"] = {
@@ -724,7 +739,7 @@ def serialize_accuracy_data(
data: Any,
column: str,
null_zero: bool = False,
- ):
+ ) -> list[dict[str, Any]]:
serialized_values = []
for timestamp, group in itertools.groupby(data, key=lambda r: r["time"]):
for row in group:
diff --git a/src/sentry/api/bases/organization_flag.py b/src/sentry/api/bases/organization_flag.py
index 122f3fcea2f98d..ae42a98b43718a 100644
--- a/src/sentry/api/bases/organization_flag.py
+++ b/src/sentry/api/bases/organization_flag.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+from typing import Any
+
from rest_framework.request import Request
from sentry import features
@@ -22,7 +24,9 @@ def feature_flags(self) -> list[str]:
"Requires set 'feature_flags' property to restrict this endpoint."
)
- def convert_args(self, request: Request, *args, **kwargs):
+ def convert_args(
+ self, request: Request, *args: Any, **kwargs: Any
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
parsed_args, parsed_kwargs = super().convert_args(request, *args, **kwargs)
organization = parsed_kwargs.get("organization")
feature_gate = [
diff --git a/src/sentry/api/bases/organizationmember.py b/src/sentry/api/bases/organizationmember.py
index 4e0b8fb4ce18cb..284b8fcf8eaac7 100644
--- a/src/sentry/api/bases/organizationmember.py
+++ b/src/sentry/api/bases/organizationmember.py
@@ -60,18 +60,18 @@ class MemberIdField(serializers.IntegerField):
Allow "me" in addition to integers
"""
- def to_internal_value(self, data):
+ def to_internal_value(self, data: float | int | str) -> Any:
if data == "me":
return data
return super().to_internal_value(data)
- def run_validation(self, data=empty):
+ def run_validation(self, data: object | None = empty) -> object | None:
if data == "me":
return data
return super().run_validation(data)
-class MemberSerializer(serializers.Serializer):
+class MemberSerializer(serializers.Serializer[dict[str, int | Literal["me"]]]):
id = MemberIdField(min_value=0, max_value=BoundedAutoField.MAX_VALUE, required=True)
diff --git a/src/sentry/api/bases/project.py b/src/sentry/api/bases/project.py
index c211b2b265f5af..39b9c6eda482ac 100644
--- a/src/sentry/api/bases/project.py
+++ b/src/sentry/api/bases/project.py
@@ -121,9 +121,9 @@ class ProjectEndpoint(Endpoint):
def convert_args(
self,
request: Request,
- *args,
- **kwargs,
- ):
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
if args and args[0] is not None:
organization_id_or_slug: int | str = args[0]
# Required so it behaves like the original convert_args, where organization_id_or_slug was another parameter
@@ -193,7 +193,9 @@ def convert_args(
kwargs["project"] = project
return (args, kwargs)
- def get_filter_params(self, request: Request, project, date_filter_optional=False):
+ def get_filter_params(
+ self, request: Request, project: Project, date_filter_optional: bool = False
+ ) -> dict[str, Any]:
"""Similar to the version on the organization just for a single project."""
# get the top level params -- projects, time range, and environment
# from the request
@@ -203,7 +205,7 @@ def get_filter_params(self, request: Request, project, date_filter_optional=Fals
raise ProjectEventsError(str(e))
environments = [env.name for env in get_environments(request, project.organization)]
- params = {"start": start, "end": end, "project_id": [project.id]}
+ params: dict[str, Any] = {"start": start, "end": end, "project_id": [project.id]}
if environments:
params["environment"] = environments
diff --git a/src/sentry/api/bases/team.py b/src/sentry/api/bases/team.py
index 6f4701158bc535..cd73b066c631e4 100644
--- a/src/sentry/api/bases/team.py
+++ b/src/sentry/api/bases/team.py
@@ -1,5 +1,9 @@
+from collections.abc import Sequence
+from typing import Any
+
from rest_framework.permissions import BasePermission
from rest_framework.request import Request
+from rest_framework.views import APIView
from sentry.api.base import Endpoint
from sentry.api.exceptions import ResourceDoesNotExist
@@ -9,8 +13,8 @@
from .organization import OrganizationPermission
-def has_team_permission(request, team, scope_map):
- allowed_scopes = set(scope_map.get(request.method, []))
+def has_team_permission(request: Request, team: Team, scope_map: dict[str, Sequence[str]]) -> bool:
+ allowed_scopes = set(scope_map.get(request.method or "", []))
return any(request.access.has_team_scope(team, s) for s in allowed_scopes)
@@ -22,7 +26,7 @@ class TeamPermission(OrganizationPermission):
"DELETE": ["team:admin"],
}
- def has_object_permission(self, request: Request, view, team):
+ def has_object_permission(self, request: Request, view: APIView, team: Any) -> bool:
has_org_scope = super().has_object_permission(request, view, team.organization)
if has_org_scope:
# Org-admin has "team:admin", but they can only act on their teams
@@ -36,8 +40,13 @@ class TeamEndpoint(Endpoint):
permission_classes: tuple[type[BasePermission], ...] = (TeamPermission,)
def convert_args(
- self, request: Request, organization_id_or_slug, team_id_or_slug, *args, **kwargs
- ):
+ self,
+ request: Request,
+ organization_id_or_slug: str | int,
+ team_id_or_slug: str | int,
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
try:
team = (
Team.objects.filter(
diff --git a/src/sentry/api/endpoints/organization_events_meta.py b/src/sentry/api/endpoints/organization_events_meta.py
index 53825bd9398a0b..cf82f86b3c3574 100644
--- a/src/sentry/api/endpoints/organization_events_meta.py
+++ b/src/sentry/api/endpoints/organization_events_meta.py
@@ -21,8 +21,8 @@
from sentry.api.utils import handle_query_errors
from sentry.middleware import is_frontend_request
from sentry.models.organization import Organization
-from sentry.search.eap.types import SearchResolverConfig
-from sentry.search.events.types import SnubaParams
+from sentry.search.eap.types import EAPResponse, SearchResolverConfig
+from sentry.search.events.types import EventsResponse, SnubaParams
from sentry.snuba import spans_indexed, spans_metrics
from sentry.snuba.query_sources import QuerySource
from sentry.snuba.referrer import Referrer
@@ -217,7 +217,9 @@ def get(self, request: Request, organization: Organization) -> Response:
)
-def get_span_samples(request: Request, snuba_params: SnubaParams, orderby: list[str] | None):
+def get_span_samples(
+ request: Request, snuba_params: SnubaParams, orderby: list[str] | None
+) -> EventsResponse:
is_frontend = is_frontend_request(request)
buckets = request.GET.get("intervals", 3)
lower_bound = request.GET.get("lowerBound", 0)
@@ -297,7 +299,9 @@ def get_span_samples(request: Request, snuba_params: SnubaParams, orderby: list[
)
-def get_eap_span_samples(request: Request, snuba_params: SnubaParams, orderby: list[str] | None):
+def get_eap_span_samples(
+ request: Request, snuba_params: SnubaParams, orderby: list[str] | None
+) -> EAPResponse:
lower_bound = request.GET.get("lowerBound", 0)
first_bound = request.GET.get("firstBound")
second_bound = request.GET.get("secondBound")
diff --git a/src/sentry/api/endpoints/organization_member/__init__.py b/src/sentry/api/endpoints/organization_member/__init__.py
index 87c8e2886959c2..dee4f55c9e1581 100644
--- a/src/sentry/api/endpoints/organization_member/__init__.py
+++ b/src/sentry/api/endpoints/organization_member/__init__.py
@@ -24,7 +24,7 @@ def save_team_assignments(
organization_member: OrganizationMember,
teams: list[Team] | None,
teams_with_roles: list[tuple[Team, str]] | None = None,
-):
+) -> None:
# https://github.com/getsentry/sentry/pull/6054/files/8edbdb181cf898146eda76d46523a21d69ab0ec7#r145798271
lock = locks.get(
f"org:member:{organization_member.id}", duration=5, name="save_team_assignment"
diff --git a/src/sentry/api/endpoints/organization_member/index.py b/src/sentry/api/endpoints/organization_member/index.py
index 8cd5f8a436576e..693eef18bd16fb 100644
--- a/src/sentry/api/endpoints/organization_member/index.py
+++ b/src/sentry/api/endpoints/organization_member/index.py
@@ -1,3 +1,5 @@
+from typing import Any
+
from django.conf import settings
from django.db import router, transaction
from django.db.models import Exists, F, OuterRef, Q
@@ -45,7 +47,7 @@
@extend_schema_serializer(
deprecate_fields=["role", "teams"], exclude_fields=["regenerate", "role", "teams"]
)
-class OrganizationMemberRequestSerializer(serializers.Serializer):
+class OrganizationMemberRequestSerializer(serializers.Serializer[dict[str, Any]]):
email = AllowedEmailField(
max_length=75, required=True, help_text="The email address to send the invitation to."
)
@@ -82,7 +84,7 @@ class OrganizationMemberRequestSerializer(serializers.Serializer):
)
regenerate = serializers.BooleanField(required=False)
- def validate_email(self, email):
+ def validate_email(self, email: str) -> str:
users = user_service.get_many_by_email(
emails=[email],
is_active=True,
@@ -111,10 +113,10 @@ def validate_email(self, email):
return email
- def validate_role(self, role):
+ def validate_role(self, role: str) -> str:
return self.validate_orgRole(role)
- def validate_orgRole(self, role):
+ def validate_orgRole(self, role: str) -> str:
if role == "billing" and features.has(
"organizations:invite-billing", self.context["organization"]
):
@@ -130,7 +132,7 @@ def validate_orgRole(self, role):
)
return role
- def validate_teams(self, teams):
+ def validate_teams(self, teams: list[Team]) -> list[Team]:
valid_teams = list(
Team.objects.filter(
organization=self.context["organization"], status=TeamStatus.ACTIVE, slug__in=teams
@@ -142,7 +144,7 @@ def validate_teams(self, teams):
return valid_teams
- def validate_teamRoles(self, teamRoles) -> list[tuple[Team, str]]:
+ def validate_teamRoles(self, teamRoles: list[dict[str, Any]]) -> list[tuple[Team, str]]:
roles = {item["role"] for item in teamRoles}
valid_roles = [r.id for r in team_roles.get_all()] + [None]
if roles.difference(valid_roles):
@@ -314,7 +316,7 @@ def get(self, request: Request, organization: Organization) -> Response:
},
examples=OrganizationMemberExamples.CREATE_ORG_MEMBER,
)
- def post(self, request: Request, organization) -> Response:
+ def post(self, request: Request, organization: Organization) -> Response:
"""
Add or invite a member to an organization.
"""
diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/details.py b/src/sentry/api/endpoints/organization_member/requests/invite/details.py
index 6cb2c2844ce94f..f25dbdfcc8a0c4 100644
--- a/src/sentry/api/endpoints/organization_member/requests/invite/details.py
+++ b/src/sentry/api/endpoints/organization_member/requests/invite/details.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Literal
+from typing import Any, Literal
from rest_framework import serializers, status
from rest_framework.request import Request
@@ -23,10 +23,10 @@
from ... import get_allowed_org_roles, save_team_assignments
-class ApproveInviteRequestSerializer(serializers.Serializer):
+class ApproveInviteRequestSerializer(serializers.Serializer[dict[str, Any]]):
approve = serializers.BooleanField(required=True, write_only=True)
- def validate_approve(self, approve):
+ def validate_approve(self, approve: bool) -> bool:
request = self.context["request"]
member = self.context["member"]
allowed_roles = self.context["allowed_roles"]
diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/index.py b/src/sentry/api/endpoints/organization_member/requests/invite/index.py
index 2d29c8bf0d71d4..74abb24ec65626 100644
--- a/src/sentry/api/endpoints/organization_member/requests/invite/index.py
+++ b/src/sentry/api/endpoints/organization_member/requests/invite/index.py
@@ -55,7 +55,7 @@ def get(self, request: Request, organization: Organization) -> Response:
paginator_cls=OffsetPaginator,
)
- def post(self, request: Request, organization) -> Response:
+ def post(self, request: Request, organization: Organization) -> Response:
"""
Add a invite request to Organization
````````````````````````````````````
diff --git a/src/sentry/api/endpoints/organization_member/requests/join.py b/src/sentry/api/endpoints/organization_member/requests/join.py
index 1088750f7c683b..929b003e3f5a06 100644
--- a/src/sentry/api/endpoints/organization_member/requests/join.py
+++ b/src/sentry/api/endpoints/organization_member/requests/join.py
@@ -1,4 +1,5 @@
import logging
+from typing import Any
from django.db import IntegrityError
from django.db.models import Q
@@ -13,6 +14,7 @@
from sentry.auth.services.auth import auth_service
from sentry.demo_mode.utils import is_demo_user
from sentry.hybridcloud.models.outbox import outbox_context
+from sentry.models.organization import Organization
from sentry.models.organizationmember import InviteStatus, OrganizationMember
from sentry.notifications.notifications.organization_request import JoinRequestNotification
from sentry.notifications.utils.tasks import async_send_notification
@@ -23,11 +25,13 @@
logger = logging.getLogger(__name__)
-class JoinRequestSerializer(serializers.Serializer):
+class JoinRequestSerializer(serializers.Serializer[dict[str, Any]]):
email = AllowedEmailField(max_length=75, required=True)
-def create_organization_join_request(organization, email, ip_address=None):
+def create_organization_join_request(
+ organization: Organization, email: str, ip_address: str | None = None
+) -> OrganizationMember | None:
with outbox_context(flush=False):
om = OrganizationMember.objects.filter(
Q(email__iexact=email)
@@ -35,7 +39,7 @@ def create_organization_join_request(organization, email, ip_address=None):
organization=organization,
).first()
if om:
- return
+ return None
try:
om = OrganizationMember.objects.create(
@@ -66,7 +70,7 @@ class OrganizationJoinRequestEndpoint(OrganizationEndpoint):
}
}
- def post(self, request: Request, organization) -> Response:
+ def post(self, request: Request, organization: Organization) -> Response:
if organization.get_option("sentry:join_requests") is False:
return Response(
{"detail": "Your organization does not allow join requests."}, status=403
diff --git a/src/sentry/api/endpoints/organization_member/team_details.py b/src/sentry/api/endpoints/organization_member/team_details.py
index 8fa639377cc6c7..f7a61bead5b47c 100644
--- a/src/sentry/api/endpoints/organization_member/team_details.py
+++ b/src/sentry/api/endpoints/organization_member/team_details.py
@@ -52,7 +52,7 @@ class OrganizationMemberTeamSerializerResponse(TypedDict):
@extend_schema_serializer(exclude_fields=["isActive"])
-class OrganizationMemberTeamSerializer(serializers.Serializer):
+class OrganizationMemberTeamSerializer(serializers.Serializer[dict[str, Any]]):
isActive = serializers.BooleanField()
teamRole = serializers.ChoiceField(
choices=team_roles.get_descriptions(),
diff --git a/src/sentry/api/endpoints/organization_member/utils.py b/src/sentry/api/endpoints/organization_member/utils.py
index bc3107d5ddd6d4..d547b1b26d7f05 100644
--- a/src/sentry/api/endpoints/organization_member/utils.py
+++ b/src/sentry/api/endpoints/organization_member/utils.py
@@ -2,6 +2,11 @@
from rest_framework.request import Request
from sentry.api.bases.organization import OrganizationPermission
+from sentry.models.organization import Organization
+from sentry.organizations.services.organization.model import (
+ RpcOrganization,
+ RpcUserOrganizationContext,
+)
ERR_RATE_LIMITED = "You are being rate limited for too many invitations."
@@ -50,5 +55,9 @@ class RelaxedMemberPermission(OrganizationPermission):
# Allow deletions to happen for disabled members so they can remove themselves
# allowing other methods should be fine as well even if we don't strictly need to allow them
- def is_member_disabled_from_limit(self, request: Request, organization):
+ def is_member_disabled_from_limit(
+ self,
+ request: Request,
+ organization: RpcUserOrganizationContext | RpcOrganization | Organization | int,
+ ) -> bool:
return False
diff --git a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py
index cb2a9310a375e6..3fdce282d167de 100644
--- a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py
+++ b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py
@@ -1,5 +1,5 @@
-from collections.abc import Sequence
-from datetime import datetime
+from collections.abc import Callable, Sequence
+from datetime import timedelta
from enum import Enum
from types import ModuleType
from typing import TypedDict, Union, cast
@@ -152,7 +152,9 @@ def estimate_stats_quality(stats: list[MetricVolumeRow]) -> StatsQualityEstimati
return StatsQualityEstimation.NO_INDEXED_DATA
-def get_stats_generator(use_discover: bool, remove_on_demand: bool):
+def get_stats_generator(
+ use_discover: bool, remove_on_demand: bool
+) -> Callable[[Sequence[str], str, SnubaParams, int, bool, timedelta | None], SnubaTSResult]:
"""
Returns a get_stats function that can fetch from either metrics or discover and
with or without on_demand metrics.
@@ -164,7 +166,7 @@ def get_discover_stats(
snuba_params: SnubaParams,
rollup: int,
zerofill_results: bool, # not used but required by get_event_stats_data
- comparison_delta: datetime | None, # not used but required by get_event_stats_data
+ comparison_delta: timedelta | None, # not used but required by get_event_stats_data
) -> SnubaTSResult:
# use discover or metrics_performance depending on the dataset
if use_discover:
diff --git a/src/sentry/api/endpoints/relay/project_configs.py b/src/sentry/api/endpoints/relay/project_configs.py
index fda297e0228bb5..b1a92480dbcd2f 100644
--- a/src/sentry/api/endpoints/relay/project_configs.py
+++ b/src/sentry/api/endpoints/relay/project_configs.py
@@ -41,7 +41,7 @@ class RelayProjectConfigsEndpoint(Endpoint):
def post(self, request: Request):
relay = request.relay
assert relay is not None # should be provided during Authentication
- response = {}
+ response: dict[str, Any] = {}
if not relay.is_internal:
return Response("Relay unauthorized for config information", status=403)
diff --git a/src/sentry/api/endpoints/release_thresholds/release_threshold.py b/src/sentry/api/endpoints/release_thresholds/release_threshold.py
index 0b72c00cbfc0a0..8f514e7ef34111 100644
--- a/src/sentry/api/endpoints/release_thresholds/release_threshold.py
+++ b/src/sentry/api/endpoints/release_thresholds/release_threshold.py
@@ -1,3 +1,5 @@
+from typing import TypedDict
+
from django.http import HttpResponse
from rest_framework import serializers
from rest_framework.request import Request
@@ -19,19 +21,27 @@
from sentry.models.release_threshold.release_threshold import ReleaseThreshold
-class ReleaseThresholdPOSTSerializer(serializers.Serializer):
+class ReleaseThresholdPOSTData(TypedDict, total=False):
+ threshold_type: int
+ trigger_type: int
+ value: int
+ window_in_seconds: int
+ environment: object
+
+
+class ReleaseThresholdPOSTSerializer(serializers.Serializer[ReleaseThresholdPOSTData]):
threshold_type = serializers.ChoiceField(choices=ReleaseThresholdType.as_str_choices())
trigger_type = serializers.ChoiceField(choices=ReleaseThresholdTriggerType.as_str_choices())
value = serializers.IntegerField(required=True, min_value=0)
window_in_seconds = serializers.IntegerField(required=True, min_value=0)
environment = EnvironmentField(required=False, allow_null=True)
- def validate_threshold_type(self, threshold_type: str):
+ def validate_threshold_type(self, threshold_type: str) -> int:
if threshold_type not in THRESHOLD_TYPE_STR_TO_INT:
raise serializers.ValidationError("Invalid threshold type")
return THRESHOLD_TYPE_STR_TO_INT[threshold_type]
- def validate_trigger_type(self, trigger_type: str):
+ def validate_trigger_type(self, trigger_type: str) -> int:
if trigger_type not in TRIGGER_TYPE_STRING_TO_INT:
raise serializers.ValidationError("Invalid trigger type")
return TRIGGER_TYPE_STRING_TO_INT[trigger_type]
diff --git a/src/sentry/api/endpoints/release_thresholds/release_threshold_details.py b/src/sentry/api/endpoints/release_thresholds/release_threshold_details.py
index 6b6c0d066370cc..326df32b2b723b 100644
--- a/src/sentry/api/endpoints/release_thresholds/release_threshold_details.py
+++ b/src/sentry/api/endpoints/release_thresholds/release_threshold_details.py
@@ -1,5 +1,5 @@
import logging
-from typing import Any
+from typing import Any, TypedDict
from django.http import HttpResponse
from rest_framework import serializers
@@ -21,21 +21,29 @@
from sentry.models.release_threshold.constants import TriggerType as ReleaseThresholdTriggerType
from sentry.models.release_threshold.release_threshold import ReleaseThreshold
+
+class ReleaseThresholdPUTData(TypedDict):
+ threshold_type: int
+ trigger_type: int
+ value: int
+ window_in_seconds: int
+
+
logger = logging.getLogger("sentry.release_thresholds")
-class ReleaseThresholdPUTSerializer(serializers.Serializer):
+class ReleaseThresholdPUTSerializer(serializers.Serializer[ReleaseThresholdPUTData]):
threshold_type = serializers.ChoiceField(choices=ReleaseThresholdType.as_str_choices())
trigger_type = serializers.ChoiceField(choices=ReleaseThresholdTriggerType.as_str_choices())
value = serializers.IntegerField(required=True, min_value=0)
window_in_seconds = serializers.IntegerField(required=True, min_value=0)
- def validate_threshold_type(self, threshold_type: str):
+ def validate_threshold_type(self, threshold_type: str) -> int:
if threshold_type not in THRESHOLD_TYPE_STR_TO_INT:
raise serializers.ValidationError("Invalid threshold type")
return THRESHOLD_TYPE_STR_TO_INT[threshold_type]
- def validate_trigger_type(self, trigger_type: str):
+ def validate_trigger_type(self, trigger_type: str) -> int:
if trigger_type not in TRIGGER_TYPE_STRING_TO_INT:
raise serializers.ValidationError("Invalid trigger type")
return TRIGGER_TYPE_STRING_TO_INT[trigger_type]
@@ -54,9 +62,9 @@ class ReleaseThresholdDetailsEndpoint(ProjectEndpoint):
def convert_args(
self,
request: Request,
- *args,
- **kwargs,
- ) -> Any:
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
parsed_args, parsed_kwargs = super().convert_args(request, *args, **kwargs)
try:
parsed_kwargs["release_threshold"] = ReleaseThreshold.objects.get(
diff --git a/src/sentry/api/endpoints/release_thresholds/release_threshold_index.py b/src/sentry/api/endpoints/release_thresholds/release_threshold_index.py
index e8232642ac6f17..f83c25a5a91144 100644
--- a/src/sentry/api/endpoints/release_thresholds/release_threshold_index.py
+++ b/src/sentry/api/endpoints/release_thresholds/release_threshold_index.py
@@ -1,3 +1,5 @@
+from typing import TypedDict
+
from django.db.models import Q
from django.http import HttpResponse
from rest_framework import serializers
@@ -14,7 +16,12 @@
from sentry.models.release_threshold.release_threshold import ReleaseThreshold
-class ReleaseThresholdIndexGETValidator(serializers.Serializer):
+class ReleaseThresholdIndexGETData(TypedDict, total=False):
+ environment: list[str]
+ project: list[int]
+
+
+class ReleaseThresholdIndexGETValidator(serializers.Serializer[ReleaseThresholdIndexGETData]):
environment = serializers.ListField(
required=False, allow_empty=True, child=serializers.CharField()
)
diff --git a/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py b/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py
index d5ef59b1472a3d..70d8630ad9a749 100644
--- a/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py
+++ b/src/sentry/api/endpoints/release_thresholds/release_threshold_status_index.py
@@ -3,7 +3,7 @@
import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
-from typing import TYPE_CHECKING, DefaultDict
+from typing import TYPE_CHECKING, Any, DefaultDict, TypedDict
from django.db.models import F, Q
from django.http import HttpResponse
@@ -37,6 +37,7 @@
from sentry.models.release import Release
from sentry.models.release_threshold.constants import ReleaseThresholdType
from sentry.organizations.services.organization import RpcOrganization
+from sentry.release_health.base import SessionsQueryResult
from sentry.utils import metrics
logger = logging.getLogger("sentry.release_threshold_status")
@@ -49,7 +50,17 @@
from sentry.models.releaseprojectenvironment import ReleaseProjectEnvironment
-class ReleaseThresholdStatusIndexSerializer(serializers.Serializer):
+class ReleaseThresholdStatusIndexData(TypedDict, total=False):
+ start: datetime
+ end: datetime
+ environment: list[str]
+ projectSlug: list[str]
+ release: list[str]
+
+
+class ReleaseThresholdStatusIndexSerializer(
+ serializers.Serializer[ReleaseThresholdStatusIndexData]
+):
start = serializers.DateTimeField(
help_text="The start of the time series range as an explicit datetime, either in UTC ISO8601 or epoch seconds. "
"Use along with `end`.",
@@ -81,7 +92,7 @@ class ReleaseThresholdStatusIndexSerializer(serializers.Serializer):
help_text=("A list of release versions to filter your results by."),
)
- def validate(self, data):
+ def validate(self, data: ReleaseThresholdStatusIndexData) -> ReleaseThresholdStatusIndexData:
if data["start"] >= data["end"]:
raise serializers.ValidationError("Start datetime must be after End")
return data
@@ -195,7 +206,7 @@ def get(self, request: Request, organization: Organization | RpcOrganization) ->
# ========================================================================
# Step 3: flatten thresholds and compile projects/release-thresholds by type
# ========================================================================
- thresholds_by_type: DefaultDict[int, dict[str, list]] = defaultdict()
+ thresholds_by_type: DefaultDict[int, dict[str, list[Any]]] = defaultdict()
query_windows_by_type: DefaultDict[int, dict[str, datetime]] = defaultdict()
for release in queryset:
# TODO:
@@ -389,7 +400,7 @@ def get(self, request: Request, organization: Organization | RpcOrganization) ->
elif threshold_type == ReleaseThresholdType.CRASH_FREE_SESSION_RATE:
metrics.incr("release.threshold_health_status.check.crash_free_session_rate")
query_window = query_windows_by_type[threshold_type]
- sessions_data = {}
+ sessions_data: SessionsQueryResult | None = None
try:
sessions_data = fetch_sessions_data(
end=query_window["end"],
@@ -416,7 +427,7 @@ def get(self, request: Request, organization: Organization | RpcOrganization) ->
if sessions_data:
for ethreshold in category_thresholds:
is_healthy, rate = is_crash_free_rate_healthy_check(
- ethreshold, sessions_data, CRASH_SESSIONS_DISPLAY
+ ethreshold, dict(sessions_data), CRASH_SESSIONS_DISPLAY
)
ethreshold.update({"is_healthy": is_healthy, "metric_value": rate})
release_threshold_health[ethreshold["key"]].append(ethreshold)
diff --git a/src/sentry/api/endpoints/release_thresholds/utils/fetch_sessions_data.py b/src/sentry/api/endpoints/release_thresholds/utils/fetch_sessions_data.py
index 09380f7f242666..b41946136e8fc2 100644
--- a/src/sentry/api/endpoints/release_thresholds/utils/fetch_sessions_data.py
+++ b/src/sentry/api/endpoints/release_thresholds/utils/fetch_sessions_data.py
@@ -11,6 +11,7 @@
from sentry.api.utils import handle_query_errors
from sentry.models.organization import Organization
from sentry.organizations.services.organization.model import RpcOrganization
+from sentry.release_health.base import SessionsQueryResult
from sentry.snuba.sessions_v2 import QueryDefinition
@@ -21,7 +22,7 @@ def fetch_sessions_data(
end: datetime,
start: datetime,
field: str | None = "sum(session)", # alternatively count_unique(user)
-):
+) -> SessionsQueryResult:
"""
This implementation was derived from organization_sessions GET endpoint
NOTE: Params are derived from the request query and pulls the relevant project/environment objects
diff --git a/src/sentry/api/endpoints/release_thresholds/utils/get_new_issue_counts.py b/src/sentry/api/endpoints/release_thresholds/utils/get_new_issue_counts.py
index 074df359aa8acd..772f0ea69661c1 100644
--- a/src/sentry/api/endpoints/release_thresholds/utils/get_new_issue_counts.py
+++ b/src/sentry/api/endpoints/release_thresholds/utils/get_new_issue_counts.py
@@ -17,7 +17,7 @@ def get_new_issue_counts(
constructs a query for each threshold, filtering on project
NOTE: group messages are guaranteed to have a related groupenvironment
"""
- queryset: QuerySet | None = None
+ queryset: QuerySet[Group, dict[str, Any]] | None = None
for t in thresholds:
env: dict[str, Any] = t.get("environment") or {}
query = Q(
diff --git a/src/sentry/api/endpoints/system_options.py b/src/sentry/api/endpoints/system_options.py
index 934ddad054202b..b68189e344f8d5 100644
--- a/src/sentry/api/endpoints/system_options.py
+++ b/src/sentry/api/endpoints/system_options.py
@@ -84,7 +84,7 @@ def has_permission(self, request: Request) -> bool:
return True
- def put(self, request: Request):
+ def put(self, request: Request) -> Response:
if not self.has_permission(request):
return Response(status=403)
diff --git a/src/sentry/api/helpers/default_inbound_filters.py b/src/sentry/api/helpers/default_inbound_filters.py
index f867d683b1e94f..eb97f4be35a6d1 100644
--- a/src/sentry/api/helpers/default_inbound_filters.py
+++ b/src/sentry/api/helpers/default_inbound_filters.py
@@ -1,17 +1,21 @@
+from collections.abc import Sequence
+
from sentry.ingest import inbound_filters
+from sentry.models.organization import Organization
+from sentry.models.project import Project
# Turns on certain inbound filters by default for project.
def set_default_inbound_filters(
- project,
- organization,
- filters=(
+ project: Project,
+ organization: Organization,
+ filters: Sequence[str] = (
"browser-extensions",
"legacy-browsers",
"web-crawlers",
"filtered-transaction",
),
-):
+) -> None:
browser_subfilters = [
"ie",
diff --git a/src/sentry/api/helpers/default_symbol_sources.py b/src/sentry/api/helpers/default_symbol_sources.py
index 55430e5e0cf6bd..a0adf06a64083a 100644
--- a/src/sentry/api/helpers/default_symbol_sources.py
+++ b/src/sentry/api/helpers/default_symbol_sources.py
@@ -10,7 +10,7 @@
}
-def set_default_symbol_sources(project: Project | RpcProject):
+def set_default_symbol_sources(project: Project | RpcProject) -> None:
if project.platform and project.platform in DEFAULT_SYMBOL_SOURCES:
project.update_option(
"sentry:builtin_symbol_sources", DEFAULT_SYMBOL_SOURCES[project.platform]
diff --git a/src/sentry/api/helpers/group_index/__init__.py b/src/sentry/api/helpers/group_index/__init__.py
index 8eef633c0d40ab..cf19a6371caca8 100644
--- a/src/sentry/api/helpers/group_index/__init__.py
+++ b/src/sentry/api/helpers/group_index/__init__.py
@@ -1,6 +1,7 @@
from collections.abc import Callable, Mapping
from typing import Any
+from sentry.models.group import Group
from sentry.utils.cursors import CursorResult
"""TODO(mgaeta): This directory is incorrectly suffixed '_index'."""
@@ -16,7 +17,7 @@
# `sentry.api.paginator.BasePaginator.get_result`.
SEARCH_MAX_HITS = 1000
-SearchFunction = Callable[[Mapping[str, Any]], tuple[CursorResult, Mapping[str, Any]]]
+SearchFunction = Callable[[Mapping[str, Any]], tuple[CursorResult[Group], Mapping[str, Any]]]
__all__ = (
"ACTIVITIES_COUNT",
diff --git a/src/sentry/api/helpers/group_index/validators/group.py b/src/sentry/api/helpers/group_index/validators/group.py
index efa3a9563181b9..0be6d0ac397d84 100644
--- a/src/sentry/api/helpers/group_index/validators/group.py
+++ b/src/sentry/api/helpers/group_index/validators/group.py
@@ -7,7 +7,7 @@
from sentry.api.fields import ActorField
from sentry.api.helpers.group_index.validators.inbox_details import InboxDetailsValidator
from sentry.api.helpers.group_index.validators.status_details import StatusDetailsValidator
-from sentry.models.group import STATUS_UPDATE_CHOICES
+from sentry.models.group import STATUS_UPDATE_CHOICES, Group
from sentry.types.actor import Actor
from sentry.types.group import SUBSTATUS_UPDATE_CHOICES, PriorityLevel
@@ -23,7 +23,7 @@
"snoozeDuration",
]
)
-class GroupValidator(serializers.Serializer):
+class GroupValidator(serializers.Serializer[Group]):
inbox = serializers.BooleanField(
help_text="If true, marks the issue as reviewed by the requestor."
)
diff --git a/src/sentry/api/helpers/group_index/validators/in_commit.py b/src/sentry/api/helpers/group_index/validators/in_commit.py
index ef0bf6d0e3923b..bdb72a31645b95 100644
--- a/src/sentry/api/helpers/group_index/validators/in_commit.py
+++ b/src/sentry/api/helpers/group_index/validators/in_commit.py
@@ -13,7 +13,7 @@ class InCommitResult(TypedDict):
@extend_schema_serializer()
-class InCommitValidator(serializers.Serializer):
+class InCommitValidator(serializers.Serializer[InCommitResult]):
commit = serializers.CharField(required=True, help_text="The SHA of the resolving commit.")
repository = serializers.CharField(
required=True, help_text="The name of the repository (as it appears in Sentry)."
diff --git a/src/sentry/api/helpers/group_index/validators/inbox_details.py b/src/sentry/api/helpers/group_index/validators/inbox_details.py
index 8447e2722bd2ad..bb9f28a8c7bd45 100644
--- a/src/sentry/api/helpers/group_index/validators/inbox_details.py
+++ b/src/sentry/api/helpers/group_index/validators/inbox_details.py
@@ -1,6 +1,8 @@
+from typing import Never
+
from rest_framework import serializers
-class InboxDetailsValidator(serializers.Serializer):
+class InboxDetailsValidator(serializers.Serializer[Never]):
# Support undo / snooze reasons
pass
diff --git a/src/sentry/api/helpers/group_index/validators/status_details.py b/src/sentry/api/helpers/group_index/validators/status_details.py
index d626efd32285fd..eaab67aa1a7dc9 100644
--- a/src/sentry/api/helpers/group_index/validators/status_details.py
+++ b/src/sentry/api/helpers/group_index/validators/status_details.py
@@ -21,7 +21,7 @@ class StatusDetailsResult(TypedDict):
@extend_schema_serializer()
-class StatusDetailsValidator(serializers.Serializer):
+class StatusDetailsValidator(serializers.Serializer[StatusDetailsResult]):
inNextRelease = serializers.BooleanField(
help_text="If true, marks the issue as resolved in the next release."
)
diff --git a/src/sentry/api/helpers/releases.py b/src/sentry/api/helpers/releases.py
index 371a6f10190582..d11688c8497d18 100644
--- a/src/sentry/api/helpers/releases.py
+++ b/src/sentry/api/helpers/releases.py
@@ -1,11 +1,15 @@
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.models.grouplink import GroupLink
from sentry.models.groupresolution import GroupResolution
+from sentry.models.organization import Organization
from sentry.models.release import Release
from sentry.models.releasecommit import ReleaseCommit
+from sentry.organizations.services.organization import RpcOrganization
-def get_group_ids_resolved_in_release(organization, version):
+def get_group_ids_resolved_in_release(
+ organization: Organization | RpcOrganization, version: str
+) -> set[int]:
try:
release = Release.objects.get(version=version, organization=organization)
except Release.DoesNotExist:
diff --git a/src/sentry/api/helpers/slugs.py b/src/sentry/api/helpers/slugs.py
index a20dd9a002db4b..e05eea523b8cc1 100644
--- a/src/sentry/api/helpers/slugs.py
+++ b/src/sentry/api/helpers/slugs.py
@@ -20,7 +20,7 @@ def validate_sentry_slug(slug: str) -> None:
validator(slug)
-def sentry_slugify(slug: str, allow_unicode=False) -> str:
+def sentry_slugify(slug: str, allow_unicode: bool = False) -> str:
"""
Slugify a string using Django's built-in slugify function. Ensures that the
slug is not entirely numeric by adding 3 letter suffix if necessary.
diff --git a/src/sentry/api/helpers/teams.py b/src/sentry/api/helpers/teams.py
index 582519f7d539bf..210f095f8d8c21 100644
--- a/src/sentry/api/helpers/teams.py
+++ b/src/sentry/api/helpers/teams.py
@@ -1,10 +1,18 @@
+from __future__ import annotations
+
+from collections.abc import Iterable
+
+from django.db.models.query import QuerySet
from rest_framework.exceptions import PermissionDenied
+from rest_framework.request import Request
from sentry.auth.superuser import is_active_superuser
from sentry.exceptions import InvalidParams
+from sentry.models.organization import Organization
from sentry.models.organizationmember import OrganizationMember
from sentry.models.organizationmemberteam import OrganizationMemberTeam
from sentry.models.team import Team, TeamStatus
+from sentry.organizations.services.organization.model import RpcOrganization
def is_team_admin(org_member: OrganizationMember, team: Team | None = None) -> bool:
@@ -18,7 +26,11 @@ def is_team_admin(org_member: OrganizationMember, team: Team | None = None) -> b
return omt.exists()
-def get_teams(request, organization, teams=None):
+def get_teams(
+ request: Request,
+ organization: Organization | RpcOrganization,
+ teams: Iterable[int | str] | None = None,
+) -> QuerySet[Team]:
# do normal teams lookup based on request params
requested_teams = set(request.GET.getlist("team", []) if teams is None else teams)
diff --git a/src/sentry/api/helpers/user_reports.py b/src/sentry/api/helpers/user_reports.py
index d2620cb52926b1..2ee5fb44cf03ca 100644
--- a/src/sentry/api/helpers/user_reports.py
+++ b/src/sentry/api/helpers/user_reports.py
@@ -1,7 +1,10 @@
+from collections.abc import Sequence
+
from sentry.models.group import Group, GroupStatus
+from sentry.models.userreport import UserReport
-def user_reports_filter_to_unresolved(user_reports):
+def user_reports_filter_to_unresolved(user_reports: Sequence[UserReport]) -> list[UserReport]:
group_ids = {ur.group_id for ur in user_reports if ur.group_id}
unresolved_group_ids = set()
if group_ids:
diff --git a/src/sentry/apidocs/examples/project_examples.py b/src/sentry/apidocs/examples/project_examples.py
index d497a0ba882b5c..bfb38def8e6e2f 100644
--- a/src/sentry/apidocs/examples/project_examples.py
+++ b/src/sentry/apidocs/examples/project_examples.py
@@ -1,3 +1,5 @@
+from typing import Any
+
from drf_spectacular.utils import OpenApiExample
KEY_RATE_LIMIT = {
@@ -375,7 +377,7 @@
]
-def project_with_team(extra_team: bool = False):
+def project_with_team(extra_team: bool = False) -> dict[str, Any]:
teams = [
{
"id": "2349234102",
diff --git a/src/sentry/audit_log/events.py b/src/sentry/audit_log/events.py
index 58a8e21bb71d8a..0ac39f9be9867f 100644
--- a/src/sentry/audit_log/events.py
+++ b/src/sentry/audit_log/events.py
@@ -23,10 +23,10 @@ def _get_member_display(email: str | None, target_user: User | None) -> str:
class MemberAddAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=2, name="MEMBER_ADD", api_name="member.add")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.target_user == audit_log_entry.actor:
return "joined the organization"
@@ -35,10 +35,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class MemberEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=4, name="MEMBER_EDIT", api_name="member.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
member = _get_member_display(audit_log_entry.data.get("email"), audit_log_entry.target_user)
role = audit_log_entry.data.get("role") or "N/A"
@@ -50,10 +50,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class MemberRemoveAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=5, name="MEMBER_REMOVE", api_name="member.remove")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.target_user == audit_log_entry.actor:
return "left the organization"
@@ -62,10 +62,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class MemberJoinTeamAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=6, name="MEMBER_JOIN_TEAM", api_name="member.join-team")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.target_user == audit_log_entry.actor:
return "joined team {team_slug}".format(**audit_log_entry.data)
@@ -76,10 +76,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class MemberLeaveTeamAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=7, name="MEMBER_LEAVE_TEAM", api_name="member.leave-team")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.target_user == audit_log_entry.actor:
return "left team {team_slug}".format(**audit_log_entry.data)
@@ -90,10 +90,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class MemberPendingAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=8, name="MEMBER_PENDING", api_name="member.pending")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
user_display_name = _get_member_display(
audit_log_entry.data.get("email"), audit_log_entry.target_user
)
@@ -101,39 +101,39 @@ def render(self, audit_log_entry: AuditLogEntry):
class OrgAddAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=10, name="ORG_ADD", api_name="org.create")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if channel := audit_log_entry.data.get("channel"):
return f"created the organization with {channel} integration"
return "created the organization"
class OrgEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=11, name="ORG_EDIT", api_name="org.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
items_string = ", ".join(f"{k} {v}" for k, v in audit_log_entry.data.items())
return "edited the organization setting: " + items_string
class TeamEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=21, name="TEAM_EDIT", api_name="team.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
slug = audit_log_entry.data["slug"]
return f"edited team {slug}"
class ProjectEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=31, name="PROJECT_EDIT", api_name="project.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if "old_slug" in audit_log_entry.data:
return "renamed project slug from {old_slug} to {new_slug}".format(
**audit_log_entry.data
@@ -145,10 +145,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class ProjectKeyEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=51, name="PROJECTKEY_EDIT", api_name="projectkey.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
items_strings = []
if "prev_rate_limit_count" in audit_log_entry.data:
items_strings.append(
@@ -171,14 +171,14 @@ def render(self, audit_log_entry: AuditLogEntry):
class ProjectPerformanceDetectionSettingsAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(
event_id=178,
name="PROJECT_PERFORMANCE_ISSUE_DETECTION_CHANGE",
api_name="project.change-performance-issue-detection",
)
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
from sentry.issues.endpoints.project_performance_issue_settings import (
project_settings_to_group_map as map,
)
@@ -209,91 +209,91 @@ def render_project_action(audit_log_entry: AuditLogEntry, action: str):
class ProjectEnableAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=37, name="PROJECT_ENABLE", api_name="project.enable")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
return render_project_action(audit_log_entry, "enable")
class ProjectDisableAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=38, name="PROJECT_DISABLE", api_name="project.disable")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
return render_project_action(audit_log_entry, "disable")
class ProjectOwnershipRuleEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(
event_id=179, name="PROJECT_OWNERSHIPRULE_EDIT", api_name="project.ownership-rule.edit"
)
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
return "modified ownership rules"
class SSOEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=62, name="SSO_EDIT", api_name="sso.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
settings = ", ".join(f"{k} {v}" for k, v in audit_log_entry.data.items())
return "edited sso settings: " + settings
class ServiceHookAddAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=100, name="SERVICEHOOK_ADD", api_name="servicehook.create")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
full_url = audit_log_entry.data.get("url")
return f'added a service hook for "{truncatechars(full_url, 64)}"'
class ServiceHookEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=101, name="SERVICEHOOK_EDIT", api_name="servicehook.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
full_url = audit_log_entry.data.get("url")
return f'edited the service hook for "{truncatechars(full_url, 64)}"'
class ServiceHookRemoveAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=102, name="SERVICEHOOK_REMOVE", api_name="servicehook.remove")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
full_url = audit_log_entry.data.get("url")
return f'removed the service hook for "{truncatechars(full_url, 64)}"'
class IntegrationDisabledAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=108, name="INTEGRATION_DISABLED", api_name="integration.disable")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
provider = audit_log_entry.data.get("provider") or ""
return f"disabled {provider} integration".format(**audit_log_entry.data)
class IntegrationUpgradeAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=109, name="INTEGRATION_UPGRADE", api_name="integration.upgrade")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.data.get("provider"):
return "upgraded {name} for the {provider} integration".format(**audit_log_entry.data)
return "updated an integration"
class IntegrationAddAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=110, name="INTEGRATION_ADD", api_name="integration.add")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.data.get("provider"):
return "installed {name} for the {provider} integration".format(**audit_log_entry.data)
return "enabled integration {integration} for project {project}".format(
@@ -302,10 +302,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class IntegrationEditAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=111, name="INTEGRATION_EDIT", api_name="integration.edit")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.data.get("provider"):
return "edited the {name} for the {provider} integration".format(**audit_log_entry.data)
return "edited integration {integration} for project {project}".format(
@@ -314,10 +314,10 @@ def render(self, audit_log_entry: AuditLogEntry):
class IntegrationRemoveAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(event_id=112, name="INTEGRATION_REMOVE", api_name="integration.remove")
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if audit_log_entry.data.get("provider"):
return "uninstalled {name} for the {provider} integration".format(
**audit_log_entry.data
@@ -328,38 +328,38 @@ def render(self, audit_log_entry: AuditLogEntry):
class InternalIntegrationAddAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(
event_id=130, name="INTERNAL_INTEGRATION_ADD", api_name="internal-integration.create"
)
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
integration_name = audit_log_entry.data.get("name") or ""
return f"created internal integration {integration_name}"
class InternalIntegrationDisabledAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(
event_id=131,
name="INTERNAL_INTEGRATION_DISABLED",
api_name="internal-integration.disable",
)
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
integration_name = audit_log_entry.data.get("name") or ""
return f"disabled internal integration {integration_name}".format(**audit_log_entry.data)
class MonitorAddAuditLogEvent(AuditLogEvent):
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(
event_id=120,
name="MONITOR_ADD",
api_name="monitor.add",
)
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
entry_data = audit_log_entry.data
name = entry_data.get("name")
upsert = entry_data.get("upsert")
diff --git a/src/sentry/audit_log/manager.py b/src/sentry/audit_log/manager.py
index 570c0aa350635d..fa2e0fac0b99f1 100644
--- a/src/sentry/audit_log/manager.py
+++ b/src/sentry/audit_log/manager.py
@@ -73,7 +73,7 @@ def __init__(self, event_id, name, api_name, template=None):
self.api_name = api_name
self.template = template
- def render(self, audit_log_entry: AuditLogEntry):
+ def render(self, audit_log_entry: AuditLogEntry) -> str:
if not self.template:
return ""
return self.template.format(**audit_log_entry.data)
diff --git a/src/sentry/auth/authenticators/base.py b/src/sentry/auth/authenticators/base.py
index 71d4ff00f39ee3..30efd2336e8afb 100644
--- a/src/sentry/auth/authenticators/base.py
+++ b/src/sentry/auth/authenticators/base.py
@@ -59,18 +59,18 @@ class NewEnrollmentDisallowed(Exception):
class AuthenticatorInterface:
- type = -1
+ type: int = -1
interface_id: str
name: str | _StrPromise
description: str | _StrPromise
rotation_warning: str | _StrPromise | None = None
- is_backup_interface = False
- enroll_button = _("Enroll")
- configure_button = _("Info")
+ is_backup_interface: bool = False
+ enroll_button: str | _StrPromise = _("Enroll")
+ configure_button: str | _StrPromise = _("Info")
remove_button: str | _StrPromise | None = _("Remove 2FA method")
- is_available = True
- allow_multi_enrollment = False
- allow_rotation_in_place = False
+ is_available: bool = True
+ allow_multi_enrollment: bool = False
+ allow_rotation_in_place: bool = False
authenticator: Authenticator | None
status: EnrollmentStatus
_unbound_config: dict[Any, Any]
@@ -190,7 +190,9 @@ def validate_otp(self, otp: str) -> bool:
"""
return False
- def validate_response(self, request: Request, challenge, response) -> bool:
+ def validate_response(
+ self, request: Request, challenge: bytes | None, response: dict[str, Any]
+ ) -> bool:
"""If the activation generates a challenge that needs to be
responded to this validates the response for that challenge. This
is only ever called for challenges emitted by the activation of this
diff --git a/src/sentry/auth/authenticators/sms.py b/src/sentry/auth/authenticators/sms.py
index db937fc2247c85..afcef5cd501433 100644
--- a/src/sentry/auth/authenticators/sms.py
+++ b/src/sentry/auth/authenticators/sms.py
@@ -2,7 +2,7 @@
import logging
from hashlib import md5
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
from django.http.request import HttpRequest
from django.utils.functional import classproperty
@@ -21,7 +21,7 @@
class SMSRateLimitExceeded(Exception):
- def __init__(self, phone_number: str, user_id: int | None, remote_ip) -> None:
+ def __init__(self, phone_number: str, user_id: int | None, remote_ip: str | None) -> None:
super().__init__()
self.phone_number = phone_number
self.user_id = user_id
@@ -43,10 +43,10 @@ class SmsInterface(OtpMixin):
code_ttl = 45
@classproperty
- def is_available(cls):
+ def is_available(cls) -> bool:
return sms_available()
- def generate_new_config(self):
+ def generate_new_config(self) -> dict[str, Any]:
config = super().generate_new_config()
config["phone_number"] = None
return config
@@ -55,11 +55,11 @@ def make_otp(self) -> TOTP:
return TOTP(self.config["secret"], digits=6, interval=self.code_ttl, default_window=1)
@property
- def phone_number(self):
+ def phone_number(self) -> str:
return self.config["phone_number"]
@phone_number.setter
- def phone_number(self, value):
+ def phone_number(self, value: str) -> None:
self.config["phone_number"] = value
def activate(self, request: HttpRequest) -> ActivationMessageResult:
diff --git a/src/sentry/auth/authenticators/u2f.py b/src/sentry/auth/authenticators/u2f.py
index ecb010b58cd24e..4208ba3e0268a0 100644
--- a/src/sentry/auth/authenticators/u2f.py
+++ b/src/sentry/auth/authenticators/u2f.py
@@ -3,7 +3,7 @@
from base64 import urlsafe_b64encode
from functools import cached_property
from time import time
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
from urllib.parse import urlparse
import orjson
@@ -28,14 +28,15 @@
from .base import ActivationChallengeResult, AuthenticatorInterface
if TYPE_CHECKING:
+ from sentry.users.models.authenticator import Authenticator
from sentry.users.models.user import User
-def decode_credential_id(device) -> str:
+def decode_credential_id(device: dict[str, Any]) -> str:
return urlsafe_b64encode(device["binding"].credential_data.credential_id).decode("ascii")
-def create_credential_object(registeredKey: dict[str, str]) -> base:
+def create_credential_object(registeredKey: dict[str, str]) -> base.AttestedCredentialData:
return base.AttestedCredentialData.from_ctap1(
websafe_decode(registeredKey["keyHandle"]),
websafe_decode(registeredKey["publicKey"]),
@@ -71,7 +72,9 @@ def webauthn_registration_server(self) -> Fido2Server:
return Fido2Server(self.rp)
def __init__(
- self, authenticator=None, status: EnrollmentStatus = EnrollmentStatus.EXISTING
+ self,
+ authenticator: Authenticator | None = None,
+ status: EnrollmentStatus = EnrollmentStatus.EXISTING,
) -> None:
super().__init__(authenticator, status)
@@ -80,24 +83,24 @@ def __init__(
)
@classproperty
- def u2f_app_id(cls):
+ def u2f_app_id(cls) -> str:
rv = options.get("u2f.app-id")
return rv or absolute_uri(reverse("sentry-u2f-app-id"))
@classproperty
- def u2f_facets(cls):
+ def u2f_facets(cls) -> list[str]:
facets = options.get("u2f.facets")
if not facets:
return [_get_url_prefix()]
return [x.rstrip("/") for x in facets]
@classproperty
- def is_available(cls):
+ def is_available(cls) -> bool:
url_prefix = _get_url_prefix()
- return url_prefix and url_prefix.startswith("https://")
+ return bool(url_prefix) and url_prefix.startswith("https://")
- def _get_kept_devices(self, key: str):
- def _key_does_not_match(device):
+ def _get_kept_devices(self, key: str) -> list[dict[str, Any]]:
+ def _key_does_not_match(device: dict[str, Any]) -> bool:
if isinstance(device["binding"], AuthenticatorData):
return decode_credential_id(device) != key
else:
@@ -105,7 +108,7 @@ def _key_does_not_match(device):
return [device for device in self.config.get("devices", ()) if _key_does_not_match(device)]
- def generate_new_config(self):
+ def generate_new_config(self) -> dict[str, Any]:
return {}
def start_enrollment(self, user: User) -> tuple[cbor, Fido2Server]:
@@ -123,7 +126,7 @@ def start_enrollment(self, user: User) -> tuple[cbor, Fido2Server]:
)
return cbor.encode(registration_data), state
- def get_u2f_devices(self):
+ def get_u2f_devices(self) -> list[AuthenticatorData | DeviceRegistration]:
rv = []
for data in self.config.get("devices", ()):
# XXX: The previous version of python-u2flib-server didn't store
@@ -136,7 +139,7 @@ def get_u2f_devices(self):
rv.append(DeviceRegistration(data["binding"]))
return rv
- def credentials(self):
+ def credentials(self) -> list[base.AttestedCredentialData]:
credentials = []
# there are 2 types of registered keys from the registered devices, those with type
# AuthenticatorData are those from WebAuthn registered devices that we don't have to modify
@@ -159,15 +162,16 @@ def remove_u2f_device(self, key: str) -> bool:
return True
return False
- def get_device_name(self, key: str):
+ def get_device_name(self, key: str) -> str | None:
for device in self.config.get("devices", ()):
if isinstance(device["binding"], AuthenticatorData):
if decode_credential_id(device) == key:
return device["name"]
elif device["binding"]["keyHandle"] == key:
return device["name"]
+ return None
- def get_registered_devices(self):
+ def get_registered_devices(self) -> list[dict[str, Any]]:
rv = []
for device in self.config.get("devices", ()):
if isinstance(device["binding"], AuthenticatorData):
@@ -192,7 +196,11 @@ def get_registered_devices(self):
return rv
def try_enroll(
- self, enrollment_data: str, response_data: str, device_name=None, state=None
+ self,
+ enrollment_data: str,
+ response_data: str,
+ device_name: str | None = None,
+ state: dict[str, Any] | None = None,
) -> None:
data = orjson.loads(response_data)
client_data = ClientData(websafe_decode(data["response"]["clientDataJSON"]))
@@ -211,7 +219,9 @@ def activate(self, request: HttpRequest) -> ActivationChallengeResult:
request.session["webauthn_authentication_state"] = state
return ActivationChallengeResult(challenge=cbor.encode(challenge["publicKey"]))
- def validate_response(self, request: HttpRequest, challenge, response) -> bool:
+ def validate_response(
+ self, request: HttpRequest, challenge: bytes | None, response: dict[str, Any]
+ ) -> bool:
try:
credentials = self.credentials()
self.webauthn_authentication_server.authenticate_complete(
diff --git a/src/sentry/auth/elevated_mode.py b/src/sentry/auth/elevated_mode.py
index 06252992351d70..188a5f5524e06d 100644
--- a/src/sentry/auth/elevated_mode.py
+++ b/src/sentry/auth/elevated_mode.py
@@ -1,8 +1,14 @@
from abc import ABC, abstractmethod
+from datetime import datetime
from enum import Enum
+from typing import Any
+from django.contrib.auth.models import AnonymousUser
+from django.http import HttpResponse
from django.http.request import HttpRequest
+from sentry.users.models.user import User
+
class InactiveReason(str, Enum):
INVALID_IP = "invalid-ip"
@@ -28,7 +34,7 @@ def is_privileged_request(self) -> tuple[bool, InactiveReason]:
pass
@abstractmethod
- def get_session_data(self, current_datetime=None):
+ def get_session_data(self, current_datetime: datetime | None = None) -> dict[str, Any] | None:
pass
@abstractmethod
@@ -36,7 +42,7 @@ def _populate(self) -> None:
pass
@abstractmethod
- def set_logged_in(self, user, current_datetime=None) -> None:
+ def set_logged_in(self, user: User, current_datetime: datetime | None = None) -> None:
pass
@abstractmethod
@@ -44,7 +50,7 @@ def set_logged_out(self) -> None:
pass
@abstractmethod
- def on_response(cls, response) -> None:
+ def on_response(cls, response: HttpResponse) -> None:
pass
@@ -58,6 +64,9 @@ def has_elevated_mode(request: HttpRequest) -> bool:
from sentry.auth.staff import has_staff_option, is_active_staff
from sentry.auth.superuser import is_active_superuser
+ if isinstance(request.user, AnonymousUser):
+ return False
+
if has_staff_option(request.user):
return is_active_staff(request)
diff --git a/src/sentry/auth/password_validation.py b/src/sentry/auth/password_validation.py
index 7fd2fe1013ad24..0f4694220c4cde 100644
--- a/src/sentry/auth/password_validation.py
+++ b/src/sentry/auth/password_validation.py
@@ -1,5 +1,6 @@
import logging
from hashlib import sha1
+from typing import Any
import requests
from django.conf import settings
@@ -9,16 +10,17 @@
from django.utils.translation import ngettext
from sentry import options
+from sentry.users.models.user import User
from sentry.utils.imports import import_string
logger = logging.getLogger(__name__)
-def get_default_password_validators():
+def get_default_password_validators() -> list[Any]:
return get_password_validators(settings.AUTH_PASSWORD_VALIDATORS)
-def get_password_validators(validator_config):
+def get_password_validators(validator_config: list[dict[str, Any]]) -> list[Any]:
validators = []
for validator in validator_config:
try:
@@ -31,7 +33,9 @@ def get_password_validators(validator_config):
return validators
-def validate_password(password, user=None, password_validators=None) -> None:
+def validate_password(
+ password: str, user: User | None = None, password_validators: list[Any] | None = None
+) -> None:
"""
Validate whether the password meets all validator requirements.
@@ -50,7 +54,7 @@ def validate_password(password, user=None, password_validators=None) -> None:
raise ValidationError(errors)
-def password_validators_help_texts(password_validators=None):
+def password_validators_help_texts(password_validators: list[Any] | None = None) -> list[str]:
"""
Return a list of all help texts of all configured validators.
"""
@@ -62,7 +66,7 @@ def password_validators_help_texts(password_validators=None):
return help_texts
-def _password_validators_help_text_html(password_validators=None) -> str:
+def _password_validators_help_text_html(password_validators: list[Any] | None = None) -> str:
"""
Return an HTML string with all help texts of all configured validators
in an <ul>.
@@ -83,7 +87,7 @@ class MaximumLengthValidator:
def __init__(self, max_length: int = 256) -> None:
self.max_length = max_length
- def validate(self, password: str, user=None) -> None:
+ def validate(self, password: str, user: User | None = None) -> None:
if len(password) > self.max_length:
raise ValidationError(
ngettext(
@@ -112,7 +116,7 @@ def __init__(self, threshold: int = 1, timeout: float = 0.200) -> None:
self.threshold = threshold
self.timeout = timeout
- def validate(self, password, user=None) -> None:
+ def validate(self, password: str, user: User | None = None) -> None:
digest = sha1(password.encode("utf-8")).hexdigest().upper()
prefix = digest[:5]
suffix = digest[5:]
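
The annotations above make the validator interface explicit: each entry configured in AUTH_PASSWORD_VALIDATORS is an object exposing validate(password, user=None). A minimal sketch of a custom validator written against that typed interface (the class name and threshold are illustrative, not part of this patch), assuming the usual Django convention of also providing get_help_text():

from django.core.exceptions import ValidationError

from sentry.users.models.user import User


class MinimumDigitsValidator:
    """Illustrative validator: require at least `min_digits` digits in the password."""

    def __init__(self, min_digits: int = 1) -> None:
        self.min_digits = min_digits

    def validate(self, password: str, user: User | None = None) -> None:
        # Mirrors the typed signature used by MaximumLengthValidator above.
        if sum(c.isdigit() for c in password) < self.min_digits:
            raise ValidationError(f"Password must contain at least {self.min_digits} digit(s).")

    def get_help_text(self) -> str:
        return f"Your password must contain at least {self.min_digits} digit(s)."

Such a class type-checks as an element of the list[Any] accepted by validate_password; a Protocol could tighten that further, but the patch keeps Any.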
diff --git a/src/sentry/auth/provider.py b/src/sentry/auth/provider.py
index 96724ac8346348..84cf287b3de44b 100644
--- a/src/sentry/auth/provider.py
+++ b/src/sentry/auth/provider.py
@@ -122,7 +122,7 @@ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
raise NotImplementedError
def update_identity(
- self, new_data: Mapping[str, Any], current_data: Mapping[str, Any]
+ self, new_data: dict[str, Any], current_data: Mapping[str, Any]
) -> Mapping[str, Any]:
"""
When re-authenticating with a provider, the identity data may need to
diff --git a/src/sentry/auth/providers/dummy.py b/src/sentry/auth/providers/dummy.py
index b01e08ce5ad878..15b2292838e013 100644
--- a/src/sentry/auth/providers/dummy.py
+++ b/src/sentry/auth/providers/dummy.py
@@ -1,7 +1,10 @@
-from collections.abc import Sequence
+from collections.abc import Mapping, Sequence
+from typing import Any
-from django.http import HttpRequest, HttpResponse
+from django.http import HttpRequest
+from django.http.response import HttpResponse, HttpResponseBase
+from sentry.auth.helper import AuthHelper
from sentry.auth.provider import MigratingIdentityId, Provider
from sentry.auth.providers.saml2.provider import Attributes, SAML2Provider
from sentry.auth.view import AuthView
@@ -11,7 +14,7 @@
class AskEmail(AuthView):
- def dispatch(self, request: HttpRequest, pipeline) -> HttpResponse:
+ def dispatch(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
if "email" in request.POST:
if "id" in request.POST:
pipeline.bind_state("id", request.POST.get("id"))
@@ -30,7 +33,7 @@ class DummyProvider(Provider):
def get_auth_pipeline(self) -> Sequence[AuthView]:
return [AskEmail()]
- def build_identity(self, state):
+ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
return {
"id": MigratingIdentityId(
id=state.get("id", state["email"]), legacy_id=state.get("legacy_email")
@@ -43,7 +46,7 @@ def build_identity(self, state):
def refresh_identity(self, auth_identity: AuthIdentity) -> None:
pass
- def build_config(self, state):
+ def build_config(self, state: Mapping[str, Any]) -> dict[str, Any]:
return {}
@@ -70,5 +73,5 @@ class DummySAML2Provider(SAML2Provider):
def get_saml_setup_pipeline(self) -> list[AuthView]:
return []
- def build_config(self, state):
+ def build_config(self, state: Mapping[str, Any]) -> dict[str, Any]:
return dummy_provider_config
diff --git a/src/sentry/auth/providers/fly/client.py b/src/sentry/auth/providers/fly/client.py
index a2fcd29957b46c..9d8c423aea3ac0 100644
--- a/src/sentry/auth/providers/fly/client.py
+++ b/src/sentry/auth/providers/fly/client.py
@@ -1,5 +1,8 @@
from __future__ import annotations
+from types import TracebackType
+from typing import Any
+
import orjson
from requests.exceptions import RequestException
@@ -9,7 +12,7 @@
class FlyApiError(Exception):
- def __init__(self, message="", status=0) -> None:
+ def __init__(self, message: str | bytes = "", status: int = 0) -> None:
super().__init__(message)
self.status = status
@@ -22,10 +25,15 @@ def __init__(self, access_token: str) -> None:
def __enter__(self) -> FlyClient:
return self
- def __exit__(self, exc_type, exc_value, traceback) -> None:
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
self.http.close()
- def _request(self, path: str):
+ def _request(self, path: str) -> dict[str, Any]:
headers = {"Authorization": f"Bearer {self.access_token}"}
req_url = f"{ACCESS_TOKEN_URL}/{path.lstrip('/')}"
try:
@@ -39,7 +47,7 @@ def _request(self, path: str):
raise FlyApiError(req.content, status=req.status_code)
return orjson.loads(req.content)
- def get_info(self):
+ def get_info(self) -> dict[str, Any]:
"""
Use access token to issue an inline request to the token introspection endpoint.
The response gives you enough information, for example, to authorize the user
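
The __exit__ signature above is the standard context-manager protocol spelled out with TracebackType; a self-contained sketch of the same shape (hypothetical class, not taken from the patch):

from __future__ import annotations

from types import TracebackType

import requests


class ApiSession:
    """Illustrative context manager with the same typed __exit__ as FlyClient."""

    def __init__(self) -> None:
        self.http = requests.Session()

    def __enter__(self) -> ApiSession:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Always release the underlying session, whether or not an exception was raised.
        self.http.close()


with ApiSession() as session:
    pass  # use session.http here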
diff --git a/src/sentry/auth/providers/fly/provider.py b/src/sentry/auth/providers/fly/provider.py
index c7d3b0f8efc4ff..204310f6d86470 100644
--- a/src/sentry/auth/providers/fly/provider.py
+++ b/src/sentry/auth/providers/fly/provider.py
@@ -1,6 +1,7 @@
from __future__ import annotations
-from collections.abc import Callable
+from collections.abc import Callable, Mapping
+from typing import Any
from django.http.request import HttpRequest
@@ -23,7 +24,7 @@ class FlyOAuth2Provider(OAuth2Provider):
access_token_url = ACCESS_TOKEN_URL
authorize_url = AUTHORIZE_URL
- def __init__(self, org=None, **config) -> None:
+ def __init__(self, org: RpcOrganization | None = None, **config: Any) -> None:
self.org = org
super().__init__(**config)
@@ -35,7 +36,9 @@ def get_client_secret(self) -> str:
def get_configure_view(
self,
- ) -> Callable[[HttpRequest, RpcOrganization, RpcAuthProvider], DeferredResponse]:
+ ) -> Callable[
+ [HttpRequest, RpcOrganization | dict[str, Any], RpcAuthProvider], DeferredResponse
+ ]:
# Utilized from organization_auth_settings.py when configuring the app
# Injected into the configuration form
return fly_configure_view
@@ -55,7 +58,7 @@ def get_refresh_token_url(self) -> str:
return ACCESS_TOKEN_URL
@classmethod
- def build_config(self, resource):
+ def build_config(self, resource: dict[str, Any]) -> dict[str, dict[str, Any]]:
"""
On configuration, we determine which provider organization to configure sentry SSO for.
This configuration is then stored and passed into the pipeline instances during SSO
@@ -63,7 +66,7 @@ def build_config(self, resource):
"""
return {"org": {"id": resource.get("id")}}
- def build_identity(self, state):
+ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
"""
ex Response:
{
diff --git a/src/sentry/auth/providers/fly/views.py b/src/sentry/auth/providers/fly/views.py
index cf5229c2317405..d5642d80dfd57e 100644
--- a/src/sentry/auth/providers/fly/views.py
+++ b/src/sentry/auth/providers/fly/views.py
@@ -1,8 +1,10 @@
from __future__ import annotations
import logging
+from typing import TYPE_CHECKING, Any
-from django.http import HttpRequest, HttpResponse
+from django.http import HttpRequest
+from django.http.response import HttpResponseBase
from sentry.auth.providers.oauth2 import OAuth2Login
from sentry.auth.services.auth.model import RpcAuthProvider
@@ -13,6 +15,9 @@
from .client import FlyClient
from .constants import AUTHORIZE_URL, ERR_NO_ORG_ACCESS, SCOPE
+if TYPE_CHECKING:
+ from sentry.auth.helper import AuthHelper
+
logger = logging.getLogger("sentry.auth.fly")
@@ -25,15 +30,19 @@ def __init__(self, client_id: str) -> None:
class FetchUser(AuthView):
- def __init__(self, org=None, *args, **kwargs) -> None:
+ def __init__(
+ self, org: RpcOrganization | dict[str, Any] | None = None, *args: Any, **kwargs: Any
+ ) -> None:
"""
NOTE: org/args are configured via provider `build_config` method and provided at SSO time
"""
self.org = org
super().__init__(*args, **kwargs)
- def handle(self, request: HttpRequest, pipeline) -> HttpResponse:
- with FlyClient(pipeline.fetch_state("data")["access_token"]) as client:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
+ data: dict[str, Any] | None = pipeline.fetch_state("data")
+ assert data is not None
+ with FlyClient(data["access_token"]) as client:
"""
Utilize the access token to make final request to token introspection endpoint
pipeline.fetch_state -> base pipeline _fetch_state
@@ -43,7 +52,8 @@ def handle(self, request: HttpRequest, pipeline) -> HttpResponse:
info = client.get_info()
if self.org is not None:
user_orgs = info.get("organizations", {})
- if self.org["id"] not in [org["id"] for org in user_orgs]:
+ org_id = self.org["id"] if isinstance(self.org, dict) else self.org.id
+ if org_id not in [org["id"] for org in user_orgs]:
logger.warning(
"SSO attempt no org access", extra={"org": self.org, "user_orgs": user_orgs}
)
@@ -55,6 +65,6 @@ def handle(self, request: HttpRequest, pipeline) -> HttpResponse:
def fly_configure_view(
- request: HttpRequest, org: RpcOrganization, auth_provider: RpcAuthProvider
+ request: HttpRequest, org: RpcOrganization | dict[str, Any], auth_provider: RpcAuthProvider
) -> DeferredResponse:
return DeferredResponse("sentry_auth_fly/configure.html")
diff --git a/src/sentry/auth/providers/github/client.py b/src/sentry/auth/providers/github/client.py
index 847184cfc68f8a..e737d611ede1f3 100644
--- a/src/sentry/auth/providers/github/client.py
+++ b/src/sentry/auth/providers/github/client.py
@@ -1,5 +1,8 @@
from __future__ import annotations
+from types import TracebackType
+from typing import Any
+
import orjson
from requests.exceptions import RequestException
@@ -9,23 +12,25 @@
class GitHubApiError(Exception):
- def __init__(self, message="", status=0) -> None:
+ def __init__(self, message: str | bytes = "", status: int = 0) -> None:
super().__init__(message)
self.status = status
class GitHubClient:
- def __init__(self, access_token) -> None:
+ def __init__(self, access_token: str) -> None:
self.http = http.build_session()
self.access_token = access_token
def __enter__(self) -> GitHubClient:
return self
- def __exit__(self, exc_type, exc_value, traceback) -> None:
+ def __exit__(
+ self, exc_type: type | None, exc_value: Exception | None, traceback: TracebackType | None
+ ) -> None:
self.http.close()
- def _request(self, path: str):
+ def _request(self, path: str) -> dict[str, Any] | list[dict[str, Any]]:
headers = {"Authorization": f"token {self.access_token}"}
try:
@@ -39,18 +44,24 @@ def _request(self, path: str):
raise GitHubApiError(req.content, status=req.status_code)
return orjson.loads(req.content)
- def get_org_list(self):
- return self._request("/user/orgs")
+ def get_org_list(self) -> list[dict[str, Any]]:
+ res = self._request("/user/orgs")
+ if not isinstance(res, list):
+ return [res]
+ return res
- def get_user(self):
+ def get_user(self) -> dict[str, Any] | list[dict[str, Any]]:
return self._request("/user")
- def get_user_emails(self):
- return self._request("/user/emails")
+ def get_user_emails(self) -> list[dict[str, Any]]:
+ res = self._request("/user/emails")
+ if not isinstance(res, list):
+ return [res]
+ return res
- def is_org_member(self, org_id) -> bool:
- org_id = str(org_id)
+ def is_org_member(self, org_id: int) -> bool:
+ org_id_str = str(org_id)
for o in self.get_org_list():
- if str(o["id"]) == org_id:
+ if str(o["id"]) == org_id_str:
return True
return False
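
Since _request is now annotated as returning either a JSON object or a JSON array, callers that need a list must narrow the union; the isinstance checks in get_org_list and get_user_emails are that narrowing. A standalone sketch of the pattern (names and payloads are illustrative):

from typing import Any


def fetch(path: str) -> dict[str, Any] | list[dict[str, Any]]:
    # Stand-in for an HTTP call whose JSON body may be an object or an array.
    return [{"id": 1}, {"id": 2}] if path.endswith("s") else {"id": 1}


def fetch_list(path: str) -> list[dict[str, Any]]:
    res = fetch(path)
    if not isinstance(res, list):
        # mypy narrows res to dict[str, Any] here, so wrapping it is well typed.
        return [res]
    return res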
diff --git a/src/sentry/auth/providers/github/provider.py b/src/sentry/auth/providers/github/provider.py
index 1d32d70d5b2831..f7f46bb76f368d 100644
--- a/src/sentry/auth/providers/github/provider.py
+++ b/src/sentry/auth/providers/github/provider.py
@@ -1,6 +1,7 @@
from __future__ import annotations
-from collections.abc import Callable
+from collections.abc import Callable, Mapping
+from typing import Any
from django.http.request import HttpRequest
@@ -24,13 +25,15 @@ class GitHubOAuth2Provider(OAuth2Provider):
name = "GitHub"
key = IntegrationProviderSlug.GITHUB.value
- def get_client_id(self):
+ def get_client_id(self) -> str:
+ assert isinstance(CLIENT_ID, str)
return CLIENT_ID
- def get_client_secret(self):
+ def get_client_secret(self) -> str:
+ assert isinstance(CLIENT_SECRET, str)
return CLIENT_SECRET
- def __init__(self, org=None, **config) -> None:
+ def __init__(self, org: RpcOrganization | dict[str, Any] | None = None, **config: Any) -> None:
super().__init__(**config)
self.org = org
@@ -61,7 +64,7 @@ def get_setup_pipeline(self) -> list[AuthView]:
def get_refresh_token_url(self) -> str:
return ACCESS_TOKEN_URL
- def build_config(self, state):
+ def build_config(self, state: Mapping[str, Any]) -> dict[str, dict[str, Any]]:
"""
On configuration, we determine which provider organization to configure SSO for
This configuration is then stored and passed into the pipeline instances during SSO
@@ -69,7 +72,7 @@ def build_config(self, state):
"""
return {"org": {"id": state["org"]["id"], "name": state["org"]["login"]}}
- def build_identity(self, state):
+ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
data = state["data"]
user_data = state["user"]
return {
@@ -82,7 +85,10 @@ def build_identity(self, state):
def refresh_identity(self, auth_identity: AuthIdentity) -> None:
with GitHubClient(auth_identity.data["access_token"]) as client:
try:
- if not client.is_org_member(self.org["id"]):
+ if not self.org:
+ raise IdentityNotValid
+ org_id = self.org.id if isinstance(self.org, RpcOrganization) else self.org["id"]
+ if not client.is_org_member(org_id):
raise IdentityNotValid
except GitHubApiError as e:
raise IdentityNotValid(e)
diff --git a/src/sentry/auth/providers/github/views.py b/src/sentry/auth/providers/github/views.py
index 950de31f9f685f..e75af0cab48332 100644
--- a/src/sentry/auth/providers/github/views.py
+++ b/src/sentry/auth/providers/github/views.py
@@ -1,9 +1,12 @@
from __future__ import annotations
+from typing import Any
+
from django import forms
-from django.http import HttpRequest, HttpResponse
+from django.http import HttpRequest
from django.http.response import HttpResponseBase
+from sentry.auth.helper import AuthHelper
from sentry.auth.services.auth.model import RpcAuthProvider
from sentry.auth.view import AuthView
from sentry.models.authidentity import AuthIdentity
@@ -22,7 +25,7 @@
)
-def _get_name_from_email(email):
+def _get_name_from_email(email: str) -> str:
"""
Given an email return a capitalized name. Ex. john.smith@example.com would return John Smith.
"""
@@ -32,21 +35,27 @@ def _get_name_from_email(email):
class FetchUser(AuthView):
- def __init__(self, org=None, *args, **kwargs) -> None:
+ def __init__(
+ self, org: RpcOrganization | dict[str, Any] | None = None, *args: Any, **kwargs: Any
+ ) -> None:
self.org = org
super().__init__(*args, **kwargs)
- def handle(self, request: HttpRequest, pipeline) -> HttpResponse:
- with GitHubClient(pipeline.fetch_state("data")["access_token"]) as client:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
+ data: dict[str, Any] | None = pipeline.fetch_state("data")
+ assert data is not None
+ with GitHubClient(data["access_token"]) as client:
if self.org is not None:
# if we have a configured org (self.org) for our oauth provider
- if not client.is_org_member(self.org["id"]):
+ org_id = self.org.id if isinstance(self.org, RpcOrganization) else self.org["id"]
+ if not client.is_org_member(org_id):
# `is_org_member` fetches provider orgs for the auth'd provider user.
# if our configured org is not in the users list of orgs, then that user
# does not have access to the provisioned org and we will prevent access
return pipeline.error(ERR_NO_ORG_ACCESS)
user = client.get_user()
+ assert isinstance(user, dict)
if not user.get("email"):
emails = client.get_user_emails()
@@ -85,8 +94,9 @@ class ConfirmEmailForm(forms.Form):
class ConfirmEmail(AuthView):
- def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
- user = pipeline.fetch_state("user")
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
+ user: dict[str, Any] | None = pipeline.fetch_state("user")
+ assert user is not None
# TODO(dcramer): this isn't ideal, but our current flow doesnt really
# support this behavior;
@@ -114,18 +124,20 @@ def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
class SelectOrganizationForm(forms.Form):
org = forms.ChoiceField(label="Organization")
- def __init__(self, org_list, *args, **kwargs) -> None:
+ def __init__(self, org_list: list[dict[str, Any]], *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
set_field_choices(self.fields["org"], [(o["id"], o["login"]) for o in org_list])
class SelectOrganization(AuthView):
- def __init__(self, *args, **kwargs) -> None:
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
- def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
- with GitHubClient(pipeline.fetch_state("data")["access_token"]) as client:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
+ data: dict[str, Any] | None = pipeline.fetch_state("data")
+ assert data is not None
+ with GitHubClient(data["access_token"]) as client:
org_list = client.get_org_list()
form = SelectOrganizationForm(org_list, request.POST or None)
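
The body of _get_name_from_email is elided by the hunk; going only by its docstring, one way such a helper could be written (a sketch, not necessarily the shipped implementation):

def _get_name_from_email(email: str) -> str:
    """Given an email return a capitalized name. Ex. john.smith@example.com -> John Smith."""
    local_part = email.rsplit("@", 1)[0]
    return " ".join(part.capitalize() for part in local_part.split("."))


assert _get_name_from_email("john.smith@example.com") == "John Smith"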
diff --git a/src/sentry/auth/providers/google/provider.py b/src/sentry/auth/providers/google/provider.py
index 7b0a017dc4a280..5f10f917bbaefe 100644
--- a/src/sentry/auth/providers/google/provider.py
+++ b/src/sentry/auth/providers/google/provider.py
@@ -1,6 +1,7 @@
from __future__ import annotations
-from collections.abc import Callable
+from collections.abc import Callable, Mapping
+from typing import Any
from django.http import HttpRequest
@@ -20,7 +21,7 @@ class GoogleOAuth2Login(OAuth2Login):
authorize_url = AUTHORIZE_URL
scope = SCOPE
- def __init__(self, client_id: str, domains=None) -> None:
+ def __init__(self, client_id: str, domains: list[str] | None = None) -> None:
self.domains = domains
super().__init__(client_id=client_id)
@@ -38,7 +39,13 @@ class GoogleOAuth2Provider(OAuth2Provider):
name = "Google"
key = "google"
- def __init__(self, domain=None, domains=None, version=None, **config) -> None:
+ def __init__(
+ self,
+ domain: str | None = None,
+ domains: list[str] | None = None,
+ version: str | None = None,
+ **config: Any,
+ ) -> None:
if domain:
if domains:
domains.append(domain)
@@ -81,10 +88,10 @@ def get_auth_pipeline(self) -> list[AuthView]:
def get_refresh_token_url(self) -> str:
return ACCESS_TOKEN_URL
- def build_config(self, state):
+ def build_config(self, state: Mapping[str, Any]) -> dict[str, Any]:
return {"domains": [state["domain"]], "version": DATA_VERSION}
- def build_identity(self, state):
+ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
# https://developers.google.com/identity/protocols/OpenIDConnect#server-flow
# data.user => {
# "iss":"accounts.google.com",
diff --git a/src/sentry/auth/providers/google/views.py b/src/sentry/auth/providers/google/views.py
index e04aad5aad4c8b..87ffab557dd104 100644
--- a/src/sentry/auth/providers/google/views.py
+++ b/src/sentry/auth/providers/google/views.py
@@ -1,11 +1,13 @@
from __future__ import annotations
import logging
+from typing import Any
import orjson
from django.http import HttpRequest
-from rest_framework.response import Response
+from django.http.response import HttpResponseBase
+from sentry.auth.helper import AuthHelper
from sentry.auth.services.auth.model import RpcAuthProvider
from sentry.auth.view import AuthView
from sentry.organizations.services.organization.model import RpcOrganization
@@ -18,13 +20,16 @@
class FetchUser(AuthView):
- def __init__(self, domains, version, *args, **kwargs) -> None:
+ def __init__(
+ self, domains: list[str] | None, version: str | None, *args: Any, **kwargs: Any
+ ) -> None:
self.domains = domains
self.version = version
super().__init__(*args, **kwargs)
- def dispatch(self, request: HttpRequest, pipeline) -> Response:
- data = pipeline.fetch_state("data")
+ def dispatch(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
+ data: dict[str, Any] | None = pipeline.fetch_state("data")
+ assert data is not None
try:
id_token = data["id_token"]
@@ -39,7 +44,7 @@ def dispatch(self, request: HttpRequest, pipeline) -> Response:
return pipeline.error(ERR_INVALID_RESPONSE)
try:
- payload = orjson.loads(payload_b)
+ payload: dict[str, Any] = orjson.loads(payload_b)
except Exception as exc:
logger.exception("Unable to decode id_token payload: %s", exc)
return pipeline.error(ERR_INVALID_RESPONSE)
@@ -49,6 +54,7 @@ def dispatch(self, request: HttpRequest, pipeline) -> Response:
return pipeline.error(ERR_INVALID_RESPONSE)
# support legacy style domains with pure domain regexp
+ domain: str | None = None
if self.version is None:
domain = extract_domain(payload["email"])
else:
@@ -81,5 +87,5 @@ def google_configure_view(
return DeferredResponse("sentry_auth_google/configure.html", {"domains": domains or []})
-def extract_domain(email):
+def extract_domain(email: str) -> str:
return email.rsplit("@", 1)[-1]
diff --git a/src/sentry/auth/providers/oauth2.py b/src/sentry/auth/providers/oauth2.py
index ac80737c762d0c..3f11932652e1a4 100644
--- a/src/sentry/auth/providers/oauth2.py
+++ b/src/sentry/auth/providers/oauth2.py
@@ -1,14 +1,16 @@
+from __future__ import annotations
+
import abc
import logging
import secrets
from collections.abc import Mapping
from time import time
-from typing import Any
+from typing import TYPE_CHECKING, Any
from urllib.parse import parse_qsl, urlencode
import orjson
-from django.http import HttpRequest, HttpResponse
-from django.http.response import HttpResponseRedirect
+from django.http import HttpRequest
+from django.http.response import HttpResponseBase, HttpResponseRedirect
from django.urls import reverse
from sentry.auth.exceptions import IdentityNotValid
@@ -18,6 +20,9 @@
from sentry.models.authidentity import AuthIdentity
from sentry.utils.http import absolute_uri
+if TYPE_CHECKING:
+ from sentry.auth.helper import AuthHelper
+
ERR_INVALID_STATE = "An error occurred while validating your request."
@@ -28,9 +33,16 @@ def _get_redirect_url() -> str:
class OAuth2Login(AuthView):
authorize_url: str | None = None
client_id: str | None = None
- scope = ""
+ scope: str = ""
- def __init__(self, authorize_url=None, client_id=None, scope=None, *args, **kwargs) -> None:
+ def __init__(
+ self,
+ authorize_url: str | None = None,
+ client_id: str | None = None,
+ scope: str | None = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> None:
super().__init__(*args, **kwargs)
if authorize_url is not None:
self.authorize_url = authorize_url
@@ -54,7 +66,7 @@ def get_authorize_params(self, state: str, redirect_uri: str) -> dict[str, str |
"redirect_uri": redirect_uri,
}
- def dispatch(self, request: HttpRequest, pipeline) -> HttpResponse:
+ def dispatch(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
if "code" in request.GET:
return pipeline.next_step()
@@ -76,7 +88,12 @@ class OAuth2Callback(AuthView):
client_secret: str | None = None
def __init__(
- self, access_token_url=None, client_id=None, client_secret=None, *args, **kwargs
+ self,
+ access_token_url: str | None = None,
+ client_id: str | None = None,
+ client_secret: str | None = None,
+ *args: Any,
+ **kwargs: Any,
) -> None:
super().__init__(*args, **kwargs)
if access_token_url is not None:
@@ -86,7 +103,7 @@ def __init__(
if client_secret is not None:
self.client_secret = client_secret
- def get_token_params(self, code: str, redirect_uri: str) -> dict[str, str | None]:
+ def get_token_params(self, code: str, redirect_uri: str) -> Mapping[str, str | None]:
return {
"grant_type": "authorization_code",
"code": code,
@@ -95,7 +112,9 @@ def get_token_params(self, code: str, redirect_uri: str) -> dict[str, str | None
"client_secret": self.client_secret,
}
- def exchange_token(self, request: HttpRequest, pipeline, code: str):
+ def exchange_token(
+ self, request: HttpRequest, pipeline: AuthHelper, code: str
+ ) -> Mapping[str, Any]:
# TODO: this needs the auth yet
data = self.get_token_params(code=code, redirect_uri=_get_redirect_url())
req = safe_urlopen(self.access_token_url, data=data)
@@ -104,7 +123,7 @@ def exchange_token(self, request: HttpRequest, pipeline, code: str):
return dict(parse_qsl(body))
return orjson.loads(body)
- def dispatch(self, request: HttpRequest, pipeline) -> HttpResponse:
+ def dispatch(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
error = request.GET.get("error")
state = request.GET.get("state")
code = request.GET.get("code")
@@ -156,7 +175,7 @@ def get_auth_pipeline(self) -> list[AuthView]:
def get_refresh_token_url(self) -> str:
raise NotImplementedError
- def get_refresh_token_params(self, refresh_token: str) -> dict[str, str | None]:
+ def get_refresh_token_params(self, refresh_token: str) -> Mapping[str, str | None]:
return {
"client_id": self.get_client_id(),
"client_secret": self.get_client_secret(),
@@ -164,7 +183,7 @@ def get_refresh_token_params(self, refresh_token: str) -> dict[str, str | None]:
"refresh_token": refresh_token,
}
- def get_oauth_data(self, payload):
+ def get_oauth_data(self, payload: Mapping[str, Any]) -> Mapping[str, Any]:
data = {"access_token": payload["access_token"], "token_type": payload["token_type"]}
if "expires_in" in payload:
data["expires"] = int(time()) + int(payload["expires_in"])
@@ -186,7 +205,9 @@ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
"""
raise NotImplementedError
- def update_identity(self, new_data, current_data):
+ def update_identity(
+ self, new_data: dict[str, Any], current_data: Mapping[str, Any]
+ ) -> Mapping[str, Any]:
# we want to maintain things like refresh_token that might not
# exist on a refreshed state
if "refresh_token" in current_data:
diff --git a/src/sentry/auth/providers/saml2/forms.py b/src/sentry/auth/providers/saml2/forms.py
index dce57ea246e092..a4d5140ab68803 100644
--- a/src/sentry/auth/providers/saml2/forms.py
+++ b/src/sentry/auth/providers/saml2/forms.py
@@ -1,14 +1,18 @@
+from typing import Any
+
from django import forms
from django.forms.utils import ErrorList
+from django.http import HttpRequest
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
from onelogin.saml2.idp_metadata_parser import OneLogin_Saml2_IdPMetadataParser
from requests.exceptions import SSLError
+from sentry.auth.helper import AuthHelper
from sentry.http import safe_urlopen
-def extract_idp_data_from_parsed_data(data):
+def extract_idp_data_from_parsed_data(data: dict[str, Any]) -> dict[str, Any]:
"""
Transform data returned by the OneLogin_Saml2_IdPMetadataParser into the
expected IdP dict shape.
@@ -27,14 +31,14 @@ def extract_idp_data_from_parsed_data(data):
}
-def process_url(form):
+def process_url(form: forms.Form) -> dict[str, Any]:
url = form.cleaned_data["metadata_url"]
response = safe_urlopen(url)
data = OneLogin_Saml2_IdPMetadataParser.parse(response.content)
return extract_idp_data_from_parsed_data(data)
-def process_xml(form):
+def process_xml(form: forms.Form) -> dict[str, Any]:
# cast unicode xml to byte string so lxml won't complain when trying to
# parse a xml document with a type declaration.
xml = form.cleaned_data["metadata_xml"].encode("utf8")
@@ -60,7 +64,9 @@ class SAMLForm(forms.Form):
processor = lambda d: d.cleaned_data
-def process_metadata(form_cls, request, helper):
+def process_metadata(
+ form_cls: type[forms.Form], request: HttpRequest, helper: AuthHelper
+) -> forms.Form | None:
form = form_cls()
if "action_save" not in request.POST:
@@ -72,15 +78,16 @@ def process_metadata(form_cls, request, helper):
return form
try:
+ assert hasattr(form_cls, "processor")
data = form_cls.processor(form)
except SSLError:
- errors = form._errors.setdefault("__all__", ErrorList())
+ errors = form.errors.setdefault("__all__", ErrorList())
errors.append(
"Could not verify SSL certificate. Ensure that your IdP instance has a valid SSL certificate that is linked to a trusted root certificate."
)
return form
except Exception:
- errors = form._errors.setdefault("__all__", ErrorList())
+ errors = form.errors.setdefault("__all__", ErrorList())
errors.append("Failed to parse provided SAML2 metadata")
return form
@@ -92,7 +99,7 @@ def process_metadata(form_cls, request, helper):
]
error_list = ", ".join(field_errors)
- errors = form._errors.setdefault("__all__", ErrorList())
+ errors = form._errors.setdefault("__all__", ErrorList()) # type: ignore[attr-defined] # XXX: ._errors is an internal attr
errors.append(f"Invalid metadata: {error_list}")
return form
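
Swapping form._errors for form.errors removes the private attribute from two call sites; Django's public Form.add_error(None, ...) appends to the same "__all__" bucket and would avoid the remaining type: ignore as well. A sketch of that alternative (illustrative, not part of the patch):

from django import forms


def record_parse_failure(form: forms.Form) -> forms.Form:
    # Equivalent to form.errors.setdefault("__all__", ErrorList()).append(...),
    # but through the public non-field-error API.
    form.add_error(None, "Failed to parse provided SAML2 metadata")
    return form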
diff --git a/src/sentry/auth/providers/saml2/generic/views.py b/src/sentry/auth/providers/saml2/generic/views.py
index aea514084f7b0f..edcedb8edc9d3c 100644
--- a/src/sentry/auth/providers/saml2/generic/views.py
+++ b/src/sentry/auth/providers/saml2/generic/views.py
@@ -1,9 +1,11 @@
from __future__ import annotations
+from django.forms import Form
from django.http.request import HttpRequest
from django.http.response import HttpResponseBase
from django.urls import reverse
+from sentry.auth.helper import AuthHelper
from sentry.auth.providers.saml2.forms import (
AttributeMappingForm,
SAMLForm,
@@ -50,15 +52,23 @@ def saml2_configure_view(
class SelectIdP(AuthView):
- def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
op = "url"
- forms = {"url": URLMetadataForm(), "xml": XMLMetadataForm(), "idp": SAMLForm()}
+ forms: dict[str, Form | None] = {
+ "url": URLMetadataForm(),
+ "xml": XMLMetadataForm(),
+ "idp": SAMLForm(),
+ }
if "action_save" in request.POST:
op = request.POST["action_save"]
- form_cls = forms[op].__class__
- forms[op] = process_metadata(form_cls, request, pipeline)
+ form_from_forms = forms[op]
+ if form_from_forms is None:
+ forms[op] = None
+ else:
+ form_cls = form_from_forms.__class__
+ forms[op] = process_metadata(form_cls, request, pipeline)
# process_metadata will return None when the action was successful and
# data was bound to the pipeline.
@@ -69,7 +79,7 @@ def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
class MapAttributes(AuthView):
- def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
if "save_mappings" not in request.POST:
form = AttributeMappingForm()
else:
diff --git a/src/sentry/auth/providers/saml2/provider.py b/src/sentry/auth/providers/saml2/provider.py
index 65a5af898997c9..6bf78f5420ecb9 100644
--- a/src/sentry/auth/providers/saml2/provider.py
+++ b/src/sentry/auth/providers/saml2/provider.py
@@ -1,7 +1,8 @@
from __future__ import annotations
import abc
-from typing import NotRequired, TypedDict, _TypedDict
+from collections.abc import Mapping
+from typing import Any, NotRequired, TypedDict, _TypedDict
from urllib.parse import urlparse
import sentry_sdk
@@ -20,6 +21,7 @@
from sentry import features, options
from sentry.auth.exceptions import IdentityNotValid
+from sentry.auth.helper import AuthHelper
from sentry.auth.provider import Provider
from sentry.auth.store import FLOW_LOGIN
from sentry.auth.view import AuthView
@@ -58,7 +60,7 @@ def get_provider(organization_slug: str) -> SAML2Provider | None:
class SAML2LoginView(AuthView):
- def dispatch(self, request: HttpRequest, pipeline) -> HttpResponse:
+ def dispatch(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
if "SAMLResponse" in request.POST:
return pipeline.next_step()
@@ -128,7 +130,7 @@ def dispatch(self, request: HttpRequest, organization_slug: str) -> HttpResponse
class SAML2ACSView(AuthView):
@method_decorator(csrf_exempt)
- def dispatch(self, request: HttpRequest, pipeline) -> HttpResponse:
+ def dispatch(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
provider = pipeline.provider
# If we're authenticating during the setup pipeline the provider will
@@ -164,7 +166,7 @@ def dispatch(self, request: HttpRequest, organization_slug: str) -> HttpResponse
# No need to logout an anonymous user.
should_logout = request.user.is_authenticated
- def force_logout():
+ def force_logout() -> None:
logout(request)
redirect_to = auth.process_slo(
@@ -266,7 +268,7 @@ def get_saml_setup_pipeline(self) -> list[AuthView]:
state.
"""
- def attribute_mapping(self):
+ def attribute_mapping(self) -> Mapping[str, Any]:
"""
Returns the default Attribute Key -> IdP attribute key mapping.
@@ -276,7 +278,7 @@ def attribute_mapping(self):
"""
return {}
- def build_config(self, state):
+ def build_config(self, state: dict[str, Any]) -> dict[str, Any]:
config = state
# Default attribute mapping if none bound
@@ -285,7 +287,7 @@ def build_config(self, state):
return config
- def build_identity(self, state):
+ def build_identity(self, state: Mapping[str, Any]) -> Mapping[str, Any]:
raw_attributes = state["auth_attributes"]
attributes = {}
@@ -358,7 +360,7 @@ class SamlConfig(TypedDict):
idp: NotRequired[_SamlConfigIdp]
-def build_saml_config(provider_config, org: str) -> SamlConfig:
+def build_saml_config(provider_config: Mapping[str, Any], org: str) -> SamlConfig:
"""
Construct the SAML configuration dict to be passed into the OneLogin SAML
library.
diff --git a/src/sentry/auth/providers/saml2/rippling/provider.py b/src/sentry/auth/providers/saml2/rippling/provider.py
index d57ef6d0705f92..deaafad2d1f776 100644
--- a/src/sentry/auth/providers/saml2/rippling/provider.py
+++ b/src/sentry/auth/providers/saml2/rippling/provider.py
@@ -1,6 +1,7 @@
from django.http.request import HttpRequest
from django.http.response import HttpResponseBase
+from sentry.auth.helper import AuthHelper
from sentry.auth.providers.saml2.forms import URLMetadataForm
from sentry.auth.providers.saml2.provider import Attributes, SAML2Provider
from sentry.auth.providers.saml2.views import make_simple_setup
@@ -18,7 +19,7 @@ class WaitForCompletion(AuthView):
This is simply an extra step to wait for them to complete that.
"""
- def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
if "continue_setup" in request.POST:
return pipeline.next_step()
diff --git a/src/sentry/auth/providers/saml2/views.py b/src/sentry/auth/providers/saml2/views.py
index e2c202f34b8c4c..dbe8a2b54c8494 100644
--- a/src/sentry/auth/providers/saml2/views.py
+++ b/src/sentry/auth/providers/saml2/views.py
@@ -1,13 +1,15 @@
+from django import forms
from django.http.request import HttpRequest
from django.http.response import HttpResponseBase
+from sentry.auth.helper import AuthHelper
from sentry.auth.providers.saml2.forms import process_metadata
from sentry.auth.view import AuthView
-def make_simple_setup(form_cls, template_path: str) -> type[AuthView]:
+def make_simple_setup(form_cls: type[forms.Form], template_path: str) -> type[AuthView]:
class SelectIdP(AuthView):
- def handle(self, request: HttpRequest, pipeline) -> HttpResponseBase:
+ def handle(self, request: HttpRequest, pipeline: AuthHelper) -> HttpResponseBase:
form = process_metadata(form_cls, request, pipeline)
if form:
diff --git a/src/sentry/auth/staff.py b/src/sentry/auth/staff.py
index 65cc8e35e38771..3167be18e4c711 100644
--- a/src/sentry/auth/staff.py
+++ b/src/sentry/auth/staff.py
@@ -1,13 +1,16 @@
from __future__ import annotations
+import enum
import ipaddress
import logging
+from collections.abc import Iterable
from datetime import datetime, timedelta, timezone
+from typing import Any, Final
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.signing import BadSignature
-from django.http import HttpRequest
+from django.http import HttpRequest, HttpResponse
from django.utils import timezone as django_timezone
from django.utils.crypto import constant_time_compare, get_random_string
@@ -40,7 +43,8 @@
STAFF_ORG_ID = getattr(settings, "STAFF_ORG_ID", None)
-UNSET = object()
+_UnsetType = enum.Enum("_UnsetType", "UNSET")
+_Unset: Final = _UnsetType.UNSET
def is_active_staff(request: HttpRequest) -> bool:
@@ -72,10 +76,12 @@ def _seconds_to_timestamp(seconds: str) -> datetime:
class Staff(ElevatedMode):
allowed_ips = frozenset(ipaddress.ip_network(str(v), strict=False) for v in ALLOWED_IPS)
- def __init__(self, request, allowed_ips=UNSET) -> None:
+ def __init__(
+ self, request: HttpRequest, allowed_ips: Iterable[Any] | _UnsetType = _Unset
+ ) -> None:
self.uid: str | None = None
self.request = request
- if allowed_ips is not UNSET:
+ if allowed_ips is not _Unset:
self.allowed_ips = frozenset(
ipaddress.ip_network(str(v), strict=False) for v in allowed_ips or ()
)
@@ -116,7 +122,7 @@ def is_privileged_request(self) -> tuple[bool, InactiveReason]:
return False, InactiveReason.INVALID_IP
return True, InactiveReason.NONE
- def get_session_data(self, current_datetime: datetime | None = None):
+ def get_session_data(self, current_datetime: datetime | None = None) -> dict[str, Any] | None:
"""
Return the current session data, with native types coerced.
"""
@@ -127,14 +133,14 @@ def get_session_data(self, current_datetime: datetime | None = None):
key=COOKIE_NAME,
default=None,
salt=COOKIE_SALT,
- max_age=MAX_AGE.total_seconds(),
+ max_age=int(MAX_AGE.total_seconds()),
)
except BadSignature:
logger.exception(
"staff.bad-cookie-signature",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
data = request.session.get(SESSION_KEY)
if not cookie_token:
@@ -143,13 +149,13 @@ def get_session_data(self, current_datetime: datetime | None = None):
"staff.missing-cookie-token",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
elif not data:
logger.warning(
"staff.missing-session-data",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
session_token = data.get("tok")
if not session_token:
@@ -157,14 +163,14 @@ def get_session_data(self, current_datetime: datetime | None = None):
"staff.missing-session-token",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
if not constant_time_compare(cookie_token, session_token):
logger.warning(
"staff.invalid-token",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
if data["uid"] != str(request.user.id):
logger.warning(
@@ -175,7 +181,7 @@ def get_session_data(self, current_datetime: datetime | None = None):
"expected_user_id": data["uid"],
},
)
- return
+ return None
if current_datetime is None:
current_datetime = django_timezone.now()
@@ -188,14 +194,14 @@ def get_session_data(self, current_datetime: datetime | None = None):
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
exc_info=True,
)
- return
+ return None
if expires_date < current_datetime:
logger.info(
"staff.session-expired",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
return data
@@ -203,7 +209,7 @@ def _populate(self) -> None:
current_datetime = django_timezone.now()
request = self.request
- user = getattr(request, "user", None)
+ user: User | None = getattr(request, "user", None)
if not hasattr(request, "session"):
data = None
elif not (user and user.is_staff):
@@ -214,6 +220,7 @@ def _populate(self) -> None:
if not data:
self._set_logged_out()
else:
+ assert user is not None
self._set_logged_in(
expires=_seconds_to_timestamp(data["exp"]), token=data["tok"], user=user
)
@@ -237,7 +244,13 @@ def _populate(self) -> None:
},
)
- def _set_logged_in(self, expires: datetime, token: str, user, current_datetime=None):
+ def _set_logged_in(
+ self,
+ expires: datetime,
+ token: str,
+ user: User,
+ current_datetime: datetime | None = None,
+ ) -> None:
# we bind uid here, as if you change users in the same request
# we wouldn't want to still support staff auth (given
# the staff check happens right here)
@@ -272,7 +285,7 @@ def _set_logged_out(self) -> None:
self.is_valid = False
self.request.session.pop(SESSION_KEY, None)
- def set_logged_in(self, user: User | AnonymousUser, current_datetime=None) -> None:
+ def set_logged_in(self, user: User, current_datetime: datetime | None = None) -> None:
"""
Mark a session as staff-enabled.
"""
@@ -302,14 +315,14 @@ def set_logged_out(self) -> None:
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- def on_response(self, response) -> None:
+ def on_response(self, response: HttpResponse) -> None:
request = self.request
# Re-bind the cookie
if self.is_active:
response.set_signed_cookie(
COOKIE_NAME,
- self.token,
+ self.token or "",
salt=COOKIE_SALT,
# set max_age to None, as we want this cookie to expire on browser close
max_age=None,
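
Aside: the staff.py hunks above swap the module-level UNSET = object() sentinel for a one-member enum. A minimal standalone sketch of why that matters to mypy follows; the function and its timeout parameter are illustrative, not Sentry code. The same swap appears again in superuser.py below.

    import enum
    from typing import Final

    _UnsetType = enum.Enum("_UnsetType", "UNSET")
    _Unset: Final = _UnsetType.UNSET

    def describe_timeout(timeout: float | _UnsetType = _Unset) -> str:
        # Because _UnsetType has exactly one member, `timeout is not _Unset`
        # narrows the union down to float. A bare `object()` sentinel gives
        # the type checker nothing to narrow on.
        if timeout is not _Unset:
            return f"explicit timeout of {timeout:.1f}s"
        return "timeout left unset"
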
diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py
index bc4b385e0d26b0..199c8776e94519 100644
--- a/src/sentry/auth/superuser.py
+++ b/src/sentry/auth/superuser.py
@@ -11,17 +11,17 @@
from __future__ import annotations
+import enum
import ipaddress
import logging
-from collections.abc import Container
+from collections.abc import Container, Iterable
from datetime import datetime, timedelta, timezone
-from typing import Any, Never, TypeIs, overload
+from typing import Any, Final, Never, TypeIs, overload
import orjson
from django.conf import settings
-from django.contrib.auth.models import AnonymousUser
from django.core.signing import BadSignature
-from django.http import HttpRequest
+from django.http import HttpRequest, HttpResponse
from django.utils import timezone as django_timezone
from django.utils.crypto import constant_time_compare, get_random_string
from rest_framework import serializers, status
@@ -74,7 +74,9 @@
SUPERUSER_ACCESS_CATEGORIES = getattr(settings, "SUPERUSER_ACCESS_CATEGORIES", ["for_unit_test"])
-UNSET = object()
+_UnsetType = enum.Enum("_UnsetType", "UNSET")
+_Unset: Final = _UnsetType.UNSET
+
DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL = getattr(
settings, "DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL", False
@@ -87,7 +89,7 @@
def get_superuser_scopes(
auth_state: RpcAuthState,
- user: Any,
+ user: User,
organization_context: Organization | RpcUserOrganizationContext,
) -> set[str]:
@@ -184,14 +186,26 @@ def _check_expired_on_org_change(self) -> bool:
return False
return self._is_active
- def __init__(self, request, allowed_ips=UNSET, org_id=UNSET, current_datetime=None):
+ def __init__(
+ self,
+ request: HttpRequest,
+ allowed_ips: Iterable[Any] | _UnsetType = _Unset,
+ org_id: int | None | _UnsetType = _Unset,
+ current_datetime: datetime | None = None,
+ ) -> None:
self.uid: str | None = None
self.request = request
- if allowed_ips is not UNSET:
+ self.expires: datetime | None = None
+ self.token: str | None = None
+ self._is_active: bool = False
+ self._inactive_reason: InactiveReason = InactiveReason.NONE
+ self.is_valid: bool = False
+
+ if allowed_ips is not _Unset:
self.allowed_ips = frozenset(
ipaddress.ip_network(str(v), strict=False) for v in allowed_ips or ()
)
- if org_id is not UNSET:
+ if org_id is not _Unset:
self.org_id = org_id
self._populate(current_datetime=current_datetime)
@@ -246,7 +260,7 @@ def is_privileged_request(self) -> tuple[bool, InactiveReason]:
return False, InactiveReason.INVALID_IP
return True, InactiveReason.NONE
- def get_session_data(self, current_datetime=None):
+ def get_session_data(self, current_datetime: datetime | None = None) -> dict[str, Any] | None:
"""
Return the current session data, with native types coerced.
"""
@@ -254,14 +268,17 @@ def get_session_data(self, current_datetime=None):
try:
cookie_token = request.get_signed_cookie(
- key=COOKIE_NAME, default=None, salt=COOKIE_SALT, max_age=MAX_AGE.total_seconds()
+ key=COOKIE_NAME,
+ default=None,
+ salt=COOKIE_SALT,
+ max_age=int(MAX_AGE.total_seconds()),
)
except BadSignature:
logger.exception(
"superuser.bad-cookie-signature",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
data = request.session.get(SESSION_KEY)
if not cookie_token:
@@ -270,13 +287,13 @@ def get_session_data(self, current_datetime=None):
"superuser.missing-cookie-token",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
elif not data:
logger.warning(
"superuser.missing-session-data",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
session_token = data.get("tok")
if not session_token:
@@ -284,14 +301,14 @@ def get_session_data(self, current_datetime=None):
"superuser.missing-session-token",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
if not constant_time_compare(cookie_token, session_token):
logger.warning(
"superuser.invalid-token",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
if data["uid"] != str(request.user.id):
logger.warning(
@@ -302,7 +319,7 @@ def get_session_data(self, current_datetime=None):
"expected_user_id": data["uid"],
},
)
- return
+ return None
if current_datetime is None:
current_datetime = django_timezone.now()
@@ -315,14 +332,14 @@ def get_session_data(self, current_datetime=None):
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
exc_info=True,
)
- return
+ return None
if data["idl"] < current_datetime:
logger.info(
"superuser.session-expired",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
try:
data["exp"] = datetime.fromtimestamp(float(data["exp"]), timezone.utc)
@@ -332,23 +349,23 @@ def get_session_data(self, current_datetime=None):
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
exc_info=True,
)
- return
+ return None
if data["exp"] < current_datetime:
logger.info(
"superuser.session-expired",
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- return
+ return None
return data
- def _populate(self, current_datetime=None) -> None:
+ def _populate(self, current_datetime: datetime | None = None) -> None:
if current_datetime is None:
current_datetime = django_timezone.now()
request = self.request
- user = getattr(request, "user", None)
+ user: User | None = getattr(request, "user", None)
if not hasattr(request, "session"):
data = None
elif not (user and user.is_superuser):
@@ -359,6 +376,7 @@ def _populate(self, current_datetime=None) -> None:
if not data:
self._set_logged_out()
else:
+ assert user is not None
self._set_logged_in(expires=data["exp"], token=data["tok"], user=user)
if not self.is_active:
@@ -380,7 +398,13 @@ def _populate(self, current_datetime=None) -> None:
},
)
- def _set_logged_in(self, expires, token, user, current_datetime=None) -> None:
+ def _set_logged_in(
+ self,
+ expires: datetime,
+ token: str,
+ user: User,
+ current_datetime: datetime | None = None,
+ ) -> None:
# we bind uid here, as if you change users in the same request
# we wouldn't want to still support superuser auth (given
# the superuser check happens right here)
@@ -414,9 +438,9 @@ def _set_logged_out(self) -> None:
def set_logged_in(
self,
- user: User | AnonymousUser,
+ user: User,
current_datetime: datetime | None = None,
- prefilled_su_modal=None,
+ prefilled_su_modal: dict[str, Any] | None = None,
) -> None:
"""
Mark a session as superuser-enabled.
@@ -427,7 +451,7 @@ def set_logged_in(
token = get_random_string(12)
- def enable_and_log_superuser_access():
+ def enable_and_log_superuser_access() -> None:
self._set_logged_in(
expires=current_datetime + MAX_AGE,
token=token,
@@ -474,7 +498,7 @@ def enable_and_log_superuser_access():
extra={
"superuser_token_id": token,
"user_id": request.user.id,
- "user_email": request.user.email,
+ "user_email": getattr(request.user, "email", None),
"su_access_category": su_access_info.validated_data["superuserAccessCategory"],
"reason_for_su": su_access_info.validated_data["superuserReason"],
},
@@ -495,14 +519,14 @@ def set_logged_out(self) -> None:
extra={"ip_address": request.META["REMOTE_ADDR"], "user_id": request.user.id},
)
- def on_response(self, response) -> None:
+ def on_response(self, response: HttpResponse) -> None:
request = self.request
# always re-bind the cookie to update the idle expiration window
if self.is_active:
response.set_signed_cookie(
COOKIE_NAME,
- self.token,
+ self.token or "",
salt=COOKIE_SALT,
# set max_age to None, as we want this cookie to expire on browser close
max_age=None,
diff --git a/src/sentry/auth_v2/apps.py b/src/sentry/auth_v2/apps.py
index cc7c66bdc4215d..b9f94e7ad47f4a 100644
--- a/src/sentry/auth_v2/apps.py
+++ b/src/sentry/auth_v2/apps.py
@@ -4,5 +4,5 @@
class Config(AppConfig):
name = "sentry.auth_v2"
- def ready(self):
+ def ready(self) -> None:
pass
diff --git a/src/sentry/backup/sanitize.py b/src/sentry/backup/sanitize.py
index 4421e43df255e4..d84f1d44c5a74a 100644
--- a/src/sentry/backup/sanitize.py
+++ b/src/sentry/backup/sanitize.py
@@ -43,11 +43,11 @@
MAX_IPV6 = (2**ipaddress.IPV6LENGTH) - 1
-def random_ipv4():
+def random_ipv4() -> str:
return str(ipaddress.IPv4Address(randint(0, MAX_IPV4)))
-def random_ipv6():
+def random_ipv6() -> str:
return str(ipaddress.IPv6Address(randint(0, MAX_IPV6)))
diff --git a/src/sentry/charts/chartcuterie.py b/src/sentry/charts/chartcuterie.py
index 984c484184f494..3f20c44336390d 100644
--- a/src/sentry/charts/chartcuterie.py
+++ b/src/sentry/charts/chartcuterie.py
@@ -30,7 +30,7 @@ def service_url(self) -> str | None:
return options.get("chart-rendering.chartcuterie", {}).get("url")
@property
- def storage_options(self):
+ def storage_options(self) -> dict[str, Any] | None:
backend = options.get("chart-rendering.storage.backend")
opts = options.get("chart-rendering.storage.options")
diff --git a/src/sentry/charts/endpoints.py b/src/sentry/charts/endpoints.py
index 511419410c5411..cfa8c1b5a68506 100644
--- a/src/sentry/charts/endpoints.py
+++ b/src/sentry/charts/endpoints.py
@@ -1,6 +1,7 @@
import os.path
from django.http import HttpRequest
+from django.http.response import HttpResponseBase
from django.views import static
import sentry
@@ -12,5 +13,5 @@
def serve_chartcuterie_config(
request: HttpRequest,
-):
+) -> HttpResponseBase:
return static.serve(request, "config.js", document_root=CONFIG_DIR)
diff --git a/src/sentry/codecov/endpoints/test_results/test_results.py b/src/sentry/codecov/endpoints/test_results/test_results.py
index b7519799fe9729..58a04a8923ba9c 100644
--- a/src/sentry/codecov/endpoints/test_results/test_results.py
+++ b/src/sentry/codecov/endpoints/test_results/test_results.py
@@ -31,7 +31,7 @@ class TestResultsEndpoint(CodecovEndpoint):
}
# Disable pagination requirement for this endpoint
- def has_pagination(self, response):
+ def has_pagination(self, response) -> bool:
return True
@extend_schema(
diff --git a/src/sentry/dashboards/endpoints/organization_dashboard_details.py b/src/sentry/dashboards/endpoints/organization_dashboard_details.py
index 66e949eb3c5702..c0657aad9138ab 100644
--- a/src/sentry/dashboards/endpoints/organization_dashboard_details.py
+++ b/src/sentry/dashboards/endpoints/organization_dashboard_details.py
@@ -1,3 +1,5 @@
+from typing import Any
+
import sentry_sdk
from django.db import IntegrityError, router, transaction
from django.db.models import F
@@ -42,8 +44,13 @@ class OrganizationDashboardBase(OrganizationEndpoint):
permission_classes = (OrganizationDashboardsPermission,)
def convert_args(
- self, request: Request, organization_id_or_slug, dashboard_id, *args, **kwargs
- ):
+ self,
+ request: Request,
+ organization_id_or_slug: str | int,
+ dashboard_id: str | int,
+ *args: Any,
+ **kwargs: Any,
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
args, kwargs = super().convert_args(request, organization_id_or_slug, *args, **kwargs)
try:
@@ -53,7 +60,9 @@ def convert_args(
return (args, kwargs)
- def _get_dashboard(self, request: Request, organization, dashboard_id):
+ def _get_dashboard(
+ self, request: Request, organization: Organization, dashboard_id: str | int
+ ) -> Dashboard:
prebuilt = Dashboard.get_prebuilt(organization, request.user, dashboard_id)
sentry_sdk.set_tag("dashboard.is_prebuilt", prebuilt is not None)
if prebuilt:
@@ -80,7 +89,9 @@ class OrganizationDashboardDetailsEndpoint(OrganizationDashboardBase):
},
examples=DashboardExamples.DASHBOARD_GET_RESPONSE,
)
- def get(self, request: Request, organization, dashboard) -> Response:
+ def get(
+ self, request: Request, organization: Organization, dashboard: Dashboard | dict[Any, Any]
+ ) -> Response:
"""
Return details about an organization's custom dashboard.
"""
@@ -101,7 +112,9 @@ def get(self, request: Request, organization, dashboard) -> Response:
404: RESPONSE_NOT_FOUND,
},
)
- def delete(self, request: Request, organization, dashboard) -> Response:
+ def delete(
+ self, request: Request, organization: Organization, dashboard: Dashboard | dict[Any, Any]
+ ) -> Response:
"""
Delete an organization's custom dashboard, or tombstone
a pre-built dashboard which effectively deletes it.
@@ -140,7 +153,12 @@ def delete(self, request: Request, organization, dashboard) -> Response:
},
examples=DashboardExamples.DASHBOARD_PUT_RESPONSE,
)
- def put(self, request: Request, organization: Organization, dashboard) -> Response:
+ def put(
+ self,
+ request: Request,
+ organization: Organization,
+ dashboard: Dashboard | dict[Any, Any] | None,
+ ) -> Response:
"""
Edit an organization's custom dashboard as well as any bulk
edits on widgets that may have been made. (For example, widgets
@@ -189,7 +207,9 @@ class OrganizationDashboardVisitEndpoint(OrganizationDashboardBase):
"POST": ApiPublishStatus.PRIVATE,
}
- def post(self, request: Request, organization, dashboard) -> Response:
+ def post(
+ self, request: Request, organization: Organization, dashboard: Dashboard | dict[Any, Any]
+ ) -> Response:
"""
Update last_visited and increment visits counter
"""
@@ -228,7 +248,9 @@ class OrganizationDashboardFavoriteEndpoint(OrganizationDashboardBase):
"PUT": ApiPublishStatus.PRIVATE,
}
- def put(self, request: Request, organization: Organization, dashboard) -> Response:
+ def put(
+ self, request: Request, organization: Organization, dashboard: Dashboard | dict[Any, Any]
+ ) -> Response:
"""
Toggle favorite status for current user by adding or removing
current user from dashboard favorites
diff --git a/src/sentry/dashboards/endpoints/organization_dashboard_widget_details.py b/src/sentry/dashboards/endpoints/organization_dashboard_widget_details.py
index 161f4547058775..561b5b675ba29c 100644
--- a/src/sentry/dashboards/endpoints/organization_dashboard_widget_details.py
+++ b/src/sentry/dashboards/endpoints/organization_dashboard_widget_details.py
@@ -8,6 +8,7 @@
from sentry.api.bases import OrganizationEndpoint
from sentry.api.serializers.rest_framework import DashboardWidgetSerializer
from sentry.dashboards.endpoints.organization_dashboards import OrganizationDashboardsPermission
+from sentry.models.organization import Organization
@region_silo_endpoint
@@ -18,7 +19,7 @@ class OrganizationDashboardWidgetDetailsEndpoint(OrganizationEndpoint):
owner = ApiOwner.DASHBOARDS
permission_classes = (OrganizationDashboardsPermission,)
- def post(self, request: Request, organization) -> Response:
+ def post(self, request: Request, organization: Organization) -> Response:
"""
Validate a Widget
`````````````````
diff --git a/src/sentry/dashboards/endpoints/organization_dashboards.py b/src/sentry/dashboards/endpoints/organization_dashboards.py
index 39789258f646c7..fbf3a4dab532a4 100644
--- a/src/sentry/dashboards/endpoints/organization_dashboards.py
+++ b/src/sentry/dashboards/endpoints/organization_dashboards.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+from typing import Any
+
import sentry_sdk
from django.db import IntegrityError, router, transaction
from django.db.models import (
@@ -18,6 +20,7 @@
from rest_framework import status
from rest_framework.request import Request
from rest_framework.response import Response
+from rest_framework.views import APIView
from sentry import features, quotas, roles
from sentry.api.api_owners import ApiOwner
@@ -45,6 +48,10 @@
from sentry.db.models.fields.text import CharField
from sentry.models.dashboard import Dashboard, DashboardFavoriteUser, DashboardLastVisited
from sentry.models.organization import Organization
+from sentry.organizations.services.organization.model import (
+ RpcOrganization,
+ RpcUserOrganizationContext,
+)
from sentry.users.services.user.service import user_service
MAX_RETRIES = 2
@@ -58,7 +65,12 @@ class OrganizationDashboardsPermission(OrganizationPermission):
"DELETE": ["org:read", "org:write", "org:admin"],
}
- def has_object_permission(self, request: Request, view, obj):
+ def has_object_permission(
+ self,
+ request: Request,
+ view: APIView,
+ obj: Organization | RpcOrganization | RpcUserOrganizationContext | Dashboard,
+ ) -> bool:
if isinstance(obj, Organization):
return super().has_object_permission(request, view, obj)
@@ -252,7 +264,7 @@ def get(self, request: Request, organization: Organization) -> Response:
list_serializer = DashboardListSerializer()
- def handle_results(results):
+ def handle_results(results: list[Dashboard | dict[str, Any]]) -> list[dict[str, Any]]:
serialized = []
dashboards = []
for item in results:
@@ -306,7 +318,7 @@ def handle_results(results):
},
examples=DashboardExamples.DASHBOARD_POST_RESPONSE,
)
- def post(self, request: Request, organization, retry=0) -> Response:
+ def post(self, request: Request, organization: Organization, retry: int = 0) -> Response:
"""
Create a new dashboard for the given Organization
"""
diff --git a/src/sentry/dashboards/endpoints/organization_dashboards_starred.py b/src/sentry/dashboards/endpoints/organization_dashboards_starred.py
index 9f4356a228e8b9..85e5294f03e057 100644
--- a/src/sentry/dashboards/endpoints/organization_dashboards_starred.py
+++ b/src/sentry/dashboards/endpoints/organization_dashboards_starred.py
@@ -14,7 +14,7 @@
from sentry.api.serializers.base import serialize
from sentry.api.serializers.models.dashboard import DashboardListSerializer
from sentry.api.serializers.rest_framework.dashboard import DashboardStarredOrderSerializer
-from sentry.models.dashboard import DashboardFavoriteUser
+from sentry.models.dashboard import Dashboard, DashboardFavoriteUser
from sentry.models.organization import Organization
@@ -31,7 +31,7 @@ class OrganizationDashboardsStarredEndpoint(OrganizationEndpoint):
owner = ApiOwner.DASHBOARDS
permission_classes = (MemberPermission,)
- def has_feature(self, organization, request):
+ def has_feature(self, organization: Organization, request: Request) -> bool:
return features.has(
"organizations:dashboards-starred-reordering", organization, actor=request.user
)
@@ -56,7 +56,7 @@ def get(self, request: Request, organization: Organization) -> Response:
new_dashboard_positions=[favorite.dashboard.id for favorite in favorites],
)
- def data_fn(offset, limit):
+ def data_fn(offset: int, limit: int) -> list[Dashboard]:
return [favorite.dashboard for favorite in favorites[offset : offset + limit]]
return self.paginate(
@@ -78,7 +78,7 @@ class OrganizationDashboardsStarredOrderEndpoint(OrganizationEndpoint):
owner = ApiOwner.DASHBOARDS
permission_classes = (MemberPermission,)
- def has_feature(self, organization, request):
+ def has_feature(self, organization: Organization, request: Request) -> bool:
return features.has(
"organizations:dashboards-starred-reordering", organization, actor=request.user
)
diff --git a/src/sentry/data_export/base.py b/src/sentry/data_export/base.py
index 6e081189f79b20..278268e970948f 100644
--- a/src/sentry/data_export/base.py
+++ b/src/sentry/data_export/base.py
@@ -9,7 +9,7 @@
class ExportError(Exception):
- def __init__(self, message, recoverable=False):
+ def __init__(self, message: str, recoverable: bool = False) -> None:
super().__init__(message)
self.recoverable = recoverable
@@ -27,29 +27,31 @@ class ExportQueryType:
DISCOVER_STR = "Discover"
@classmethod
- def as_choices(cls):
+ def as_choices(cls) -> tuple[tuple[int, str], tuple[int, str]]:
return (
(cls.ISSUES_BY_TAG, str(cls.ISSUES_BY_TAG_STR)),
(cls.DISCOVER, str(cls.DISCOVER_STR)),
)
@classmethod
- def as_str_choices(cls):
+ def as_str_choices(cls) -> tuple[tuple[str, str], tuple[str, str]]:
return (
(cls.ISSUES_BY_TAG_STR, cls.ISSUES_BY_TAG_STR),
(cls.DISCOVER_STR, cls.DISCOVER_STR),
)
@classmethod
- def as_str(cls, integer):
+ def as_str(cls, integer: int) -> str:
if integer == cls.ISSUES_BY_TAG:
return cls.ISSUES_BY_TAG_STR
elif integer == cls.DISCOVER:
return cls.DISCOVER_STR
+ raise ValueError(f"Invalid ExportQueryType: {integer}")
@classmethod
- def from_str(cls, string):
+ def from_str(cls, string: str) -> int:
if string == cls.ISSUES_BY_TAG_STR:
return cls.ISSUES_BY_TAG
elif string == cls.DISCOVER_STR:
return cls.DISCOVER
+ raise ValueError(f"Invalid ExportQueryType: {string}")
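
Aside: the two raise ValueError(...) additions above are what make the declared -> str and -> int return types sound on every path; with mypy's default warn_no_return, an if/elif chain that can fall through is reported as a missing return. A rough, self-contained illustration (the numeric codes are made up, not the real ExportQueryType values):

    def export_type_as_str(code: int) -> str:
        # Without the final raise, falling past the elif implicitly returns
        # None, which mypy flags as "Missing return statement" for a
        # function annotated to return str.
        if code == 0:
            return "Issues-by-Tag"
        elif code == 1:
            return "Discover"
        raise ValueError(f"unknown export query type: {code}")
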
diff --git a/src/sentry/data_export/endpoints/data_export.py b/src/sentry/data_export/endpoints/data_export.py
index 44a433a94fd938..f3cac721fcb277 100644
--- a/src/sentry/data_export/endpoints/data_export.py
+++ b/src/sentry/data_export/endpoints/data_export.py
@@ -1,3 +1,5 @@
+from typing import Any
+
import sentry_sdk
from django.core.exceptions import ValidationError
from rest_framework import serializers
@@ -37,11 +39,11 @@
}
-class DataExportQuerySerializer(serializers.Serializer):
+class DataExportQuerySerializer(serializers.Serializer[dict[str, Any]]):
query_type = serializers.ChoiceField(choices=ExportQueryType.as_str_choices(), required=True)
query_info = serializers.JSONField(required=True)
- def validate(self, data):
+ def validate(self, data: dict[str, Any]) -> dict[str, Any]:
organization = self.context["organization"]
has_metrics = self.context["has_metrics"]
query_info = data["query_info"]
@@ -172,7 +174,7 @@ def get_features(self, organization: Organization, request: Request) -> dict[str
return all_features
- def post(self, request: Request, organization) -> Response:
+ def post(self, request: Request, organization: Organization) -> Response:
"""
Create a new asynchronous file export task, and
email user upon completion,
diff --git a/src/sentry/data_export/endpoints/data_export_details.py b/src/sentry/data_export/endpoints/data_export_details.py
index 656cfb1f696d70..2a1147c094e27c 100644
--- a/src/sentry/data_export/endpoints/data_export_details.py
+++ b/src/sentry/data_export/endpoints/data_export_details.py
@@ -24,7 +24,9 @@ class DataExportDetailsEndpoint(OrganizationEndpoint):
owner = ApiOwner.VISIBILITY
permission_classes = (OrganizationDataExportPermission,)
- def get(self, request: Request, organization: Organization, data_export_id: str) -> Response:
+ def get(
+ self, request: Request, organization: Organization, data_export_id: str
+ ) -> Response | StreamingHttpResponse:
"""
Retrieve information about the temporary file record.
Used to populate page emailed to the user.
@@ -50,9 +52,10 @@ def get(self, request: Request, organization: Organization, data_export_id: str)
return self.download(data_export)
return Response(serialize(data_export, request.user))
- def download(self, data_export):
+ def download(self, data_export: ExportedData) -> StreamingHttpResponse:
metrics.incr("dataexport.download", sample_rate=1.0)
file = data_export._get_file()
+ assert file is not None
raw_file = file.getfile()
response = StreamingHttpResponse(
iter(lambda: raw_file.read(4096), b""), content_type="text/csv"
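
Aside: the `assert file is not None` added to download() above is the usual way to narrow an Optional value (here `_get_file() -> File | None`) when the surrounding logic already guarantees it exists. A tiny illustrative sketch, not Sentry code:

    def read_export_file(path: str | None) -> bytes:
        # Up to the assert, mypy sees `path` as `str | None`; afterwards the
        # type is narrowed to `str`, so open() type-checks. At runtime the
        # assert also turns a silent None into a loud failure.
        assert path is not None, "export has no file attached"
        with open(path, "rb") as fh:
            return fh.read()
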
diff --git a/src/sentry/data_export/models.py b/src/sentry/data_export/models.py
index 536af39699e71d..15889eac447f99 100644
--- a/src/sentry/data_export/models.py
+++ b/src/sentry/data_export/models.py
@@ -1,6 +1,7 @@
from __future__ import annotations
import logging
+from datetime import datetime, timedelta
from typing import Any
import orjson
@@ -20,6 +21,7 @@
sane_repr,
)
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
+from sentry.models.files.file import File
from sentry.users.services.user.service import user_service
from .base import DEFAULT_EXPIRATION, ExportQueryType, ExportStatus
@@ -54,7 +56,7 @@ def status(self) -> ExportStatus:
return ExportStatus.Valid
@property
- def payload(self):
+ def payload(self) -> dict[str, Any]:
payload = self.query_info.copy()
payload["export_type"] = ExportQueryType.as_str(self.query_type)
return payload
@@ -67,7 +69,7 @@ def file_name(self) -> str:
return f"{export_type}_{date}_{self.id}.csv"
@staticmethod
- def format_date(date) -> str | None:
+ def format_date(date: datetime | None) -> str | None:
# Example: 12:21 PM on July 21, 2020 (UTC)
return None if date is None else date.strftime("%-I:%M %p on %B %d, %Y (%Z)")
@@ -76,11 +78,11 @@ def delete_file(self) -> None:
if file:
file.delete()
- def delete(self, *args, **kwargs):
+ def delete(self, *args: Any, **kwargs: Any) -> tuple[int, dict[str, Any]]:
self.delete_file()
return super().delete(*args, **kwargs)
- def finalize_upload(self, file, expiration=DEFAULT_EXPIRATION) -> None:
+ def finalize_upload(self, file: File, expiration: timedelta = DEFAULT_EXPIRATION) -> None:
self.delete_file() # If a file is present, remove it
current_time = timezone.now()
expire_time = current_time + expiration
@@ -139,9 +141,7 @@ def email_failure(self, message: str) -> None:
msg.send_async([user.email])
self.delete()
- def _get_file(self):
- from sentry.models.files.file import File
-
+ def _get_file(self) -> File | None:
if self.file_id:
try:
return File.objects.get(pk=self.file_id)
diff --git a/src/sentry/data_export/processors/discover.py b/src/sentry/data_export/processors/discover.py
index 3bb3fab43ff4f7..fd015f4ed834f4 100644
--- a/src/sentry/data_export/processors/discover.py
+++ b/src/sentry/data_export/processors/discover.py
@@ -1,10 +1,12 @@
import logging
+from typing import Any, Protocol
from sentry_relay.consts import SPAN_STATUS_CODE_TO_NAME
from sentry.api.utils import get_date_range_from_params
from sentry.models.environment import Environment
from sentry.models.group import Group
+from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.search.events.fields import get_function_alias
from sentry.search.events.types import SnubaParams
@@ -16,12 +18,16 @@
logger = logging.getLogger(__name__)
+class DataFn(Protocol):
+ def __call__(self, offset: int, limit: int) -> dict[str, Any]: ...
+
+
class DiscoverProcessor:
"""
Processor for exports of discover data based on a provided query
"""
- def __init__(self, organization, discover_query):
+ def __init__(self, organization: Organization, discover_query: dict[str, Any]):
self.projects = self.get_projects(organization.id, discover_query)
self.environments = self.get_environments(organization.id, discover_query)
self.start, self.end = get_date_range_from_params(discover_query)
@@ -47,18 +53,18 @@ def __init__(self, organization, discover_query):
query=discover_query["query"],
snuba_params=self.snuba_params,
sort=discover_query.get("sort"),
- dataset=discover_query.get("dataset"),
+ dataset_name=discover_query.get("dataset"),
)
@staticmethod
- def get_projects(organization_id, query):
+ def get_projects(organization_id: int, query: dict[str, Any]) -> list[Project]:
projects = list(Project.objects.filter(id__in=query.get("project")))
if len(projects) == 0:
raise ExportError("Requested project does not exist")
return projects
@staticmethod
- def get_environments(organization_id, query):
+ def get_environments(organization_id: int, query: dict[str, Any]) -> list[Environment]:
requested_environments = query.get("environment", [])
if not isinstance(requested_environments, list):
requested_environments = [requested_environments]
@@ -79,12 +85,19 @@ def get_environments(organization_id, query):
return environments
@staticmethod
- def get_data_fn(fields, equations, query, snuba_params, sort, dataset):
- dataset = get_dataset(dataset)
+ def get_data_fn(
+ fields: list[str],
+ equations: list[str],
+ query: str,
+ snuba_params: SnubaParams,
+ sort: str | None,
+ dataset_name: str | None,
+ ) -> DataFn:
+ dataset = get_dataset(dataset_name)
if dataset is None:
dataset = discover
- def data_fn(offset, limit):
+ def data_fn(offset: int, limit: int) -> dict[str, Any]:
return dataset.query(
selected_columns=fields,
equations=equations,
@@ -101,7 +114,7 @@ def data_fn(offset, limit):
return data_fn
- def handle_fields(self, result_list):
+ def handle_fields(self, result_list: list[dict[str, Any]]) -> list[dict[str, Any]]:
# Find issue short_id if present
# (originally in `/api/bases/organization_events.py`)
new_result_list = result_list[:]
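
Aside: the DataFn callback Protocol introduced above, rather than Callable[[int, int], dict[str, Any]], preserves parameter names, which matters because the returned function is later invoked with keywords (data_fn(limit=..., offset=...) in tasks.py). A minimal sketch of the pattern with illustrative names:

    from typing import Any, Protocol

    class DataFn(Protocol):
        # __call__ keeps the parameter names, so keyword calls type-check;
        # a plain Callable type only describes positional arguments.
        def __call__(self, offset: int, limit: int) -> dict[str, Any]: ...

    def make_data_fn(total_rows: int) -> DataFn:
        def data_fn(offset: int, limit: int) -> dict[str, Any]:
            end = min(offset + limit, total_rows)
            return {"data": list(range(offset, end))}
        return data_fn

    page = make_data_fn(total_rows=25)(offset=20, limit=10)  # keyword call is fine
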
diff --git a/src/sentry/data_export/tasks.py b/src/sentry/data_export/tasks.py
index 6cd828f4fc8066..1dfc8e34bb8030 100644
--- a/src/sentry/data_export/tasks.py
+++ b/src/sentry/data_export/tasks.py
@@ -3,6 +3,8 @@
import logging
import tempfile
from hashlib import sha1
+from io import BufferedRandom
+from typing import Any
import sentry_sdk
from celery.exceptions import MaxRetriesExceededError
@@ -56,15 +58,15 @@
),
)
def assemble_download(
- data_export_id,
- export_limit=EXPORTED_ROWS_LIMIT,
- batch_size=SNUBA_MAX_RESULTS,
- offset=0,
- bytes_written=0,
- environment_id=None,
- export_retries=3,
- **kwargs,
-):
+ data_export_id: int,
+ export_limit: int | None = EXPORTED_ROWS_LIMIT,
+ batch_size: int = SNUBA_MAX_RESULTS,
+ offset: int = 0,
+ bytes_written: int = 0,
+ environment_id: int | None = None,
+ export_retries: int = 3,
+ **kwargs: Any,
+) -> None:
with sentry_sdk.start_span(op="assemble"):
first_page = offset == 0
@@ -203,7 +205,9 @@ def assemble_download(
merge_export_blobs.delay(data_export_id)
-def get_processor(data_export, environment_id):
+def get_processor(
+ data_export: ExportedData, environment_id: int | None
+) -> IssuesByTagProcessor | DiscoverProcessor:
try:
if data_export.query_type == ExportQueryType.ISSUES_BY_TAG:
payload = data_export.query_info
@@ -229,7 +233,12 @@ def get_processor(data_export, environment_id):
raise
-def process_rows(processor, data_export, batch_size, offset):
+def process_rows(
+ processor: IssuesByTagProcessor | DiscoverProcessor,
+ data_export: ExportedData,
+ batch_size: int,
+ offset: int,
+) -> list[dict[str, str]]:
try:
if data_export.query_type == ExportQueryType.ISSUES_BY_TAG:
rows = process_issues_by_tag(processor, batch_size, offset)
@@ -247,12 +256,14 @@ def process_rows(processor, data_export, batch_size, offset):
@handle_snuba_errors(logger)
-def process_issues_by_tag(processor, limit, offset):
+def process_issues_by_tag(
+ processor: IssuesByTagProcessor, limit: int, offset: int
+) -> list[dict[str, str]]:
return processor.get_serialized_data(limit=limit, offset=offset)
@handle_snuba_errors(logger)
-def process_discover(processor, limit, offset):
+def process_discover(processor: DiscoverProcessor, limit: int, offset: int) -> list[dict[str, str]]:
raw_data_unicode = processor.data_fn(limit=limit, offset=offset)["data"]
return processor.handle_fields(raw_data_unicode)
@@ -261,7 +272,12 @@ class ExportDataFileTooBig(Exception):
pass
-def store_export_chunk_as_blob(data_export, bytes_written, fileobj, blob_size=DEFAULT_BLOB_SIZE):
+def store_export_chunk_as_blob(
+ data_export: ExportedData,
+ bytes_written: int,
+ fileobj: BufferedRandom,
+ blob_size: int = DEFAULT_BLOB_SIZE,
+) -> int:
try:
with atomic_transaction(
using=(
@@ -302,7 +318,7 @@ def store_export_chunk_as_blob(data_export, bytes_written, fileobj, blob_size=DE
namespace=export_tasks,
),
)
-def merge_export_blobs(data_export_id, **kwargs):
+def merge_export_blobs(data_export_id: int, **kwargs: Any) -> None:
with sentry_sdk.start_span(op="merge"):
try:
data_export = ExportedData.objects.get(id=data_export_id)
@@ -381,7 +397,7 @@ def merge_export_blobs(data_export_id, **kwargs):
return data_export.email_failure(message=message)
-def _set_data_on_scope(data_export):
+def _set_data_on_scope(data_export: ExportedData) -> None:
scope = sentry_sdk.get_isolation_scope()
if data_export.user_id:
user = dict(id=data_export.user_id)
diff --git a/src/sentry/data_export/utils.py b/src/sentry/data_export/utils.py
index 0653675019b6fc..9412e3341bd2da 100644
--- a/src/sentry/data_export/utils.py
+++ b/src/sentry/data_export/utils.py
@@ -1,4 +1,7 @@
+import logging
+from collections.abc import Callable
from functools import wraps
+from typing import Any
from sentry.search.events.constants import TIMEOUT_ERROR_MESSAGE
from sentry.snuba import discover
@@ -9,10 +12,12 @@
# Adapted into decorator from 'src/sentry/api/endpoints/organization_events.py'
-def handle_snuba_errors(logger):
- def wrapper(func):
+def handle_snuba_errors(
+ logger: logging.Logger,
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
+ def wrapper(func: Callable[..., Any]) -> Callable[..., Any]:
@wraps(func)
- def wrapped(*args, **kwargs):
+ def wrapped(*args: Any, **kwargs: Any) -> Any:
try:
return func(*args, **kwargs)
except discover.InvalidSearchQuery as error:
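
Aside: handle_snuba_errors above is typed with Callable[..., Any] on both sides, which unblocks mypy but erases the wrapped signature. For contrast, a stricter decorator-factory sketch using ParamSpec; this is an alternative pattern with illustrative names, not what the diff ships:

    import logging
    from collections.abc import Callable
    from functools import wraps
    from typing import ParamSpec, TypeVar

    P = ParamSpec("P")
    R = TypeVar("R")

    def log_and_reraise(logger: logging.Logger) -> Callable[[Callable[P, R]], Callable[P, R]]:
        # ParamSpec threads the wrapped function's exact parameters and
        # return type through the decorator, so call sites stay checked.
        def wrapper(func: Callable[P, R]) -> Callable[P, R]:
            @wraps(func)
            def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    logger.exception("wrapped call failed")
                    raise
            return wrapped
        return wrapper

The looser Callable[..., Any] form is often the pragmatic choice when, as here, the decorator wraps functions with unrelated signatures.
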
diff --git a/src/sentry/db/models/fields/array.py b/src/sentry/db/models/fields/array.py
index 4daf114fe052b8..11b3d1423c6657 100644
--- a/src/sentry/db/models/fields/array.py
+++ b/src/sentry/db/models/fields/array.py
@@ -38,10 +38,10 @@ def contribute_to_class(self, cls: type[models.Model], name: str, private_only:
super().contribute_to_class(cls, name, private_only=private_only)
setattr(cls, name, Creator(self))
- def db_type(self, connection):
+ def db_type(self, connection) -> str:
return f"{self.of.db_type(connection)}[]"
- def get_internal_type(self):
+ def get_internal_type(self) -> str:
return "TextField"
def get_prep_value(self, value):
diff --git a/src/sentry/db/models/fields/jsonfield.py b/src/sentry/db/models/fields/jsonfield.py
index 9bf7a0d1f2076b..c85d63333f7177 100644
--- a/src/sentry/db/models/fields/jsonfield.py
+++ b/src/sentry/db/models/fields/jsonfield.py
@@ -96,10 +96,10 @@ def get_default(self):
return json.loads(self.json_dumps(default))
return super().get_default()
- def get_internal_type(self):
+ def get_internal_type(self) -> str:
return "TextField"
- def db_type(self, connection):
+ def db_type(self, connection) -> str:
return "text"
def to_python(self, value):
diff --git a/src/sentry/db/models/fields/uuid.py b/src/sentry/db/models/fields/uuid.py
index 6c8d5b876d063a..cd5998468a4561 100644
--- a/src/sentry/db/models/fields/uuid.py
+++ b/src/sentry/db/models/fields/uuid.py
@@ -89,10 +89,10 @@ def __init__(self, auto_add=False, coerce_to=UUID, **kwargs):
# Now pass the rest of the work to CharField.
super().__init__(**kwargs)
- def db_type(self, connection):
+ def db_type(self, connection) -> str:
return "uuid"
- def get_internal_type(self):
+ def get_internal_type(self) -> str:
return "CharField"
def get_prep_value(self, value):
diff --git a/src/sentry/db/postgres/transactions.py b/src/sentry/db/postgres/transactions.py
index 0f131612be4f0f..5e6cc61eacb65e 100644
--- a/src/sentry/db/postgres/transactions.py
+++ b/src/sentry/db/postgres/transactions.py
@@ -2,6 +2,7 @@
import contextlib
import threading
+from collections.abc import Generator
from django.conf import settings
from django.db import connections, transaction
@@ -12,7 +13,7 @@
@contextlib.contextmanager
-def django_test_transaction_water_mark(using: str | None = None):
+def django_test_transaction_water_mark(using: str | None = None) -> Generator[None]:
"""
Hybrid cloud outbox flushing depends heavily on transaction.on_commit logic, but our tests do not follow
production in terms of isolation (TestCase uses two outer transactions, and stubbed RPCs cannot simulate
@@ -65,7 +66,7 @@ class InTestTransactionEnforcement(threading.local):
@contextlib.contextmanager
-def in_test_hide_transaction_boundary():
+def in_test_hide_transaction_boundary() -> Generator[None]:
"""
In production, has no effect.
In tests, it hides 'in_test_assert_no_transaction' invocations against problematic code paths.
@@ -83,7 +84,7 @@ def in_test_hide_transaction_boundary():
in_test_transaction_enforcement.enabled = prev
-def in_test_assert_no_transaction(msg: str):
+def in_test_assert_no_transaction(msg: str) -> None:
"""
In production, has no effect.
In tests, asserts that the current call is not inside of any transaction.
@@ -104,7 +105,7 @@ def in_test_assert_no_transaction(msg: str):
@contextlib.contextmanager
-def enforce_constraints(transaction: Atomic):
+def enforce_constraints(transaction: Atomic) -> Generator[None]:
"""
Nested transaction in Django do not check constraints by default, meaning IntegrityErrors can 'float' to callers
of functions that happen to wrap with additional transaction scopes. Using this context manager around a transaction
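
Aside: the transactions.py hunks annotate @contextlib.contextmanager functions as Generator[None]; that short spelling leans on the newer default type parameters for the generator's send and return slots. A standalone sketch with the fully spelled-out form (demo_scope is an illustrative name):

    import contextlib
    from collections.abc import Generator

    @contextlib.contextmanager
    def demo_scope(label: str) -> Generator[None, None, None]:
        # A @contextmanager function is a generator that yields exactly once;
        # Generator[YieldType, SendType, ReturnType] is what mypy expects,
        # and Generator[None] is equivalent once the last two default to None.
        print(f"enter {label}")
        try:
            yield
        finally:
            print(f"exit {label}")

    with demo_scope("example"):
        pass
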
diff --git a/src/sentry/debug_files/debug_files.py b/src/sentry/debug_files/debug_files.py
index 7ddf6915a287e6..80f2509eb8bc15 100644
--- a/src/sentry/debug_files/debug_files.py
+++ b/src/sentry/debug_files/debug_files.py
@@ -12,7 +12,7 @@
from sentry.utils.db import atomic_transaction
-def maybe_renew_debug_files(debug_files: Sequence[ProjectDebugFile]):
+def maybe_renew_debug_files(debug_files: Sequence[ProjectDebugFile]) -> None:
# We take a snapshot in time that MUST be consistent across all updates.
now = timezone.now()
# We compute the threshold used to determine whether we want to renew the specific bundle.
diff --git a/src/sentry/debug_files/release_files.py b/src/sentry/debug_files/release_files.py
index 0ab69c07a58dda..81affd318a2bc3 100644
--- a/src/sentry/debug_files/release_files.py
+++ b/src/sentry/debug_files/release_files.py
@@ -11,7 +11,7 @@
from sentry.utils.db import atomic_transaction
-def maybe_renew_releasefiles(releasefiles: list[ReleaseFile]):
+def maybe_renew_releasefiles(releasefiles: list[ReleaseFile]) -> None:
# We take a snapshot in time that MUST be consistent across all updates.
now = timezone.now()
# We compute the threshold used to determine whether we want to renew the specific bundle.
@@ -22,12 +22,12 @@ def maybe_renew_releasefiles(releasefiles: list[ReleaseFile]):
# We first check if any file needs renewal, before going to the database.
needs_bump = [rf.id for rf in releasefiles if rf.date_accessed <= threshold_date]
if not needs_bump:
- return
+ return None
renew_releasefiles_by_id(needs_bump)
-def renew_releasefiles_by_id(releasefile_ids: list[int]):
+def renew_releasefiles_by_id(releasefile_ids: list[int]) -> None:
now = timezone.now()
threshold_date = now - timedelta(
days=options.get("system.debug-files-renewal-age-threshold-days")
diff --git a/src/sentry/debug_files/tasks.py b/src/sentry/debug_files/tasks.py
index d3d0003f90ed0a..0160657b852680 100644
--- a/src/sentry/debug_files/tasks.py
+++ b/src/sentry/debug_files/tasks.py
@@ -10,7 +10,7 @@
namespace=attachments_tasks,
),
)
-def refresh_artifact_bundles_in_use():
+def refresh_artifact_bundles_in_use() -> None:
from .artifact_bundles import refresh_artifact_bundles_in_use as do_refresh
do_refresh()
@@ -23,7 +23,7 @@ def refresh_artifact_bundles_in_use():
namespace=attachments_tasks,
),
)
-def backfill_artifact_bundle_db_indexing(organization_id: int, release: str, dist: str):
+def backfill_artifact_bundle_db_indexing(organization_id: int, release: str, dist: str) -> None:
from .artifact_bundles import backfill_artifact_bundle_db_indexing as do_backfill
do_backfill(organization_id, release, dist)
diff --git a/src/sentry/debug_files/upload.py b/src/sentry/debug_files/upload.py
index cb5fde4bc84425..314c724821cc85 100644
--- a/src/sentry/debug_files/upload.py
+++ b/src/sentry/debug_files/upload.py
@@ -6,7 +6,7 @@
from sentry.models.files import FileBlob
-def find_missing_chunks(organization_id: int, chunks: set[str]):
+def find_missing_chunks(organization_id: int, chunks: set[str]) -> list[str]:
"""Returns a list of chunks which are missing for an org."""
with sentry_sdk.start_span(op="find_missing_chunks") as span:
span.set_tag("organization_id", organization_id)
diff --git a/src/sentry/demo_mode/tasks.py b/src/sentry/demo_mode/tasks.py
index dfa72b0c42a7ba..4ddb9b8f25264a 100644
--- a/src/sentry/demo_mode/tasks.py
+++ b/src/sentry/demo_mode/tasks.py
@@ -31,7 +31,7 @@
queue="demo_mode",
taskworker_config=TaskworkerConfig(namespace=demomode_tasks),
)
-def sync_debug_artifacts():
+def sync_debug_artifacts() -> None:
if (
not options.get("sentry.demo_mode.sync_debug_artifacts.enable")
@@ -53,8 +53,8 @@ def sync_debug_artifacts():
def _sync_artifact_bundles(
- source_org: Organization, target_org: Organization, cutoff_date: datetime
-):
+ source_org: Organization | None, target_org: Organization | None, cutoff_date: datetime
+) -> None:
if not source_org or not target_org:
return
@@ -75,8 +75,8 @@ def _sync_artifact_bundles(
def _sync_project_debug_files(
- source_org: Organization, target_org: Organization, cutoff_date: datetime
-):
+ source_org: Organization | None, target_org: Organization | None, cutoff_date: datetime
+) -> None:
if not source_org or not target_org:
return
@@ -119,8 +119,8 @@ def _sync_project_debug_files(
def _sync_proguard_artifact_releases(
- source_org: Organization, target_org: Organization, cutoff_date: datetime
-):
+ source_org: Organization | None, target_org: Organization | None, cutoff_date: datetime
+) -> None:
if not source_org or not target_org:
return
@@ -144,7 +144,7 @@ def _sync_proguard_artifact_releases(
_sync_proguard_artifact_release(source_proguard_artifact_release, target_org)
-def _sync_artifact_bundle(source_artifact_bundle: ArtifactBundle, target_org: Organization):
+def _sync_artifact_bundle(source_artifact_bundle: ArtifactBundle, target_org: Organization) -> None:
try:
with atomic_transaction(
using=(
@@ -180,7 +180,7 @@ def _sync_artifact_bundle(source_artifact_bundle: ArtifactBundle, target_org: Or
def _sync_project_artifact_bundle(
source_artifact_bundle: ArtifactBundle,
target_artifact_bundle: ArtifactBundle,
-):
+) -> None:
source_project_artifact_bundle = ProjectArtifactBundle.objects.filter(
artifact_bundle_id=source_artifact_bundle.id,
organization_id=source_artifact_bundle.organization_id,
@@ -207,7 +207,7 @@ def _sync_project_artifact_bundle(
def _sync_release_artifact_bundle(
source_artifact_bundle: ArtifactBundle,
target_artifact_bundle: ArtifactBundle,
-):
+) -> None:
source_release_artifact_bundle = ReleaseArtifactBundle.objects.filter(
artifact_bundle_id=source_artifact_bundle.id,
organization_id=source_artifact_bundle.organization_id,
@@ -255,7 +255,7 @@ def _sync_project_debug_file(
def _sync_proguard_artifact_release(
source_proguard_artifact_release: ProguardArtifactRelease, target_org: Organization
-):
+) -> None:
try:
with atomic_transaction(using=(router.db_for_write(ProguardArtifactRelease))):
target_project = _find_matching_project(
@@ -293,7 +293,7 @@ def _sync_proguard_artifact_release(
sentry_sdk.capture_exception(e)
-def _find_matching_project(project_id, organization_id):
+def _find_matching_project(project_id: int, organization_id: int) -> Project | None:
try:
source_project = Project.objects.get(id=project_id)
@@ -302,6 +302,11 @@ def _find_matching_project(project_id, organization_id):
slug=source_project.slug,
)
except Project.DoesNotExist:
- sentry_sdk.set_context("project_id", project_id)
- sentry_sdk.set_context("organization_id", organization_id)
+ sentry_sdk.set_context(
+ "args",
+ {
+ "project_id": project_id,
+ "organization_id": organization_id,
+ },
+ )
return None
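
Aside: the set_context change in _find_matching_project above consolidates two calls because sentry_sdk.set_context attaches a named mapping of values to future events, not a bare scalar. Roughly (the "args" context name follows the diff; the IDs are placeholders):

    import sentry_sdk

    # One named context holding both identifiers, instead of two calls that
    # each try to pass a bare integer as the context payload.
    sentry_sdk.set_context("args", {"project_id": 123, "organization_id": 456})
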
diff --git a/src/sentry/demo_mode/utils.py b/src/sentry/demo_mode/utils.py
index 5938c2642dc55a..53d494d6922971 100644
--- a/src/sentry/demo_mode/utils.py
+++ b/src/sentry/demo_mode/utils.py
@@ -17,7 +17,7 @@
)
-def is_demo_mode_enabled():
+def is_demo_mode_enabled() -> bool:
return options.get("demo-mode.enabled")
@@ -29,7 +29,7 @@ def is_demo_user(user: User | AnonymousUser | None) -> bool:
return user.id in options.get("demo-mode.users")
-def is_demo_org(organization: Organization | None):
+def is_demo_org(organization: Organization | None) -> bool:
if not organization:
return False
@@ -37,7 +37,7 @@ def is_demo_org(organization: Organization | None):
return organization.id in options.get("demo-mode.orgs")
-def get_demo_org():
+def get_demo_org() -> Organization | None:
if not is_demo_mode_enabled():
return None
@@ -45,7 +45,7 @@ def get_demo_org():
return Organization.objects.get(id=org_id)
-def get_demo_user():
+def get_demo_user() -> User | None:
if not is_demo_mode_enabled():
return None
diff --git a/src/sentry/digests/notifications.py b/src/sentry/digests/notifications.py
index 2d19f0dd70b62f..37d5bfb7bbbb1a 100644
--- a/src/sentry/digests/notifications.py
+++ b/src/sentry/digests/notifications.py
@@ -14,7 +14,7 @@
from sentry.models.rule import Rule
from sentry.notifications.types import ActionTargetType, FallthroughChoiceType
from sentry.notifications.utils.rules import get_key_from_rule_data
-from sentry.services.eventstore.models import Event
+from sentry.services.eventstore.models import Event, GroupEvent
from sentry.tsdb.base import TSDBModel
from sentry.workflow_engine.models import Workflow
from sentry.workflow_engine.models.alertrule_workflow import AlertRuleWorkflow
@@ -32,7 +32,7 @@ class DigestInfo(NamedTuple):
def split_key(
key: str,
-) -> tuple[Project, ActionTargetType, str | None, FallthroughChoiceType | None]:
+) -> tuple[Project, ActionTargetType, int | None, FallthroughChoiceType | None]:
key_parts = key.split(":", 5)
project_id = key_parts[2]
# XXX: We transitioned to new style keys (len == 5) a while ago on
@@ -40,14 +40,14 @@ def split_key(
# to keep this transition code around for a while, maybe indefinitely.
if len(key_parts) == 6:
target_type = ActionTargetType(key_parts[3])
- target_identifier = key_parts[4] if key_parts[4] else None
+ target_identifier = int(key_parts[4]) if key_parts[4] else None
try:
fallthrough_choice = FallthroughChoiceType(key_parts[5])
except ValueError:
fallthrough_choice = None
elif len(key_parts) == 5:
target_type = ActionTargetType(key_parts[3])
- target_identifier = key_parts[4] if key_parts[4] else None
+ target_identifier = int(key_parts[4]) if key_parts[4] else None
fallthrough_choice = None
else:
target_type = ActionTargetType.ISSUE_OWNERS
@@ -59,7 +59,7 @@ def split_key(
def unsplit_key(
project: Project,
target_type: ActionTargetType,
- target_identifier: str | None,
+ target_identifier: int | None,
fallthrough_choice: FallthroughChoiceType | None,
) -> str:
target_str = target_identifier if target_identifier is not None else ""
@@ -68,7 +68,7 @@ def unsplit_key(
def event_to_record(
- event: Event, rules: Sequence[Rule], notification_uuid: str | None = None
+ event: Event | GroupEvent, rules: Sequence[Rule], notification_uuid: str | None = None
) -> Record:
from sentry.notifications.notification_action.utils import should_fire_workflow_actions
diff --git a/src/sentry/digests/types.py b/src/sentry/digests/types.py
index 21cdb68209058b..c1c1aa5c0d5617 100644
--- a/src/sentry/digests/types.py
+++ b/src/sentry/digests/types.py
@@ -9,7 +9,7 @@
if TYPE_CHECKING:
from sentry.models.rule import Rule
- from sentry.services.eventstore.models import Event
+ from sentry.services.eventstore.models import Event, GroupEvent
class IdentifierKey(StrEnum):
@@ -18,7 +18,7 @@ class IdentifierKey(StrEnum):
class Notification(NamedTuple):
- event: Event
+ event: Event | GroupEvent
rules: Sequence[int] = ()
notification_uuid: str | None = None
identifier_key: IdentifierKey = IdentifierKey.RULE
@@ -50,7 +50,7 @@ def with_rules(self, rules: list[Rule]) -> RecordWithRuleObjects:
class NotificationWithRuleObjects(NamedTuple):
- event: Event
+ event: Event | GroupEvent
rules: list[Rule]
notification_uuid: str | None
diff --git a/src/sentry/digests/utils.py b/src/sentry/digests/utils.py
index 274c2436e67a5a..b99cc352ed2195 100644
--- a/src/sentry/digests/utils.py
+++ b/src/sentry/digests/utils.py
@@ -17,7 +17,7 @@
from sentry.models.rulesnooze import RuleSnooze
from sentry.notifications.types import ActionTargetType, FallthroughChoiceType
from sentry.notifications.utils.participants import get_send_to
-from sentry.services.eventstore.models import Event
+from sentry.services.eventstore.models import Event, GroupEvent
from sentry.types.actor import Actor
@@ -78,8 +78,10 @@ def get_digest_as_context(digest: Digest) -> _DigestContext:
def get_events_by_participant(
- participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[Actor]]],
-) -> Mapping[Actor, set[Event]]:
+ participants_by_provider_by_event: Mapping[
+ Event | GroupEvent, Mapping[ExternalProviders, set[Actor]]
+ ],
+) -> Mapping[Actor, set[Event | GroupEvent]]:
"""Invert a mapping of events to participants to a mapping of participants to events."""
output = defaultdict(set)
for event, participants_by_provider in participants_by_provider_by_event.items():
@@ -92,7 +94,9 @@ def get_events_by_participant(
def get_personalized_digests(
digest: Digest,
- participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[Actor]]],
+ participants_by_provider_by_event: Mapping[
+ Event | GroupEvent, Mapping[ExternalProviders, set[Actor]]
+ ],
) -> Mapping[Actor, Digest]:
events_by_participant = get_events_by_participant(participants_by_provider_by_event)
@@ -107,7 +111,7 @@ def get_personalized_digests(
return actor_to_digest
-def get_event_from_groups_in_digest(digest: Digest) -> Iterable[Event]:
+def get_event_from_groups_in_digest(digest: Digest) -> Iterable[Event | GroupEvent]:
"""Gets a random event from each group in the digest."""
return {
group_records[0].value.event
@@ -117,7 +121,7 @@ def get_event_from_groups_in_digest(digest: Digest) -> Iterable[Event]:
def build_custom_digest(
- original_digest: Digest, events: Iterable[Event], participant: Actor
+ original_digest: Digest, events: Iterable[Event | GroupEvent], participant: Actor
) -> Digest:
"""Given a digest and a set of events, filter the digest to only records that include the events."""
user_digest: Digest = {}
@@ -147,7 +151,7 @@ def get_participants_by_event(
target_type: ActionTargetType = ActionTargetType.ISSUE_OWNERS,
target_identifier: int | None = None,
fallthrough_choice: FallthroughChoiceType | None = None,
-) -> Mapping[Event, Mapping[ExternalProviders, set[Actor]]]:
+) -> Mapping[Event | GroupEvent, Mapping[ExternalProviders, set[Actor]]]:
"""
This is probably the slowest part in sending digests because we do a lot of
DB calls while we iterate over every event. It would be great if we could
@@ -174,7 +178,7 @@ def sort_func(record: Record) -> datetime:
return sorted(records, key=sort_func, reverse=True)
-def get_groups(digest: Digest) -> Sequence[tuple[Rule, Group, Event]]:
+def get_groups(digest: Digest) -> Sequence[tuple[Rule, Group, Event | GroupEvent]]:
"""
Split a digest into groups and return it as a tuple of: the applicable
rule, the group, and the group's first event.
diff --git a/src/sentry/discover/apps.py b/src/sentry/discover/apps.py
index f3a368fcdff380..34ef5972740021 100644
--- a/src/sentry/discover/apps.py
+++ b/src/sentry/discover/apps.py
@@ -4,5 +4,5 @@
class Config(AppConfig):
name = "sentry.discover"
- def ready(self):
+ def ready(self) -> None:
pass
diff --git a/src/sentry/discover/compare_tables.py b/src/sentry/discover/compare_tables.py
index ea9b7a9427e0f8..5ac352215a5f69 100644
--- a/src/sentry/discover/compare_tables.py
+++ b/src/sentry/discover/compare_tables.py
@@ -50,7 +50,9 @@ class CompareTableResultDict(TypedDict):
query: str | None
-def compare_table_results(metrics_query_result: EventsResponse, eap_result: EAPResponse):
+def compare_table_results(
+ metrics_query_result: EventsResponse, eap_result: EAPResponse
+) -> tuple[bool, list[str], CompareTableResult]:
eap_data_row = eap_result["data"][0] if len(eap_result["data"]) > 0 else {}
metrics_data_row = (
metrics_query_result["data"][0] if len(metrics_query_result["data"]) > 0 else {}
diff --git a/src/sentry/discover/dashboard_widget_split.py b/src/sentry/discover/dashboard_widget_split.py
index cc13c4b3fd8ace..6ce1e4e2560f31 100644
--- a/src/sentry/discover/dashboard_widget_split.py
+++ b/src/sentry/discover/dashboard_widget_split.py
@@ -62,7 +62,7 @@ def _save_split_decision_for_widget(
widget: DashboardWidget,
split_decision: int | None,
dataset_source: DatasetSourcesTypes | None,
-):
+) -> None:
if split_decision is not None:
widget.discover_widget_split = split_decision
if dataset_source is not None:
diff --git a/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py b/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py
index c4e72b6c27a65e..b0ad0bea5dfc5f 100644
--- a/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py
+++ b/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py
@@ -219,8 +219,8 @@ def _remove_lrb_if_limit_is_reached(self) -> None:
lrb_release = None
active_releases = 0
keys_to_delete = []
- for boosted_release_key, timestamp in boosted_releases.items():
- timestamp = float(timestamp)
+ for boosted_release_key, ts in boosted_releases.items():
+ timestamp = float(ts)
# For efficiency reasons we don't parse the release and extend it with information, therefore we have to
# check timestamps in the following way.
diff --git a/src/sentry/dynamic_sampling/rules/utils.py b/src/sentry/dynamic_sampling/rules/utils.py
index 24f73569780746..4042da6984aa87 100644
--- a/src/sentry/dynamic_sampling/rules/utils.py
+++ b/src/sentry/dynamic_sampling/rules/utils.py
@@ -1,9 +1,11 @@
+from __future__ import annotations
+
from enum import Enum
from typing import Literal, NotRequired, TypedDict, Union
import orjson
from django.conf import settings
-from rediscluster import RedisCluster
+from redis import StrictRedis
from sentry.models.dynamicsampling import CUSTOM_RULE_START
from sentry.relay.types import RuleCondition
@@ -174,6 +176,6 @@ def apply_dynamic_factor(base_sample_rate: float, x: float) -> float:
return float(x / x**base_sample_rate)
-def get_redis_client_for_ds() -> RedisCluster:
+def get_redis_client_for_ds() -> StrictRedis[str]:
cluster_key = settings.SENTRY_DYNAMIC_SAMPLING_RULES_REDIS_CLUSTER
- return redis.redis_clusters.get(cluster_key) # type: ignore[return-value]
+ return redis.redis_clusters.get(cluster_key)
diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py
index 948f131d0b1e86..9ad8efe5aa8ae5 100644
--- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py
+++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_projects.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import logging
from collections import defaultdict
from collections.abc import Iterator, Mapping, Sequence
@@ -308,6 +310,7 @@ def fetch_projects_with_total_root_transaction_count_and_rates(
"""
func_name = fetch_projects_with_total_root_transaction_count_and_rates.__name__
timer = context.get_timer(func_name)
+ aggregated_projects = defaultdict(list)
with timer:
context.incr_function_state(func_name, num_iterations=1)
@@ -318,7 +321,6 @@ def fetch_projects_with_total_root_transaction_count_and_rates(
query_interval,
)
)
- aggregated_projects = defaultdict(list)
for chunk in TimedIterator(context, project_count_query_iter, func_name):
for org_id, project_id, root_count_value, keep_count, drop_count in chunk:
aggregated_projects[org_id].append(
@@ -338,7 +340,7 @@ def fetch_projects_with_total_root_transaction_count_and_rates(
)
context.get_function_state(func_name).num_orgs = len(aggregated_projects)
- return aggregated_projects
+ return aggregated_projects
def query_project_counts_by_org(
diff --git a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py
index 6d2a41dc3b3346..bc45d96db63fa4 100644
--- a/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py
+++ b/src/sentry/dynamic_sampling/tasks/boost_low_volume_transactions.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
from collections.abc import Callable, Iterator, Sequence
from datetime import datetime
-from typing import TypedDict, cast
+from typing import TypedDict
import sentry_sdk
from snuba_sdk import (
@@ -72,23 +74,19 @@ class ProjectIdentity(TypedDict, total=True):
org_id: int
-class ProjectTransactions(TypedDict, total=True):
+class ProjectTransactions(ProjectIdentity, total=True):
"""
Information about the project transactions
"""
- project_id: int
- org_id: int
transaction_counts: list[tuple[str, float]]
total_num_transactions: float | None
total_num_classes: int | None
-class ProjectTransactionsTotals(TypedDict, total=True):
- project_id: int
- org_id: int
+class ProjectTransactionsTotals(ProjectIdentity, total=True):
total_num_transactions: float
- total_num_classes: int
+ total_num_classes: int | float
@instrumented_task(
@@ -299,10 +297,10 @@ def __init__(self, orgs: Sequence[int]):
self.cache: list[dict[str, int | float]] = []
self.last_org_id: int | None = None
- def __iter__(self):
+ def __iter__(self) -> FetchProjectTransactionTotals:
return self
- def __next__(self):
+ def __next__(self) -> ProjectTransactionsTotals:
self._ensure_log_state()
assert self.log_state is not None
@@ -369,7 +367,7 @@ def __next__(self):
return self._get_from_cache()
- def _get_from_cache(self):
+ def _get_from_cache(self) -> ProjectTransactionsTotals:
if self._cache_empty():
raise StopIteration()
@@ -379,15 +377,15 @@ def _get_from_cache(self):
assert self.log_state is not None
row = self.cache.pop(0)
- proj_id = row["project_id"]
- org_id = row["org_id"]
+ proj_id = int(row["project_id"])
+ org_id = int(row["org_id"])
num_transactions = row["num_transactions"]
- num_classes = row["num_classes"]
+ num_classes = int(row["num_classes"])
self.log_state.num_projects += 1
if self.last_org_id != org_id:
- self.last_org_id = cast(int, org_id)
+ self.last_org_id = org_id
self.log_state.num_orgs += 1
return {
@@ -397,14 +395,14 @@ def _get_from_cache(self):
"total_num_classes": num_classes,
}
- def _cache_empty(self):
+ def _cache_empty(self) -> bool:
return not self.cache
- def _ensure_log_state(self):
+ def _ensure_log_state(self) -> None:
if self.log_state is None:
self.log_state = DynamicSamplingLogState()
- def get_current_state(self):
+ def get_current_state(self) -> DynamicSamplingLogState:
"""
Returns the current state of the iterator (how many orgs and projects it has iterated over)
@@ -412,10 +410,13 @@ def get_current_state(self):
"""
self._ensure_log_state()
+ assert (
+ self.log_state is not None
+ ) # XXX: putting the assertion in _ensure_log_state doesn't satisfy mypy
return self.log_state
- def set_current_state(self, log_state: DynamicSamplingLogState) -> None:
+ def set_current_state(self, log_state: DynamicSamplingLogState | None) -> None:
"""
Set the log state from outside (typically immediately after creation)
@@ -464,7 +465,7 @@ def __init__(
else:
self.transaction_ordering = Direction.ASC
- def __iter__(self):
+ def __iter__(self) -> FetchProjectTransactionVolumes:
return self
def __next__(self) -> ProjectTransactions:
@@ -550,7 +551,7 @@ def __next__(self) -> ProjectTransactions:
# return from cache if empty stops iteration
return self._get_from_cache()
- def _add_results_to_cache(self, data):
+ def _add_results_to_cache(self, data: list[dict[str, int | float | str]]) -> None:
transaction_counts: list[tuple[str, float]] = []
current_org_id: int | None = None
current_proj_id: int | None = None
@@ -559,10 +560,10 @@ def _add_results_to_cache(self, data):
assert self.log_state is not None
for row in data:
- proj_id = row["project_id"]
- org_id = row["org_id"]
- transaction_name = row["transaction_name"]
- num_transactions = row["num_transactions"]
+ proj_id = int(row["project_id"])
+ org_id = int(row["org_id"])
+ transaction_name = str(row["transaction_name"])
+ num_transactions = float(row["num_transactions"])
if current_proj_id != proj_id or current_org_id != org_id:
if (
transaction_counts
@@ -603,7 +604,7 @@ def _add_results_to_cache(self, data):
}
)
- def _cache_empty(self):
+ def _cache_empty(self) -> bool:
return not self.cache
def _get_from_cache(self) -> ProjectTransactions:
@@ -612,11 +613,11 @@ def _get_from_cache(self) -> ProjectTransactions:
return self.cache.pop(0)
- def _ensure_log_state(self):
+ def _ensure_log_state(self) -> None:
if self.log_state is None:
self.log_state = DynamicSamplingLogState()
- def get_current_state(self):
+ def get_current_state(self) -> DynamicSamplingLogState:
"""
Returns the current state of the iterator (how many orgs and projects it has iterated over)
@@ -624,10 +625,13 @@ def get_current_state(self):
"""
self._ensure_log_state()
+ assert (
+ self.log_state is not None
+ ) # XXX: putting the assertion in _ensure_log_state doesn't satisfy mypy
return self.log_state
- def set_current_state(self, log_state: DynamicSamplingLogState) -> None:
+ def set_current_state(self, log_state: DynamicSamplingLogState | None) -> None:
"""
Set the log state from outside (typically immediately after creation)
@@ -640,7 +644,7 @@ def set_current_state(self, log_state: DynamicSamplingLogState) -> None:
def merge_transactions(
- left: ProjectTransactions,
+ left: ProjectTransactions | None,
right: ProjectTransactions | None,
totals: ProjectTransactionsTotals | None,
) -> ProjectTransactions:
@@ -657,10 +661,12 @@ def merge_transactions(
)
if totals is not None and not is_same_project(left, totals):
+ left_tuple = (left["org_id"], left["project_id"]) if left is not None else None
+ totals_tuple = (totals["org_id"], totals["project_id"]) if totals is not None else None
raise ValueError(
"mismatched projectTransaction and projectTransactionTotals",
- (left["org_id"], left["project_id"]),
- (totals["org_id"], totals["project_id"]),
+ left_tuple,
+ totals_tuple,
)
assert left is not None
@@ -679,6 +685,8 @@ def merge_transactions(
# not already in left, add it
merged_transactions.append((transaction_name, count))
+ total_num_classes = totals.get("total_num_classes") if totals is not None else None
+
return {
"org_id": left["org_id"],
"project_id": left["project_id"],
@@ -686,7 +694,7 @@ def merge_transactions(
"total_num_transactions": (
totals.get("total_num_transactions") if totals is not None else None
),
- "total_num_classes": totals.get("total_num_classes") if totals is not None else None,
+ "total_num_classes": int(total_num_classes) if total_num_classes is not None else None,
}
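
A standalone sketch of the TypedDict-inheritance move applied to ProjectTransactions and ProjectTransactionsTotals above; the Example names are illustrative only, not part of this change.

from typing import TypedDict


class ProjectIdentityExample(TypedDict, total=True):
    project_id: int
    org_id: int


# Inheriting from another TypedDict pulls its keys in, so the shared
# identity fields are declared once rather than repeated per dict type.
class ProjectTotalsExample(ProjectIdentityExample, total=True):
    total_num_transactions: float
    total_num_classes: int


totals: ProjectTotalsExample = {
    "project_id": 1,
    "org_id": 42,
    "total_num_transactions": 1200.0,
    "total_num_classes": 7,
}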
diff --git a/src/sentry/dynamic_sampling/tasks/common.py b/src/sentry/dynamic_sampling/tasks/common.py
index 0e8429c221f64c..1af4b041119861 100644
--- a/src/sentry/dynamic_sampling/tasks/common.py
+++ b/src/sentry/dynamic_sampling/tasks/common.py
@@ -1,10 +1,12 @@
+from __future__ import annotations
+
import math
import time
-from collections.abc import Iterator, Mapping
+from collections.abc import Callable, Iterator, Mapping
from dataclasses import dataclass
from datetime import datetime, timedelta
from functools import wraps
-from typing import Any, Protocol
+from typing import TYPE_CHECKING, Any, Protocol
import sentry_sdk
from snuba_sdk import (
@@ -34,6 +36,12 @@
from sentry.snuba.referrer import Referrer
from sentry.utils.snuba import raw_snql_query
+if TYPE_CHECKING:
+ from sentry.dynamic_sampling.tasks.boost_low_volume_transactions import (
+ FetchProjectTransactionTotals,
+ FetchProjectTransactionVolumes,
+ )
+
ACTIVE_ORGS_DEFAULT_TIME_INTERVAL = timedelta(hours=1)
ACTIVE_ORGS_DEFAULT_GRANULARITY = Granularity(3600)
@@ -42,7 +50,7 @@
class TimeoutException(Exception):
- def __init__(self, task_context: TaskContext, *args):
+ def __init__(self, task_context: TaskContext, *args: Any) -> None:
super().__init__(
[task_context, *args],
)
@@ -60,20 +68,20 @@ class LogStateCallable(Protocol):
"""
- def __call__(self, state: DynamicSamplingLogState, *args, **kwargs) -> Any: ...
+ def __call__(self, state: DynamicSamplingLogState, *args: Any, **kwargs: Any) -> Any: ...
__name__: str
-def timed_function(name=None):
- def timed_function_decorator(inner: LogStateCallable):
+def timed_function(name: str | None = None) -> Callable[[LogStateCallable], Callable[..., Any]]:
+ def timed_function_decorator(inner: LogStateCallable) -> Callable[..., Any]:
if name is not None:
func_name = name
else:
func_name = inner.__name__
@wraps(inner)
- def wrapped(context: TaskContext, *args, **kwargs):
+ def wrapped(context: TaskContext, *args: Any, **kwargs: Any) -> Any:
if time.monotonic() > context.expiration_time:
raise TimeoutException(context)
timer = context.get_timer(func_name)
@@ -94,9 +102,9 @@ class ContextIterator(Protocol):
An iterator that also can return its current state ( used for logging)
"""
- def __iter__(self): ...
+ def __iter__(self) -> Iterator[Any]: ...
- def __next__(self): ...
+ def __next__(self) -> Any: ...
def get_current_state(self) -> DynamicSamplingLogState:
"""
@@ -121,10 +129,10 @@ def __init__(self, inner: Iterator[Any]):
self.inner = inner
self.log_state = DynamicSamplingLogState()
- def __iter__(self):
+ def __iter__(self) -> _SimpleContextIterator:
return self
- def __next__(self):
+ def __next__(self) -> Any:
return next(self.inner)
def get_current_state(self) -> DynamicSamplingLogState:
@@ -134,7 +142,7 @@ def set_current_state(self, state: DynamicSamplingLogState) -> None:
self.log_state = state
-def to_context_iterator(inner: Iterator[Any]) -> ContextIterator:
+def to_context_iterator(inner: Iterator[Any]) -> _SimpleContextIterator:
"""
Adds a LogState to a simple iterator turning it into a ContextIterator
@@ -154,9 +162,14 @@ class TimedIterator(Iterator[Any]):
def __init__(
self,
context: TaskContext,
- inner: ContextIterator,
+ inner: (
+ ContextIterator
+ | GetActiveOrgs
+ | FetchProjectTransactionTotals
+ | FetchProjectTransactionVolumes
+ ),
name: str | None = None,
- ):
+ ) -> None:
self.context = context
self.inner = inner
@@ -168,10 +181,10 @@ def __init__(
# pick up where you last left of
inner.set_current_state(context.get_function_state(name))
- def __iter__(self):
+ def __iter__(self) -> TimedIterator:
return self
- def __next__(self):
+ def __next__(self) -> Any:
if time.monotonic() > self.context.expiration_time:
raise TimeoutException(self.context)
timer = self.context.get_timer(self.name)
@@ -210,7 +223,7 @@ def __init__(
max_projects: int | None = None,
time_interval: timedelta = ACTIVE_ORGS_DEFAULT_TIME_INTERVAL,
granularity: Granularity = ACTIVE_ORGS_DEFAULT_GRANULARITY,
- ):
+ ) -> None:
self.metric_id = indexer.resolve_shared_org(
str(TransactionMRI.COUNT_PER_ROOT_PROJECT.value)
@@ -224,7 +237,7 @@ def __init__(
self.time_interval = time_interval
self.granularity = granularity
- def __iter__(self):
+ def __iter__(self) -> GetActiveOrgs:
return self
def __next__(self) -> list[int]:
@@ -288,7 +301,7 @@ def __next__(self) -> list[int]:
# nothing left in the DB or cache
raise StopIteration()
- def get_current_state(self):
+ def get_current_state(self) -> DynamicSamplingLogState:
"""
Returns the current state of the iterator (how many orgs and projects it has iterated over)
@@ -297,10 +310,10 @@ def get_current_state(self):
"""
return self.log_state
- def set_current_state(self, log_state: DynamicSamplingLogState):
+ def set_current_state(self, log_state: DynamicSamplingLogState) -> None:
self.log_state = log_state
- def _enough_results_cached(self):
+ def _enough_results_cached(self) -> bool:
"""
Return true if we have enough data to return a full batch in the cache (i.e. last_result)
"""
@@ -315,13 +328,13 @@ def _enough_results_cached(self):
return True
return False
- def _get_orgs(self, orgs_and_counts):
+ def _get_orgs(self, orgs_and_counts: list[tuple[int, int]]) -> list[int]:
"""
Extracts the orgs from last_result
"""
return [org for org, _ in orgs_and_counts]
- def _get_from_cache(self):
+ def _get_from_cache(self) -> list[int]:
"""
Returns a batch from cache and removes the elements returned from the cache
"""
@@ -372,9 +385,9 @@ def __init__(
max_orgs: int = MAX_ORGS_PER_QUERY,
time_interval: timedelta = ACTIVE_ORGS_VOLUMES_DEFAULT_TIME_INTERVAL,
granularity: Granularity = ACTIVE_ORGS_VOLUMES_DEFAULT_GRANULARITY,
- include_keep=True,
+ include_keep: bool = True,
orgs: list[int] | None = None,
- ):
+ ) -> None:
self.include_keep = include_keep
self.orgs = orgs
self.metric_id = indexer.resolve_shared_org(
@@ -407,7 +420,7 @@ def __init__(
self.granularity = granularity
self.time_interval = time_interval
- def __iter__(self):
+ def __iter__(self) -> GetActiveOrgsVolumes:
return self
def __next__(self) -> list[OrganizationDataVolume]:
@@ -481,7 +494,7 @@ def __next__(self) -> list[OrganizationDataVolume]:
# nothing left in the DB or cache
raise StopIteration()
- def get_current_state(self):
+ def get_current_state(self) -> DynamicSamplingLogState:
"""
Returns the current state of the iterator (how many orgs and projects it has iterated over)
@@ -490,10 +503,10 @@ def get_current_state(self):
"""
return self.log_state
- def set_current_state(self, log_state: DynamicSamplingLogState):
+ def set_current_state(self, log_state: DynamicSamplingLogState) -> None:
self.log_state = log_state
- def _enough_results_cached(self):
+ def _enough_results_cached(self) -> bool:
"""
Return true if we have enough data to return a full batch in the cache (i.e. last_result)
"""
@@ -647,7 +660,11 @@ def compute_guarded_sliding_window_sample_rate(
def compute_sliding_window_sample_rate(
- org_id: int, project_id: int | None, total_root_count: int, window_size: int, context
+ org_id: int,
+ project_id: int | None,
+ total_root_count: int,
+ window_size: int,
+ context: TaskContext,
) -> float | None:
"""
Computes the actual sample rate for the sliding window given the total root count and the size of the
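
The if TYPE_CHECKING: block added to common.py above is the usual way to reference types whose import would be circular at runtime; a rough sketch of the pattern, with tasks_module as a hypothetical placeholder rather than a real sentry module:

from __future__ import annotations  # annotations stay unevaluated strings

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only imported by the type checker, so no import cycle at runtime.
    from tasks_module import VolumeIterator  # hypothetical module/type


def count(it: VolumeIterator | list[int]) -> int:
    # The annotation is never evaluated at runtime under postponed
    # annotations, so the guarded import above is enough.
    return sum(1 for _ in it)


print(count([1, 2, 3]))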
diff --git a/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py b/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py
index 9c9d0056b9b856..ae426a76edb3a3 100644
--- a/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py
+++ b/src/sentry/dynamic_sampling/tasks/custom_rule_notifications.py
@@ -2,6 +2,8 @@
Task for sending notifications when custom rules have gathered enough samples.
"""
+from __future__ import annotations
+
from datetime import datetime, timezone
from django.http import QueryDict
diff --git a/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py b/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py
index de243c919cf448..fc68bd8ff362b6 100644
--- a/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py
+++ b/src/sentry/dynamic_sampling/tasks/recalibrate_orgs.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from collections.abc import Sequence
import sentry_sdk
diff --git a/src/sentry/dynamic_sampling/tasks/sliding_window_org.py b/src/sentry/dynamic_sampling/tasks/sliding_window_org.py
index 32a5d4d37c7bff..0f3381d2ef47db 100644
--- a/src/sentry/dynamic_sampling/tasks/sliding_window_org.py
+++ b/src/sentry/dynamic_sampling/tasks/sliding_window_org.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import time
from datetime import timedelta
diff --git a/src/sentry/dynamic_sampling/tasks/task_context.py b/src/sentry/dynamic_sampling/tasks/task_context.py
index 2f9167c8d2f4df..a1c22a6d5ca3e3 100644
--- a/src/sentry/dynamic_sampling/tasks/task_context.py
+++ b/src/sentry/dynamic_sampling/tasks/task_context.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
import time
from dataclasses import dataclass
-from typing import Any
+from types import TracebackType
+from typing import Any, Literal
@dataclass
@@ -35,7 +38,7 @@ def increment(
num_iterations: int = 0,
num_projects: int = 0,
num_orgs: int = 0,
- ) -> "DynamicSamplingLogState":
+ ) -> DynamicSamplingLogState:
self.num_rows_total += num_rows_total
self.num_db_calls += num_db_calls
self.num_iterations += num_iterations
@@ -45,7 +48,7 @@ def increment(
return self
@staticmethod
- def from_dict(val: dict[Any, Any] | None) -> "DynamicSamplingLogState":
+ def from_dict(val: dict[Any, Any] | None) -> DynamicSamplingLogState:
if val is not None:
return DynamicSamplingLogState(
num_iterations=val.get("numIterations", 0),
@@ -74,14 +77,14 @@ class TaskContext:
num_seconds: float
context_data: dict[str, DynamicSamplingLogState] | None = None
- def __post_init__(self):
+ def __post_init__(self) -> None:
# always override
self.expiration_time = time.monotonic() + self.num_seconds
if self.context_data is None:
self.context_data = {}
self.timers = Timers()
- def set_function_state(self, function_id: str, log_state: DynamicSamplingLogState):
+ def set_function_state(self, function_id: str, log_state: DynamicSamplingLogState) -> None:
if self.context_data is None:
self.context_data = {}
@@ -103,7 +106,7 @@ def incr_function_state(
num_iterations: int = 0,
num_projects: int = 0,
num_orgs: int = 0,
- ):
+ ) -> None:
self.set_function_state(
function_id,
self.get_function_state(function_id).increment(
@@ -111,7 +114,7 @@ def incr_function_state(
),
)
- def get_timer(self, name) -> "NamedTimer":
+ def get_timer(self, name: str) -> NamedTimer:
return self.timers.get_timer(name)
def to_dict(self) -> dict[str, Any]:
@@ -154,10 +157,10 @@ class Timers:
"""
- def __init__(self):
+ def __init__(self) -> None:
self.timers: dict[str, TimerState] = {}
- def get_timer(self, name: str) -> "NamedTimer":
+ def get_timer(self, name: str) -> NamedTimer:
return NamedTimer(name, self)
def start(self, name: str) -> float:
@@ -209,15 +212,17 @@ class NamedTimer:
assert t.current() == 10
"""
- def __init__(self, name: str, timers: Timers):
+ def __init__(self, name: str, timers: Timers) -> None:
self.name = name
self.timers = timers
- def __enter__(self):
+ def __enter__(self) -> NamedTimer:
self.timers.start(self.name)
return self
- def __exit__(self, exc_type, exc_val, exc_tb):
+ def __exit__(
+ self, exc_type: type[Exception], exc_val: Exception, exc_tb: TracebackType
+ ) -> Literal[False]:
self.timers.stop(self.name)
return False
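
NamedTimer.__exit__ above gains a typed context-manager signature; a self-contained sketch of the same shape, using a hypothetical ElapsedTimer rather than the class from this file (the Optional-style parameters shown here are the conventional form):

from __future__ import annotations

import time
from types import TracebackType
from typing import Literal


class ElapsedTimer:
    """Minimal context manager that records wall-clock duration."""

    def __init__(self) -> None:
        self.elapsed = 0.0
        self._start = 0.0

    def __enter__(self) -> ElapsedTimer:
        self._start = time.monotonic()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> Literal[False]:
        self.elapsed = time.monotonic() - self._start
        return False  # returning False never swallows exceptions


with ElapsedTimer() as timer:
    time.sleep(0.01)
print(f"{timer.elapsed:.3f}s")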
diff --git a/src/sentry/dynamic_sampling/tasks/utils.py b/src/sentry/dynamic_sampling/tasks/utils.py
index b694a9d27a23c5..0551ea2392ccfa 100644
--- a/src/sentry/dynamic_sampling/tasks/utils.py
+++ b/src/sentry/dynamic_sampling/tasks/utils.py
@@ -1,13 +1,13 @@
from collections.abc import Callable
from functools import wraps
from random import random
-from typing import Concatenate, ParamSpec
+from typing import Any, Concatenate, ParamSpec
from sentry.dynamic_sampling.tasks.task_context import TaskContext
from sentry.utils import metrics
-def sample_function(function, _sample_rate: float = 1.0, **kwargs):
+def sample_function(function: Callable[..., Any], _sample_rate: float = 1.0, **kwargs: Any) -> None:
"""
Calls the supplied function with a uniform probability of `_sample_rate`.
"""
@@ -25,8 +25,8 @@ def _compute_task_name(function_name: str) -> str:
def dynamic_sampling_task_with_context(
max_task_execution: int,
-) -> Callable[[DynamicTaskWithContextType], Callable[P, None]]:
- def wrapper(func: DynamicTaskWithContextType) -> Callable[P, None]:
+) -> Callable[[DynamicTaskWithContextType[P]], Callable[P, None]]:
+ def wrapper(func: DynamicTaskWithContextType[P]) -> Callable[P, None]:
@wraps(func)
def _wrapper(*args: P.args, **kwargs: P.kwargs) -> None:
function_name = func.__name__
@@ -44,9 +44,9 @@ def _wrapper(*args: P.args, **kwargs: P.kwargs) -> None:
return wrapper
-def dynamic_sampling_task(func):
+def dynamic_sampling_task(func: Callable[..., Any]) -> Callable[..., Any]:
@wraps(func)
- def _wrapper(*args, **kwargs):
+ def _wrapper(*args: Any, **kwargs: Any) -> Any:
function_name = func.__name__
task_name = _compute_task_name(function_name)
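
dynamic_sampling_task_with_context above is typed over a ParamSpec; a rough sketch of that decorator shape, assuming the decorated task takes its context as the first argument (Context and with_context are stand-ins, not the real TaskContext API):

from __future__ import annotations

from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec

P = ParamSpec("P")


class Context:
    """Stand-in for a task context object."""

    def __init__(self, name: str, budget: float) -> None:
        self.name = name
        self.budget = budget


def with_context(
    budget: float,
) -> Callable[[Callable[Concatenate[Context, P], None]], Callable[P, None]]:
    # The decorated function must accept a Context first; callers never pass it.
    def wrapper(func: Callable[Concatenate[Context, P], None]) -> Callable[P, None]:
        @wraps(func)
        def _wrapper(*args: P.args, **kwargs: P.kwargs) -> None:
            func(Context(func.__name__, budget), *args, **kwargs)

        return _wrapper

    return wrapper


@with_context(budget=2.5)
def run(context: Context, org_id: int) -> None:
    print(context.name, context.budget, org_id)


run(org_id=1)  # Context is injected by the decorator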
diff --git a/src/sentry/features/permanent.py b/src/sentry/features/permanent.py
index ee4cd29e5d7bfd..c77a561793c55e 100644
--- a/src/sentry/features/permanent.py
+++ b/src/sentry/features/permanent.py
@@ -9,7 +9,7 @@
# XXX: See `features/__init__.py` for documentation on how to use feature flags
-def register_permanent_features(manager: FeatureManager):
+def register_permanent_features(manager: FeatureManager) -> None:
"""
These flags are permanent.
diff --git a/src/sentry/features/temporary.py b/src/sentry/features/temporary.py
index 8ac816c266181e..72a8afdd22d536 100644
--- a/src/sentry/features/temporary.py
+++ b/src/sentry/features/temporary.py
@@ -10,7 +10,7 @@
# XXX: See `features/__init__.py` for documentation on how to use feature flags
-def register_temporary_features(manager: FeatureManager):
+def register_temporary_features(manager: FeatureManager) -> None:
"""
These flags are temporary. These flags exist as a way for us to gate newly
developed features.
diff --git a/src/sentry/feedback/apps.py b/src/sentry/feedback/apps.py
index 88d6a8cf97338f..e316070d3ec478 100644
--- a/src/sentry/feedback/apps.py
+++ b/src/sentry/feedback/apps.py
@@ -4,5 +4,5 @@
class Config(AppConfig):
name = "sentry.feedback"
- def ready(self):
+ def ready(self) -> None:
pass
diff --git a/src/sentry/feedback/usecases/ingest/save_event_feedback.py b/src/sentry/feedback/usecases/ingest/save_event_feedback.py
index de1ca7f91c1ca8..3b61401ba571bd 100644
--- a/src/sentry/feedback/usecases/ingest/save_event_feedback.py
+++ b/src/sentry/feedback/usecases/ingest/save_event_feedback.py
@@ -13,7 +13,7 @@
logger = logging.getLogger(__name__)
-def save_event_feedback(event_data: Mapping[str, Any], project_id: int):
+def save_event_feedback(event_data: Mapping[str, Any], project_id: int) -> None:
"""Saves feedback given data in an event format. This function should only
be called by the feedback consumer's ingest strategy, to process
event envelopes (feedback v2). It is currently called in a task in
diff --git a/src/sentry/feedback/usecases/ingest/shim_to_feedback.py b/src/sentry/feedback/usecases/ingest/shim_to_feedback.py
index 4265e84c07cf62..15de84df3c8a5c 100644
--- a/src/sentry/feedback/usecases/ingest/shim_to_feedback.py
+++ b/src/sentry/feedback/usecases/ingest/shim_to_feedback.py
@@ -22,7 +22,7 @@ def shim_to_feedback(
event: Event | GroupEvent,
project: Project,
source: FeedbackCreationSource,
-):
+) -> None:
"""
takes user reports from the legacy user report form/endpoint and
user reports that come from relay envelope ingestion and
diff --git a/src/sentry/identity/slack/provider.py b/src/sentry/identity/slack/provider.py
index 4cac4c4a3f4720..d8cd87778ca2ef 100644
--- a/src/sentry/identity/slack/provider.py
+++ b/src/sentry/identity/slack/provider.py
@@ -18,14 +18,14 @@ class SlackIdentityProvider(OAuth2Provider):
# user_scope, needed for unfurling.
user_scopes = ()
- def get_oauth_authorize_url(self):
+ def get_oauth_authorize_url(self) -> str:
return "https://slack.com/oauth/v2/authorize"
# XXX(epurkhiser): While workspace tokens _do_ support the oauth.access
# endpoint, it will not include the authorizing_user, so we continue to use
# the deprecated oauth.token endpoint until we are able to migrate to a bot
# app which uses oauth.access.
- def get_oauth_access_token_url(self):
+ def get_oauth_access_token_url(self) -> str:
return "https://slack.com/api/oauth.v2.access"
def get_oauth_client_id(self):
diff --git a/src/sentry/identity/vercel/provider.py b/src/sentry/identity/vercel/provider.py
index b982ea6c248123..8422b0805d47ec 100644
--- a/src/sentry/identity/vercel/provider.py
+++ b/src/sentry/identity/vercel/provider.py
@@ -11,10 +11,10 @@ class VercelIdentityProvider(OAuth2Provider):
# https://vercel.com/docs/integrations/reference#using-the-vercel-api/exchange-code-for-access-token
oauth_access_token_url = "https://api.vercel.com/v2/oauth/access_token"
- def get_oauth_client_id(self):
+ def get_oauth_client_id(self) -> str | int:
return options.get("vercel.client-id")
- def get_oauth_client_secret(self):
+ def get_oauth_client_secret(self) -> str:
return options.get("vercel.client-secret")
def get_refresh_token_url(self) -> str:
diff --git a/src/sentry/incidents/apps.py b/src/sentry/incidents/apps.py
index cd4308915a321e..5333173f8d03d4 100644
--- a/src/sentry/incidents/apps.py
+++ b/src/sentry/incidents/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.incidents"
- def ready(self):
+ def ready(self) -> None:
from . import action_handlers # NOQA
from . import events # NOQA
from . import receivers # NOQA
diff --git a/src/sentry/incidents/charts.py b/src/sentry/incidents/charts.py
index b2cfebb5310baa..1fbe9a52df4761 100644
--- a/src/sentry/incidents/charts.py
+++ b/src/sentry/incidents/charts.py
@@ -1,7 +1,7 @@
from collections.abc import Mapping
from datetime import datetime, timedelta
from functools import reduce
-from typing import Any, Optional
+from typing import Any
from django.utils import timezone
@@ -22,7 +22,7 @@
from sentry.snuba.models import QuerySubscription, SnubaQuery, SnubaQueryEventType
from sentry.snuba.referrer import Referrer
from sentry.snuba.utils import build_query_strings
-from sentry.users.models.user import User
+from sentry.users.services.user.model import RpcUser
CRASH_FREE_SESSIONS = "percentage(sessions_crashed, sessions) AS _crash_rate_alert_aggregate"
CRASH_FREE_USERS = "percentage(users_crashed, users) AS _crash_rate_alert_aggregate"
@@ -66,7 +66,7 @@ def fetch_metric_alert_sessions_data(
organization: Organization,
rule_aggregate: str,
query_params: Mapping[str, str],
- user: Optional["User"] = None,
+ user: RpcUser | None = None,
) -> Any:
try:
resp = client.get(
@@ -92,7 +92,7 @@ def fetch_metric_alert_events_timeseries(
organization: Organization,
rule_aggregate: str,
query_params: Mapping[str, str],
- user: Optional["User"] = None,
+ user: RpcUser | None = None,
) -> list[Any]:
try:
resp = client.get(
@@ -130,7 +130,7 @@ def fetch_metric_issue_open_periods(
organization: Organization,
open_period_identifier: int,
time_period: Mapping[str, str],
- user: Optional["User"] = None,
+ user: RpcUser | None = None,
) -> list[Any]:
try:
resp = client.get(
@@ -166,7 +166,7 @@ def build_metric_alert_chart(
period: str | None = None,
start: str | None = None,
end: str | None = None,
- user: Optional["User"] = None,
+ user: RpcUser | None = None,
size: ChartSize | None = None,
subscription: QuerySubscription | None = None,
) -> str | None:
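
The charts.py hunks above replace Optional["User"] string forward references with a plain RpcUser | None union; a generic sketch of the same modernization, with Viewer as a placeholder type:

from dataclasses import dataclass


@dataclass
class Viewer:  # placeholder for a user-like type
    id: int
    email: str


# Before: def build_chart_url(slug: str, user: Optional["Viewer"] = None) -> str
# After: the PEP 604 union below means the same thing to type checkers and
# needs no quoting once the real type is imported directly.
def build_chart_url(slug: str, user: Viewer | None = None) -> str:
    suffix = f"?viewer={user.id}" if user is not None else ""
    return f"https://example.invalid/charts/{slug}{suffix}"


print(build_chart_url("alerts", Viewer(id=7, email="a@example.invalid")))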
diff --git a/src/sentry/ingest/inbound_filters.py b/src/sentry/ingest/inbound_filters.py
index b6a5fd6cfb671e..f9a20130eda961 100644
--- a/src/sentry/ingest/inbound_filters.py
+++ b/src/sentry/ingest/inbound_filters.py
@@ -78,7 +78,7 @@ def get_all_filter_specs():
return tuple(filters) # returning tuple for backwards compatibility
-def set_filter_state(filter_id, project, state):
+def set_filter_state(filter_id, project: Project, state):
flt = _filter_from_filter_id(filter_id)
if flt is None:
raise FilterNotRegistered(filter_id)
diff --git a/src/sentry/insights/migrations/0002_backfill_team_starred.py b/src/sentry/insights/migrations/0002_backfill_team_starred.py
index 6be5c50c6fae15..75dedd940a53d2 100644
--- a/src/sentry/insights/migrations/0002_backfill_team_starred.py
+++ b/src/sentry/insights/migrations/0002_backfill_team_starred.py
@@ -11,7 +11,9 @@
logger = logging.getLogger(__name__)
-def migrate_team_stars_to_user_stars(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor):
+def migrate_team_stars_to_user_stars(
+ apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
+) -> None:
TeamKeyTransaction = apps.get_model("sentry", "teamkeytransaction")
InsightsStarredSegment = apps.get_model("insights", "insightsstarredsegment")
OrganizationMemberTeam = apps.get_model("sentry", "Organizationmemberteam")
diff --git a/src/sentry/integrations/api/endpoints/doc_integration_avatar.py b/src/sentry/integrations/api/endpoints/doc_integration_avatar.py
index dd50d954318acb..271b6981b241db 100644
--- a/src/sentry/integrations/api/endpoints/doc_integration_avatar.py
+++ b/src/sentry/integrations/api/endpoints/doc_integration_avatar.py
@@ -20,5 +20,5 @@ class DocIntegrationAvatarEndpoint(AvatarMixin[DocIntegrationAvatar], DocIntegra
model = DocIntegrationAvatar
serializer_cls = DocIntegrationAvatarSerializer
- def get_avatar_filename(self, obj):
+ def get_avatar_filename(self, obj) -> str:
return f"{obj.slug}.png"
diff --git a/src/sentry/integrations/api/parsers/doc_integration.py b/src/sentry/integrations/api/parsers/doc_integration.py
index 16c6ebf324ac90..1afd6ee47dc05c 100644
--- a/src/sentry/integrations/api/parsers/doc_integration.py
+++ b/src/sentry/integrations/api/parsers/doc_integration.py
@@ -25,7 +25,7 @@
METADATA_PROPERTIES = list(METADATA_SCHEMA["properties"].keys())
-def validate_metadata_schema(instance: Any):
+def validate_metadata_schema(instance: Any) -> Any:
v = Draft7Validator(METADATA_SCHEMA)
if not v.is_valid(instance):
raise best_match(v.iter_errors(instance))
diff --git a/src/sentry/integrations/api/serializers/models/doc_integration.py b/src/sentry/integrations/api/serializers/models/doc_integration.py
index 240ab9937bf5e6..78ddacd2de4240 100644
--- a/src/sentry/integrations/api/serializers/models/doc_integration.py
+++ b/src/sentry/integrations/api/serializers/models/doc_integration.py
@@ -18,7 +18,7 @@ def get_attrs(
item_list: Sequence[DocIntegration],
user: User | RpcUser | AnonymousUser,
**kwargs: Any,
- ):
+ ) -> dict[DocIntegration, dict[str, Any]]:
# Get associated IntegrationFeatures
doc_feature_attrs = IntegrationFeature.objects.get_by_targets_as_dict(
targets=item_list, target_type=IntegrationTypes.DOC_INTEGRATION
diff --git a/src/sentry/integrations/base.py b/src/sentry/integrations/base.py
index 0a511c7df7bf38..e46042fd9d6039 100644
--- a/src/sentry/integrations/base.py
+++ b/src/sentry/integrations/base.py
@@ -414,7 +414,7 @@ def update_organization_config(self, data: MutableMapping[str, Any]) -> None:
if org_integration is not None:
self.org_integration = org_integration
- def get_config_data(self) -> Mapping[str, str]:
+ def get_config_data(self) -> Mapping[str, Any]:
if not self.org_integration:
return {}
return self.org_integration.config
@@ -538,7 +538,7 @@ def is_response_error(resp: Any) -> bool:
return resp.status_code >= 400 and resp.status_code != 429 and resp.status_code < 500
-def get_integration_types(provider: str):
+def get_integration_types(provider: str) -> list[IntegrationDomain]:
types = []
for integration_type, providers in INTEGRATION_TYPE_TO_PROVIDER.items():
if provider in providers:
diff --git a/src/sentry/integrations/bitbucket/search.py b/src/sentry/integrations/bitbucket/search.py
index aa0122bdfb01ce..49420c9bb6dfa4 100644
--- a/src/sentry/integrations/bitbucket/search.py
+++ b/src/sentry/integrations/bitbucket/search.py
@@ -30,7 +30,7 @@ class BitbucketSearchEndpoint(SourceCodeSearchEndpoint):
}
@property
- def repository_field(self):
+ def repository_field(self) -> str:
return "repo"
@property
diff --git a/src/sentry/integrations/discord/message_builder/base/flags.py b/src/sentry/integrations/discord/message_builder/base/flags.py
index e44a8f66b69958..38cc6ff6ca11df 100644
--- a/src/sentry/integrations/discord/message_builder/base/flags.py
+++ b/src/sentry/integrations/discord/message_builder/base/flags.py
@@ -15,7 +15,7 @@ class DiscordMessageFlags:
https://discord.com/developers/docs/resources/channel#message-object-message-flags
"""
- def __init__(self):
+ def __init__(self) -> None:
self.value = 0
def set_ephemeral(self) -> DiscordMessageFlags:
diff --git a/src/sentry/integrations/example/integration.py b/src/sentry/integrations/example/integration.py
index 5520c9ba7299ce..8642fd5a1dc90a 100644
--- a/src/sentry/integrations/example/integration.py
+++ b/src/sentry/integrations/example/integration.py
@@ -85,7 +85,7 @@ def integration_name(self) -> str:
def get_client(self):
pass
- def get_issue_url(self, key):
+ def get_issue_url(self, key) -> str:
return f"https://example/issues/{key}"
def create_comment(self, issue_id, user_id, group_note):
@@ -173,7 +173,7 @@ def get_resolve_sync_action(self, data: Mapping[str, Any]) -> ResolveSyncAction:
should_unresolve=category != "done",
)
- def get_issue_display_name(self, external_issue):
+ def get_issue_display_name(self, external_issue) -> str:
return f"display name: {external_issue.key}"
def get_stacktrace_link(
diff --git a/src/sentry/integrations/github/search.py b/src/sentry/integrations/github/search.py
index a4ca5c19fb1ccf..3752e6e1093c87 100644
--- a/src/sentry/integrations/github/search.py
+++ b/src/sentry/integrations/github/search.py
@@ -22,7 +22,7 @@ class GithubSharedSearchEndpoint(SourceCodeSearchEndpoint):
"""NOTE: This endpoint is a shared search endpoint for Github and Github Enterprise integrations."""
@property
- def repository_field(self):
+ def repository_field(self) -> str:
return "repo"
@property
diff --git a/src/sentry/integrations/github/tasks/pr_comment.py b/src/sentry/integrations/github/tasks/pr_comment.py
index b8e9a203c07392..de50bce69f0075 100644
--- a/src/sentry/integrations/github/tasks/pr_comment.py
+++ b/src/sentry/integrations/github/tasks/pr_comment.py
@@ -30,7 +30,7 @@
namespace=integrations_tasks,
),
)
-def github_comment_workflow(pullrequest_id: int, project_id: int):
+def github_comment_workflow(pullrequest_id: int, project_id: int) -> None:
# TODO(jianyuan): Using `sentry.integrations.source_code_management.tasks.pr_comment_workflow` now.
# Keep this task temporarily to avoid breaking changes.
pr_comment_workflow(pr_id=pullrequest_id, project_id=project_id)
@@ -43,7 +43,7 @@ def github_comment_workflow(pullrequest_id: int, project_id: int):
namespace=integrations_tasks,
),
)
-def github_comment_reactions():
+def github_comment_reactions() -> None:
logger.info("github.pr_comment.reactions_task")
comments = PullRequestComment.objects.filter(
diff --git a/src/sentry/integrations/gitlab/blame.py b/src/sentry/integrations/gitlab/blame.py
index ca2ab6351ca055..3a3a3b81032880 100644
--- a/src/sentry/integrations/gitlab/blame.py
+++ b/src/sentry/integrations/gitlab/blame.py
@@ -122,7 +122,9 @@ def _create_file_blame_info(commit: CommitInfo, file: SourceLineInfo) -> FileBla
)
-def _handle_file_blame_error(error: ApiError, file: SourceLineInfo, extra: Mapping[str, Any]):
+def _handle_file_blame_error(
+ error: ApiError, file: SourceLineInfo, extra: Mapping[str, Any]
+) -> None:
# Ignore expected error codes
if error.code in (401, 403, 404):
diff --git a/src/sentry/integrations/gitlab/repository.py b/src/sentry/integrations/gitlab/repository.py
index 315bc22103a7b1..e50ce76eeabbee 100644
--- a/src/sentry/integrations/gitlab/repository.py
+++ b/src/sentry/integrations/gitlab/repository.py
@@ -119,7 +119,7 @@ def _transform_patchset(self, patch_set):
return file_changes
- def pull_request_url(self, repo, pull_request):
+ def pull_request_url(self, repo, pull_request) -> str:
return f"{repo.url}/merge_requests/{pull_request.key}"
def repository_external_slug(self, repo):
diff --git a/src/sentry/integrations/gitlab/search.py b/src/sentry/integrations/gitlab/search.py
index 09bc17902ccbc7..01cffd4ca7e56d 100644
--- a/src/sentry/integrations/gitlab/search.py
+++ b/src/sentry/integrations/gitlab/search.py
@@ -17,7 +17,7 @@
@control_silo_endpoint
class GitlabIssueSearchEndpoint(SourceCodeSearchEndpoint):
@property
- def repository_field(self):
+ def repository_field(self) -> str:
return "project"
@property
diff --git a/src/sentry/integrations/gitlab/utils.py b/src/sentry/integrations/gitlab/utils.py
index a7e561069163c5..7c518875621149 100644
--- a/src/sentry/integrations/gitlab/utils.py
+++ b/src/sentry/integrations/gitlab/utils.py
@@ -49,7 +49,7 @@ class GitLabApiClientPath:
user = "/user"
@staticmethod
- def build_api_url(base_url, path):
+ def build_api_url(base_url, path) -> str:
return f"{base_url.rstrip('/')}{API_VERSION}{path}"
@classmethod
diff --git a/src/sentry/integrations/jira/client.py b/src/sentry/integrations/jira/client.py
index 1def3547508939..9445fe46bfbeca 100644
--- a/src/sentry/integrations/jira/client.py
+++ b/src/sentry/integrations/jira/client.py
@@ -86,7 +86,7 @@ def finalize_request(self, prepared_request: PreparedRequest):
prepared_request.headers["Authorization"] = f"JWT {encoded_jwt}"
return prepared_request
- def get_cache_prefix(self):
+ def get_cache_prefix(self) -> str:
return "sentry-jira-2:"
def user_id_get_param(self):
diff --git a/src/sentry/integrations/jira_server/client.py b/src/sentry/integrations/jira_server/client.py
index 94207efba738f1..8c93078e7b393b 100644
--- a/src/sentry/integrations/jira_server/client.py
+++ b/src/sentry/integrations/jira_server/client.py
@@ -73,7 +73,7 @@ def __init__(
logging_context=logging_context,
)
- def get_cache_prefix(self):
+ def get_cache_prefix(self) -> str:
return "sentry-jira-server:"
def finalize_request(self, prepared_request: PreparedRequest) -> PreparedRequest:
@@ -95,13 +95,13 @@ def authorize_request(self, prepared_request: PreparedRequest):
prepared_request.prepare_auth(auth=auth_scheme)
return prepared_request
- def user_id_get_param(self):
+ def user_id_get_param(self) -> str:
return "username"
- def user_id_field(self):
+ def user_id_field(self) -> str:
return "name"
- def user_query_param(self):
+ def user_query_param(self) -> str:
return "username"
def get_issue(self, issue_id):
diff --git a/src/sentry/integrations/middleware/hybrid_cloud/parser.py b/src/sentry/integrations/middleware/hybrid_cloud/parser.py
index 81d3d16d7ed286..4379c53b2226a8 100644
--- a/src/sentry/integrations/middleware/hybrid_cloud/parser.py
+++ b/src/sentry/integrations/middleware/hybrid_cloud/parser.py
@@ -164,7 +164,7 @@ def get_response_from_webhookpayload(
regions: list[Region],
identifier: int | str | None = None,
integration_id: int | None = None,
- ):
+ ) -> HttpResponseBase:
"""
Used to create webhookpayloads for provided regions to handle the webhooks asynchronously.
Responds to the webhook provider with a 202 Accepted status.
diff --git a/src/sentry/integrations/mixins/issues.py b/src/sentry/integrations/mixins/issues.py
index ba3c9df8d8564c..552d5af155f07b 100644
--- a/src/sentry/integrations/mixins/issues.py
+++ b/src/sentry/integrations/mixins/issues.py
@@ -70,7 +70,7 @@ def from_resolve_unresolve(
class IssueBasicIntegration(IntegrationInstallation, ABC):
- def should_sync(self, attribute, sync_source: AssignmentSource | None = None):
+ def should_sync(self, attribute, sync_source: AssignmentSource | None = None) -> bool:
return False
def get_group_title(self, group, event, **kwargs):
diff --git a/src/sentry/integrations/models/doc_integration_avatar.py b/src/sentry/integrations/models/doc_integration_avatar.py
index d284d08f2effb5..80c2e147852339 100644
--- a/src/sentry/integrations/models/doc_integration_avatar.py
+++ b/src/sentry/integrations/models/doc_integration_avatar.py
@@ -29,5 +29,5 @@ class Meta:
url_path = "doc-integration-avatar"
- def get_cache_key(self, size):
+ def get_cache_key(self, size) -> str:
return f"doc_integration_avatar:{self.doc_integration_id}:{size}"
diff --git a/src/sentry/integrations/models/utils.py b/src/sentry/integrations/models/utils.py
index 3c19d174bc4334..f81a2faaff9e67 100644
--- a/src/sentry/integrations/models/utils.py
+++ b/src/sentry/integrations/models/utils.py
@@ -35,5 +35,5 @@ def has_feature(instance: Integration | RpcIntegration, feature: IntegrationFeat
return feature in instance.get_provider().features
-def get_redis_key(sentryapp: SentryApp | RpcSentryApp, org_id):
+def get_redis_key(sentryapp: SentryApp | RpcSentryApp, org_id) -> str:
return f"sentry-app-error:{sentryapp.id}:{org_id}"
diff --git a/src/sentry/integrations/msteams/card_builder/installation.py b/src/sentry/integrations/msteams/card_builder/installation.py
index 47c56461a89b8a..9a1ebd590caf10 100644
--- a/src/sentry/integrations/msteams/card_builder/installation.py
+++ b/src/sentry/integrations/msteams/card_builder/installation.py
@@ -33,7 +33,9 @@ def create_title_block(text: str) -> ColumnSetBlock:
)
-def build_installation_card(signed_params: str, title: str, description: str, instruction: str):
+def build_installation_card(
+ signed_params: str, title: str, description: str, instruction: str
+) -> AdaptiveCard:
url = absolute_uri(
InstallationMessages.MSTEAMS_CONFIGURE_URL.format(signed_params=signed_params)
)
diff --git a/src/sentry/integrations/msteams/notifications.py b/src/sentry/integrations/msteams/notifications.py
index a0e5026270ccdb..71e412d5cf1434 100644
--- a/src/sentry/integrations/msteams/notifications.py
+++ b/src/sentry/integrations/msteams/notifications.py
@@ -88,7 +88,7 @@ def send_notification_as_msteams(
recipients: Iterable[Actor],
shared_context: Mapping[str, Any],
extra_context_by_actor: Mapping[Actor, Mapping[str, Any]] | None,
-):
+) -> None:
if not is_supported_notification_type(notification):
logger.info(
"Unsupported notification type for Microsoft Teams %s", notification.__class__.__name__
diff --git a/src/sentry/integrations/opsgenie/metrics.py b/src/sentry/integrations/opsgenie/metrics.py
index 34a2e9d5466679..3f28c47cc56f24 100644
--- a/src/sentry/integrations/opsgenie/metrics.py
+++ b/src/sentry/integrations/opsgenie/metrics.py
@@ -4,7 +4,7 @@
from sentry.shared_integrations.exceptions import ApiError, ApiRateLimitedError, ApiUnauthorized
-def record_event(event: OnCallInteractionType):
+def record_event(event: OnCallInteractionType) -> OnCallInteractionEvent:
return OnCallInteractionEvent(event, OpsgenieOnCallSpec())
diff --git a/src/sentry/integrations/opsgenie/utils.py b/src/sentry/integrations/opsgenie/utils.py
index 4f1cb63e0e8742..0b47bb7fb3c4de 100644
--- a/src/sentry/integrations/opsgenie/utils.py
+++ b/src/sentry/integrations/opsgenie/utils.py
@@ -74,7 +74,9 @@ def attach_custom_priority(
return data
-def get_team(team_id: int | str | None, org_integration: RpcOrganizationIntegration | None):
+def get_team(
+ team_id: int | str | None, org_integration: RpcOrganizationIntegration | None
+) -> dict[str, str] | None:
if not org_integration:
return None
teams = org_integration.config.get("team_table")
diff --git a/src/sentry/integrations/pagerduty/actions/form.py b/src/sentry/integrations/pagerduty/actions/form.py
index a6f468f7539d40..ee7d6b25cf1499 100644
--- a/src/sentry/integrations/pagerduty/actions/form.py
+++ b/src/sentry/integrations/pagerduty/actions/form.py
@@ -10,6 +10,7 @@
from sentry.integrations.pagerduty.metrics import record_event
from sentry.integrations.services.integration import integration_service
from sentry.integrations.types import ExternalProviders
+from sentry.utils.forms import set_field_choices
def _validate_int_field(field: str, cleaned_data: Mapping[str, Any]) -> int | None:
@@ -29,28 +30,26 @@ class PagerDutyNotifyServiceForm(forms.Form):
account = forms.ChoiceField(choices=(), widget=forms.Select())
service = forms.ChoiceField(required=False, choices=(), widget=forms.Select())
- def __init__(self, *args, **kwargs):
- integrations = [(i.id, i.name) for i in kwargs.pop("integrations")]
- services = kwargs.pop("services")
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ self._integrations = [(i.id, i.name) for i in kwargs.pop("integrations")]
+ self._services = kwargs.pop("services")
super().__init__(*args, **kwargs)
- if integrations:
- self.fields["account"].initial = integrations[0][0]
+ if self._integrations:
+ self.fields["account"].initial = self._integrations[0][0]
- self.fields["account"].choices = integrations
- self.fields["account"].widget.choices = self.fields["account"].choices
+ set_field_choices(self.fields["account"], self._integrations)
- if services:
- self.fields["service"].initial = services[0][0]
+ if self._services:
+ self.fields["service"].initial = self._services[0][0]
- self.fields["service"].choices = services
- self.fields["service"].widget.choices = self.fields["service"].choices
+ set_field_choices(self.fields["service"], self._services)
def _validate_service(self, service_id: int, integration_id: int) -> None:
with record_event(OnCallInteractionType.VALIDATE_SERVICE).capture() as lifecycle:
params = {
- "account": dict(self.fields["account"].choices).get(integration_id),
- "service": dict(self.fields["service"].choices).get(service_id),
+ "account": dict(self._integrations).get(integration_id),
+ "service": dict(self._services).get(service_id),
}
org_integrations = integration_service.get_organization_integrations(
@@ -77,11 +76,13 @@ def _validate_service(self, service_id: int, integration_id: int) -> None:
def clean(self) -> dict[str, Any] | None:
cleaned_data = super().clean()
+ if cleaned_data is None:
+ return cleaned_data
integration_id = _validate_int_field("account", cleaned_data)
service_id = _validate_int_field("service", cleaned_data)
- if service_id:
+ if service_id and integration_id:
self._validate_service(service_id, integration_id)
return cleaned_data
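
Judging from the removed lines above, set_field_choices presumably assigns a choice list to both the form field and the widget that renders it; a minimal sketch of such a helper (an assumption, not the actual sentry.utils.forms implementation):

from __future__ import annotations

from django import forms


def set_choices(field: forms.ChoiceField, choices: list[tuple[int, str]]) -> None:
    # Assumption drawn from the removed lines above: assign the choice list to
    # the field and mirror it onto the widget that renders it.
    field.choices = choices
    field.widget.choices = field.choices


account = forms.ChoiceField(required=False, choices=(), widget=forms.Select())
set_choices(account, [(1, "Primary account"), (2, "Secondary account")])
print(list(account.widget.choices))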
diff --git a/src/sentry/integrations/pagerduty/actions/notification.py b/src/sentry/integrations/pagerduty/actions/notification.py
index 18fca6d7b07f04..9f19533a7505e7 100644
--- a/src/sentry/integrations/pagerduty/actions/notification.py
+++ b/src/sentry/integrations/pagerduty/actions/notification.py
@@ -1,8 +1,8 @@
from __future__ import annotations
import logging
-from collections.abc import Sequence
-from typing import cast
+from collections.abc import Generator, Sequence
+from typing import Any, TypedDict, cast
import sentry_sdk
@@ -16,12 +16,22 @@
from sentry.integrations.types import IntegrationProviderSlug
from sentry.models.rule import Rule
from sentry.rules.actions import IntegrationEventAction
+from sentry.rules.base import CallbackFuture
+from sentry.services.eventstore.models import GroupEvent
from sentry.shared_integrations.exceptions import ApiError
+from sentry.types.rules import RuleFuture
from sentry.utils.strings import truncatechars
logger = logging.getLogger("sentry.integrations.pagerduty")
+class PagerDutyService(TypedDict):
+ id: int
+ integration_key: str
+ service_name: str
+ integration_id: int
+
+
class PagerDutyNotifyServiceAction(IntegrationEventAction):
id = "sentry.integrations.pagerduty.notify_action.PagerDutyNotifyServiceAction"
label = "Send a notification to PagerDuty account {account} and service {service} with {severity} severity"
@@ -29,7 +39,7 @@ class PagerDutyNotifyServiceAction(IntegrationEventAction):
provider = IntegrationProviderSlug.PAGERDUTY.value
integration_key = "account"
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.form_fields = {
"account": {
@@ -49,7 +59,7 @@ def __init__(self, *args, **kwargs):
},
}
- def _get_service(self):
+ def _get_service(self) -> PagerDutyService | None:
oi = self.get_organization_integration()
if not oi:
return None
@@ -58,7 +68,9 @@ def _get_service(self):
return pds
return None
- def after(self, event, notification_uuid: str | None = None):
+ def after(
+ self, event: GroupEvent, notification_uuid: str | None = None
+ ) -> Generator[CallbackFuture]:
integration = self.get_integration()
log_context = {
"organization_id": self.project.organization_id,
@@ -79,7 +91,7 @@ def after(self, event, notification_uuid: str | None = None):
PagerdutySeverity, self.get_option("severity", default=PAGERDUTY_DEFAULT_SEVERITY)
)
- def send_notification(event, futures):
+ def send_notification(event: GroupEvent, futures: Sequence[RuleFuture]) -> None:
installation = integration.get_installation(self.project.organization_id)
try:
client = installation.get_keyring_client(self.get_option("service"))
@@ -147,7 +159,7 @@ def get_services(self) -> Sequence[tuple[int, str]]:
for v in oi.config.get("pagerduty_services", [])
]
- def render_label(self):
+ def render_label(self) -> str:
s = self._get_service()
if s:
service_name = s["service_name"]
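
after() above is now annotated as a Generator of CallbackFuture; a generic sketch of annotating a yield-based hook, with Future standing in for the real callback type:

from __future__ import annotations

from collections.abc import Callable, Generator
from dataclasses import dataclass, field


@dataclass
class Future:  # stand-in for CallbackFuture / RuleFuture
    callback: Callable[[str], None]
    kwargs: dict[str, object] = field(default_factory=dict)


def after(event: str) -> Generator[Future, None, None]:
    # A generator-style hook: each yielded Future pairs a callback with
    # the keyword arguments it should eventually be invoked with.
    def send_notification(evt: str) -> None:
        print("notify", evt)

    yield Future(callback=send_notification)


for future in after("issue-123"):
    future.callback("issue-123")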
diff --git a/src/sentry/integrations/pagerduty/client.py b/src/sentry/integrations/pagerduty/client.py
index bc024bac79a301..39debaca24e444 100644
--- a/src/sentry/integrations/pagerduty/client.py
+++ b/src/sentry/integrations/pagerduty/client.py
@@ -3,6 +3,8 @@
from enum import StrEnum
from typing import Any
+from requests import Response
+
from sentry.api.serializers import ExternalEventSerializer, serialize
from sentry.integrations.client import ApiClient
from sentry.integrations.on_call.metrics import OnCallInteractionType
@@ -46,7 +48,7 @@ def request(self, *args: Any, **kwargs: Any) -> Any:
kwargs.setdefault("headers", {"Content-Type": "application/json"})
return self._request(*args, **kwargs)
- def send_trigger(self, data: PagerDutyEventPayload):
+ def send_trigger(self, data: PagerDutyEventPayload) -> Response:
with record_event(OnCallInteractionType.CREATE).capture():
return self.post("/", data=data)
diff --git a/src/sentry/integrations/pagerduty/integration.py b/src/sentry/integrations/pagerduty/integration.py
index b6dadfdc99aca5..783c7972f53d5b 100644
--- a/src/sentry/integrations/pagerduty/integration.py
+++ b/src/sentry/integrations/pagerduty/integration.py
@@ -1,8 +1,8 @@
from __future__ import annotations
import logging
-from collections.abc import Mapping, Sequence
-from typing import Any
+from collections.abc import Mapping, MutableMapping, Sequence
+from typing import Any, TypedDict
import orjson
from django.db import router, transaction
@@ -73,6 +73,23 @@
)
+class PagerDutyOrganizationConfig(TypedDict):
+ name: str
+ type: str
+ label: str
+ help: str
+ addButtonText: str
+ columnLabels: dict[str, str]
+ columnKeys: list[str]
+ confirmDeleteMessage: str
+
+
+class PagerDutyServiceConfig(TypedDict):
+ service: str
+ integration_key: str
+ id: int
+
+
class PagerDutyIntegration(IntegrationInstallation):
def get_keyring_client(self, keyid: int | str) -> PagerDutyClient:
org_integration = self.org_integration
@@ -89,11 +106,11 @@ def get_keyring_client(self, keyid: int | str) -> PagerDutyClient:
integration_id=org_integration.integration_id, integration_key=integration_key
)
- def get_client(self):
+ def get_client(self) -> None:
raise NotImplementedError("Use get_keyring_client instead.")
- def get_organization_config(self):
- fields = [
+ def get_organization_config(self) -> list[PagerDutyOrganizationConfig]:
+ return [
{
"name": "service_table",
"type": "table",
@@ -106,9 +123,7 @@ def get_organization_config(self):
}
]
- return fields
-
- def update_organization_config(self, data):
+ def update_organization_config(self, data: MutableMapping[str, Any]) -> None:
if "service_table" in data:
service_rows = data["service_table"]
# validate fields
@@ -149,15 +164,15 @@ def update_organization_config(self, data):
key = row["integration_key"]
add_service(oi, integration_key=key, service_name=service_name)
- def get_config_data(self):
+ def get_config_data(self) -> Mapping[str, list[PagerDutyServiceConfig]]:
service_list = []
for s in self.services:
service_list.append(
- {
- "service": s["service_name"],
- "integration_key": s["integration_key"],
- "id": s["id"],
- }
+ PagerDutyServiceConfig(
+ service=s["service_name"],
+ integration_key=s["integration_key"],
+ id=s["id"],
+ )
)
return {"service_table": service_list}
@@ -220,7 +235,7 @@ def build_integration(self, state: Mapping[str, Any]) -> IntegrationData:
class PagerDutyInstallationRedirect:
- def get_app_url(self, account_name=None):
+ def get_app_url(self, account_name: str | None = None) -> str:
if not account_name:
account_name = "app"
diff --git a/src/sentry/integrations/pagerduty/metrics.py b/src/sentry/integrations/pagerduty/metrics.py
index 8f82ec36285ebe..ed92756f4bb461 100644
--- a/src/sentry/integrations/pagerduty/metrics.py
+++ b/src/sentry/integrations/pagerduty/metrics.py
@@ -2,5 +2,5 @@
from sentry.integrations.on_call.spec import PagerDutyOnCallSpec
-def record_event(event: OnCallInteractionType):
+def record_event(event: OnCallInteractionType) -> OnCallInteractionEvent:
return OnCallInteractionEvent(event, PagerDutyOnCallSpec())
diff --git a/src/sentry/integrations/pagerduty/utils.py b/src/sentry/integrations/pagerduty/utils.py
index 4ecbcea58dff96..4f4c955c7f18df 100644
--- a/src/sentry/integrations/pagerduty/utils.py
+++ b/src/sentry/integrations/pagerduty/utils.py
@@ -95,7 +95,7 @@ def build_incident_attachment(
alert_context: AlertContext,
metric_issue_context: MetricIssueContext,
organization: Organization,
- integration_key,
+ integration_key: str,
notification_uuid: str | None = None,
) -> dict[str, Any]:
diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py
index 13cc52d4a6d756..7fed149c7f7f03 100644
--- a/src/sentry/integrations/slack/message_builder/issues.py
+++ b/src/sentry/integrations/slack/message_builder/issues.py
@@ -333,9 +333,10 @@ def get_suspect_commit_text(group: Group) -> str | None:
else: # for unsupported providers
suspect_commit_text += f"{commit_id[:6]} by {author_display}"
- pr_date = pull_request.date_added
- if pr_date:
- pr_date = time_since(pr_date)
+ if pull_request.date_added:
+ pr_date = time_since(pull_request.date_added)
+ else:
+ pr_date = pull_request.date_added
pr_id = pull_request.key
pr_title = pull_request.title
pr_link = pull_request.get_external_url()
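
The suspect-commit hunk above stops reusing one variable for both a datetime and its formatted string; a small sketch of why a strict checker prefers the branch form (humanize is a made-up stand-in for time_since):

from __future__ import annotations

from datetime import datetime, timezone


def humanize(dt: datetime) -> str:
    # Stand-in for a "time since" formatter.
    return dt.isoformat()


def describe(date_added: datetime | None) -> str | None:
    # Reassigning a single name from datetime to str trips type inference;
    # branching keeps each expression at one well-defined type.
    if date_added:
        return humanize(date_added)
    return None


print(describe(datetime(2024, 1, 1, tzinfo=timezone.utc)))
print(describe(None))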
diff --git a/src/sentry/integrations/slack/message_builder/notifications/issues.py b/src/sentry/integrations/slack/message_builder/notifications/issues.py
index db86674f826542..32038996867297 100644
--- a/src/sentry/integrations/slack/message_builder/notifications/issues.py
+++ b/src/sentry/integrations/slack/message_builder/notifications/issues.py
@@ -5,6 +5,7 @@
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
from sentry.integrations.slack.message_builder.types import SlackBlock
+from sentry.models.group import Group
from sentry.notifications.notifications.base import ProjectNotification
from sentry.types.actor import Actor
@@ -23,6 +24,8 @@ def __init__(
def build(self) -> SlackBlock:
group = getattr(self.notification, "group", None)
+ assert isinstance(group, Group), "Group must exist to send an issue notification"
+
return SlackIssuesMessageBuilder(
group=group,
event=getattr(self.notification, "event", None),
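The hunk above narrows the result of getattr(), which mypy sees as Any, with an assert isinstance before handing it on. A self-contained sketch of that idiom; the classes are illustrative, not Sentry models:

    class Group:
        title = "example issue"

    class FakeNotification:
        group = Group()

    def build_title(notification: object) -> str:
        group = getattr(notification, "group", None)  # typed as Any by mypy
        assert isinstance(group, Group), "Group must exist to build this message"
        return group.title  # narrowed to Group, so attribute access is type-checked

    print(build_title(FakeNotification()))  # "example issue"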
diff --git a/src/sentry/integrations/slack/requests/base.py b/src/sentry/integrations/slack/requests/base.py
index b9462371b12c40..e8e41624f48411 100644
--- a/src/sentry/integrations/slack/requests/base.py
+++ b/src/sentry/integrations/slack/requests/base.py
@@ -90,7 +90,7 @@ def is_bot(self) -> bool:
def is_challenge(self) -> bool:
return False
- def _get_context(self):
+ def _get_context(self) -> None:
team_id = None
user_id = None
# Let the intended validation methods handle the errors from reading these fields
diff --git a/src/sentry/integrations/slack/tasks/link_slack_user_identities.py b/src/sentry/integrations/slack/tasks/link_slack_user_identities.py
index df3bfe487792bc..f5bfee2cd4364c 100644
--- a/src/sentry/integrations/slack/tasks/link_slack_user_identities.py
+++ b/src/sentry/integrations/slack/tasks/link_slack_user_identities.py
@@ -79,7 +79,9 @@ def link_slack_user_identities(
update_identities(data, idp)
-def update_identities(slack_data_by_user: Mapping[User, SlackUserData], idp: IdentityProvider):
+def update_identities(
+ slack_data_by_user: Mapping[User, SlackUserData], idp: IdentityProvider
+) -> None:
date_verified = timezone.now()
identities_by_user = get_identities_by_user(idp, slack_data_by_user.keys())
diff --git a/src/sentry/integrations/slack/unfurl/discover.py b/src/sentry/integrations/slack/unfurl/discover.py
index 658d58c2365237..f56509b706b2de 100644
--- a/src/sentry/integrations/slack/unfurl/discover.py
+++ b/src/sentry/integrations/slack/unfurl/discover.py
@@ -19,8 +19,8 @@
MessagingInteractionEvent,
MessagingInteractionType,
)
-from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
+from sentry.integrations.services.integration.model import RpcIntegration
from sentry.integrations.slack.message_builder.discover import SlackDiscoverMessageBuilder
from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.unfurl.types import Handler, UnfurlableUrl, UnfurledUrl
@@ -28,7 +28,7 @@
from sentry.models.organization import Organization
from sentry.search.events.filter import to_list
from sentry.snuba.referrer import Referrer
-from sentry.users.models.user import User
+from sentry.users.services.user.model import RpcUser
from sentry.utils.dates import (
get_interval_from_range,
parse_stats_period,
@@ -117,9 +117,9 @@ def is_aggregate(field: str) -> bool:
def unfurl_discover(
request: HttpRequest,
- integration: Integration,
+ integration: RpcIntegration,
links: list[UnfurlableUrl],
- user: User | None = None,
+ user: RpcUser | None = None,
) -> UnfurledUrl:
event = MessagingInteractionEvent(
MessagingInteractionType.UNFURL_DISCOVER, SlackMessagingSpec(), user=user
@@ -129,9 +129,9 @@ def unfurl_discover(
def _unfurl_discover(
- integration: Integration,
+ integration: RpcIntegration,
links: list[UnfurlableUrl],
- user: User | None = None,
+ user: RpcUser | None = None,
) -> UnfurledUrl:
org_integrations = integration_service.get_organization_integrations(
integration_id=integration.id
diff --git a/src/sentry/integrations/slack/unfurl/issues.py b/src/sentry/integrations/slack/unfurl/issues.py
index ce19d32eb67013..98ff008c6e8d3c 100644
--- a/src/sentry/integrations/slack/unfurl/issues.py
+++ b/src/sentry/integrations/slack/unfurl/issues.py
@@ -8,8 +8,8 @@
MessagingInteractionEvent,
MessagingInteractionType,
)
-from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
+from sentry.integrations.services.integration.model import RpcIntegration
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.unfurl.types import (
@@ -21,7 +21,7 @@
from sentry.models.group import Group
from sentry.models.project import Project
from sentry.services import eventstore
-from sentry.users.models.user import User
+from sentry.users.services.user.model import RpcUser
map_issue_args = make_type_coercer(
{
@@ -33,9 +33,9 @@
def unfurl_issues(
request: HttpRequest,
- integration: Integration,
+ integration: RpcIntegration,
links: list[UnfurlableUrl],
- user: User | None = None,
+ user: RpcUser | None = None,
) -> UnfurledUrl:
"""
Returns a map of the attachments used in the response we send to Slack
@@ -49,7 +49,7 @@ def unfurl_issues(
return _unfurl_issues(integration, links)
-def _unfurl_issues(integration: Integration, links: list[UnfurlableUrl]) -> UnfurledUrl:
+def _unfurl_issues(integration: RpcIntegration, links: list[UnfurlableUrl]) -> UnfurledUrl:
org_integrations = integration_service.get_organization_integrations(
integration_id=integration.id
)
diff --git a/src/sentry/integrations/slack/unfurl/metric_alerts.py b/src/sentry/integrations/slack/unfurl/metric_alerts.py
index 7b6afe4b49b129..f32f1541a958d8 100644
--- a/src/sentry/integrations/slack/unfurl/metric_alerts.py
+++ b/src/sentry/integrations/slack/unfurl/metric_alerts.py
@@ -25,8 +25,8 @@
MessagingInteractionEvent,
MessagingInteractionType,
)
-from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import integration_service
+from sentry.integrations.services.integration.model import RpcIntegration
from sentry.integrations.slack.message_builder.metric_alerts import SlackMetricAlertMessageBuilder
from sentry.integrations.slack.spec import SlackMessagingSpec
from sentry.integrations.slack.unfurl.types import (
@@ -36,7 +36,7 @@
make_type_coercer,
)
from sentry.models.organization import Organization
-from sentry.users.models.user import User
+from sentry.users.services.user.model import RpcUser
map_incident_args = make_type_coercer(
{
@@ -52,9 +52,9 @@
def unfurl_metric_alerts(
request: HttpRequest,
- integration: Integration,
+ integration: RpcIntegration,
links: list[UnfurlableUrl],
- user: User | None = None,
+ user: RpcUser | None = None,
) -> UnfurledUrl:
event = MessagingInteractionEvent(
MessagingInteractionType.UNFURL_METRIC_ALERTS, SlackMessagingSpec(), user=user
@@ -64,9 +64,9 @@ def unfurl_metric_alerts(
def _unfurl_metric_alerts(
- integration: Integration,
+ integration: RpcIntegration,
links: list[UnfurlableUrl],
- user: User | None = None,
+ user: RpcUser | None = None,
) -> UnfurledUrl:
alert_filter_query = Q()
incident_filter_query = Q()
diff --git a/src/sentry/integrations/slack/unfurl/types.py b/src/sentry/integrations/slack/unfurl/types.py
index 21872e70750cd2..c8a5e5d3bf8606 100644
--- a/src/sentry/integrations/slack/unfurl/types.py
+++ b/src/sentry/integrations/slack/unfurl/types.py
@@ -7,8 +7,8 @@
from django.http.request import HttpRequest
-from sentry.integrations.models.integration import Integration
-from sentry.users.models.user import User
+from sentry.integrations.services.integration.model import RpcIntegration
+from sentry.users.services.user.model import RpcUser
UnfurledUrl = Mapping[Any, Any]
ArgsMapper = Callable[[str, Mapping[str, Optional[str]]], Mapping[str, Any]]
@@ -29,9 +29,9 @@ class HandlerCallable(Protocol):
def __call__(
self,
request: HttpRequest,
- integration: Integration,
+ integration: RpcIntegration,
links: list[UnfurlableUrl],
- user: User | None = None,
+ user: RpcUser | None = None,
) -> UnfurledUrl: ...
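types.py above keeps every unfurl handler conforming to a single Protocol with a __call__ signature. A minimal sketch of how such a callback Protocol is checked; the handler body and names are illustrative only:

    from typing import Any, Protocol

    UnfurledUrl = dict[str, Any]

    class Handler(Protocol):
        def __call__(self, url: str, *, user_id: int | None = None) -> UnfurledUrl: ...

    def unfurl_example(url: str, *, user_id: int | None = None) -> UnfurledUrl:
        return {url: {"requested_by": user_id}}

    handler: Handler = unfurl_example  # mypy verifies the function matches the Protocol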
diff --git a/src/sentry/integrations/slack/webhooks/event.py b/src/sentry/integrations/slack/webhooks/event.py
index 4255464ac073ad..e1c3bb7b4513d5 100644
--- a/src/sentry/integrations/slack/webhooks/event.py
+++ b/src/sentry/integrations/slack/webhooks/event.py
@@ -57,8 +57,9 @@ def reply(self, slack_request: SlackDMRequest, message: str) -> Response:
client = SlackSdkClient(integration_id=slack_request.integration.id)
try:
+ assert slack_request.channel_id is not None, "Channel ID is required to send a message"
client.chat_postMessage(channel=slack_request.channel_id, text=message)
- except SlackApiError:
+ except (SlackApiError, AssertionError):
_logger.info("reply.post-message-error", extra=logger_params)
return self.respond()
@@ -73,14 +74,16 @@ def on_url_verification(self, request: Request, data: Mapping[str, str]) -> Resp
return self.respond({"challenge": data["challenge"]})
def prompt_link(self, slack_request: SlackDMRequest) -> None:
+        if slack_request.channel_id is None or slack_request.user_id is None:
+ _logger.info("prompt_link.post-ephemeral.missing-data", extra=slack_request.data)
+ return
+
associate_url = build_linking_url(
integration=slack_request.integration,
slack_id=slack_request.user_id,
channel_id=slack_request.channel_id,
response_url=slack_request.response_url,
)
- if not slack_request.channel_id:
- return
payload = {
"channel": slack_request.channel_id,
@@ -114,6 +117,10 @@ def on_message(self, request: Request, slack_request: SlackDMRequest) -> Respons
if slack_request.is_bot() or not command:
return self.respond()
+ if slack_request.channel_id is None:
+ _logger.info("on_message.post-message.missing-channel", extra=slack_request.data)
+ return self.respond()
+
payload = {
"channel": slack_request.channel_id,
**SlackHelpMessageBuilder(
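The webhook hunks above add the same guard clause in several places: check the Optional channel_id, log, and return early so mypy treats it as a plain str afterwards. A standalone sketch, with a stand-in client instead of the Slack SDK:

    import logging

    logger = logging.getLogger(__name__)

    class FakeClient:
        def post_message(self, channel: str, text: str) -> None:
            logger.info("posting to %s: %s", channel, text)

    def reply(channel_id: str | None, message: str, client: FakeClient) -> None:
        if channel_id is None:
            logger.info("reply.missing-channel")
            return
        # channel_id is narrowed to str here, so the strictly typed call checks out.
        client.post_message(channel=channel_id, text=message)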
diff --git a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py
index 22a5a5d57deb37..ea73d637155935 100644
--- a/src/sentry/integrations/source_code_management/commit_context.py
+++ b/src/sentry/integrations/source_code_management/commit_context.py
@@ -373,7 +373,7 @@ def create_or_update_comment(
metrics_base: str,
comment_type: int = CommentType.MERGED_PR,
language: str | None = None,
- ):
+ ) -> None:
client = self.get_client()
pr_comment = PullRequestComment.objects.filter(
diff --git a/src/sentry/integrations/source_code_management/tasks.py b/src/sentry/integrations/source_code_management/tasks.py
index 3fed9b655b9eb0..e35cf1d0a4d086 100644
--- a/src/sentry/integrations/source_code_management/tasks.py
+++ b/src/sentry/integrations/source_code_management/tasks.py
@@ -44,7 +44,7 @@
processing_deadline_duration=45,
),
)
-def pr_comment_workflow(pr_id: int, project_id: int):
+def pr_comment_workflow(pr_id: int, project_id: int) -> None:
cache_key = _debounce_pr_comment_cache_key(pullrequest_id=pr_id)
try:
diff --git a/src/sentry/integrations/time_utils.py b/src/sentry/integrations/time_utils.py
index 39ef6fadfe73f7..410817c85c987a 100644
--- a/src/sentry/integrations/time_utils.py
+++ b/src/sentry/integrations/time_utils.py
@@ -1,6 +1,6 @@
import time
from collections.abc import Mapping
-from datetime import datetime, timedelta
+from datetime import date, datetime, timedelta
from django.utils import timezone
from django.utils.timesince import timesince
@@ -24,7 +24,7 @@ def get_approx_start_time(group: Group):
return regression_time
-def time_since(value: datetime):
+def time_since(value: datetime) -> str | date:
"""
Display the relative time
"""
diff --git a/src/sentry/integrations/utils/commit_context.py b/src/sentry/integrations/utils/commit_context.py
index 5cb34894304450..d0e70645e29b39 100644
--- a/src/sentry/integrations/utils/commit_context.py
+++ b/src/sentry/integrations/utils/commit_context.py
@@ -346,7 +346,7 @@ def _record_commit_context_all_frames_analytics(
selected_provider: str | None,
platform: str,
sdk_name: str | None,
-):
+) -> None:
if not selected_blame:
reason = _get_failure_reason(
num_successfully_mapped_frames=num_successfully_mapped_frames,
@@ -376,7 +376,7 @@ def _record_commit_context_all_frames_analytics(
reason=reason,
)
)
- return
+ return None
unique_commit_ids = {blame.commit.commitId for blame in file_blames}
unique_author_emails = {blame.commit.commitAuthorEmail for blame in file_blames}
@@ -413,7 +413,7 @@ def _record_commit_context_all_frames_analytics(
)
-def _get_failure_reason(num_successfully_mapped_frames: int, has_old_blames: bool):
+def _get_failure_reason(num_successfully_mapped_frames: int, has_old_blames: bool) -> str:
if num_successfully_mapped_frames < 1:
return "no_successful_code_mapping"
if has_old_blames:
diff --git a/src/sentry/integrations/web/integration_extension_configuration.py b/src/sentry/integrations/web/integration_extension_configuration.py
index 9d5ccd312266a9..ed4a73d7a6d892 100644
--- a/src/sentry/integrations/web/integration_extension_configuration.py
+++ b/src/sentry/integrations/web/integration_extension_configuration.py
@@ -134,7 +134,7 @@ def init_pipeline(self, request: HttpRequest, organization, params):
def map_params_to_state(self, params):
return params
- def is_enabled_for_org(self, _org, _user):
+ def is_enabled_for_org(self, _org, _user) -> bool:
return True
def has_one_required_feature(self, org, user):
diff --git a/src/sentry/issues/endpoints/organization_codeowners_associations.py b/src/sentry/issues/endpoints/organization_codeowners_associations.py
index c9187ef6c9eb47..54c38ace5e38e2 100644
--- a/src/sentry/issues/endpoints/organization_codeowners_associations.py
+++ b/src/sentry/issues/endpoints/organization_codeowners_associations.py
@@ -1,5 +1,6 @@
from rest_framework import status
from rest_framework.request import Request
+from rest_framework.response import Response
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
@@ -24,7 +25,7 @@ class OrganizationCodeOwnersAssociationsEndpoint(OrganizationEndpoint):
}
permission_classes = (OrganizationIntegrationsLoosePermission,)
- def get(self, request: Request, organization: Organization):
+ def get(self, request: Request, organization: Organization) -> Response:
"""
Returns all ProjectCodeOwners associations for an organization as a dict with projects as keys
e.g. {"projectSlug": {associations: {...}, errors: {...}}, ...]
diff --git a/src/sentry/issues/endpoints/organization_issues_resolved_in_release.py b/src/sentry/issues/endpoints/organization_issues_resolved_in_release.py
index 874289c307b07e..60807b9fa870be 100644
--- a/src/sentry/issues/endpoints/organization_issues_resolved_in_release.py
+++ b/src/sentry/issues/endpoints/organization_issues_resolved_in_release.py
@@ -9,6 +9,8 @@
from sentry.api.serializers import serialize
from sentry.api.serializers.models.group import GroupSerializerSnuba
from sentry.models.group import Group
+from sentry.models.organization import Organization
+from sentry.organizations.services.organization import RpcOrganization
@region_silo_endpoint
@@ -19,7 +21,9 @@ class OrganizationIssuesResolvedInReleaseEndpoint(OrganizationEndpoint):
}
permission_classes = (OrganizationPermission,)
- def get(self, request: Request, organization, version) -> Response:
+ def get(
+ self, request: Request, organization: Organization | RpcOrganization, version: str
+ ) -> Response:
"""
List issues to be resolved in a particular release
``````````````````````````````````````````````````
diff --git a/src/sentry/lang/dart/apps.py b/src/sentry/lang/dart/apps.py
index 96b2afc49567dc..8edbcf042b9306 100644
--- a/src/sentry/lang/dart/apps.py
+++ b/src/sentry/lang/dart/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.lang.dart"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .plugin import DartPlugin
diff --git a/src/sentry/lang/dart/plugin.py b/src/sentry/lang/dart/plugin.py
index ad14f28cdd357a..665177da30d475 100644
--- a/src/sentry/lang/dart/plugin.py
+++ b/src/sentry/lang/dart/plugin.py
@@ -13,7 +13,7 @@ class DartPlugin(Plugin2):
This plugin is responsible for Dart specific processing on events or attachments.
"""
- def can_configure_for_project(self, project, **kwargs):
+ def can_configure_for_project(self, project, **kwargs) -> bool:
return False
def get_event_preprocessors(self, data: Mapping[str, Any]) -> Sequence[EventPreprocessor]:
diff --git a/src/sentry/lang/dart/utils.py b/src/sentry/lang/dart/utils.py
index 92c641051f2372..76bdb10e98d921 100644
--- a/src/sentry/lang/dart/utils.py
+++ b/src/sentry/lang/dart/utils.py
@@ -63,7 +63,7 @@ def generate_dart_symbols_map(debug_ids: list[str], project: Project):
return None
-def deobfuscate_exception_type(data: MutableMapping[str, Any]):
+def deobfuscate_exception_type(data: MutableMapping[str, Any]) -> None:
"""
Deobfuscates exception types and certain values in-place.
@@ -77,16 +77,16 @@ def deobfuscate_exception_type(data: MutableMapping[str, Any]):
debug_ids = get_debug_meta_image_ids(dict(data))
if len(debug_ids) == 0:
- return
+ return None
exceptions = data.get("exception", {}).get("values", [])
if not exceptions:
- return
+ return None
with sentry_sdk.start_span(op="dartsymbolmap.deobfuscate_exception_type"):
symbol_map = generate_dart_symbols_map(list(debug_ids), project)
if symbol_map is None:
- return
+ return None
for exception in exceptions:
exception_type = exception.get("type")
diff --git a/src/sentry/lang/java/apps.py b/src/sentry/lang/java/apps.py
index afc956ca1c7102..c426036e4c7e6a 100644
--- a/src/sentry/lang/java/apps.py
+++ b/src/sentry/lang/java/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.lang.java"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .plugin import JavaPlugin
diff --git a/src/sentry/lang/java/plugin.py b/src/sentry/lang/java/plugin.py
index a9acd71f3ab22c..3d9ad938e8126f 100644
--- a/src/sentry/lang/java/plugin.py
+++ b/src/sentry/lang/java/plugin.py
@@ -11,7 +11,7 @@
class JavaPlugin(Plugin2):
can_disable = False
- def can_configure_for_project(self, project, **kwargs):
+ def can_configure_for_project(self, project, **kwargs) -> bool:
return False
def get_stacktrace_processors(self, data, stacktrace_infos, platforms, **kwargs):
diff --git a/src/sentry/lang/javascript/apps.py b/src/sentry/lang/javascript/apps.py
index 12c07e6934837f..39b6916ab76436 100644
--- a/src/sentry/lang/javascript/apps.py
+++ b/src/sentry/lang/javascript/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.lang.javascript"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .plugin import JavascriptPlugin
diff --git a/src/sentry/lang/javascript/plugin.py b/src/sentry/lang/javascript/plugin.py
index e436214c7ea8d7..be1627aedce756 100644
--- a/src/sentry/lang/javascript/plugin.py
+++ b/src/sentry/lang/javascript/plugin.py
@@ -36,7 +36,7 @@ def generate_modules(data):
class JavascriptPlugin(Plugin2):
can_disable = False
- def can_configure_for_project(self, project, **kwargs):
+ def can_configure_for_project(self, project, **kwargs) -> bool:
return False
def get_event_preprocessors(self, data: Mapping[str, Any]) -> Sequence[EventPreprocessor]:
diff --git a/src/sentry/mail/__init__.py b/src/sentry/mail/__init__.py
index f9950393e16629..a60cd72f43e4e6 100644
--- a/src/sentry/mail/__init__.py
+++ b/src/sentry/mail/__init__.py
@@ -1,3 +1,5 @@
+from typing import Any
+
from django.conf import settings
from sentry.utils.imports import import_string
@@ -6,7 +8,7 @@
from .notifications import * # NOQA Importing this in __init__ so that @register runs.
-def load_mail_adapter():
+def load_mail_adapter() -> Any:
return import_string(settings.SENTRY_MAIL_ADAPTER_BACKEND)()
diff --git a/src/sentry/mail/actions.py b/src/sentry/mail/actions.py
index 1f5a3fbce4a51f..490ba247e3227f 100644
--- a/src/sentry/mail/actions.py
+++ b/src/sentry/mail/actions.py
@@ -1,4 +1,6 @@
import logging
+from collections.abc import Generator
+from typing import Any
from sentry.mail import mail_adapter
from sentry.mail.forms.notify_email import NotifyEmailForm
@@ -10,6 +12,8 @@
)
from sentry.notifications.utils.participants import determine_eligible_recipients
from sentry.rules.actions.base import EventAction
+from sentry.rules.base import CallbackFuture
+from sentry.services.eventstore.models import Event, GroupEvent
from sentry.utils import metrics
logger = logging.getLogger(__name__)
@@ -21,7 +25,7 @@ class NotifyEmailAction(EventAction):
prompt = "Send a notification"
metrics_slug = "EmailAction"
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.form_fields = {
"targetType": {"type": "mailAction", "choices": ACTION_CHOICES},
@@ -33,8 +37,11 @@ def render_label(self) -> str:
self.data = {**self.data, "fallthroughType": FallthroughChoiceType.ACTIVE_MEMBERS.value}
return self.label.format(**self.data)
- def after(self, event, notification_uuid: str | None = None):
+ def after(
+ self, event: GroupEvent | Event, notification_uuid: str | None = None
+ ) -> Generator[CallbackFuture]:
group = event.group
+ assert group is not None
extra = {
"event_id": event.event_id,
"group_id": group.id,
@@ -56,7 +63,7 @@ def after(self, event, notification_uuid: str | None = None):
group.project, target_type, target_identifier, event, fallthrough_type
):
self.logger.info("rule.fail.should_notify", extra=extra)
- return
+ return None
metrics.incr(
"notifications.sent",
diff --git a/src/sentry/mail/adapter.py b/src/sentry/mail/adapter.py
index 5d5108572903fe..a0be69e1521cb7 100644
--- a/src/sentry/mail/adapter.py
+++ b/src/sentry/mail/adapter.py
@@ -7,6 +7,7 @@
from sentry.digests import get_option_key as get_digest_option_key
from sentry.digests.notifications import DigestInfo, event_to_record, unsplit_key
from sentry.integrations.types import ExternalProviders
+from sentry.models.activity import Activity
from sentry.models.options.project_option import ProjectOption
from sentry.models.project import Project
from sentry.notifications.notifications.activity import EMAIL_CLASSES_BY_TYPE
@@ -20,6 +21,7 @@
)
from sentry.notifications.utils.participants import get_notification_recipients
from sentry.plugins.base.structs import Notification
+from sentry.services.eventstore.models import Event, GroupEvent
from sentry.tasks.digests import deliver_digest
from sentry.types.actor import Actor, ActorType
from sentry.utils import metrics
@@ -40,10 +42,10 @@ class MailAdapter:
def rule_notify(
self,
- event: Any,
+ event: Event | GroupEvent,
futures: Sequence[RuleFuture],
target_type: ActionTargetType,
- target_identifier: str | None = None,
+ target_identifier: int | None = None,
fallthrough_choice: FallthroughChoiceType | None = None,
skip_digests: bool = False,
notification_uuid: str | None = None,
@@ -69,17 +71,17 @@ def rule_notify(
"The default behavior for notification de-duplication does not support args"
)
- project = event.group.project
+ group = event.group
+ assert group is not None
+ project = group.project
extra["project_id"] = project.id
if digests.backend.enabled(project) and not skip_digests:
- def get_digest_option(key):
+ def get_digest_option(key: str) -> int | None:
return ProjectOption.objects.get_value(project, get_digest_option_key("mail", key))
- digest_key = unsplit_key(
- event.group.project, target_type, target_identifier, fallthrough_choice
- )
+ digest_key = unsplit_key(project, target_type, target_identifier, fallthrough_choice)
extra["digest_key"] = digest_key
immediate_delivery = digests.backend.add(
digest_key,
@@ -101,7 +103,7 @@ def get_digest_option(key):
logger.info("mail.adapter.notification.%s", log_event, extra=extra)
@staticmethod
- def get_sendable_user_objects(project):
+ def get_sendable_user_objects(project: Project) -> set[Actor]:
"""
Return a collection of USERS that are eligible to receive
notifications for the provided project.
@@ -115,21 +117,23 @@ def get_sendable_user_objects(project):
organization_id=project.organization_id,
actor_type=ActorType.USER,
)
- return recipients.get(ExternalProviders.EMAIL)
+ result = recipients.get(ExternalProviders.EMAIL)
+ assert result is not None
+ return result
- def get_sendable_user_ids(self, project):
+ def get_sendable_user_ids(self, project: Project) -> list[int]:
users = self.get_sendable_user_objects(project)
return [user.id for user in users]
@staticmethod
def notify(
- notification,
- target_type,
- target_identifier=None,
- fallthrough_choice=None,
+ notification: Notification,
+ target_type: ActionTargetType,
+ target_identifier: int | None = None,
+ fallthrough_choice: FallthroughChoiceType | None = None,
notification_uuid: str | None = None,
- **kwargs,
- ):
+ **kwargs: Any,
+ ) -> None:
AlertRuleNotification(
notification,
target_type,
@@ -158,7 +162,7 @@ def notify_digest(
).send()
@staticmethod
- def notify_about_activity(activity):
+ def notify_about_activity(activity: Activity) -> None:
metrics.incr("mail_adapter.notify_about_activity")
email_cls = EMAIL_CLASSES_BY_TYPE.get(activity.type)
if not email_cls:
@@ -168,6 +172,6 @@ def notify_about_activity(activity):
email_cls(activity).send()
@staticmethod
- def handle_user_report(report: Mapping[str, Any], project: Project):
+ def handle_user_report(report: Mapping[str, Any], project: Project) -> None:
metrics.incr("mail_adapter.handle_user_report")
return UserReportNotification(project, report).send()
diff --git a/src/sentry/mail/forms/member_team.py b/src/sentry/mail/forms/member_team.py
index ac37c5a06722ca..cb7eea70ae9f6b 100644
--- a/src/sentry/mail/forms/member_team.py
+++ b/src/sentry/mail/forms/member_team.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import enum
-from typing import Generic, TypeVar
+from typing import Any, Generic, TypeVar
from django import forms
@@ -21,11 +21,11 @@ class MemberTeamForm(forms.Form, Generic[T]):
memberValue: T
targetTypeEnum: type[T]
- def __init__(self, project, *args, **kwargs):
+ def __init__(self, project: Project, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
self.project = project
- def clean_targetIdentifier(self):
+ def clean_targetIdentifier(self) -> int | None:
targetIdentifier = self.cleaned_data.get("targetIdentifier")
# XXX: Clean up some bad data in the database
if targetIdentifier == "None":
diff --git a/src/sentry/middleware/integrations/integration_control.py b/src/sentry/middleware/integrations/integration_control.py
index a965799a16107f..614ceb71a2f2b6 100644
--- a/src/sentry/middleware/integrations/integration_control.py
+++ b/src/sentry/middleware/integrations/integration_control.py
@@ -40,14 +40,14 @@ def _should_operate(self, request: HttpRequest) -> bool:
return SiloMode.get_current_mode() == SiloMode.CONTROL
@classmethod
- def register_classifications(cls, classifications: list[type[BaseClassification]]):
+ def register_classifications(cls, classifications: list[type[BaseClassification]]) -> None:
"""
Add new classifications for middleware to determine request parsing dynamically.
Used in getsentry to expand scope of parsing.
"""
cls.classifications += classifications
- def __call__(self, request: HttpRequest):
+ def __call__(self, request: HttpRequest) -> HttpResponseBase:
if not self._should_operate(request):
return self.get_response(request)
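__call__ on the middleware above is pinned to return HttpResponseBase whether it delegates or short-circuits. A minimal Django-style middleware with that shape; the path check and response body are illustrative:

    from collections.abc import Callable

    from django.http import HttpRequest, HttpResponse
    from django.http.response import HttpResponseBase

    class PassthroughMiddleware:
        def __init__(self, get_response: Callable[[HttpRequest], HttpResponseBase]) -> None:
            self.get_response = get_response

        def __call__(self, request: HttpRequest) -> HttpResponseBase:
            if request.path.startswith("/healthz"):
                return HttpResponse("ok")  # short-circuit
            return self.get_response(request)  # normal flow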
diff --git a/src/sentry/middleware/integrations/parsers/bitbucket.py b/src/sentry/middleware/integrations/parsers/bitbucket.py
index cb78a97d07dbd2..a449c0e4aeb1c4 100644
--- a/src/sentry/middleware/integrations/parsers/bitbucket.py
+++ b/src/sentry/middleware/integrations/parsers/bitbucket.py
@@ -3,6 +3,8 @@
import logging
from typing import Any
+from django.http.response import HttpResponseBase
+
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.bitbucket.webhook import BitbucketWebhookEndpoint
from sentry.integrations.middleware.hybrid_cloud.parser import BaseRequestParser
@@ -17,7 +19,7 @@ class BitbucketRequestParser(BaseRequestParser):
provider = IntegrationProviderSlug.BITBUCKET.value
webhook_identifier = WebhookProviderIdentifier.BITBUCKET
- def get_bitbucket_webhook_response(self):
+ def get_bitbucket_webhook_response(self) -> HttpResponseBase:
"""
Used for identifying regions from Bitbucket and Bitbucket Server webhooks
"""
@@ -50,7 +52,7 @@ def get_bitbucket_webhook_response(self):
regions=[region], identifier=mapping.organization_id
)
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
if self.view_class == BitbucketWebhookEndpoint:
return self.get_bitbucket_webhook_response()
return self.get_response_from_control_silo()
diff --git a/src/sentry/middleware/integrations/parsers/discord.py b/src/sentry/middleware/integrations/parsers/discord.py
index 8cbc28cada78d9..e86ef69c6561c6 100644
--- a/src/sentry/middleware/integrations/parsers/discord.py
+++ b/src/sentry/middleware/integrations/parsers/discord.py
@@ -5,6 +5,7 @@
import sentry_sdk
from django.http import HttpResponse, JsonResponse
+from django.http.response import HttpResponseBase
from rest_framework import status
from rest_framework.request import Request
@@ -103,7 +104,7 @@ def get_integration_from_request(self) -> Integration | None:
return None
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
if self.view_class in self.control_classes:
return self.get_response_from_control_silo()
diff --git a/src/sentry/middleware/integrations/parsers/github.py b/src/sentry/middleware/integrations/parsers/github.py
index b73a388ac970ec..389fa844fba13b 100644
--- a/src/sentry/middleware/integrations/parsers/github.py
+++ b/src/sentry/middleware/integrations/parsers/github.py
@@ -6,6 +6,7 @@
import orjson
from django.http import HttpResponse
+from django.http.response import HttpResponseBase
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.github.webhook import (
@@ -43,7 +44,7 @@ def get_integration_from_request(self) -> Integration | None:
return None
return Integration.objects.filter(external_id=external_id, provider=self.provider).first()
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
if self.view_class != self.webhook_endpoint:
return self.get_response_from_control_silo()
diff --git a/src/sentry/middleware/integrations/parsers/gitlab.py b/src/sentry/middleware/integrations/parsers/gitlab.py
index 5d911b4e776f1e..6b6390e17695f1 100644
--- a/src/sentry/middleware/integrations/parsers/gitlab.py
+++ b/src/sentry/middleware/integrations/parsers/gitlab.py
@@ -60,7 +60,7 @@ def get_integration_from_request(self) -> Integration | None:
return None
- def get_response_from_gitlab_webhook(self):
+ def get_response_from_gitlab_webhook(self) -> HttpResponseBase:
maybe_http_response = self._resolve_external_id()
if isinstance(maybe_http_response, HttpResponseBase):
return maybe_http_response
diff --git a/src/sentry/middleware/integrations/parsers/google.py b/src/sentry/middleware/integrations/parsers/google.py
index ec6dec7b34992d..fd1b3ef2f45d53 100644
--- a/src/sentry/middleware/integrations/parsers/google.py
+++ b/src/sentry/middleware/integrations/parsers/google.py
@@ -2,6 +2,8 @@
import logging
+from django.http.response import HttpResponseBase
+
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.middleware.hybrid_cloud.parser import BaseRequestParser
@@ -12,5 +14,5 @@ class GoogleRequestParser(BaseRequestParser):
provider = "google"
webhook_identifier = WebhookProviderIdentifier.GOOGLE
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
return self.get_response_from_control_silo()
diff --git a/src/sentry/middleware/integrations/parsers/jira.py b/src/sentry/middleware/integrations/parsers/jira.py
index 16ef4266cf5909..17cef6126bb080 100644
--- a/src/sentry/middleware/integrations/parsers/jira.py
+++ b/src/sentry/middleware/integrations/parsers/jira.py
@@ -3,6 +3,7 @@
import logging
import sentry_sdk
+from django.http.response import HttpResponseBase
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.jira.endpoints import JiraDescriptorEndpoint, JiraSearchEndpoint
@@ -51,7 +52,7 @@ def get_integration_from_request(self) -> Integration | None:
sentry_sdk.capture_exception(e)
return None
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
if self.view_class in self.control_classes:
return self.get_response_from_control_silo()
diff --git a/src/sentry/middleware/integrations/parsers/jira_server.py b/src/sentry/middleware/integrations/parsers/jira_server.py
index 0ae35378fd8475..0b869699b37500 100644
--- a/src/sentry/middleware/integrations/parsers/jira_server.py
+++ b/src/sentry/middleware/integrations/parsers/jira_server.py
@@ -6,6 +6,7 @@
import orjson
from django.http import HttpResponse
+from django.http.response import HttpResponseBase
from rest_framework import status
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
@@ -23,7 +24,7 @@ class JiraServerRequestParser(BaseRequestParser):
provider = IntegrationProviderSlug.JIRA_SERVER.value
webhook_identifier = WebhookProviderIdentifier.JIRA_SERVER
- def get_response_from_issue_update_webhook(self):
+ def get_response_from_issue_update_webhook(self) -> HttpResponseBase:
token = self.match.kwargs.get("token")
try:
integration = get_integration_from_token(token)
@@ -68,7 +69,7 @@ def mailbox_bucket_id(self, data: Mapping[str, Any]) -> int | None:
except ValueError:
return None
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
if self.view_class == JiraServerIssueUpdatedWebhook:
return self.get_response_from_issue_update_webhook()
diff --git a/src/sentry/middleware/integrations/parsers/msteams.py b/src/sentry/middleware/integrations/parsers/msteams.py
index ff090e28f42892..b3ecf0f9cc8094 100644
--- a/src/sentry/middleware/integrations/parsers/msteams.py
+++ b/src/sentry/middleware/integrations/parsers/msteams.py
@@ -30,7 +30,7 @@ class MsTeamsRequestParser(BaseRequestParser):
_synchronous_events = [MsTeamsEvents.INSTALLATION_UPDATE]
@cached_property
- def request_data(self):
+ def request_data(self) -> Mapping[str, Any]:
data = {}
try:
data = orjson.loads(self.request.body)
diff --git a/src/sentry/middleware/integrations/parsers/plugin.py b/src/sentry/middleware/integrations/parsers/plugin.py
index daeb23d6c2afb9..d4547261ef57f0 100644
--- a/src/sentry/middleware/integrations/parsers/plugin.py
+++ b/src/sentry/middleware/integrations/parsers/plugin.py
@@ -4,6 +4,7 @@
from typing import Any
from django.http import HttpResponse
+from django.http.response import HttpResponseBase
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.middleware.hybrid_cloud.parser import BaseRequestParser
@@ -22,7 +23,7 @@ class PluginRequestParser(BaseRequestParser):
def should_operate(self) -> bool:
return self.view_class in {BitbucketPluginWebhookEndpoint, GithubPluginWebhookEndpoint}
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
"""
Used for identifying regions from Github and Bitbucket plugin webhooks
"""
diff --git a/src/sentry/middleware/integrations/parsers/slack.py b/src/sentry/middleware/integrations/parsers/slack.py
index 7d7d14d213da28..73f657fe65cb36 100644
--- a/src/sentry/middleware/integrations/parsers/slack.py
+++ b/src/sentry/middleware/integrations/parsers/slack.py
@@ -2,10 +2,12 @@
import logging
from collections.abc import Sequence
+from typing import Any
from urllib.parse import parse_qs
import orjson
import sentry_sdk
+from django.http import HttpRequest
from django.http.response import HttpResponse, HttpResponseBase
from rest_framework import status
from rest_framework.request import Request
@@ -89,7 +91,7 @@ class SlackRequestParser(BaseRequestParser):
See: `src/sentry/integrations/slack/views`
"""
- def build_loading_modal(self, external_id: str, title: str):
+ def build_loading_modal(self, external_id: str, title: str) -> dict[str, Any]:
return {
"type": "modal",
"external_id": external_id,
@@ -103,7 +105,7 @@ def build_loading_modal(self, external_id: str, title: str):
],
}
- def parse_slack_payload(self, request) -> tuple[dict, str]:
+ def parse_slack_payload(self, request: HttpRequest) -> tuple[dict[str, str], str]:
try:
decoded_body = parse_qs(request.body.decode(encoding="utf-8"))
payload_list = decoded_body.get("payload")
@@ -133,7 +135,7 @@ def parse_slack_payload(self, request) -> tuple[dict, str]:
except (json.JSONDecodeError, KeyError, IndexError, TypeError) as e:
raise ValueError(f"Error parsing Slack payload: {str(e)}")
- def handle_dialog(self, request, action: str, title: str) -> None:
+ def handle_dialog(self, request: HttpRequest, action: str, title: str) -> None:
payload, action_ts = self.parse_slack_payload(request)
integration = self.get_integration_from_request()
@@ -299,7 +301,7 @@ def filter_organizations_from_request(
)
return organizations
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
"""
Slack Webhook Requests all require synchronous responses.
"""
diff --git a/src/sentry/middleware/integrations/parsers/vercel.py b/src/sentry/middleware/integrations/parsers/vercel.py
index e8de1d43f0e576..9f093757e40537 100644
--- a/src/sentry/middleware/integrations/parsers/vercel.py
+++ b/src/sentry/middleware/integrations/parsers/vercel.py
@@ -2,6 +2,8 @@
import logging
+from django.http.response import HttpResponseBase
+
from sentry.hybridcloud.outbox.category import WebhookProviderIdentifier
from sentry.integrations.middleware.hybrid_cloud.parser import BaseRequestParser
@@ -12,5 +14,5 @@ class VercelRequestParser(BaseRequestParser):
provider = "vercel"
webhook_identifier = WebhookProviderIdentifier.VERCEL
- def get_response(self):
+ def get_response(self) -> HttpResponseBase:
return self.get_response_from_control_silo()
diff --git a/src/sentry/middleware/integrations/tasks.py b/src/sentry/middleware/integrations/tasks.py
index 48d5c6f6f63fa0..4da736434128ad 100644
--- a/src/sentry/middleware/integrations/tasks.py
+++ b/src/sentry/middleware/integrations/tasks.py
@@ -132,7 +132,7 @@ def convert_to_async_slack_response(
region_names: list[str],
payload: dict[str, Any],
response_url: str,
-):
+) -> None:
_AsyncSlackDispatcher(payload, response_url).dispatch(region_names)
diff --git a/src/sentry/middleware/superuser.py b/src/sentry/middleware/superuser.py
index 4b8613bed966d4..db794056ca8f15 100644
--- a/src/sentry/middleware/superuser.py
+++ b/src/sentry/middleware/superuser.py
@@ -7,7 +7,7 @@
class SuperuserMiddleware(MiddlewareMixin):
- def process_request(self, request: Request):
+ def process_request(self, request: Request) -> None:
# This avoids touching user session, which means we avoid
# setting `Vary: Cookie` as a response header which will
# break HTTP caching entirely.
diff --git a/src/sentry/models/apigrant.py b/src/sentry/models/apigrant.py
index c6fc636028e1b7..7e368f1b7a9d72 100644
--- a/src/sentry/models/apigrant.py
+++ b/src/sentry/models/apigrant.py
@@ -107,7 +107,7 @@ def redirect_uri_allowed(self, uri):
return uri == self.redirect_uri
@classmethod
- def get_lock_key(cls, grant_id):
+ def get_lock_key(cls, grant_id) -> str:
return f"api_grant:{grant_id}"
@classmethod
diff --git a/src/sentry/models/authidentityreplica.py b/src/sentry/models/authidentityreplica.py
index 54580658965f18..a326aa34b93420 100644
--- a/src/sentry/models/authidentityreplica.py
+++ b/src/sentry/models/authidentityreplica.py
@@ -39,5 +39,5 @@ class Meta:
def __str__(self) -> str:
return self.ident
- def get_audit_log_data(self):
+ def get_audit_log_data(self) -> dict[str, Any]:
return {"user_id": self.user_id, "data": self.data}
diff --git a/src/sentry/models/authproviderreplica.py b/src/sentry/models/authproviderreplica.py
index 186d67ea8d6d71..f46b9683f07aef 100644
--- a/src/sentry/models/authproviderreplica.py
+++ b/src/sentry/models/authproviderreplica.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Any
+from typing import TYPE_CHECKING, Any
from django.db import models
from django.utils import timezone
@@ -16,6 +16,9 @@
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.db.models.fields.jsonfield import JSONField
+if TYPE_CHECKING:
+ from sentry.auth.provider import Provider
+
@region_silo_model
class AuthProviderReplica(Model):
@@ -47,7 +50,7 @@ class Meta:
def __str__(self) -> str:
return self.provider
- def get_provider(self):
+ def get_provider(self) -> Provider:
from sentry.auth import manager
return manager.get(self.provider, **self.config)
diff --git a/src/sentry/models/avatars/organization_avatar.py b/src/sentry/models/avatars/organization_avatar.py
index 84258b94f1e7fd..5af372aa10cf47 100644
--- a/src/sentry/models/avatars/organization_avatar.py
+++ b/src/sentry/models/avatars/organization_avatar.py
@@ -30,5 +30,5 @@ class Meta:
app_label = "sentry"
db_table = "sentry_organizationavatar"
- def get_cache_key(self, size):
+ def get_cache_key(self, size) -> str:
return f"org_avatar:{self.organization_id}:{size}"
diff --git a/src/sentry/models/broadcast.py b/src/sentry/models/broadcast.py
index 547c45d1839856..6aec7ed5032f88 100644
--- a/src/sentry/models/broadcast.py
+++ b/src/sentry/models/broadcast.py
@@ -1,4 +1,4 @@
-from datetime import timedelta
+from datetime import datetime, timedelta
from django.db import models
from django.utils import timezone
@@ -7,7 +7,7 @@
from sentry.db.models import FlexibleForeignKey, Model, control_silo_model, sane_repr
-def default_expiration():
+def default_expiration() -> datetime:
return timezone.now() + timedelta(days=7)
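default_expiration() above stays a function because Django evaluates a callable default per row, whereas an expression like timezone.now() + timedelta(days=7) would be frozen at import time. A sketch of how such a default is wired up; the model and app_label are illustrative:

    from datetime import datetime, timedelta

    from django.db import models
    from django.utils import timezone

    def default_expiration() -> datetime:
        return timezone.now() + timedelta(days=7)

    class Announcement(models.Model):
        # No parentheses: pass the callable itself so each new row gets a fresh value.
        date_expires = models.DateTimeField(default=default_expiration)

        class Meta:
            app_label = "example"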
diff --git a/src/sentry/models/dynamicsampling.py b/src/sentry/models/dynamicsampling.py
index c73f86172a3ccd..8bd48610e1e231 100644
--- a/src/sentry/models/dynamicsampling.py
+++ b/src/sentry/models/dynamicsampling.py
@@ -316,7 +316,7 @@ def get_project_rules(
return rules[:MAX_CUSTOM_RULES_PER_PROJECT]
@staticmethod
- def deactivate_expired_rules():
+ def deactivate_expired_rules() -> None:
"""
Deactivates all rules that have expired
"""
diff --git a/src/sentry/models/environment.py b/src/sentry/models/environment.py
index cd33571d7ac4c7..c06061732db459 100644
--- a/src/sentry/models/environment.py
+++ b/src/sentry/models/environment.py
@@ -65,7 +65,7 @@ def is_valid_name(cls, value):
return OK_NAME_PATTERN.match(value) is not None
@classmethod
- def get_cache_key(cls, organization_id, name):
+ def get_cache_key(cls, organization_id, name) -> str:
return f"env:2:{organization_id}:{md5_text(name).hexdigest()}"
@classmethod
diff --git a/src/sentry/models/files/fileblob.py b/src/sentry/models/files/fileblob.py
index 4a51031f51f5b3..2731a0054b52a0 100644
--- a/src/sentry/models/files/fileblob.py
+++ b/src/sentry/models/files/fileblob.py
@@ -1,3 +1,5 @@
+from typing import Any
+
from sentry.celery import SentryTask
from sentry.db.models import region_silo_model
from sentry.models.files.abstractfileblob import AbstractFileBlob
@@ -12,7 +14,7 @@ class Meta:
db_table = "sentry_fileblob"
@classmethod
- def _storage_config(cls):
+ def _storage_config(cls) -> dict[str, Any] | None:
return None # Rely on get_storage defaults
def _create_blob_owner(self, organization_id: int) -> FileBlobOwner:
diff --git a/src/sentry/models/group.py b/src/sentry/models/group.py
index a5a6d738096f2e..15f769ee8b2c16 100644
--- a/src/sentry/models/group.py
+++ b/src/sentry/models/group.py
@@ -730,7 +730,7 @@ def make_snuba_params_for_replay_count_query():
teams=[],
)
- def _cache_key(issue_id):
+ def _cache_key(issue_id) -> str:
return f"group:has_replays:{issue_id}"
from sentry.replays.usecases.replay_counts import get_replay_counts
@@ -1006,7 +1006,7 @@ def checksum(self):
warnings.warn("Group.checksum is no longer used", DeprecationWarning)
return ""
- def get_email_subject(self):
+ def get_email_subject(self) -> str:
return f"{self.qualified_short_id} - {self.title}"
def count_users_seen(
diff --git a/src/sentry/models/groupenvironment.py b/src/sentry/models/groupenvironment.py
index e8ec368faf4e0d..b26c59fc0b7af6 100644
--- a/src/sentry/models/groupenvironment.py
+++ b/src/sentry/models/groupenvironment.py
@@ -34,7 +34,7 @@ class Meta:
__repr__ = sane_repr("group_id", "environment_id")
@classmethod
- def _get_cache_key(self, group_id, environment_id):
+ def _get_cache_key(self, group_id, environment_id) -> str:
return f"groupenv:1:{group_id}:{environment_id}"
@classmethod
diff --git a/src/sentry/models/groupsearchviewstarred.py b/src/sentry/models/groupsearchviewstarred.py
index cd3ed5494e5935..053e7b551335ee 100644
--- a/src/sentry/models/groupsearchviewstarred.py
+++ b/src/sentry/models/groupsearchviewstarred.py
@@ -33,7 +33,7 @@ def get_starred_view(
def reorder_starred_views(
self, organization: Organization, user_id: int, new_view_positions: list[int]
- ):
+ ) -> None:
"""
Reorders the positions of starred views for a user in an organization.
Does NOT add or remove starred views.
@@ -142,7 +142,9 @@ def delete_starred_view(
).update(position=models.F("position") - 1)
return True
- def clear_starred_view_for_all_members(self, organization: Organization, view: GroupSearchView):
+ def clear_starred_view_for_all_members(
+ self, organization: Organization, view: GroupSearchView
+ ) -> None:
for starred_view in self.filter(organization=organization, group_search_view=view):
self.delete_starred_view(organization, starred_view.user_id, view)
diff --git a/src/sentry/models/groupshare.py b/src/sentry/models/groupshare.py
index 5fc61f43512e1a..13c170b16e4af8 100644
--- a/src/sentry/models/groupshare.py
+++ b/src/sentry/models/groupshare.py
@@ -9,7 +9,7 @@
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
-def default_uuid():
+def default_uuid() -> str:
return uuid4().hex
diff --git a/src/sentry/models/organizationaccessrequest.py b/src/sentry/models/organizationaccessrequest.py
index 8e15ddd90ea3b7..c5bb20b1a0a385 100644
--- a/src/sentry/models/organizationaccessrequest.py
+++ b/src/sentry/models/organizationaccessrequest.py
@@ -25,7 +25,7 @@ class Meta:
__repr__ = sane_repr("team_id", "member_id")
- def send_request_email(self):
+ def send_request_email(self) -> None:
from sentry.models.organizationmember import OrganizationMember
from sentry.utils.email import MessageBuilder
@@ -78,7 +78,7 @@ def send_request_email(self):
msg.send_async([user.email for user in member_users])
- def send_approved_email(self):
+ def send_approved_email(self) -> None:
from sentry.utils.email import MessageBuilder
if self.member.user_id is None:
diff --git a/src/sentry/models/organizationmemberteam.py b/src/sentry/models/organizationmemberteam.py
index c2144e4b120648..cbe8d8952bb782 100644
--- a/src/sentry/models/organizationmemberteam.py
+++ b/src/sentry/models/organizationmemberteam.py
@@ -71,7 +71,7 @@ def handle_async_deletion(
organization_member_team_id=identifier,
)
- def get_audit_log_data(self):
+ def get_audit_log_data(self) -> dict[str, Any]:
return {
"team_slug": self.team.slug,
"member_id": self.organizationmember_id,
diff --git a/src/sentry/models/project.py b/src/sentry/models/project.py
index 24abf5d5580d15..3c29139343a460 100644
--- a/src/sentry/models/project.py
+++ b/src/sentry/models/project.py
@@ -672,7 +672,7 @@ def get_security_token(self):
self.update_option("sentry:token", security_token)
return security_token
- def get_lock_key(self):
+ def get_lock_key(self) -> str:
return f"project_token:{self.id}"
def copy_settings_from(self, project_id: int) -> bool:
diff --git a/src/sentry/models/projectkey.py b/src/sentry/models/projectkey.py
index d2cab62f3bc7fd..127da41e519cce 100644
--- a/src/sentry/models/projectkey.py
+++ b/src/sentry/models/projectkey.py
@@ -272,11 +272,11 @@ def otlp_traces_endpoint(self):
return f"{endpoint}/api/{self.project_id}/otlp/v1/traces"
@property
- def unreal_endpoint(self):
+ def unreal_endpoint(self) -> str:
return f"{self.get_endpoint()}/api/{self.project_id}/unreal/{self.public_key}/"
@property
- def crons_endpoint(self):
+ def crons_endpoint(self) -> str:
return f"{self.get_endpoint()}/api/{self.project_id}/cron/___MONITOR_SLUG___/{self.public_key}/"
@property
diff --git a/src/sentry/models/projectsdk.py b/src/sentry/models/projectsdk.py
index 96d51d176ae88f..4d8628dbb735d1 100644
--- a/src/sentry/models/projectsdk.py
+++ b/src/sentry/models/projectsdk.py
@@ -53,11 +53,11 @@ class Meta:
__repr__ = sane_repr("project", "event_type", "sdk_name", "sdk_version")
@classmethod
- def get_lock_key(cls, project: Project, event_type: EventType, sdk_name: str):
+ def get_lock_key(cls, project: Project, event_type: EventType, sdk_name: str) -> str:
return f"lprojectsdk:{project.id}:{event_type.value}:{md5_text(sdk_name).hexdigest()}"
@classmethod
- def get_cache_key(cls, project: Project, event_type: EventType, sdk_name: str):
+ def get_cache_key(cls, project: Project, event_type: EventType, sdk_name: str) -> str:
return f"projectsdk:{project.id}:{event_type.value}:{md5_text(sdk_name).hexdigest()}"
@classmethod
@@ -67,17 +67,17 @@ def update_with_newest_version_or_create(
event_type: EventType,
sdk_name: str,
sdk_version: str,
- ):
+ ) -> None:
try:
new_version = parse_version(sdk_version)
except InvalidVersion:
# non-semver sdk version, ignore and move on
- return
+ return None
normalized_sdk_name = normalize_sdk_name(sdk_name)
if normalized_sdk_name is None:
logger.info("Unknown sdk name: %s", sdk_name)
- return
+ return None
lock_key = cls.get_lock_key(project, event_type, normalized_sdk_name)
lock = locks.get(lock_key, duration=10, name="projectsdk")
@@ -102,7 +102,7 @@ def __update_with_newest_version_or_create(
sdk_name: str,
sdk_version: str,
new_version: Version,
- ):
+ ) -> None:
cache_key = cls.get_cache_key(project, event_type, sdk_name)
with metrics.timer(
diff --git a/src/sentry/models/projecttemplate.py b/src/sentry/models/projecttemplate.py
index e4911f357a0c5c..5b75163e1e4ad9 100644
--- a/src/sentry/models/projecttemplate.py
+++ b/src/sentry/models/projecttemplate.py
@@ -1,3 +1,5 @@
+from typing import Any
+
from django.db import models
from sentry.backup.scopes import RelocationScope
@@ -36,5 +38,5 @@ class Meta:
__repr__ = sane_repr("name", "organization_id")
- def get_audit_log_data(self):
+ def get_audit_log_data(self) -> dict[str, Any]:
return {"name": self.name, "organization_id": self.organization_id}
diff --git a/src/sentry/models/release.py b/src/sentry/models/release.py
index f779a5fb49e7bb..fcde9931396465 100644
--- a/src/sentry/models/release.py
+++ b/src/sentry/models/release.py
@@ -386,11 +386,11 @@ def semver_tuple(self) -> SemverVersion:
)
@classmethod
- def get_cache_key(cls, organization_id, version):
+ def get_cache_key(cls, organization_id, version) -> str:
return f"release:3:{organization_id}:{md5_text(version).hexdigest()}"
@classmethod
- def get_lock_key(cls, organization_id, release_id):
+ def get_lock_key(cls, organization_id, release_id) -> str:
return f"releasecommits:{organization_id}:{release_id}"
@classmethod
diff --git a/src/sentry/models/releaseenvironment.py b/src/sentry/models/releaseenvironment.py
index e88dd90727d70a..81a14323cedd2c 100644
--- a/src/sentry/models/releaseenvironment.py
+++ b/src/sentry/models/releaseenvironment.py
@@ -35,7 +35,7 @@ class Meta:
__repr__ = sane_repr("organization_id", "release_id", "environment_id")
@classmethod
- def get_cache_key(cls, organization_id, release_id, environment_id):
+ def get_cache_key(cls, organization_id, release_id, environment_id) -> str:
return f"releaseenv:2:{organization_id}:{release_id}:{environment_id}"
@classmethod
diff --git a/src/sentry/models/releaseprojectenvironment.py b/src/sentry/models/releaseprojectenvironment.py
index c001d9ba65ec2c..6ce72b6e73c0f0 100644
--- a/src/sentry/models/releaseprojectenvironment.py
+++ b/src/sentry/models/releaseprojectenvironment.py
@@ -51,7 +51,7 @@ class Meta:
__repr__ = sane_repr("project", "release", "environment")
@classmethod
- def get_cache_key(cls, release_id, project_id, environment_id):
+ def get_cache_key(cls, release_id, project_id, environment_id) -> str:
return f"releaseprojectenv:{release_id}:{project_id}:{environment_id}"
@classmethod
diff --git a/src/sentry/models/savedsearch.py b/src/sentry/models/savedsearch.py
index ceb6d67e66a00d..d0e68aa508c02b 100644
--- a/src/sentry/models/savedsearch.py
+++ b/src/sentry/models/savedsearch.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
from enum import StrEnum
-from typing import Any, Literal
+from typing import TYPE_CHECKING, Any, Literal
from django.db import models
from django.db.models import Q, UniqueConstraint
@@ -14,6 +16,9 @@
from sentry.db.models.fields.text import CharField
from sentry.models.search_common import SearchType
+if TYPE_CHECKING:
+ from django.utils.functional import _StrPromise # fake type added by django-stubs
+
class SortOptions(StrEnum):
DATE = "date"
@@ -24,7 +29,7 @@ class SortOptions(StrEnum):
INBOX = "inbox"
@classmethod
- def as_choices(cls):
+ def as_choices(cls) -> tuple[tuple[SortOptions, _StrPromise], ...]:
return (
(cls.DATE, _("Last Seen")),
(cls.NEW, _("First Seen")),
@@ -105,7 +110,7 @@ class Meta:
]
@property
- def is_pinned(self):
+ def is_pinned(self) -> bool:
return self.visibility == Visibility.OWNER_PINNED
__repr__ = sane_repr("project_id", "name")
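savedsearch.py above imports _StrPromise only under TYPE_CHECKING because it exists solely in django-stubs, and the added `from __future__ import annotations` keeps the annotation lazy at runtime. A compact sketch of the same arrangement with an illustrative enum:

    from __future__ import annotations

    from enum import StrEnum
    from typing import TYPE_CHECKING

    from django.utils.translation import gettext_lazy as _

    if TYPE_CHECKING:
        from django.utils.functional import _StrPromise  # stub-only type

    class Sort(StrEnum):
        DATE = "date"
        NEW = "new"

        @classmethod
        def as_choices(cls) -> tuple[tuple[Sort, _StrPromise], ...]:
            return ((cls.DATE, _("Last Seen")), (cls.NEW, _("First Seen")))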
diff --git a/src/sentry/models/team.py b/src/sentry/models/team.py
index 0d422dd51b2e00..5aa34b59114eef 100644
--- a/src/sentry/models/team.py
+++ b/src/sentry/models/team.py
@@ -143,7 +143,7 @@ class Meta:
__repr__ = sane_repr("name", "slug")
- def class_name(self):
+ def class_name(self) -> str:
return "Team"
def __str__(self) -> str:
diff --git a/src/sentry/models/teamreplica.py b/src/sentry/models/teamreplica.py
index 821a96158d4e7f..f7f1e094979520 100644
--- a/src/sentry/models/teamreplica.py
+++ b/src/sentry/models/teamreplica.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+from typing import Any
+
from django.db import models
from django.utils import timezone
@@ -27,7 +29,7 @@ class Meta:
__repr__ = sane_repr("name", "slug")
- def get_audit_log_data(self):
+ def get_audit_log_data(self) -> dict[str, Any]:
return {
"id": self.id,
"slug": self.slug,
diff --git a/src/sentry/models/tombstone.py b/src/sentry/models/tombstone.py
index 6e80ac47592fdc..6876081f14f231 100644
--- a/src/sentry/models/tombstone.py
+++ b/src/sentry/models/tombstone.py
@@ -42,7 +42,7 @@ def class_for_silo_mode(silo_mode: SiloMode) -> type[TombstoneBase] | None:
return None
@classmethod
- def record_delete(cls, table_name: str, identifier: int):
+ def record_delete(cls, table_name: str, identifier: int) -> None:
try:
with transaction.atomic(router.db_for_write(cls)):
cls.objects.create(table_name=table_name, object_identifier=identifier)
diff --git a/src/sentry/models/userreport.py b/src/sentry/models/userreport.py
index c75d28273e0d93..7888c045da8c13 100644
--- a/src/sentry/models/userreport.py
+++ b/src/sentry/models/userreport.py
@@ -31,7 +31,7 @@ class Meta:
__repr__ = sane_repr("event_id", "name", "email")
- def notify(self):
+ def notify(self) -> None:
from sentry.tasks.user_report import user_report
user_report.delay(
diff --git a/src/sentry/monitors/apps.py b/src/sentry/monitors/apps.py
index 59b011972662d5..04d256192a5603 100644
--- a/src/sentry/monitors/apps.py
+++ b/src/sentry/monitors/apps.py
@@ -4,5 +4,5 @@
class Config(AppConfig):
name = "sentry.monitors"
- def ready(self):
+ def ready(self) -> None:
pass
diff --git a/src/sentry/monitors/clock_tasks/check_missed.py b/src/sentry/monitors/clock_tasks/check_missed.py
index f6899cb9795a6d..85028adb066ac1 100644
--- a/src/sentry/monitors/clock_tasks/check_missed.py
+++ b/src/sentry/monitors/clock_tasks/check_missed.py
@@ -42,7 +42,7 @@
)
-def dispatch_check_missing(ts: datetime):
+def dispatch_check_missing(ts: datetime) -> None:
"""
Given a clock tick timestamp determine which monitor environments are past
their next_checkin_latest, indicating they haven't checked-in when they
@@ -83,7 +83,7 @@ def dispatch_check_missing(ts: datetime):
produce_task(payload)
-def mark_environment_missing(monitor_environment_id: int, ts: datetime):
+def mark_environment_missing(monitor_environment_id: int, ts: datetime) -> None:
logger.info("mark_missing", extra={"monitor_environment_id": monitor_environment_id})
try:
@@ -100,7 +100,7 @@ def mark_environment_missing(monitor_environment_id: int, ts: datetime):
except MonitorEnvironment.DoesNotExist:
        # Nothing to do. We already handled this miss in an earlier task
# (or the environment was deleted)
- return
+ return None
monitor = monitor_environment.monitor
# next_checkin must be set, since detecting this monitor as missed means
diff --git a/src/sentry/monitors/clock_tasks/check_timeout.py b/src/sentry/monitors/clock_tasks/check_timeout.py
index cb66cd7d978809..833f1d6a593ea6 100644
--- a/src/sentry/monitors/clock_tasks/check_timeout.py
+++ b/src/sentry/monitors/clock_tasks/check_timeout.py
@@ -23,7 +23,7 @@
CHECKINS_LIMIT = 10_000
-def dispatch_check_timeout(ts: datetime):
+def dispatch_check_timeout(ts: datetime) -> None:
"""
Given a clock tick timestamp determine which check-ins are past their
timeout_at.
diff --git a/src/sentry/monitors/clock_tasks/mark_unknown.py b/src/sentry/monitors/clock_tasks/mark_unknown.py
index a0061664e2d558..1dd85281d04bdd 100644
--- a/src/sentry/monitors/clock_tasks/mark_unknown.py
+++ b/src/sentry/monitors/clock_tasks/mark_unknown.py
@@ -20,7 +20,7 @@
CHECKINS_LIMIT = 10_000
-def dispatch_mark_unknown(ts: datetime):
+def dispatch_mark_unknown(ts: datetime) -> None:
"""
Given a clock tick timestamp datetime which was processed where an anomaly
had been detected in the volume of check-ins that have been processed,
diff --git a/src/sentry/monitors/clock_tasks/producer.py b/src/sentry/monitors/clock_tasks/producer.py
index 7a8d24fc415334..173efca7928594 100644
--- a/src/sentry/monitors/clock_tasks/producer.py
+++ b/src/sentry/monitors/clock_tasks/producer.py
@@ -24,6 +24,6 @@ def _get_producer() -> KafkaProducer:
_clock_task_producer = SingletonProducer(_get_producer)
-def produce_task(payload: KafkaPayload):
+def produce_task(payload: KafkaPayload) -> None:
topic = get_topic_definition(Topic.MONITORS_CLOCK_TASKS)["real_topic_name"]
_clock_task_producer.produce(ArroyoTopic(topic), payload)
diff --git a/src/sentry/monitors/consumers/clock_tasks_consumer.py b/src/sentry/monitors/consumers/clock_tasks_consumer.py
index 5e9c608694347a..66a9ae784bf622 100644
--- a/src/sentry/monitors/consumers/clock_tasks_consumer.py
+++ b/src/sentry/monitors/consumers/clock_tasks_consumer.py
@@ -40,7 +40,7 @@ def is_mark_missing(wrapper: MonitorsClockTasks) -> TypeGuard[MarkMissing]:
return wrapper["type"] == "mark_missing"
-def process_clock_task(message: Message[KafkaPayload | FilteredPayload]):
+def process_clock_task(message: Message[KafkaPayload | FilteredPayload]) -> None:
assert not isinstance(message.payload, FilteredPayload)
assert isinstance(message.value, BrokerValue)
diff --git a/src/sentry/monitors/consumers/clock_tick_consumer.py b/src/sentry/monitors/consumers/clock_tick_consumer.py
index 9886b6d2359019..a6cf0b8e19c52f 100644
--- a/src/sentry/monitors/consumers/clock_tick_consumer.py
+++ b/src/sentry/monitors/consumers/clock_tick_consumer.py
@@ -24,7 +24,7 @@
MONITORS_CLOCK_TICK_CODEC: Codec[ClockTick] = get_topic_codec(Topic.MONITORS_CLOCK_TICK)
-def process_clock_tick(message: Message[KafkaPayload | FilteredPayload]):
+def process_clock_tick(message: Message[KafkaPayload | FilteredPayload]) -> None:
assert not isinstance(message.payload, FilteredPayload)
assert isinstance(message.value, BrokerValue)
diff --git a/src/sentry/monitors/consumers/incident_occurrences_consumer.py b/src/sentry/monitors/consumers/incident_occurrences_consumer.py
index 73e546b57da8c5..e8796581a631c4 100644
--- a/src/sentry/monitors/consumers/incident_occurrences_consumer.py
+++ b/src/sentry/monitors/consumers/incident_occurrences_consumer.py
@@ -43,7 +43,7 @@ def memoized_tick_decision(tick: datetime) -> TickAnomalyDecision | None:
def _process_incident_occurrence(
message: Message[KafkaPayload | FilteredPayload], txn: Transaction | Span
-):
+) -> None:
"""
Process an incident occurrence message. This will immediately dispatch an
issue occurrence via send_incident_occurrence.
@@ -74,7 +74,7 @@ def _process_incident_occurrence(
incident = MonitorIncident.objects.get(id=int(wrapper["incident_id"]))
except MonitorIncident.DoesNotExist:
logger.exception("missing_incident")
- return
+ return None
# previous_checkin_ids includes the failed_checkin_id
checkins = MonitorCheckIn.objects.filter(id__in=wrapper["previous_checkin_ids"])
@@ -89,7 +89,7 @@ def has_all(checkins: list[MonitorCheckIn | None]) -> TypeGuard[list[MonitorChec
# Unlikely, but if we can't find all the check-ins we can't produce an occurrence
if failed_checkin is None or not has_all(previous_checkins):
logger.error("missing_check_ins")
- return
+ return None
received = datetime.fromtimestamp(wrapper["received_ts"], UTC)
@@ -115,7 +115,7 @@ def has_all(checkins: list[MonitorCheckIn | None]) -> TypeGuard[list[MonitorChec
# Do NOT send the occurrence
txn.set_tag("result", "dropped")
metrics.incr("monitors.incident_ocurrences.dropped_incident_occurrence")
- return
+ return None
try:
send_incident_occurrence(failed_checkin, previous_checkins, incident, received)
@@ -125,7 +125,7 @@ def has_all(checkins: list[MonitorCheckIn | None]) -> TypeGuard[list[MonitorChec
logger.exception("failed_send_incident_occurrence")
-def process_incident_occurrence(message: Message[KafkaPayload | FilteredPayload]):
+def process_incident_occurrence(message: Message[KafkaPayload | FilteredPayload]) -> None:
with sentry_sdk.start_transaction(
op="_process_incident_occurrence",
name="monitors.incident_occurrence_consumer",
diff --git a/src/sentry/monitors/endpoints/organization_monitor_index_stats.py b/src/sentry/monitors/endpoints/organization_monitor_index_stats.py
index a1d9620a1d1e21..1db33d47736f0a 100644
--- a/src/sentry/monitors/endpoints/organization_monitor_index_stats.py
+++ b/src/sentry/monitors/endpoints/organization_monitor_index_stats.py
@@ -28,7 +28,7 @@
]
-def normalize_to_epoch(timestamp: datetime, seconds: int):
+def normalize_to_epoch(timestamp: datetime, seconds: int) -> int:
"""
Given a ``timestamp`` (datetime object) normalize to an epoch timestamp.
diff --git a/src/sentry/monitors/logic/incident_occurrence.py b/src/sentry/monitors/logic/incident_occurrence.py
index 5177eda8a03a6d..6d37530e1da453 100644
--- a/src/sentry/monitors/logic/incident_occurrence.py
+++ b/src/sentry/monitors/logic/incident_occurrence.py
@@ -269,7 +269,7 @@ def get_monitor_environment_context(monitor_environment: MonitorEnvironment):
}
-def resolve_incident_group(incident: MonitorIncident, project_id: int):
+def resolve_incident_group(incident: MonitorIncident, project_id: int) -> None:
status_change = StatusChangeMessage(
fingerprint=[incident.grouphash],
project_id=project_id,
diff --git a/src/sentry/new_migrations/monkey/__init__.py b/src/sentry/new_migrations/monkey/__init__.py
index 567016a2ddd6a4..144f2d5bc56f2a 100644
--- a/src/sentry/new_migrations/monkey/__init__.py
+++ b/src/sentry/new_migrations/monkey/__init__.py
@@ -91,7 +91,7 @@ def _ensure_patched_impl(*args: P.args, **kwargs: P.kwargs) -> None:
return _ensure_patched_impl
-def monkey_migrations():
+def monkey_migrations() -> None:
from django.core.management.commands import migrate
from django.db import models
diff --git a/src/sentry/notifications/apps.py b/src/sentry/notifications/apps.py
index 21e309d5ca4f8e..cfe77c77545697 100644
--- a/src/sentry/notifications/apps.py
+++ b/src/sentry/notifications/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.notifications"
- def ready(self):
+ def ready(self) -> None:
# Register the NotificationProviders for the platform
from sentry.notifications.platform.discord.provider import ( # NOQA
DiscordNotificationProvider,
diff --git a/src/sentry/notifications/notifications/digest.py b/src/sentry/notifications/notifications/digest.py
index 39e45332413b09..b118deadc115e4 100644
--- a/src/sentry/notifications/notifications/digest.py
+++ b/src/sentry/notifications/notifications/digest.py
@@ -33,7 +33,7 @@
get_integration_link,
get_rules,
)
-from sentry.services.eventstore.models import Event
+from sentry.services.eventstore.models import Event, GroupEvent
from sentry.types.actor import Actor
from sentry.types.rules import NotificationRuleDetails
@@ -172,7 +172,9 @@ def build_context(
def get_extra_context(
self,
- participants_by_provider_by_event: Mapping[Event, Mapping[ExternalProviders, set[Actor]]],
+ participants_by_provider_by_event: Mapping[
+ Event | GroupEvent, Mapping[ExternalProviders, set[Actor]]
+ ],
) -> Mapping[Actor, Mapping[str, Any]]:
personalized_digests = get_personalized_digests(
self.digest.digest, participants_by_provider_by_event
diff --git a/src/sentry/notifications/utils/links.py b/src/sentry/notifications/utils/links.py
index e973b6ebd15d40..c4741498a5450e 100644
--- a/src/sentry/notifications/utils/links.py
+++ b/src/sentry/notifications/utils/links.py
@@ -99,7 +99,7 @@ def get_integration_link(
)
-def get_issue_replay_link(group: Group, sentry_query_params: str = ""):
+def get_issue_replay_link(group: Group, sentry_query_params: str = "") -> str:
return str(group.get_absolute_url() + "replays/" + sentry_query_params)
@@ -123,7 +123,7 @@ def get_rules(
]
-def _fetch_rule_id(rule: Rule, type_id: int | None = None):
+def _fetch_rule_id(rule: Rule, type_id: int | None = None) -> int:
# Try to fetch the legacy rule id, if it fails, return the rule id
# This allows us to support both legacy and new rule ids
try:
diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py
index c5cc49bf433de7..da9030857393ed 100644
--- a/src/sentry/notifications/utils/participants.py
+++ b/src/sentry/notifications/utils/participants.py
@@ -30,7 +30,6 @@
NotificationSettingEnum,
NotificationSettingsOptionEnum,
)
-from sentry.services.eventstore.models import GroupEvent
from sentry.types.actor import Actor, ActorType
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
@@ -40,7 +39,7 @@
from sentry.utils.committers import AuthorCommitsSerialized, get_serialized_event_file_committers
if TYPE_CHECKING:
- from sentry.services.eventstore.models import Event
+ from sentry.services.eventstore.models import Event, GroupEvent
logger = logging.getLogger(__name__)
@@ -199,7 +198,7 @@ def get_participants_for_release(
def get_owners(
project: Project,
- event: Event | None = None,
+ event: Event | GroupEvent | None = None,
fallthrough_choice: FallthroughChoiceType | None = None,
) -> tuple[list[Actor], str]:
"""
@@ -229,7 +228,7 @@ def get_owners(
def get_owner_reason(
project: Project,
target_type: ActionTargetType,
- event: Event | None = None,
+ event: Event | GroupEvent | None = None,
fallthrough_choice: FallthroughChoiceType | None = None,
) -> str | None:
"""
@@ -284,7 +283,7 @@ def determine_eligible_recipients(
project: Project,
target_type: ActionTargetType,
target_identifier: int | None = None,
- event: Event | None = None,
+ event: Event | GroupEvent | None = None,
fallthrough_choice: FallthroughChoiceType | None = None,
) -> Iterable[Actor]:
"""
@@ -354,7 +353,7 @@ def get_send_to(
project: Project,
target_type: ActionTargetType,
target_identifier: int | None = None,
- event: Event | None = None,
+ event: Event | GroupEvent | None = None,
notification_type_enum: NotificationSettingEnum = NotificationSettingEnum.ISSUE_ALERTS,
fallthrough_choice: FallthroughChoiceType | None = None,
rules: Iterable[Rule] | None = None,
diff --git a/src/sentry/objectstore/metrics.py b/src/sentry/objectstore/metrics.py
index f52058ea29727f..0282a7bf51ff39 100644
--- a/src/sentry/objectstore/metrics.py
+++ b/src/sentry/objectstore/metrics.py
@@ -17,25 +17,25 @@ def __init__(self, operation: str, usecase: str):
self.compressed_size: int | None = None
self.compression: str = "unknown"
- def record_latency(self, elapsed: float):
+ def record_latency(self, elapsed: float) -> None:
tags = {"usecase": self.usecase}
metrics.timing(f"storage.{self.operation}.latency", elapsed, tags=tags)
self.elapsed = elapsed
- def record_uncompressed_size(self, value: int):
+ def record_uncompressed_size(self, value: int) -> None:
tags = {"usecase": self.usecase, "compression": "none"}
metrics.distribution(f"storage.{self.operation}.size", value, tags=tags, unit="byte")
self.uncompressed_size = value
- def record_compressed_size(self, value: int, compression: str = "unknown"):
+ def record_compressed_size(self, value: int, compression: str = "unknown") -> None:
tags = {"usecase": self.usecase, "compression": compression}
metrics.distribution(f"storage.{self.operation}.size", value, tags=tags, unit="byte")
self.compressed_size = value
self.compression = compression
- def maybe_record_compression_ratio(self):
+ def maybe_record_compression_ratio(self) -> None:
if not self.uncompressed_size or not self.compressed_size:
- return
+ return None
tags = {"usecase": self.usecase, "compression": self.compression}
metrics.distribution(
@@ -44,9 +44,9 @@ def maybe_record_compression_ratio(self):
tags=tags,
)
- def maybe_record_throughputs(self):
+ def maybe_record_throughputs(self) -> None:
if not self.elapsed or self.elapsed <= 0:
- return
+ return None
sizes = []
if self.uncompressed_size:
diff --git a/src/sentry/options/__init__.py b/src/sentry/options/__init__.py
index 5c5be10beef242..3f8d0848e34504 100644
--- a/src/sentry/options/__init__.py
+++ b/src/sentry/options/__init__.py
@@ -76,7 +76,7 @@
can_update = default_manager.can_update
-def load_defaults():
+def load_defaults() -> None:
from sentry.hybridcloud import options # NOQA
from . import defaults # NOQA
diff --git a/src/sentry/plugins/base/v1.py b/src/sentry/plugins/base/v1.py
index 5abd51074266ba..d0af76bafb5389 100644
--- a/src/sentry/plugins/base/v1.py
+++ b/src/sentry/plugins/base/v1.py
@@ -80,10 +80,10 @@ class IPlugin(local, PluggableViewMixin, PluginConfigMixin):
# used by queries to determine if the plugin is configured
required_field: str | None = None
- def _get_option_key(self, key):
+ def _get_option_key(self, key) -> str:
return f"{self.get_conf_key()}:{key}"
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "default"
def is_enabled(self, project: Project | RpcProject | None = None):
diff --git a/src/sentry/plugins/base/v2.py b/src/sentry/plugins/base/v2.py
index 58eabac95bd32d..ab9eaa82c4f54d 100644
--- a/src/sentry/plugins/base/v2.py
+++ b/src/sentry/plugins/base/v2.py
@@ -78,10 +78,10 @@ class IPlugin2(local, PluginConfigMixin):
# used by queries to determine if the plugin is configured
required_field: str | None = None
- def _get_option_key(self, key):
+ def _get_option_key(self, key) -> str:
return f"{self.get_conf_key()}:{key}"
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "default"
def is_enabled(self, project=None):
diff --git a/src/sentry/plugins/bases/data_forwarding.py b/src/sentry/plugins/bases/data_forwarding.py
index 35f6e5d675369a..bc96c90837bdda 100644
--- a/src/sentry/plugins/bases/data_forwarding.py
+++ b/src/sentry/plugins/bases/data_forwarding.py
@@ -12,7 +12,7 @@
class DataForwardingPlugin(Plugin):
- def has_project_conf(self):
+ def has_project_conf(self) -> bool:
return True
def get_rate_limit(self):
@@ -28,10 +28,10 @@ def forward_event(self, event: Event, payload: MutableMapping[str, Any]) -> bool
def get_event_payload(self, event):
return serialize(event)
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "data-forwarding"
- def get_rl_key(self, event):
+ def get_rl_key(self, event) -> str:
return f"{self.conf_key}:{event.project.organization_id}"
def initialize_variables(self, event):
diff --git a/src/sentry/plugins/bases/issue.py b/src/sentry/plugins/bases/issue.py
index 976a519e755a88..a3f6db1df5fe18 100644
--- a/src/sentry/plugins/bases/issue.py
+++ b/src/sentry/plugins/bases/issue.py
@@ -30,7 +30,7 @@ class IssueTrackingPlugin(Plugin):
needs_auth_template = "sentry/plugins/bases/issue/needs_auth.html"
auth_provider: str | None = None
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "issue-tracking"
def _get_group_body(self, group, event, **kwargs):
diff --git a/src/sentry/plugins/bases/issue2.py b/src/sentry/plugins/bases/issue2.py
index 9ed6b953e255d3..b9794a23cf8551 100644
--- a/src/sentry/plugins/bases/issue2.py
+++ b/src/sentry/plugins/bases/issue2.py
@@ -95,10 +95,10 @@ class IssueTrackingPlugin2(Plugin):
issue_fields: frozenset[str] | None = None
# issue_fields = frozenset(['id', 'title', 'url'])
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "issue-tracking"
- def has_project_conf(self):
+ def has_project_conf(self) -> bool:
return True
def get_group_body(self, group, event, **kwargs):
diff --git a/src/sentry/plugins/bases/notify.py b/src/sentry/plugins/bases/notify.py
index 90cc9378fa7870..dbd92fb484fc1d 100644
--- a/src/sentry/plugins/bases/notify.py
+++ b/src/sentry/plugins/bases/notify.py
@@ -27,7 +27,7 @@ class NotificationPlugin(Plugin):
)
project_conf_form: type[forms.Form] = NotificationConfigurationForm
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "notification"
def notify(self, notification: Notification, raise_exception: bool = False) -> None:
diff --git a/src/sentry/plugins/bases/releasetracking.py b/src/sentry/plugins/bases/releasetracking.py
index 28f59ae0593ad3..899406283388c0 100644
--- a/src/sentry/plugins/bases/releasetracking.py
+++ b/src/sentry/plugins/bases/releasetracking.py
@@ -2,7 +2,7 @@
class ReleaseTrackingPlugin(Plugin2):
- def get_plugin_type(self):
+ def get_plugin_type(self) -> str:
return "release-tracking"
def get_release_doc_html(self, hook_url):
diff --git a/src/sentry/plugins/examples/issue_tracking.py b/src/sentry/plugins/examples/issue_tracking.py
index 7666d0a9ea1375..ee868fc96b4889 100644
--- a/src/sentry/plugins/examples/issue_tracking.py
+++ b/src/sentry/plugins/examples/issue_tracking.py
@@ -28,7 +28,7 @@ def get_new_issue_fields(self, request: Request, group, event, **kwargs):
*fields,
]
- def create_issue(self, request: Request, group, form_data):
+ def create_issue(self, request: Request, group, form_data) -> str:
return "1"
def get_issue_label(self, group, issue_id: str) -> str:
diff --git a/src/sentry/plugins/sentry_interface_types/apps.py b/src/sentry/plugins/sentry_interface_types/apps.py
index c959cee3f2e6ce..08610257b95697 100644
--- a/src/sentry/plugins/sentry_interface_types/apps.py
+++ b/src/sentry/plugins/sentry_interface_types/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.plugins.sentry_interface_types"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .models import InterfaceTypePlugin
diff --git a/src/sentry/plugins/sentry_urls/apps.py b/src/sentry/plugins/sentry_urls/apps.py
index ef174123a6d5cc..fe4c76279e0bb1 100644
--- a/src/sentry/plugins/sentry_urls/apps.py
+++ b/src/sentry/plugins/sentry_urls/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.plugins.sentry_urls"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .models import UrlsPlugin
diff --git a/src/sentry/plugins/sentry_useragents/apps.py b/src/sentry/plugins/sentry_useragents/apps.py
index 411431d6c31d9b..c614a55efc0da8 100644
--- a/src/sentry/plugins/sentry_useragents/apps.py
+++ b/src/sentry/plugins/sentry_useragents/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.plugins.sentry_useragents"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .models import BrowserPlugin, DevicePlugin, OsPlugin
diff --git a/src/sentry/plugins/sentry_webhooks/apps.py b/src/sentry/plugins/sentry_webhooks/apps.py
index 3f290c505e7355..3151298b801533 100644
--- a/src/sentry/plugins/sentry_webhooks/apps.py
+++ b/src/sentry/plugins/sentry_webhooks/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.plugins.sentry_webhooks"
- def ready(self):
+ def ready(self) -> None:
from sentry.plugins.base import register
from .plugin import WebHooksPlugin
diff --git a/src/sentry/relay/globalconfig.py b/src/sentry/relay/globalconfig.py
index 10a12d6c89db25..4ae6f83747b8b7 100644
--- a/src/sentry/relay/globalconfig.py
+++ b/src/sentry/relay/globalconfig.py
@@ -73,7 +73,7 @@ def span_op_defaults() -> SpanOpDefaults:
@metrics.wraps("relay.globalconfig.get")
-def get_global_config():
+def get_global_config() -> GlobalConfig:
"""Return the global configuration for Relay."""
global_config: GlobalConfig = {
diff --git a/src/sentry/relay/projectconfig_cache/redis.py b/src/sentry/relay/projectconfig_cache/redis.py
index 8df170d9f11081..102a2a24ac3867 100644
--- a/src/sentry/relay/projectconfig_cache/redis.py
+++ b/src/sentry/relay/projectconfig_cache/redis.py
@@ -27,10 +27,10 @@ def __init__(self, **options):
def validate(self):
validate_dynamic_cluster(True, self.cluster)
- def __get_redis_key(self, public_key):
+ def __get_redis_key(self, public_key) -> str:
return f"relayconfig:{public_key}"
- def __get_redis_rev_key(self, public_key):
+ def __get_redis_rev_key(self, public_key) -> str:
return f"{self.__get_redis_key(public_key)}.rev"
def set_many(self, configs: dict[str, Mapping[str, Any]]):
diff --git a/src/sentry/remote_subscriptions/apps.py b/src/sentry/remote_subscriptions/apps.py
index 00c15abf5309d2..f14a4543b52d1a 100644
--- a/src/sentry/remote_subscriptions/apps.py
+++ b/src/sentry/remote_subscriptions/apps.py
@@ -4,5 +4,5 @@
class Config(AppConfig):
name = "sentry.remote_subscriptions"
- def ready(self):
+ def ready(self) -> None:
pass
diff --git a/src/sentry/replays/lib/eap/snuba_transpiler.py b/src/sentry/replays/lib/eap/snuba_transpiler.py
index 9f08248b143b41..8981a40b88d85d 100644
--- a/src/sentry/replays/lib/eap/snuba_transpiler.py
+++ b/src/sentry/replays/lib/eap/snuba_transpiler.py
@@ -790,7 +790,7 @@ def label(expr: Column | CurriedFunction | Function | ScalarType) -> str:
return json.dumps(expr)
-def extrapolation_mode(settings: Settings):
+def extrapolation_mode(settings: Settings) -> ExtrapolationMode.ValueType:
return EXTRAPOLATION_MODE_MAP[settings.get("extrapolation_mode", "none")]
diff --git a/src/sentry/replays/lib/selector/parse.py b/src/sentry/replays/lib/selector/parse.py
index cfc4d2c581112c..c9961dd1fe09ef 100644
--- a/src/sentry/replays/lib/selector/parse.py
+++ b/src/sentry/replays/lib/selector/parse.py
@@ -9,7 +9,7 @@
class QueryType:
- def __init__(self):
+ def __init__(self) -> None:
self.alt: str | None = None
self.aria_label: str | None = None
self.classes: list[str] = []
diff --git a/src/sentry/replays/usecases/events.py b/src/sentry/replays/usecases/events.py
index c1c4df53e4735d..8db08728718560 100644
--- a/src/sentry/replays/usecases/events.py
+++ b/src/sentry/replays/usecases/events.py
@@ -55,7 +55,7 @@ def _replay_event(project_id: int, replay_id: str, event: dict[str, Any]) -> str
)
-def publish_replay_event(message: str, is_async: bool):
+def publish_replay_event(message: str, is_async: bool) -> None:
"""Publish a replay-event to the replay snuba consumer topic."""
publisher = initialize_replays_publisher(is_async=is_async)
publisher.publish("ingest-replay-events", message)
diff --git a/src/sentry/replays/usecases/ingest/event_parser.py b/src/sentry/replays/usecases/ingest/event_parser.py
index 02c264c34725d5..b0779c8d23ddf2 100644
--- a/src/sentry/replays/usecases/ingest/event_parser.py
+++ b/src/sentry/replays/usecases/ingest/event_parser.py
@@ -269,7 +269,7 @@ def get_timestamp_ms(event: dict[str, Any], event_type: EventType) -> float:
class EAPEventsBuilder:
- def __init__(self, context: EventContext):
+ def __init__(self, context: EventContext) -> None:
self.context = context
self.events: list[TraceItem] = []
@@ -584,7 +584,7 @@ class HighlightedEvents(TypedDict, total=False):
class HighlightedEventsBuilder:
- def __init__(self):
+ def __init__(self) -> None:
self.events: HighlightedEvents = {
"canvas_sizes": [],
"clicks": [],
diff --git a/src/sentry/search/eap/spans/formulas.py b/src/sentry/search/eap/spans/formulas.py
index 5fcdf2e3d00844..299653934b527d 100644
--- a/src/sentry/search/eap/spans/formulas.py
+++ b/src/sentry/search/eap/spans/formulas.py
@@ -342,7 +342,9 @@ def opportunity_score(args: ResolvedArguments, settings: ResolverSettings) -> Co
)
-def total_opportunity_score(_: ResolvedArguments, settings: ResolverSettings):
+def total_opportunity_score(
+ _: ResolvedArguments, settings: ResolverSettings
+) -> Column.BinaryFormula:
vitals = ["lcp", "fcp", "cls", "ttfb", "inp"]
vital_score_columns: list[Column] = []
diff --git a/src/sentry/search/eap/spans/utils.py b/src/sentry/search/eap/spans/utils.py
index cc8cea3b8b59fd..0396ac41a90856 100644
--- a/src/sentry/search/eap/spans/utils.py
+++ b/src/sentry/search/eap/spans/utils.py
@@ -22,7 +22,7 @@ def operate_multiple_columns(
if len(columns) < 2:
raise ValueError("No columns to operate")
- def _operate_multiple_columns(idx: int):
+ def _operate_multiple_columns(idx: int) -> Column.BinaryFormula:
two_columns_left = idx == len(columns) - 2
if two_columns_left:
return Column.BinaryFormula(
diff --git a/src/sentry/search/events/datasets/metrics.py b/src/sentry/search/events/datasets/metrics.py
index 0f952bd42e3e78..240ba1988ed03e 100644
--- a/src/sentry/search/events/datasets/metrics.py
+++ b/src/sentry/search/events/datasets/metrics.py
@@ -81,7 +81,7 @@ def resolve_metric(self, value: str) -> int:
return metric_id
@property
- def should_skip_interval_calculation(self):
+ def should_skip_interval_calculation(self) -> bool:
return self.builder.builder_config.skip_time_conditions and (
not self.builder.params.start or not self.builder.params.end
)
diff --git a/src/sentry/search/events/datasets/spans_indexed.py b/src/sentry/search/events/datasets/spans_indexed.py
index 530acce096325a..6f2f62940c4c26 100644
--- a/src/sentry/search/events/datasets/spans_indexed.py
+++ b/src/sentry/search/events/datasets/spans_indexed.py
@@ -1064,7 +1064,7 @@ def _query_total_counts(self) -> tuple[float, float]:
return self._cached_count_and_weighted
@cached_property
- def _zscore(self):
+ def _zscore(self) -> float | int:
"""Defaults to 1.96, based on a z score for a confidence level of 95%"""
return options.get("performance.extrapolation.confidence.z-score")
diff --git a/src/sentry/seer/anomaly_detection/store_data.py b/src/sentry/seer/anomaly_detection/store_data.py
index 02d7fed152ee31..8511c49383eeee 100644
--- a/src/sentry/seer/anomaly_detection/store_data.py
+++ b/src/sentry/seer/anomaly_detection/store_data.py
@@ -65,7 +65,7 @@ def handle_send_historical_data_to_seer(
project: Project,
method: str,
event_types: list[SnubaQueryEventType.EventType] | None = None,
-):
+) -> None:
event_types_param = event_types or snuba_query.event_types
try:
rule_status = send_historical_data_to_seer(
diff --git a/src/sentry/seer/fetch_issues/more_parsing.py b/src/sentry/seer/fetch_issues/more_parsing.py
index f1cfc9b0931719..10b73aa14de26f 100644
--- a/src/sentry/seer/fetch_issues/more_parsing.py
+++ b/src/sentry/seer/fetch_issues/more_parsing.py
@@ -14,7 +14,7 @@
def simple_function_name_conditions(
function_names: list[str],
stack_frame_idx: int,
-):
+) -> Condition:
return Condition(stackframe_function_name(stack_frame_idx), Op.IN, function_names)
diff --git a/src/sentry/seer/math.py b/src/sentry/seer/math.py
index e0f433f7e74f5e..6668078f1157e5 100644
--- a/src/sentry/seer/math.py
+++ b/src/sentry/seer/math.py
@@ -83,7 +83,7 @@ def _rrf(kl_rank: int, entropy_rank: int) -> float:
]
-def rank_min(xs: list[float], ascending: bool = False):
+def rank_min(xs: list[float], ascending: bool = False) -> list[int]:
ranks = {x: rank for rank, x in enumerate(sorted(set(xs), reverse=not ascending), 1)}
return [ranks[x] for x in xs]
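A short usage sketch (illustrative, not part of the patch) to make the new `-> list[int]` annotation concrete: `rank_min` produces dense ranks, tied values share a rank, and `ascending=False` (the default) gives the largest value rank 1.

    from sentry.seer.math import rank_min

    assert rank_min([5.0, 3.0, 5.0, 1.0]) == [1, 2, 1, 3]                  # descending by default
    assert rank_min([5.0, 3.0, 5.0, 1.0], ascending=True) == [3, 2, 3, 1]  # smallest value ranks first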
diff --git a/src/sentry/seer/services/test_generation/model.py b/src/sentry/seer/services/test_generation/model.py
index 8d27341e8a41a5..8715f71f21e131 100644
--- a/src/sentry/seer/services/test_generation/model.py
+++ b/src/sentry/seer/services/test_generation/model.py
@@ -10,5 +10,5 @@ class CreateUnitTestResponse(RpcModel):
error_detail: str | None = None
@property
- def success(self):
+ def success(self) -> bool:
return self.error_detail is None
diff --git a/src/sentry/seer/vendored.py b/src/sentry/seer/vendored.py
index 70734c766c2756..0f7f6bb82ac8eb 100644
--- a/src/sentry/seer/vendored.py
+++ b/src/sentry/seer/vendored.py
@@ -44,7 +44,7 @@ def entr(x: float) -> float:
# https://github.com/scipy/scipy/blob/ce4b43097356dfc42504d81d6164b73ee0896c71/scipy/special/_convex_analysis.pxd#L28-L36
-def rel_entr(x: float, y: float):
+def rel_entr(x: float, y: float) -> float:
if math.isnan(x) or math.isnan(y):
return math.nan
elif x > 0 and y > 0:
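For reference, the scipy routine linked above defines pointwise relative entropy as x*log(x/y) on the positive quadrant, 0 when x == 0 and y >= 0, and +inf otherwise (nan propagates), which is why the annotated return type is a plain `float`. A minimal sketch of that definition, assuming the vendored copy follows scipy exactly:

    import math

    def rel_entr_reference(x: float, y: float) -> float:
        # scipy.special.rel_entr semantics; illustrative, not the vendored code itself
        if math.isnan(x) or math.isnan(y):
            return math.nan
        if x > 0 and y > 0:
            return x * math.log(x / y)
        if x == 0 and y >= 0:
            return 0.0
        return math.inf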
diff --git a/src/sentry/seer/workflows/compare.py b/src/sentry/seer/workflows/compare.py
index 4765d7226ce179..cc2f6671ac8fb3 100644
--- a/src/sentry/seer/workflows/compare.py
+++ b/src/sentry/seer/workflows/compare.py
@@ -63,7 +63,7 @@ def keyed_rrf_score(
[("key", "true", 93), ("key", "false", 219), ("other", "true", 1)]
"""
- def _scoring_fn(baseline: list[float], outliers: list[float]):
+ def _scoring_fn(baseline: list[float], outliers: list[float]) -> tuple[float, float]:
return (entropy(outliers), kl_divergence(baseline, outliers))
scored_keys = _score_each_key(
@@ -217,7 +217,7 @@ def keyed_rrf_score_with_filter(
[("key", 0.5, True), ("key", 0.3, False), ("other", 0.1, False)]
"""
- def _scoring_fn(baseline: list[float], outliers: list[float]):
+ def _scoring_fn(baseline: list[float], outliers: list[float]) -> tuple[float, float]:
return (entropy(outliers), kl_divergence(baseline, outliers))
scored_keys = _score_each_key(
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
index 4b9f3caf8905de..115bb1655ad7d3 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_avatar.py
@@ -32,5 +32,5 @@ def put(self, request: Request, **kwargs) -> Response:
request, access=request.access, serializer=SentryAppSerializer(), **kwargs
)
- def get_avatar_filename(self, obj):
+ def get_avatar_filename(self, obj) -> str:
return f"{obj.slug}.png"
diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
index 73e2783c6a8bd8..fc951688ffe5c1 100644
--- a/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
+++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_rotate_secret.py
@@ -27,7 +27,7 @@ class SentryAppRotateSecretPermission(DemoSafePermission):
"POST": ["org:write", "org:admin"],
}
- def has_object_permission(self, request: Request, view: object, sentry_app: SentryApp):
+ def has_object_permission(self, request: Request, view: object, sentry_app: SentryApp) -> bool:
log_info = {
"user_id": request.user.id,
"sentry_app_name": sentry_app.name,
diff --git a/src/sentry/sentry_apps/api/utils/webhook_requests.py b/src/sentry/sentry_apps/api/utils/webhook_requests.py
index 16abe5b37f449e..9010c6b3474b19 100644
--- a/src/sentry/sentry_apps/api/utils/webhook_requests.py
+++ b/src/sentry/sentry_apps/api/utils/webhook_requests.py
@@ -16,7 +16,7 @@ class BufferedRequest:
id: int
data: RpcSentryAppRequest
- def __hash__(self):
+ def __hash__(self) -> int:
return self.id
diff --git a/src/sentry/sentry_apps/installations.py b/src/sentry/sentry_apps/installations.py
index 45eba253da0fe7..a116e4ee3cea17 100644
--- a/src/sentry/sentry_apps/installations.py
+++ b/src/sentry/sentry_apps/installations.py
@@ -248,13 +248,13 @@ def run(self) -> SentryAppInstallation:
self.record_analytics()
return self.sentry_app_installation
- def _update_status(self):
+ def _update_status(self) -> None:
# convert from string to integer
if self.status == SentryAppInstallationStatus.INSTALLED_STR:
for install in SentryAppInstallation.objects.filter(id=self.sentry_app_installation.id):
install.update(status=SentryAppInstallationStatus.INSTALLED)
- def record_analytics(self):
+ def record_analytics(self) -> None:
analytics.record(
"sentry_app_installation.updated",
sentry_app_installation_id=self.sentry_app_installation.id,
diff --git a/src/sentry/sentry_apps/token_exchange/grant_exchanger.py b/src/sentry/sentry_apps/token_exchange/grant_exchanger.py
index 5b78ed675b9709..06c756bd24207e 100644
--- a/src/sentry/sentry_apps/token_exchange/grant_exchanger.py
+++ b/src/sentry/sentry_apps/token_exchange/grant_exchanger.py
@@ -37,7 +37,7 @@ class GrantExchanger:
client_id: str
user: User
- def run(self):
+ def run(self) -> ApiToken:
with SentryAppInteractionEvent(
operation_type=SentryAppInteractionType.AUTHORIZATIONS,
event_type=SentryAppEventType.GRANT_EXCHANGER,
diff --git a/src/sentry/sentry_apps/token_exchange/util.py b/src/sentry/sentry_apps/token_exchange/util.py
index c8ee54752396ed..e387be01a95b28 100644
--- a/src/sentry/sentry_apps/token_exchange/util.py
+++ b/src/sentry/sentry_apps/token_exchange/util.py
@@ -1,4 +1,4 @@
-from datetime import timedelta
+from datetime import datetime, timedelta
from django.utils import timezone
@@ -15,5 +15,5 @@ class GrantTypes:
REFRESH = REFRESH
-def token_expiration():
+def token_expiration() -> datetime:
return timezone.now() + timedelta(hours=TOKEN_LIFE_IN_HOURS)
diff --git a/src/sentry/sentry_metrics/indexer/cache.py b/src/sentry/sentry_metrics/indexer/cache.py
index 4ced794ad54026..a71b9674716977 100644
--- a/src/sentry/sentry_metrics/indexer/cache.py
+++ b/src/sentry/sentry_metrics/indexer/cache.py
@@ -75,7 +75,7 @@ def _make_namespaced_cache_key(self, namespace: str, key: str) -> str:
return f"indexer:{self.partition_key}:{namespace}:org:str:{use_case_id}:{hashed}"
- def _make_cache_val(self, val: int, timestamp: int):
+ def _make_cache_val(self, val: int, timestamp: int) -> str:
return f"{val}:{timestamp}"
def _format_results(
diff --git a/src/sentry/sentry_metrics/querying/data/execution.py b/src/sentry/sentry_metrics/querying/data/execution.py
index e593074dd2dc1f..1699e4789aed17 100644
--- a/src/sentry/sentry_metrics/querying/data/execution.py
+++ b/src/sentry/sentry_metrics/querying/data/execution.py
@@ -288,7 +288,7 @@ class QueryResult:
result: Mapping[str, Any]
has_more: bool
- def __post_init__(self):
+ def __post_init__(self) -> None:
if not self.series_query and not self.totals_query:
raise MetricsQueryExecutionError(
"A query result must contain at least one series or totals query"
@@ -665,7 +665,7 @@ def _bulk_execute(self) -> bool:
return True
- def _execution_loop(self):
+ def _execution_loop(self) -> None:
"""
Executes the next batch of queries until no query is left.
"""
@@ -699,7 +699,7 @@ def execute(self) -> list[QueryResult]:
return cast(list[QueryResult], self._query_results)
- def schedule(self, intermediate_query: IntermediateQuery, query_type: QueryType):
+ def schedule(self, intermediate_query: IntermediateQuery, query_type: QueryType) -> None:
"""
Lazily schedules an IntermediateQuery for execution and runs initialization code for each ScheduledQuery.
"""
diff --git a/src/sentry/sentry_metrics/querying/data/mapping/project_mapper.py b/src/sentry/sentry_metrics/querying/data/mapping/project_mapper.py
index 3ae9d1ae914268..f8506e963a231d 100644
--- a/src/sentry/sentry_metrics/querying/data/mapping/project_mapper.py
+++ b/src/sentry/sentry_metrics/querying/data/mapping/project_mapper.py
@@ -8,7 +8,7 @@ class Project2ProjectIDMapper(Mapper):
from_key: str = "project"
to_key: str = "project_id"
- def __init__(self):
+ def __init__(self) -> None:
super().__init__()
def forward(self, projects: Sequence[Project], value: str) -> int:
diff --git a/src/sentry/sentry_metrics/querying/units.py b/src/sentry/sentry_metrics/querying/units.py
index 31ca633f64c5fd..64f5d2881cc2b5 100644
--- a/src/sentry/sentry_metrics/querying/units.py
+++ b/src/sentry/sentry_metrics/querying/units.py
@@ -93,7 +93,7 @@ def apply_on_query_expression(self, query_expression: QueryExpression) -> QueryE
parameters=[query_expression, self.scaling_factor],
)
- def __hash__(self):
+ def __hash__(self) -> int:
return hash(self.name)
diff --git a/src/sentry/silo/patches/silo_aware_transaction_patch.py b/src/sentry/silo/patches/silo_aware_transaction_patch.py
index 265675f2549217..b4f429aef8cb7d 100644
--- a/src/sentry/silo/patches/silo_aware_transaction_patch.py
+++ b/src/sentry/silo/patches/silo_aware_transaction_patch.py
@@ -87,7 +87,7 @@ def seek(module_path: str, function_name: str) -> bool:
)
-def validate_transaction_using_for_silo_mode(using: str | None):
+def validate_transaction_using_for_silo_mode(using: str | None) -> None:
from sentry.hybridcloud.models.outbox import ControlOutbox, RegionOutbox
from sentry.silo.base import SiloMode
@@ -119,7 +119,7 @@ def validate_transaction_using_for_silo_mode(using: str | None):
)
-def patch_silo_aware_atomic():
+def patch_silo_aware_atomic() -> None:
global _default_on_commit, _default_get_connection, _default_atomic_impl
_default_atomic_impl = transaction.atomic
diff --git a/src/sentry/silo/safety.py b/src/sentry/silo/safety.py
index 50c16707f22499..b847ef129b8f07 100644
--- a/src/sentry/silo/safety.py
+++ b/src/sentry/silo/safety.py
@@ -3,7 +3,7 @@
import contextlib
import re
from collections import defaultdict
-from collections.abc import MutableMapping
+from collections.abc import Generator, MutableMapping
from typing import Any
from django.db.transaction import get_connection
@@ -22,7 +22,7 @@ def match_fence_query(query: str) -> re.Match[str] | None:
@contextlib.contextmanager
-def unguarded_write(using: str, *args: Any, **kwargs: Any):
+def unguarded_write(using: str, *args: Any, **kwargs: Any) -> Generator[None]:
"""
Used to indicate that the wrapped block is safe to do
mutations on outbox backed records.
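For context on the `Generator[None]` annotations introduced here and in the testutils changes below: a `@contextlib.contextmanager` function is a generator that yields exactly once, so its declared return type is the generator type with a `None` yield type (the single-argument form relies on the send and return types defaulting to `None` in the stubs this codebase targets). A minimal standalone sketch, not taken from the patch:

    import contextlib
    from collections.abc import Generator

    @contextlib.contextmanager
    def example_guard() -> Generator[None]:
        # setup would run here
        yield  # the with-block body executes at this point
        # teardown would run here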
diff --git a/src/sentry/similarity/backends/dummy.py b/src/sentry/similarity/backends/dummy.py
index 80e1bd2c2804ee..2cf4e54f34652e 100644
--- a/src/sentry/similarity/backends/dummy.py
+++ b/src/sentry/similarity/backends/dummy.py
@@ -11,10 +11,10 @@ def compare(self, scope, key, items, limit=None, timestamp=None):
def record(self, scope, key, items, timestamp=None):
return {}
- def merge(self, scope, destination, items, timestamp=None):
+ def merge(self, scope, destination, items, timestamp=None) -> bool:
return False
- def delete(self, scope, items, timestamp=None):
+ def delete(self, scope, items, timestamp=None) -> bool:
return False
def scan(self, scope, indices, batch=1000, timestamp=None):
diff --git a/src/sentry/snuba/utils.py b/src/sentry/snuba/utils.py
index 6aa4d3416dd33a..70585f53bcfe69 100644
--- a/src/sentry/snuba/utils.py
+++ b/src/sentry/snuba/utils.py
@@ -109,9 +109,11 @@
]
-def get_dataset(dataset_label: str) -> Any | None:
+def get_dataset(dataset_label: str | None) -> Any | None:
if dataset_label in DEPRECATED_LABELS:
logger.warning("query.deprecated_dataset.%s", dataset_label)
+ if dataset_label is None:
+ return None
return DATASET_OPTIONS.get(dataset_label)
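Why the early return accompanies the widened `str | None` parameter: `DATASET_OPTIONS` is keyed by `str`, so a type checker rejects the `.get()` lookup until the `None` case has been narrowed away. The same pattern in isolation (names here are illustrative, not from the codebase):

    OPTIONS: dict[str, object] = {"errors": object()}

    def lookup(label: str | None) -> object | None:
        if label is None:
            return None  # narrow away None before the str-keyed lookup
        return OPTIONS.get(label)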
diff --git a/src/sentry/statistical_detectors/redis.py b/src/sentry/statistical_detectors/redis.py
index cdd4fa02a75700..c0db5504e626ca 100644
--- a/src/sentry/statistical_detectors/redis.py
+++ b/src/sentry/statistical_detectors/redis.py
@@ -28,7 +28,7 @@ def __init__(
def client(
self,
client: RedisCluster | StrictRedis | None = None,
- ):
+ ) -> RedisCluster | StrictRedis:
if self._client is None:
self._client = self.get_redis_client() if client is None else client
return self._client
@@ -46,7 +46,7 @@ def bulk_write_states(
self,
payloads: list[DetectorPayload],
states: list[Mapping[str | bytes, bytes | float | int | str] | None],
- ):
+ ) -> None:
# the number of new states must match the number of payloads
assert len(states) == len(payloads)
@@ -60,7 +60,7 @@ def bulk_write_states(
pipeline.execute()
- def make_key(self, payload: DetectorPayload):
+ def make_key(self, payload: DetectorPayload) -> str:
return (
f"sd:p:{payload.project_id}:{self.regression_type.abbreviate()}:{payload.fingerprint}"
)
diff --git a/src/sentry/statistical_detectors/store.py b/src/sentry/statistical_detectors/store.py
index fc8c2b8bb62cc7..12499bae2878ca 100644
--- a/src/sentry/statistical_detectors/store.py
+++ b/src/sentry/statistical_detectors/store.py
@@ -11,4 +11,4 @@ class DetectorStore(ABC, Generic[T]):
def bulk_read_states(self, payloads: list[DetectorPayload]) -> list[T]: ...
@abstractmethod
- def bulk_write_states(self, payloads: list[DetectorPayload], states: list[T]): ...
+ def bulk_write_states(self, payloads: list[DetectorPayload], states: list[T]) -> None: ...
diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py
index 22db10b764f0f4..6844199f3a437d 100644
--- a/src/sentry/tagstore/snuba/backend.py
+++ b/src/sentry/tagstore/snuba/backend.py
@@ -1457,7 +1457,7 @@ def get_groups_user_counts(self, *args, **kwargs):
def get_generic_groups_user_counts(self, *args, **kwargs):
raise NotImplementedError
- def get_snuba_column_name(self, key: str, dataset: Dataset):
+ def get_snuba_column_name(self, key: str, dataset: Dataset) -> str:
return f"flags[{key}]"
def is_reserved_key(self, key: str) -> bool:
diff --git a/src/sentry/tasks/auto_remove_inbox.py b/src/sentry/tasks/auto_remove_inbox.py
index c9c77c0ea0fe65..553b2e2c8e57e4 100644
--- a/src/sentry/tasks/auto_remove_inbox.py
+++ b/src/sentry/tasks/auto_remove_inbox.py
@@ -16,5 +16,5 @@
processing_deadline_duration=120,
),
)
-def auto_remove_inbox():
+def auto_remove_inbox() -> None:
BulkDeleteQuery(model=GroupInbox, days=7, dtfield="date_added").execute()
diff --git a/src/sentry/tasks/autofix.py b/src/sentry/tasks/autofix.py
index 0023b72359c619..ca62d2ee533f16 100644
--- a/src/sentry/tasks/autofix.py
+++ b/src/sentry/tasks/autofix.py
@@ -22,7 +22,7 @@
),
),
)
-def check_autofix_status(run_id: int):
+def check_autofix_status(run_id: int) -> None:
state = get_autofix_state(run_id=run_id)
if (
@@ -50,7 +50,7 @@ def check_autofix_status(run_id: int):
),
),
)
-def start_seer_automation(group_id: int):
+def start_seer_automation(group_id: int) -> None:
from sentry.seer.autofix.issue_summary import get_issue_summary
group = Group.objects.get(id=group_id)
diff --git a/src/sentry/tasks/check_am2_compatibility.py b/src/sentry/tasks/check_am2_compatibility.py
index e1ac9ab802fef2..ef391690a2b53d 100644
--- a/src/sentry/tasks/check_am2_compatibility.py
+++ b/src/sentry/tasks/check_am2_compatibility.py
@@ -296,11 +296,11 @@ class CheckStatus(Enum):
class CheckAM2Compatibility:
@classmethod
- def get_widget_url(cls, org_slug, dashboard_id, widget_id):
+ def get_widget_url(cls, org_slug, dashboard_id, widget_id) -> str:
return f"https://{org_slug}.sentry.io/organizations/{org_slug}/dashboard/{dashboard_id}/widget/{widget_id}/"
@classmethod
- def get_alert_url(cls, org_slug, alert_id):
+ def get_alert_url(cls, org_slug, alert_id) -> str:
return f"https://{org_slug}.sentry.io/organizations/{org_slug}/alerts/rules/details/{alert_id}/"
@classmethod
@@ -658,11 +658,11 @@ def run_compatibility_check(cls, org_id):
)
-def generate_cache_key_for_async_progress(org_id):
+def generate_cache_key_for_async_progress(org_id) -> str:
return f"ds::o:{org_id}:check_am2_compatibility_status"
-def generate_cache_key_for_async_result(org_id):
+def generate_cache_key_for_async_result(org_id) -> str:
return f"ds::o:{org_id}:check_am2_compatibility_results"
diff --git a/src/sentry/tasks/clear_expired_rulesnoozes.py b/src/sentry/tasks/clear_expired_rulesnoozes.py
index 765c785aaadbe7..138bfba6fe6e35 100644
--- a/src/sentry/tasks/clear_expired_rulesnoozes.py
+++ b/src/sentry/tasks/clear_expired_rulesnoozes.py
@@ -17,6 +17,6 @@
processing_deadline_duration=65,
),
)
-def clear_expired_rulesnoozes():
+def clear_expired_rulesnoozes() -> None:
rule_snooze_ids = RuleSnooze.objects.filter(until__lte=timezone.now()).values_list("id")[:1000]
RuleSnooze.objects.filter(id__in=rule_snooze_ids).delete()
diff --git a/src/sentry/tasks/clear_expired_snoozes.py b/src/sentry/tasks/clear_expired_snoozes.py
index e46ecac38357be..eed82547592c38 100644
--- a/src/sentry/tasks/clear_expired_snoozes.py
+++ b/src/sentry/tasks/clear_expired_snoozes.py
@@ -21,7 +21,7 @@
processing_deadline_duration=65,
),
)
-def clear_expired_snoozes():
+def clear_expired_snoozes() -> None:
groupsnooze_list = list(
GroupSnooze.objects.filter(until__lte=timezone.now()).values_list("id", "group", "until")[
:1000
diff --git a/src/sentry/tasks/ping.py b/src/sentry/tasks/ping.py
index a5c59f10925643..75e16c406effce 100644
--- a/src/sentry/tasks/ping.py
+++ b/src/sentry/tasks/ping.py
@@ -10,6 +10,6 @@
@instrumented_task(
name="sentry.tasks.send_ping", taskworker_config=TaskworkerConfig(namespace=selfhosted_tasks)
)
-def send_ping():
+def send_ping() -> None:
options.set("sentry:last_worker_ping", time())
options.set("sentry:last_worker_version", sentry.VERSION)
diff --git a/src/sentry/tasks/summaries/daily_summary.py b/src/sentry/tasks/summaries/daily_summary.py
index 99c001e1782768..bcf77c33763961 100644
--- a/src/sentry/tasks/summaries/daily_summary.py
+++ b/src/sentry/tasks/summaries/daily_summary.py
@@ -135,7 +135,7 @@ def prepare_summary_data(
duration: int,
organization_id: int,
users_to_send_to: list[int],
-):
+) -> None:
organization = Organization.objects.get(id=organization_id)
ctx = build_summary_data(
timestamp=timestamp, duration=duration, organization=organization, daily=True
@@ -145,7 +145,7 @@ def prepare_summary_data(
set_tag("report.available", report_is_available)
if not report_is_available:
logger.info("prepare_summary_data.skipping_empty", extra={"organization": organization.id})
- return
+ return None
with sentry_sdk.start_span(op="daily_summary.deliver_summary"):
deliver_summary(ctx=ctx, users=users_to_send_to)
@@ -279,7 +279,9 @@ def build_summary_data(
return ctx
-def build_top_projects_map(context: OrganizationReportContext, user_id: int):
+def build_top_projects_map(
+ context: OrganizationReportContext, user_id: int
+) -> dict[int, DailySummaryProjectContext]:
"""
Order the projects by which of the user's projects have the highest error count for the day
"""
@@ -304,7 +306,7 @@ def build_top_projects_map(context: OrganizationReportContext, user_id: int):
return top_projects_context_map
-def deliver_summary(ctx: OrganizationReportContext, users: list[int]):
+def deliver_summary(ctx: OrganizationReportContext, users: list[int]) -> None:
# TODO: change this to some setting for daily summary
user_ids = notifications_service.get_users_for_weekly_reports(
organization_id=ctx.organization.id, user_ids=users
diff --git a/src/sentry/tasks/symbolication.py b/src/sentry/tasks/symbolication.py
index f9f0970d150a52..0304cc6d9ee95f 100644
--- a/src/sentry/tasks/symbolication.py
+++ b/src/sentry/tasks/symbolication.py
@@ -157,7 +157,7 @@ def _continue_to_process_event(was_killswitched: bool = False) -> None:
"organization", Organization.objects.get_from_cache(id=project.organization_id)
)
- def on_symbolicator_request():
+ def on_symbolicator_request() -> None:
duration = time() - symbolication_start_time
if duration > settings.SYMBOLICATOR_PROCESS_EVENT_HARD_TIMEOUT:
raise SymbolicationTimeout
diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py
index 2466bdb2528c93..3a88b272d6efc4 100644
--- a/src/sentry/testutils/cases.py
+++ b/src/sentry/testutils/cases.py
@@ -2684,7 +2684,7 @@ def tearDownClass(cls):
super().tearDownClass()
cls._project_state_cache = None
- def setup_initial_state(self):
+ def setup_initial_state(self) -> None:
# Add code here that will run before we roll back the database to the `migrate_from`
# migration. This can be useful to allow us to use the various `self.create_*` convenience
# methods.
@@ -2692,7 +2692,7 @@ def setup_initial_state(self):
# database operations are required.
pass
- def setup_before_migration(self, apps):
+ def setup_before_migration(self, apps) -> None:
# Add code here to run after we have rolled the database back to the `migrate_from`
# migration. This code must use `apps` to create any database models, and not directly
# access Django models.
diff --git a/src/sentry/testutils/helpers/alert_rule.py b/src/sentry/testutils/helpers/alert_rule.py
index 1479cc6b5cbf1b..3ba4ce3bb3fa7c 100644
--- a/src/sentry/testutils/helpers/alert_rule.py
+++ b/src/sentry/testutils/helpers/alert_rule.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from contextlib import contextmanager
from dataclasses import dataclass
@@ -19,7 +20,7 @@ def restore(self) -> None:
@classmethod
@contextmanager
- def registry_patched(cls):
+ def registry_patched(cls) -> Generator[None]:
suspended = cls.suspend()
try:
yield
diff --git a/src/sentry/testutils/helpers/analytics.py b/src/sentry/testutils/helpers/analytics.py
index 16b1dcaa8a5bc8..deccae27de7a2f 100644
--- a/src/sentry/testutils/helpers/analytics.py
+++ b/src/sentry/testutils/helpers/analytics.py
@@ -1,4 +1,5 @@
import contextlib
+from collections.abc import Generator
from dataclasses import fields
from unittest.mock import MagicMock, patch
@@ -11,7 +12,7 @@ def assert_event_equal(
check_uuid: bool = False,
check_datetime: bool = False,
exclude_fields: list[str] | None = None,
-):
+) -> None:
if type(expected_event) is not type(recorded_event):
raise AssertionError(
f"Expected event type {type(expected_event)} but got {type(recorded_event)}"
@@ -34,7 +35,7 @@ def assert_analytics_events_recorded(
check_uuid: bool = False,
check_datetime: bool = False,
exclude_fields: list[str] | None = None,
-):
+) -> None:
recorded_events = [call.args[0] for call in mock_record.call_args_list]
assert len(expected_events) == len(recorded_events)
for expected_event, recorded_event in zip(expected_events, recorded_events):
@@ -53,7 +54,7 @@ def assert_last_analytics_event(
check_uuid: bool = False,
check_datetime: bool = False,
exclude_fields: list[str] | None = None,
-):
+) -> None:
assert_event_equal(
expected_event,
get_last_analytics_event(mock_record),
@@ -69,7 +70,7 @@ def assert_any_analytics_event(
check_uuid: bool = False,
check_datetime: bool = False,
exclude_fields: list[str] | None = None,
-):
+) -> None:
recorded_events = [call.args[0] for call in mock_record.call_args_list]
for recorded_event in recorded_events:
try:
@@ -89,7 +90,7 @@ def assert_analytics_events(
check_uuid: bool = False,
check_datetime: bool = False,
exclude_fields: list[str] | None = None,
-):
+) -> Generator[None]:
"""
Context manager that allows you to track analytics events recorded during the context.
diff --git a/src/sentry/testutils/helpers/notifications.py b/src/sentry/testutils/helpers/notifications.py
index 66def3c7f6cab6..42a758faa395b5 100644
--- a/src/sentry/testutils/helpers/notifications.py
+++ b/src/sentry/testutils/helpers/notifications.py
@@ -34,7 +34,7 @@ def get_subject(self, context: Mapping[str, Any] | None = None) -> str:
def determine_recipients(self) -> list[Actor]:
return []
- def build_attachment_title(self, *args):
+ def build_attachment_title(self, *args) -> str:
return "My Title"
def get_title_link(self, *args):
@@ -87,10 +87,10 @@ def get_notification_title(
some_value = context["some_field"]
return f"Notification Title with {some_value}"
- def build_notification_footer(self, *args):
+ def build_notification_footer(self, *args) -> str:
return "Notification Footer"
- def get_message_description(self, recipient: Actor, provider: ExternalProviders):
+ def get_message_description(self, recipient: Actor, provider: ExternalProviders) -> str:
return "Message Description"
def get_title_link(self, *args):
diff --git a/src/sentry/testutils/metrics_backend.py b/src/sentry/testutils/metrics_backend.py
index dc0b1a12aeec18..0f90ed61977fc3 100644
--- a/src/sentry/testutils/metrics_backend.py
+++ b/src/sentry/testutils/metrics_backend.py
@@ -14,7 +14,9 @@ class GenericMetricsTestMixIn:
retention_days = 90
unit = "millisecond"
- def get_mri(self, metric_name: str, metric_type: str, use_case_id: UseCaseID, unit: str | None):
+ def get_mri(
+ self, metric_name: str, metric_type: str, use_case_id: UseCaseID, unit: str | None
+ ) -> str:
mri_string = build_mri(metric_name, metric_type, use_case_id, unit)
return mri_string
diff --git a/src/sentry/testutils/outbox.py b/src/sentry/testutils/outbox.py
index 89ef0d23a53402..6a6f6970fdd7ee 100644
--- a/src/sentry/testutils/outbox.py
+++ b/src/sentry/testutils/outbox.py
@@ -56,7 +56,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any:
raise OutboxRecursionLimitError
-def assert_no_webhook_payloads():
+def assert_no_webhook_payloads() -> None:
messages = WebhookPayload.objects.filter().count()
assert messages == 0, "No webhookpayload messages should be created"
@@ -65,7 +65,7 @@ def assert_webhook_payloads_for_mailbox(
request: WSGIRequest,
mailbox_name: str,
region_names: list[str],
-):
+) -> None:
"""
A test method for asserting that a webhook payload is properly queued for
the given request
diff --git a/src/sentry/testutils/pytest/fixtures.py b/src/sentry/testutils/pytest/fixtures.py
index ab40449e5b9534..c91f593771c221 100644
--- a/src/sentry/testutils/pytest/fixtures.py
+++ b/src/sentry/testutils/pytest/fixtures.py
@@ -196,7 +196,7 @@ def inner(x):
class ReadableYamlDumper(yaml.dumper.SafeDumper):
"""Disable pyyaml aliases for identical object references"""
- def ignore_aliases(self, data):
+ def ignore_aliases(self, data) -> bool:
return True
diff --git a/src/sentry/testutils/pytest/relay.py b/src/sentry/testutils/pytest/relay.py
index 56f035a37e527b..a7da2597bb6eff 100644
--- a/src/sentry/testutils/pytest/relay.py
+++ b/src/sentry/testutils/pytest/relay.py
@@ -24,7 +24,7 @@
)
-def _relay_server_container_name():
+def _relay_server_container_name() -> str:
return "sentry_test_relay_server"
diff --git a/src/sentry/testutils/region.py b/src/sentry/testutils/region.py
index 6875ba6ad64158..b551f892b014ab 100644
--- a/src/sentry/testutils/region.py
+++ b/src/sentry/testutils/region.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from collections.abc import Collection, Sequence
+from collections.abc import Collection, Generator, Sequence
from contextlib import contextmanager
from dataclasses import dataclass
@@ -29,7 +29,7 @@ def swap_state(
self,
regions: Sequence[Region],
local_region: Region | None = None,
- ):
+ ) -> Generator[None]:
monolith_region = regions[0]
new_state = _TemporaryRegionDirectoryState(
regions=self.regions if regions is None else frozenset(regions),
@@ -50,13 +50,13 @@ def swap_state(
self._tmp_state = old_state
@contextmanager
- def swap_to_default_region(self):
+ def swap_to_default_region(self) -> Generator[None]:
"""Swap to an arbitrary region when entering region mode."""
with override_settings(SENTRY_REGION=self._tmp_state.default_region.name):
yield
@contextmanager
- def swap_to_region_by_name(self, region_name: str):
+ def swap_to_region_by_name(self, region_name: str) -> Generator[None]:
"""Swap to the specified region when entering region mode."""
region = self.get_by_name(region_name)
@@ -84,7 +84,9 @@ def get_test_env_directory() -> TestEnvRegionDirectory:
@contextmanager
-def override_regions(regions: Sequence[Region], local_region: Region | None = None):
+def override_regions(
+ regions: Sequence[Region], local_region: Region | None = None
+) -> Generator[None]:
"""Override the global set of existing regions.
The overriding value takes the place of the `SENTRY_REGION_CONFIG` setting and
diff --git a/src/sentry/uptime/apps.py b/src/sentry/uptime/apps.py
index a36058cb3931a4..3abce2eb9c7d11 100644
--- a/src/sentry/uptime/apps.py
+++ b/src/sentry/uptime/apps.py
@@ -4,5 +4,5 @@
class Config(AppConfig):
name = "sentry.uptime"
- def ready(self):
+ def ready(self) -> None:
from sentry.uptime.endpoints import serializers # NOQA
diff --git a/src/sentry/uptime/detectors/ranking.py b/src/sentry/uptime/detectors/ranking.py
index 7a1fc3bb100d51..b3c7561ab64d8f 100644
--- a/src/sentry/uptime/detectors/ranking.py
+++ b/src/sentry/uptime/detectors/ranking.py
@@ -142,7 +142,7 @@ def get_project_base_url_rank_key(project: Project) -> str:
return f"p:r:{project.id}"
-def build_organization_bucket_key(bucket: int):
+def build_organization_bucket_key(bucket: int) -> str:
return f"o:{bucket}"
diff --git a/src/sentry/uptime/detectors/result_handler.py b/src/sentry/uptime/detectors/result_handler.py
index 510d6f8f9051e1..afdb983517e30d 100644
--- a/src/sentry/uptime/detectors/result_handler.py
+++ b/src/sentry/uptime/detectors/result_handler.py
@@ -48,7 +48,7 @@ def handle_onboarding_result(
uptime_subscription: UptimeSubscription,
result: CheckResult,
metric_tags: dict[str, str],
-):
+) -> None:
if result["status"] == CHECKSTATUS_FAILURE:
redis = _get_cluster()
key = build_onboarding_failure_key(detector)
diff --git a/src/sentry/uptime/grouptype.py b/src/sentry/uptime/grouptype.py
index 40a456074e8180..9ca3e021af66d7 100644
--- a/src/sentry/uptime/grouptype.py
+++ b/src/sentry/uptime/grouptype.py
@@ -36,7 +36,7 @@
logger = logging.getLogger(__name__)
-def resolve_uptime_issue(detector: Detector):
+def resolve_uptime_issue(detector: Detector) -> None:
"""
Sends an update to the issue platform to resolve the uptime issue for this
monitor.
diff --git a/src/sentry/uptime/rdap/tasks.py b/src/sentry/uptime/rdap/tasks.py
index 68ebee3fd2f5e4..66a3e5009bce2b 100644
--- a/src/sentry/uptime/rdap/tasks.py
+++ b/src/sentry/uptime/rdap/tasks.py
@@ -25,7 +25,7 @@
processing_deadline_duration=30,
),
)
-def fetch_subscription_rdap_info(subscription_id: int):
+def fetch_subscription_rdap_info(subscription_id: int) -> None:
"""
Fetches the RDAP network details for a subscriptions host and populates the
host_provider fields in the subscription.
@@ -35,17 +35,17 @@ def fetch_subscription_rdap_info(subscription_id: int):
except UptimeSubscription.DoesNotExist:
# Nothing to do if this subscription was removed before we could fetch
# the rdap details.
- return
+ return None
parsed_url = urlparse(sub.url)
if parsed_url.hostname is None:
logger.warning("rdap_url_missing_hostname", extra={"url": sub.url})
- return
+ return None
details = resolve_rdap_network_details(parsed_url.hostname)
if details is None:
logger.info("rdap_resolve_network_details_failure", extra={"url": sub.url})
- return
+ return None
sub.update(host_provider_id=details["handle"], host_provider_name=details["owner_name"])
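For context on the bare `return` → `return None` changes above: under a `-> None` annotation the two forms are interchangeable at runtime and for the type checker; the explicit value just keeps the early exits uniform. A trivial sketch with an illustrative function name:

def log_or_skip(value: int | None) -> None:
    if value is None:
        # equivalent to a bare "return"; spelled out to match the annotation
        return None
    print(value)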
diff --git a/src/sentry/utils/samples.py b/src/sentry/utils/samples.py
index 9c0e76dd545671..5927e34e6ae9c3 100644
--- a/src/sentry/utils/samples.py
+++ b/src/sentry/utils/samples.py
@@ -105,13 +105,13 @@ def generate_user(username=None, email=None, ip_address=None, id=None):
def load_data(
- platform,
- default=None,
- sample_name=None,
+ platform: str,
+ default: str | None = None,
+ sample_name: str | None = None,
timestamp=None,
start_timestamp=None,
- trace=None,
- span_id=None,
+ trace: str | None = None,
+ span_id: str | None = None,
spans=None,
trace_context=None,
fingerprint=None,
diff --git a/src/sentry/utils/session_store.py b/src/sentry/utils/session_store.py
index d415a509ef6777..b8390171e12896 100644
--- a/src/sentry/utils/session_store.py
+++ b/src/sentry/utils/session_store.py
@@ -60,7 +60,7 @@ def _client(self):
return clusters.get("default").get_local_client_for_key(self.redis_key)
@property
- def session_key(self):
+ def session_key(self) -> str:
return f"store:{self.prefix}"
@property
diff --git a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
index d6c15461497929..ef536d12211c41 100644
--- a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
+++ b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
@@ -74,9 +74,9 @@ def get_context(self, request, incident_trigger_mock, user_option_mock):
)
@property
- def html_template(self):
+ def html_template(self) -> str:
return "sentry/emails/incidents/trigger.html"
@property
- def text_template(self):
+ def text_template(self) -> str:
return "sentry/emails/incidents/trigger.txt"
diff --git a/src/sentry/web/frontend/debug/debug_weekly_report.py b/src/sentry/web/frontend/debug/debug_weekly_report.py
index 67c42baf2eb1a6..de4ca6b517c898 100644
--- a/src/sentry/web/frontend/debug/debug_weekly_report.py
+++ b/src/sentry/web/frontend/debug/debug_weekly_report.py
@@ -118,9 +118,9 @@ def get_context(self, request):
return render_template_context(ctx, None)
@property
- def html_template(self):
+ def html_template(self) -> str:
return "sentry/emails/reports/body.html"
@property
- def text_template(self):
+ def text_template(self) -> str:
return "sentry/emails/reports/body.txt"
diff --git a/src/sentry/web/frontend/disabled_member_view.py b/src/sentry/web/frontend/disabled_member_view.py
index 7e8c03d5c5b28b..30270fddc9103c 100644
--- a/src/sentry/web/frontend/disabled_member_view.py
+++ b/src/sentry/web/frontend/disabled_member_view.py
@@ -10,7 +10,7 @@
@control_silo_view
class DisabledMemberView(ReactPageView):
- def is_member_disabled_from_limit(self, request: object, organization):
+ def is_member_disabled_from_limit(self, request: object, organization) -> bool:
return False
def handle(self, request: HttpRequest, organization, **kwargs) -> HttpResponse:
diff --git a/src/sentry/workflow_engine/apps.py b/src/sentry/workflow_engine/apps.py
index abdb88a6e562d9..7c5e897792b356 100644
--- a/src/sentry/workflow_engine/apps.py
+++ b/src/sentry/workflow_engine/apps.py
@@ -4,7 +4,7 @@
class Config(AppConfig):
name = "sentry.workflow_engine"
- def ready(self):
+ def ready(self) -> None:
# Import our base DataConditionHandlers for the workflow engine platform
import sentry.workflow_engine.handlers # NOQA
from sentry.workflow_engine.endpoints import serializers # NOQA
diff --git a/src/sentry/workflow_engine/migration_helpers/alert_rule.py b/src/sentry/workflow_engine/migration_helpers/alert_rule.py
index d83523aa37a022..2b0af74c8f6443 100644
--- a/src/sentry/workflow_engine/migration_helpers/alert_rule.py
+++ b/src/sentry/workflow_engine/migration_helpers/alert_rule.py
@@ -512,7 +512,7 @@ def create_detector(
def update_detector(
alert_rule: AlertRule,
detector: Detector,
-):
+) -> Detector:
if detector.workflow_condition_group is None:
raise MissingDataConditionGroup
detector_field_values = get_detector_field_values(alert_rule, detector.workflow_condition_group)
diff --git a/src/sentry_plugins/bitbucket/plugin.py b/src/sentry_plugins/bitbucket/plugin.py
index f400b6bd4e3e99..87961a280d9eb3 100644
--- a/src/sentry_plugins/bitbucket/plugin.py
+++ b/src/sentry_plugins/bitbucket/plugin.py
@@ -87,7 +87,7 @@ def get_group_urls(self):
)
]
- def get_url_module(self):
+ def get_url_module(self) -> str:
return "sentry_plugins.bitbucket.urls"
def is_configured(self, project) -> bool:
diff --git a/src/sentry_plugins/github/plugin.py b/src/sentry_plugins/github/plugin.py
index 13a74e8c4c9f80..43dc9ea42e6f11 100644
--- a/src/sentry_plugins/github/plugin.py
+++ b/src/sentry_plugins/github/plugin.py
@@ -108,7 +108,7 @@ def get_group_urls(self):
)
]
- def get_url_module(self):
+ def get_url_module(self) -> str:
return "sentry_plugins.github.urls"
def is_configured(self, project) -> bool:
diff --git a/src/sentry_plugins/gitlab/plugin.py b/src/sentry_plugins/gitlab/plugin.py
index 79cce39e59edba..b9490a186a9614 100644
--- a/src/sentry_plugins/gitlab/plugin.py
+++ b/src/sentry_plugins/gitlab/plugin.py
@@ -100,7 +100,7 @@ def get_allowed_assignees(self, request: Request, group):
return (("", "(Unassigned)"),) + users
- def get_new_issue_title(self, **kwargs):
+ def get_new_issue_title(self, **kwargs) -> str:
return "Create GitLab Issue"
def get_client(self, project):
diff --git a/src/sentry_plugins/heroku/plugin.py b/src/sentry_plugins/heroku/plugin.py
index 96d89ea67d3690..8b08d9ff3f66d8 100644
--- a/src/sentry_plugins/heroku/plugin.py
+++ b/src/sentry_plugins/heroku/plugin.py
@@ -149,16 +149,16 @@ class HerokuPlugin(CorePluginMixin, ReleaseTrackingPlugin):
)
]
- def can_enable_for_projects(self):
+ def can_enable_for_projects(self) -> bool:
return True
- def can_configure_for_project(self, project):
+ def can_configure_for_project(self, project) -> bool:
return True
- def has_project_conf(self):
+ def has_project_conf(self) -> bool:
return True
- def get_conf_key(self):
+ def get_conf_key(self) -> str:
return "heroku"
def get_config(self, project, user=None, initial=None, add_additional_fields: bool = False):
diff --git a/src/sentry_plugins/redmine/plugin.py b/src/sentry_plugins/redmine/plugin.py
index 2e1cfcf7438998..afed96ea48bcaa 100644
--- a/src/sentry_plugins/redmine/plugin.py
+++ b/src/sentry_plugins/redmine/plugin.py
@@ -56,13 +56,13 @@ def __init__(self):
self.client_errors = []
self.fields = []
- def has_project_conf(self):
+ def has_project_conf(self) -> bool:
return True
def is_configured(self, project) -> bool:
return all(self.get_option(k, project) for k in ("host", "key", "project_id"))
- def get_new_issue_title(self, **kwargs):
+ def get_new_issue_title(self, **kwargs) -> str:
return "Create Redmine Task"
def get_initial_form_data(self, request: Request, group, event, **kwargs):
diff --git a/src/sentry_plugins/sessionstack/plugin.py b/src/sentry_plugins/sessionstack/plugin.py
index 7015a56b5f41ce..8185cd7341e3b7 100644
--- a/src/sentry_plugins/sessionstack/plugin.py
+++ b/src/sentry_plugins/sessionstack/plugin.py
@@ -43,7 +43,7 @@ class SessionStackPlugin(CorePluginMixin, Plugin2):
)
]
- def has_project_conf(self):
+ def has_project_conf(self) -> bool:
return True
def get_custom_contexts(self):
@@ -58,7 +58,7 @@ def reset_options(self, project=None):
self.set_option("player_url", "", project)
self.set_option("api_url", "", project)
- def is_testable(self, **kwargs):
+ def is_testable(self, **kwargs) -> bool:
return False
def validate_config(self, project, config, actor=None):
diff --git a/src/sentry_plugins/splunk/plugin.py b/src/sentry_plugins/splunk/plugin.py
index 7a3fb384b7eef6..154c9ce871216e 100644
--- a/src/sentry_plugins/splunk/plugin.py
+++ b/src/sentry_plugins/splunk/plugin.py
@@ -168,7 +168,7 @@ def initialize_variables(self, event):
self.project_source = self.get_option("source", event.project) or "sentry"
- def get_rl_key(self, event):
+ def get_rl_key(self, event) -> str:
return f"{self.conf_key}:{md5_text(self.project_token).hexdigest()}"
def is_ratelimited(self, event):
diff --git a/tests/acceptance/chartcuterie/test_image_block_builder.py b/tests/acceptance/chartcuterie/test_image_block_builder.py
index 40199fd04d32cf..a28c494aea2371 100644
--- a/tests/acceptance/chartcuterie/test_image_block_builder.py
+++ b/tests/acceptance/chartcuterie/test_image_block_builder.py
@@ -25,11 +25,11 @@ class TestSlackImageBlockBuilder(
ProfilesSnubaTestCase,
OccurrenceTestMixin,
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
cache.clear()
- def _create_endpoint_regression_issue(self):
+ def _create_endpoint_regression_issue(self) -> Group:
for i in range(10):
event_id = uuid.uuid4().hex
_ = self.process_occurrence(
diff --git a/tests/acceptance/conftest.py b/tests/acceptance/conftest.py
index 0c9dfccb6fab3e..ea14f964faaaa4 100644
--- a/tests/acceptance/conftest.py
+++ b/tests/acceptance/conftest.py
@@ -3,10 +3,12 @@
import sys
import time
+from pytest import Config
+
from sentry.utils import json
-def pytest_configure(config):
+def pytest_configure(config: Config) -> None:
"""
Generate frontend assets before running any acceptance tests
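The conftest change above types the pytest_configure hook with pytest's public Config class. A minimal standalone conftest sketch of the same hook signature (the marker registered here is purely illustrative):

from pytest import Config


def pytest_configure(config: Config) -> None:
    # pytest passes its Config object into this hook; the annotation lets mypy
    # check attribute access such as config.addinivalue_line / config.getoption.
    config.addinivalue_line("markers", "acceptance: illustrative marker for this sketch")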
diff --git a/tests/acceptance/test_accept_organization_invite.py b/tests/acceptance/test_accept_organization_invite.py
index 2176f641177bd5..7ff54f4519655b 100644
--- a/tests/acceptance/test_accept_organization_invite.py
+++ b/tests/acceptance/test_accept_organization_invite.py
@@ -12,7 +12,7 @@
# See the accept_organization_invite.py#get_invite_state logic
@no_silo_test
class AcceptOrganizationInviteTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
@@ -25,7 +25,7 @@ def setUp(self):
teams=[self.team],
)
- def _sign_in_user(self, email, password):
+ def _sign_in_user(self, email: str, password: str) -> None:
"""
Helper method to sign in a user with given email and password.
"""
diff --git a/tests/acceptance/test_account_settings.py b/tests/acceptance/test_account_settings.py
index b7665e696534b7..042ce780f4a499 100644
--- a/tests/acceptance/test_account_settings.py
+++ b/tests/acceptance/test_account_settings.py
@@ -4,7 +4,7 @@
@no_silo_test
class AccountSettingsTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger Rowdy Tiger Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_api.py b/tests/acceptance/test_api.py
index 35261c03ba5ef4..0f9bd2d5849e02 100644
--- a/tests/acceptance/test_api.py
+++ b/tests/acceptance/test_api.py
@@ -5,7 +5,7 @@
@no_silo_test
class ApiApplicationTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.org = self.create_organization(name="Rowdy Tiger Rowdy Tiger Rowdy Tiger", owner=None)
self.project = self.create_project(
diff --git a/tests/acceptance/test_auth.py b/tests/acceptance/test_auth.py
index e9f82f5378c14d..e32dcdee68bdb1 100644
--- a/tests/acceptance/test_auth.py
+++ b/tests/acceptance/test_auth.py
@@ -6,7 +6,7 @@
@no_silo_test
class AuthTest(AcceptanceTestCase):
- def enter_auth(self, username, password):
+ def enter_auth(self, username: str, password: str) -> None:
self.browser.get("/auth/login/")
self.browser.driver.execute_script(
"document.addEventListener('invalid', function(e) { e.preventDefault(); }, true);"
diff --git a/tests/acceptance/test_create_organization.py b/tests/acceptance/test_create_organization.py
index 3ac8608f7bca21..cfe209d8dd1709 100644
--- a/tests/acceptance/test_create_organization.py
+++ b/tests/acceptance/test_create_organization.py
@@ -6,7 +6,7 @@
@no_silo_test
class CreateOrganizationTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.login_as(self.user)
diff --git a/tests/acceptance/test_create_project.py b/tests/acceptance/test_create_project.py
index d477c301c63590..b53603508938b7 100644
--- a/tests/acceptance/test_create_project.py
+++ b/tests/acceptance/test_create_project.py
@@ -6,14 +6,14 @@
@no_silo_test
class CreateProjectTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=self.user)
self.login_as(self.user)
self.path = f"/organizations/{self.org.slug}/projects/new/"
- def load_project_creation_page(self):
+ def load_project_creation_page(self) -> None:
self.browser.get(self.path)
self.browser.wait_until('[aria-label="Create Project"]')
diff --git a/tests/acceptance/test_create_team.py b/tests/acceptance/test_create_team.py
index ed3e2a917bc727..b05338500d1738 100644
--- a/tests/acceptance/test_create_team.py
+++ b/tests/acceptance/test_create_team.py
@@ -7,7 +7,7 @@
@no_silo_test
class CreateTeamTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_emails.py b/tests/acceptance/test_emails.py
index 9a97613c9a54f8..f1ab8cf1d8e5e0 100644
--- a/tests/acceptance/test_emails.py
+++ b/tests/acceptance/test_emails.py
@@ -73,7 +73,7 @@ def replace_amp(text: str) -> str:
@no_silo_test
class EmailTestCase(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
create_default_projects()
# This email address is required to match FIXTURES.
diff --git a/tests/acceptance/test_error_page_embed.py b/tests/acceptance/test_error_page_embed.py
index cd751a6a94b88e..6f741ad7fb4b5b 100644
--- a/tests/acceptance/test_error_page_embed.py
+++ b/tests/acceptance/test_error_page_embed.py
@@ -9,7 +9,7 @@
@no_silo_test
class ErrorPageEmbedTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project = self.create_project()
self.key = self.create_project_key(project=self.project)
@@ -20,7 +20,7 @@ def setUp(self):
quote(self.key.dsn_public),
)
- def wait_for_error_page_embed(self):
+ def wait_for_error_page_embed(self) -> None:
script = f"""
const script = window.document.createElement('script');
script.async = true;
@@ -36,7 +36,7 @@ def wait_for_error_page_embed(self):
self.browser.driver.execute_script(script)
self.browser.wait_until(".sentry-error-embed")
- def wait_for_reportdialog_closed_message(self):
+ def wait_for_reportdialog_closed_message(self) -> None:
self.browser.wait_until_script_execution(
"""return window.__error_page_embed_received_message__ === '__sentry_reportdialog_closed__'"""
)
diff --git a/tests/acceptance/test_explore_logs.py b/tests/acceptance/test_explore_logs.py
index 46d45904f8d2f0..e55630b7f4ff74 100644
--- a/tests/acceptance/test_explore_logs.py
+++ b/tests/acceptance/test_explore_logs.py
@@ -20,7 +20,7 @@
class ExploreLogsTest(AcceptanceTestCase, SnubaTestCase, OurLogTestCase):
viewname = "sentry-api-0-organization-events"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.start = self.day_ago = before_now(days=1).replace(
hour=10, minute=0, second=0, microsecond=0
diff --git a/tests/acceptance/test_explore_spans.py b/tests/acceptance/test_explore_spans.py
index 631fa8cab6bbeb..f18caf8e1b9e60 100644
--- a/tests/acceptance/test_explore_spans.py
+++ b/tests/acceptance/test_explore_spans.py
@@ -1,5 +1,5 @@
from datetime import timedelta
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
from fixtures.page_objects.explore_spans import ExploreSpansPage
from sentry.testutils.cases import AcceptanceTestCase, SnubaTestCase, SpanTestCase
@@ -15,7 +15,7 @@
class ExploreSpansTest(AcceptanceTestCase, SpanTestCase, SnubaTestCase):
viewname = "sentry-api-0-organization-events"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.start = self.day_ago = before_now(days=1).replace(
hour=10, minute=0, second=0, microsecond=0
@@ -37,7 +37,7 @@ def setUp(self):
self.dismiss_assistant(which="tour.explore.spans")
@patch("django.utils.timezone.now")
- def test_spans_table_loads_all_events(self, mock_now) -> None:
+ def test_spans_table_loads_all_events(self, mock_now: MagicMock) -> None:
mock_now.return_value = self.start
assert (
diff --git a/tests/acceptance/test_incidents.py b/tests/acceptance/test_incidents.py
index 8f5d7f588ab4ac..7e42b80ece2042 100644
--- a/tests/acceptance/test_incidents.py
+++ b/tests/acceptance/test_incidents.py
@@ -13,7 +13,7 @@
@no_silo_test
class OrganizationIncidentsListTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
self.path = f"/organizations/{self.organization.slug}/alerts/"
diff --git a/tests/acceptance/test_issue_details.py b/tests/acceptance/test_issue_details.py
index 85febfdeaa56b1..dfaa0a5d1c636e 100644
--- a/tests/acceptance/test_issue_details.py
+++ b/tests/acceptance/test_issue_details.py
@@ -2,6 +2,7 @@
from unittest.mock import patch
from fixtures.page_objects.issue_details import IssueDetailsPage
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import AcceptanceTestCase, SnubaTestCase
from sentry.testutils.silo import no_silo_test
from sentry.utils.samples import load_data
@@ -11,7 +12,7 @@
@no_silo_test
class IssueDetailsTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
patcher = patch("django.utils.timezone.now", return_value=now)
patcher.start()
@@ -24,7 +25,14 @@ def setUp(self):
self.page = IssueDetailsPage(self.browser, self.client)
self.dismiss_assistant()
- def create_sample_event(self, platform, default=None, sample_name=None, time=None, tags=None):
+ def create_sample_event(
+ self,
+ platform: str,
+ default: str | None = None,
+ sample_name: str | None = None,
+ time: datetime | None = None,
+ tags: list[list[str]] | None = None,
+ ) -> Event:
event_data = load_data(platform, default=default, sample_name=sample_name)
event_data["event_id"] = "d964fdbd649a4cf8bfc35d18082b6b0e"
@@ -56,6 +64,7 @@ def test_python_event(self) -> None:
]
self.create_sample_event(platform="python", tags=tags)
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
# Wait for tag bars to load
@@ -63,45 +72,55 @@ def test_python_event(self) -> None:
def test_python_rawbody_event(self) -> None:
event = self.create_sample_event(platform="python-rawbody")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.browser.move_to('[data-test-id="rich-http-content-body-section-pre"]')
def test_python_formdata_event(self) -> None:
event = self.create_sample_event(platform="python-formdata")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_pii_tooltips(self) -> None:
event = self.create_sample_event(platform="pii-tooltips")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_cocoa_event(self) -> None:
event = self.create_sample_event(platform="cocoa")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_cocoa_event_frame_line_hover(self) -> None:
event = self.create_sample_event(platform="cocoa")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.browser.wait_until_not(".loading")
self.browser.move_to(".traceback li:nth-child(2)")
def test_unity_event(self) -> None:
event = self.create_sample_event(default="unity", platform="csharp")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_android_event(self) -> None:
event = self.create_sample_event(platform="android")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_android_ndk_event(self) -> None:
event = self.create_sample_event(default="android-ndk", platform="android-ndk")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_aspnetcore_event(self) -> None:
event = self.create_sample_event(default="aspnetcore", platform="csharp")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_javascript_specific_event(self) -> None:
event = self.create_sample_event(platform="javascript")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.browser.click('label[data-test-id="curl"]')
@@ -109,26 +128,32 @@ def test_javascript_specific_event(self) -> None:
def test_rust_event(self) -> None:
# TODO: This should become its own "rust" platform type
event = self.create_sample_event(platform="native", sample_name="Rust")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_cordova_event(self) -> None:
event = self.create_sample_event(platform="cordova")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_stripped_event(self) -> None:
event = self.create_sample_event(platform="pii")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_empty_exception(self) -> None:
event = self.create_sample_event(platform="empty-exception")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_empty_stacktrace(self) -> None:
event = self.create_sample_event(platform="empty-stacktrace")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_activity_page(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.go_to_subtab("activity")
@@ -136,42 +161,50 @@ def test_activity_page(self) -> None:
def test_resolved(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.resolve_issue()
def test_archived(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.archive_issue()
def test_exception_and_no_threads_event(self) -> None:
event = self.create_sample_event(platform="exceptions-and-no-threads")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_exception_with_stack_trace_and_crashed_thread_without_stack_trace_event(self) -> None:
event = self.create_sample_event(
platform="exception-with-stack-trace-and-crashed-thread-without-stack-trace"
)
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_exception_without_stack_trace_and_crashed_thread_with_stack_trace_event(self) -> None:
event = self.create_sample_event(
platform="exception-without-stack-trace-and-crashed-thread-with-stack-trace"
)
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_exception_with_stack_trace_and_crashed_thread_with_stack_trace_event(self) -> None:
event = self.create_sample_event(
platform="exception-with-stack-trace-and-crashed-thread-with-stack-trace"
)
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_python_invalid_json_error(self) -> None:
event = self.create_sample_event(default="python-invalid-json-error", platform="native")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
def test_exception_with_address_instruction(self) -> None:
event = self.create_sample_event(
default="exception-with-address-instruction", platform="cocoa"
)
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
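The repeated `assert event.group is not None` lines added above are type narrowing: the event's `group` attribute is presumably typed as optional, so the assert convinces mypy that `.id` is safe to access. A standalone sketch of the same pattern; the classes here are illustrative stand-ins, not Sentry's models:

from dataclasses import dataclass


@dataclass
class Group:  # illustrative stand-in
    id: int


@dataclass
class Event:  # illustrative stand-in with an optional group
    group: Group | None


def visit_issue(event: Event) -> int:
    assert event.group is not None  # narrows Group | None to Group for the type checker
    return event.group.id


print(visit_issue(Event(group=Group(id=1))))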
diff --git a/tests/acceptance/test_issue_details_workflow.py b/tests/acceptance/test_issue_details_workflow.py
index 6dd6092213e43d..e82a43ac8106a6 100644
--- a/tests/acceptance/test_issue_details_workflow.py
+++ b/tests/acceptance/test_issue_details_workflow.py
@@ -4,6 +4,7 @@
from fixtures.page_objects.issue_details import IssueDetailsPage
from sentry.models.groupinbox import GroupInboxReason, add_group_to_inbox
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import AcceptanceTestCase, SnubaTestCase
from sentry.testutils.silo import no_silo_test
from sentry.utils.samples import load_data
@@ -11,7 +12,7 @@
@no_silo_test
class IssueDetailsWorkflowTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
@@ -23,7 +24,9 @@ def setUp(self):
self.page = IssueDetailsPage(self.browser, self.client)
self.dismiss_assistant()
- def create_sample_event(self, platform, default=None, sample_name=None):
+ def create_sample_event(
+ self, platform: str, default: str | None = None, sample_name: str | None = None
+ ) -> Event:
event_data = load_data(platform, default=default, sample_name=sample_name)
event_data["event_id"] = "d964fdbd649a4cf8bfc35d18082b6b0e"
event = self.store_event(
@@ -37,6 +40,7 @@ def create_sample_event(self, platform, default=None, sample_name=None):
def test_resolve_basic(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.resolve_issue()
self.wait_for_loading()
@@ -47,6 +51,7 @@ def test_resolve_basic(self) -> None:
def test_archive_basic(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.archive_issue()
self.wait_for_loading()
@@ -57,6 +62,7 @@ def test_archive_basic(self) -> None:
def test_bookmark(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.bookmark_issue()
self.wait_for_loading()
@@ -67,6 +73,7 @@ def test_bookmark(self) -> None:
def test_assign_issue(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue(self.org.slug, event.group.id)
self.page.assign_to(self.user.email)
@@ -76,6 +83,7 @@ def test_assign_issue(self) -> None:
def test_create_comment(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
self.page.visit_issue_activity(self.org.slug, event.group.id)
form = self.page.find_comment_form()
@@ -86,6 +94,7 @@ def test_create_comment(self) -> None:
def test_mark_reviewed(self) -> None:
event = self.create_sample_event(platform="python")
+ assert event.group is not None
add_group_to_inbox(event.group, GroupInboxReason.NEW)
self.page.visit_issue(self.org.slug, event.group.id)
self.page.mark_reviewed()
diff --git a/tests/acceptance/test_issue_saved_searches.py b/tests/acceptance/test_issue_saved_searches.py
index 4cab6cfd53243d..0d1742bf31d14a 100644
--- a/tests/acceptance/test_issue_saved_searches.py
+++ b/tests/acceptance/test_issue_saved_searches.py
@@ -13,7 +13,7 @@
@no_silo_test
class OrganizationGroupIndexTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
diff --git a/tests/acceptance/test_issue_tag_values.py b/tests/acceptance/test_issue_tag_values.py
index 985fcf782758ea..e7c645db5ce6af 100644
--- a/tests/acceptance/test_issue_tag_values.py
+++ b/tests/acceptance/test_issue_tag_values.py
@@ -1,4 +1,5 @@
from fixtures.page_objects.issue_details import IssueDetailsPage
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import AcceptanceTestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now
from sentry.testutils.silo import no_silo_test
@@ -9,7 +10,7 @@
class IssueTagValuesTest(AcceptanceTestCase, SnubaTestCase):
page: IssueDetailsPage
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
@@ -22,7 +23,7 @@ def setUp(self):
self.dismiss_assistant()
self.event = self.create_issue()
- def create_issue(self):
+ def create_issue(self) -> Event:
event_data = load_data("javascript")
event_data["timestamp"] = before_now(minutes=1).isoformat()
event_data["tags"] = {"url": "http://example.org/path?key=value"}
diff --git a/tests/acceptance/test_link_team.py b/tests/acceptance/test_link_team.py
index 80f1eaa91dd703..df1f6d6746db70 100644
--- a/tests/acceptance/test_link_team.py
+++ b/tests/acceptance/test_link_team.py
@@ -12,7 +12,7 @@
@no_silo_test
class SlackLinkTeamTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=self.user)
diff --git a/tests/acceptance/test_member_list.py b/tests/acceptance/test_member_list.py
index 8b9783ee91afea..e3b573c35d27e5 100644
--- a/tests/acceptance/test_member_list.py
+++ b/tests/acceptance/test_member_list.py
@@ -5,7 +5,7 @@
@no_silo_test
class ListOrganizationMembersTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_oauth_authorize.py b/tests/acceptance/test_oauth_authorize.py
index 8524374c725356..676fc34bdf5dc4 100644
--- a/tests/acceptance/test_oauth_authorize.py
+++ b/tests/acceptance/test_oauth_authorize.py
@@ -4,7 +4,7 @@
@no_silo_test
class OAuthAuthorizeTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com", is_superuser=True)
self.login_as(self.user)
diff --git a/tests/acceptance/test_onboarding.py b/tests/acceptance/test_onboarding.py
index 85847e857bc0a3..767bd448d287af 100644
--- a/tests/acceptance/test_onboarding.py
+++ b/tests/acceptance/test_onboarding.py
@@ -10,7 +10,7 @@
@no_silo_test
class OrganizationOnboardingTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
@@ -20,7 +20,7 @@ def setUp(self):
)
self.login_as(self.user)
- def start_onboarding(self):
+ def start_onboarding(self) -> None:
self.browser.get("/onboarding/%s/" % self.org.slug)
self.browser.wait_until('[data-test-id="onboarding-step-welcome"]')
self.browser.click('[aria-label="Start"]')
diff --git a/tests/acceptance/test_organization_alert_rule_details.py b/tests/acceptance/test_organization_alert_rule_details.py
index 307b43a8e6f53e..efcfe46ed65cc9 100644
--- a/tests/acceptance/test_organization_alert_rule_details.py
+++ b/tests/acceptance/test_organization_alert_rule_details.py
@@ -10,7 +10,7 @@
@no_silo_test
class OrganizationAlertRuleDetailsTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
self.rule = Rule.objects.get(project=self.project)
diff --git a/tests/acceptance/test_organization_alert_rules.py b/tests/acceptance/test_organization_alert_rules.py
index 52528761d19406..5f71fd7dae4ed5 100644
--- a/tests/acceptance/test_organization_alert_rules.py
+++ b/tests/acceptance/test_organization_alert_rules.py
@@ -11,7 +11,7 @@
@no_silo_test
class OrganizationAlertRulesListTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
self.path = f"/organizations/{self.organization.slug}/alerts/rules/"
diff --git a/tests/acceptance/test_organization_dashboards.py b/tests/acceptance/test_organization_dashboards.py
index c84e151e9e9bbc..793830f030bce7 100644
--- a/tests/acceptance/test_organization_dashboards.py
+++ b/tests/acceptance/test_organization_dashboards.py
@@ -39,7 +39,7 @@
@no_silo_test
class OrganizationDashboardsAcceptanceTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
min_ago = before_now(minutes=1).isoformat()
self.store_event(
@@ -54,7 +54,7 @@ def setUp(self):
)
self.login_as(self.user)
- def capture_screenshots(self, screenshot_name):
+ def capture_screenshots(self, screenshot_name: str) -> None:
"""
Captures screenshots in both a pre and post refresh state.
@@ -193,7 +193,7 @@ def test_widget_edit_keeps_same_layout_after_modification(self) -> None:
@pytest.mark.skip(reason="TODO: Convert to new widget builder or test with jest")
def test_add_issue_widgets_do_not_overlap(self) -> None:
- def add_issue_widget(widget_title):
+ def add_issue_widget(widget_title: str) -> None:
self.browser.wait_until_clickable('[data-test-id="widget-add"]')
self.page.click_dashboard_add_widget_button()
title_input = self.browser.element(WIDGET_TITLE_FIELD)
@@ -396,7 +396,7 @@ def test_delete_widget_in_view_mode(self) -> None:
@pytest.mark.skip(reason="TODO: Convert to new widget builder or test with jest")
def test_cancel_without_changes_does_not_trigger_confirm_with_custom_widget_through_header(
self,
- ):
+ ) -> None:
with self.feature(FEATURE_NAMES + EDIT_FEATURE):
self.page.visit_dashboard_detail()
@@ -416,7 +416,7 @@ def test_cancel_without_changes_does_not_trigger_confirm_with_custom_widget_thro
@pytest.mark.skip(reason="TODO: Convert to new widget builder or test with jest")
def test_position_when_adding_multiple_widgets_through_add_widget_tile_in_edit(
self,
- ):
+ ) -> None:
with self.feature(FEATURE_NAMES + EDIT_FEATURE):
self.page.visit_dashboard_detail()
self.page.enter_edit_state()
@@ -436,7 +436,7 @@ def test_position_when_adding_multiple_widgets_through_add_widget_tile_in_edit(
@pytest.mark.skip(reason="flaky: DD-1217")
def test_position_when_adding_multiple_widgets_through_add_widget_tile_in_create(
self,
- ):
+ ) -> None:
with self.feature(FEATURE_NAMES + EDIT_FEATURE):
self.page.visit_create_dashboard()
@@ -463,7 +463,7 @@ def test_position_when_adding_multiple_widgets_through_add_widget_tile_in_create
def test_deleting_stacked_widgets_by_context_menu_does_not_trigger_confirm_on_edit_cancel(
self,
- ):
+ ) -> None:
layouts = [
{"x": 0, "y": 0, "w": 2, "h": 2, "minH": 2},
{"x": 0, "y": 2, "w": 2, "h": 2, "minH": 2},
@@ -518,7 +518,7 @@ def test_deleting_stacked_widgets_by_context_menu_does_not_trigger_confirm_on_ed
@pytest.mark.skip(reason="TODO: Convert to new widget builder or test with jest")
def test_changing_number_widget_to_area_updates_widget_height(
self,
- ):
+ ) -> None:
layouts = [
(DashboardWidgetDisplayTypes.BIG_NUMBER, {"x": 0, "y": 0, "w": 2, "h": 1, "minH": 1}),
(DashboardWidgetDisplayTypes.LINE_CHART, {"x": 0, "y": 1, "w": 2, "h": 2, "minH": 2}),
@@ -578,7 +578,7 @@ def test_changing_number_widget_to_area_updates_widget_height(
@pytest.mark.skip(reason="flaky behaviour due to loading spinner")
def test_changing_number_widget_larger_than_min_height_for_area_chart_keeps_height(
self,
- ):
+ ) -> None:
existing_widget = DashboardWidget.objects.create(
dashboard=self.dashboard,
order=0,
@@ -619,7 +619,7 @@ def test_changing_number_widget_larger_than_min_height_for_area_chart_keeps_heig
@pytest.mark.skip(reason="flaky: DD-1211")
def test_changing_area_widget_larger_than_min_height_for_number_chart_keeps_height(
self,
- ):
+ ) -> None:
existing_widget = DashboardWidget.objects.create(
dashboard=self.dashboard,
order=0,
@@ -660,7 +660,7 @@ def test_changing_area_widget_larger_than_min_height_for_number_chart_keeps_heig
@no_silo_test
class OrganizationDashboardsManageAcceptanceTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.team = self.create_team(organization=self.organization, name="Mariachi Band")
self.project = self.create_project(
@@ -689,7 +689,7 @@ def setUp(self):
self.default_path = f"/organizations/{self.organization.slug}/dashboards/"
- def wait_until_loaded(self):
+ def wait_until_loaded(self) -> None:
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
self.browser.wait_until_not('[data-test-id="loading-placeholder"]')
diff --git a/tests/acceptance/test_organization_developer_settings.py b/tests/acceptance/test_organization_developer_settings.py
index 80dd1f34fbd18b..d578db5777be3f 100644
--- a/tests/acceptance/test_organization_developer_settings.py
+++ b/tests/acceptance/test_organization_developer_settings.py
@@ -10,7 +10,7 @@ class OrganizationDeveloperSettingsNewAcceptanceTest(AcceptanceTestCase):
As a developer, I can create an integration, install it, and uninstall it
"""
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.team = self.create_team(organization=self.organization, name="Tesla Motors")
self.project = self.create_project(
@@ -20,7 +20,7 @@ def setUp(self):
self.login_as(self.user)
self.org_developer_settings_path = f"/settings/{self.organization.slug}/developer-settings/"
- def load_page(self, url):
+ def load_page(self, url: str) -> None:
self.browser.get(url)
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
@@ -58,7 +58,7 @@ class OrganizationDeveloperSettingsEditAcceptanceTest(AcceptanceTestCase):
As a developer, I can edit an existing integration
"""
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Tesla", owner=self.user)
@@ -75,7 +75,7 @@ def setUp(self):
f"/settings/{self.org.slug}/developer-settings/{self.sentry_app.slug}"
)
- def load_page(self, url):
+ def load_page(self, url: str) -> None:
self.browser.get(url)
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
diff --git a/tests/acceptance/test_organization_document_integration_detailed_view.py b/tests/acceptance/test_organization_document_integration_detailed_view.py
index 973221a3512e4a..252e230a12d662 100644
--- a/tests/acceptance/test_organization_document_integration_detailed_view.py
+++ b/tests/acceptance/test_organization_document_integration_detailed_view.py
@@ -8,7 +8,7 @@ class OrganizationDocumentIntegrationDetailView(AcceptanceTestCase):
As a developer, I can view a document-based integration, and learn more about it with the linked resources.
"""
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.organization = self.create_organization(owner=self.user, name="Walter Mitty")
self.doc = self.create_doc_integration(
@@ -16,7 +16,7 @@ def setUp(self):
)
self.login_as(self.user)
- def load_page(self, slug):
+ def load_page(self, slug: str) -> None:
url = f"/settings/{self.organization.slug}/document-integrations/{slug}/"
self.browser.get(url)
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
diff --git a/tests/acceptance/test_organization_events_v2.py b/tests/acceptance/test_organization_events_v2.py
index c9e05391a18c80..043e6e3cd77f23 100644
--- a/tests/acceptance/test_organization_events_v2.py
+++ b/tests/acceptance/test_organization_events_v2.py
@@ -1,5 +1,6 @@
import copy
from datetime import timedelta
+from typing import Any
from unittest.mock import MagicMock, patch
from urllib.parse import urlencode
@@ -21,8 +22,8 @@
]
-def all_events_query(**kwargs):
- options = {
+def all_events_query(**kwargs: str | list[str]) -> str:
+ options: dict[str, str | list[str]] = {
"sort": ["-timestamp"],
"field": ["title", "event.type", "project", "user.display", "timestamp"],
"name": ["All Events"],
@@ -32,7 +33,7 @@ def all_events_query(**kwargs):
return urlencode(options, doseq=True)
-def errors_query(**kwargs):
+def errors_query(**kwargs: str | list[str]) -> str:
options = {
"sort": ["-title"],
"name": ["Errors"],
@@ -46,7 +47,7 @@ def errors_query(**kwargs):
return urlencode(options, doseq=True)
-def transactions_query(**kwargs):
+def transactions_query(**kwargs: str | list[str]) -> str:
options = {
"sort": ["-count"],
"name": ["Transactions"],
@@ -62,7 +63,7 @@ def transactions_query(**kwargs):
# Sorted by transactions to avoid sorting issues caused by storing events
-def transactions_sorted_query(**kwargs):
+def transactions_sorted_query(**kwargs: str | list[str]) -> str:
options = {
"sort": ["transaction"],
"name": ["Transactions"],
@@ -77,7 +78,7 @@ def transactions_sorted_query(**kwargs):
return urlencode(options, doseq=True)
-def generate_transaction(trace=None, span=None):
+def generate_transaction(trace: str | None = None, span: str | None = None) -> Any:
end_datetime = before_now(minutes=10)
start_datetime = end_datetime - timedelta(milliseconds=500)
event_data = load_data(
@@ -93,7 +94,7 @@ def generate_transaction(trace=None, span=None):
reference_span = event_data["spans"][0]
parent_span_id = reference_span["parent_span_id"]
- span_tree_blueprint = {
+ span_tree_blueprint: dict[str, str | dict[str, Any]] = {
"a": {},
"b": {"bb": {"bbb": {"bbbb": "bbbbb"}}},
"c": {},
@@ -113,7 +114,9 @@ def generate_transaction(trace=None, span=None):
"e": (timedelta(milliseconds=400), timedelta(milliseconds=100)),
}
- def build_span_tree(span_tree, spans, parent_span_id):
+ def build_span_tree(
+ span_tree: dict[str, str | dict[str, Any]], spans: list[dict[str, Any]], parent_span_id: str
+ ) -> list[dict[str, Any]]:
for span_id, child in sorted(span_tree.items(), key=lambda item: item[0]):
span = copy.deepcopy(reference_span)
# non-leaf node span
@@ -154,7 +157,7 @@ def build_span_tree(span_tree, spans, parent_span_id):
@no_silo_test
class OrganizationEventsV2Test(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com", is_superuser=True)
self.org = self.create_organization(name="Rowdy Tiger")
@@ -166,7 +169,7 @@ def setUp(self):
self.landing_path = f"/organizations/{self.org.slug}/discover/queries/"
self.result_path = f"/organizations/{self.org.slug}/discover/results/"
- def wait_until_loaded(self):
+ def wait_until_loaded(self) -> None:
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
self.browser.wait_until_not('[data-test-id="loading-placeholder"]')
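One detail worth calling out in the query-builder annotations above: `**kwargs: str | list[str]` types each keyword *value*, so `kwargs` itself is a `dict[str, str | list[str]]`. A minimal sketch mirroring the pattern, with an illustrative function name:

from urllib.parse import urlencode


def build_query(**kwargs: str | list[str]) -> str:
    # Each keyword value must be a str or list[str]; doseq=True expands the lists.
    options: dict[str, str | list[str]] = {
        "sort": ["-timestamp"],
        "name": ["All Events"],
    }
    options.update(kwargs)
    return urlencode(options, doseq=True)


print(build_query(query="event.type:error"))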
diff --git a/tests/acceptance/test_organization_global_selection_header.py b/tests/acceptance/test_organization_global_selection_header.py
index b860e8a568d917..c3b1227ab6acb8 100644
--- a/tests/acceptance/test_organization_global_selection_header.py
+++ b/tests/acceptance/test_organization_global_selection_header.py
@@ -15,7 +15,7 @@
@no_silo_test
class OrganizationGlobalHeaderTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
@@ -48,7 +48,7 @@ def setUp(self):
self.issues_list = IssueListPage(self.browser, self.client)
self.issue_details = IssueDetailsPage(self.browser, self.client)
- def create_issues(self):
+ def create_issues(self) -> None:
self.issue_1 = self.store_event(
data={
"event_id": "a" * 32,
@@ -375,8 +375,8 @@ def test_issue_details_to_stream_with_initial_env_no_project(self, mock_now: Mag
@patch("django.utils.timezone.now")
def test_issue_details_to_stream_with_initial_env_no_project_with_multi_project_feature(
- self, mock_now
- ):
+ self, mock_now: MagicMock
+ ) -> None:
"""
Visiting issue details directly with no project but with an environment defined in URL.
When navigating back to issues stream, should keep environment and project in context.
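On the `mock_now: MagicMock` annotations above: `unittest.mock.patch` injects a MagicMock for the patched target as an extra test argument, so MagicMock is the accurate parameter type. A self-contained sketch that patches `time.time` instead of Django's clock so it runs on its own:

import time
from unittest.mock import MagicMock, patch


@patch("time.time")
def check_frozen_clock(mock_time: MagicMock) -> None:
    # The decorator passes the replacement MagicMock in as this argument.
    mock_time.return_value = 1700000000.0
    assert time.time() == 1700000000.0


check_frozen_clock()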
diff --git a/tests/acceptance/test_organization_group_index.py b/tests/acceptance/test_organization_group_index.py
index 7edce2c085f2ef..ffed3aef151b9d 100644
--- a/tests/acceptance/test_organization_group_index.py
+++ b/tests/acceptance/test_organization_group_index.py
@@ -16,7 +16,7 @@
@no_silo_test
class OrganizationGroupIndexTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
@@ -31,7 +31,7 @@ def setUp(self):
self.page = IssueListPage(self.browser, self.client)
self.dismiss_assistant()
- def create_issues(self):
+ def create_issues(self) -> None:
self.event_a = self.store_event(
data={
"event_id": "a" * 32,
diff --git a/tests/acceptance/test_organization_integration_configuration_tabs.py b/tests/acceptance/test_organization_integration_configuration_tabs.py
index fe6f48afd8f72d..f20dda4879b600 100644
--- a/tests/acceptance/test_organization_integration_configuration_tabs.py
+++ b/tests/acceptance/test_organization_integration_configuration_tabs.py
@@ -6,7 +6,7 @@
@no_silo_test
class OrganizationIntegrationConfigurationTabs(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
self.provider = "github"
@@ -31,7 +31,7 @@ def setUp(self):
url="https://github.com/getsentry/sentry",
)
- def load_page(self, slug, configuration_tab=False):
+ def load_page(self, slug: str, configuration_tab: bool = False) -> None:
url = f"/settings/{self.organization.slug}/integrations/{slug}/"
if configuration_tab:
url += "?tab=configurations"
diff --git a/tests/acceptance/test_organization_integration_detail_view.py b/tests/acceptance/test_organization_integration_detail_view.py
index 03bc46bb70f340..7967cd8a251008 100644
--- a/tests/acceptance/test_organization_integration_detail_view.py
+++ b/tests/acceptance/test_organization_integration_detail_view.py
@@ -17,11 +17,11 @@ class OrganizationIntegrationDetailView(AcceptanceTestCase):
As a developer, I can create an integration, install it, and uninstall it
"""
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
- def load_page(self, slug, configuration_tab=False):
+ def load_page(self, slug: str, configuration_tab: bool = False) -> None:
url = f"/settings/{self.organization.slug}/integrations/{slug}/"
if configuration_tab:
url += "?tab=configurations"
diff --git a/tests/acceptance/test_organization_join_request.py b/tests/acceptance/test_organization_join_request.py
index 92cb96692c9675..bc012372b204c5 100644
--- a/tests/acceptance/test_organization_join_request.py
+++ b/tests/acceptance/test_organization_join_request.py
@@ -4,7 +4,7 @@
@no_silo_test
class OrganizationJoinRequestTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=self.user)
diff --git a/tests/acceptance/test_organization_monitors.py b/tests/acceptance/test_organization_monitors.py
index 46aa122a4a1990..9bfde02732b458 100644
--- a/tests/acceptance/test_organization_monitors.py
+++ b/tests/acceptance/test_organization_monitors.py
@@ -17,7 +17,7 @@
@no_silo_test
class OrganizationMontorsTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.path = f"/organizations/{self.organization.slug}/insights/crons/"
self.team = self.create_team(organization=self.organization, name="Mariachi Band")
diff --git a/tests/acceptance/test_organization_plugin_detail_view.py b/tests/acceptance/test_organization_plugin_detail_view.py
index e7fbedfced0694..d89e6f297bddc0 100644
--- a/tests/acceptance/test_organization_plugin_detail_view.py
+++ b/tests/acceptance/test_organization_plugin_detail_view.py
@@ -11,17 +11,17 @@
@no_silo_test
class OrganizationPluginDetailedView(AcceptanceTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> OpsGeniePlugin:
return OpsGeniePlugin()
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
# need at least two projects
self.project = self.create_project(organization=self.organization, name="Back end")
self.create_project(organization=self.organization, name="Front End")
self.login_as(self.user)
- def load_page(self, slug, configuration_tab=False):
+ def load_page(self, slug: str, configuration_tab: bool = False) -> None:
url = f"/settings/{self.organization.slug}/plugins/{slug}/"
if configuration_tab:
url += "?tab=configurations"
diff --git a/tests/acceptance/test_organization_releases.py b/tests/acceptance/test_organization_releases.py
index bd5ae19c3b5d64..683571ce891673 100644
--- a/tests/acceptance/test_organization_releases.py
+++ b/tests/acceptance/test_organization_releases.py
@@ -14,7 +14,7 @@
class OrganizationReleasesTest(AcceptanceTestCase):
release_date = datetime(2020, 5, 18, 15, 13, 58, 132928, tzinfo=UTC)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
diff --git a/tests/acceptance/test_organization_security_privacy.py b/tests/acceptance/test_organization_security_privacy.py
index de175e9878cfd5..5a8e9ba3520c04 100644
--- a/tests/acceptance/test_organization_security_privacy.py
+++ b/tests/acceptance/test_organization_security_privacy.py
@@ -5,20 +5,20 @@
@no_silo_test
class OrganizationSecurityAndPrivacyTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("owner@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.login_as(self.user)
self.path = f"/settings/{self.org.slug}/security-and-privacy/"
- def load_organization_helper(self):
+ def load_organization_helper(self) -> None:
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
assert self.browser.wait_until(
'[data-test-id="organization-settings-security-and-privacy"]'
)
- def renders_2fa_setting(self):
+ def renders_2fa_setting(self) -> bool:
return self.browser.wait_until("#require2FA")
def test_renders_2fa_setting_for_owner(self) -> None:
diff --git a/tests/acceptance/test_organization_sentry_app_detailed_view.py b/tests/acceptance/test_organization_sentry_app_detailed_view.py
index 31fdc3ebb703b4..49c1bf778843d7 100644
--- a/tests/acceptance/test_organization_sentry_app_detailed_view.py
+++ b/tests/acceptance/test_organization_sentry_app_detailed_view.py
@@ -8,7 +8,7 @@
@no_silo_test
class OrganizationSentryAppDetailedView(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.create_project(organization=self.organization)
self.sentry_app = self.create_sentry_app(
@@ -19,7 +19,7 @@ def setUp(self):
)
self.login_as(self.user)
- def load_page(self, slug):
+ def load_page(self, slug: str) -> None:
url = f"/settings/{self.organization.slug}/sentry-apps/{slug}/"
self.browser.get(url)
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
diff --git a/tests/acceptance/test_organization_switch.py b/tests/acceptance/test_organization_switch.py
index 3a6d74ef518062..fd92ad7c5c031f 100644
--- a/tests/acceptance/test_organization_switch.py
+++ b/tests/acceptance/test_organization_switch.py
@@ -1,7 +1,9 @@
import pytest
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
+from selenium.webdriver.remote.webelement import WebElement
+from sentry.models.project import Project
from sentry.testutils.cases import AcceptanceTestCase, SnubaTestCase
from sentry.testutils.silo import no_silo_test
from sentry.utils.retries import TimedRetryPolicy
@@ -11,7 +13,7 @@
@no_silo_test
class OrganizationSwitchTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.primary_projects = [
@@ -35,16 +37,16 @@ def setUp(self):
self.login_as(self.user)
def test_organization_switches(self) -> None:
- def navigate_to_issues_page(org_slug):
+ def navigate_to_issues_page(org_slug: str) -> None:
issues_url = OrganizationSwitchTest.url_creator("issues", org_slug)
self.browser.get(issues_url)
self.browser.wait_until_not('[data-test-id="loading-indicator"]')
@TimedRetryPolicy.wrap(timeout=20, exceptions=(TimeoutException,))
- def open_project_selector():
+ def open_project_selector() -> None:
self.browser.click(selector='[data-test-id="page-filter-project-selector"]')
- def get_project_elements_from_project_selector_dropdown():
+ def get_project_elements_from_project_selector_dropdown() -> list[WebElement]:
selector = '[data-test-id="menu-list-item-label"]'
self.browser.wait_until(selector)
@@ -79,10 +81,12 @@ def get_project_elements_from_project_selector_dropdown():
)
@staticmethod
- def expect_projects_element_text_to_match_projects_slug(elements, projects):
+ def expect_projects_element_text_to_match_projects_slug(
+ elements: list[WebElement], projects: list[Project]
+ ) -> None:
assert len(elements) == len(projects)
assert {e.text for e in elements} == {p.slug for p in projects}
@staticmethod
- def url_creator(page_path, org_slug):
+ def url_creator(page_path: str, org_slug: str) -> str:
return f"organizations/{org_slug}/{page_path}/"
diff --git a/tests/acceptance/test_performance_issues.py b/tests/acceptance/test_performance_issues.py
index cc809c72216fc3..57d77aa5c5b285 100644
--- a/tests/acceptance/test_performance_issues.py
+++ b/tests/acceptance/test_performance_issues.py
@@ -1,6 +1,7 @@
import random
import string
from datetime import timedelta
+from typing import Any
from unittest import mock
from unittest.mock import MagicMock, patch
@@ -21,7 +22,7 @@
@no_silo_test
class PerformanceIssuesTest(AcceptanceTestCase, SnubaTestCase, PerformanceIssueTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.team = self.create_team(
@@ -37,7 +38,7 @@ def setUp(self):
self.page = IssueDetailsPage(self.browser, self.client)
self.dismiss_assistant()
- def create_sample_event(self, fixture, start_timestamp):
+ def create_sample_event(self, fixture: str, start_timestamp: float) -> dict[str, Any]:
event = json.loads(self.load_fixture(f"events/performance_problems/{fixture}.json"))
for key in ["datetime", "location", "title"]:
@@ -55,7 +56,7 @@ def create_sample_event(self, fixture, start_timestamp):
return event
- def randomize_span_description(self, span):
+ def randomize_span_description(self, span: dict[str, Any]) -> dict[str, Any]:
return {
**span,
"description": "".join(random.choice(string.ascii_lowercase) for _ in range(10)),
diff --git a/tests/acceptance/test_performance_landing.py b/tests/acceptance/test_performance_landing.py
index e1911aedb97c15..1f18e8202597d7 100644
--- a/tests/acceptance/test_performance_landing.py
+++ b/tests/acceptance/test_performance_landing.py
@@ -17,7 +17,7 @@
@no_silo_test
class PerformanceLandingTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.team = self.create_team(
diff --git a/tests/acceptance/test_performance_overview.py b/tests/acceptance/test_performance_overview.py
index 712370bd3c9127..08e0c94a8e53cc 100644
--- a/tests/acceptance/test_performance_overview.py
+++ b/tests/acceptance/test_performance_overview.py
@@ -17,7 +17,7 @@
@no_silo_test
class PerformanceOverviewTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.team = self.create_team(
diff --git a/tests/acceptance/test_performance_summary.py b/tests/acceptance/test_performance_summary.py
index 947d81315178d6..1da3b70ea53aa3 100644
--- a/tests/acceptance/test_performance_summary.py
+++ b/tests/acceptance/test_performance_summary.py
@@ -1,3 +1,4 @@
+from typing import Any
from unittest.mock import MagicMock, patch
from urllib.parse import urlencode
@@ -15,7 +16,7 @@
pytestmark = pytest.mark.sentry_metrics
-def make_event(event_data):
+def make_event(event_data: dict[str, Any]) -> dict[str, object]:
event_data["event_id"] = "c" * 32
event_data["contexts"]["trace"]["trace_id"] = "a" * 32
return event_data
@@ -23,7 +24,7 @@ def make_event(event_data):
@no_silo_test
class PerformanceSummaryTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.team = self.create_team(
diff --git a/tests/acceptance/test_performance_vital_detail.py b/tests/acceptance/test_performance_vital_detail.py
index 2999f1c609d652..8cd38b757056c7 100644
--- a/tests/acceptance/test_performance_vital_detail.py
+++ b/tests/acceptance/test_performance_vital_detail.py
@@ -15,7 +15,7 @@
@no_silo_test
class PerformanceVitalDetailsTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.team = self.create_team(
diff --git a/tests/acceptance/test_project_alert_settings.py b/tests/acceptance/test_project_alert_settings.py
index b43203dd42fc37..2f02fa22448187 100644
--- a/tests/acceptance/test_project_alert_settings.py
+++ b/tests/acceptance/test_project_alert_settings.py
@@ -5,7 +5,7 @@
@no_silo_test
class ProjectAlertSettingsTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_project_detail.py b/tests/acceptance/test_project_detail.py
index cf94b902e5e7c2..8747b6f1e4c813 100644
--- a/tests/acceptance/test_project_detail.py
+++ b/tests/acceptance/test_project_detail.py
@@ -10,7 +10,7 @@
@no_silo_test
class ProjectDetailTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_project_general_settings.py b/tests/acceptance/test_project_general_settings.py
index d8e3c955027232..55cf2837fcbead 100644
--- a/tests/acceptance/test_project_general_settings.py
+++ b/tests/acceptance/test_project_general_settings.py
@@ -4,7 +4,7 @@
@no_silo_test
class ProjectGeneralSettingsTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_project_keys.py b/tests/acceptance/test_project_keys.py
index a5f6a4726a3447..1373695f1870ef 100644
--- a/tests/acceptance/test_project_keys.py
+++ b/tests/acceptance/test_project_keys.py
@@ -7,7 +7,7 @@
@no_silo_test
class ProjectKeysTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
@@ -34,7 +34,7 @@ def test_simple(self) -> None:
@no_silo_test
class ProjectKeyDetailsTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_project_ownership.py b/tests/acceptance/test_project_ownership.py
index 99dd3a89b5e1d6..881de3dc3408eb 100644
--- a/tests/acceptance/test_project_ownership.py
+++ b/tests/acceptance/test_project_ownership.py
@@ -4,7 +4,7 @@
@no_silo_test
class ProjectOwnershipTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
self.path = f"/settings/{self.organization.slug}/projects/{self.project.slug}/ownership/"
diff --git a/tests/acceptance/test_project_release_tracking_settings.py b/tests/acceptance/test_project_release_tracking_settings.py
index 9e4e9bfea8e9a8..b2c0df8af4520c 100644
--- a/tests/acceptance/test_project_release_tracking_settings.py
+++ b/tests/acceptance/test_project_release_tracking_settings.py
@@ -4,7 +4,7 @@
@no_silo_test
class ProjectReleaseTrackingSettingsTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_project_servicehooks.py b/tests/acceptance/test_project_servicehooks.py
index ee801c72d5a477..24dc8b309db960 100644
--- a/tests/acceptance/test_project_servicehooks.py
+++ b/tests/acceptance/test_project_servicehooks.py
@@ -5,7 +5,7 @@
@no_silo_test
class ProjectServiceHooksTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_project_tags_settings.py b/tests/acceptance/test_project_tags_settings.py
index c4f11ba2f4aef5..3b8e3638ef3e60 100644
--- a/tests/acceptance/test_project_tags_settings.py
+++ b/tests/acceptance/test_project_tags_settings.py
@@ -11,7 +11,7 @@
@no_silo_test
class ProjectTagsSettingsTest(AcceptanceTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_proxy.py b/tests/acceptance/test_proxy.py
index b5086faf72180c..0a80738a10199e 100644
--- a/tests/acceptance/test_proxy.py
+++ b/tests/acceptance/test_proxy.py
@@ -1,8 +1,10 @@
from __future__ import annotations
from dataclasses import asdict
+from typing import Any
import pytest
+from django.http import HttpResponse, StreamingHttpResponse
from django.urls import reverse
from pytest_django.live_server_helper import LiveServer
from rest_framework.test import APIClient
@@ -22,7 +24,7 @@
@pytest.fixture(scope="function")
-def local_live_server(request, live_server):
+def local_live_server(request: pytest.FixtureRequest, live_server: LiveServer) -> None:
if hasattr(request, "cls"):
request.cls.live_server = live_server
request.node.live_server = live_server
@@ -37,7 +39,7 @@ class EndToEndAPIProxyTest(TransactionTestCase):
organization: Organization
api_key: ApiKey
- def get_response(self, *args, **params):
+ def get_response(self, *args: str, **params: Any) -> HttpResponse | StreamingHttpResponse:
url = reverse(self.endpoint, args=args)
headers = params.pop("extra_headers", {})
return getattr(self.client, self.method)(url, format="json", data=params, **headers)
diff --git a/tests/acceptance/test_quick_start.py b/tests/acceptance/test_quick_start.py
index 25681262c7435b..48717126a241bb 100644
--- a/tests/acceptance/test_quick_start.py
+++ b/tests/acceptance/test_quick_start.py
@@ -14,7 +14,7 @@
@no_silo_test
class OrganizationQuickStartTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.login_as(self.user)
@@ -56,7 +56,7 @@ def test_quick_start_not_rendered_because_all_tasks_completed_and_overdue(self)
@with_feature("organizations:onboarding")
def test_quick_start_renders_even_when_all_tasks_are_overdue_but_one_is_missing_to_complete(
self,
- ):
+ ) -> None:
excluded_required_task = OnboardingTask.FIRST_TRANSACTION
tasks_to_process = list(
OrganizationOnboardingTask.TASK_KEY_MAP.keys() - {excluded_required_task}
diff --git a/tests/acceptance/test_shared_issue.py b/tests/acceptance/test_shared_issue.py
index 3448c24acdebd1..ad94a23c2bc9e6 100644
--- a/tests/acceptance/test_shared_issue.py
+++ b/tests/acceptance/test_shared_issue.py
@@ -7,7 +7,7 @@
@no_silo_test
class SharedIssueTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
diff --git a/tests/acceptance/test_sidebar.py b/tests/acceptance/test_sidebar.py
index d29be710c36563..e5be00347ee40f 100644
--- a/tests/acceptance/test_sidebar.py
+++ b/tests/acceptance/test_sidebar.py
@@ -6,7 +6,7 @@
@no_silo_test
class SidebarTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.login_as(self.user)
diff --git a/tests/acceptance/test_teams_list.py b/tests/acceptance/test_teams_list.py
index c93c1dfd25f4fa..91ec2a9acc4983 100644
--- a/tests/acceptance/test_teams_list.py
+++ b/tests/acceptance/test_teams_list.py
@@ -6,7 +6,7 @@
@no_silo_test
class TeamsListTest(AcceptanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Rowdy Tiger", owner=None)
diff --git a/tests/acceptance/test_trace_view_from_explore.py b/tests/acceptance/test_trace_view_from_explore.py
index d86b4f1282e6dd..724a2d9f411045 100644
--- a/tests/acceptance/test_trace_view_from_explore.py
+++ b/tests/acceptance/test_trace_view_from_explore.py
@@ -1,5 +1,5 @@
from datetime import timedelta
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
from fixtures.page_objects.explore_spans import ExploreSpansPage
from fixtures.page_objects.trace_view import TraceViewWaterfallPage
@@ -18,7 +18,7 @@ class TraceViewFromExploreTest(AcceptanceTestCase, TraceTestCase, SnubaTestCase)
"organizations:trace-spans-format",
]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.snuba_eventstream = SnubaEventStream()
self.start = self.day_ago = before_now(days=1).replace(
@@ -42,7 +42,7 @@ def setUp(self):
self.dismiss_assistant(which="tour.explore.spans")
@patch("django.utils.timezone.now")
- def test_navigation(self, mock_now):
+ def test_navigation(self, mock_now: MagicMock) -> None:
mock_now.return_value = self.start
assert (
diff --git a/tests/acceptance/test_trace_view_waterfall.py b/tests/acceptance/test_trace_view_waterfall.py
index 414913c624e63b..1841193f13fa2e 100644
--- a/tests/acceptance/test_trace_view_waterfall.py
+++ b/tests/acceptance/test_trace_view_waterfall.py
@@ -1,5 +1,5 @@
from datetime import timedelta
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
from fixtures.page_objects.trace_view import TraceViewWaterfallPage
from sentry.eventstream.snuba import SnubaEventStream
@@ -17,7 +17,7 @@ class TraceViewWaterfallTest(AcceptanceTestCase, TraceTestCase, SnubaTestCase):
"organizations:trace-spans-format",
]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.snuba_eventstream = SnubaEventStream()
self.start = self.day_ago = before_now(days=1).replace(
@@ -39,7 +39,7 @@ def setUp(self):
self.dismiss_assistant()
@patch("django.utils.timezone.now")
- def test_trace_view_waterfall_loads(self, mock_now):
+ def test_trace_view_waterfall_loads(self, mock_now: MagicMock) -> None:
mock_now.return_value = self.start
assert (
diff --git a/tests/apidocs/endpoints/events/test_group_events.py b/tests/apidocs/endpoints/events/test_group_events.py
index 0f9503f909d29a..5b2a3546a1b712 100644
--- a/tests/apidocs/endpoints/events/test_group_events.py
+++ b/tests/apidocs/endpoints/events/test_group_events.py
@@ -5,7 +5,7 @@
class ProjectGroupEventBase(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
first_release = {
"firstEvent": before_now(minutes=3),
"lastEvent": before_now(minutes=2, seconds=30),
@@ -28,7 +28,7 @@ def setUp(self):
class ProjectGroupEventsDocs(ProjectGroupEventBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = f"/api/0/organizations/{self.organization.slug}/issues/{self.group_id}/events/"
@@ -40,7 +40,7 @@ def test_get(self) -> None:
class ProjectGroupEventDetailsDocs(ProjectGroupEventBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = (
f"/api/0/organizations/{self.organization.slug}/issues/{self.group_id}/events/latest/"
diff --git a/tests/apidocs/endpoints/events/test_group_hashes.py b/tests/apidocs/endpoints/events/test_group_hashes.py
index bebe3067a7a804..45ae666bac8db0 100644
--- a/tests/apidocs/endpoints/events/test_group_hashes.py
+++ b/tests/apidocs/endpoints/events/test_group_hashes.py
@@ -4,7 +4,7 @@
class ProjectGroupHashesDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_event("a")
event = self.create_event("b")
diff --git a/tests/apidocs/endpoints/events/test_group_issue_details.py b/tests/apidocs/endpoints/events/test_group_issue_details.py
index 0e2566183e14ad..0216388f2e9a26 100644
--- a/tests/apidocs/endpoints/events/test_group_issue_details.py
+++ b/tests/apidocs/endpoints/events/test_group_issue_details.py
@@ -5,7 +5,7 @@
class ProjectGroupIssueDetailsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_release(project=self.project, version="abcdabc")
first_release = {
diff --git a/tests/apidocs/endpoints/events/test_group_tagkey_values.py b/tests/apidocs/endpoints/events/test_group_tagkey_values.py
index 673effb5c53e4c..d33f108bb1e691 100644
--- a/tests/apidocs/endpoints/events/test_group_tagkey_values.py
+++ b/tests/apidocs/endpoints/events/test_group_tagkey_values.py
@@ -4,7 +4,7 @@
class GroupTagKeyValuesDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
key, value = "foo", "bar"
event = self.create_event("a", tags={key: value})
diff --git a/tests/apidocs/endpoints/events/test_project_event_details.py b/tests/apidocs/endpoints/events/test_project_event_details.py
index 3ed3c4ebbb2f4c..a05c81fc9a2cdf 100644
--- a/tests/apidocs/endpoints/events/test_project_event_details.py
+++ b/tests/apidocs/endpoints/events/test_project_event_details.py
@@ -7,7 +7,7 @@
class ProjectEventDetailsDocs(APIDocsTestCase):
endpoint = "sentry-api-0-project-event-details"
- def setUp(self):
+ def setUp(self) -> None:
self.create_event("a")
event = self.create_event("b")
self.create_event("c")
diff --git a/tests/apidocs/endpoints/events/test_project_issues.py b/tests/apidocs/endpoints/events/test_project_issues.py
index 048b9cb4a02c15..059ead0745dec5 100644
--- a/tests/apidocs/endpoints/events/test_project_issues.py
+++ b/tests/apidocs/endpoints/events/test_project_issues.py
@@ -4,7 +4,7 @@
class ProjectIssuesDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_event("a")
self.create_event("b")
diff --git a/tests/apidocs/endpoints/events/test_project_tagkey_values.py b/tests/apidocs/endpoints/events/test_project_tagkey_values.py
index ed040ee2ab8a14..5f87f1469ba8db 100644
--- a/tests/apidocs/endpoints/events/test_project_tagkey_values.py
+++ b/tests/apidocs/endpoints/events/test_project_tagkey_values.py
@@ -5,7 +5,7 @@
class ProjectTagKeyValuesDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
key = "foo"
self.create_event("a", tags={key: "bar"})
diff --git a/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issue_details.py b/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issue_details.py
index 71700b92a750b6..75817c75bf4a31 100644
--- a/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issue_details.py
+++ b/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issue_details.py
@@ -8,7 +8,7 @@
class SentryAppDetailsDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.project = self.create_project(organization=self.org)
self.group = self.create_group(project=self.project)
diff --git a/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issues.py b/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issues.py
index 2c4bb625504ebb..a46842e431602f 100644
--- a/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issues.py
+++ b/tests/apidocs/endpoints/integration_platform/test_sentry_app_external_issues.py
@@ -8,7 +8,7 @@
class SentryAppDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.org = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.project = self.create_project(organization=self.org)
self.group = self.create_group(project=self.project)
diff --git a/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py b/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py
index 61108d07ccb3dd..4c609d889cb293 100644
--- a/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py
+++ b/tests/apidocs/endpoints/integration_platform/test_sentry_app_installations.py
@@ -8,7 +8,7 @@
@control_silo_test
class SentryAppInstallationDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.user = self.create_user("foo@example.com")
self.org = self.create_organization(name="Jessla", owner=None)
self.create_member(user=self.user, organization=self.org, role="owner")
diff --git a/tests/apidocs/endpoints/organizations/test_event_id_lookup.py b/tests/apidocs/endpoints/organizations/test_event_id_lookup.py
index b7b652044d222f..332983fd86d922 100644
--- a/tests/apidocs/endpoints/organizations/test_event_id_lookup.py
+++ b/tests/apidocs/endpoints/organizations/test_event_id_lookup.py
@@ -5,7 +5,7 @@
class OrganizationEventIDLookupDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
event = self.create_event("a", message="oh no")
self.url = reverse(
"sentry-api-0-event-id-lookup",
diff --git a/tests/apidocs/endpoints/organizations/test_org_details.py b/tests/apidocs/endpoints/organizations/test_org_details.py
index 6f666600528259..5ea3ab56dcc374 100644
--- a/tests/apidocs/endpoints/organizations/test_org_details.py
+++ b/tests/apidocs/endpoints/organizations/test_org_details.py
@@ -5,7 +5,7 @@
class OrganizationDetailsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
organization = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.url = reverse(
diff --git a/tests/apidocs/endpoints/organizations/test_org_index.py b/tests/apidocs/endpoints/organizations/test_org_index.py
index 82d3e7cc96b0b6..99b8a22f789179 100644
--- a/tests/apidocs/endpoints/organizations/test_org_index.py
+++ b/tests/apidocs/endpoints/organizations/test_org_index.py
@@ -5,7 +5,7 @@
class OrganizationIndexDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_organization(owner=self.user, name="Rowdy Tiger")
self.url = reverse(
diff --git a/tests/apidocs/endpoints/organizations/test_org_projects.py b/tests/apidocs/endpoints/organizations/test_org_projects.py
index 40cc8db2c0341f..d380a534d47f10 100644
--- a/tests/apidocs/endpoints/organizations/test_org_projects.py
+++ b/tests/apidocs/endpoints/organizations/test_org_projects.py
@@ -5,7 +5,7 @@
class OrganizationProjectsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
organization = self.create_organization(owner=self.user, name="Rowdy Tiger")
self.create_project(name="foo", organization=organization, teams=[])
self.create_project(name="bar", organization=organization, teams=[])
diff --git a/tests/apidocs/endpoints/organizations/test_org_repos.py b/tests/apidocs/endpoints/organizations/test_org_repos.py
index 17727224364614..75b9cd71e78885 100644
--- a/tests/apidocs/endpoints/organizations/test_org_repos.py
+++ b/tests/apidocs/endpoints/organizations/test_org_repos.py
@@ -5,7 +5,7 @@
class OrganizationReposDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
organization = self.create_organization(owner=self.user, name="Rowdy Tiger")
project = self.create_project(name="foo", organization=organization, teams=[])
self.create_repo(project=project, name="getsentry/sentry")
diff --git a/tests/apidocs/endpoints/organizations/test_org_stats_v2.py b/tests/apidocs/endpoints/organizations/test_org_stats_v2.py
index 25100ba858f263..63987c1cc8ae90 100644
--- a/tests/apidocs/endpoints/organizations/test_org_stats_v2.py
+++ b/tests/apidocs/endpoints/organizations/test_org_stats_v2.py
@@ -10,7 +10,7 @@
class OrganizationStatsDocs(APIDocsTestCase, OutcomesSnubaTest):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.now = datetime(2021, 3, 14, 12, 27, 28, tzinfo=timezone.utc)
self.login_as(user=self.user)
diff --git a/tests/apidocs/endpoints/organizations/test_repo_commits.py b/tests/apidocs/endpoints/organizations/test_repo_commits.py
index 806e40e5e9c489..914ab738fc27a3 100644
--- a/tests/apidocs/endpoints/organizations/test_repo_commits.py
+++ b/tests/apidocs/endpoints/organizations/test_repo_commits.py
@@ -5,7 +5,7 @@
class OrganizationRepoCommitsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
organization = self.create_organization(owner=self.user, name="Rowdy Tiger")
project = self.create_project(name="foo", organization=organization, teams=[])
repo = self.create_repo(project=project, name="getsentry/sentry")
diff --git a/tests/apidocs/endpoints/projects/test_dsyms.py b/tests/apidocs/endpoints/projects/test_dsyms.py
index 98593a7d030c6a..1a47eab57b72b7 100644
--- a/tests/apidocs/endpoints/projects/test_dsyms.py
+++ b/tests/apidocs/endpoints/projects/test_dsyms.py
@@ -9,7 +9,7 @@
class ProjectDsymsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.url = reverse(
"sentry-api-0-dsym-files",
kwargs={
diff --git a/tests/apidocs/endpoints/projects/test_project_index.py b/tests/apidocs/endpoints/projects/test_project_index.py
index 2dabd5f307056b..6181ffec388a6f 100644
--- a/tests/apidocs/endpoints/projects/test_project_index.py
+++ b/tests/apidocs/endpoints/projects/test_project_index.py
@@ -5,7 +5,7 @@
class ProjectIndexDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.org = self.create_organization(owner=self.user)
self.team = self.create_team(organization=self.org, members=[self.user])
self.project = self.create_project(teams=[self.team])
diff --git a/tests/apidocs/endpoints/projects/test_project_stats.py b/tests/apidocs/endpoints/projects/test_project_stats.py
index 845e6f25eabe39..3d52f5e14c0bfc 100644
--- a/tests/apidocs/endpoints/projects/test_project_stats.py
+++ b/tests/apidocs/endpoints/projects/test_project_stats.py
@@ -5,7 +5,7 @@
class ProjectStatsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_event("a", message="oh no")
self.create_event("b", message="oh no")
diff --git a/tests/apidocs/endpoints/projects/test_service_hook_details.py b/tests/apidocs/endpoints/projects/test_service_hook_details.py
index d6b6d11e39ff79..29ff814bc233cf 100644
--- a/tests/apidocs/endpoints/projects/test_service_hook_details.py
+++ b/tests/apidocs/endpoints/projects/test_service_hook_details.py
@@ -5,7 +5,7 @@
class ProjectServiceHookDetailsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
hook = self.create_service_hook(project=self.project, events=("event.created",))
self.url = reverse(
diff --git a/tests/apidocs/endpoints/projects/test_service_hooks.py b/tests/apidocs/endpoints/projects/test_service_hooks.py
index 27bd81a27b624f..48e65e15b91a1d 100644
--- a/tests/apidocs/endpoints/projects/test_service_hooks.py
+++ b/tests/apidocs/endpoints/projects/test_service_hooks.py
@@ -5,7 +5,7 @@
class ProjectServiceHooksDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_service_hook(project=self.project, events=("event.created",))
self.create_service_hook(project=self.project, events=("event.alert",))
diff --git a/tests/apidocs/endpoints/projects/test_tag_values.py b/tests/apidocs/endpoints/projects/test_tag_values.py
index bec299410544a5..2f7b59c8dac66d 100644
--- a/tests/apidocs/endpoints/projects/test_tag_values.py
+++ b/tests/apidocs/endpoints/projects/test_tag_values.py
@@ -5,7 +5,7 @@
class ProjectTagValuesDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
key, value = "foo", "bar"
self.create_event("a", tags={key: value})
diff --git a/tests/apidocs/endpoints/projects/test_user_feedback.py b/tests/apidocs/endpoints/projects/test_user_feedback.py
index 583dbde60a47a3..0d0819b8acc814 100644
--- a/tests/apidocs/endpoints/projects/test_user_feedback.py
+++ b/tests/apidocs/endpoints/projects/test_user_feedback.py
@@ -5,7 +5,7 @@
class ProjectUserFeedbackDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
event = self.create_event("a", message="oh no")
self.event_id = event.event_id
self.create_userreport(
diff --git a/tests/apidocs/endpoints/projects/test_users.py b/tests/apidocs/endpoints/projects/test_users.py
index f0e20d63add607..8a65919e2976fd 100644
--- a/tests/apidocs/endpoints/projects/test_users.py
+++ b/tests/apidocs/endpoints/projects/test_users.py
@@ -11,7 +11,7 @@
class ProjectUsersDocs(APIDocsTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project = self.create_project(date_added=(timezone.now() - timedelta(hours=2)))
timestamp = before_now(hours=1).isoformat()
diff --git a/tests/apidocs/endpoints/releases/test_deploys.py b/tests/apidocs/endpoints/releases/test_deploys.py
index f423ab295c1034..3b452faec6fff9 100644
--- a/tests/apidocs/endpoints/releases/test_deploys.py
+++ b/tests/apidocs/endpoints/releases/test_deploys.py
@@ -10,7 +10,7 @@
class ReleaseDeploysDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
project = self.create_project(name="foo")
release = self.create_release(project=project, version="1")
release.add_project(project)
diff --git a/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py b/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py
index d6e1a62066ef6e..a64ce0d65e2bee 100644
--- a/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py
+++ b/tests/apidocs/endpoints/releases/test_organization_release_commit_files.py
@@ -8,7 +8,7 @@
class CommitFileChangeDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
project = self.create_project(name="foo")
release = self.create_release(project=project, version="1")
release.add_project(project)
diff --git a/tests/apidocs/endpoints/releases/test_organization_release_commits.py b/tests/apidocs/endpoints/releases/test_organization_release_commits.py
index a06431909770a4..6ab6e9e13e69c6 100644
--- a/tests/apidocs/endpoints/releases/test_organization_release_commits.py
+++ b/tests/apidocs/endpoints/releases/test_organization_release_commits.py
@@ -7,7 +7,7 @@
class ReleaseCommitsListDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
project = self.create_project(name="foo")
release = self.create_release(project=project, version="1")
release.add_project(project)
diff --git a/tests/apidocs/endpoints/releases/test_organization_release_details.py b/tests/apidocs/endpoints/releases/test_organization_release_details.py
index 46e195c9e8a356..b539b69a7110b7 100644
--- a/tests/apidocs/endpoints/releases/test_organization_release_details.py
+++ b/tests/apidocs/endpoints/releases/test_organization_release_details.py
@@ -7,7 +7,7 @@
class OrganizationReleaseDetailsDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
user = self.create_user(is_staff=False, is_superuser=False)
org = self.organization
org2 = self.create_organization()
diff --git a/tests/apidocs/endpoints/releases/test_organization_release_file_details.py b/tests/apidocs/endpoints/releases/test_organization_release_file_details.py
index baa50d32b80813..16c01a81435553 100644
--- a/tests/apidocs/endpoints/releases/test_organization_release_file_details.py
+++ b/tests/apidocs/endpoints/releases/test_organization_release_file_details.py
@@ -5,7 +5,7 @@
class ReleaseFileDetailsDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.login_as(user=self.user)
project = self.create_project(name="foo")
diff --git a/tests/apidocs/endpoints/releases/test_organization_release_files.py b/tests/apidocs/endpoints/releases/test_organization_release_files.py
index b6660ae02b7d6c..21d1e1a97f84a2 100644
--- a/tests/apidocs/endpoints/releases/test_organization_release_files.py
+++ b/tests/apidocs/endpoints/releases/test_organization_release_files.py
@@ -6,7 +6,7 @@
class ReleaseFilesListDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
project = self.create_project(name="foo")
release = self.create_release(project=self.project, version="1")
file1 = self.create_file(
diff --git a/tests/apidocs/endpoints/releases/test_organization_releases.py b/tests/apidocs/endpoints/releases/test_organization_releases.py
index e722409ac7dc9b..72314af456767a 100644
--- a/tests/apidocs/endpoints/releases/test_organization_releases.py
+++ b/tests/apidocs/endpoints/releases/test_organization_releases.py
@@ -11,7 +11,7 @@
class OrganizationReleasesDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
user = self.create_user(is_staff=False, is_superuser=False)
org = self.create_organization(owner=user, name="blah")
org2 = self.create_organization(owner=user, name="bloop")
diff --git a/tests/apidocs/endpoints/releases/test_organization_sessions.py b/tests/apidocs/endpoints/releases/test_organization_sessions.py
index f5b94e43a4322a..ac0151389573aa 100644
--- a/tests/apidocs/endpoints/releases/test_organization_sessions.py
+++ b/tests/apidocs/endpoints/releases/test_organization_sessions.py
@@ -9,7 +9,7 @@
class OrganizationSessionsDocsTest(APIDocsTestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.organization = self.create_organization(owner=self.user, name="foo")
diff --git a/tests/apidocs/endpoints/releases/test_project_release_commits.py b/tests/apidocs/endpoints/releases/test_project_release_commits.py
index e17e992261880e..5b516b259cc9e7 100644
--- a/tests/apidocs/endpoints/releases/test_project_release_commits.py
+++ b/tests/apidocs/endpoints/releases/test_project_release_commits.py
@@ -7,7 +7,7 @@
class ProjectReleaseCommitsListDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
project = self.create_project(name="foo")
release = self.create_release(project=project, version="1")
release.add_project(project)
diff --git a/tests/apidocs/endpoints/releases/test_project_release_file_details.py b/tests/apidocs/endpoints/releases/test_project_release_file_details.py
index 57d46d2de1551d..67d6dc04bc38f9 100644
--- a/tests/apidocs/endpoints/releases/test_project_release_file_details.py
+++ b/tests/apidocs/endpoints/releases/test_project_release_file_details.py
@@ -5,7 +5,7 @@
class ProjectReleaseFileDetailsDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.login_as(user=self.user)
project = self.create_project(name="foo")
release = self.create_release(project=project, version="1")
diff --git a/tests/apidocs/endpoints/releases/test_project_release_files.py b/tests/apidocs/endpoints/releases/test_project_release_files.py
index a26fae89a186c4..71f963ce515131 100644
--- a/tests/apidocs/endpoints/releases/test_project_release_files.py
+++ b/tests/apidocs/endpoints/releases/test_project_release_files.py
@@ -6,7 +6,7 @@
class ProjectReleaseFilesListDocsTest(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
project = self.create_project(name="foo")
file1 = self.create_file(
name="blah.js",
diff --git a/tests/apidocs/endpoints/scim/test_group_details.py b/tests/apidocs/endpoints/scim/test_group_details.py
index e8a0f10458c2ac..3c1be929688713 100644
--- a/tests/apidocs/endpoints/scim/test_group_details.py
+++ b/tests/apidocs/endpoints/scim/test_group_details.py
@@ -6,7 +6,7 @@
class SCIMTeamDetailsDocs(APIDocsTestCase, SCIMTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
member_user = self.create_user()
self.member = self.create_member(user=member_user, organization=self.organization)
diff --git a/tests/apidocs/endpoints/scim/test_group_index.py b/tests/apidocs/endpoints/scim/test_group_index.py
index e1393bf34f19e3..eb11b48097c13a 100644
--- a/tests/apidocs/endpoints/scim/test_group_index.py
+++ b/tests/apidocs/endpoints/scim/test_group_index.py
@@ -6,7 +6,7 @@
class SCIMTeamIndexDocs(APIDocsTestCase, SCIMTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.member = self.create_member(user=self.create_user(), organization=self.organization)
self.team = self.create_team(organization=self.organization, members=[self.user])
diff --git a/tests/apidocs/endpoints/scim/test_member_details.py b/tests/apidocs/endpoints/scim/test_member_details.py
index 9c6fb6af3e16bc..914dc4fcd12dfa 100644
--- a/tests/apidocs/endpoints/scim/test_member_details.py
+++ b/tests/apidocs/endpoints/scim/test_member_details.py
@@ -6,7 +6,7 @@
class SCIMMemberDetailsDocs(APIDocsTestCase, SCIMTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.member = self.create_member(user=self.create_user(), organization=self.organization)
diff --git a/tests/apidocs/endpoints/scim/test_member_index.py b/tests/apidocs/endpoints/scim/test_member_index.py
index b55042ebcbe016..34446b9c9c1bb1 100644
--- a/tests/apidocs/endpoints/scim/test_member_index.py
+++ b/tests/apidocs/endpoints/scim/test_member_index.py
@@ -6,7 +6,7 @@
class SCIMMemberIndexDocs(APIDocsTestCase, SCIMTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.member = self.create_member(user=self.create_user(), organization=self.organization)
diff --git a/tests/apidocs/endpoints/teams/test_by_slug.py b/tests/apidocs/endpoints/teams/test_by_slug.py
index 13dc3f61624f87..16fcc46405c0b3 100644
--- a/tests/apidocs/endpoints/teams/test_by_slug.py
+++ b/tests/apidocs/endpoints/teams/test_by_slug.py
@@ -5,7 +5,7 @@
class TeamsBySlugDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
team = self.create_team(organization=self.organization)
self.url = reverse(
diff --git a/tests/apidocs/endpoints/teams/test_index.py b/tests/apidocs/endpoints/teams/test_index.py
index 4e63e49875bb93..f8408d0167c952 100644
--- a/tests/apidocs/endpoints/teams/test_index.py
+++ b/tests/apidocs/endpoints/teams/test_index.py
@@ -5,7 +5,7 @@
class TeamsIndexDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.create_team(organization=self.organization)
self.url = reverse(
diff --git a/tests/apidocs/endpoints/teams/test_projects.py b/tests/apidocs/endpoints/teams/test_projects.py
index 4959d325891531..bdc127fa4f5d3c 100644
--- a/tests/apidocs/endpoints/teams/test_projects.py
+++ b/tests/apidocs/endpoints/teams/test_projects.py
@@ -5,7 +5,7 @@
class TeamsProjectsDocs(APIDocsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
team = self.create_team(organization=self.organization)
self.create_project(name="foo", organization=self.organization, teams=[team])
diff --git a/tests/conftest.py b/tests/conftest.py
index 8ea0795f1ff30b..466a0dd471453d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -51,20 +51,20 @@ def _open_files() -> frozenset[str]:
@pytest.fixture(autouse=True)
-def unclosed_files():
+def unclosed_files() -> Generator[None]:
fds = _open_files()
yield
assert _open_files() == fds
@pytest.fixture(autouse=True)
-def unclosed_threads(request):
+def unclosed_threads(request: pytest.FixtureRequest) -> Generator[None]:
# TODO(DI-1067): strict mode
yield from thread_leaks.check_test(request, strict=False)
@pytest.fixture(autouse=True)
-def validate_silo_mode():
+def validate_silo_mode() -> Generator[None]:
# NOTE! Hybrid cloud uses many mechanisms to simulate multiple different configurations of the application
# during tests. It depends upon `override_settings` using the correct contextmanager behaviors and correct
# thread handling in acceptance tests. If you hit one of these, it's possible either that cleanup logic has
@@ -83,7 +83,7 @@ def validate_silo_mode():
@pytest.fixture(autouse=True)
-def setup_simulate_on_commit(request):
+def setup_simulate_on_commit(request: pytest.FixtureRequest) -> Generator[None]:
from sentry.testutils.hybrid_cloud import simulate_on_commit
with simulate_on_commit(request):
@@ -91,7 +91,7 @@ def setup_simulate_on_commit(request):
@pytest.fixture(autouse=True)
-def setup_enforce_monotonic_transactions(request):
+def setup_enforce_monotonic_transactions(request: pytest.FixtureRequest) -> Generator[None]:
from sentry.testutils.hybrid_cloud import enforce_no_cross_transaction_interactions
with enforce_no_cross_transaction_interactions():
@@ -99,7 +99,7 @@ def setup_enforce_monotonic_transactions(request):
@pytest.fixture(autouse=True)
-def audit_hybrid_cloud_writes_and_deletes(request):
+def audit_hybrid_cloud_writes_and_deletes(request: pytest.FixtureRequest) -> Generator[None]:
"""
Ensure that write operations on hybrid cloud foreign keys are recorded
alongside outboxes or use a context manager to indicate that the
@@ -137,13 +137,13 @@ def audit_hybrid_cloud_writes_and_deletes(request):
@pytest.fixture(autouse=True)
-def clear_caches():
+def clear_caches() -> Generator[None]:
yield
cache.clear()
@pytest.fixture(autouse=True)
-def check_leaked_responses_mocks():
+def check_leaked_responses_mocks() -> Generator[None]:
yield
leaked = responses.registered()
if leaked:
diff --git a/tests/flagpole/test_feature.py b/tests/flagpole/test_feature.py
index bf0466e242c19c..e82dc2b73daec5 100644
--- a/tests/flagpole/test_feature.py
+++ b/tests/flagpole/test_feature.py
@@ -15,7 +15,9 @@ class SimpleTestContextData:
class TestParseFeatureConfig:
- def get_is_true_context_builder(self, is_true_value: bool):
+ def get_is_true_context_builder(
+ self, is_true_value: bool
+ ) -> ContextBuilder[SimpleTestContextData]:
return ContextBuilder().add_context_transformer(lambda _data: dict(is_true=is_true_value))
def test_feature_with_empty_segments(self) -> None:
diff --git a/tests/integration/test_api.py b/tests/integration/test_api.py
index d27a2b1b8a7242..602e62d7ebaa96 100644
--- a/tests/integration/test_api.py
+++ b/tests/integration/test_api.py
@@ -13,7 +13,7 @@
# TODO: move these into the tests/sentry/auth directory and remove deprecated logic
class AuthenticationTest(AuthProviderTestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.organization = self.create_organization(name="foo")
self.user = self.create_user("foobar@example.com", is_superuser=False)
team = self.create_team(name="bar", organization=self.organization)
diff --git a/tests/sentry/api/bases/test_group.py b/tests/sentry/api/bases/test_group.py
index a6229d0147052d..018231db4824a2 100644
--- a/tests/sentry/api/bases/test_group.py
+++ b/tests/sentry/api/bases/test_group.py
@@ -3,9 +3,12 @@
from rest_framework.views import APIView
from sentry.api.bases.group import GroupAiEndpoint, GroupAiPermission
+from sentry.models.apitoken import ApiToken
+from sentry.models.group import Group
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers.options import override_options
from sentry.testutils.requests import drf_request_from_request
+from sentry.users.models.user import User
class GroupAiPermissionTest(TestCase):
@@ -19,7 +22,14 @@ def setUp(self) -> None:
def _demo_mode_enabled(self) -> ContextManager[None]:
return override_options({"demo-mode.enabled": True, "demo-mode.users": [self.demo_user.id]})
- def has_object_perm(self, method, obj, auth=None, user=None, is_superuser=None):
+ def has_object_perm(
+ self,
+ method: str,
+ obj: Group,
+ auth: ApiToken | None = None,
+ user: User | None = None,
+ is_superuser: bool | None = None,
+ ) -> bool:
request = self.make_request(user=user, auth=auth, method=method, is_superuser=is_superuser)
drf_request = drf_request_from_request(request)
return self.permission.has_permission(
diff --git a/tests/sentry/api/bases/test_organization.py b/tests/sentry/api/bases/test_organization.py
index 08e4c3094dc532..4b59f5d8d6cbab 100644
--- a/tests/sentry/api/bases/test_organization.py
+++ b/tests/sentry/api/bases/test_organization.py
@@ -46,7 +46,7 @@
class MockSuperUser:
@property
- def is_active(self):
+ def is_active(self) -> bool:
return True
diff --git a/tests/sentry/api/bases/test_project.py b/tests/sentry/api/bases/test_project.py
index 97609b63c7d56d..582d62a7f3d596 100644
--- a/tests/sentry/api/bases/test_project.py
+++ b/tests/sentry/api/bases/test_project.py
@@ -1,9 +1,12 @@
from rest_framework.views import APIView
from sentry.api.bases.project import ProjectAndStaffPermission, ProjectPermission
+from sentry.models.apitoken import ApiToken
+from sentry.models.project import Project
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers import with_feature
from sentry.testutils.requests import drf_request_from_request
+from sentry.users.models.user import User
from sentry.users.services.user.serial import serialize_rpc_user
@@ -12,7 +15,15 @@ def setUp(self) -> None:
super().setUp()
self.permission_cls = ProjectPermission
- def has_object_perm(self, method, obj, auth=None, user=None, is_superuser=None, is_staff=None):
+ def has_object_perm(
+ self,
+ method: str,
+ obj: Project,
+ auth: ApiToken | None = None,
+ user: User | None = None,
+ is_superuser: bool | None = None,
+ is_staff: bool | None = None,
+ ) -> bool:
perm = self.permission_cls()
request = self.make_request(
user=user, auth=auth, method=method, is_superuser=is_superuser, is_staff=is_staff
diff --git a/tests/sentry/api/bases/test_team.py b/tests/sentry/api/bases/test_team.py
index 92d0d69006fff5..822a8f5d0f6c3d 100644
--- a/tests/sentry/api/bases/test_team.py
+++ b/tests/sentry/api/bases/test_team.py
@@ -1,9 +1,12 @@
from rest_framework.views import APIView
from sentry.api.bases.team import TeamPermission
+from sentry.models.apitoken import ApiToken
+from sentry.models.team import Team
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers import with_feature
from sentry.testutils.requests import drf_request_from_request
+from sentry.users.models.user import User
class TeamPermissionBase(TestCase):
@@ -12,7 +15,14 @@ def setUp(self) -> None:
self.team = self.create_team(organization=self.org)
super().setUp()
- def has_object_perm(self, method, obj, auth=None, user=None, is_superuser=None):
+ def has_object_perm(
+ self,
+ method: str,
+ obj: Team,
+ auth: ApiToken | None = None,
+ user: User | None = None,
+ is_superuser: bool | None = None,
+ ) -> bool:
perm = TeamPermission()
request = self.make_request(user=user, auth=auth, method=method)
if is_superuser:
diff --git a/tests/sentry/api/endpoints/conftest.py b/tests/sentry/api/endpoints/conftest.py
index 871b2433f9231e..cc23054200205c 100644
--- a/tests/sentry/api/endpoints/conftest.py
+++ b/tests/sentry/api/endpoints/conftest.py
@@ -1,32 +1,33 @@
from uuid import uuid4
import pytest
+from sentry_relay.auth import PublicKey, SecretKey
+
+from sentry.models.relay import Relay
@pytest.fixture
-def key_pair():
+def key_pair() -> tuple[SecretKey, PublicKey]:
from sentry_relay.auth import generate_key_pair
return generate_key_pair()
@pytest.fixture
-def public_key(key_pair):
+def public_key(key_pair: tuple[SecretKey, PublicKey]) -> PublicKey:
return key_pair[1]
@pytest.fixture
-def private_key(key_pair):
+def private_key(key_pair: tuple[SecretKey, PublicKey]) -> SecretKey:
return key_pair[0]
@pytest.fixture
-def relay_id():
+def relay_id() -> str:
return str(uuid4())
@pytest.fixture
-def relay(relay_id, public_key):
- from sentry.models.relay import Relay
-
+def relay(relay_id: str, public_key: PublicKey) -> Relay:
return Relay.objects.create(relay_id=relay_id, public_key=str(public_key), is_internal=True)
diff --git a/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_status.py b/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_status.py
index a6c92d57efc6bd..ff099425c58ab7 100644
--- a/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_status.py
+++ b/tests/sentry/api/endpoints/release_thresholds/test_release_threshold_status.py
@@ -1,5 +1,5 @@
from datetime import UTC, datetime, timedelta
-from unittest.mock import patch
+from unittest.mock import MagicMock, patch
from sentry.models.deploy import Deploy
from sentry.models.environment import Environment
@@ -427,13 +427,13 @@ def test_get_success_project_slug_filter(self) -> None:
)
def test_fetches_relevant_stats(
self,
- mock_is_new_issue_count_healthy,
- mock_get_new_issue_counts,
- mock_is_error_count_healthy,
- mock_get_error_counts,
- mock_is_crash_free_rate_healthy,
- mock_fetch_sessions_data,
- ):
+ mock_is_new_issue_count_healthy: MagicMock,
+ mock_get_new_issue_counts: MagicMock,
+ mock_is_error_count_healthy: MagicMock,
+ mock_get_error_counts: MagicMock,
+ mock_is_crash_free_rate_healthy: MagicMock,
+ mock_fetch_sessions_data: MagicMock,
+ ) -> None:
self.project4 = self.create_project(name="baz", organization=self.organization)
self.release4 = Release.objects.create(version="v4", organization=self.organization)
self.release4.add_project(self.project4)
diff --git a/tests/sentry/api/endpoints/test_admin_project_configs.py b/tests/sentry/api/endpoints/test_admin_project_configs.py
index 34fd0a703f224c..4fbf3fa9797d5f 100644
--- a/tests/sentry/api/endpoints/test_admin_project_configs.py
+++ b/tests/sentry/api/endpoints/test_admin_project_configs.py
@@ -39,7 +39,7 @@ def setUp(self) -> None:
}
)
- def get_url(self, proj_id=None, key=None):
+ def get_url(self, proj_id: str | int | None = None, key: str | int | None = None) -> str:
query = {}
if proj_id is not None:
query["projectId"] = proj_id
diff --git a/tests/sentry/api/endpoints/test_api_tokens.py b/tests/sentry/api/endpoints/test_api_tokens.py
index 9413ad11b479e3..7d1103465a9856 100644
--- a/tests/sentry/api/endpoints/test_api_tokens.py
+++ b/tests/sentry/api/endpoints/test_api_tokens.py
@@ -1,3 +1,5 @@
+from collections.abc import Generator
+
from django.urls import reverse
from pytest import fixture
from rest_framework import status
@@ -229,7 +231,7 @@ class ApiTokensStaffTest(APITestCase):
url = reverse("sentry-api-0-api-tokens")
@fixture(autouse=True)
- def _set_staff_option(self):
+ def _set_staff_option(self) -> Generator[None]:
with override_options({"staff.ga-rollout": True}):
yield
diff --git a/tests/sentry/api/endpoints/test_event_attachment_details.py b/tests/sentry/api/endpoints/test_event_attachment_details.py
index e30a08bd73f130..e9a2276a540205 100644
--- a/tests/sentry/api/endpoints/test_event_attachment_details.py
+++ b/tests/sentry/api/endpoints/test_event_attachment_details.py
@@ -18,7 +18,9 @@
class CreateAttachmentMixin(TestCase):
- def create_attachment(self, content: bytes | None = None, group_id: int | None = None):
+ def create_attachment(
+ self, content: bytes | None = None, group_id: int | None = None
+ ) -> EventAttachment:
self.project = self.create_project()
self.release = self.create_release(self.project, self.user)
min_ago = before_now(minutes=1).isoformat()
diff --git a/tests/sentry/api/endpoints/test_organization_insights_tree.py b/tests/sentry/api/endpoints/test_organization_insights_tree.py
index 039b337a55c5ba..cdc38877dbb684 100644
--- a/tests/sentry/api/endpoints/test_organization_insights_tree.py
+++ b/tests/sentry/api/endpoints/test_organization_insights_tree.py
@@ -30,7 +30,7 @@ def setUp(self) -> None:
self._store_nextjs_function_spans()
self._store_unrelated_spans()
- def _store_nextjs_function_spans(self):
+ def _store_nextjs_function_spans(self) -> None:
descriptions = [
"Page Server Component (/app/dashboard/)",
"Loading Server Component (/app/dashboard/)",
@@ -66,7 +66,7 @@ def _store_nextjs_function_spans(self):
self.store_span(span, is_eap=True)
spans.append(span)
- def _store_unrelated_spans(self):
+ def _store_unrelated_spans(self) -> None:
descriptions = [
"INSERT value INTO table",
"SELECT * FROM table",
diff --git a/tests/sentry/api/endpoints/test_organization_invite_request_index.py b/tests/sentry/api/endpoints/test_organization_invite_request_index.py
index 3fc8fd5d3d4bde..517d818e4e8c6e 100644
--- a/tests/sentry/api/endpoints/test_organization_invite_request_index.py
+++ b/tests/sentry/api/endpoints/test_organization_invite_request_index.py
@@ -6,6 +6,7 @@
from django.urls import reverse
from sentry.models.options.organization_option import OrganizationOption
+from sentry.models.organization import Organization
from sentry.models.organizationmember import InviteStatus, OrganizationMember
from sentry.models.organizationmemberteam import OrganizationMemberTeam
from sentry.testutils.cases import APITestCase, SlackActivityNotificationTest
@@ -17,7 +18,7 @@ class OrganizationInviteRequestListTest(APITestCase):
endpoint = "sentry-api-0-organization-invite-request-index"
@cached_property
- def org(self):
+ def org(self) -> Organization:
return self.create_organization(owner=self.user)
def setUp(self) -> None:
diff --git a/tests/sentry/api/endpoints/test_organization_join_request.py b/tests/sentry/api/endpoints/test_organization_join_request.py
index c8f55809c437d0..ac4de124e66557 100644
--- a/tests/sentry/api/endpoints/test_organization_join_request.py
+++ b/tests/sentry/api/endpoints/test_organization_join_request.py
@@ -28,7 +28,7 @@ def setUp(self) -> None:
self.email = "test@example.com"
@cached_property
- def owner(self):
+ def owner(self) -> OrganizationMember:
return OrganizationMember.objects.get(user_id=self.user.id, organization=self.organization)
def test_invalid_org_slug(self) -> None:
diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py
index f6548143f73b78..8ac47387a7f88b 100644
--- a/tests/sentry/api/endpoints/test_organization_metrics.py
+++ b/tests/sentry/api/endpoints/test_organization_metrics.py
@@ -1,9 +1,11 @@
import copy
import pytest
+from django.http import HttpResponse
from django.urls import reverse
from sentry.models.apitoken import ApiToken
+from sentry.models.organization import Organization
from sentry.silo.base import SiloMode
from sentry.snuba.metrics import (
DERIVED_METRICS,
@@ -43,7 +45,9 @@ class OrganizationMetricsPermissionTest(APITestCase):
def setUp(self) -> None:
self.create_project(name="Bar", slug="bar", teams=[self.team], fire_project_created=True)
- def send_request(self, organization, token, method, endpoint, *args):
+ def send_request(
+ self, organization: Organization, token: ApiToken, method: str, endpoint: str, *args: str
+ ) -> HttpResponse:
url = reverse(endpoint, args=(organization.slug,) + args)
return getattr(self.client, method)(
url, HTTP_AUTHORIZATION=f"Bearer {token.token}", format="json"
diff --git a/tests/sentry/api/endpoints/test_project_alert_rule_task_details.py b/tests/sentry/api/endpoints/test_project_alert_rule_task_details.py
index 271c1c000431fc..32cb2e7917844f 100644
--- a/tests/sentry/api/endpoints/test_project_alert_rule_task_details.py
+++ b/tests/sentry/api/endpoints/test_project_alert_rule_task_details.py
@@ -31,7 +31,7 @@ def setUp(self) -> None:
},
)
- def set_value(self, status, rule_id=None):
+ def set_value(self, status: str, rule_id: int | None = None) -> None:
client = RedisRuleStatus(self.uuid)
client.set_value(status, rule_id)
diff --git a/tests/sentry/api/endpoints/test_project_filters.py b/tests/sentry/api/endpoints/test_project_filters.py
index 212a8d5acc893f..45b377089c76fc 100644
--- a/tests/sentry/api/endpoints/test_project_filters.py
+++ b/tests/sentry/api/endpoints/test_project_filters.py
@@ -1,3 +1,6 @@
+from collections.abc import Iterable
+from typing import Any
+
from sentry.testutils.cases import APITestCase
@@ -8,7 +11,9 @@ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
- def get_filter_spec(self, response_data, spec_id):
+ def get_filter_spec(
+ self, response_data: Iterable[dict[str, Any]], spec_id: str
+ ) -> dict[str, Any]:
"""
looks in a successful response data for the specified spec_id and returns it (if found)
"""
diff --git a/tests/sentry/api/endpoints/test_project_overview.py b/tests/sentry/api/endpoints/test_project_overview.py
index f68630780edd50..97a5a03325f6fb 100644
--- a/tests/sentry/api/endpoints/test_project_overview.py
+++ b/tests/sentry/api/endpoints/test_project_overview.py
@@ -1,16 +1,9 @@
from __future__ import annotations
-import orjson
-
from sentry.models.projectredirect import ProjectRedirect
from sentry.testutils.cases import APITestCase
-def first_symbol_source_id(sources_json):
- sources = orjson.loads(sources_json)
- return sources[0]["id"]
-
-
class ProjectOverviewTest(APITestCase):
endpoint = "sentry-api-0-project-overview"
diff --git a/tests/sentry/api/endpoints/test_project_plugins.py b/tests/sentry/api/endpoints/test_project_plugins.py
index 7bb1aca8c95c6f..14807b7446670a 100644
--- a/tests/sentry/api/endpoints/test_project_plugins.py
+++ b/tests/sentry/api/endpoints/test_project_plugins.py
@@ -1,3 +1,4 @@
+from collections.abc import Container
from unittest.mock import patch
from django.urls import reverse
@@ -38,7 +39,7 @@ def test_get(self) -> None:
assert issues["isHidden"] is True
self.assert_plugin_shape(issues)
- def assert_plugin_shape(self, plugin):
+ def assert_plugin_shape(self, plugin: Container[str]) -> None:
assert "id" in plugin
assert "name" in plugin
assert "shortName" in plugin
diff --git a/tests/sentry/api/endpoints/test_project_stacktrace_coverage.py b/tests/sentry/api/endpoints/test_project_stacktrace_coverage.py
index 988aab3dea52d7..471cf395460f61 100644
--- a/tests/sentry/api/endpoints/test_project_stacktrace_coverage.py
+++ b/tests/sentry/api/endpoints/test_project_stacktrace_coverage.py
@@ -32,7 +32,7 @@ def setUp(self) -> None:
self.organization.save()
@pytest.fixture(autouse=True)
- def inject_fixtures(self, caplog):
+ def inject_fixtures(self, caplog: pytest.LogCaptureFixture) -> None:
self._caplog = caplog
@patch.object(
diff --git a/tests/sentry/api/endpoints/test_project_tagkey_details.py b/tests/sentry/api/endpoints/test_project_tagkey_details.py
index 1d2cdf7cdb0d3b..f687ef01a9ed73 100644
--- a/tests/sentry/api/endpoints/test_project_tagkey_details.py
+++ b/tests/sentry/api/endpoints/test_project_tagkey_details.py
@@ -15,7 +15,7 @@ class ProjectTagKeyDetailsTest(APITestCase, SnubaTestCase):
def test_simple(self) -> None:
project = self.create_project()
- def make_event(i):
+ def make_event(i: int) -> None:
self.store_event(
data={
"tags": {"foo": f"val{i}"},
diff --git a/tests/sentry/api/endpoints/test_project_transaction_names.py b/tests/sentry/api/endpoints/test_project_transaction_names.py
index 97958806995f0e..70e0db0e62bbb9 100644
--- a/tests/sentry/api/endpoints/test_project_transaction_names.py
+++ b/tests/sentry/api/endpoints/test_project_transaction_names.py
@@ -40,17 +40,18 @@ def setUp(self) -> None:
_get_redis_key(ClustererNamespace.TRANSACTIONS, self.project), transaction
)
- def _test_get(self, datasource):
+ def _test_get(self, datasource: str) -> None:
+ request_data: dict[str, str | int | bool | list[int]] = {
+ "datasource": datasource,
+ "project": [self.project.id],
+ "statsPeriod": "1h",
+ "limit": 5,
+ "threshold": 3,
+ "returnAllNames": True,
+ }
response = self.client.get(
self.url,
- data={
- "datasource": datasource,
- "project": [self.project.id],
- "statsPeriod": "1h",
- "limit": 5,
- "threshold": 3,
- "returnAllNames": True,
- },
+ data=request_data,
format="json",
)
diff --git a/tests/sentry/api/endpoints/test_relay_register.py b/tests/sentry/api/endpoints/test_relay_register.py
index 11f3c0bed59b7b..4cedbea086e365 100644
--- a/tests/sentry/api/endpoints/test_relay_register.py
+++ b/tests/sentry/api/endpoints/test_relay_register.py
@@ -4,7 +4,7 @@
from django.conf import settings
from django.urls import reverse
from django.utils import timezone
-from sentry_relay.auth import generate_key_pair
+from sentry_relay.auth import PublicKey, SecretKey, generate_key_pair
from sentry.models.relay import Relay, RelayUsage
from sentry.testutils.cases import APITestCase
@@ -25,7 +25,9 @@ def setUp(self) -> None:
self.path = reverse("sentry-api-0-relay-register-challenge")
- def register_relay(self, key_pair, version, relay_id):
+ def register_relay(
+ self, key_pair: tuple[SecretKey, PublicKey], version: str, relay_id: str | int
+ ) -> None:
private_key = key_pair[0]
public_key = key_pair[1]
diff --git a/tests/sentry/api/endpoints/test_user_subscriptions.py b/tests/sentry/api/endpoints/test_user_subscriptions.py
index b54ad8a6302767..0b5fda776fefdd 100644
--- a/tests/sentry/api/endpoints/test_user_subscriptions.py
+++ b/tests/sentry/api/endpoints/test_user_subscriptions.py
@@ -1,3 +1,5 @@
+from collections.abc import Generator
+
import pytest
from django.conf import settings
@@ -18,7 +20,7 @@ class UserSubscriptionsNewsletterTest(APITestCase):
method = "put"
@pytest.fixture(autouse=True)
- def enable_newsletter(self):
+ def enable_newsletter(self) -> Generator[None]:
with newsletter.backend.test_only__downcast_to(DummyNewsletter).enable():
yield
diff --git a/tests/sentry/api/helpers/test_group_index.py b/tests/sentry/api/helpers/test_group_index.py
index 03059e269892a4..5c69f6c43a9a91 100644
--- a/tests/sentry/api/helpers/test_group_index.py
+++ b/tests/sentry/api/helpers/test_group_index.py
@@ -1276,7 +1276,7 @@ def test_delete_groups_simple(self, send_robust: Mock) -> None:
@patch("sentry.signals.issue_deleted.send_robust")
def test_delete_groups_deletes_seer_records_by_hash(
self, send_robust: Mock, mock_delete_seer_grouping_records_by_hash: MagicMock
- ):
+ ) -> None:
self.project.update_option("sentry:similarity_backfill_completed", int(time()))
groups = [self.create_group(), self.create_group()]
diff --git a/tests/sentry/api/serializers/test_group_stream.py b/tests/sentry/api/serializers/test_group_stream.py
index 602812aa526cdd..4ba38d1a782ed8 100644
--- a/tests/sentry/api/serializers/test_group_stream.py
+++ b/tests/sentry/api/serializers/test_group_stream.py
@@ -34,7 +34,7 @@ def test_environment(self) -> None:
for args, kwargs in get_range.call_args_list:
assert kwargs["environment_ids"] == [environment.id]
- def get_invalid_environment():
+ def get_invalid_environment() -> None:
raise Environment.DoesNotExist()
with mock.patch(
diff --git a/tests/sentry/api/serializers/test_organization_member.py b/tests/sentry/api/serializers/test_organization_member.py
index 7f89f6b83cc2e1..db6bb7619f24b7 100644
--- a/tests/sentry/api/serializers/test_organization_member.py
+++ b/tests/sentry/api/serializers/test_organization_member.py
@@ -9,6 +9,7 @@
)
from sentry.models.organizationmember import InviteStatus
from sentry.testutils.cases import TestCase
+from sentry.users.models.user import User
class OrganizationMemberSerializerTest(TestCase):
@@ -23,7 +24,7 @@ def setUp(self) -> None:
self.project = self.create_project(teams=[self.team])
self.project_2 = self.create_project(teams=[self.team_2])
- def _get_org_members(self):
+ def _get_org_members(self) -> list[User]:
return list(
self.org.member_set.filter(user_id__in=[self.owner_user.id, self.user_2.id]).order_by(
"user_email"
diff --git a/tests/sentry/api/test_utils.py b/tests/sentry/api/test_utils.py
index 33b9fb92c7095a..a6ebbc5bf463f5 100644
--- a/tests/sentry/api/test_utils.py
+++ b/tests/sentry/api/test_utils.py
@@ -124,7 +124,7 @@ def test_logs_error_locally(self, mock_stderr_write: MagicMock) -> None:
def test_passes_along_exception(
self,
mock_capture_exception: MagicMock,
- ):
+ ) -> None:
print_and_capture_handler_exception(self.handler_error)
assert mock_capture_exception.call_args.args[0] == self.handler_error
@@ -133,7 +133,7 @@ def test_passes_along_exception(
def test_merges_handler_context_with_scope(
self,
mock_capture_exception: MagicMock,
- ):
+ ) -> None:
handler_context = {"api_request_URL": "http://dogs.are.great/"}
scope = Scope()
tags = {"maisey": "silly", "charlie": "goofy"}
diff --git a/tests/sentry/auth/test_staff.py b/tests/sentry/auth/test_staff.py
index 670e5d1ebf1ed7..5f6993d0d54557 100644
--- a/tests/sentry/auth/test_staff.py
+++ b/tests/sentry/auth/test_staff.py
@@ -25,6 +25,7 @@
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers.datetime import freeze_time
from sentry.testutils.silo import control_silo_test
+from sentry.users.models.user import User
from sentry.utils.auth import mark_sso_complete
UNSET = object()
@@ -90,36 +91,41 @@ def test_ips(self) -> None:
request = self.make_request(user=self.staff_user)
request.META["REMOTE_ADDR"] = "10.0.0.1"
+ user = request.user
+ assert isinstance(user, User)
+
# no ips = any host
staff = Staff(request, allowed_ips=())
- staff.set_logged_in(request.user)
+ staff.set_logged_in(user)
assert staff.is_active is True
staff = Staff(request, allowed_ips=("127.0.0.1",))
- staff.set_logged_in(request.user)
+ staff.set_logged_in(user)
assert staff.is_active is False
staff = Staff(request, allowed_ips=("10.0.0.1",))
- staff.set_logged_in(request.user)
+ staff.set_logged_in(user)
assert staff.is_active is True
def test_sso(self) -> None:
request = self.make_request(user=self.staff_user)
+ user = request.user
+ assert isinstance(user, User)
# no ips = any host
staff = Staff(request)
- staff.set_logged_in(request.user)
+ staff.set_logged_in(user)
assert staff.is_active is True
# Set ORG_ID so we run the SSO check
with override_org_id(new_org_id=self.organization.id):
staff = Staff(request)
- staff.set_logged_in(request.user)
+ staff.set_logged_in(user)
assert staff.is_active is False
mark_sso_complete(request, self.organization.id)
staff = Staff(request)
- staff.set_logged_in(request.user)
+ staff.set_logged_in(user)
assert staff.is_active is True
def test_valid_data(self) -> None:
diff --git a/tests/sentry/auth/test_superuser.py b/tests/sentry/auth/test_superuser.py
index bd14046753d9f7..a5b43024834d00 100644
--- a/tests/sentry/auth/test_superuser.py
+++ b/tests/sentry/auth/test_superuser.py
@@ -35,6 +35,7 @@
from sentry.testutils.helpers.datetime import freeze_time
from sentry.testutils.helpers.options import override_options
from sentry.testutils.silo import control_silo_test
+from sentry.users.models.user import User
from sentry.utils import json
from sentry.utils.auth import mark_sso_complete
@@ -95,35 +96,39 @@ def build_request(
def test_ips(self) -> None:
request = self.make_request(user=self.superuser)
request.META["REMOTE_ADDR"] = "10.0.0.1"
+ user = request.user
+ assert isinstance(user, User)
# no ips = any host
superuser = Superuser(request, allowed_ips=())
- superuser.set_logged_in(request.user)
+ superuser.set_logged_in(user)
assert superuser.is_active is True
superuser = Superuser(request, allowed_ips=("127.0.0.1",))
- superuser.set_logged_in(request.user)
+ superuser.set_logged_in(user)
assert superuser.is_active is False
superuser = Superuser(request, allowed_ips=("10.0.0.1",))
- superuser.set_logged_in(request.user)
+ superuser.set_logged_in(user)
assert superuser.is_active is True
def test_sso(self) -> None:
request = self.make_request(user=self.superuser)
+ user = request.user
+ assert isinstance(user, User)
# no ips = any host
superuser = Superuser(request, org_id=None)
- superuser.set_logged_in(request.user)
+ superuser.set_logged_in(user)
assert superuser.is_active is True
superuser = Superuser(request, org_id=1)
- superuser.set_logged_in(request.user)
+ superuser.set_logged_in(user)
assert superuser.is_active is False
mark_sso_complete(request, 1)
superuser = Superuser(request, org_id=1)
- superuser.set_logged_in(request.user)
+ superuser.set_logged_in(user)
assert superuser.is_active is True
def test_valid_data(self) -> None:
@@ -185,7 +190,9 @@ def test_su_access_logs(self, logger: MagicMock) -> None:
).encode()
superuser = Superuser(request, org_id=None)
- superuser.set_logged_in(request.user)
+ user = request.user
+ assert isinstance(user, User)
+ superuser.set_logged_in(user)
assert superuser.is_active is True
assert logger.info.call_count == 3
logger.info.assert_any_call(
@@ -206,7 +213,9 @@ def test_su_access_no_request(self) -> None:
superuser = Superuser(request, org_id=None)
with pytest.raises(SuperuserAccessFormInvalidJson):
- superuser.set_logged_in(request.user)
+ user = request.user
+ assert isinstance(user, User)
+ superuser.set_logged_in(user)
assert superuser.is_active is False
@freeze_time(BASETIME + OUTSIDE_PRIVILEGE_ACCESS_EXPIRE_TIME)
@@ -257,7 +266,9 @@ def test_su_access_no_request_user_missing_info(self, logger: MagicMock) -> None
superuser = Superuser(request, org_id=None)
- superuser.set_logged_in(request.user)
+ user = request.user
+ assert isinstance(user, User)
+ superuser.set_logged_in(user)
logger.exception.assert_any_call("superuser.superuser_access.missing_user_info")
@override_settings(SENTRY_SELF_HOSTED=False, VALIDATE_SUPERUSER_ACCESS_CATEGORY_AND_REASON=True)
@@ -270,7 +281,9 @@ def test_su_access_invalid_request_body(
superuser = Superuser(request, org_id=None)
with pytest.raises(SuperuserAccessFormInvalidJson):
- superuser.set_logged_in(request.user)
+ user = request.user
+ assert isinstance(user, User)
+ superuser.set_logged_in(user)
assert superuser.is_active is False
def test_login_saves_session(self) -> None:
@@ -397,7 +410,9 @@ def test_superuser_session_doesnt_need_validation_superuser_prompts(
) -> None:
request = self.make_request(user=self.superuser, method="PUT")
superuser = Superuser(request, org_id=None)
- superuser.set_logged_in(request.user)
+ user = request.user
+ assert isinstance(user, User)
+ superuser.set_logged_in(user)
assert superuser.is_active is True
assert logger.info.call_count == 2
logger.info.assert_any_call(
diff --git a/tests/sentry/auth_v2/endpoints/test_auth_v2_permissions.py b/tests/sentry/auth_v2/endpoints/test_auth_v2_permissions.py
index 04495be62747d5..b554fc2f6b8568 100644
--- a/tests/sentry/auth_v2/endpoints/test_auth_v2_permissions.py
+++ b/tests/sentry/auth_v2/endpoints/test_auth_v2_permissions.py
@@ -1,4 +1,5 @@
from django.test import override_settings
+from rest_framework.request import Request
from rest_framework.views import APIView
from sentry.auth_v2.endpoints.base import AuthV2Permission
@@ -16,7 +17,7 @@ def setUp(self) -> None:
self.auth_v2_permission = AuthV2Permission()
self.user = self.create_user(is_superuser=False, is_staff=False)
- def _make_request(self):
+ def _make_request(self) -> Request:
request = self.make_request(user=self.user)
drf_request = drf_request_from_request(request)
return drf_request
diff --git a/tests/sentry/backup/test_invariants.py b/tests/sentry/backup/test_invariants.py
index 73e52e9f153207..d15f02228ae597 100644
--- a/tests/sentry/backup/test_invariants.py
+++ b/tests/sentry/backup/test_invariants.py
@@ -47,11 +47,11 @@ def test_all_many_to_many_fields_explicitly_set_through_attribute() -> None:
assert visited > 0
-def relocation_scopes_as_set(mr: ModelRelations):
+def relocation_scopes_as_set(mr: ModelRelations) -> set[RelocationScope]:
return mr.relocation_scope if isinstance(mr.relocation_scope, set) else {mr.relocation_scope}
-def validate_dependency_scopes(allowed: set[RelocationScope]):
+def validate_dependency_scopes(allowed: set[RelocationScope]) -> None:
deps = dependencies()
models_being_validated = [
mr.model
diff --git a/tests/sentry/backup/test_rpc.py b/tests/sentry/backup/test_rpc.py
index 4704a5f4302e89..b8bcf489bd3d03 100644
--- a/tests/sentry/backup/test_rpc.py
+++ b/tests/sentry/backup/test_rpc.py
@@ -57,7 +57,7 @@ def test_good_local_retry_idempotent(self) -> None:
option_count = Option.objects.count()
import_chunk_count = RegionImportChunk.objects.count()
- def verify_option_write():
+ def verify_option_write() -> RegionImportChunk:
result = import_export_service.import_by_model(
import_model_name="sentry.option",
scope=RpcImportScope.Global,
@@ -132,7 +132,7 @@ def test_good_remote_retry_idempotent(self) -> None:
control_option_count = ControlOption.objects.count()
import_chunk_count = ControlImportChunk.objects.count()
- def verify_control_option_write():
+ def verify_control_option_write() -> ControlImportChunk:
result = import_export_service.import_by_model(
import_model_name="sentry.controloption",
scope=RpcImportScope.Global,
diff --git a/tests/sentry/buffer/test_redis.py b/tests/sentry/buffer/test_redis.py
index b108f19cd2f5bb..abc4d8d75f4613 100644
--- a/tests/sentry/buffer/test_redis.py
+++ b/tests/sentry/buffer/test_redis.py
@@ -655,5 +655,5 @@ def test_incr_uses_signal_only(self, default_group, task_runner) -> None:
datetime.date.today(),
],
)
-def test_dump_value(value) -> None:
+def test_dump_value(value: datetime.datetime | datetime.date) -> None:
assert RedisBuffer._load_value(json.loads(json.dumps(RedisBuffer._dump_value(value)))) == value
diff --git a/tests/sentry/celery/test_app.py b/tests/sentry/celery/test_app.py
index f9f3890738bb2c..77c2d5064e0db7 100644
--- a/tests/sentry/celery/test_app.py
+++ b/tests/sentry/celery/test_app.py
@@ -35,7 +35,7 @@ def test_validate_scheduled_task_parameters(name: str, entry_data: dict[str, Any
if parameter.kind in (parameter.VAR_POSITIONAL, parameter.VAR_KEYWORD):
continue
# The dynamic sampling tasks splice in a TaskContext via a decorator :(
- if parameter.annotation == TaskContext:
+ if parameter.annotation == TaskContext.__name__:
continue
if parameter.default == parameter.empty:
raise AssertionError(
diff --git a/tests/sentry/core/endpoints/test_organization_environments.py b/tests/sentry/core/endpoints/test_organization_environments.py
index 0d04c397bf21c2..f411605c7c8fb1 100644
--- a/tests/sentry/core/endpoints/test_organization_environments.py
+++ b/tests/sentry/core/endpoints/test_organization_environments.py
@@ -2,6 +2,7 @@
from sentry.api.serializers import serialize
from sentry.models.environment import Environment
+from sentry.models.project import Project
from sentry.testutils.cases import APITestCase
@@ -12,7 +13,7 @@ def setUp(self) -> None:
self.login_as(user=self.user)
@cached_property
- def project(self):
+ def project(self) -> Project:
return self.create_project()
def test_simple(self) -> None:
diff --git a/tests/sentry/core/endpoints/test_project_index.py b/tests/sentry/core/endpoints/test_project_index.py
index 681172b27dcc33..dd98d3f0374bb1 100644
--- a/tests/sentry/core/endpoints/test_project_index.py
+++ b/tests/sentry/core/endpoints/test_project_index.py
@@ -226,7 +226,7 @@ def test_deleted_token_with_internal_integration(self) -> None:
status_code=status.HTTP_401_UNAUTHORIZED,
)
- def get_installed_unpublished_sentry_app_access_token(self):
+ def get_installed_unpublished_sentry_app_access_token(self) -> ApiToken:
self.project = self.create_project(organization=self.organization, teams=[self.team])
sentry_app = self.create_sentry_app(
scopes=("project:read",),
diff --git a/tests/sentry/data_export/test_models.py b/tests/sentry/data_export/test_models.py
index 49ab5af938c8b9..8f5c68904ed071 100644
--- a/tests/sentry/data_export/test_models.py
+++ b/tests/sentry/data_export/test_models.py
@@ -94,7 +94,9 @@ def test_finalize_upload(self) -> None:
tf.seek(0)
self.file1.putfile(tf)
self.data_export.finalize_upload(file=self.file1)
- assert self.data_export._get_file().getfile().read() == self.TEST_STRING
+ file = self.data_export._get_file()
+ assert isinstance(file, File)
+ assert file.getfile().read() == self.TEST_STRING
assert self.data_export.date_finished is not None
assert self.data_export.date_expired is not None
assert self.data_export.date_expired == self.data_export.date_finished + DEFAULT_EXPIRATION
@@ -104,7 +106,9 @@ def test_finalize_upload(self) -> None:
tf.seek(0)
self.file2.putfile(tf)
self.data_export.finalize_upload(file=self.file2, expiration=timedelta(weeks=2))
- assert self.data_export._get_file().getfile().read() == self.TEST_STRING + self.TEST_STRING
+ file = self.data_export._get_file()
+ assert isinstance(file, File)
+ assert file.getfile().read() == self.TEST_STRING + self.TEST_STRING
# Ensure the first file is deleted
assert not File.objects.filter(id=self.file1.id).exists()
assert self.data_export.date_expired == self.data_export.date_finished + timedelta(weeks=2)
diff --git a/tests/sentry/data_export/test_tasks.py b/tests/sentry/data_export/test_tasks.py
index 305912f8b506d3..32d5f8f25f7e26 100644
--- a/tests/sentry/data_export/test_tasks.py
+++ b/tests/sentry/data_export/test_tasks.py
@@ -79,8 +79,8 @@ def test_issue_by_tag_batched(self, emailer: MagicMock) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -112,8 +112,8 @@ def test_no_error_on_retry(self, emailer: MagicMock) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -191,8 +191,8 @@ def test_issue_by_tag_outside_retention(
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -215,8 +215,8 @@ def test_discover_batched(self, emailer: MagicMock) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -250,8 +250,8 @@ def test_discover_respects_selected_environment(self, emailer: MagicMock) -> Non
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -284,8 +284,8 @@ def test_discover_respects_selected_environment_multiple(self, emailer: MagicMoc
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -347,8 +347,8 @@ def test_discover_export_file_too_large(self, emailer) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -377,8 +377,8 @@ def test_discover_export_too_many_rows(self, emailer: MagicMock) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -464,8 +464,8 @@ def test_retries_on_recoverable_snuba_errors(self, mock_query: MagicMock) -> Non
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
file = de._get_file()
+ assert isinstance(file, File)
assert file.headers == {"Content-Type": "text/csv"}
assert file.size is not None
assert file.checksum is not None
@@ -599,8 +599,10 @@ def test_discover_sort(self, emailer: MagicMock) -> None:
with self.tasks():
assemble_download(de.id, batch_size=1)
de = ExportedData.objects.get(id=de.id)
+ file = de._get_file()
+ assert isinstance(file, File)
# Convert raw csv to list of line-strings
- with de._get_file().getfile() as f:
+ with file.getfile() as f:
header, raw1, raw2, raw3 = f.read().strip().split(b"\r\n")
assert header == b"environment"
@@ -651,7 +653,8 @@ def test_discover_large_batch(self, emailer) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
+ file = de._get_file()
+ assert isinstance(file, File)
assert emailer.called
@@ -682,7 +685,8 @@ def test_character_escape(self, emailer: MagicMock) -> None:
assert de.date_finished is not None
assert de.date_expired is not None
assert de.file_id is not None
- assert isinstance(de._get_file(), File)
+ file = de._get_file()
+ assert isinstance(file, File)
assert emailer.called
diff --git a/tests/sentry/db/models/fields/test_jsonfield.py b/tests/sentry/db/models/fields/test_jsonfield.py
index 984186a10700d5..664766f8d92afe 100644
--- a/tests/sentry/db/models/fields/test_jsonfield.py
+++ b/tests/sentry/db/models/fields/test_jsonfield.py
@@ -30,7 +30,7 @@ class Meta:
app_label = "fixtures"
-def default():
+def default() -> dict[str, int]:
return {"x": 2}
diff --git a/tests/sentry/demo_mode/test_tasks.py b/tests/sentry/demo_mode/test_tasks.py
index 2b94038da7ab62..1d0f3ea5ae2af1 100644
--- a/tests/sentry/demo_mode/test_tasks.py
+++ b/tests/sentry/demo_mode/test_tasks.py
@@ -41,7 +41,7 @@ def set_up_artifact_bundle(
organization: Organization,
project: Project,
date_uploaded: datetime | None = None,
- ):
+ ) -> tuple[ArtifactBundle, ProjectArtifactBundle, ReleaseArtifactBundle]:
date_uploaded = date_uploaded or timezone.now()
artifact_bundle = self.create_artifact_bundle(org=organization, date_uploaded=date_uploaded)
project_artifact_bundle = ProjectArtifactBundle.objects.create(
@@ -64,7 +64,7 @@ def set_up_proguard_artifact_release(
organization: Organization,
project: Project,
date_added: datetime | None = None,
- ):
+ ) -> ProguardArtifactRelease:
date_added = date_added or timezone.now()
proguard_artifact_release = ProguardArtifactRelease.objects.create(
organization_id=organization.id,
@@ -76,7 +76,7 @@ def set_up_proguard_artifact_release(
)
return proguard_artifact_release
- def last_three_days(self):
+ def last_three_days(self) -> datetime:
return timezone.now() - timedelta(days=3)
def test_sync_artifact_bundles_no_bundles(self) -> None:
diff --git a/tests/sentry/digests/test_notifications.py b/tests/sentry/digests/test_notifications.py
index f12d20401e65a8..ba5a3995cfd403 100644
--- a/tests/sentry/digests/test_notifications.py
+++ b/tests/sentry/digests/test_notifications.py
@@ -59,7 +59,7 @@ class GroupRecordsTestCase(TestCase):
notification_uuid = str(uuid.uuid4())
@cached_property
- def rule(self):
+ def rule(self) -> Rule:
return self.project.rule_set.all()[0]
def test_success(self) -> None:
@@ -134,20 +134,20 @@ def test_new_style_key_no_identifier(self) -> None:
)
def test_new_style_key_identifier(self) -> None:
- identifier = "123"
+ identifier = 123
assert split_key(
f"mail:p:{self.project.id}:{ActionTargetType.ISSUE_OWNERS.value}:{identifier}"
) == (self.project, ActionTargetType.ISSUE_OWNERS, identifier, None)
def test_fallthrough_choice(self) -> None:
- identifier = "123"
+ identifier = 123
fallthrough_choice = FallthroughChoiceType.ALL_MEMBERS
assert split_key(
f"mail:p:{self.project.id}:{ActionTargetType.ISSUE_OWNERS.value}:{identifier}:{fallthrough_choice.value}"
) == (self.project, ActionTargetType.ISSUE_OWNERS, identifier, fallthrough_choice)
def test_no_fallthrough_choice(self) -> None:
- identifier = "123"
+ identifier = 123
assert split_key(
f"mail:p:{self.project.id}:{ActionTargetType.ISSUE_OWNERS.value}:{identifier}:"
) == (self.project, ActionTargetType.ISSUE_OWNERS, identifier, None)
@@ -161,14 +161,14 @@ def test_no_identifier(self) -> None:
)
def test_no_fallthrough(self) -> None:
- identifier = "123"
+ identifier = 123
assert (
unsplit_key(self.project, ActionTargetType.ISSUE_OWNERS, identifier, None)
== f"mail:p:{self.project.id}:{ActionTargetType.ISSUE_OWNERS.value}:{identifier}:"
)
def test_identifier(self) -> None:
- identifier = "123"
+ identifier = 123
fallthrough_choice = FallthroughChoiceType.ALL_MEMBERS
assert (
unsplit_key(self.project, ActionTargetType.ISSUE_OWNERS, identifier, fallthrough_choice)
diff --git a/tests/sentry/digests/test_utilities.py b/tests/sentry/digests/test_utilities.py
index f2d7bd0a75cccd..59856825b7ca40 100644
--- a/tests/sentry/digests/test_utilities.py
+++ b/tests/sentry/digests/test_utilities.py
@@ -96,7 +96,7 @@ def test_event_to_record_with_legacy_rule_id(self) -> None:
assert record.value.rules == [shadow_rule.id]
-def assert_rule_ids(digest: Digest, expected_rule_ids: list[int]):
+def assert_rule_ids(digest: Digest, expected_rule_ids: list[int]) -> None:
for rule, groups in digest.items():
assert rule.id in expected_rule_ids
@@ -107,7 +107,7 @@ def assert_get_personalized_digests(
expected_result: Mapping[int, Iterable[Event]],
target_type: ActionTargetType = ActionTargetType.ISSUE_OWNERS,
target_identifier: int | None = None,
-):
+) -> None:
result_user_ids = []
participants_by_provider_by_event = get_participants_by_event(
digest,
diff --git a/tests/sentry/discover/test_dashboard_widget_split.py b/tests/sentry/discover/test_dashboard_widget_split.py
index ff1e5e4d64dd91..cd5ca2a2335e6d 100644
--- a/tests/sentry/discover/test_dashboard_widget_split.py
+++ b/tests/sentry/discover/test_dashboard_widget_split.py
@@ -26,7 +26,7 @@
class DashboardWidgetDatasetSplitTestCase(BaseMetricsLayerTestCase, TestCase, SnubaTestCase):
@property
- def now(self):
+ def now(self) -> datetime:
return before_now(minutes=10)
def setUp(self) -> None:
diff --git a/tests/sentry/discover/test_dataset_split.py b/tests/sentry/discover/test_dataset_split.py
index 85d60384228afe..95a07c9ae48391 100644
--- a/tests/sentry/discover/test_dataset_split.py
+++ b/tests/sentry/discover/test_dataset_split.py
@@ -734,7 +734,7 @@ def project(organization: Organization) -> Project:
@django_db_all
def test_dataset_split_decision_inferred_from_query(
query: str, selected_columns: list[str], expected_dataset: int | None, project: Project
-):
+) -> None:
snuba_dataclass = SnubaParams(
start=datetime.now() - timedelta(days=1),
end=datetime.now(),
diff --git a/tests/sentry/dynamic_sampling/tasks/helpers/test_recalibrate_orgs.py b/tests/sentry/dynamic_sampling/tasks/helpers/test_recalibrate_orgs.py
index a485fab8d0b95c..abb54ade14c24b 100644
--- a/tests/sentry/dynamic_sampling/tasks/helpers/test_recalibrate_orgs.py
+++ b/tests/sentry/dynamic_sampling/tasks/helpers/test_recalibrate_orgs.py
@@ -15,7 +15,7 @@
],
)
def test_adjusted_factor(
- prev_factor, actual_rate, desired_sample_rate, expected_adj_factor
+ prev_factor: float, actual_rate: float, desired_sample_rate: float, expected_adj_factor: float
) -> None:
assert (
compute_adjusted_factor(prev_factor, actual_rate, desired_sample_rate)
diff --git a/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_projects.py b/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_projects.py
index 0c18868927a270..12d8db063477da 100644
--- a/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_projects.py
+++ b/tests/sentry/dynamic_sampling/tasks/test_boost_low_volume_projects.py
@@ -1,4 +1,4 @@
-from datetime import timedelta
+from datetime import datetime, timedelta
from typing import cast
from unittest.mock import patch
@@ -35,7 +35,7 @@
@freeze_time(MOCK_DATETIME)
class PrioritiseProjectsSnubaQueryTest(BaseMetricsLayerTestCase, TestCase, SnubaTestCase):
@property
- def now(self):
+ def now(self) -> datetime:
return MOCK_DATETIME
def test_simple_one_org_one_project(self) -> None:
diff --git a/tests/sentry/dynamic_sampling/tasks/test_custom_rule_notifications.py b/tests/sentry/dynamic_sampling/tasks/test_custom_rule_notifications.py
index 5f76a87f3705c3..da10378b86d7da 100644
--- a/tests/sentry/dynamic_sampling/tasks/test_custom_rule_notifications.py
+++ b/tests/sentry/dynamic_sampling/tasks/test_custom_rule_notifications.py
@@ -9,12 +9,13 @@
get_num_samples,
)
from sentry.models.dynamicsampling import CustomDynamicSamplingRule
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import SnubaTestCase, TestCase
from sentry.utils.samples import load_data
class CustomRuleNotificationsTest(TestCase, SnubaTestCase):
- def create_transaction(self):
+ def create_transaction(self) -> Event:
data = load_data("transaction")
return self.store_event(data, project_id=self.project.id)
diff --git a/tests/sentry/dynamic_sampling/tasks/test_sliding_window.py b/tests/sentry/dynamic_sampling/tasks/test_sliding_window.py
index 96439dbd150581..c32863506854d3 100644
--- a/tests/sentry/dynamic_sampling/tasks/test_sliding_window.py
+++ b/tests/sentry/dynamic_sampling/tasks/test_sliding_window.py
@@ -1,4 +1,4 @@
-from datetime import timedelta
+from datetime import datetime, timedelta
from django.utils import timezone
@@ -15,7 +15,7 @@
@freeze_time(MOCK_DATETIME)
class SlidingWindowOrgSnubaQueryTest(BaseMetricsLayerTestCase, TestCase, SnubaTestCase):
@property
- def now(self):
+ def now(self) -> datetime:
return MOCK_DATETIME
def test_query_with_one_org_and_multiple_projects(self) -> None:
diff --git a/tests/sentry/event_manager/test_priority.py b/tests/sentry/event_manager/test_priority.py
index a957686591268d..0467b249904267 100644
--- a/tests/sentry/event_manager/test_priority.py
+++ b/tests/sentry/event_manager/test_priority.py
@@ -34,7 +34,7 @@ def test_flag_on(self, mock_get_severity_score: MagicMock) -> None:
@patch("sentry.event_manager._get_priority_for_group", return_value=PriorityLevel.HIGH)
def test_get_priority_for_group_not_called_on_second_event(
self, mock_get_priority_for_group: MagicMock, mock_get_severity_score: MagicMock
- ):
+ ) -> None:
event = EventManager(make_event(level=logging.FATAL, platform="python")).save(
self.project.id
)
diff --git a/tests/sentry/event_manager/test_severity.py b/tests/sentry/event_manager/test_severity.py
index 61ff50fc666da8..875a96cd355fb3 100644
--- a/tests/sentry/event_manager/test_severity.py
+++ b/tests/sentry/event_manager/test_severity.py
@@ -27,7 +27,7 @@
pytestmark = [requires_snuba]
-def make_event(**kwargs) -> dict[str, Any]:
+def make_event(**kwargs: Any) -> dict[str, Any]:
result: dict[str, Any] = {
"event_id": uuid.uuid1().hex,
}
@@ -362,7 +362,7 @@ def test_flag_off(self, mock_get_severity_score: MagicMock) -> None:
@patch("sentry.event_manager._get_severity_score", return_value=(0.1121, "ml"))
def test_get_severity_score_not_called_on_second_event(
self, mock_get_severity_score: MagicMock
- ):
+ ) -> None:
nope_event = EventManager(
make_event(
exception={"values": [{"type": "NopeError", "value": "Nopey McNopeface"}]},
diff --git a/tests/sentry/feedback/__init__.py b/tests/sentry/feedback/__init__.py
index 66b02ab024253b..f32c198fcd32a2 100644
--- a/tests/sentry/feedback/__init__.py
+++ b/tests/sentry/feedback/__init__.py
@@ -1,11 +1,12 @@
import time
from datetime import UTC, datetime
+from typing import Any
from openai.types.chat.chat_completion import ChatCompletion, Choice
from openai.types.chat.chat_completion_message import ChatCompletionMessage
-def create_dummy_openai_response(*args, **kwargs):
+def create_dummy_openai_response(*args: object, **kwargs: Any) -> ChatCompletion:
return ChatCompletion(
id="test",
choices=[
@@ -31,7 +32,7 @@ def create_dummy_openai_response(*args, **kwargs):
)
-def mock_feedback_event(project_id: int, dt: datetime | None = None):
+def mock_feedback_event(project_id: int, dt: datetime | None = None) -> dict[str, Any]:
if dt is None:
dt = datetime.now(UTC)
diff --git a/tests/sentry/feedback/endpoints/test_project_user_reports.py b/tests/sentry/feedback/endpoints/test_project_user_reports.py
index 08f9602a791f87..5f6cd9810d5679 100644
--- a/tests/sentry/feedback/endpoints/test_project_user_reports.py
+++ b/tests/sentry/feedback/endpoints/test_project_user_reports.py
@@ -11,7 +11,7 @@
from sentry.testutils.helpers.datetime import before_now
-def _make_url(project: Project):
+def _make_url(project: Project) -> str:
return f"/api/0/projects/{project.organization.slug}/{project.slug}/user-feedback/"
diff --git a/tests/sentry/feedback/lib/test_feedback_query.py b/tests/sentry/feedback/lib/test_feedback_query.py
index 236aa8481ad2dd..c6fcb25514763e 100644
--- a/tests/sentry/feedback/lib/test_feedback_query.py
+++ b/tests/sentry/feedback/lib/test_feedback_query.py
@@ -27,13 +27,13 @@ class FeedbackData(TypedDict):
@django_db_all
class TestFeedbackQuery(APITestCase, SnubaTestCase, SearchIssueTestMixin):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project = self.create_project()
self.organization = self.project.organization
self._create_standard_feedbacks()
- def _create_standard_feedbacks(self):
+ def _create_standard_feedbacks(self) -> None:
"""Create a standard set of feedbacks for all tests to use."""
feedback_data: list[FeedbackData] = [
{
@@ -100,7 +100,7 @@ def _create_standard_feedbacks(self):
override_occurrence_data={"type": FeedbackGroup.type_id},
)
- def test_get_ai_labels_from_tags_retrieves_labels_correctly(self):
+ def test_get_ai_labels_from_tags_retrieves_labels_correctly(self) -> None:
# Create a query using the function to retrieve AI labels
query = Query(
match=Entity(Dataset.IssuePlatform.value),
@@ -138,7 +138,7 @@ def test_get_ai_labels_from_tags_retrieves_labels_correctly(self):
}
assert all_labels == expected_labels
- def test_query_top_ai_labels_by_feedback_count(self):
+ def test_query_top_ai_labels_by_feedback_count(self) -> None:
result = query_top_ai_labels_by_feedback_count(
organization_id=self.organization.id,
project_ids=[self.project.id],
@@ -196,7 +196,7 @@ def test_query_top_ai_labels_by_feedback_count(self):
assert len(result_no_project) == 0
- def test_query_recent_feedbacks_with_ai_labels(self):
+ def test_query_recent_feedbacks_with_ai_labels(self) -> None:
result = query_recent_feedbacks_with_ai_labels(
organization_id=self.organization.id,
project_ids=[self.project.id],
@@ -277,7 +277,7 @@ def test_query_recent_feedbacks_with_ai_labels(self):
assert len(result_no_project) == 0
- def test_query_label_group_counts(self):
+ def test_query_label_group_counts(self) -> None:
label_groups = [
["User Interface", "Performance"],
["Authentication", "Security"],
diff --git a/tests/sentry/feedback/usecases/conftest.py b/tests/sentry/feedback/usecases/conftest.py
index df315ade3df0cd..ba92dbbdbe2a11 100644
--- a/tests/sentry/feedback/usecases/conftest.py
+++ b/tests/sentry/feedback/usecases/conftest.py
@@ -1,10 +1,12 @@
+from collections.abc import Callable, Generator
+from typing import Any
from unittest import mock
import pytest
@pytest.fixture
-def mock_produce_occurrence_to_kafka():
+def mock_produce_occurrence_to_kafka() -> Generator[mock.MagicMock]:
with mock.patch(
"sentry.feedback.usecases.ingest.create_feedback.produce_occurrence_to_kafka"
) as mck:
@@ -12,7 +14,9 @@ def mock_produce_occurrence_to_kafka():
@pytest.fixture(autouse=True)
-def llm_settings(set_sentry_option):
+def llm_settings(
+ set_sentry_option: Callable[[str, dict[str, dict[str, Any]]], Any],
+) -> Generator[None]:
with (
set_sentry_option(
"llm.provider.options",
diff --git a/tests/sentry/feedback/usecases/ingest/test_save_event_feedback.py b/tests/sentry/feedback/usecases/ingest/test_save_event_feedback.py
index 348f6895000d2c..c6e6fe3d78d501 100644
--- a/tests/sentry/feedback/usecases/ingest/test_save_event_feedback.py
+++ b/tests/sentry/feedback/usecases/ingest/test_save_event_feedback.py
@@ -40,7 +40,7 @@ def test_save_event_feedback_no_associated_event(
["number", "iso"],
)
def test_save_event_feedback_with_associated_event(
- default_project, mock_create_feedback_issue, timestamp_format
+ default_project, mock_create_feedback_issue, timestamp_format: str
):
environment = Factories.create_environment(default_project, name="production")
assoc_event = Factories.store_event(
diff --git a/tests/sentry/flags/endpoints/test_hooks.py b/tests/sentry/flags/endpoints/test_hooks.py
index d1bea10b555deb..7e0c88acd2c5cb 100644
--- a/tests/sentry/flags/endpoints/test_hooks.py
+++ b/tests/sentry/flags/endpoints/test_hooks.py
@@ -23,7 +23,7 @@ def setUp(self) -> None:
self.url = reverse(self.endpoint, args=(self.organization.slug, "launchdarkly"))
@property
- def features(self):
+ def features(self) -> dict[str, bool]:
return {}
def test_generic_post_create(self, mock_incr: MagicMock) -> None:
diff --git a/tests/sentry/flags/endpoints/test_logs.py b/tests/sentry/flags/endpoints/test_logs.py
index 8b25a190d76184..7b156d1c8ba6a9 100644
--- a/tests/sentry/flags/endpoints/test_logs.py
+++ b/tests/sentry/flags/endpoints/test_logs.py
@@ -15,7 +15,7 @@ def setUp(self) -> None:
self.url = reverse(self.endpoint, args=(self.organization.id,))
@property
- def features(self):
+ def features(self) -> dict[str, bool]:
return {}
def test_get(self) -> None:
@@ -378,7 +378,7 @@ def setUp(self) -> None:
self.url = reverse(self.endpoint, args=(self.organization.id, self.flag.id))
@property
- def features(self):
+ def features(self) -> dict[str, bool]:
return {}
def test_get(self) -> None:
diff --git a/tests/sentry/flags/endpoints/test_secrets.py b/tests/sentry/flags/endpoints/test_secrets.py
index b734769fe6360d..a117f4d6e4c44f 100644
--- a/tests/sentry/flags/endpoints/test_secrets.py
+++ b/tests/sentry/flags/endpoints/test_secrets.py
@@ -19,7 +19,7 @@ def setUp(self) -> None:
self.url = reverse(self.endpoint, args=(self.organization.id,))
@property
- def features(self):
+ def features(self) -> dict[str, bool]:
return {}
def test_browse(self) -> None:
@@ -286,7 +286,7 @@ def setUp(self) -> None:
self.url = reverse(self.endpoint, args=(self.organization.id, self.obj.id))
@property
- def features(self):
+ def features(self) -> dict[str, bool]:
return {}
def test_delete(self) -> None:
diff --git a/tests/sentry/identity/test_oauth2.py b/tests/sentry/identity/test_oauth2.py
index 3bc995013bea91..3df0c8881e6d90 100644
--- a/tests/sentry/identity/test_oauth2.py
+++ b/tests/sentry/identity/test_oauth2.py
@@ -29,7 +29,7 @@ def setUp(self) -> None:
self.request = RequestFactory().get("/")
self.request.subdomain = None
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
sentry.identity.unregister(DummyProvider)
@@ -167,7 +167,7 @@ def setUp(self) -> None:
self.request.session = Client().session
self.request.subdomain = None
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
sentry.identity.unregister(DummyProvider)
diff --git a/tests/sentry/incidents/endpoints/serializers/test_alert_rule.py b/tests/sentry/incidents/endpoints/serializers/test_alert_rule.py
index 0129264ab1fddc..f1c54d11096d78 100644
--- a/tests/sentry/incidents/endpoints/serializers/test_alert_rule.py
+++ b/tests/sentry/incidents/endpoints/serializers/test_alert_rule.py
@@ -1,3 +1,4 @@
+from typing import Any
from unittest.mock import MagicMock, patch
from sentry.api.serializers import serialize
@@ -82,7 +83,7 @@ def assert_alert_rule_serialized(
else:
assert result["comparisonDelta"] is None
- def create_issue_alert_rule(self, data):
+ def create_issue_alert_rule(self, data: dict[str, Any]) -> Rule:
"""data format
{
"project": project
diff --git a/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py b/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py
index cb5553ec3510fb..ca43a878bacd7c 100644
--- a/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py
+++ b/tests/sentry/incidents/endpoints/test_organization_combined_rule_index_endpoint.py
@@ -50,7 +50,7 @@ def setUp(self) -> None:
self.login_as(self.user)
self.combined_rules_url = f"/api/0/organizations/{self.organization.slug}/combined-rules/"
- def setup_rules(self):
+ def setup_rules(self) -> None:
self.alert_rule = self.create_alert_rule(
name="alert rule",
organization=self.organization,
diff --git a/tests/sentry/incidents/endpoints/test_organization_ondemand_rule_stats_endpoint.py b/tests/sentry/incidents/endpoints/test_organization_ondemand_rule_stats_endpoint.py
index dc9ba0921fa2e1..a69e6dd0f68383 100644
--- a/tests/sentry/incidents/endpoints/test_organization_ondemand_rule_stats_endpoint.py
+++ b/tests/sentry/incidents/endpoints/test_organization_ondemand_rule_stats_endpoint.py
@@ -37,7 +37,7 @@ def setUp(self) -> None:
self.login_as(user=self.user)
- def do_success_request(self, extra_features: dict[str, bool] | None = None):
+ def do_success_request(self, extra_features: dict[str, bool] | None = None) -> dict[str, int]:
_features = {**self.features, **(extra_features or {})}
with self.feature(_features):
response = self.get_success_response(self.organization.slug, project_id=self.project.id)
diff --git a/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py b/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py
index 4fc68b2e78f0e4..62c9edb19577ce 100644
--- a/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py
+++ b/tests/sentry/incidents/endpoints/test_project_alert_rule_details.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+from typing import Any
+
from sentry import audit_log
from sentry.api.serializers import serialize
from sentry.deletions.tasks.scheduled import run_scheduled_deletions
@@ -43,7 +45,7 @@ def test_simple(self) -> None:
class AlertRuleDetailsPutEndpointTest(AlertRuleDetailsBase):
method = "put"
- def get_serialized_alert_rule(self):
+ def get_serialized_alert_rule(self) -> dict[str, Any]:
# Only call after calling self.alert_rule to create it.
original_endpoint = self.endpoint
original_method = self.method
diff --git a/tests/sentry/incidents/models/test_alert_rule.py b/tests/sentry/incidents/models/test_alert_rule.py
index c8e63f77306d59..5d23d6edec05e0 100644
--- a/tests/sentry/incidents/models/test_alert_rule.py
+++ b/tests/sentry/incidents/models/test_alert_rule.py
@@ -1,4 +1,5 @@
import unittest
+from collections.abc import Generator
from unittest import mock
from unittest.mock import Mock
@@ -242,7 +243,7 @@ class AlertRuleTriggerActionActivateBaseTest:
def setUp(self) -> None:
self.suspended_registry = TemporaryAlertRuleTriggerActionRegistry.suspend()
- def tearDown(self):
+ def tearDown(self) -> None:
self.suspended_registry.restore()
def test_no_handler(self) -> None:
@@ -282,14 +283,14 @@ class AlertRuleTriggerActionResolveTest(AlertRuleTriggerActionActivateBaseTest,
class AlertRuleTriggerActionActivateTest(TestCase):
@pytest.fixture(autouse=True)
- def _setup_metric_patch(self):
+ def _setup_metric_patch(self) -> Generator[None]:
with mock.patch("sentry.incidents.models.alert_rule.metrics") as self.metrics:
yield
def setUp(self) -> None:
self.suspended_registry = TemporaryAlertRuleTriggerActionRegistry.suspend()
- def tearDown(self):
+ def tearDown(self) -> None:
self.suspended_registry.restore()
def test_unhandled(self) -> None:
diff --git a/tests/sentry/incidents/subscription_processor/test_subscription_processor.py b/tests/sentry/incidents/subscription_processor/test_subscription_processor.py
index 6f976d77b58c48..fb1e65c13b44ee 100644
--- a/tests/sentry/incidents/subscription_processor/test_subscription_processor.py
+++ b/tests/sentry/incidents/subscription_processor/test_subscription_processor.py
@@ -93,7 +93,7 @@ def setUp(self) -> None:
self._run_tasks = self.tasks()
self._run_tasks.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self.suspended_registry.restore()
self._run_tasks.__exit__(None, None, None)
diff --git a/tests/sentry/incidents/subscription_processor/test_subscription_processor_base.py b/tests/sentry/incidents/subscription_processor/test_subscription_processor_base.py
index fd502c0aa0f531..91385594a024fe 100644
--- a/tests/sentry/incidents/subscription_processor/test_subscription_processor_base.py
+++ b/tests/sentry/incidents/subscription_processor/test_subscription_processor_base.py
@@ -33,12 +33,12 @@ def _setup_metrics_patch(self):
with mock.patch("sentry.incidents.subscription_processor.metrics") as self.metrics:
yield
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self._run_tasks = self.tasks()
self._run_tasks.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self._run_tasks.__exit__(None, None, None)
diff --git a/tests/sentry/ingest/ingest_consumer/test_dlq.py b/tests/sentry/ingest/ingest_consumer/test_dlq.py
index 52c4be59f7cfee..f6f3478f125d9e 100644
--- a/tests/sentry/ingest/ingest_consumer/test_dlq.py
+++ b/tests/sentry/ingest/ingest_consumer/test_dlq.py
@@ -36,7 +36,7 @@ def make_message(payload: bytes, partition: Partition, offset: int) -> Message:
],
)
@django_db_all
-def test_dlq_invalid_messages(factories, topic_name, consumer_type) -> None:
+def test_dlq_invalid_messages(factories, topic_name: str, consumer_type: ConsumerType) -> None:
# Test is for all consumers that share the IngestStrategyFactory
# Feedback test is located in feedback/consumers
organization = factories.create_organization()
diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py
index 0f6b89d312cc75..5dd98f49b43875 100644
--- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py
+++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_kafka.py
@@ -69,7 +69,7 @@ def inner(type, project=default_project):
@pytest.fixture
-def random_group_id():
+def random_group_id() -> str:
return f"test-consumer-{random.randint(0, 2 ** 16)}"
diff --git a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_transactions.py b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_transactions.py
index c6080f2946d41e..9ec688ce13b872 100644
--- a/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_transactions.py
+++ b/tests/sentry/ingest/ingest_consumer/test_ingest_consumer_transactions.py
@@ -65,7 +65,7 @@ def get_test_message(project):
@pytest.fixture
-def random_group_id():
+def random_group_id() -> str:
return f"test-consumer-{random.randint(0, 2 ** 16)}"
diff --git a/tests/sentry/integrations/api/endpoints/test_organization_integration_migrate_opsgenie.py b/tests/sentry/integrations/api/endpoints/test_organization_integration_migrate_opsgenie.py
index 0fda7225948e8e..5296f7ce23a5ef 100644
--- a/tests/sentry/integrations/api/endpoints/test_organization_integration_migrate_opsgenie.py
+++ b/tests/sentry/integrations/api/endpoints/test_organization_integration_migrate_opsgenie.py
@@ -9,7 +9,7 @@ def setUp(self) -> None:
self.login_as(self.user)
self.organization = self.create_organization(owner=self.user)
- def get_path(self, integration_id):
+ def get_path(self, integration_id) -> str:
return f"/api/0/organizations/{self.organization.slug}/integrations/{integration_id}/migrate-opsgenie/"
def test_no_integration(self) -> None:
diff --git a/tests/sentry/integrations/bitbucket/test_installed.py b/tests/sentry/integrations/bitbucket/test_installed.py
index 40388d5b863f81..657230ce4527bd 100644
--- a/tests/sentry/integrations/bitbucket/test_installed.py
+++ b/tests/sentry/integrations/bitbucket/test_installed.py
@@ -94,7 +94,7 @@ def setUp(self) -> None:
plugins.register(BitbucketPlugin)
- def tearDown(self):
+ def tearDown(self) -> None:
plugins.unregister(BitbucketPlugin)
super().tearDown()
diff --git a/tests/sentry/integrations/bitbucket/test_issues.py b/tests/sentry/integrations/bitbucket/test_issues.py
index 2776f81041babd..7709424e7c8e7a 100644
--- a/tests/sentry/integrations/bitbucket/test_issues.py
+++ b/tests/sentry/integrations/bitbucket/test_issues.py
@@ -50,7 +50,7 @@ def setUp(self) -> None:
("myaccount/repo2", "myaccount/repo2"),
]
- def build_autocomplete_url(self):
+ def build_autocomplete_url(self) -> str:
return "/extensions/bitbucket/search/baz/%d/" % self.integration.id
@responses.activate
diff --git a/tests/sentry/integrations/bitbucket_server/test_repository.py b/tests/sentry/integrations/bitbucket_server/test_repository.py
index 2201fb54e88e9b..3c12996e126110 100644
--- a/tests/sentry/integrations/bitbucket_server/test_repository.py
+++ b/tests/sentry/integrations/bitbucket_server/test_repository.py
@@ -17,6 +17,7 @@
REPO,
)
from sentry.integrations.bitbucket_server.repository import BitbucketServerRepositoryProvider
+from sentry.integrations.models.integration import Integration
from sentry.models.repository import Repository
from sentry.shared_integrations.exceptions import IntegrationError
from sentry.silo.base import SiloMode
@@ -27,7 +28,7 @@
class BitbucketServerRepositoryProviderTest(APITestCase):
@cached_property
- def integration(self):
+ def integration(self) -> Integration:
with assume_test_silo_mode(SiloMode.CONTROL):
integration = self.create_provider_integration(
provider="bitbucket_server",
@@ -53,7 +54,7 @@ def integration(self):
return integration
@cached_property
- def provider(self):
+ def provider(self) -> BitbucketServerRepositoryProvider:
return BitbucketServerRepositoryProvider("bitbucket_server")
def test_get_client(self) -> None:
@@ -223,9 +224,9 @@ def test_build_repository_config(self) -> None:
}
data["identifier"] = full_repo_name
- data = self.provider.build_repository_config(organization, data)
+ repo_config_data = self.provider.build_repository_config(organization, data)
- assert data == {
+ assert repo_config_data == {
"name": full_repo_name,
"external_id": str(REPO["id"]),
"url": "https://bitbucket.example.com/projects/laurynsentry/repos/helloworld/browse",
diff --git a/tests/sentry/integrations/github/test_integration.py b/tests/sentry/integrations/github/test_integration.py
index ba28cb333fa16e..d8b1c576b458b7 100644
--- a/tests/sentry/integrations/github/test_integration.py
+++ b/tests/sentry/integrations/github/test_integration.py
@@ -118,7 +118,7 @@ def setUp(self) -> None:
self._stub_github()
plugins.register(GitHubPlugin)
- def tearDown(self):
+ def tearDown(self) -> None:
responses.reset()
plugins.unregister(GitHubPlugin)
super().tearDown()
diff --git a/tests/sentry/integrations/github/test_repository.py b/tests/sentry/integrations/github/test_repository.py
index 8212954243db86..85cde7c1136add 100644
--- a/tests/sentry/integrations/github/test_repository.py
+++ b/tests/sentry/integrations/github/test_repository.py
@@ -35,16 +35,16 @@ def setUp(self) -> None:
},
)
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
responses.reset()
@cached_property
- def provider(self):
+ def provider(self) -> GitHubRepositoryProvider:
return GitHubRepositoryProvider("integrations:github")
@cached_property
- def repository(self):
+ def repository(self) -> Repository:
# TODO: Refactor this out with a call to the relevant factory if possible to avoid
# explicitly having to exempt it from silo limits
with assume_test_silo_mode(SiloMode.REGION):
@@ -67,8 +67,8 @@ def test_build_repository_config(self) -> None:
"external_id": "654321",
"integration_id": integration.id,
}
- data = self.provider.build_repository_config(organization, data)
- assert data == {
+ repo_config_data = self.provider.build_repository_config(organization, data)
+ assert repo_config_data == {
"config": {"name": "getsentry/example-repo"},
"external_id": "654321",
"integration_id": integration.id,
diff --git a/tests/sentry/integrations/github/test_search.py b/tests/sentry/integrations/github/test_search.py
index a21ac58d5c5174..adf7f2772b1583 100644
--- a/tests/sentry/integrations/github/test_search.py
+++ b/tests/sentry/integrations/github/test_search.py
@@ -6,6 +6,7 @@
from django.urls import reverse
from sentry.integrations.github.integration import build_repository_query
+from sentry.integrations.models.integration import Integration
from sentry.integrations.models.organization_integration import OrganizationIntegration
from sentry.integrations.source_code_management.metrics import SourceCodeSearchEndpointHaltReason
from sentry.integrations.types import EventLifecycleOutcome
@@ -26,7 +27,7 @@ class GithubSearchTest(APITestCase):
provider = "github"
base_url = "https://api.github.com"
- def _create_integration(self):
+ def _create_integration(self) -> Integration:
future = datetime.now() + timedelta(hours=1)
return self.create_provider_integration(
provider=self.provider,
diff --git a/tests/sentry/integrations/github_enterprise/test_repository.py b/tests/sentry/integrations/github_enterprise/test_repository.py
index 4ccb81c5850545..347c24e16807bf 100644
--- a/tests/sentry/integrations/github_enterprise/test_repository.py
+++ b/tests/sentry/integrations/github_enterprise/test_repository.py
@@ -25,7 +25,7 @@ def setUp(self) -> None:
)
@cached_property
- def provider(self):
+ def provider(self) -> GitHubEnterpriseRepositoryProvider:
return GitHubEnterpriseRepositoryProvider("integrations:github_enterprise")
@responses.activate
@@ -38,8 +38,8 @@ def test_build_repository_config(self) -> None:
"external_id": "654321",
"integration_id": self.integration.id,
}
- data = self.provider.build_repository_config(organization, data)
- assert data == {
+ repo_config_data = self.provider.build_repository_config(organization, data)
+ assert repo_config_data == {
"config": {"name": "getsentry/example-repo"},
"external_id": "654321",
"integration_id": self.integration.id,
diff --git a/tests/sentry/integrations/github_enterprise/test_search.py b/tests/sentry/integrations/github_enterprise/test_search.py
index 7ff1b386ccd123..34150bf96c33bd 100644
--- a/tests/sentry/integrations/github_enterprise/test_search.py
+++ b/tests/sentry/integrations/github_enterprise/test_search.py
@@ -1,5 +1,6 @@
from datetime import datetime, timedelta
+from sentry.integrations.models.integration import Integration
from sentry.testutils.silo import control_silo_test
from ..github import test_search
@@ -12,7 +13,7 @@ class GithubEnterpriseSearchTest(test_search.GithubSearchTest):
provider = "github_enterprise"
base_url = "https://github.example.org/api/v3"
- def _create_integration(self):
+ def _create_integration(self) -> Integration:
future = datetime.now() + timedelta(hours=1)
return self.create_provider_integration(
provider=self.provider,
diff --git a/tests/sentry/integrations/gitlab/test_client.py b/tests/sentry/integrations/gitlab/test_client.py
index ca0d8a4fe56aad..b205c8a8ad8d24 100644
--- a/tests/sentry/integrations/gitlab/test_client.py
+++ b/tests/sentry/integrations/gitlab/test_client.py
@@ -58,7 +58,7 @@ class GitlabRefreshAuthTest(GitLabClientTest):
def setUp(self) -> None:
super().setUp()
- def tearDown(self):
+ def tearDown(self) -> None:
responses.reset()
def make_users_request(self):
diff --git a/tests/sentry/integrations/gitlab/test_repository.py b/tests/sentry/integrations/gitlab/test_repository.py
index 2b0e371ae10334..9d1c85d0c7ab43 100644
--- a/tests/sentry/integrations/gitlab/test_repository.py
+++ b/tests/sentry/integrations/gitlab/test_repository.py
@@ -58,7 +58,7 @@ def setUp(self) -> None:
def provider(self):
return GitlabRepositoryProvider("gitlab")
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
responses.reset()
diff --git a/tests/sentry/integrations/jira/test_client.py b/tests/sentry/integrations/jira/test_client.py
index 6393b905bc7787..f87b5b60ba7ce7 100644
--- a/tests/sentry/integrations/jira/test_client.py
+++ b/tests/sentry/integrations/jira/test_client.py
@@ -16,7 +16,7 @@
secret = "hush-hush-im-invisible"
-def mock_finalize_request(prepared_request: PreparedRequest):
+def mock_finalize_request(prepared_request: PreparedRequest) -> PreparedRequest:
prepared_request.headers["Authorization"] = f"JWT {mock_jwt}"
return prepared_request
diff --git a/tests/sentry/integrations/jira_server/test_ticket_action.py b/tests/sentry/integrations/jira_server/test_ticket_action.py
index 548162b37dd6fa..02882196c3800b 100644
--- a/tests/sentry/integrations/jira_server/test_ticket_action.py
+++ b/tests/sentry/integrations/jira_server/test_ticket_action.py
@@ -46,7 +46,7 @@ def setUp(self) -> None:
self.installation = self.integration.get_installation(self.organization.id)
self.login_as(user=self.user)
- def trigger(self, event: GroupEvent, rule_object: Rule):
+ def trigger(self, event: GroupEvent, rule_object: Rule) -> object:
action = rule_object.data.get("actions", ())[0]
action_inst = self.get_rule(data=action, rule=rule_object)
results = list(action_inst.after(event=event))
@@ -55,7 +55,7 @@ def trigger(self, event: GroupEvent, rule_object: Rule):
rule_future = RuleFuture(rule=rule_object, kwargs=results[0].kwargs)
return results[0].callback(event, futures=[rule_future])
- def get_key(self, event: GroupEvent):
+ def get_key(self, event: GroupEvent) -> str:
return ExternalIssue.objects.get_linked_issues(event, self.integration).values_list(
"key", flat=True
)[0]
diff --git a/tests/sentry/integrations/jira_server/test_webhooks.py b/tests/sentry/integrations/jira_server/test_webhooks.py
index 2f7edf4048c37b..d0d5a2601fbca0 100644
--- a/tests/sentry/integrations/jira_server/test_webhooks.py
+++ b/tests/sentry/integrations/jira_server/test_webhooks.py
@@ -23,7 +23,7 @@ def setUp(self) -> None:
self.integration = get_integration(self.organization, self.user)
@property
- def jwt_token(self):
+ def jwt_token(self) -> str:
return jwt.encode(
{"id": self.integration.external_id}, self.integration.metadata["webhook_secret"]
)
diff --git a/tests/sentry/integrations/middleware/hybrid_cloud/test_base.py b/tests/sentry/integrations/middleware/hybrid_cloud/test_base.py
index 4b4ed040dc3fb8..890f9a8fae1adb 100644
--- a/tests/sentry/integrations/middleware/hybrid_cloud/test_base.py
+++ b/tests/sentry/integrations/middleware/hybrid_cloud/test_base.py
@@ -19,7 +19,7 @@
from sentry.types.region import Region, RegionCategory
-def error_regions(region: Region, invalid_region_names: Iterable[str]):
+def error_regions(region: Region, invalid_region_names: Iterable[str]) -> str:
if region.name in invalid_region_names:
raise SiloLimit.AvailabilityError("Region is offline!")
return region.name
diff --git a/tests/sentry/integrations/msteams/test_notifications.py b/tests/sentry/integrations/msteams/test_notifications.py
index 4a3d40eed6274b..c19828e824328e 100644
--- a/tests/sentry/integrations/msteams/test_notifications.py
+++ b/tests/sentry/integrations/msteams/test_notifications.py
@@ -41,7 +41,7 @@
)
@patch("sentry.integrations.msteams.MsTeamsClientABC.send_card")
class MSTeamsNotificationTest(TestCase):
- def _install_msteams_personal(self):
+ def _install_msteams_personal(self) -> None:
self.tenant_id = "50cccd00-7c9c-4b32-8cda-58a084f9334a"
self.integration = self.create_integration(
self.organization,
@@ -63,7 +63,7 @@ def _install_msteams_personal(self):
user=self.user_1, identity_provider=self.idp, external_id=self.user_id_1
)
- def _install_msteams_team(self):
+ def _install_msteams_team(self) -> None:
self.team_id = "19:8d46058cda57449380517cc374727f2a@thread.tacv2"
self.integration = self.create_integration(
self.organization,
@@ -89,7 +89,7 @@ def setUp(self) -> None:
def test_simple(
self,
mock_send_card: MagicMock,
- ):
+ ) -> None:
self._install_msteams_personal()
notification = DummyNotification(self.organization)
@@ -180,7 +180,7 @@ class MSTeamsNotificationIntegrationTest(MSTeamsActivityNotificationTest):
Test the MS Teams notification flow end to end without mocking out functions.
"""
- def _setup_msteams_api(self):
+ def _setup_msteams_api(self) -> None:
responses.add(
method=responses.POST,
url="https://login.microsoftonline.com/botframework.com/oauth2/v2.0/token",
diff --git a/tests/sentry/integrations/msteams/test_webhook.py b/tests/sentry/integrations/msteams/test_webhook.py
index aec469db870ee4..121cbea36caf16 100644
--- a/tests/sentry/integrations/msteams/test_webhook.py
+++ b/tests/sentry/integrations/msteams/test_webhook.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from copy import deepcopy
from unittest import mock
from unittest.mock import MagicMock, call
@@ -38,7 +39,7 @@
class MsTeamsWebhookTest(APITestCase):
@pytest.fixture(autouse=True)
- def _setup_metric_patch(self):
+ def _setup_metric_patch(self) -> Generator[None]:
with mock.patch("sentry.shared_integrations.client.base.metrics") as self.metrics:
yield
diff --git a/tests/sentry/integrations/pagerduty/test_client.py b/tests/sentry/integrations/pagerduty/test_client.py
index f04380a3479331..bb6c8123fbb867 100644
--- a/tests/sentry/integrations/pagerduty/test_client.py
+++ b/tests/sentry/integrations/pagerduty/test_client.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from unittest import mock
from unittest.mock import MagicMock, call, patch
@@ -35,7 +36,7 @@ class PagerDutyClientTest(APITestCase):
provider = "pagerduty"
@pytest.fixture(autouse=True)
- def _setup_metric_patch(self):
+ def _setup_metric_patch(self) -> Generator[None]:
with mock.patch("sentry.shared_integrations.client.base.metrics") as self.metrics:
yield
diff --git a/tests/sentry/integrations/slack/message_builder/test_routing.py b/tests/sentry/integrations/slack/message_builder/test_routing.py
index bfbd897288e2de..7ddf150caae988 100644
--- a/tests/sentry/integrations/slack/message_builder/test_routing.py
+++ b/tests/sentry/integrations/slack/message_builder/test_routing.py
@@ -4,7 +4,7 @@
class SlackRequestRoutingTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.organization = self.create_organization()
self.project = self.create_project(organization=self.organization)
diff --git a/tests/sentry/integrations/slack/test_link_identity.py b/tests/sentry/integrations/slack/test_link_identity.py
index edef02520da36c..012769d39f6540 100644
--- a/tests/sentry/integrations/slack/test_link_identity.py
+++ b/tests/sentry/integrations/slack/test_link_identity.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from unittest.mock import MagicMock, patch
import pytest
@@ -26,7 +27,7 @@ def setUp(self) -> None:
self.idp = add_identity(self.integration, self.user, self.external_id)
@pytest.fixture(autouse=True)
- def mock_webhook_send(self):
+ def mock_webhook_send(self) -> Generator[None]:
with patch(
"slack_sdk.webhook.WebhookClient.send",
return_value=WebhookResponse(
@@ -39,7 +40,7 @@ def mock_webhook_send(self):
yield
@pytest.fixture(autouse=True)
- def mock_chat_postMessage(self):
+ def mock_chat_postMessage(self) -> Generator[None]:
with patch(
"slack_sdk.web.WebClient.chat_postMessage",
return_value=SlackResponse(
diff --git a/tests/sentry/integrations/slack/test_requests.py b/tests/sentry/integrations/slack/test_requests.py
index 038954f895dc07..a065fd55bba531 100644
--- a/tests/sentry/integrations/slack/test_requests.py
+++ b/tests/sentry/integrations/slack/test_requests.py
@@ -35,7 +35,7 @@ def setUp(self) -> None:
)
@cached_property
- def slack_request(self):
+ def slack_request(self) -> SlackRequest:
return SlackRequest(self.request)
@patch("slack_sdk.signature.SignatureVerifier.is_valid")
@@ -132,7 +132,7 @@ def setUp(self) -> None:
)
@cached_property
- def slack_request(self):
+ def slack_request(self) -> SlackEventRequest:
return SlackEventRequest(self.request)
def test_ignores_event_validation_on_challenge_request(self) -> None:
@@ -218,7 +218,7 @@ def setUp(self) -> None:
)
@cached_property
- def slack_request(self):
+ def slack_request(self) -> SlackActionRequest:
return SlackActionRequest(self.request)
def test_type(self) -> None:
diff --git a/tests/sentry/integrations/slack/test_unfurl.py b/tests/sentry/integrations/slack/test_unfurl.py
index a621af5ee4590e..3594f8224e4d66 100644
--- a/tests/sentry/integrations/slack/test_unfurl.py
+++ b/tests/sentry/integrations/slack/test_unfurl.py
@@ -24,6 +24,7 @@
from sentry.testutils.helpers import install_slack
from sentry.testutils.helpers.datetime import before_now, freeze_time
from sentry.testutils.skips import requires_snuba
+from sentry.users.services.user.service import user_service
pytestmark = [requires_snuba, pytest.mark.sentry_metrics]
@@ -201,8 +202,9 @@ def setUp(self) -> None:
self.request = RequestFactory().get("slack/event")
self.frozen_time = freeze_time(datetime.now() - timedelta(days=1))
self.frozen_time.start()
+ self.rpc_user = user_service.get_user(user_id=self.user.id)
- def tearDown(self):
+ def tearDown(self) -> None:
self.frozen_time.stop()
def test_unfurl_issues(self) -> None:
@@ -648,7 +650,9 @@ def test_unfurl_discover(self, mock_generate_chart: MagicMock, _: MagicMock) ->
]
with self.feature(["organizations:discover-basic"]):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -687,7 +691,9 @@ def test_unfurl_discover_previous_period(
]
with self.feature(["organizations:discover-basic"]):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -735,7 +741,9 @@ def test_unfurl_discover_multi_y_axis(
]
with self.feature(["organizations:discover-basic"]):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -773,7 +781,9 @@ def test_unfurl_discover_html_escaped(
]
with self.feature(["organizations:discover-basic"]):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -814,9 +824,11 @@ def test_unfurl_discover_short_url(self, mock_generate_chart: MagicMock, _: Magi
"display": "top5",
"topEvents": 2,
}
+ assert self.rpc_user is not None, "RpcUser should exist unless explicitly noted in the test"
+
saved_query = DiscoverSavedQuery.objects.create(
organization=self.organization,
- created_by_id=self.user.id,
+ created_by_id=self.rpc_user.id,
name="Test query",
query=query,
version=2,
@@ -839,7 +851,9 @@ def test_unfurl_discover_short_url(self, mock_generate_chart: MagicMock, _: Magi
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -880,9 +894,10 @@ def test_unfurl_correct_y_axis_for_saved_query(
"p50(transaction.duration)",
],
}
+ assert self.rpc_user is not None, "RpcUser should exist unless explicitly noted in the test"
saved_query = DiscoverSavedQuery.objects.create(
organization=self.organization,
- created_by_id=self.user.id,
+ created_by_id=self.rpc_user.id,
name="Test query",
query=query,
version=2,
@@ -905,7 +920,9 @@ def test_unfurl_correct_y_axis_for_saved_query(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -957,7 +974,9 @@ def test_top_events_url_param(self, mock_generate_chart: MagicMock, _: MagicMock
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1025,7 +1044,9 @@ def test_top_daily_events_renders_bar_chart(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1061,9 +1082,10 @@ def test_unfurl_discover_short_url_without_project_ids(
"query": "",
"yAxis": "count_unique(users)",
}
+ assert self.rpc_user is not None, "RpcUser should exist unless explicitly noted in the test"
saved_query = DiscoverSavedQuery.objects.create(
organization=self.organization,
- created_by_id=self.user.id,
+ created_by_id=self.rpc_user.id,
name="Test query",
query=query,
version=2,
@@ -1086,7 +1108,9 @@ def test_unfurl_discover_short_url_without_project_ids(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1130,7 +1154,9 @@ def test_unfurl_discover_without_project_ids(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1188,7 +1214,9 @@ def test_bar_chart_display_renders_bar_chart(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1222,7 +1250,9 @@ def test_bar_chart_interval_with_absolute_date(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1259,7 +1289,9 @@ def test_bar_chart_interval_with_periodic_date(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1286,9 +1318,10 @@ def test_saved_query_with_interval(
"interval": "10m",
"statsPeriod": "24h",
}
+ assert self.rpc_user is not None, "RpcUser should exist unless explicitly noted in the test"
saved_query = DiscoverSavedQuery.objects.create(
organization=self.organization,
- created_by_id=self.user.id,
+ created_by_id=self.rpc_user.id,
name="Test query",
query=query,
version=2,
@@ -1312,7 +1345,9 @@ def test_saved_query_with_interval(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1341,9 +1376,10 @@ def test_saved_query_with_dataset(
"interval": "10m",
"statsPeriod": "24h",
}
+ assert self.rpc_user is not None, "RpcUser should exist unless explicitly noted in the test"
saved_query = DiscoverSavedQuery.objects.create(
organization=self.organization,
- created_by_id=self.user.id,
+ created_by_id=self.rpc_user.id,
name="Test query",
query=query,
version=2,
@@ -1367,7 +1403,9 @@ def test_saved_query_with_dataset(
"organizations:discover-basic",
]
):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
@@ -1406,7 +1444,9 @@ def test_unfurl_discover_homepage(
]
with self.feature(["organizations:discover-basic"]):
- unfurls = link_handlers[link_type].fn(self.request, self.integration, links, self.user)
+ unfurls = link_handlers[link_type].fn(
+ self.request, self.integration, links, self.rpc_user
+ )
assert (
unfurls[url]
diff --git a/tests/sentry/integrations/slack/webhooks/commands/__init__.py b/tests/sentry/integrations/slack/webhooks/commands/__init__.py
index 006ba49eab2885..862d053fa4f336 100644
--- a/tests/sentry/integrations/slack/webhooks/commands/__init__.py
+++ b/tests/sentry/integrations/slack/webhooks/commands/__init__.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from collections.abc import Mapping
+from collections.abc import Generator, Mapping
from typing import Any
from unittest.mock import patch
from urllib.parse import urlencode
@@ -83,7 +83,7 @@ def get_slack_response(
return response
@pytest.fixture(autouse=True)
- def mock_webhook_send(self):
+ def mock_webhook_send(self) -> Generator[None]:
with patch(
"slack_sdk.webhook.WebhookClient.send",
return_value=WebhookResponse(
@@ -96,7 +96,7 @@ def mock_webhook_send(self):
yield
@pytest.fixture(autouse=True)
- def mock_chat_postMessage(self):
+ def mock_chat_postMessage(self) -> Generator[None]:
with patch(
"slack_sdk.web.WebClient.chat_postMessage",
return_value=SlackResponse(
diff --git a/tests/sentry/integrations/source_code_management/test_commit_context.py b/tests/sentry/integrations/source_code_management/test_commit_context.py
index 51ad998dc8c0ef..4c7c953651c170 100644
--- a/tests/sentry/integrations/source_code_management/test_commit_context.py
+++ b/tests/sentry/integrations/source_code_management/test_commit_context.py
@@ -4,6 +4,7 @@
from sentry.integrations.gitlab.constants import GITLAB_CLOUD_BASE_URL
from sentry.integrations.source_code_management.commit_context import (
+ CommitContextClient,
CommitContextIntegration,
SourceLineInfo,
)
@@ -20,11 +21,11 @@ class MockCommitContextIntegration(CommitContextIntegration):
integration_name = "mock_integration"
- def __init__(self):
+ def __init__(self) -> None:
self.client = Mock()
self.client.base_url = "https://example.com"
- def get_client(self):
+ def get_client(self) -> CommitContextClient:
return self.client
def on_create_or_update_comment_error(self, api_error: ApiError, metrics_base: str) -> bool:
@@ -155,7 +156,7 @@ class MockGitlabIntegration(MockCommitContextIntegration):
integration_name = "gitlab"
base_url = "https://bufo-bot.gitlab.com"
- def __init__(self):
+ def __init__(self) -> None:
super().__init__()
self.client.base_url = self.base_url
@@ -181,7 +182,7 @@ class MockGitlabIntegration(MockCommitContextIntegration):
integration_name = "gitlab"
base_url = GITLAB_CLOUD_BASE_URL
- def __init__(self):
+ def __init__(self) -> None:
super().__init__()
self.client.base_url = self.base_url
diff --git a/tests/sentry/integrations/test_base.py b/tests/sentry/integrations/test_base.py
index fe1a82a766c2f7..2feb3aba1fd4aa 100644
--- a/tests/sentry/integrations/test_base.py
+++ b/tests/sentry/integrations/test_base.py
@@ -10,7 +10,7 @@
class ExampleIntegration(IntegrationInstallation):
- def get_client(self):
+ def get_client(self) -> None:
raise NotImplementedError
diff --git a/tests/sentry/integrations/test_helpers.py b/tests/sentry/integrations/test_helpers.py
index 0e9b93cc18a016..eb441cfd22752b 100644
--- a/tests/sentry/integrations/test_helpers.py
+++ b/tests/sentry/integrations/test_helpers.py
@@ -12,7 +12,7 @@ def add_control_silo_proxy_response(
path: str | None,
additional_matchers: list[Any] | None = None,
**additional_response_kwargs: Any,
-):
+) -> responses.BaseResponse:
if additional_matchers is None:
additional_matchers = []
diff --git a/tests/sentry/integrations/test_notification_utilities.py b/tests/sentry/integrations/test_notification_utilities.py
index 37889285dc8d1f..e4f02a442b4efb 100644
--- a/tests/sentry/integrations/test_notification_utilities.py
+++ b/tests/sentry/integrations/test_notification_utilities.py
@@ -38,7 +38,7 @@ def _assert_integrations_are(
self,
actual: Mapping[Actor, Mapping[str, RpcIntegration | Integration]],
expected: Mapping[User, Mapping[str, RpcIntegration | Integration]],
- ):
+ ) -> None:
assert actual == {Actor.from_orm_user(k): v for (k, v) in expected.items()}
def test_simple(self) -> None:
diff --git a/tests/sentry/integrations/utils/test_scope.py b/tests/sentry/integrations/utils/test_scope.py
index 446c9c395fcc7a..453bb5d8e728b2 100644
--- a/tests/sentry/integrations/utils/test_scope.py
+++ b/tests/sentry/integrations/utils/test_scope.py
@@ -92,7 +92,7 @@ def test_logs_warning_if_no_orgs_found(
mock_check_tag_for_scope_bleed: MagicMock,
mock_bind_org_context: MagicMock,
mock_bind_ambiguous_org_context: MagicMock,
- ):
+ ) -> None:
with assume_test_silo_mode(SiloMode.CONTROL):
integration = self.create_provider_integration(name="squirrelChasers")
diff --git a/tests/sentry/integrations/vercel/test_webhook.py b/tests/sentry/integrations/vercel/test_webhook.py
index 530669644eb68b..74104e65f919f6 100644
--- a/tests/sentry/integrations/vercel/test_webhook.py
+++ b/tests/sentry/integrations/vercel/test_webhook.py
@@ -93,7 +93,7 @@ def setUp(self) -> None:
provider="vercel",
)
- def tearDown(self):
+ def tearDown(self) -> None:
responses.reset()
@responses.activate
diff --git a/tests/sentry/integrations/vsts/test_issues.py b/tests/sentry/integrations/vsts/test_issues.py
index 4670e41461a29d..985a568c29a567 100644
--- a/tests/sentry/integrations/vsts/test_issues.py
+++ b/tests/sentry/integrations/vsts/test_issues.py
@@ -163,7 +163,7 @@ def mock_categories(self, project):
)
@region_silo_test(include_monolith_run=True)
class VstsIssueSyncTest(VstsIssueBase):
- def tearDown(self):
+ def tearDown(self) -> None:
responses.reset()
@responses.activate
@@ -570,7 +570,7 @@ def setUp(self) -> None:
)
self.group = event.group
- def tearDown(self):
+ def tearDown(self) -> None:
responses.reset()
def update_issue_defaults(self, defaults):
diff --git a/tests/sentry/integrations/vsts/test_provider.py b/tests/sentry/integrations/vsts/test_provider.py
index 5c9329f902f97c..ee77f0781a3765 100644
--- a/tests/sentry/integrations/vsts/test_provider.py
+++ b/tests/sentry/integrations/vsts/test_provider.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from collections.abc import Generator
from time import time
from typing import Any
from unittest.mock import MagicMock, Mock, patch
@@ -305,7 +306,7 @@ def setUp(self) -> None:
self.provider = VSTSIdentityProvider()
@pytest.fixture(autouse=True)
- def patch_get_oauth_client_secret(self):
+ def patch_get_oauth_client_secret(self) -> Generator[None]:
with patch.object(
VSTSIdentityProvider, "get_oauth_client_secret", return_value=self.client_secret
):
diff --git a/tests/sentry/integrations/vsts/test_repository.py b/tests/sentry/integrations/vsts/test_repository.py
index b57ec213232b8c..47098de9d35484 100644
--- a/tests/sentry/integrations/vsts/test_repository.py
+++ b/tests/sentry/integrations/vsts/test_repository.py
@@ -170,7 +170,7 @@ def setUp(self) -> None:
}
self.login_as(self.user)
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
responses.reset()
diff --git a/tests/sentry/integrations/vsts/test_webhooks.py b/tests/sentry/integrations/vsts/test_webhooks.py
index 34f51ecc86a162..97688d33594f45 100644
--- a/tests/sentry/integrations/vsts/test_webhooks.py
+++ b/tests/sentry/integrations/vsts/test_webhooks.py
@@ -68,7 +68,7 @@ def setUp(self) -> None:
self.user_to_assign = self.create_user("sentryuseremail@email.com")
- def tearDown(self):
+ def tearDown(self) -> None:
responses.reset()
def create_linked_group(self, external_issue, project, status):
diff --git a/tests/sentry/issues/auto_source_code_config/test_code_mapping.py b/tests/sentry/issues/auto_source_code_config/test_code_mapping.py
index 67ac635e5ccf87..2e07eda2fd1a94 100644
--- a/tests/sentry/issues/auto_source_code_config/test_code_mapping.py
+++ b/tests/sentry/issues/auto_source_code_config/test_code_mapping.py
@@ -95,7 +95,7 @@ def test_buckets_logic() -> None:
class TestDerivedCodeMappings(TestCase):
@pytest.fixture(autouse=True)
- def inject_fixtures(self, caplog: Any) -> None:
+ def inject_fixtures(self, caplog: pytest.LogCaptureFixture) -> None:
self._caplog = caplog
def setUp(self) -> None:
diff --git a/tests/sentry/lang/native/test_processing.py b/tests/sentry/lang/native/test_processing.py
index 4de85d89bb8234..01e70542b486f3 100644
--- a/tests/sentry/lang/native/test_processing.py
+++ b/tests/sentry/lang/native/test_processing.py
@@ -102,7 +102,7 @@ def test_merge_symbolicator_image_remove_unknown_arch() -> None:
),
],
)
-def test_merge_symbolicator_image_errors(code_file, error) -> None:
+def test_merge_symbolicator_image_errors(code_file: str, error: EventError) -> None:
raw_image = {"instruction_addr": 0xFEEBEE, "other": "foo", "code_file": code_file}
sdk_info = {"sdk_name": "macos"}
complete_image = {
diff --git a/tests/sentry/middleware/integrations/parsers/test_discord.py b/tests/sentry/middleware/integrations/parsers/test_discord.py
index e3f923d3abe2de..de98c74c527ded 100644
--- a/tests/sentry/middleware/integrations/parsers/test_discord.py
+++ b/tests/sentry/middleware/integrations/parsers/test_discord.py
@@ -38,7 +38,7 @@ def setUp(self) -> None:
provider="discord",
)
- def get_parser(self, path: str, data: Mapping[str, Any] | None = None):
+ def get_parser(self, path: str, data: Mapping[str, Any] | None = None) -> DiscordRequestParser:
if not data:
data = {}
self.request = self.factory.post(
@@ -60,6 +60,7 @@ def test_interactions_endpoint_routing_ping(self, mock_verify_signature: MagicMo
response = parser.get_response()
assert response.status_code == status.HTTP_200_OK
+ assert isinstance(response, HttpResponse)
data = json.loads(response.content)
assert data == {"type": 1}
assert len(responses.calls) == 0
@@ -78,6 +79,7 @@ def test_interactions_endpoint_validation_failure(
response = parser.get_response()
assert response.status_code == status.HTTP_401_UNAUTHORIZED
+ assert isinstance(response, HttpResponse)
assert not response.content
assert_no_webhook_payloads()
assert len(responses.calls) == 0
@@ -94,6 +96,7 @@ def test_interactions_endpoint_routing_ping_no_integration(
response = parser.get_response()
assert response.status_code == status.HTTP_200_OK
+ assert isinstance(response, HttpResponse)
data = json.loads(response.content)
assert data == {"type": 1}
assert_no_webhook_payloads()
@@ -242,6 +245,7 @@ def test_triggers_async_response(
}
)
assert response.status_code == status.HTTP_200_OK
+ assert isinstance(response, HttpResponse)
assert json.loads(response.content) == parser.async_response_data
diff --git a/tests/sentry/middleware/integrations/parsers/test_msteams.py b/tests/sentry/middleware/integrations/parsers/test_msteams.py
index 43e191c980c3f3..797933839e75fd 100644
--- a/tests/sentry/middleware/integrations/parsers/test_msteams.py
+++ b/tests/sentry/middleware/integrations/parsers/test_msteams.py
@@ -1,4 +1,5 @@
from copy import deepcopy
+from typing import Any
import responses
from django.http import HttpRequest, HttpResponse
@@ -40,7 +41,7 @@ def setUp(self) -> None:
def get_response(self, request: HttpRequest) -> HttpResponse:
return HttpResponse(status=200, content="passthrough")
- def generate_card_response(self, integration_id: int):
+ def generate_card_response(self, integration_id: int) -> dict[str, Any]:
return {
"type": "message",
"from": {"id": "user_id"},
diff --git a/tests/sentry/middleware/test_access_log_middleware.py b/tests/sentry/middleware/test_access_log_middleware.py
index 1b04fbe7fabf56..02457f73001d9e 100644
--- a/tests/sentry/middleware/test_access_log_middleware.py
+++ b/tests/sentry/middleware/test_access_log_middleware.py
@@ -180,7 +180,7 @@ def get(self, request, organization_context, organization):
@override_settings(LOG_API_ACCESS=True)
class LogCaptureAPITestCase(APITestCase):
@pytest.fixture(autouse=True)
- def inject_fixtures(self, caplog):
+ def inject_fixtures(self, caplog: pytest.LogCaptureFixture):
self._caplog = caplog
def assert_access_log_recorded(self):
diff --git a/tests/sentry/migrations/test_0921_convert_org_saved_searches_to_views_rerevised.py b/tests/sentry/migrations/test_0921_convert_org_saved_searches_to_views_rerevised.py
index a6336e1be86bd3..be1b1edf7ca171 100644
--- a/tests/sentry/migrations/test_0921_convert_org_saved_searches_to_views_rerevised.py
+++ b/tests/sentry/migrations/test_0921_convert_org_saved_searches_to_views_rerevised.py
@@ -12,7 +12,7 @@ class ConvertOrgSavedSearchesToViewsTest(TestMigrations):
migrate_from = "0920_convert_org_saved_searches_to_views_revised"
migrate_to = "0921_convert_org_saved_searches_to_views_rerevised"
- def setup_initial_state(self):
+ def setup_initial_state(self) -> None:
self.org = self.create_organization()
self.user = self.create_user()
diff --git a/tests/sentry/models/releases/test_release_project.py b/tests/sentry/models/releases/test_release_project.py
index 7f0c16dd7ada36..7bf872a4bbda58 100644
--- a/tests/sentry/models/releases/test_release_project.py
+++ b/tests/sentry/models/releases/test_release_project.py
@@ -27,7 +27,7 @@ def test_post_save_signal_runs_if_dynamic_sampling_is_disabled(self) -> None:
@receivers_raise_on_send()
def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release_rule_does_not_exist(
self,
- ):
+ ) -> None:
with Feature(
{
"organizations:dynamic-sampling": True,
@@ -45,7 +45,7 @@ def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release
@receivers_raise_on_send()
def test_post_save_signal_runs_if_dynamic_sampling_is_enabled_and_latest_release_rule_exists(
self,
- ):
+ ) -> None:
with Feature(
{
"organizations:dynamic-sampling": True,
diff --git a/tests/sentry/models/test_activity.py b/tests/sentry/models/test_activity.py
index 6b31307e8f7ecc..8ed3ce8376187e 100644
--- a/tests/sentry/models/test_activity.py
+++ b/tests/sentry/models/test_activity.py
@@ -325,7 +325,7 @@ def test_get_activities_for_group_flip_flop(self) -> None:
@patch("sentry.tasks.activity.send_activity_notifications.delay")
def test_skips_status_change_notifications_if_disabled(
self, mock_send_activity_notifications: MagicMock
- ):
+ ) -> None:
project = self.create_project(name="test_activities_group")
group = self.create_group(project)
@@ -351,7 +351,7 @@ def test_skips_status_change_notifications_if_disabled(
@patch("sentry.tasks.activity.send_activity_notifications.delay")
def test_skips_workflow_notifications_if_disabled(
self, mock_send_activity_notifications: MagicMock
- ):
+ ) -> None:
project = self.create_project(name="test_activities_group")
group = self.create_group(project)
diff --git a/tests/sentry/models/test_dashboard.py b/tests/sentry/models/test_dashboard.py
index 971acfcaa109bc..4724ceccfe6e48 100644
--- a/tests/sentry/models/test_dashboard.py
+++ b/tests/sentry/models/test_dashboard.py
@@ -65,7 +65,7 @@ def test_across_organizations(self) -> None:
class DashboardFavoriteUserTest(TestCase):
def create_dashboard_favorite_user(
self, dashboard: Dashboard, user: User, organization: Organization, position: int | None
- ):
+ ) -> DashboardFavoriteUser:
return DashboardFavoriteUser.objects.create(
dashboard=dashboard, user_id=user.id, organization=organization, position=position
)
diff --git a/tests/sentry/models/test_debugfile.py b/tests/sentry/models/test_debugfile.py
index 0ea4bcc72c134e..329056160013b9 100644
--- a/tests/sentry/models/test_debugfile.py
+++ b/tests/sentry/models/test_debugfile.py
@@ -355,7 +355,7 @@ def test_simple_cache_clear(self) -> None:
),
),
)
-def test_proguard_files_detected(path, name, uuid) -> None:
+def test_proguard_files_detected(path: str, name: str | None, uuid: str) -> None:
# ProGuard files are detected by the path/name, not the file contents.
# So, the ProGuard check should not depend on the file existing.
detected = detect_dif_from_path(path, name)
@@ -385,7 +385,7 @@ def test_proguard_files_detected(path, name, uuid) -> None:
),
),
)
-def test_proguard_file_not_detected(path, name) -> None:
+def test_proguard_file_not_detected(path: str, name: str | None) -> None:
with pytest.raises(FileNotFoundError):
# If the file is not detected as a ProGuard file, detect_dif_from_path
# attempts to open the file, which probably doesn't exist.
diff --git a/tests/sentry/models/test_dynamicsampling.py b/tests/sentry/models/test_dynamicsampling.py
index bbd2999d2b5db9..c5faef01d4baa1 100644
--- a/tests/sentry/models/test_dynamicsampling.py
+++ b/tests/sentry/models/test_dynamicsampling.py
@@ -113,7 +113,7 @@ def test_deactivate_old_rules(self) -> None:
old_rules = []
new_rules = []
- def create_rule(is_old: bool, idx: int):
+ def create_rule(is_old: bool, idx: int) -> CustomDynamicSamplingRule:
condition = {"op": "equals", "name": "environment", "value": f"prod{idx}"}
if is_old:
end_delta = -timedelta(hours=1)
@@ -287,7 +287,9 @@ def test_deactivate_expired_rules(self) -> None:
Tests that expired, and only expired, rules are deactivated
"""
- def create_rule(env_idx: int, end: datetime, project_ids: list[int]):
+ def create_rule(
+ env_idx: int, end: datetime, project_ids: list[int]
+ ) -> CustomDynamicSamplingRule:
condition = {"op": "equals", "name": "environment", "value": f"prod{env_idx}"}
return CustomDynamicSamplingRule.update_or_create(
condition=condition,
diff --git a/tests/sentry/models/test_environment.py b/tests/sentry/models/test_environment.py
index a9f0efdbb3c74e..189b23cebff0e1 100644
--- a/tests/sentry/models/test_environment.py
+++ b/tests/sentry/models/test_environment.py
@@ -40,5 +40,5 @@ def test_simple(self) -> None:
("no\fform-feed", False),
],
)
-def test_valid_name(val, expected) -> None:
+def test_valid_name(val: str, expected: bool) -> None:
assert Environment.is_valid_name(val) == expected
diff --git a/tests/sentry/models/test_eventerror.py b/tests/sentry/models/test_eventerror.py
index 27f6887885b309..f9619711821a7d 100644
--- a/tests/sentry/models/test_eventerror.py
+++ b/tests/sentry/models/test_eventerror.py
@@ -18,7 +18,7 @@
({"type": "INVALID_ERROR_TYPE"}, "INVALID_ERROR_TYPE", "Unknown error", {}),
),
)
-def test_event_error(error, type, message, data) -> None:
+def test_event_error(error: dict[str, str], type: str, message: str, data: dict[str, str]) -> None:
assert EventError.get_message(error) == message
assert EventError(error).type == type
assert EventError(error).message == message
diff --git a/tests/sentry/models/test_groupowner.py b/tests/sentry/models/test_groupowner.py
index 20c4cfc73e8c84..f3911b18685172 100644
--- a/tests/sentry/models/test_groupowner.py
+++ b/tests/sentry/models/test_groupowner.py
@@ -6,7 +6,7 @@
class GroupOwnerTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.timestamp = before_now(minutes=10)
@@ -37,14 +37,14 @@ def setUp(self):
"suspectCommitStrategy": SuspectCommitStrategy.RELEASE_BASED,
}
- def _make_scm_lookup_kwargs(self):
+ def _make_scm_lookup_kwargs(self) -> None:
"""
scm_based lookup_kwargs include an additional filter: context__contains;
release_based group owners don't have this field in context.
"""
self.lookup_kwargs.update(self.scm_extra_lookup)
- def test_update_or_create_and_preserve_context_create_then_update_scm(self):
+ def test_update_or_create_and_preserve_context_create_then_update_scm(self) -> None:
assert GroupOwner.objects.filter(**self.lookup_kwargs).exists() is False
self._make_scm_lookup_kwargs()
@@ -83,7 +83,7 @@ def test_update_or_create_and_preserve_context_create_then_update_scm(self):
assert obj.date_added == now
assert obj.context == self.scm_context_defaults
- def test_update_or_create_and_preserve_context_update_scm(self):
+ def test_update_or_create_and_preserve_context_update_scm(self) -> None:
original_obj = GroupOwner.objects.create(
context={
"commitId": self.c.id,
@@ -114,7 +114,7 @@ def test_update_or_create_and_preserve_context_update_scm(self):
"suspectCommitStrategy": SuspectCommitStrategy.SCM_BASED,
}
- def test_update_or_create_and_preserve_context_create_then_update_rb(self):
+ def test_update_or_create_and_preserve_context_create_then_update_rb(self) -> None:
assert GroupOwner.objects.filter(**self.lookup_kwargs).exists() is False
obj, created = GroupOwner.objects.update_or_create_and_preserve_context(
diff --git a/tests/sentry/models/test_groupsubscription.py b/tests/sentry/models/test_groupsubscription.py
index 119ab93781e2e2..9f5e13987fd81f 100644
--- a/tests/sentry/models/test_groupsubscription.py
+++ b/tests/sentry/models/test_groupsubscription.py
@@ -201,7 +201,7 @@ def setUp(self) -> None:
self.rpc_user = rpc_user
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_user_settings_always(self):
+ def update_user_settings_always(self) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.USER.value,
scope_identifier=self.user.id,
@@ -211,7 +211,7 @@ def update_user_settings_always(self):
)
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_user_setting_subscribe_only(self):
+ def update_user_setting_subscribe_only(self) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.USER.value,
scope_identifier=self.user.id,
@@ -229,7 +229,7 @@ def update_user_setting_subscribe_only(self):
)
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_user_setting_never(self):
+ def update_user_setting_never(self) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.USER.value,
scope_identifier=self.user.id,
@@ -247,7 +247,7 @@ def update_user_setting_never(self):
)
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_project_setting_always(self):
+ def update_project_setting_always(self) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.PROJECT.value,
scope_identifier=self.group.project_id,
@@ -265,7 +265,7 @@ def update_project_setting_always(self):
)
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_project_setting_subscribe_only(self):
+ def update_project_setting_subscribe_only(self) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.PROJECT.value,
scope_identifier=self.group.project_id,
@@ -283,7 +283,7 @@ def update_project_setting_subscribe_only(self):
)
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_project_setting_never(self):
+ def update_project_setting_never(self) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.PROJECT.value,
scope_identifier=self.group.project_id,
@@ -301,7 +301,7 @@ def update_project_setting_never(self):
)
@assume_test_silo_mode(SiloMode.CONTROL)
- def update_team_setting_subscribe_only(self, team_id: int):
+ def update_team_setting_subscribe_only(self, team_id: int) -> None:
NotificationSettingOption.objects.update_or_create(
scope_type=NotificationScopeEnum.TEAM.value,
scope_identifier=team_id,
@@ -324,7 +324,7 @@ def _assert_subscribers_are(
*,
email: dict[RpcUser, int] | dict[Team, int] | None = None,
slack: dict[RpcUser, int] | dict[Team, int] | None = None,
- ):
+ ) -> None:
all_participants = GroupSubscription.objects.get_participants(group or self.group)
all_expected = {ExternalProviders.EMAIL: email, ExternalProviders.SLACK: slack}
diff --git a/tests/sentry/models/test_manager.py b/tests/sentry/models/test_manager.py
index 9a9472828effc6..10aec1179115b1 100644
--- a/tests/sentry/models/test_manager.py
+++ b/tests/sentry/models/test_manager.py
@@ -8,7 +8,7 @@ class GetFromCacheTest(TestCase):
def setUp(self) -> None:
self.clear()
- def clear(self):
+ def clear(self) -> None:
cache.clear()
flush_manager_local_cache()
diff --git a/tests/sentry/models/test_project.py b/tests/sentry/models/test_project.py
index 273bc3f577452d..32bb771bbc730f 100644
--- a/tests/sentry/models/test_project.py
+++ b/tests/sentry/models/test_project.py
@@ -461,7 +461,7 @@ def setUp(self) -> None:
self.project_template = self.create_project_template(organization=self.project.organization)
self.project.template = self.project_template
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self.project_template.delete()
diff --git a/tests/sentry/models/test_projectcodeowners.py b/tests/sentry/models/test_projectcodeowners.py
index d92a1346884ad1..75551a574bca2b 100644
--- a/tests/sentry/models/test_projectcodeowners.py
+++ b/tests/sentry/models/test_projectcodeowners.py
@@ -5,7 +5,7 @@
class ProjectCodeOwnersTestCase(TestCase):
- def tearDown(self):
+ def tearDown(self) -> None:
cache.delete(ProjectCodeOwners.get_cache_key(self.project.id))
super().tearDown()
diff --git a/tests/sentry/models/test_projectcounter.py b/tests/sentry/models/test_projectcounter.py
index 572000bcdab09e..1085623b1df68c 100644
--- a/tests/sentry/models/test_projectcounter.py
+++ b/tests/sentry/models/test_projectcounter.py
@@ -13,6 +13,7 @@
refill_cached_short_ids,
)
from sentry.models.group import Group
+from sentry.models.project import Project
from sentry.testutils.helpers.eventprocessing import save_new_event
from sentry.testutils.helpers.task_runner import TaskRunner
from sentry.testutils.pytest.fixtures import django_db_all
@@ -67,7 +68,9 @@ def test_group_creation_simple(default_project) -> None:
[1, 2, 3],
ids=[" discrepancy = 1 ", " discrepancy = 2 ", " discrepancy = 3 "],
)
-def test_group_creation_with_stuck_project_counter(default_project, discrepancy) -> None:
+def test_group_creation_with_stuck_project_counter(
+ default_project: Project, discrepancy: int
+) -> None:
project = default_project
# Create enough groups that a discrepancy larger than 1 will still land us on an existing group
diff --git a/tests/sentry/models/test_projecttemplate.py b/tests/sentry/models/test_projecttemplate.py
index 979faebdbbe183..7043b7b41589ff 100644
--- a/tests/sentry/models/test_projecttemplate.py
+++ b/tests/sentry/models/test_projecttemplate.py
@@ -6,7 +6,7 @@ class ProjectTemplateTest(TestCase):
def setUp(self) -> None:
self.org = self.create_organization()
- def tearDown(self):
+ def tearDown(self) -> None:
self.org.delete()
def test_create_simple_project_template(self) -> None:
diff --git a/tests/sentry/models/test_projecttemplateoption.py b/tests/sentry/models/test_projecttemplateoption.py
index f2bfdedf1a537f..decbbaa3cf8de5 100644
--- a/tests/sentry/models/test_projecttemplateoption.py
+++ b/tests/sentry/models/test_projecttemplateoption.py
@@ -138,7 +138,7 @@ def setUp(self) -> None:
name="test_project_template", organization=self.org
)
- def tearDown(self):
+ def tearDown(self) -> None:
self.org.delete()
self.project_template.delete()
diff --git a/tests/sentry/models/test_releasefile.py b/tests/sentry/models/test_releasefile.py
index fe1917cbfbfd31..9f99d5207d1360 100644
--- a/tests/sentry/models/test_releasefile.py
+++ b/tests/sentry/models/test_releasefile.py
@@ -51,7 +51,7 @@
pytest.param("app://[native_code]", ["app://[native_code]", "~"], id="invalid hostname"),
),
)
-def test_normalize(s, expected) -> None:
+def test_normalize(s: str, expected: list[str]) -> None:
assert ReleaseFile.normalize(s) == expected
diff --git a/tests/sentry/models/test_statistical_detectors.py b/tests/sentry/models/test_statistical_detectors.py
index 24c1c077a6b827..da84314a111b08 100644
--- a/tests/sentry/models/test_statistical_detectors.py
+++ b/tests/sentry/models/test_statistical_detectors.py
@@ -10,5 +10,5 @@
pytest.param(RegressionType.FUNCTION, "f", id="endpoint"),
],
)
-def test_regression_type_abbreviation(regression_type, abbreviation) -> None:
+def test_regression_type_abbreviation(regression_type: RegressionType, abbreviation: str) -> None:
assert regression_type.abbreviate() == abbreviation
diff --git a/tests/sentry/monitors/consumers/test_clock_tasks_consumer.py b/tests/sentry/monitors/consumers/test_clock_tasks_consumer.py
index 39141b693d7ae8..500ded6d708942 100644
--- a/tests/sentry/monitors/consumers/test_clock_tasks_consumer.py
+++ b/tests/sentry/monitors/consumers/test_clock_tasks_consumer.py
@@ -25,7 +25,7 @@ def send_task(
consumer: ProcessingStrategy[KafkaPayload],
ts: datetime,
task: MonitorsClockTasks,
-):
+) -> None:
value = BrokerValue(
KafkaPayload(b"fake-key", MONITORS_CLOCK_TASKS_CODEC.encode(task), []),
partition,
diff --git a/tests/sentry/monitors/migrations/test_0008_fix_processing_error_keys.py b/tests/sentry/monitors/migrations/test_0008_fix_processing_error_keys.py
index c387855453e12e..0a7d4e27e7d90f 100644
--- a/tests/sentry/monitors/migrations/test_0008_fix_processing_error_keys.py
+++ b/tests/sentry/monitors/migrations/test_0008_fix_processing_error_keys.py
@@ -24,7 +24,7 @@ class FixProcessingErrorKeysTest(TestMigrations):
app = "monitors"
connection = "secondary"
- def setup_initial_state(self):
+ def setup_initial_state(self) -> None:
redis = _get_cluster()
pipeline = redis.pipeline()
@@ -74,7 +74,7 @@ def setup_initial_state(self):
assert project_errors[0].id != self.project_error_id
assert monitor_errors[0].id != self.monitor_error_id
- def test(self):
+ def test(self) -> None:
monitor_errors = get_errors_for_monitor(self.monitor)
assert monitor_errors[0].id == self.monitor_error_id
diff --git a/tests/sentry/monitors/tasks/test_detect_broken_monitor_envs.py b/tests/sentry/monitors/tasks/test_detect_broken_monitor_envs.py
index f28fb19da58026..d32f89f9bbb359 100644
--- a/tests/sentry/monitors/tasks/test_detect_broken_monitor_envs.py
+++ b/tests/sentry/monitors/tasks/test_detect_broken_monitor_envs.py
@@ -33,7 +33,7 @@ def setUp(self) -> None:
self._run_tasks = self.tasks()
self._run_tasks.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self._run_tasks.__exit__(None, None, None)
diff --git a/tests/sentry/monitors/test_models.py b/tests/sentry/monitors/test_models.py
index a5b08205a43eac..88a3ea1c0d8d69 100644
--- a/tests/sentry/monitors/test_models.py
+++ b/tests/sentry/monitors/test_models.py
@@ -285,7 +285,7 @@ def test_config_validator(self) -> None:
class CronMonitorDataSourceHandlerTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.monitor = self.create_monitor(
project=self.project,
@@ -298,11 +298,11 @@ def setUp(self):
organization_id=self.organization.id,
)
- def test_bulk_get_query_object(self):
+ def test_bulk_get_query_object(self) -> None:
result = CronMonitorDataSourceHandler.bulk_get_query_object([self.data_source])
assert result[self.data_source.id] == self.monitor
- def test_bulk_get_query_object__multiple_monitors(self):
+ def test_bulk_get_query_object__multiple_monitors(self) -> None:
monitor2 = self.create_monitor(
project=self.project,
name="Test Monitor 2",
@@ -319,7 +319,7 @@ def test_bulk_get_query_object__multiple_monitors(self):
assert result[self.data_source.id] == self.monitor
assert result[data_source2.id] == monitor2
- def test_bulk_get_query_object__incorrect_data_source(self):
+ def test_bulk_get_query_object__incorrect_data_source(self) -> None:
ds_with_invalid_monitor_id = DataSource.objects.create(
type=DATA_SOURCE_CRON_MONITOR,
source_id="not_an_int",
@@ -341,7 +341,7 @@ def test_bulk_get_query_object__incorrect_data_source(self):
},
)
- def test_bulk_get_query_object__missing_monitor(self):
+ def test_bulk_get_query_object__missing_monitor(self) -> None:
ds_with_deleted_monitor = DataSource.objects.create(
type=DATA_SOURCE_CRON_MONITOR,
source_id="99999999",
@@ -354,11 +354,11 @@ def test_bulk_get_query_object__missing_monitor(self):
assert result[self.data_source.id] == self.monitor
assert result[ds_with_deleted_monitor.id] is None
- def test_bulk_get_query_object__empty_list(self):
+ def test_bulk_get_query_object__empty_list(self) -> None:
result = CronMonitorDataSourceHandler.bulk_get_query_object([])
assert result == {}
- def test_related_model(self):
+ def test_related_model(self) -> None:
relations = CronMonitorDataSourceHandler.related_model(self.data_source)
assert len(relations) == 1
relation = relations[0]
@@ -366,9 +366,9 @@ def test_related_model(self):
assert relation.params["model"] == Monitor
assert relation.params["query"] == {"id": self.data_source.source_id}
- def test_get_instance_limit(self):
+ def test_get_instance_limit(self) -> None:
assert CronMonitorDataSourceHandler.get_instance_limit(self.organization) is None
- def test_get_current_instance_count(self):
+ def test_get_current_instance_count(self) -> None:
with pytest.raises(NotImplementedError):
CronMonitorDataSourceHandler.get_current_instance_count(self.organization)
diff --git a/tests/sentry/monitors/test_schedule.py b/tests/sentry/monitors/test_schedule.py
index 81ac185f721d37..7cb950fa42bec3 100644
--- a/tests/sentry/monitors/test_schedule.py
+++ b/tests/sentry/monitors/test_schedule.py
@@ -5,7 +5,7 @@
from sentry.monitors.types import CrontabSchedule, IntervalSchedule
-def t(hour: int, minute: int):
+def t(hour: int, minute: int) -> datetime:
return datetime(2019, 1, 1, hour, minute, 0, tzinfo=timezone.utc)
diff --git a/tests/sentry/monitors/test_utils.py b/tests/sentry/monitors/test_utils.py
index 0afbb3e5277756..e50c50f01c65a5 100644
--- a/tests/sentry/monitors/test_utils.py
+++ b/tests/sentry/monitors/test_utils.py
@@ -10,11 +10,11 @@
class EnsureCronDetectorTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.monitor = self.create_monitor(owner_user_id=None)
- def test_creates_data_source_and_detector_for_new_monitor(self):
+ def test_creates_data_source_and_detector_for_new_monitor(self) -> None:
assert not get_detector_for_monitor(self.monitor)
ensure_cron_detector(self.monitor)
detector = get_detector_for_monitor(self.monitor)
@@ -25,7 +25,7 @@ def test_creates_data_source_and_detector_for_new_monitor(self):
assert detector.owner_user_id == self.monitor.owner_user_id
assert detector.owner_team_id == self.monitor.owner_team_id
- def test_idempotent_for_existing_data_source(self):
+ def test_idempotent_for_existing_data_source(self) -> None:
ensure_cron_detector(self.monitor)
detector = get_detector_for_monitor(self.monitor)
assert detector
@@ -34,7 +34,7 @@ def test_idempotent_for_existing_data_source(self):
assert detector_after is not None
assert detector.id == detector_after.id
- def test_with_owner_user(self):
+ def test_with_owner_user(self) -> None:
self.monitor.owner_user_id = self.user.id
self.monitor.save()
ensure_cron_detector(self.monitor)
@@ -45,7 +45,7 @@ def test_with_owner_user(self):
assert detector.owner_user_id == self.user.id
assert detector.owner_team_id is None
- def test_with_no_owner(self):
+ def test_with_no_owner(self) -> None:
ensure_cron_detector(self.monitor)
detector = Detector.objects.get(
@@ -55,7 +55,7 @@ def test_with_no_owner(self):
assert detector.owner_user_id is None
assert detector.owner_team_id is None
- def test_handles_database_errors_gracefully(self):
+ def test_handles_database_errors_gracefully(self) -> None:
with (
patch("sentry.monitors.utils.logger") as mock_logger,
patch("sentry.monitors.utils.DataSource.objects.get_or_create") as mock_get_or_create,
@@ -68,7 +68,7 @@ def test_handles_database_errors_gracefully(self):
type=DATA_SOURCE_CRON_MONITOR, source_id=str(self.monitor.id)
).exists()
- def test_atomic_transaction_rollback(self):
+ def test_atomic_transaction_rollback(self) -> None:
with patch("sentry.monitors.utils.Detector.objects.create") as mock_create:
mock_create.side_effect = IntegrityError("Cannot create detector")
@@ -79,15 +79,15 @@ def test_atomic_transaction_rollback(self):
class GetDetectorForMonitorTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.monitor = self.create_monitor()
- def test_returns_none_when_no_detector_exists(self):
+ def test_returns_none_when_no_detector_exists(self) -> None:
detector = get_detector_for_monitor(self.monitor)
assert detector is None
- def test_returns_detector_when_exists(self):
+ def test_returns_detector_when_exists(self) -> None:
ensure_cron_detector(self.monitor)
detector = get_detector_for_monitor(self.monitor)
@@ -96,7 +96,7 @@ def test_returns_detector_when_exists(self):
assert detector.project_id == self.monitor.project_id
assert detector.name == self.monitor.name
- def test_returns_correct_detector_for_specific_monitor(self):
+ def test_returns_correct_detector_for_specific_monitor(self) -> None:
monitor1 = self.monitor
monitor2 = self.create_monitor(name="Monitor 2")
diff --git a/tests/sentry/monitors/test_validators.py b/tests/sentry/monitors/test_validators.py
index 75a3395289b525..c7337f51f4c844 100644
--- a/tests/sentry/monitors/test_validators.py
+++ b/tests/sentry/monitors/test_validators.py
@@ -15,7 +15,7 @@
class MonitorValidatorCreateTest(MonitorTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
diff --git a/tests/sentry/new_migrations/monkey/test_executor.py b/tests/sentry/new_migrations/monkey/test_executor.py
index a4bf862c9537fd..2d6469ce2c7002 100644
--- a/tests/sentry/new_migrations/monkey/test_executor.py
+++ b/tests/sentry/new_migrations/monkey/test_executor.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from unittest.mock import patch
import pytest
@@ -23,7 +24,7 @@ class DummyGetsentryAppConfig(AppConfig):
class TestSentryMigrationExecutor:
@pytest.fixture(autouse=True)
- def _mock_getsentry_if_not_registered(self):
+ def _mock_getsentry_if_not_registered(self) -> Generator[None]:
if "getsentry" in settings.INSTALLED_APPS:
yield
return
diff --git a/tests/sentry/notifications/models/test_notificationsettingoption.py b/tests/sentry/notifications/models/test_notificationsettingoption.py
index 275755d9ced7da..8eedd44d30e28c 100644
--- a/tests/sentry/notifications/models/test_notificationsettingoption.py
+++ b/tests/sentry/notifications/models/test_notificationsettingoption.py
@@ -7,7 +7,7 @@
from sentry.users.models.user import User
-def assert_no_notification_settings():
+def assert_no_notification_settings() -> None:
assert NotificationSettingOption.objects.all().count() == 0
diff --git a/tests/sentry/notifications/models/test_notificationsettingprovider.py b/tests/sentry/notifications/models/test_notificationsettingprovider.py
index 92cbdf1a8204d9..7f25b503709559 100644
--- a/tests/sentry/notifications/models/test_notificationsettingprovider.py
+++ b/tests/sentry/notifications/models/test_notificationsettingprovider.py
@@ -7,7 +7,7 @@
from sentry.users.models.user import User
-def assert_no_notification_settings():
+def assert_no_notification_settings() -> None:
assert NotificationSettingProvider.objects.all().count() == 0
diff --git a/tests/sentry/notifications/notifications/test_digests.py b/tests/sentry/notifications/notifications/test_digests.py
index a481efee33961b..8fc63278e16c1f 100644
--- a/tests/sentry/notifications/notifications/test_digests.py
+++ b/tests/sentry/notifications/notifications/test_digests.py
@@ -59,7 +59,7 @@ def run_test(
event_count: int,
performance_issues: bool = False,
generic_issues: bool = False,
- ):
+ ) -> None:
with patch.object(sentry, "digests") as digests:
backend = RedisBackend()
digests.backend.digest = backend.digest
diff --git a/tests/sentry/notifications/notifications/test_organization_request.py b/tests/sentry/notifications/notifications/test_organization_request.py
index 7f25855160c84e..b4d72f7fe06241 100644
--- a/tests/sentry/notifications/notifications/test_organization_request.py
+++ b/tests/sentry/notifications/notifications/test_organization_request.py
@@ -1,3 +1,5 @@
+from django.db.models.query import QuerySet
+
from sentry.integrations.types import ExternalProviders
from sentry.models.organizationmember import OrganizationMember
from sentry.notifications.notifications.organization_request import OrganizationRequestNotification
@@ -9,7 +11,7 @@
class DummyRoleBasedRecipientStrategy(RoleBasedRecipientStrategy):
- def determine_member_recipients(self):
+ def determine_member_recipients(self) -> QuerySet[OrganizationMember, OrganizationMember]:
return OrganizationMember.objects.filter(organization=self.organization)
diff --git a/tests/sentry/notifications/notifications/test_suspect_commits_activity.py b/tests/sentry/notifications/notifications/test_suspect_commits_activity.py
index 80037afd2da2cc..8c06757271ce6f 100644
--- a/tests/sentry/notifications/notifications/test_suspect_commits_activity.py
+++ b/tests/sentry/notifications/notifications/test_suspect_commits_activity.py
@@ -18,7 +18,7 @@
class SuspectCommitsInActivityNotificationsTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(self.user)
diff --git a/tests/sentry/notifications/test_class_manager.py b/tests/sentry/notifications/test_class_manager.py
index 9dc7daa8f5294b..2009e165890a68 100644
--- a/tests/sentry/notifications/test_class_manager.py
+++ b/tests/sentry/notifications/test_class_manager.py
@@ -11,7 +11,7 @@
class ClassManagerTest(TestCase):
- def tearDown(self):
+ def tearDown(self) -> None:
manager.classes.pop("AnotherDummyNotification", None)
def test_register(self) -> None:
diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py
index 06e456c43b3bab..bfb5df24dddc2d 100644
--- a/tests/sentry/notifications/utils/test_participants.py
+++ b/tests/sentry/notifications/utils/test_participants.py
@@ -654,7 +654,7 @@ def setUp(self) -> None:
self.rule_2 = Rule(Matcher("path", "*.js"), [Owner("team", self.team_2.slug)])
self.rule_3 = Rule(Matcher("path", "*.js"), [Owner("user", self.user_1.email)])
- def tearDown(self):
+ def tearDown(self) -> None:
cache.delete(ProjectOwnership.get_cache_key(self.project.id))
super().tearDown()
diff --git a/tests/sentry/notifications/utils/test_tasks.py b/tests/sentry/notifications/utils/test_tasks.py
index ece2a4f3177501..2442109aa13e91 100644
--- a/tests/sentry/notifications/utils/test_tasks.py
+++ b/tests/sentry/notifications/utils/test_tasks.py
@@ -12,7 +12,7 @@
class NotificationTaskTests(TestCase):
- def tearDown(self):
+ def tearDown(self) -> None:
manager.classes.pop("AnotherDummyNotification", None)
@patch(
diff --git a/tests/sentry/objectstore/test_objectstore.py b/tests/sentry/objectstore/test_objectstore.py
index 5ec1b2db946de8..d776712a5119c3 100644
--- a/tests/sentry/objectstore/test_objectstore.py
+++ b/tests/sentry/objectstore/test_objectstore.py
@@ -12,7 +12,7 @@ class Testserver:
secret = ""
-def test_stores_uncompressed():
+def test_stores_uncompressed() -> None:
server = Testserver()
client = ClientBuilder(
"test", {"base_url": server.url, "jwt_secret": server.secret}
@@ -28,7 +28,7 @@ def test_stores_uncompressed():
assert result.payload.read() == b"oh hai!"
-def test_uses_zstd_by_default():
+def test_uses_zstd_by_default() -> None:
server = Testserver()
client = ClientBuilder(
"test", {"base_url": server.url, "jwt_secret": server.secret}
@@ -51,7 +51,7 @@ def test_uses_zstd_by_default():
assert result.payload.read() == b"oh hai!"
-def test_deletes_stored_stuff():
+def test_deletes_stored_stuff() -> None:
server = Testserver()
client = ClientBuilder(
"test", {"base_url": server.url, "jwt_secret": server.secret}
diff --git a/tests/sentry/options/test_manager.py b/tests/sentry/options/test_manager.py
index 48d9ce459ee7b3..8f4e9383d39c40 100644
--- a/tests/sentry/options/test_manager.py
+++ b/tests/sentry/options/test_manager.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from functools import cached_property
from unittest.mock import patch
@@ -30,17 +31,17 @@
@all_silo_test
class OptionsManagerTest(TestCase):
@cached_property
- def store(self):
+ def store(self) -> OptionsStore:
c = LocMemCache("test", {})
c.clear()
return OptionsStore(cache=c)
@cached_property
- def manager(self):
+ def manager(self) -> OptionsManager:
return OptionsManager(store=self.store)
@pytest.fixture(autouse=True)
- def register(self):
+ def register(self) -> Generator[None]:
default_options = settings.SENTRY_DEFAULT_OPTIONS.copy()
settings.SENTRY_DEFAULT_OPTIONS = {}
self.store.flush_local_cache()
diff --git a/tests/sentry/performance_issues/experiments/test_n_plus_one_db_span_detector.py b/tests/sentry/performance_issues/experiments/test_n_plus_one_db_span_detector.py
index 792f6e478c1f6e..8a6a85295596d5 100644
--- a/tests/sentry/performance_issues/experiments/test_n_plus_one_db_span_detector.py
+++ b/tests/sentry/performance_issues/experiments/test_n_plus_one_db_span_detector.py
@@ -46,7 +46,7 @@ def test_does_not_detect_issues_in_fast_transaction(self) -> None:
def test_detects_n_plus_one_with_unparameterized_query(
self,
- ):
+ ) -> None:
event = get_event("n-plus-one-in-django-index-view-unparameterized")
assert self.find_problems(event) == [
PerformanceProblem(
@@ -102,13 +102,13 @@ def test_detects_n_plus_one_with_unparameterized_query(
def test_does_not_detect_n_plus_one_with_source_redis_query_with_noredis_detector(
self,
- ):
+ ) -> None:
event = get_event("n-plus-one-in-django-index-view-source-redis")
assert self.find_problems(event) == []
def test_does_not_detect_n_plus_one_with_repeating_redis_query_with_noredis_detector(
self,
- ):
+ ) -> None:
event = get_event("n-plus-one-in-django-index-view-repeating-redis")
assert self.find_problems(event) == []
@@ -165,7 +165,7 @@ def test_finds_n_plus_one_with_db_dot_something_spans(self) -> None:
def test_n_plus_one_db_detector_has_different_fingerprints_for_different_n_plus_one_events(
self,
- ):
+ ) -> None:
index_n_plus_one_event = get_event("n-plus-one-in-django-index-view")
new_n_plus_one_event = get_event("n-plus-one-in-django-new-view")
diff --git a/tests/sentry/performance_issues/test_n_plus_one_db_span_detector.py b/tests/sentry/performance_issues/test_n_plus_one_db_span_detector.py
index 53b76a73ce9ebf..55fc43e71f1c96 100644
--- a/tests/sentry/performance_issues/test_n_plus_one_db_span_detector.py
+++ b/tests/sentry/performance_issues/test_n_plus_one_db_span_detector.py
@@ -42,7 +42,7 @@ def test_does_not_detect_issues_in_fast_transaction(self) -> None:
def test_detects_n_plus_one_with_unparameterized_query(
self,
- ):
+ ) -> None:
event = get_event("n-plus-one-in-django-index-view-unparameterized")
assert self.find_problems(event) == [
PerformanceProblem(
@@ -98,13 +98,13 @@ def test_detects_n_plus_one_with_unparameterized_query(
def test_does_not_detect_n_plus_one_with_source_redis_query_with_noredis_detector(
self,
- ):
+ ) -> None:
event = get_event("n-plus-one-in-django-index-view-source-redis")
assert self.find_problems(event) == []
def test_does_not_detect_n_plus_one_with_repeating_redis_query_with_noredis_detector(
self,
- ):
+ ) -> None:
event = get_event("n-plus-one-in-django-index-view-repeating-redis")
assert self.find_problems(event) == []
@@ -161,7 +161,7 @@ def test_finds_n_plus_one_with_db_dot_something_spans(self) -> None:
def test_n_plus_one_db_detector_has_different_fingerprints_for_different_n_plus_one_events(
self,
- ):
+ ) -> None:
index_n_plus_one_event = get_event("n-plus-one-in-django-index-view")
new_n_plus_one_event = get_event("n-plus-one-in-django-new-view")
diff --git a/tests/sentry/performance_issues/test_render_blocking_asset_detector.py b/tests/sentry/performance_issues/test_render_blocking_asset_detector.py
index eeb63cbb57386b..4c41cb4a753a7f 100644
--- a/tests/sentry/performance_issues/test_render_blocking_asset_detector.py
+++ b/tests/sentry/performance_issues/test_render_blocking_asset_detector.py
@@ -325,7 +325,7 @@ def test_does_not_detect_if_render_blocking_status_is_non_blocking(self) -> None
),
],
)
-def test_fingerprint_similarity(expected, first_url, second_url) -> None:
+def test_fingerprint_similarity(expected: bool, first_url: str, second_url: str) -> None:
first_event = _valid_render_blocking_asset_event(first_url)
second_event = _valid_render_blocking_asset_event(second_url)
settings = get_detection_settings()
diff --git a/tests/sentry/plugins/base/test_v2.py b/tests/sentry/plugins/base/test_v2.py
index fb1315eacb0747..28a78ed138775e 100644
--- a/tests/sentry/plugins/base/test_v2.py
+++ b/tests/sentry/plugins/base/test_v2.py
@@ -5,7 +5,7 @@
class Plugin2TestCase(TestCase):
def test_reset_config(self) -> None:
class APlugin(Plugin2):
- def get_conf_key(self):
+ def get_conf_key(self) -> str:
return "a-plugin"
project = self.create_project()
diff --git a/tests/sentry/plugins/bases/test_issue.py b/tests/sentry/plugins/bases/test_issue.py
index 820d46dbb223e5..582126e3866d39 100644
--- a/tests/sentry/plugins/bases/test_issue.py
+++ b/tests/sentry/plugins/bases/test_issue.py
@@ -11,7 +11,7 @@
@control_silo_test
class GetAuthForUserTest(TestCase):
- def _get_mock_user(self):
+ def _get_mock_user(self) -> mock.Mock:
user = mock.Mock(spec=User(id=1))
user.is_authenticated = False
return user
diff --git a/tests/sentry/plugins/sentry_webhooks/test_plugin.py b/tests/sentry/plugins/sentry_webhooks/test_plugin.py
index 83ce79595209b9..dae743cd268c24 100644
--- a/tests/sentry/plugins/sentry_webhooks/test_plugin.py
+++ b/tests/sentry/plugins/sentry_webhooks/test_plugin.py
@@ -16,7 +16,7 @@
class WebHooksPluginTest(TestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> WebHooksPlugin:
return WebHooksPlugin()
def setUp(self) -> None:
diff --git a/tests/sentry/preprod/api/endpoints/test_organization_preprod_artifact_assemble.py b/tests/sentry/preprod/api/endpoints/test_organization_preprod_artifact_assemble.py
index c96bcc3747660c..a0847437ca72cc 100644
--- a/tests/sentry/preprod/api/endpoints/test_organization_preprod_artifact_assemble.py
+++ b/tests/sentry/preprod/api/endpoints/test_organization_preprod_artifact_assemble.py
@@ -187,7 +187,7 @@ def setUp(self) -> None:
self.feature_context = Feature("organizations:preprod-artifact-assemble")
self.feature_context.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
self.feature_context.__exit__(None, None, None)
super().tearDown()
diff --git a/tests/sentry/preprod/api/endpoints/test_project_preprod_build_details.py b/tests/sentry/preprod/api/endpoints/test_project_preprod_build_details.py
index 3d540619b14787..18f1717224a6b5 100644
--- a/tests/sentry/preprod/api/endpoints/test_project_preprod_build_details.py
+++ b/tests/sentry/preprod/api/endpoints/test_project_preprod_build_details.py
@@ -48,7 +48,7 @@ def setUp(self) -> None:
self.feature_context = self.feature({"organizations:preprod-frontend-routes": True})
self.feature_context.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
# Exit the feature flag context manager
self.feature_context.__exit__(None, None, None)
super().tearDown()
diff --git a/tests/sentry/preprod/api/endpoints/test_project_preprod_check_for_updates.py b/tests/sentry/preprod/api/endpoints/test_project_preprod_check_for_updates.py
index 1afbc5f462e784..41e12af32da2e8 100644
--- a/tests/sentry/preprod/api/endpoints/test_project_preprod_check_for_updates.py
+++ b/tests/sentry/preprod/api/endpoints/test_project_preprod_check_for_updates.py
@@ -21,7 +21,7 @@ def setUp(self) -> None:
self.feature_context = self.feature({"organizations:preprod-frontend-routes": True})
self.feature_context.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
# Exit the feature flag context manager
self.feature_context.__exit__(None, None, None)
super().tearDown()
diff --git a/tests/sentry/preprod/api/endpoints/test_project_preprod_list_builds.py b/tests/sentry/preprod/api/endpoints/test_project_preprod_list_builds.py
index 01f972bf3c2b81..e3d8daf005a850 100644
--- a/tests/sentry/preprod/api/endpoints/test_project_preprod_list_builds.py
+++ b/tests/sentry/preprod/api/endpoints/test_project_preprod_list_builds.py
@@ -77,7 +77,7 @@ def setUp(self) -> None:
self.feature_context = self.feature({"organizations:preprod-frontend-routes": True})
self.feature_context.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
# Exit the feature flag context manager
self.feature_context.__exit__(None, None, None)
super().tearDown()
diff --git a/tests/sentry/preprod/test_tasks.py b/tests/sentry/preprod/test_tasks.py
index c0de8016e34d3e..ef12ce6074fc70 100644
--- a/tests/sentry/preprod/test_tasks.py
+++ b/tests/sentry/preprod/test_tasks.py
@@ -26,7 +26,7 @@
class AssemblePreprodArtifactTest(BaseAssembleTest):
- def tearDown(self):
+ def tearDown(self) -> None:
"""Clean up assembly status and force garbage collection to close unclosed files"""
import gc
diff --git a/tests/sentry/processing/backpressure/test_checking.py b/tests/sentry/processing/backpressure/test_checking.py
index 90005dabe93b74..1c501418483cf7 100644
--- a/tests/sentry/processing/backpressure/test_checking.py
+++ b/tests/sentry/processing/backpressure/test_checking.py
@@ -146,7 +146,7 @@ def test_backpressure_not_enabled(process_profile_task: MagicMock) -> None:
process_profile_task.assert_called_once()
-def process_one_message(consumer_type: str, topic: str, payload: str):
+def process_one_message(consumer_type: str, topic: str, payload: str) -> None:
if consumer_type == "profiles":
processing_strategy = ProcessProfileStrategyFactory().create_with_partitions(
commit=Mock(), partitions={}
diff --git a/tests/sentry/profiles/consumers/test_process.py b/tests/sentry/profiles/consumers/test_process.py
index a6134a6b6422f3..fda5293e785355 100644
--- a/tests/sentry/profiles/consumers/test_process.py
+++ b/tests/sentry/profiles/consumers/test_process.py
@@ -18,13 +18,13 @@
class TestProcessProfileConsumerStrategy(TestCase):
@staticmethod
- def processing_factory():
+ def processing_factory() -> ProcessProfileStrategyFactory:
return ProcessProfileStrategyFactory()
@patch("sentry.profiles.consumers.process.factory.process_profile_task.delay")
def test_basic_profile_to_celery(self, process_profile_task: MagicMock) -> None:
processing_strategy = self.processing_factory().create_with_partitions(
- commit=Mock(), partitions=None
+ commit=Mock(), partitions={}
)
message_dict = {
"organization_id": 1,
diff --git a/tests/sentry/profiles/test_java.py b/tests/sentry/profiles/test_java.py
index 9b5a164d84d730..8c35c723fcd94c 100644
--- a/tests/sentry/profiles/test_java.py
+++ b/tests/sentry/profiles/test_java.py
@@ -1,4 +1,5 @@
import pytest
+from symbolic.proguard import ProguardMapper
from sentry.lang.java.proguard import open_proguard_mapper
from sentry.profiles.java import deobfuscate_signature, format_signature
@@ -49,6 +50,6 @@ def mapper(tmp_path):
),
],
)
-def test_deobfuscate_signature(mapper, obfuscated, expected) -> None:
+def test_deobfuscate_signature(mapper: ProguardMapper, obfuscated: str, expected: str) -> None:
types = deobfuscate_signature(obfuscated, mapper)
assert format_signature(types) == expected
diff --git a/tests/sentry/quotas/test_redis.py b/tests/sentry/quotas/test_redis.py
index ab71869c52981b..4e84335f50f874 100644
--- a/tests/sentry/quotas/test_redis.py
+++ b/tests/sentry/quotas/test_redis.py
@@ -1,4 +1,5 @@
import time
+from collections.abc import Generator
from functools import cached_property
from unittest import mock
@@ -241,21 +242,21 @@ def test_abuse_quotas(self) -> None:
assert quotas[0].reason_code == "project_abuse_limit"
@pytest.fixture(autouse=True)
- def _patch_get_project_quota(self):
+ def _patch_get_project_quota(self) -> Generator[None]:
with mock.patch.object(
RedisQuota, "get_project_quota", return_value=(0, 60)
) as self.get_project_quota:
yield
@pytest.fixture(autouse=True)
- def _patch_get_organization_quota(self):
+ def _patch_get_organization_quota(self) -> Generator[None]:
with mock.patch.object(
RedisQuota, "get_organization_quota", return_value=(0, 60)
) as self.get_organization_quota:
yield
@pytest.fixture(autouse=True)
- def _patch_get_monitor_quota(self):
+ def _patch_get_monitor_quota(self) -> Generator[None]:
with mock.patch.object(
RedisQuota, "get_monitor_quota", return_value=(0, 60)
) as self.get_monitor_quota:
diff --git a/tests/sentry/ratelimits/test_cardinality.py b/tests/sentry/ratelimits/test_cardinality.py
index 717825398ba583..3f7a3ac8370f48 100644
--- a/tests/sentry/ratelimits/test_cardinality.py
+++ b/tests/sentry/ratelimits/test_cardinality.py
@@ -11,7 +11,7 @@
@pytest.fixture
-def limiter():
+def limiter() -> RedisCardinalityLimiter:
return RedisCardinalityLimiter()
diff --git a/tests/sentry/ratelimits/utils/test_above_rate_limit_check.py b/tests/sentry/ratelimits/utils/test_above_rate_limit_check.py
index 76c9712fc4dc83..7230407127eb9b 100644
--- a/tests/sentry/ratelimits/utils/test_above_rate_limit_check.py
+++ b/tests/sentry/ratelimits/utils/test_above_rate_limit_check.py
@@ -67,7 +67,7 @@ def test_above_rate_limit_check(self) -> None:
)
def test_concurrent(self) -> None:
- def do_request():
+ def do_request() -> RateLimitMeta:
uid = uuid.uuid4().hex
meta = above_rate_limit_check(
"foo", RateLimit(limit=10, window=1, concurrent_limit=3), uid, self.group
diff --git a/tests/sentry/receivers/test_transactions.py b/tests/sentry/receivers/test_transactions.py
index c91c24b076066b..4fbc173d5321fc 100644
--- a/tests/sentry/receivers/test_transactions.py
+++ b/tests/sentry/receivers/test_transactions.py
@@ -18,7 +18,7 @@
class RecordFirstTransactionTest(TestCase):
@cached_property
- def min_ago(self):
+ def min_ago(self) -> str:
return before_now(minutes=1).isoformat()
def test_transaction_processed(self) -> None:
diff --git a/tests/sentry/relay/config/test_experimental.py b/tests/sentry/relay/config/test_experimental.py
index 243acff9126107..d5f9524b7ae1f3 100644
--- a/tests/sentry/relay/config/test_experimental.py
+++ b/tests/sentry/relay/config/test_experimental.py
@@ -25,7 +25,7 @@ def test_time_checker_no_throw_on_timeout_no_hit() -> None:
@pytest.mark.parametrize("timeout", (-1, 0))
-def test_time_checker_noop_on_invalid_timeout(timeout) -> None:
+def test_time_checker_noop_on_invalid_timeout(timeout: int) -> None:
checker = TimeChecker(timedelta(seconds=timeout))
checker.check()
diff --git a/tests/sentry/release_health/test_tasks.py b/tests/sentry/release_health/test_tasks.py
index 9bccceb6dae45f..ea02ab953d65c1 100644
--- a/tests/sentry/release_health/test_tasks.py
+++ b/tests/sentry/release_health/test_tasks.py
@@ -114,7 +114,7 @@ def setUp(self) -> None:
group_id=self.event.group.id, project_id=self.project.id, release_id=self.release.id
)
- def tearDown(self):
+ def tearDown(self) -> None:
self.backend.__exit__(None, None, None)
def test_simple(self) -> None:
diff --git a/tests/sentry/releases/endpoints/test_release_deploys.py b/tests/sentry/releases/endpoints/test_release_deploys.py
index 37830b3aedbabc..343c85bf9e85f4 100644
--- a/tests/sentry/releases/endpoints/test_release_deploys.py
+++ b/tests/sentry/releases/endpoints/test_release_deploys.py
@@ -393,7 +393,7 @@ def test_environment_validation_failure(self) -> None:
assert response.status_code == 400, response.content
assert 0 == Deploy.objects.count()
- def test_api_token_with_project_releases_scope(self):
+ def test_api_token_with_project_releases_scope(self) -> None:
"""
Test that tokens with `project:releases` scope can create deploys for only one project
when the release is associated with multiple projects.
diff --git a/tests/sentry/relocation/api/endpoints/artifacts/test_details.py b/tests/sentry/relocation/api/endpoints/artifacts/test_details.py
index e69e3acd603cff..3973f5cda50450 100644
--- a/tests/sentry/relocation/api/endpoints/artifacts/test_details.py
+++ b/tests/sentry/relocation/api/endpoints/artifacts/test_details.py
@@ -81,7 +81,7 @@ def setUp(self) -> None:
).getvalue()
self.relocation_storage.save(f"{dir}/encrypted/file.tar", BytesIO(self.tarball))
- def mock_kms_client(self, fake_kms_client: mock.Mock):
+ def mock_kms_client(self, fake_kms_client: mock.Mock) -> None:
unwrapped = unwrap_encrypted_export_tarball(BytesIO(self.tarball))
plaintext_dek = LocalFileDecryptor.from_bytes(
self.priv_key_pem
diff --git a/tests/sentry/relocation/api/endpoints/test_public_key.py b/tests/sentry/relocation/api/endpoints/test_public_key.py
index 1c85461c43b579..9aabc63664abe3 100644
--- a/tests/sentry/relocation/api/endpoints/test_public_key.py
+++ b/tests/sentry/relocation/api/endpoints/test_public_key.py
@@ -22,7 +22,7 @@ def setUp(self) -> None:
(_, pub_key_pem) = generate_rsa_key_pair()
self.pub_key_pem = pub_key_pem
- def mock_kms_client(self, fake_kms_client: mock.Mock):
+ def mock_kms_client(self, fake_kms_client: mock.Mock) -> None:
fake_kms_client.return_value.get_public_key.return_value = SimpleNamespace(
pem=self.pub_key_pem.decode("utf-8")
)
diff --git a/tests/sentry/relocation/test_utils.py b/tests/sentry/relocation/test_utils.py
index 410bd8c112e5bf..e18c70d50106cd 100644
--- a/tests/sentry/relocation/test_utils.py
+++ b/tests/sentry/relocation/test_utils.py
@@ -30,7 +30,7 @@ def setUp(self) -> None:
)
self.uuid = self.relocation.uuid
- def mock_message_builder(self, fake_message_builder: Mock):
+ def mock_message_builder(self, fake_message_builder: Mock) -> None:
fake_message_builder.return_value.send_async.return_value = MagicMock()
diff --git a/tests/sentry/replays/endpoints/test_organization_replay_index.py b/tests/sentry/replays/endpoints/test_organization_replay_index.py
index 6c6cdbdd761931..29257baec2e5bc 100644
--- a/tests/sentry/replays/endpoints/test_organization_replay_index.py
+++ b/tests/sentry/replays/endpoints/test_organization_replay_index.py
@@ -25,7 +25,7 @@ def setUp(self) -> None:
self.url = reverse(self.endpoint, args=(self.organization.slug,))
@property
- def features(self):
+ def features(self) -> dict[str, bool]:
return {"organizations:session-replay": True}
def test_feature_flag_disabled(self) -> None:
diff --git a/tests/sentry/replays/endpoints/test_project_replay_recording_segment_details.py b/tests/sentry/replays/endpoints/test_project_replay_recording_segment_details.py
index 0aea213c7edcb8..00e23e15a0176f 100644
--- a/tests/sentry/replays/endpoints/test_project_replay_recording_segment_details.py
+++ b/tests/sentry/replays/endpoints/test_project_replay_recording_segment_details.py
@@ -74,7 +74,7 @@ def test_get_replay_recording_segment_download(self) -> None:
class FilestoreReplayRecordingSegmentDetailsTestCase(EnvironmentBase):
- def init_environment(self):
+ def init_environment(self) -> None:
metadata = RecordingSegmentStorageMeta(
project_id=self.project.id,
replay_id=self.replay_id,
@@ -87,7 +87,7 @@ def init_environment(self):
class StorageReplayRecordingSegmentDetailsTestCase(EnvironmentBase, ReplaysSnubaTestCase):
- def init_environment(self):
+ def init_environment(self) -> None:
metadata = RecordingSegmentStorageMeta(
project_id=self.project.id,
replay_id=self.replay_id,
@@ -110,7 +110,7 @@ def init_environment(self):
class PackedStorageReplayRecordingSegmentDetailsTestCase(EnvironmentBase, ReplaysSnubaTestCase):
- def init_environment(self):
+ def init_environment(self) -> None:
metadata = RecordingSegmentStorageMeta(
project_id=self.project.id,
replay_id=self.replay_id,
diff --git a/tests/sentry/replays/tasks/test_delete_replays_bulk.py b/tests/sentry/replays/tasks/test_delete_replays_bulk.py
index 67c0b2356cf088..6956fab624c22e 100644
--- a/tests/sentry/replays/tasks/test_delete_replays_bulk.py
+++ b/tests/sentry/replays/tasks/test_delete_replays_bulk.py
@@ -2,12 +2,17 @@
import datetime
import uuid
+from collections.abc import Generator
from unittest.mock import MagicMock, Mock, patch
from sentry.replays.models import DeletionJobStatus, ReplayDeletionJobModel
from sentry.replays.tasks import run_bulk_replay_delete_job
from sentry.replays.testutils import mock_replay
-from sentry.replays.usecases.delete import SEER_DELETE_SUMMARIES_URL, fetch_rows_matching_pattern
+from sentry.replays.usecases.delete import (
+ SEER_DELETE_SUMMARIES_URL,
+ MatchedRows,
+ fetch_rows_matching_pattern,
+)
from sentry.testutils.cases import APITestCase, ReplaysSnubaTestCase
from sentry.testutils.helpers import TaskRunner
from sentry.utils import json
@@ -242,7 +247,7 @@ def test_fetch_rows_matching_pattern(self) -> None:
def test_run_bulk_replay_delete_job_has_seer_data_true(
self, mock_delete_matched_rows: MagicMock, mock_fetch_rows: MagicMock, mock_post: MagicMock
) -> None:
- def row_generator():
+ def row_generator() -> Generator[MatchedRows]:
yield {
"rows": [
{
@@ -308,7 +313,7 @@ def row_generator():
def test_run_bulk_replay_delete_job_has_seer_data_false(
self, mock_delete_matched_rows: MagicMock, mock_fetch_rows: MagicMock, mock_post: MagicMock
) -> None:
- def row_generator():
+ def row_generator() -> Generator[MatchedRows]:
yield {
"rows": [
{
diff --git a/tests/sentry/replays/unit/test_event_parser.py b/tests/sentry/replays/unit/test_event_parser.py
index 13d9a39b65fcbd..93ead232222797 100644
--- a/tests/sentry/replays/unit/test_event_parser.py
+++ b/tests/sentry/replays/unit/test_event_parser.py
@@ -780,7 +780,7 @@ def test_which() -> None:
},
],
)
-def test_parse_highlighted_events_fault_tolerance(event) -> None:
+def test_parse_highlighted_events_fault_tolerance(event: dict[str, Any]) -> None:
# If the test raises an exception we fail. All of these events are invalid.
builder = HighlightedEventsBuilder()
builder.add(which(event), event, sampled=True)
diff --git a/tests/sentry/rules/actions/test_create_ticket_utils.py b/tests/sentry/rules/actions/test_create_ticket_utils.py
index 3c2d914b403788..f0e21b5540a41c 100644
--- a/tests/sentry/rules/actions/test_create_ticket_utils.py
+++ b/tests/sentry/rules/actions/test_create_ticket_utils.py
@@ -16,7 +16,7 @@ def test_build_description(self) -> None:
installation = MagicMock()
installation.get_group_description.return_value = "Test description"
- def generate_footer(url):
+ def generate_footer(url) -> str:
return f"\n\nThis issue was created by a rule: {url}"
description = build_description(self.event, self.rule.id, installation, generate_footer)
@@ -31,7 +31,7 @@ def test_build_description_workflow_engine_ui(self) -> None:
installation.get_group_description.return_value = "Test description"
workflow_id = 123
- def generate_footer(url):
+ def generate_footer(url) -> str:
return f"\n\nThis issue was created by a workflow: {url}"
description = build_description_workflow_engine_ui(
diff --git a/tests/sentry/rules/actions/test_notify_event_sentry_app.py b/tests/sentry/rules/actions/test_notify_event_sentry_app.py
index 2cb020c2e3a84a..492eafd8f54e18 100644
--- a/tests/sentry/rules/actions/test_notify_event_sentry_app.py
+++ b/tests/sentry/rules/actions/test_notify_event_sentry_app.py
@@ -24,7 +24,7 @@ class NotifyEventSentryAppActionTest(RuleTestCase):
]
@pytest.fixture(autouse=True)
- def create_schema(self):
+ def create_schema(self) -> None:
self.schema = {"elements": [self.create_alert_rule_action_schema()]}
def test_applies_correctly_for_sentry_apps(self) -> None:
diff --git a/tests/sentry/rules/conditions/test_tagged_event.py b/tests/sentry/rules/conditions/test_tagged_event.py
index 215ba9212b5370..4a551daae8de3d 100644
--- a/tests/sentry/rules/conditions/test_tagged_event.py
+++ b/tests/sentry/rules/conditions/test_tagged_event.py
@@ -1,5 +1,6 @@
from sentry.rules.conditions.tagged_event import TaggedEventCondition
from sentry.rules.match import MatchType
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import RuleTestCase
from sentry.testutils.skips import requires_snuba
@@ -9,7 +10,7 @@
class TaggedEventConditionTest(RuleTestCase):
rule_cls = TaggedEventCondition
- def get_event(self):
+ def get_event(self) -> Event:
event = self.event
event.data["tags"] = (
("logger", "sentry.example"),
diff --git a/tests/sentry/rules/processing/test_buffer_processing.py b/tests/sentry/rules/processing/test_buffer_processing.py
index c51e047595da12..d16da90e53c43c 100644
--- a/tests/sentry/rules/processing/test_buffer_processing.py
+++ b/tests/sentry/rules/processing/test_buffer_processing.py
@@ -37,7 +37,7 @@ def setUp(self) -> None:
self.mock_redis_buffer = mock_redis_buffer()
self.mock_redis_buffer.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
self.mock_redis_buffer.__exit__(None, None, None)
def push_to_hash(self, project_id, rule_id, group_id, event_id=None, occurrence_id=None):
diff --git a/tests/sentry/rules/processing/test_delayed_processing.py b/tests/sentry/rules/processing/test_delayed_processing.py
index 55ec516efbee90..bcf61cf07aee6a 100644
--- a/tests/sentry/rules/processing/test_delayed_processing.py
+++ b/tests/sentry/rules/processing/test_delayed_processing.py
@@ -560,7 +560,7 @@ def setUp(self) -> None:
self.patcher = patch("sentry.rules.processing.delayed_processing.passes_comparison")
self.mock_passes_comparison = self.patcher.start()
- def tearDown(self):
+ def tearDown(self) -> None:
self.patcher.stop()
def test_comparison(self) -> None:
diff --git a/tests/sentry/rules/processing/test_processor.py b/tests/sentry/rules/processing/test_processor.py
index a3f5443c2a5fa9..d06027209be45d 100644
--- a/tests/sentry/rules/processing/test_processor.py
+++ b/tests/sentry/rules/processing/test_processor.py
@@ -45,7 +45,7 @@ class MockConditionTrue(EventCondition):
id = "tests.sentry.rules.processing.test_processor.MockConditionTrue"
label = "Mock condition which always passes."
- def passes(self, event, state):
+ def passes(self, event, state) -> bool:
return True
@@ -679,7 +679,7 @@ class MockFilterTrue(EventFilter):
id = "tests.sentry.rules.processing.test_processor.MockFilterTrue"
label = "Mock filter which always passes."
- def passes(self, event, state):
+ def passes(self, event, state) -> bool:
return True
@@ -687,7 +687,7 @@ class MockFilterFalse(EventFilter):
id = "tests.sentry.rules.processing.test_processor.MockFilterFalse"
label = "Mock filter which never passes."
- def passes(self, event, state):
+ def passes(self, event, state) -> bool:
return False
diff --git a/tests/sentry/runner/commands/test_backup.py b/tests/sentry/runner/commands/test_backup.py
index f2a0e7b510d72d..517c7276c71ea0 100644
--- a/tests/sentry/runner/commands/test_backup.py
+++ b/tests/sentry/runner/commands/test_backup.py
@@ -481,7 +481,7 @@ def test_sanitize_with_gcp_kms_decryption_and_encryption(
def cli_import_then_export(
scope: str, *, import_args: Sequence[str] = (), export_args: Sequence[str] = ()
-):
+) -> None:
with TemporaryDirectory() as tmp_dir:
tmp_in_findings = Path(tmp_dir).joinpath(
f"{''.join(choice(ascii_letters)for _ in range(6))}.json"
@@ -618,7 +618,7 @@ class GoodImportExportCommandEncryptionTests(TransactionTestCase):
"""
@staticmethod
- def cli_encrypted_import_then_export_use_local(scope: str):
+ def cli_encrypted_import_then_export_use_local(scope: str) -> None:
with TemporaryDirectory() as tmp_dir:
(tmp_priv_key_path, tmp_pub_key_path, tmp_tar_path) = create_encryption_test_files(
tmp_dir
@@ -651,7 +651,9 @@ def cli_encrypted_import_then_export_use_local(scope: str):
assert rv.exit_code == 0, rv.output
@staticmethod
- def cli_encrypted_import_then_export_use_gcp_kms(scope: str, fake_kms_client: mock.Mock):
+ def cli_encrypted_import_then_export_use_gcp_kms(
+ scope: str, fake_kms_client: mock.Mock
+ ) -> None:
fake_kms_client.reset_mock()
with TemporaryDirectory() as tmp_dir:
(tmp_priv_key_path, tmp_pub_key_path, tmp_tar_path) = create_encryption_test_files(
diff --git a/tests/sentry/runner/commands/test_run.py b/tests/sentry/runner/commands/test_run.py
index 2d0e7e859464ca..d7ca8db5882c7e 100644
--- a/tests/sentry/runner/commands/test_run.py
+++ b/tests/sentry/runner/commands/test_run.py
@@ -13,6 +13,6 @@
("192.168.1.1:9001", ("192.168.1.1", 9001)),
),
)
-def test_address_validate(value, expected) -> None:
+def test_address_validate(value: str | None, expected: tuple[str | None, int | None]) -> None:
ctx, param = mock.Mock(), mock.Mock()
assert run._address_validate(ctx, param, value) == expected
diff --git a/tests/sentry/search/events/test_filter.py b/tests/sentry/search/events/test_filter.py
index dea1b887d54c4e..fd04e641ff9d1c 100644
--- a/tests/sentry/search/events/test_filter.py
+++ b/tests/sentry/search/events/test_filter.py
@@ -1181,7 +1181,7 @@ def test_snql_boolean_search(description, query, expected_where, expected_having
),
],
)
-def test_snql_malformed_boolean_search(description, query, expected_message) -> None:
+def test_snql_malformed_boolean_search(description: str, query: str, expected_message: str) -> None:
dataset = Dataset.Discover
params: ParamsType = {}
query_filter = UnresolvedQuery(
diff --git a/tests/sentry/seer/autofix/test_issue_summary.py b/tests/sentry/seer/autofix/test_issue_summary.py
index f4405c3ec7339a..3a9f839d9c2d21 100644
--- a/tests/sentry/seer/autofix/test_issue_summary.py
+++ b/tests/sentry/seer/autofix/test_issue_summary.py
@@ -33,7 +33,7 @@ def setUp(self) -> None:
self.group = self.create_group()
self.login_as(user=self.user)
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
# Clear the cache after each test
cache.delete(f"ai-group-summary-v2:{self.group.id}")
diff --git a/tests/sentry/seer/endpoints/test_group_ai_autofix.py b/tests/sentry/seer/endpoints/test_group_ai_autofix.py
index ed63214640707e..f4e17431191898 100644
--- a/tests/sentry/seer/endpoints/test_group_ai_autofix.py
+++ b/tests/sentry/seer/endpoints/test_group_ai_autofix.py
@@ -20,7 +20,7 @@
@with_feature("organizations:gen-ai-features")
@patch("sentry.seer.autofix.autofix.get_seer_org_acknowledgement", return_value=True)
class GroupAutofixEndpointTest(APITestCase, SnubaTestCase):
- def _get_url(self, group_id: int):
+ def _get_url(self, group_id: int) -> str:
return f"/api/0/issues/{group_id}/autofix/"
def setUp(self) -> None:
diff --git a/tests/sentry/seer/endpoints/test_group_ai_summary.py b/tests/sentry/seer/endpoints/test_group_ai_summary.py
index f8359149896054..d79ec13030bf6a 100644
--- a/tests/sentry/seer/endpoints/test_group_ai_summary.py
+++ b/tests/sentry/seer/endpoints/test_group_ai_summary.py
@@ -16,7 +16,7 @@ def setUp(self) -> None:
self.url = self._get_url(self.group.id)
self.login_as(user=self.user)
- def _get_url(self, group_id: int):
+ def _get_url(self, group_id: int) -> str:
return f"/api/0/issues/{group_id}/summarize/"
@patch("sentry.seer.endpoints.group_ai_summary.get_issue_summary")
diff --git a/tests/sentry/seer/endpoints/test_organization_page_web_vitals_summary.py b/tests/sentry/seer/endpoints/test_organization_page_web_vitals_summary.py
index 7ee6333629fa2a..23b3e83f8f1755 100644
--- a/tests/sentry/seer/endpoints/test_organization_page_web_vitals_summary.py
+++ b/tests/sentry/seer/endpoints/test_organization_page_web_vitals_summary.py
@@ -83,7 +83,7 @@ def setUp(self) -> None:
self.url = self._get_url()
- def _get_url(self):
+ def _get_url(self) -> str:
return f"/api/0/organizations/{self.org.slug}/page-web-vitals-summary/"
@patch("sentry.seer.endpoints.organization_page_web_vitals_summary.get_page_web_vitals_summary")
diff --git a/tests/sentry/seer/endpoints/test_organization_trace_summary.py b/tests/sentry/seer/endpoints/test_organization_trace_summary.py
index 9d184f315b8d06..5c4f1c2529cad6 100644
--- a/tests/sentry/seer/endpoints/test_organization_trace_summary.py
+++ b/tests/sentry/seer/endpoints/test_organization_trace_summary.py
@@ -77,7 +77,7 @@ def setUp(self) -> None:
self.url = self._get_url()
- def _get_url(self):
+ def _get_url(self) -> str:
return f"/api/0/organizations/{self.org.slug}/trace-summary/"
@patch("sentry.seer.endpoints.organization_trace_summary.get_trace_summary")
diff --git a/tests/sentry/seer/endpoints/test_seer_rpc.py b/tests/sentry/seer/endpoints/test_seer_rpc.py
index d0b265631d8ec3..7ce9aab5ee5635 100644
--- a/tests/sentry/seer/endpoints/test_seer_rpc.py
+++ b/tests/sentry/seer/endpoints/test_seer_rpc.py
@@ -64,7 +64,7 @@ def setUp(self) -> None:
self.organization = self.create_organization(owner=self.user)
@pytest.fixture(autouse=True)
- def inject_fixtures(self, caplog):
+ def inject_fixtures(self, caplog: pytest.LogCaptureFixture) -> None:
self._caplog = caplog
def test_get_organization_seer_consent_by_org_name_no_integrations(self) -> None:
diff --git a/tests/sentry/seer/fetch_issues/test_more_parsing.py b/tests/sentry/seer/fetch_issues/test_more_parsing.py
index 1154b477c73d79..d53a6b5a9c45a9 100644
--- a/tests/sentry/seer/fetch_issues/test_more_parsing.py
+++ b/tests/sentry/seer/fetch_issues/test_more_parsing.py
@@ -7,7 +7,7 @@
class TestPythonParserMore:
@pytest.fixture
- def parser(self):
+ def parser(self) -> more_parsing.PythonParserMore:
return cast(more_parsing.PythonParserMore, more_parsing.patch_parsers_more["py"])
def test_python_motivating_example(self, parser: more_parsing.PythonParserMore) -> None:
diff --git a/tests/sentry/seer/test_page_web_vitals_summary.py b/tests/sentry/seer/test_page_web_vitals_summary.py
index f4210cc38d7c2c..e09165d1d8be1f 100644
--- a/tests/sentry/seer/test_page_web_vitals_summary.py
+++ b/tests/sentry/seer/test_page_web_vitals_summary.py
@@ -86,7 +86,7 @@ def setUp(self) -> None:
cache.clear()
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
# Clear the cache with the correct key format
cache.delete("ai-page-web-vitals-summary:" + "-".join(sorted([self.trace_id])))
diff --git a/tests/sentry/seer/test_trace_summary.py b/tests/sentry/seer/test_trace_summary.py
index 540e4915c11550..dc42da0c17d8e8 100644
--- a/tests/sentry/seer/test_trace_summary.py
+++ b/tests/sentry/seer/test_trace_summary.py
@@ -86,7 +86,7 @@ def setUp(self) -> None:
cache.clear()
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
cache.delete(f"ai-trace-summary:{self.trace_id}")
diff --git a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_components.py b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_components.py
index 665274e82e4b86..843212bad32170 100644
--- a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_components.py
+++ b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_components.py
@@ -13,7 +13,7 @@
from sentry.testutils.silo import control_silo_test
-def get_sentry_app_avatars(sentry_app: SentryApp):
+def get_sentry_app_avatars(sentry_app: SentryApp) -> list[dict[str, str | bool | int]]:
return [serialize(avatar) for avatar in sentry_app.avatar.all()]
diff --git a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_publish_request.py b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_publish_request.py
index 7e1234fc63493b..98b141d459bb36 100644
--- a/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_publish_request.py
+++ b/tests/sentry/sentry_apps/api/endpoints/test_sentry_app_publish_request.py
@@ -10,10 +10,10 @@
@control_silo_test
class SentryAppPublishRequestTest(APITestCase):
- def upload_logo(self):
+ def upload_logo(self) -> None:
SentryAppAvatar.objects.create(sentry_app=self.sentry_app, avatar_type=1, color=True)
- def upload_issue_link_logo(self):
+ def upload_issue_link_logo(self) -> None:
SentryAppAvatar.objects.create(sentry_app=self.sentry_app, avatar_type=1, color=False)
def setUp(self) -> None:
diff --git a/tests/sentry/sentry_apps/api/endpoints/test_sentry_internal_app_tokens.py b/tests/sentry/sentry_apps/api/endpoints/test_sentry_internal_app_tokens.py
index b31e625a54cf58..87dbfb143d81a5 100644
--- a/tests/sentry/sentry_apps/api/endpoints/test_sentry_internal_app_tokens.py
+++ b/tests/sentry/sentry_apps/api/endpoints/test_sentry_internal_app_tokens.py
@@ -112,7 +112,7 @@ def test_get_tokens(self) -> None:
assert len(response.data) == 1
assert response.data[0]["id"] == str(self.token.id)
- def no_access_for_members(self):
+ def no_access_for_members(self) -> None:
user = self.create_user(email="meep@example.com")
self.create_member(organization=self.org, user=user)
self.login_as(user)
diff --git a/tests/sentry/sentry_apps/tasks/test_sentry_apps.py b/tests/sentry/sentry_apps/tasks/test_sentry_apps.py
index 690a9e0c9c0b9f..301be21c28c6c2 100644
--- a/tests/sentry/sentry_apps/tasks/test_sentry_apps.py
+++ b/tests/sentry/sentry_apps/tasks/test_sentry_apps.py
@@ -62,11 +62,11 @@
pytestmark = [requires_snuba]
-def raiseStatusFalse():
+def raiseStatusFalse() -> bool:
return False
-def raiseStatusTrue():
+def raiseStatusTrue() -> bool:
return True
diff --git a/tests/sentry/sentry_apps/test_sentry_app_installation_notifier.py b/tests/sentry/sentry_apps/test_sentry_app_installation_notifier.py
index 1b5cddc175f5d0..0d45a59c9af839 100644
--- a/tests/sentry/sentry_apps/test_sentry_app_installation_notifier.py
+++ b/tests/sentry/sentry_apps/test_sentry_app_installation_notifier.py
@@ -12,7 +12,7 @@
from sentry.utils.sentry_apps import SentryAppWebhookRequestsBuffer
-def raiseStatusFalse():
+def raiseStatusFalse() -> bool:
return False
diff --git a/tests/sentry/sentry_apps/test_webhooks.py b/tests/sentry/sentry_apps/test_webhooks.py
index 60d802d0982d32..ee54ae61619f74 100644
--- a/tests/sentry/sentry_apps/test_webhooks.py
+++ b/tests/sentry/sentry_apps/test_webhooks.py
@@ -11,7 +11,7 @@
@region_silo_test
class BroadcastWebhooksForOrganizationTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.organization = self.create_organization()
self.project = self.create_project(organization=self.organization)
diff --git a/tests/sentry/sentry_metrics/consumers/test_last_seen_updater.py b/tests/sentry/sentry_metrics/consumers/test_last_seen_updater.py
index c06e4ea24eb056..c541cd4037e000 100644
--- a/tests/sentry/sentry_metrics/consumers/test_last_seen_updater.py
+++ b/tests/sentry/sentry_metrics/consumers/test_last_seen_updater.py
@@ -114,7 +114,7 @@ def setUp(self) -> None:
last_seen=self.fresh_last_seen,
)
- def tearDown(self):
+ def tearDown(self) -> None:
self.table.objects.filter(id=self.fresh_id).delete()
self.table.objects.filter(id=self.stale_id).delete()
diff --git a/tests/sentry/sentry_metrics/consumers/test_slicing_router.py b/tests/sentry/sentry_metrics/consumers/test_slicing_router.py
index c0a97cd1212f13..1a4f9062c7c2bd 100644
--- a/tests/sentry/sentry_metrics/consumers/test_slicing_router.py
+++ b/tests/sentry/sentry_metrics/consumers/test_slicing_router.py
@@ -121,7 +121,7 @@ def test_with_no_org_in_routing_header(setup_slicing) -> None:
@pytest.mark.parametrize("org_id", [100])
-def test_with_misconfiguration(metrics_message) -> None:
+def test_with_misconfiguration(metrics_message: int) -> None:
"""
Configuring topic override only does not kick in routing logic. So the
messages should be routed to the logical topic.
diff --git a/tests/sentry/sentry_metrics/limiters/test_writes_limiter.py b/tests/sentry/sentry_metrics/limiters/test_writes_limiter.py
index d1bf4f0f8d48cd..e8f987cda7fc5c 100644
--- a/tests/sentry/sentry_metrics/limiters/test_writes_limiter.py
+++ b/tests/sentry/sentry_metrics/limiters/test_writes_limiter.py
@@ -16,7 +16,7 @@
}
-def get_writes_limiter(namespace: str):
+def get_writes_limiter(namespace: str) -> WritesLimiter:
return WRITES_LIMITERS[namespace]
diff --git a/tests/sentry/sentry_metrics/querying/data/test_query.py b/tests/sentry/sentry_metrics/querying/data/test_query.py
index 7a2b352d00d507..192e2ce5093f4a 100644
--- a/tests/sentry/sentry_metrics/querying/data/test_query.py
+++ b/tests/sentry/sentry_metrics/querying/data/test_query.py
@@ -21,7 +21,7 @@
),
],
)
-def test_compile_mql_query(formula, queries, expected_formula) -> None:
+def test_compile_mql_query(formula: str, queries: dict[str, str], expected_formula: str) -> None:
sub_queries = {name: MQLQuery(query) for name, query in queries.items()}
compiled_query = MQLQuery(formula, **sub_queries).compile() # type: ignore[arg-type]
diff --git a/tests/sentry/sentry_metrics/test_base_indexer.py b/tests/sentry/sentry_metrics/test_base_indexer.py
index 02f30047af73d5..e88aefbb21961c 100644
--- a/tests/sentry/sentry_metrics/test_base_indexer.py
+++ b/tests/sentry/sentry_metrics/test_base_indexer.py
@@ -18,7 +18,7 @@
def assert_fetch_type_for_tag_string_set(
meta: Mapping[str, Metadata], fetch_type: FetchType, str_set: set[str]
-):
+) -> None:
assert all([meta[string].fetch_type == fetch_type for string in str_set])
diff --git a/tests/sentry/services/test_organization_actions.py b/tests/sentry/services/test_organization_actions.py
index bb0144f229449a..71afc2fc300ca9 100644
--- a/tests/sentry/services/test_organization_actions.py
+++ b/tests/sentry/services/test_organization_actions.py
@@ -15,7 +15,7 @@
from sentry.testutils.cases import TestCase
-def assert_outbox_update_message_exists(org: Organization, expected_count: int):
+def assert_outbox_update_message_exists(org: Organization, expected_count: int) -> None:
outbox_messages = RegionOutbox.objects.filter()
# TODO(HC): Remove this once we can ensure an expected count of 1 for every message
diff --git a/tests/sentry/similarity/backends/test_redis.py b/tests/sentry/similarity/backends/test_redis.py
index cefec672bae13a..e6ba5db6c6315b 100644
--- a/tests/sentry/similarity/backends/test_redis.py
+++ b/tests/sentry/similarity/backends/test_redis.py
@@ -13,7 +13,7 @@
class RedisScriptMinHashIndexBackendTestCase(TestCase):
@cached_property
- def index(self):
+ def index(self) -> RedisScriptMinHashIndexBackend:
return RedisScriptMinHashIndexBackend(
redis.clusters.get("default").get_local_client(0),
"sim",
diff --git a/tests/sentry/snuba/metrics/fields/test_base.py b/tests/sentry/snuba/metrics/fields/test_base.py
index 6b3c9e41aa98d3..a7fe497a44e796 100644
--- a/tests/sentry/snuba/metrics/fields/test_base.py
+++ b/tests/sentry/snuba/metrics/fields/test_base.py
@@ -661,5 +661,5 @@ def test_session_duration_derived_alias(self) -> None:
("foo:foo:foo", None),
],
)
-def test_known_entity_of_metric_mri(metric_mri, expected_entity) -> None:
+def test_known_entity_of_metric_mri(metric_mri: str, expected_entity: str | None) -> None:
assert _get_known_entity_of_metric_mri(metric_mri) == expected_entity
diff --git a/tests/sentry/snuba/metrics/test_datasource.py b/tests/sentry/snuba/metrics/test_datasource.py
index c5558dc93baec5..334fb124b79524 100644
--- a/tests/sentry/snuba/metrics/test_datasource.py
+++ b/tests/sentry/snuba/metrics/test_datasource.py
@@ -1,3 +1,5 @@
+from datetime import datetime
+
import pytest
from sentry.sentry_metrics.use_case_id_registry import UseCaseID
@@ -14,7 +16,7 @@
@freeze_time(BaseMetricsLayerTestCase.MOCK_DATETIME)
class DatasourceTestCase(BaseMetricsLayerTestCase, TestCase):
@property
- def now(self):
+ def now(self) -> datetime:
return BaseMetricsLayerTestCase.MOCK_DATETIME
def test_get_tag_values_with_mri(self) -> None:
diff --git a/tests/sentry/snuba/metrics/test_metrics_layer/test_metrics_enhanced_performance.py b/tests/sentry/snuba/metrics/test_metrics_layer/test_metrics_enhanced_performance.py
index 5f961720a3842a..f771a4a5e46ce4 100644
--- a/tests/sentry/snuba/metrics/test_metrics_layer/test_metrics_enhanced_performance.py
+++ b/tests/sentry/snuba/metrics/test_metrics_layer/test_metrics_enhanced_performance.py
@@ -52,7 +52,7 @@
@freeze_time(BaseMetricsLayerTestCase.MOCK_DATETIME)
class PerformanceMetricsLayerTestCase(BaseMetricsLayerTestCase, TestCase):
@property
- def now(self):
+ def now(self) -> datetime:
return BaseMetricsLayerTestCase.MOCK_DATETIME
def test_valid_filter_include_meta_derived_metrics(self) -> None:
diff --git a/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py b/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py
index 237283fae37a60..316c51a10bf073 100644
--- a/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py
+++ b/tests/sentry/snuba/metrics/test_metrics_layer/test_release_health.py
@@ -3,6 +3,7 @@
"""
import time
+from datetime import datetime
import pytest
from django.utils.datastructures import MultiValueDict
@@ -24,7 +25,7 @@
@freeze_time(BaseMetricsLayerTestCase.MOCK_DATETIME)
class ReleaseHealthMetricsLayerTestCase(BaseMetricsLayerTestCase, TestCase):
@property
- def now(self):
+ def now(self) -> datetime:
return BaseMetricsLayerTestCase.MOCK_DATETIME
def test_valid_filter_include_meta(self) -> None:
diff --git a/tests/sentry/snuba/test_query_subscription_consumer.py b/tests/sentry/snuba/test_query_subscription_consumer.py
index e90631d3990a0c..0059c98ee2f34d 100644
--- a/tests/sentry/snuba/test_query_subscription_consumer.py
+++ b/tests/sentry/snuba/test_query_subscription_consumer.py
@@ -190,7 +190,7 @@ class RegisterSubscriberTest(unittest.TestCase):
def setUp(self) -> None:
self.orig_registry = deepcopy(subscriber_registry)
- def tearDown(self):
+ def tearDown(self) -> None:
subscriber_registry.clear()
subscriber_registry.update(self.orig_registry)
diff --git a/tests/sentry/spans/consumers/process/test_consumer.py b/tests/sentry/spans/consumers/process/test_consumer.py
index a21d96f6d12a62..95d13499635a14 100644
--- a/tests/sentry/spans/consumers/process/test_consumer.py
+++ b/tests/sentry/spans/consumers/process/test_consumer.py
@@ -14,7 +14,7 @@
@override_options({**DEFAULT_OPTIONS, "spans.drop-in-buffer": []})
@pytest.mark.parametrize("kafka_slice_id", [None, 2])
-def test_basic(kafka_slice_id) -> None:
+def test_basic(kafka_slice_id: int | None) -> None:
# Flush very aggressively to make test pass instantly
with mock.patch("time.sleep"):
topic = Topic("test")
diff --git a/tests/sentry/spans/grouping/test_utils.py b/tests/sentry/spans/grouping/test_utils.py
index cb2277ece8d7d5..3a13be0a32e70d 100644
--- a/tests/sentry/spans/grouping/test_utils.py
+++ b/tests/sentry/spans/grouping/test_utils.py
@@ -12,5 +12,5 @@
("{var}", None),
],
)
-def test_parse_fingerprint_var(fingerprint, result) -> None:
+def test_parse_fingerprint_var(fingerprint: str, result: str | None) -> None:
assert parse_fingerprint_var(fingerprint) == result
diff --git a/tests/sentry/stacktraces/test_platform.py b/tests/sentry/stacktraces/test_platform.py
index 0aadc4b32761e2..8ba10454b83f85 100644
--- a/tests/sentry/stacktraces/test_platform.py
+++ b/tests/sentry/stacktraces/test_platform.py
@@ -17,5 +17,5 @@
("whatever", "other"),
],
)
-def test_get_grouping_family_for_platform(input, output) -> None:
+def test_get_grouping_family_for_platform(input: str, output: str) -> None:
assert get_behavior_family_for_platform(input) == output
diff --git a/tests/sentry/sudo/test_utils.py b/tests/sentry/sudo/test_utils.py
index 9e6e3953634969..15593c1d2a6242 100644
--- a/tests/sentry/sudo/test_utils.py
+++ b/tests/sentry/sudo/test_utils.py
@@ -71,7 +71,7 @@ def test_revoked(self) -> None:
def test_cookie_and_token_match(self) -> None:
self.login()
- def get_signed_cookie(key, salt="", max_age=None):
+ def get_signed_cookie(key, salt="", max_age=None) -> str:
return "abc123"
self.request.session[COOKIE_NAME] = "abc123"
@@ -81,7 +81,7 @@ def get_signed_cookie(key, salt="", max_age=None):
def test_cookie_and_token_mismatch(self) -> None:
self.login()
- def get_signed_cookie(key, salt="", max_age=None):
+ def get_signed_cookie(key, salt="", max_age=None) -> str:
return "nope"
self.request.session[COOKIE_NAME] = "abc123"
diff --git a/tests/sentry/tasks/conftest.py b/tests/sentry/tasks/conftest.py
index 7d73a452a61956..a05d50602cc5a8 100644
--- a/tests/sentry/tasks/conftest.py
+++ b/tests/sentry/tasks/conftest.py
@@ -1,14 +1,18 @@
import contextlib
+from collections.abc import Callable, Generator
+from typing import Any
from unittest import mock
import pytest
@pytest.fixture
-def register_plugin(request):
+def register_plugin(
+ request: pytest.FixtureRequest,
+) -> Generator[Callable[[dict[str, Any], type[Any]], None]]:
from sentry.plugins.base import plugins
- def inner(globals, cls):
+ def inner(globals: dict[str, Any], cls: type[Any]) -> None:
ctx.enter_context(mock.patch.dict(globals, {cls.__name__: cls}))
plugins.register(cls)
request.addfinalizer(lambda: plugins.unregister(cls))
diff --git a/tests/sentry/tasks/test_activity.py b/tests/sentry/tasks/test_activity.py
index 585b737d52f449..3e63047db38207 100644
--- a/tests/sentry/tasks/test_activity.py
+++ b/tests/sentry/tasks/test_activity.py
@@ -10,7 +10,7 @@ class BasicPreprocessorPlugin(NotificationPlugin):
def notify_about_activity(self, activity):
pass
- def is_enabled(self, project=None):
+ def is_enabled(self, project=None) -> bool:
return True
diff --git a/tests/sentry/tasks/test_base.py b/tests/sentry/tasks/test_base.py
index ca6d08996293fa..7a51bc76736100 100644
--- a/tests/sentry/tasks/test_base.py
+++ b/tests/sentry/tasks/test_base.py
@@ -19,7 +19,7 @@
silo_mode=SiloMode.REGION,
taskworker_config=TaskworkerConfig(namespace=test_tasks),
)
-def region_task(param):
+def region_task(param) -> str:
return f"Region task {param}"
@@ -28,7 +28,7 @@ def region_task(param):
silo_mode=SiloMode.CONTROL,
taskworker_config=TaskworkerConfig(namespace=test_tasks),
)
-def control_task(param):
+def control_task(param) -> str:
return f"Control task {param}"
@@ -194,7 +194,7 @@ class ExpectedException(Exception):
),
)
@override_settings(SILO_MODE=SiloMode.CONTROL)
-def test_task_silo_limit_celery_task_methods(method_name) -> None:
+def test_task_silo_limit_celery_task_methods(method_name: str) -> None:
method = getattr(region_task, method_name)
with pytest.raises(SiloLimit.AvailabilityError):
method("hi")
diff --git a/tests/sentry/tasks/test_options.py b/tests/sentry/tasks/test_options.py
index 8b4684759ca7c0..80d444a5400b65 100644
--- a/tests/sentry/tasks/test_options.py
+++ b/tests/sentry/tasks/test_options.py
@@ -11,7 +11,7 @@
class SyncOptionsTest(TestCase):
_TEST_KEY = "foo"
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
try:
default_manager.unregister(self._TEST_KEY)
diff --git a/tests/sentry/tasks/test_reprocessing2.py b/tests/sentry/tasks/test_reprocessing2.py
index 306e80cd5004ac..589dc670b90233 100644
--- a/tests/sentry/tasks/test_reprocessing2.py
+++ b/tests/sentry/tasks/test_reprocessing2.py
@@ -95,7 +95,7 @@ class ReprocessingTestPlugin(Plugin2):
def get_event_preprocessors(self, data):
return [f]
- def is_enabled(self, project=None):
+ def is_enabled(self, project=None) -> bool:
return True
register_plugin(globals(), ReprocessingTestPlugin)
diff --git a/tests/sentry/tasks/test_store.py b/tests/sentry/tasks/test_store.py
index 1a1926ba6ddb1f..02cb1178c5f231 100644
--- a/tests/sentry/tasks/test_store.py
+++ b/tests/sentry/tasks/test_store.py
@@ -40,7 +40,7 @@ def put_on_hold(data):
return []
- def is_enabled(self, project=None):
+ def is_enabled(self, project=None) -> bool:
return True
@@ -246,7 +246,7 @@ def test_scrubbing_after_processing(
mock_save_event,
register_plugin,
mock_event_processing_store,
- setting_method,
+ setting_method: str,
options_model,
):
class TestPlugin(Plugin2):
@@ -258,7 +258,7 @@ def more_extra(data):
return [more_extra]
- def is_enabled(self, project=None):
+ def is_enabled(self, project=None) -> bool:
return True
register_plugin(globals(), TestPlugin)
diff --git a/tests/sentry/tasks/test_taskworker_rollout.py b/tests/sentry/tasks/test_taskworker_rollout.py
index 8eb34ae8613885..548a301cc86f9e 100644
--- a/tests/sentry/tasks/test_taskworker_rollout.py
+++ b/tests/sentry/tasks/test_taskworker_rollout.py
@@ -46,7 +46,7 @@ def test_without_taskworker_rollout(self, mock_celery_apply_async: mock.MagicMoc
name="test.test_without_taskworker_rollout",
taskworker_config=self.config,
)
- def test_task(msg):
+ def test_task(msg) -> str:
return f"hello {msg}"
assert test_task.name == "test.test_without_taskworker_rollout"
diff --git a/tests/sentry/tasks/test_update_code_owners_schema.py b/tests/sentry/tasks/test_update_code_owners_schema.py
index 641e944a80ecac..0ec0cf2ed89c8d 100644
--- a/tests/sentry/tasks/test_update_code_owners_schema.py
+++ b/tests/sentry/tasks/test_update_code_owners_schema.py
@@ -1,3 +1,4 @@
+from collections.abc import Generator
from unittest import mock
import pytest
@@ -19,7 +20,7 @@ def setUp(self) -> None:
self.integration = Integration.objects.get()
@pytest.fixture(autouse=True)
- def patch_update_schema(self):
+ def patch_update_schema(self) -> Generator[None]:
with mock.patch.object(ProjectCodeOwners, "update_schema") as self.mock_update:
yield
diff --git a/tests/sentry/taskworker/test_config.py b/tests/sentry/taskworker/test_config.py
index 47ebb2700f515f..aa7b06bf343528 100644
--- a/tests/sentry/taskworker/test_config.py
+++ b/tests/sentry/taskworker/test_config.py
@@ -55,7 +55,7 @@ def test_taskworker_schedule_parameters() -> None:
if parameter.kind in (parameter.VAR_POSITIONAL, parameter.VAR_KEYWORD):
continue
# The dynamic sampling tasks splice in a TaskContext via a decorator :(
- if parameter.annotation == TaskContext:
+ if parameter.annotation == TaskContext.__name__:
continue
if parameter.default == parameter.empty:
raise AssertionError(
diff --git a/tests/sentry/taskworker/test_registry.py b/tests/sentry/taskworker/test_registry.py
index 3e59f09ad32b0c..061af16175a629 100644
--- a/tests/sentry/taskworker/test_registry.py
+++ b/tests/sentry/taskworker/test_registry.py
@@ -27,7 +27,7 @@ def test_namespace_register_task() -> None:
)
@namespace.register(name="tests.simple_task")
- def simple_task():
+ def simple_task() -> None:
raise NotImplementedError
assert namespace.default_retry is None
diff --git a/tests/sentry/templatetags/test_sentry_assets.py b/tests/sentry/templatetags/test_sentry_assets.py
index 4c0c54f5857350..20103d3875b498 100644
--- a/tests/sentry/templatetags/test_sentry_assets.py
+++ b/tests/sentry/templatetags/test_sentry_assets.py
@@ -48,7 +48,7 @@
),
),
)
-def test_script_context(input, output) -> None:
+def test_script_context(input: str, output: str) -> None:
request = RequestFactory().get("/")
request.csp_nonce = "r@nD0m"
diff --git a/tests/sentry/templatetags/test_sentry_helpers.py b/tests/sentry/templatetags/test_sentry_helpers.py
index b9b231505a1daf..8c7ad5d4b2acb6 100644
--- a/tests/sentry/templatetags/test_sentry_helpers.py
+++ b/tests/sentry/templatetags/test_sentry_helpers.py
@@ -56,7 +56,7 @@ def test_system_origin() -> None:
),
),
)
-def test_absolute_uri(input, output) -> None:
+def test_absolute_uri(input: str, output: str) -> None:
prefix = "{% load sentry_helpers %}"
result = (
engines["django"]
@@ -82,7 +82,7 @@ def test_absolute_uri(input, output) -> None:
("{% org_url organization path %}", "http://testserver/organizations/sentry/issues/"),
),
)
-def test_org_url(input, output) -> None:
+def test_org_url(input: str, output: str) -> None:
prefix = "{% load sentry_helpers %}"
org = Organization(id=1, slug="sentry", name="Sentry")
result = (
@@ -107,7 +107,7 @@ def test_org_url(input, output) -> None:
),
),
)
-def test_org_url_customer_domains(input, output) -> None:
+def test_org_url_customer_domains(input: str, output: str) -> None:
prefix = "{% load sentry_helpers %}"
org = Organization(id=1, slug="sentry", name="Sentry")
@@ -160,7 +160,7 @@ def test_date_handle_date_and_datetime() -> None:
({"hello": 1}, "hello", "1"),
),
)
-def test_get_item(a_dict, key, expected) -> None:
+def test_get_item(a_dict: dict[str, int], key: str, expected: str) -> None:
prefix = '{% load sentry_helpers %} {{ something|get_item:"' + key + '" }}'
result = engines["django"].from_string(prefix).render(context={"something": a_dict}).strip()
assert result == expected
diff --git a/tests/sentry/test_constants.py b/tests/sentry/test_constants.py
index c13310265080b3..9812e04e949063 100644
--- a/tests/sentry/test_constants.py
+++ b/tests/sentry/test_constants.py
@@ -1,3 +1,4 @@
+from contextlib import AbstractContextManager
from unittest import mock
from sentry.constants import (
@@ -7,7 +8,7 @@
)
-def mock_integration_ids():
+def mock_integration_ids() -> AbstractContextManager[object]:
return mock.patch.dict(
INTEGRATION_ID_TO_PLATFORM_DATA,
{
diff --git a/tests/sentry/test_datascrubbing.py b/tests/sentry/test_datascrubbing.py
index 91d7e2cc51c14a..385418680a4b3f 100644
--- a/tests/sentry/test_datascrubbing.py
+++ b/tests/sentry/test_datascrubbing.py
@@ -3,6 +3,7 @@
import pytest
from sentry.datascrubbing import scrub_data
+from sentry.models.project import Project
from sentry.testutils.pytest.fixtures import django_db_all
@@ -19,7 +20,7 @@ def merge_pii_configs(prefixes_and_configs):
@django_db_all
@pytest.mark.parametrize("field", ["ooo", "oöö", "o o", "o\no", "o'o"])
-def test_scrub_data(field, default_project) -> None:
+def test_scrub_data(field: str, default_project: Project) -> None:
project = default_project
organization = project.organization
diff --git a/tests/sentry/test_devimports.py b/tests/sentry/test_devimports.py
index a4534abfe7a1e7..025239f5c9ebc1 100644
--- a/tests/sentry/test_devimports.py
+++ b/tests/sentry/test_devimports.py
@@ -118,5 +118,5 @@ def _import(name, globals=None, locals=None, fromlist=(), level=0):
@pytest.mark.parametrize("pkg", ("sentry", "sentry_plugins"))
-def test_startup_imports(pkg) -> None:
+def test_startup_imports(pkg: str) -> None:
validate_package(pkg, EXCLUDED, XFAIL)
diff --git a/tests/sentry/testutils/pytest/mocking/animals/__init__.py b/tests/sentry/testutils/pytest/mocking/animals/__init__.py
index c5ed42be2b0aba..c05a86e2ce6a7e 100644
--- a/tests/sentry/testutils/pytest/mocking/animals/__init__.py
+++ b/tests/sentry/testutils/pytest/mocking/animals/__init__.py
@@ -1,24 +1,24 @@
-def get_dog():
+def get_dog() -> str:
return "maisey"
-def get_cat():
+def get_cat() -> str:
return "piper"
-def erroring_get_dog():
+def erroring_get_dog() -> None:
raise TypeError("Expected dog, but got cat instead.")
-def a_function_that_calls_get_dog():
+def a_function_that_calls_get_dog() -> str:
return f"{get_dog()} is a good dog!"
-def a_function_that_calls_get_cat():
+def a_function_that_calls_get_cat() -> str:
return f"{get_cat()} is a good cat, because she thinks she's a dog!"
-def a_function_that_calls_erroring_get_dog():
+def a_function_that_calls_erroring_get_dog() -> str:
try:
erroring_get_dog()
except TypeError:
diff --git a/tests/sentry/toolbar/utils/test_url.py b/tests/sentry/toolbar/utils/test_url.py
index eab4af7786db97..20d4ebaa4435d4 100644
--- a/tests/sentry/toolbar/utils/test_url.py
+++ b/tests/sentry/toolbar/utils/test_url.py
@@ -15,7 +15,7 @@
("https://example.com", "example.com"),
],
)
-def test_url_matches_scheme(referrer, target) -> None:
+def test_url_matches_scheme(referrer: str, target: str) -> None:
assert url_matches(urlparse(referrer), target)
@@ -43,7 +43,7 @@ def test_url_matches_rejects_mispatched_scheme(referrer, target) -> None:
("http://foo.bar.example.com", ".bar.example.com"),
],
)
-def test_url_matches_hostname(referrer, target) -> None:
+def test_url_matches_hostname(referrer: str, target: str) -> None:
assert url_matches(urlparse(referrer), target)
@@ -72,7 +72,7 @@ def test_url_matches_hostname(referrer, target) -> None:
("http://example.com", ":80"),
],
)
-def test_url_matches_rejects_mismatched_hostname(referrer, target) -> None:
+def test_url_matches_rejects_mismatched_hostname(referrer: str, target: str) -> None:
assert not url_matches(urlparse(referrer), target)
@@ -97,7 +97,7 @@ def test_url_matches_rejects_mismatched_hostname(referrer, target) -> None:
("https://example.com:1234", "https://example.com:1234"),
],
)
-def test_url_matches_port(referrer, target) -> None:
+def test_url_matches_port(referrer: str, target: str) -> None:
assert url_matches(urlparse(referrer), target)
@@ -116,7 +116,7 @@ def test_url_matches_port(referrer, target) -> None:
("http://example.com", "example.com:abc"),
],
)
-def test_url_matches_reject_mismatched_port(referrer, target) -> None:
+def test_url_matches_reject_mismatched_port(referrer: str, target: str) -> None:
assert not url_matches(urlparse(referrer), target)
@@ -136,7 +136,7 @@ def test_url_matches_reject_mismatched_port(referrer, target) -> None:
("http://example.com", "http://example.com?query=foo"),
],
)
-def test_url_matches_with_path_or_query(referrer, target) -> None:
+def test_url_matches_with_path_or_query(referrer: str, target: str) -> None:
assert url_matches(urlparse(referrer), target)
diff --git a/tests/sentry/tsdb/test_redis.py b/tests/sentry/tsdb/test_redis.py
index 6daa7a83227fdf..fe7fda345713ea 100644
--- a/tests/sentry/tsdb/test_redis.py
+++ b/tests/sentry/tsdb/test_redis.py
@@ -48,7 +48,7 @@ def setUp(self) -> None:
# the point of this test is to demonstrate behaviour with a multi-host cluster
assert len(self.db.cluster.hosts) == 3
- def tearDown(self):
+ def tearDown(self) -> None:
with self.db.cluster.all() as client:
client.flushdb()
diff --git a/tests/sentry/uptime/detectors/test_url_extraction.py b/tests/sentry/uptime/detectors/test_url_extraction.py
index c5e0c087fda593..1da24f6a72fa88 100644
--- a/tests/sentry/uptime/detectors/test_url_extraction.py
+++ b/tests/sentry/uptime/detectors/test_url_extraction.py
@@ -3,7 +3,7 @@
class ExtractBaseUrlTest(UptimeTestCase):
- def run_test(self, url: str, expected_url: str | None):
+ def run_test(self, url: str, expected_url: str | None) -> None:
assert extract_base_url(url) == expected_url
def test(self) -> None:
diff --git a/tests/sentry/uptime/test_grouptype.py b/tests/sentry/uptime/test_grouptype.py
index 584aef4fce053d..af6baf85ad4228 100644
--- a/tests/sentry/uptime/test_grouptype.py
+++ b/tests/sentry/uptime/test_grouptype.py
@@ -32,11 +32,11 @@
from sentry.uptime.types import UptimeMonitorMode
from sentry.workflow_engine.models.data_source import DataPacket
from sentry.workflow_engine.models.detector import Detector
-from sentry.workflow_engine.types import DetectorPriorityLevel
+from sentry.workflow_engine.types import DetectorEvaluationResult, DetectorPriorityLevel
class ResolveUptimeIssueTest(UptimeTestCase):
- def test(self):
+ def test(self) -> None:
subscription = self.create_uptime_subscription(subscription_id=uuid.uuid4().hex)
self.create_project_uptime_subscription(uptime_subscription=subscription)
detector = get_detector(subscription)
@@ -132,7 +132,9 @@ def test_build_event_data(self) -> None:
class TestUptimeHandler(UptimeTestCase):
- def handle_result(self, detector: Detector, sub: UptimeSubscription, check_result: CheckResult):
+ def handle_result(
+ self, detector: Detector, sub: UptimeSubscription, check_result: CheckResult
+ ) -> DetectorEvaluationResult | None:
handler = UptimeDetectorHandler(detector)
value = UptimePacketValue(
diff --git a/tests/sentry/users/api/endpoints/test_user_details.py b/tests/sentry/users/api/endpoints/test_user_details.py
index 6dc2646e1fc2b0..36071d7cfdbf22 100644
--- a/tests/sentry/users/api/endpoints/test_user_details.py
+++ b/tests/sentry/users/api/endpoints/test_user_details.py
@@ -1,3 +1,5 @@
+from collections.abc import Generator
+
from django.test import override_settings
from pytest import fixture
@@ -364,7 +366,7 @@ class UserDetailsStaffUpdateTest(UserDetailsTest):
method = "put"
@fixture(autouse=True)
- def _activate_staff_mode(self):
+ def _activate_staff_mode(self) -> Generator[None]:
with override_options({"staff.ga-rollout": True}):
yield
diff --git a/tests/sentry/users/web/test_account_identity.py b/tests/sentry/users/web/test_account_identity.py
index ead1c6fcfcd7a7..5fb658819b6835 100644
--- a/tests/sentry/users/web/test_account_identity.py
+++ b/tests/sentry/users/web/test_account_identity.py
@@ -11,7 +11,7 @@
@control_silo_test
class AccountIdentityTest(TestCase):
@pytest.fixture(autouse=True)
- def setup_dummy_identity_provider(self):
+ def setup_dummy_identity_provider(self) -> None:
identity.register(DummyProvider)
self.addCleanup(identity.unregister, DummyProvider)
diff --git a/tests/sentry/users/web/test_accounts.py b/tests/sentry/users/web/test_accounts.py
index 94a913275041c0..4d1cf1628878fa 100644
--- a/tests/sentry/users/web/test_accounts.py
+++ b/tests/sentry/users/web/test_accounts.py
@@ -19,18 +19,18 @@
@control_silo_test
class TestAccounts(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-account-recover")
- def password_recover_path(self, user_id, hash_):
+ def password_recover_path(self, user_id, hash_) -> str:
return reverse("sentry-account-recover-confirm", kwargs={"user_id": user_id, "hash": hash_})
- def relocation_recover_path(self, user_id, hash_):
+ def relocation_recover_path(self, user_id, hash_) -> str:
return reverse(
"sentry-account-relocate-confirm", kwargs={"user_id": user_id, "hash": hash_}
)
- def relocation_reclaim_path(self, user_id):
+ def relocation_reclaim_path(self, user_id) -> str:
return reverse("sentry-account-relocate-reclaim", kwargs={"user_id": user_id})
def test_get_renders_form(self) -> None:
diff --git a/tests/sentry/utils/kafka/test_rebalance_delay.py b/tests/sentry/utils/kafka/test_rebalance_delay.py
index 4d4d3b65ecca79..85fa7b3db0f6a1 100644
--- a/tests/sentry/utils/kafka/test_rebalance_delay.py
+++ b/tests/sentry/utils/kafka/test_rebalance_delay.py
@@ -16,7 +16,7 @@ def frozen_time_with_warp():
@pytest.mark.usefixtures("frozen_time_with_warp")
@pytest.mark.parametrize("configured_delay", [5, 10, 15])
-def test_delay_tick(configured_delay) -> None:
+def test_delay_tick(configured_delay: int) -> None:
delay_kafka_rebalance(configured_delay)
after_delay_sec = int(time.time())
diff --git a/tests/sentry/utils/sdk_crashes/conftest.py b/tests/sentry/utils/sdk_crashes/conftest.py
index 5b8c6aff4f67b4..60ad067cec65b2 100644
--- a/tests/sentry/utils/sdk_crashes/conftest.py
+++ b/tests/sentry/utils/sdk_crashes/conftest.py
@@ -1,5 +1,10 @@
+from collections.abc import Callable, Collection
+from types import ModuleType
+
import pytest
+from sentry.models.project import Project
+from sentry.services.eventstore.models import Event
from sentry.utils.sdk_crashes.path_replacer import FixedPathReplacer
from sentry.utils.sdk_crashes.sdk_crash_detection_config import (
SDKCrashDetectionConfig,
@@ -9,8 +14,10 @@
@pytest.fixture
-def store_event(default_project, factories):
- def inner(data):
+def store_event(
+ default_project: Project, factories: ModuleType
+) -> Callable[[dict[str, Collection[str]]], Event]:
+ def inner(data: dict[str, Collection[str]]) -> Event:
return factories.store_event(data=data, project_id=default_project.id)
return inner
diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py
index a40760a33c3336..eaac351f40c377 100644
--- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py
+++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py
@@ -1,11 +1,12 @@
import abc
-from collections.abc import Sequence
+from collections.abc import Callable, Collection, Sequence
from unittest.mock import MagicMock, call, patch
import pytest
from fixtures.sdk_crash_detection.crash_event_cocoa import get_crash_event
from sentry.issues.grouptype import PerformanceNPlusOneGroupType
+from sentry.services.eventstore.models import Event
from sentry.services.eventstore.snuba.backend import SnubaEventStorage
from sentry.testutils.cases import BaseTestCase, SnubaTestCase, TestCase
from sentry.testutils.helpers.options import override_options
@@ -122,10 +123,15 @@ def test_performance_event_increments_counter(
("sentry.cocoa.unreal", True),
],
)
-def test_sdks_detected(mock_sdk_crash_reporter, store_event, sdk_name, detected) -> None:
+def test_sdks_detected(
+ mock_sdk_crash_reporter: MagicMock,
+ store_event: Callable[[dict[str, Collection[str]]], Event],
+ sdk_name: str,
+ detected: bool,
+) -> None:
event_data = get_crash_event()
set_path(event_data, "sdk", "name", value=sdk_name)
- event = store_event(data=event_data)
+ event = store_event(event_data)
sdk_crash_detection.detect_sdk_crash(event=event, configs=build_sdk_configs())
@@ -194,9 +200,13 @@ def create_event(self, data, project_id, assert_no_errors=True):
@pytest.mark.snuba
@patch("sentry.utils.sdk_crashes.sdk_crash_detection.sdk_crash_detection.sdk_crash_reporter")
def test_sample_rate(
- mock_sdk_crash_reporter, store_event, sample_rate, random_value, sampled
+ mock_sdk_crash_reporter: MagicMock,
+ store_event: Callable[[dict[str, Collection[str]]], Event],
+ sample_rate: float,
+ random_value: float,
+ sampled: bool,
) -> None:
- event = store_event(data=get_crash_event())
+ event = store_event(get_crash_event())
with patch("random.random", return_value=random_value):
configs = build_sdk_configs()
diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_java.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_java.py
index 4b3bbcaf0471b1..52a8fd523dd7f1 100644
--- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_java.py
+++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_java.py
@@ -100,9 +100,9 @@ def test_sdk_crash_is_reported_with_android_paths(
mock_random,
store_event,
configs,
- sdk_frame_module,
- system_frame_module,
- detected,
+ sdk_frame_module: str,
+ system_frame_module: str,
+ detected: bool,
):
event = store_event(
data=get_crash_event(
@@ -225,10 +225,10 @@ def test_sdk_crash_is_reported_for_android_runtime_tracer_crashes(
mock_random,
store_event,
configs,
- apex_frame_function,
- apex_frame_package,
- system_frame_package,
- detected,
+ apex_frame_function: str,
+ apex_frame_package: str,
+ system_frame_package: str,
+ detected: bool,
):
event = store_event(
data=get_apex_crash_event(
diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_native.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_native.py
index 3f2339497e2852..83a8bb97922dab 100644
--- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_native.py
+++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_native.py
@@ -125,9 +125,9 @@ def test_sdk_crash_is_reported_with_native_paths(
mock_random,
store_event,
configs,
- sdk_frame_function,
- system_frame_package,
- detected,
+ sdk_frame_function: str,
+ system_frame_package: str,
+ detected: bool,
):
event = store_event(
data=get_crash_event(
@@ -200,9 +200,9 @@ def test_sdk_crash_sentry_native_keeps_sentry_package_paths(
mock_random,
store_event,
configs,
- sdk_frame_function,
- sdk_frame_package,
- expected_sdk_frame_package,
+ sdk_frame_function: str,
+ sdk_frame_package: str,
+ expected_sdk_frame_package: str,
):
event = store_event(
data=get_crash_event(
diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_react_native.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_react_native.py
index 02426827b8d0e0..70210cb0fdc90e 100644
--- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_react_native.py
+++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection_react_native.py
@@ -66,9 +66,9 @@ def test_sdk_crash_is_reported_development_paths(
mock_random,
store_event,
configs,
- filename,
- expected_stripped_filename,
- detected,
+ filename: str,
+ expected_stripped_filename: str,
+ detected: bool,
):
event = store_event(data=get_crash_event(filename=filename))
@@ -161,7 +161,7 @@ def test_sdk_crash_is_reported_development_paths(
)
@decorators
def test_sdk_crash_is_reported_production_paths(
- mock_sdk_crash_reporter, mock_random, store_event, configs, package_name, detected
+ mock_sdk_crash_reporter, mock_random, store_event, configs, package_name: str, detected: bool
):
expected_stripped_filename = f"{package_name}dist/js/integrations/reactnativeerrorhandlers.js"
# Remove the first / from the path because the module is not prefixed with /.
diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detector.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detector.py
index 63a26b59c870bb..75e4de8df480c6 100644
--- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detector.py
+++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detector.py
@@ -1,11 +1,16 @@
import pytest
-from sentry.utils.sdk_crashes.sdk_crash_detection_config import FunctionAndModulePattern
+from sentry.utils.sdk_crashes.sdk_crash_detection_config import (
+ FunctionAndModulePattern,
+ SDKCrashDetectionConfig,
+)
from sentry.utils.sdk_crashes.sdk_crash_detector import SDKCrashDetector
@pytest.mark.parametrize("field_containing_path", ["package", "module", "abs_path", "filename"])
-def test_build_sdk_crash_detection_configs(empty_cocoa_config, field_containing_path) -> None:
+def test_build_sdk_crash_detection_configs(
+ empty_cocoa_config: SDKCrashDetectionConfig, field_containing_path: str
+) -> None:
empty_cocoa_config.sdk_frame_config.path_patterns = {"Sentry**"}
@@ -70,7 +75,12 @@ def test_build_sdk_crash_detection_configs(empty_cocoa_config, field_containing_
],
)
def test_sdk_crash_ignore_matchers(
- empty_cocoa_config, test_id, ignore_matchers, frames, is_crash, description
+ empty_cocoa_config: SDKCrashDetectionConfig,
+ test_id: str,
+ ignore_matchers: list[FunctionAndModulePattern],
+ frames: list[dict[str, str]],
+ is_crash: bool,
+ description: str,
):
empty_cocoa_config.sdk_crash_ignore_matchers = set(ignore_matchers)
empty_cocoa_config.sdk_frame_config.path_patterns = {"**"}
diff --git a/tests/sentry/utils/test_arroyo_producer.py b/tests/sentry/utils/test_arroyo_producer.py
index a8f074f0024c26..53a5b25b16abbe 100644
--- a/tests/sentry/utils/test_arroyo_producer.py
+++ b/tests/sentry/utils/test_arroyo_producer.py
@@ -1,10 +1,12 @@
from unittest.mock import Mock
+from arroyo.backends.kafka import KafkaProducer
+
from sentry.utils.arroyo_producer import SingletonProducer
def test_track_futures() -> None:
- def dummy_producer():
+ def dummy_producer() -> KafkaProducer:
raise AssertionError("no producer")
producer = SingletonProducer(dummy_producer, max_futures=2)
diff --git a/tests/sentry/utils/test_codecs.py b/tests/sentry/utils/test_codecs.py
index 8cdeeda7fd07f0..e27e5e870279b4 100644
--- a/tests/sentry/utils/test_codecs.py
+++ b/tests/sentry/utils/test_codecs.py
@@ -1,6 +1,8 @@
+from typing import Any
+
import pytest
-from sentry.utils.codecs import BytesCodec, JSONCodec, ZlibCodec, ZstdCodec
+from sentry.utils.codecs import BytesCodec, Codec, JSONCodec, ZlibCodec, ZstdCodec
@pytest.mark.parametrize(
@@ -12,7 +14,7 @@
(ZstdCodec(), b"hello", b"(\xb5/\xfd \x05)\x00\x00hello"),
],
)
-def test_codec(codec, encoded, decoded) -> None:
+def test_codec(codec: Codec[Any, Any], encoded: str | bytes, decoded: str | bytes) -> None:
assert codec.encode(decoded) == encoded
assert codec.decode(encoded) == decoded
diff --git a/tests/sentry/utils/test_event_frames.py b/tests/sentry/utils/test_event_frames.py
index 94be69e7df41b7..3745ba74322d0a 100644
--- a/tests/sentry/utils/test_event_frames.py
+++ b/tests/sentry/utils/test_event_frames.py
@@ -132,7 +132,7 @@ def test_platform_java_no_module(self) -> None:
def test_platform_java_do_not_follow_java_package_naming_convention_does_not_raise_exception(
self,
- ):
+ ) -> None:
frame = {
"abs_path": "gsp_arcus_drops_proofReadingmodecInspectionProofRead_gsp.groovy",
"module": "gsp_arcus_drops_proofReadingmodecInspectionProofRead_gsp$_run_closure2",
@@ -454,7 +454,7 @@ def test_no_package_not_munged(self) -> None:
class CocoaWaterFallTestCase(TestCase):
def test_crashing_event_with_exception_interface_but_no_frame_should_waterfall_to_thread_frames(
self,
- ):
+ ) -> None:
event = self.store_event(
data={
"platform": "cocoa",
diff --git a/tests/sentry/utils/test_github.py b/tests/sentry/utils/test_github.py
index 97f71f614bca46..16a2eaef11cb23 100644
--- a/tests/sentry/utils/test_github.py
+++ b/tests/sentry/utils/test_github.py
@@ -30,7 +30,7 @@ def setUp(self) -> None:
self.subpath = "secret_scanning"
@responses.activate
- def _verify(self):
+ def _verify(self) -> None:
responses.add(
responses.GET,
"https://api.github.com/meta/public_keys/secret_scanning",
diff --git a/tests/sentry/utils/test_glob.py b/tests/sentry/utils/test_glob.py
index d3f1508552fc7e..0e2105b106113f 100644
--- a/tests/sentry/utils/test_glob.py
+++ b/tests/sentry/utils/test_glob.py
@@ -57,5 +57,5 @@ def __call__(self):
[GlobInput.make("foo:\nbar", "foo:*", allow_newline=False), False],
],
)
-def test_glob_match(glob_input, expect) -> None:
+def test_glob_match(glob_input: GlobInput, expect: bool) -> None:
assert glob_input() == expect
diff --git a/tests/sentry/utils/test_hashlib.py b/tests/sentry/utils/test_hashlib.py
index 232272acf7ffb5..84b32c517e4ab9 100644
--- a/tests/sentry/utils/test_hashlib.py
+++ b/tests/sentry/utils/test_hashlib.py
@@ -22,7 +22,7 @@
@pytest.mark.parametrize("seed,value,hash", HASHLIB_VALUES_TESTS)
-def test_hash_values(seed, value, hash) -> None:
+def test_hash_values(seed: str, value: object, hash: str) -> None:
assert hash_values([value], seed=seed) == hash
@@ -44,5 +44,5 @@ def test_unicode(self) -> None:
("s:transactions/user@none", 1739810785),
),
)
-def test_fnv1a_32_with_mris(value, expected_value) -> None:
+def test_fnv1a_32_with_mris(value: str, expected_value: int) -> None:
assert fnv1a_32(value.encode("utf-8")) == expected_value
diff --git a/tests/sentry/utils/test_integrationdocs.py b/tests/sentry/utils/test_integrationdocs.py
index 1a0e6e4d9ebee0..939b7cb3c21574 100644
--- a/tests/sentry/utils/test_integrationdocs.py
+++ b/tests/sentry/utils/test_integrationdocs.py
@@ -15,7 +15,7 @@
"../../../etc/passwd",
],
)
-def test_path_traversal_attempt_on_load_doc_raises_exception(path) -> None:
+def test_path_traversal_attempt_on_load_doc_raises_exception(path: str) -> None:
with pytest.raises(SuspiciousDocPathOperation) as excinfo:
load_doc(path)
diff --git a/tests/sentry/utils/test_math.py b/tests/sentry/utils/test_math.py
index a7c0e5f99ccf3c..b39fbc67ef6bff 100644
--- a/tests/sentry/utils/test_math.py
+++ b/tests/sentry/utils/test_math.py
@@ -35,7 +35,7 @@
(75001, 100001, 100000),
],
)
-def test_nice_int(start, stop, expected) -> None:
+def test_nice_int(start: int, stop: int, expected: int) -> None:
for x in range(start, stop):
assert nice_int(x) == expected, "{} was rounded to {}, not {}".format(
x, nice_int(x), expected
@@ -54,7 +54,7 @@ def test_nice_int(start, stop, expected) -> None:
([i for i in range(10)], 5.239),
],
)
-def test_exponential_moving_average(sequence, expected) -> None:
+def test_exponential_moving_average(sequence: list[int], expected: int | float) -> None:
avg = ExponentialMovingAverage(2 / 11)
t = 0.0
for i, x in enumerate(sequence):
diff --git a/tests/sentry/utils/test_outcomes.py b/tests/sentry/utils/test_outcomes.py
index 5388f24ce0cb1b..81a3488ef4a929 100644
--- a/tests/sentry/utils/test_outcomes.py
+++ b/tests/sentry/utils/test_outcomes.py
@@ -53,7 +53,7 @@ def test_outcome_is_billing(outcome: Outcome, is_billing: bool) -> None:
("RATE_LIMITED", Outcome.RATE_LIMITED),
],
)
-def test_parse_outcome(name, outcome) -> None:
+def test_parse_outcome(name: str, outcome: Outcome) -> None:
"""
Asserts *case insensitive* parsing of outcomes from their canonical names,
as used in the API and queries.
@@ -231,7 +231,7 @@ def test_track_outcome_with_event_id(setup) -> None:
DataCategory.DEFAULT,
],
)
-def test_track_outcome_with_category(setup, category) -> None:
+def test_track_outcome_with_category(setup, category: DataCategory) -> None:
"""
Tests that `track_outcome` correctly includes different `category` values in the payload.
"""
@@ -393,7 +393,7 @@ def test_track_outcome_with_none_category(setup) -> None:
@pytest.mark.parametrize("quantity", [0, -1, -100])
-def test_track_outcome_with_non_positive_quantity(setup, quantity) -> None:
+def test_track_outcome_with_non_positive_quantity(setup, quantity: int) -> None:
"""
Tests that `track_outcome` handles non-positive `quantity` values.
"""
diff --git a/tests/sentry/utils/test_patch_set.py b/tests/sentry/utils/test_patch_set.py
index 1e6182df5c42a2..300de7a2fa253a 100644
--- a/tests/sentry/utils/test_patch_set.py
+++ b/tests/sentry/utils/test_patch_set.py
@@ -152,7 +152,7 @@ def test_filename_containing_spaces() -> None:
),
],
)
-def test_diff_line_counts(diff_text, expected) -> None:
+def test_diff_line_counts(diff_text: str, expected: FileModifications) -> None:
patch = f"""diff --git a/test.py b/test.py
index 0000001..0000002 100644
--- a/test.py
diff --git a/tests/sentry/utils/test_retries.py b/tests/sentry/utils/test_retries.py
index 280b9a18e294c3..a979211f1f72a7 100644
--- a/tests/sentry/utils/test_retries.py
+++ b/tests/sentry/utils/test_retries.py
@@ -80,7 +80,7 @@ def test_decorator(self) -> None:
callable = mock.MagicMock(side_effect=[bomb, mock.sentinel.OK])
@TimedRetryPolicy.wrap(0.3, delay=lambda i: 0.1)
- def retrying_func():
+ def retrying_func() -> mock._Sentinel:
return callable()
retrying_func.clock = mock.Mock()
diff --git a/tests/sentry/utils/test_samples.py b/tests/sentry/utils/test_samples.py
index da396e28818aaa..fef47675b2ece7 100644
--- a/tests/sentry/utils/test_samples.py
+++ b/tests/sentry/utils/test_samples.py
@@ -3,6 +3,7 @@
import pytest
from django.core.exceptions import SuspiciousFileOperation
+from sentry.models.project import Project
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.utils.platform_categories import CONSOLES
from sentry.utils.samples import create_sample_event, load_data
@@ -20,7 +21,7 @@
"../../../etc/passwd",
],
)
-def test_path_traversal_attempt_raises_exception(platform) -> None:
+def test_path_traversal_attempt_raises_exception(platform: str) -> None:
with pytest.raises(SuspiciousFileOperation) as excinfo:
load_data(platform)
@@ -54,7 +55,9 @@ def test_sample_as_directory_raises_exception(tmp_path) -> None:
class TestConsoleSamples:
@pytest.mark.parametrize("platform", list(CONSOLES))
- def test_console_platforms_trigger_screenshot_attachment(self, default_project, platform):
+ def test_console_platforms_trigger_screenshot_attachment(
+ self, default_project: Project, platform: str
+ ):
with mock.patch(
"sentry.utils.samples.create_console_screenshot_attachment"
) as mock_attachment:
diff --git a/tests/sentry/utils/test_snuba.py b/tests/sentry/utils/test_snuba.py
index 7e9a46b91d3a47..ab10ab2e4dc061 100644
--- a/tests/sentry/utils/test_snuba.py
+++ b/tests/sentry/utils/test_snuba.py
@@ -452,7 +452,7 @@ def test_retries() -> None:
class SnubaQueryRateLimitTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
mock_request = Request(
dataset="events",
app_id="test",
diff --git a/tests/sentry/utils/test_strings.py b/tests/sentry/utils/test_strings.py
index 61e5481ef47cd3..2e1a6bbcadf151 100644
--- a/tests/sentry/utils/test_strings.py
+++ b/tests/sentry/utils/test_strings.py
@@ -42,7 +42,7 @@
("hello\nworld", "hello\nworld"),
),
)
-def test_unescape_string(s, expected) -> None:
+def test_unescape_string(s: str, expected: str) -> None:
assert unescape_string(s) == expected
diff --git a/tests/sentry/utils/test_tag_normalization.py b/tests/sentry/utils/test_tag_normalization.py
index 8fd4f6e451f3f6..cc8de8ad5af9d1 100644
--- a/tests/sentry/utils/test_tag_normalization.py
+++ b/tests/sentry/utils/test_tag_normalization.py
@@ -12,7 +12,7 @@
("sentry_python", "sentry.python"),
),
)
-def test_normalizes_to_dots(tag, expected) -> None:
+def test_normalizes_to_dots(tag: str, expected: str) -> None:
assert normalize_sdk_tag(tag) == expected
@@ -31,7 +31,7 @@ def test_normalizes_to_dots(tag, expected) -> None:
),
),
)
-def test_shortens_non_js(tag, expected) -> None:
+def test_shortens_non_js(tag: str, expected: str) -> None:
assert normalize_sdk_tag(tag) == expected
@@ -45,7 +45,7 @@ def test_shortens_non_js(tag, expected) -> None:
("sentry.javascript.react.native.expo", "sentry.javascript.react.native"),
),
)
-def test_uses_synonyms(tag, expected) -> None:
+def test_uses_synonyms(tag: str, expected: str) -> None:
assert normalize_sdk_tag(tag) == expected
@@ -53,7 +53,7 @@ def test_uses_synonyms(tag, expected) -> None:
("tag", "expected"),
(("foo.baz.bar", "other"), ("sentryfoo", "other"), ("raven", "other")),
)
-def test_non_sentry_to_other(tag, expected) -> None:
+def test_non_sentry_to_other(tag: str, expected: str) -> None:
assert normalize_sdk_tag(tag) == expected
@@ -61,7 +61,7 @@ def test_non_sentry_to_other(tag, expected) -> None:
("tag", "expected"),
(("sentry.sparql", "other"), ("sentry.terraform.hcl", "other"), ("sentry-native", "other")),
)
-def test_unknown_sentry_to_other(tag, expected) -> None:
+def test_unknown_sentry_to_other(tag: str, expected: str) -> None:
assert normalize_sdk_tag(tag) == expected
diff --git a/tests/sentry/web/frontend/test_auth_close.py b/tests/sentry/web/frontend/test_auth_close.py
index 1f594e35797e62..072f2fc14b601b 100644
--- a/tests/sentry/web/frontend/test_auth_close.py
+++ b/tests/sentry/web/frontend/test_auth_close.py
@@ -10,7 +10,7 @@
@control_silo_test
class AuthClose(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-auth-close")
def test_renders_auth_close_view(self) -> None:
diff --git a/tests/sentry/web/frontend/test_auth_login.py b/tests/sentry/web/frontend/test_auth_login.py
index eeac9a3e63412b..66b42e8abc8eba 100644
--- a/tests/sentry/web/frontend/test_auth_login.py
+++ b/tests/sentry/web/frontend/test_auth_login.py
@@ -33,7 +33,7 @@
@control_silo_test
class AuthLoginTest(TestCase, HybridCloudTestMixin):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-login")
def allow_registration(self):
@@ -553,7 +553,7 @@ def test_login_demo_mode_with_org(self) -> None:
@control_silo_test
class AuthLoginNewsletterTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-login")
@pytest.fixture(autouse=True)
@@ -625,7 +625,7 @@ def test_registration_subscribe_to_newsletter(self) -> None:
@control_silo_test
class AuthLoginCustomerDomainTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-login")
def setUp(self) -> None:
diff --git a/tests/sentry/web/frontend/test_auth_logout.py b/tests/sentry/web/frontend/test_auth_logout.py
index af766c785bfedc..f086c278da42ea 100644
--- a/tests/sentry/web/frontend/test_auth_logout.py
+++ b/tests/sentry/web/frontend/test_auth_logout.py
@@ -10,7 +10,7 @@
@control_silo_test
class AuthLogoutTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-logout")
def test_get_shows_page(self) -> None:
diff --git a/tests/sentry/web/frontend/test_auth_organization_login.py b/tests/sentry/web/frontend/test_auth_organization_login.py
index 53e4e4bd9dd832..cfcca50850706c 100644
--- a/tests/sentry/web/frontend/test_auth_organization_login.py
+++ b/tests/sentry/web/frontend/test_auth_organization_login.py
@@ -12,7 +12,7 @@
from sentry.models.authidentity import AuthIdentity
from sentry.models.authprovider import AuthProvider
from sentry.models.options.organization_option import OrganizationOption
-from sentry.models.organization import OrganizationStatus
+from sentry.models.organization import Organization, OrganizationStatus
from sentry.models.organizationmember import OrganizationMember
from sentry.organizations.services.organization.serial import serialize_rpc_organization
from sentry.silo.base import SiloMode
@@ -28,11 +28,11 @@
@control_silo_test
class OrganizationAuthLoginTest(AuthProviderTestCase):
@cached_property
- def organization(self):
+ def organization(self) -> Organization:
return self.create_organization(name="foo", owner=self.user)
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-auth-organization", args=[self.organization.slug])
def test_renders_basic(self) -> None:
diff --git a/tests/sentry/web/frontend/test_auth_saml2.py b/tests/sentry/web/frontend/test_auth_saml2.py
index 3bb74d4c611003..8b88f6eca06b46 100644
--- a/tests/sentry/web/frontend/test_auth_saml2.py
+++ b/tests/sentry/web/frontend/test_auth_saml2.py
@@ -74,7 +74,7 @@ def setUp(self) -> None:
super().setUp()
- def tearDown(self):
+ def tearDown(self) -> None:
# restore url-prefix config
settings.SENTRY_OPTIONS.update({"system.url-prefix": self.url_prefix})
diff --git a/tests/sentry/web/frontend/test_disabled_member_view.py b/tests/sentry/web/frontend/test_disabled_member_view.py
index 3fe1493015faf7..2722b92828033e 100644
--- a/tests/sentry/web/frontend/test_disabled_member_view.py
+++ b/tests/sentry/web/frontend/test_disabled_member_view.py
@@ -10,7 +10,7 @@
@control_silo_test
class DisabledMemberViewTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-organization-disabled-member", args=[self.org.slug])
def setUp(self) -> None:
@@ -19,7 +19,7 @@ def setUp(self) -> None:
self.user = self.create_user()
self.login_as(self.user)
- def create_one_member(self, flags=None):
+ def create_one_member(self, flags=None) -> None:
self.create_member(user=self.user, organization=self.org, role="member", flags=flags)
def test_member_missing(self) -> None:
diff --git a/tests/sentry/web/frontend/test_group_event_json.py b/tests/sentry/web/frontend/test_group_event_json.py
index fbf0d40dfe72a0..5d51117def2b8b 100644
--- a/tests/sentry/web/frontend/test_group_event_json.py
+++ b/tests/sentry/web/frontend/test_group_event_json.py
@@ -7,7 +7,7 @@
class GroupEventJsonTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return f"/organizations/{self.organization.slug}/issues/{self.event.group_id}/events/{self.event.event_id}/json/"
def test_does_render(self) -> None:
diff --git a/tests/sentry/web/frontend/test_home.py b/tests/sentry/web/frontend/test_home.py
index b3dd152be66678..7bd6bc6f0803ad 100644
--- a/tests/sentry/web/frontend/test_home.py
+++ b/tests/sentry/web/frontend/test_home.py
@@ -10,7 +10,7 @@
@control_silo_test
class HomeTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry")
def test_redirects_to_login(self) -> None:
diff --git a/tests/sentry/web/frontend/test_js_sdk_loader.py b/tests/sentry/web/frontend/test_js_sdk_loader.py
index 68899e1fc7cb3e..4890ab8c08f531 100644
--- a/tests/sentry/web/frontend/test_js_sdk_loader.py
+++ b/tests/sentry/web/frontend/test_js_sdk_loader.py
@@ -13,14 +13,14 @@
class JavaScriptSdkLoaderTest(TestCase):
@pytest.fixture(autouse=True)
- def set_settings(self):
+ def set_settings(self) -> None:
settings.JS_SDK_LOADER_SDK_VERSION = "0.5.2"
settings.JS_SDK_LOADER_DEFAULT_SDK_URL = (
"https://s3.amazonaws.com/getsentry-cdn/@sentry/browser/%s/bundle.min.js"
)
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-js-sdk-loader", args=[self.projectkey.public_key])
def test_noop_no_pub_key(self) -> None:
diff --git a/tests/sentry/web/frontend/test_oauth_authorize.py b/tests/sentry/web/frontend/test_oauth_authorize.py
index fb23a3e8d885cb..dd27857f4dd8b6 100644
--- a/tests/sentry/web/frontend/test_oauth_authorize.py
+++ b/tests/sentry/web/frontend/test_oauth_authorize.py
@@ -12,7 +12,7 @@
@control_silo_test
class OAuthAuthorizeCodeTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/authorize/"
def setUp(self) -> None:
@@ -246,7 +246,7 @@ def test_unauthenticated_basic_auth(self) -> None:
@control_silo_test
class OAuthAuthorizeTokenTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/authorize/"
def setUp(self) -> None:
@@ -351,7 +351,7 @@ def test_minimal_params_code_deny_flow(self) -> None:
@control_silo_test
class OAuthAuthorizeOrgScopedTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/authorize/"
def setUp(self) -> None:
diff --git a/tests/sentry/web/frontend/test_oauth_token.py b/tests/sentry/web/frontend/test_oauth_token.py
index f772e5ebb7a6c7..683a47c7d49541 100644
--- a/tests/sentry/web/frontend/test_oauth_token.py
+++ b/tests/sentry/web/frontend/test_oauth_token.py
@@ -14,7 +14,7 @@
@control_silo_test
class OAuthTokenTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/token/"
def test_no_get(self) -> None:
@@ -46,7 +46,7 @@ def test_invalid_grant_type(self) -> None:
@control_silo_test
class OAuthTokenCodeTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/token/"
def setUp(self) -> None:
@@ -390,7 +390,7 @@ def test_valid_params_id_token_additional_scopes(self) -> None:
@control_silo_test
class OAuthTokenRefreshTokenTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/token/"
def setUp(self) -> None:
@@ -500,7 +500,7 @@ def test_valid_params(self) -> None:
@control_silo_test
class OAuthTokenOrganizationScopedTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return "/oauth/token/"
def setUp(self) -> None:
diff --git a/tests/sentry/web/frontend/test_reactivate_account.py b/tests/sentry/web/frontend/test_reactivate_account.py
index bdb31c5c5a59a3..089204714ea8d0 100644
--- a/tests/sentry/web/frontend/test_reactivate_account.py
+++ b/tests/sentry/web/frontend/test_reactivate_account.py
@@ -9,7 +9,7 @@
@control_silo_test
class ReactivateAccountTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-reactivate-account")
def test_renders(self) -> None:
diff --git a/tests/sentry/web/frontend/test_release_webhook.py b/tests/sentry/web/frontend/test_release_webhook.py
index b4d0b864312b0d..bbcfe06c1e8191 100644
--- a/tests/sentry/web/frontend/test_release_webhook.py
+++ b/tests/sentry/web/frontend/test_release_webhook.py
@@ -22,7 +22,7 @@ def setUp(self) -> None:
ProjectOption.objects.set_value(self.project, "sentry:release-token", self.token)
@cached_property
- def signature(self):
+ def signature(self) -> str:
return hmac.new(
key=self.token.encode("utf-8"),
msg=(f"{self.plugin_id}-{self.project.id}").encode(),
@@ -30,7 +30,7 @@ def signature(self):
).hexdigest()
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse(
"sentry-release-hook",
kwargs={
diff --git a/tests/sentry/web/frontend/test_shared_group_details.py b/tests/sentry/web/frontend/test_shared_group_details.py
index caf0a1b532d947..3c722e0edabea5 100644
--- a/tests/sentry/web/frontend/test_shared_group_details.py
+++ b/tests/sentry/web/frontend/test_shared_group_details.py
@@ -12,13 +12,13 @@ def setUp(self) -> None:
self.group = self.create_group(project=self.project)
self.org_domain = f"{self.organization.slug}.testserver"
- def share_group(self):
+ def share_group(self) -> GroupShare:
with assume_test_silo_mode(SiloMode.REGION):
return GroupShare.objects.create(
project=self.project, group=self.group, user_id=self.user.id
)
- def assert_group_metadata_present(self, response: Any):
+ def assert_group_metadata_present(self, response: Any) -> None:
response_body = response.content.decode("utf8")
         assert f'<…' in response_body
-    def assert_group_metadata_not_present(self, response: Any):
+    def assert_group_metadata_not_present(self, response: Any) -> None:
         response_body = response.content.decode("utf8")
         assert f'<…' not in response_body
diff --git a/tests/sentry/web/frontend/test_vsts_extension_configuration.py b/tests/sentry/web/frontend/test_vsts_extension_configuration.py
-    def path(self):
+    def path(self) -> str:
         return reverse("vsts-extension-configuration")
def setUp(self) -> None:
diff --git a/tests/sentry/web/test_api.py b/tests/sentry/web/test_api.py
index bffffc024c9a7b..59a4d08b051b93 100644
--- a/tests/sentry/web/test_api.py
+++ b/tests/sentry/web/test_api.py
@@ -22,7 +22,7 @@
class CrossDomainXmlTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-api-crossdomain-xml", kwargs={"project_id": self.project.id})
def test_inaccessible_in_control_silo(self) -> None:
@@ -74,7 +74,7 @@ def test_output_allows_x_sentry_auth(self) -> None:
class RobotsTxtTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-robots-txt")
def test_robots(self) -> None:
@@ -141,7 +141,7 @@ def test_customer_domain(self) -> None:
@region_silo_test(regions=create_test_regions("us", "eu"), include_monolith_run=True)
class ClientConfigViewTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-api-client-config")
def test_cookie_names(self) -> None:
@@ -724,7 +724,7 @@ def test_customer_domain(self) -> None:
class McpJsonTest(TestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return reverse("sentry-mcp-json")
def test_mcp_json_saas_mode(self) -> None:
diff --git a/tests/sentry/workflow_engine/endpoints/test_organization_available_action_index.py b/tests/sentry/workflow_engine/endpoints/test_organization_available_action_index.py
index a832037cf953bc..865de003b52ad2 100644
--- a/tests/sentry/workflow_engine/endpoints/test_organization_available_action_index.py
+++ b/tests/sentry/workflow_engine/endpoints/test_organization_available_action_index.py
@@ -48,7 +48,7 @@ def tearDown(self) -> None:
self.registry_patcher.stop()
self.plugins_registry_patcher.stop()
- def setup_email(self):
+ def setup_email(self) -> None:
@self.registry.register(Action.Type.EMAIL)
@dataclass(frozen=True)
class EmailActionHandler(ActionHandler):
@@ -56,7 +56,7 @@ class EmailActionHandler(ActionHandler):
config_schema = {}
data_schema = {}
- def setup_integrations(self):
+ def setup_integrations(self) -> None:
@self.registry.register(Action.Type.SLACK)
@dataclass(frozen=True)
class SlackActionHandler(IntegrationActionHandler):
@@ -100,7 +100,7 @@ class MSTeamsActionHandler(IntegrationActionHandler):
config_schema = {}
data_schema = {}
- def setup_integrations_with_services(self):
+ def setup_integrations_with_services(self) -> None:
@self.registry.register(Action.Type.PAGERDUTY)
@dataclass(frozen=True)
class PagerdutyActionHandler(IntegrationActionHandler):
@@ -173,7 +173,7 @@ class OpsgenieActionHandler(IntegrationActionHandler):
self.org_integration.config = {"team_table": [self.og_team]}
self.org_integration.save()
- def setup_sentry_apps(self):
+ def setup_sentry_apps(self) -> None:
@self.registry.register(Action.Type.SENTRY_APP)
@dataclass(frozen=True)
class SentryAppActionHandler(ActionHandler):
@@ -212,7 +212,7 @@ class SentryAppActionHandler(ActionHandler):
is_alertable=True,
)
- def setup_webhooks(self):
+ def setup_webhooks(self) -> None:
@self.registry.register(Action.Type.WEBHOOK)
@dataclass(frozen=True)
class WebhookActionHandler(ActionHandler):
diff --git a/tests/sentry/workflow_engine/endpoints/test_organization_detector_types.py b/tests/sentry/workflow_engine/endpoints/test_organization_detector_types.py
index b6f40d8fba486c..cdc0965432d980 100644
--- a/tests/sentry/workflow_engine/endpoints/test_organization_detector_types.py
+++ b/tests/sentry/workflow_engine/endpoints/test_organization_detector_types.py
@@ -112,7 +112,7 @@ class TestPerformanceGroupType(GroupType):
category_v2 = GroupCategory.DB_QUERY.value
released = True
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self.registry_patcher.stop()
diff --git a/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_details.py b/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_details.py
index 7426502150a31a..7bf349308ea4aa 100644
--- a/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_details.py
+++ b/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_details.py
@@ -40,7 +40,7 @@ def setUp(self) -> None:
organization=self.organization,
)
- def tearDown(self):
+ def tearDown(self) -> None:
return super().tearDown()
diff --git a/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_index.py b/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_index.py
index bf1fb3d458663f..ab094e5f97bdd0 100644
--- a/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_index.py
+++ b/tests/sentry/workflow_engine/endpoints/test_organization_detector_workflow_index.py
@@ -48,7 +48,7 @@ def setUp(self) -> None:
organization=self.organization,
)
- def tearDown(self):
+ def tearDown(self) -> None:
return super().tearDown()
diff --git a/tests/sentry/workflow_engine/endpoints/test_organization_workflow_details.py b/tests/sentry/workflow_engine/endpoints/test_organization_workflow_details.py
index 9f58298deda227..84bbc907d12846 100644
--- a/tests/sentry/workflow_engine/endpoints/test_organization_workflow_details.py
+++ b/tests/sentry/workflow_engine/endpoints/test_organization_workflow_details.py
@@ -1,3 +1,5 @@
+from contextlib import AbstractContextManager
+
from sentry import audit_log
from sentry.api.serializers import serialize
from sentry.constants import ObjectStatus
@@ -342,7 +344,7 @@ def test_update_detectors_no_changes(self) -> None:
class OrganizationDeleteWorkflowTest(OrganizationWorkflowDetailsBaseTest, BaseWorkflowTest):
method = "DELETE"
- def tasks(self):
+ def tasks(self) -> AbstractContextManager:
return TaskRunner()
def setUp(self) -> None:
diff --git a/tests/sentry/workflow_engine/endpoints/test_organization_workflow_index.py b/tests/sentry/workflow_engine/endpoints/test_organization_workflow_index.py
index 7006e73598e907..59a9b44684ef03 100644
--- a/tests/sentry/workflow_engine/endpoints/test_organization_workflow_index.py
+++ b/tests/sentry/workflow_engine/endpoints/test_organization_workflow_index.py
@@ -670,7 +670,7 @@ def setUp(self) -> None:
organization_id=self.organization.id, name="Third Workflow", enabled=False
)
- def test_bulk_enable_workflows_by_ids_success(self):
+ def test_bulk_enable_workflows_by_ids_success(self) -> None:
response = self.get_success_response(
self.organization.slug,
qs_params=[("id", str(self.workflow.id)), ("id", str(self.workflow_two.id))],
@@ -693,7 +693,7 @@ def test_bulk_enable_workflows_by_ids_success(self):
self.workflow_three.refresh_from_db()
assert self.workflow_three.enabled is False
- def test_bulk_disable_workflows_by_ids_success(self):
+ def test_bulk_disable_workflows_by_ids_success(self) -> None:
self.workflow.update(enabled=True)
self.workflow_two.update(enabled=True)
self.workflow_three.update(enabled=True)
@@ -718,7 +718,7 @@ def test_bulk_disable_workflows_by_ids_success(self):
self.workflow_three.refresh_from_db()
assert self.workflow_three.enabled is True
- def test_bulk_enable_workflows_by_query_success(self):
+ def test_bulk_enable_workflows_by_query_success(self) -> None:
response = self.get_success_response(
self.organization.slug,
qs_params={"query": "test"},
@@ -740,7 +740,7 @@ def test_bulk_enable_workflows_by_query_success(self):
assert self.workflow_two.enabled is False
assert self.workflow_three.enabled is False
- def test_bulk_update_workflows_no_parameters_error(self):
+ def test_bulk_update_workflows_no_parameters_error(self) -> None:
"""Test error when no filtering parameters are provided"""
response = self.get_error_response(
self.organization.slug,
@@ -760,7 +760,7 @@ def test_bulk_update_workflows_no_parameters_error(self):
assert self.workflow_two.enabled is False
assert self.workflow_three.enabled is False
- def test_bulk_update_workflows_missing_enabled_field_error(self):
+ def test_bulk_update_workflows_missing_enabled_field_error(self) -> None:
response = self.get_error_response(
self.organization.slug,
qs_params={"id": str(self.workflow.id)},
@@ -774,7 +774,7 @@ def test_bulk_update_workflows_missing_enabled_field_error(self):
self.workflow.refresh_from_db()
assert self.workflow.enabled is False
- def test_bulk_update_no_matching_workflows(self):
+ def test_bulk_update_no_matching_workflows(self) -> None:
# Test with non-existent ID
response = self.get_success_response(
self.organization.slug,
diff --git a/tests/sentry/workflow_engine/endpoints/validators/test_base_action.py b/tests/sentry/workflow_engine/endpoints/validators/test_base_action.py
index 7a16e7b12c4543..0b4495aa37a08e 100644
--- a/tests/sentry/workflow_engine/endpoints/validators/test_base_action.py
+++ b/tests/sentry/workflow_engine/endpoints/validators/test_base_action.py
@@ -89,7 +89,7 @@ def test_validate_data__invalid(self, mock_action_handler_get: mock.MagicMock) -
def test_validate_type__action_gated(self, mock_action_handler_get: mock.MagicMock) -> None:
organization = self.create_organization()
- def make_validator():
+ def make_validator() -> BaseActionValidator:
return BaseActionValidator(
context={"organization": organization},
data={
diff --git a/tests/sentry/workflow_engine/handlers/condition/test_age_comparison_handler.py b/tests/sentry/workflow_engine/handlers/condition/test_age_comparison_handler.py
index 218c45538a17e3..7d286b85b06e28 100644
--- a/tests/sentry/workflow_engine/handlers/condition/test_age_comparison_handler.py
+++ b/tests/sentry/workflow_engine/handlers/condition/test_age_comparison_handler.py
@@ -15,7 +15,7 @@
class TestAgeComparisonCondition(ConditionTestCase):
condition = Condition.AGE_COMPARISON
- def setup_group_event_and_job(self):
+ def setup_group_event_and_job(self) -> None:
self.group_event = self.event.for_group(self.group)
self.event_data = WorkflowEventData(event=self.group_event, group=self.group)
diff --git a/tests/sentry/workflow_engine/handlers/condition/test_level_handler.py b/tests/sentry/workflow_engine/handlers/condition/test_level_handler.py
index e540b7eb2d13be..61b5097abe9153 100644
--- a/tests/sentry/workflow_engine/handlers/condition/test_level_handler.py
+++ b/tests/sentry/workflow_engine/handlers/condition/test_level_handler.py
@@ -17,7 +17,7 @@ class TestLevelCondition(ConditionTestCase):
"level": "20",
}
- def setup_group_event_and_job(self):
+ def setup_group_event_and_job(self) -> None:
self.group_event = self.event.for_group(self.group)
self.event_data = WorkflowEventData(event=self.group_event, group=self.group)
diff --git a/tests/sentry/workflow_engine/handlers/condition/test_tagged_event_handler.py b/tests/sentry/workflow_engine/handlers/condition/test_tagged_event_handler.py
index 54932455123c44..0e552751b86574 100644
--- a/tests/sentry/workflow_engine/handlers/condition/test_tagged_event_handler.py
+++ b/tests/sentry/workflow_engine/handlers/condition/test_tagged_event_handler.py
@@ -4,6 +4,7 @@
from sentry.rules.conditions.tagged_event import TaggedEventCondition
from sentry.rules.filters.tagged_event import TaggedEventFilter
from sentry.rules.match import MatchType
+from sentry.services.eventstore.models import Event
from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.types import WorkflowEventData
from tests.sentry.workflow_engine.handlers.condition.test_base import ConditionTestCase
@@ -18,7 +19,7 @@ class TestTaggedEventCondition(ConditionTestCase):
"value": "sentry.example",
}
- def get_event(self):
+ def get_event(self) -> Event:
event = self.event
event.data["tags"] = (
("logger", "sentry.example"),
diff --git a/tests/sentry/workflow_engine/handlers/detector/test_base.py b/tests/sentry/workflow_engine/handlers/detector/test_base.py
index 7c4ca095081538..8781b5fcc276d3 100644
--- a/tests/sentry/workflow_engine/handlers/detector/test_base.py
+++ b/tests/sentry/workflow_engine/handlers/detector/test_base.py
@@ -181,7 +181,7 @@ class HandlerUpdateGroupType(GroupType):
self.handler_state_type = HandlerStateGroupType
self.update_handler_type = HandlerUpdateGroupType
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
self.uuid_patcher.stop()
self.sm_comp_patcher.stop()
diff --git a/tests/sentry/workflow_engine/handlers/detector/test_stateful.py b/tests/sentry/workflow_engine/handlers/detector/test_stateful.py
index 1561e0fd36320f..620e449fa918de 100644
--- a/tests/sentry/workflow_engine/handlers/detector/test_stateful.py
+++ b/tests/sentry/workflow_engine/handlers/detector/test_stateful.py
@@ -158,7 +158,7 @@ def setUp(self) -> None:
)
self.detector.workflow_condition_group = self.create_data_condition_group()
- def add_condition(val: str, result: DetectorPriorityLevel):
+ def add_condition(val: str, result: DetectorPriorityLevel) -> None:
self.create_data_condition(
type="eq",
comparison=val,
@@ -181,7 +181,7 @@ def add_condition(val: str, result: DetectorPriorityLevel):
},
)
- def packet(self, key: int, result: DetectorPriorityLevel):
+ def packet(self, key: int, result: DetectorPriorityLevel) -> DataPacket:
"""
Constructs a test data packet that will evaluate to the
DetectorPriorityLevel specified for the result parameter.
diff --git a/tests/sentry/workflow_engine/migrations/test_0078_update_metric_detector_config_fields.py b/tests/sentry/workflow_engine/migrations/test_0078_update_metric_detector_config_fields.py
index bbf94219c17bdf..23875b29ccedf2 100644
--- a/tests/sentry/workflow_engine/migrations/test_0078_update_metric_detector_config_fields.py
+++ b/tests/sentry/workflow_engine/migrations/test_0078_update_metric_detector_config_fields.py
@@ -6,7 +6,7 @@ class UpdateMetricDetectorConfigFieldsTest(TestMigrations):
migrate_to = "0080_update_metric_detector_config_fields"
app = "workflow_engine"
- def setup_initial_state(self):
+ def setup_initial_state(self) -> None:
self.detector = self.create_detector(
type="metric_issue",
config={
diff --git a/tests/sentry/workflow_engine/migrations/test_0082_disconnect_error_detector_cron_workflows.py b/tests/sentry/workflow_engine/migrations/test_0082_disconnect_error_detector_cron_workflows.py
index 5c8e831b14bb3d..f9bc249dc0fb7b 100644
--- a/tests/sentry/workflow_engine/migrations/test_0082_disconnect_error_detector_cron_workflows.py
+++ b/tests/sentry/workflow_engine/migrations/test_0082_disconnect_error_detector_cron_workflows.py
@@ -9,7 +9,7 @@ class DisconnectCronWorkflowsTest(TestMigrations):
migrate_to = "0082_disconnect_error_detector_cron_workflows"
app = "workflow_engine"
- def setup_initial_state(self):
+ def setup_initial_state(self) -> None:
self.rule = self.create_project_rule()
self.cron_rule = self.create_project_rule()
diff --git a/tests/sentry/workflow_engine/processors/contexts/test_workflow_event_context.py b/tests/sentry/workflow_engine/processors/contexts/test_workflow_event_context.py
index 6c1489af7aa2f3..7e1677b1b21589 100644
--- a/tests/sentry/workflow_engine/processors/contexts/test_workflow_event_context.py
+++ b/tests/sentry/workflow_engine/processors/contexts/test_workflow_event_context.py
@@ -1,6 +1,7 @@
from contextvars import Token
from sentry.testutils.cases import TestCase
+from sentry.workflow_engine.models.detector import Detector
from sentry.workflow_engine.processors.contexts.workflow_event_context import (
WorkflowEventContext,
WorkflowEventContextData,
@@ -12,14 +13,14 @@ def setUp(self) -> None:
super().setUp()
self.ctx_token: Token[WorkflowEventContextData] | None = None
- def tearDown(self):
+ def tearDown(self) -> None:
if self.ctx_token:
WorkflowEventContext.reset(self.ctx_token)
self.ctx_token = None
class MockContextualClass:
- def run(self):
+ def run(self) -> Detector | None:
return WorkflowEventContext.get().detector
diff --git a/tests/sentry/workflow_engine/processors/test_delayed_workflow.py b/tests/sentry/workflow_engine/processors/test_delayed_workflow.py
index 5312744d7d1cda..9fc9f9d55d3ecd 100644
--- a/tests/sentry/workflow_engine/processors/test_delayed_workflow.py
+++ b/tests/sentry/workflow_engine/processors/test_delayed_workflow.py
@@ -122,7 +122,7 @@ def setUp(self) -> None:
buffer.backend.push_to_sorted_set(key=DelayedWorkflow.buffer_key, value=self.project.id)
buffer.backend.push_to_sorted_set(key=DelayedWorkflow.buffer_key, value=self.project2.id)
- def tearDown(self):
+ def tearDown(self) -> None:
self.mock_redis_buffer.__exit__(None, None, None)
def create_project_event_freq_workflow(
@@ -512,7 +512,7 @@ def test_get_condition_query_groups(self) -> None:
@freeze_time(FROZEN_TIME)
class TestGetSnubaResults(BaseWorkflowTest):
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
def create_events(self, comparison_type: ComparisonType) -> Event:
diff --git a/tests/sentry/workflow_engine/processors/test_workflow.py b/tests/sentry/workflow_engine/processors/test_workflow.py
index ec73a8e389f4d1..6f1c14ee37a298 100644
--- a/tests/sentry/workflow_engine/processors/test_workflow.py
+++ b/tests/sentry/workflow_engine/processors/test_workflow.py
@@ -472,7 +472,7 @@ def setUp(self) -> None:
self.mock_redis_buffer = mock_redis_buffer()
self.mock_redis_buffer.__enter__()
- def tearDown(self):
+ def tearDown(self) -> None:
self.mock_redis_buffer.__exit__(None, None, None)
def test_enqueues_workflow_all_logic_type(self) -> None:
@@ -1004,7 +1004,7 @@ def setUp(self) -> None:
"action_condition",
],
)
- def test_delete_workflow(self, instance_attr) -> None:
+ def test_delete_workflow(self, instance_attr: str) -> None:
instance = getattr(self, instance_attr)
instance_id = instance.id
cls = instance.__class__
diff --git a/tests/sentry/workflow_engine/utils/test_log_context.py b/tests/sentry/workflow_engine/utils/test_log_context.py
index 46cb2b77a5c36a..ca990ec1154f90 100644
--- a/tests/sentry/workflow_engine/utils/test_log_context.py
+++ b/tests/sentry/workflow_engine/utils/test_log_context.py
@@ -113,12 +113,12 @@ def test_context_isolation(self) -> None:
context_ids = set()
@log_context.root()
- def first_context():
+ def first_context() -> None:
context = log_context._log_context_state.get()
context_ids.add(context.extra["context_id"])
@log_context.root()
- def second_context():
+ def second_context() -> None:
context = log_context._log_context_state.get()
context_ids.add(context.extra["context_id"])
diff --git a/tests/sentry_plugins/amazon_sqs/test_plugin.py b/tests/sentry_plugins/amazon_sqs/test_plugin.py
index 677f2416cd6949..e4c0dd5e1e8e87 100644
--- a/tests/sentry_plugins/amazon_sqs/test_plugin.py
+++ b/tests/sentry_plugins/amazon_sqs/test_plugin.py
@@ -5,6 +5,7 @@
import pytest
from botocore.client import ClientError
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import PluginTestCase
from sentry_plugins.amazon_sqs.plugin import AmazonSQSPlugin
@@ -15,10 +16,10 @@ def test_conf_key() -> None:
class AmazonSQSPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> AmazonSQSPlugin:
return AmazonSQSPlugin()
- def run_test(self):
+ def run_test(self) -> Event:
self.plugin.set_option("access_key", "access-key", self.project)
self.plugin.set_option("secret_key", "secret-key", self.project)
self.plugin.set_option("region", "us-east-1", self.project)
@@ -128,7 +129,7 @@ def test_use_s3_bucket(self, mock_client: MagicMock) -> None:
@patch("boto3.client")
@pytest.mark.skip(reason="https://github.com/getsentry/sentry/issues/44858")
- def test_invalid_s3_bucket(self, mock_client, logger) -> None:
+ def test_invalid_s3_bucket(self, mock_client: MagicMock, logger: MagicMock) -> None:
self.plugin.set_option("s3_bucket", "bad_bucket", self.project)
mock_client.return_value.put_object.side_effect = ClientError(
{"Error": {"Code": "NoSuchBucket"}},
diff --git a/tests/sentry_plugins/asana/test_plugin.py b/tests/sentry_plugins/asana/test_plugin.py
index 0dc0de82b5a5a3..6fc42f1a0ca35d 100644
--- a/tests/sentry_plugins/asana/test_plugin.py
+++ b/tests/sentry_plugins/asana/test_plugin.py
@@ -8,6 +8,7 @@
from sentry.exceptions import PluginError
from sentry.testutils.cases import PluginTestCase
+from sentry.testutils.requests import drf_request_from_request
from sentry_plugins.asana.plugin import AsanaPlugin
@@ -17,21 +18,21 @@ def test_conf_key() -> None:
class AsanaPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> AsanaPlugin:
return AsanaPlugin()
@cached_property
- def request(self):
+ def request(self) -> RequestFactory:
return RequestFactory()
def test_get_issue_label(self) -> None:
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_label(group, 1) == "Asana Issue"
+ assert self.plugin.get_issue_label(group, "1") == "Asana Issue"
def test_get_issue_url(self) -> None:
self.plugin.set_option("repo", "getsentry/sentry", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_url(group, 1) == "https://app.asana.com/0/0/1"
+ assert self.plugin.get_issue_url(group, "1") == "https://app.asana.com/0/0/1"
def test_is_configured(self) -> None:
assert self.plugin.is_configured(self.project) is False
@@ -49,7 +50,7 @@ def test_create_issue(self) -> None:
self.plugin.set_option("workspace", "12345678", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {"title": "Hello", "description": "Fix this."}
with pytest.raises(PluginError):
@@ -79,7 +80,7 @@ def test_view_create_no_auth(self) -> None:
self.login_as(self.user)
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = self.user
response = self.plugin.view_create(request, group)
assert response.status_code == 400
@@ -103,7 +104,7 @@ def test_link_issue(self) -> None:
self.plugin.set_option("workspace", 12345678, self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {"comment": "please fix this", "issue_id": "1"}
with pytest.raises(PluginError):
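Several plugin tests above now route RequestFactory output through drf_request_from_request before handing it to plugin views. A hedged sketch of what such a wrapper can look like, assuming it simply wraps the Django request in DRF's Request object (the real helper in sentry.testutils.requests may do more; wrap_in_drf_request below is an illustrative name):

from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest
from django.test import RequestFactory
from rest_framework.request import Request


def wrap_in_drf_request(request: HttpRequest) -> Request:
    # Assumed shape of a drf_request_from_request-style helper: plugin views expect
    # rest_framework.request.Request (request.data, request.query_params), not a
    # bare django.http.HttpRequest.
    return Request(request)


factory = RequestFactory()
drf_request = wrap_in_drf_request(factory.get("/"))
drf_request.user = AnonymousUser()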
diff --git a/tests/sentry_plugins/bitbucket/test_plugin.py b/tests/sentry_plugins/bitbucket/test_plugin.py
index 74f2f7db01255c..222822685a1398 100644
--- a/tests/sentry_plugins/bitbucket/test_plugin.py
+++ b/tests/sentry_plugins/bitbucket/test_plugin.py
@@ -7,6 +7,7 @@
from sentry.exceptions import PluginError
from sentry.testutils.cases import PluginTestCase
+from sentry.testutils.requests import drf_request_from_request
from sentry_plugins.bitbucket.plugin import BitbucketPlugin
@@ -16,22 +17,22 @@ def test_conf_key() -> None:
class BitbucketPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> BitbucketPlugin:
return BitbucketPlugin()
@cached_property
- def request(self):
+ def request(self) -> RequestFactory:
return RequestFactory()
def test_get_issue_label(self) -> None:
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_label(group, 1) == "Bitbucket-1"
+ assert self.plugin.get_issue_label(group, "1") == "Bitbucket-1"
def test_get_issue_url(self) -> None:
self.plugin.set_option("repo", "maxbittker/newsdiffs", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
assert (
- self.plugin.get_issue_url(group, 1)
+ self.plugin.get_issue_url(group, "1")
== "https://bitbucket.org/maxbittker/newsdiffs/issue/1/"
)
@@ -51,7 +52,7 @@ def test_create_issue(self) -> None:
self.plugin.set_option("repo", "maxbittker/newsdiffs", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {
"title": "Hello",
@@ -97,7 +98,7 @@ def test_link_issue(self) -> None:
self.plugin.set_option("repo", "maxbittker/newsdiffs", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {"comment": "Hello", "issue_id": "1"}
with pytest.raises(PluginError):
diff --git a/tests/sentry_plugins/bitbucket/test_repository_provider.py b/tests/sentry_plugins/bitbucket/test_repository_provider.py
index 6246b4a46fcd87..67212072807737 100644
--- a/tests/sentry_plugins/bitbucket/test_repository_provider.py
+++ b/tests/sentry_plugins/bitbucket/test_repository_provider.py
@@ -10,7 +10,7 @@
class BitbucketPluginTest(TestCase):
@cached_property
- def provider(self):
+ def provider(self) -> BitbucketRepositoryProvider:
return BitbucketRepositoryProvider("bitbucket")
@responses.activate
diff --git a/tests/sentry_plugins/github/endpoints/test_push_event.py b/tests/sentry_plugins/github/endpoints/test_push_event.py
index 4785315f757dc6..f8f2840a4f4bf8 100644
--- a/tests/sentry_plugins/github/endpoints/test_push_event.py
+++ b/tests/sentry_plugins/github/endpoints/test_push_event.py
@@ -1,4 +1,5 @@
import contextlib
+from collections.abc import Generator
from datetime import datetime, timezone
from uuid import uuid4
@@ -14,7 +15,7 @@
@contextlib.contextmanager
-def mock_baxter_response():
+def mock_baxter_response() -> Generator[None]:
with responses.RequestsMock() as mck:
mck.add(
"GET",
diff --git a/tests/sentry_plugins/github/test_plugin.py b/tests/sentry_plugins/github/test_plugin.py
index 3e3f0f2dea8676..32207ca73e6ba6 100644
--- a/tests/sentry_plugins/github/test_plugin.py
+++ b/tests/sentry_plugins/github/test_plugin.py
@@ -8,6 +8,7 @@
from sentry.exceptions import PluginError
from sentry.testutils.cases import PluginTestCase
+from sentry.testutils.requests import drf_request_from_request
from sentry_plugins.github.plugin import GitHubPlugin
@@ -17,21 +18,23 @@ def test_conf_key() -> None:
class GitHubPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> GitHubPlugin:
return GitHubPlugin()
@cached_property
- def request(self):
+ def request(self) -> RequestFactory:
return RequestFactory()
def test_get_issue_label(self) -> None:
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_label(group, 1) == "GH-1"
+ assert self.plugin.get_issue_label(group, "1") == "GH-1"
def test_get_issue_url(self) -> None:
self.plugin.set_option("repo", "getsentry/sentry", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_url(group, 1) == "https://github.com/getsentry/sentry/issues/1"
+ assert (
+ self.plugin.get_issue_url(group, "1") == "https://github.com/getsentry/sentry/issues/1"
+ )
def test_is_configured(self) -> None:
assert self.plugin.is_configured(self.project) is False
@@ -49,7 +52,7 @@ def test_create_issue(self) -> None:
self.plugin.set_option("repo", "getsentry/sentry", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {"title": "Hello", "description": "Fix this."}
with pytest.raises(PluginError):
@@ -83,7 +86,7 @@ def test_link_issue(self) -> None:
self.plugin.set_option("repo", "getsentry/sentry", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {"comment": "Hello", "issue_id": "1"}
with pytest.raises(PluginError):
diff --git a/tests/sentry_plugins/github/test_provider.py b/tests/sentry_plugins/github/test_provider.py
index 06ef9a67c476de..e5fb8bfea5efaf 100644
--- a/tests/sentry_plugins/github/test_provider.py
+++ b/tests/sentry_plugins/github/test_provider.py
@@ -21,7 +21,7 @@
class GitHubPluginTest(TestCase):
@cached_property
- def provider(self):
+ def provider(self) -> GitHubRepositoryProvider:
return GitHubRepositoryProvider("github")
def test_compare_commits(self) -> None:
@@ -172,7 +172,7 @@ def test_update_repository_with_webhook(self) -> None:
class GitHubAppsProviderTest(TestCase):
@cached_property
- def provider(self):
+ def provider(self) -> GitHubAppsRepositoryProvider:
return GitHubAppsRepositoryProvider("github_apps")
@patch.object(
@@ -185,7 +185,7 @@ def provider(self):
"get_installations",
return_value=orjson.loads(LIST_INSTALLATION_API_RESPONSE),
)
- def test_link_auth(self, *args) -> None:
+ def test_link_auth(self, *args: MagicMock) -> None:
user = self.create_user()
organization = self.create_organization()
self.create_usersocialauth(
diff --git a/tests/sentry_plugins/gitlab/test_plugin.py b/tests/sentry_plugins/gitlab/test_plugin.py
index 917fb513a1a4a6..b1e24a762cf914 100644
--- a/tests/sentry_plugins/gitlab/test_plugin.py
+++ b/tests/sentry_plugins/gitlab/test_plugin.py
@@ -6,6 +6,7 @@
from django.urls import reverse
from sentry.testutils.cases import PluginTestCase
+from sentry.testutils.requests import drf_request_from_request
from sentry_plugins.gitlab.plugin import GitLabPlugin
@@ -15,16 +16,16 @@ def test_conf_key() -> None:
class GitLabPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> GitLabPlugin:
return GitLabPlugin()
@cached_property
- def request(self):
+ def request(self) -> RequestFactory:
return RequestFactory()
def test_get_issue_label(self) -> None:
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_label(group, 1) == "GL-1"
+ assert self.plugin.get_issue_label(group, "1") == "GL-1"
def test_get_issue_url(self) -> None:
self.plugin.set_option("gitlab_url", "https://gitlab.com", self.project)
@@ -57,7 +58,7 @@ def test_create_issue(self) -> None:
self.plugin.set_option("gitlab_token", "abcdefg", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = self.user
form_data = {"title": "Hello", "description": "Fix this."}
@@ -91,7 +92,7 @@ def test_link_issue(self) -> None:
self.plugin.set_option("gitlab_token", "abcdefg", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = self.user
form_data = {"comment": "Hello", "issue_id": "1"}
diff --git a/tests/sentry_plugins/heroku/test_plugin.py b/tests/sentry_plugins/heroku/test_plugin.py
index 69b1537cbbb6b1..9aadb27851a9a7 100644
--- a/tests/sentry_plugins/heroku/test_plugin.py
+++ b/tests/sentry_plugins/heroku/test_plugin.py
@@ -1,5 +1,6 @@
from __future__ import annotations
+from collections.abc import Generator
from datetime import timedelta
from typing import Any
from unittest.mock import MagicMock, Mock, patch
@@ -126,7 +127,7 @@ def test_minimal(self, mock_fetch_commits: MagicMock) -> None:
class HookHandleTest(TestCase):
@pytest.fixture(autouse=True)
- def patch_is_valid_signature(self):
+ def patch_is_valid_signature(self) -> Generator[None]:
with patch.object(HerokuReleaseHook, "is_valid_signature"):
yield
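The github and heroku hunks above annotate yield-only context managers and fixtures as Generator[None]. A small self-contained example of that spelling; on interpreters and type checkers without PEP 696 default type parameters, the equivalent older form is Generator[None, None, None]:

from collections.abc import Generator
from contextlib import contextmanager


@contextmanager
def quiet() -> Generator[None]:
    # yields nothing useful; callers only care about the enter/exit side effects
    yield


with quiet():
    pass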
diff --git a/tests/sentry_plugins/jira/test_plugin.py b/tests/sentry_plugins/jira/test_plugin.py
index ba7d20c51f183b..b2436f6e25c4b8 100644
--- a/tests/sentry_plugins/jira/test_plugin.py
+++ b/tests/sentry_plugins/jira/test_plugin.py
@@ -10,6 +10,7 @@
from django.urls import reverse
from sentry.testutils.cases import TestCase
+from sentry.testutils.requests import drf_request_from_request
from sentry_plugins.jira.plugin import JiraPlugin
create_meta_response = {
@@ -213,11 +214,11 @@
class JiraPluginTest(TestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> JiraPlugin:
return JiraPlugin()
@cached_property
- def request(self):
+ def request(self) -> RequestFactory:
return RequestFactory()
def test_conf_key(self) -> None:
@@ -255,7 +256,7 @@ def test_create_issue(self) -> None:
self.plugin.set_option("instance_url", "https://getsentry.atlassian.net", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {
"title": "Hello",
@@ -275,7 +276,7 @@ def test_link_issue(self) -> None:
self.plugin.set_option("instance_url", "https://getsentry.atlassian.net", self.project)
group = self.create_group(message="Hello world", culprit="foo.bar")
- request = self.request.get("/")
+ request = drf_request_from_request(self.request.get("/"))
request.user = AnonymousUser()
form_data = {"issue_id": "SEN-19"}
assert (
@@ -318,7 +319,7 @@ def test_get_formatted_user(self) -> None:
}
) == {"id": "robot", "text": "robot (robot)"}
- def _setup_autocomplete_jira(self):
+ def _setup_autocomplete_jira(self) -> None:
self.plugin.set_option("instance_url", "https://getsentry.atlassian.net", self.project)
self.plugin.set_option("default_project", "SEN", self.project)
self.login_as(user=self.user)
diff --git a/tests/sentry_plugins/opgsenie/test_plugin.py b/tests/sentry_plugins/opgsenie/test_plugin.py
index 1e55eded258474..3659660f4efea5 100644
--- a/tests/sentry_plugins/opgsenie/test_plugin.py
+++ b/tests/sentry_plugins/opgsenie/test_plugin.py
@@ -15,7 +15,7 @@ def test_conf_key() -> None:
class OpsGeniePluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> OpsGeniePlugin:
return OpsGeniePlugin()
def test_is_configured(self) -> None:
diff --git a/tests/sentry_plugins/pagerduty/test_plugin.py b/tests/sentry_plugins/pagerduty/test_plugin.py
index 13858f60638220..089fee5a6bb2a2 100644
--- a/tests/sentry_plugins/pagerduty/test_plugin.py
+++ b/tests/sentry_plugins/pagerduty/test_plugin.py
@@ -26,7 +26,7 @@ def test_conf_key() -> None:
class PagerDutyPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> PagerDutyPlugin:
return PagerDutyPlugin()
def test_is_configured(self) -> None:
diff --git a/tests/sentry_plugins/pivotal/test_pivotal_plugin.py b/tests/sentry_plugins/pivotal/test_pivotal_plugin.py
index 98419712a05d28..286b2a401b46e9 100644
--- a/tests/sentry_plugins/pivotal/test_pivotal_plugin.py
+++ b/tests/sentry_plugins/pivotal/test_pivotal_plugin.py
@@ -13,16 +13,18 @@ def test_conf_key() -> None:
class PivotalPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> PivotalPlugin:
return PivotalPlugin()
def test_get_issue_label(self) -> None:
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_label(group, 1) == "#1"
+ assert self.plugin.get_issue_label(group, "1") == "#1"
def test_get_issue_url(self) -> None:
group = self.create_group(message="Hello world", culprit="foo.bar")
- assert self.plugin.get_issue_url(group, 1) == "https://www.pivotaltracker.com/story/show/1"
+ assert (
+ self.plugin.get_issue_url(group, "1") == "https://www.pivotaltracker.com/story/show/1"
+ )
def test_is_configured(self) -> None:
assert self.plugin.is_configured(self.project) is False
diff --git a/tests/sentry_plugins/pushover/test_plugin.py b/tests/sentry_plugins/pushover/test_plugin.py
index 29b2683977c16f..ab1d2594953a69 100644
--- a/tests/sentry_plugins/pushover/test_plugin.py
+++ b/tests/sentry_plugins/pushover/test_plugin.py
@@ -19,7 +19,7 @@ def test_conf_key() -> None:
class PushoverPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> PushoverPlugin:
return PushoverPlugin()
def test_is_configured(self) -> None:
diff --git a/tests/sentry_plugins/redmine/test_plugin.py b/tests/sentry_plugins/redmine/test_plugin.py
index ca1812779f2187..19e4af011bab6a 100644
--- a/tests/sentry_plugins/redmine/test_plugin.py
+++ b/tests/sentry_plugins/redmine/test_plugin.py
@@ -14,7 +14,7 @@ def test_conf_key() -> None:
class RedminePluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> RedminePlugin:
return RedminePlugin()
@responses.activate
diff --git a/tests/sentry_plugins/segment/test_plugin.py b/tests/sentry_plugins/segment/test_plugin.py
index 990289a8b8b2fd..3ecfb8fc5c14e0 100644
--- a/tests/sentry_plugins/segment/test_plugin.py
+++ b/tests/sentry_plugins/segment/test_plugin.py
@@ -13,7 +13,7 @@ def test_conf_key() -> None:
class SegmentPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> SegmentPlugin:
return SegmentPlugin()
@responses.activate
diff --git a/tests/sentry_plugins/sessionstack/test_plugin.py b/tests/sentry_plugins/sessionstack/test_plugin.py
index f46e9876cad887..d3e40b2521ee12 100644
--- a/tests/sentry_plugins/sessionstack/test_plugin.py
+++ b/tests/sentry_plugins/sessionstack/test_plugin.py
@@ -23,7 +23,7 @@ def test_conf_key() -> None:
class SessionStackPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> SessionStackPlugin:
return SessionStackPlugin()
@responses.activate
@@ -68,11 +68,15 @@ def test_event_preprocessing(self) -> None:
add_sessionstack_context = event_preprocessors[0]
processed_event = add_sessionstack_context(event)
+ assert processed_event is not None
event_contexts = processed_event.get("contexts")
+ assert event_contexts is not None
+
sessionstack_context = event_contexts.get("sessionstack")
- session_url = sessionstack_context.get("session_url")
+ assert sessionstack_context is not None
+ session_url = sessionstack_context.get("session_url")
assert session_url == EXPECTED_SESSION_URL
def test_no_secrets(self) -> None:
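The sessionstack hunk above inserts assert ... is not None checks before each .get() chain; under mypy those asserts narrow the Optional return values so the following lookups type-check. A minimal illustration of the same narrowing with made-up data:

def session_url_from(event: dict[str, dict[str, dict[str, str]]] | None) -> str:
    assert event is not None  # narrows away the None branch of the parameter
    contexts = event.get("contexts")
    assert contexts is not None  # .get() returns an Optional; the assert narrows it
    sessionstack = contexts.get("sessionstack")
    assert sessionstack is not None
    return sessionstack["session_url"]


print(session_url_from({"contexts": {"sessionstack": {"session_url": "https://example.invalid/s/1"}}}))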
diff --git a/tests/sentry_plugins/slack/test_plugin.py b/tests/sentry_plugins/slack/test_plugin.py
index 8b17308ba83c3e..71d03961081405 100644
--- a/tests/sentry_plugins/slack/test_plugin.py
+++ b/tests/sentry_plugins/slack/test_plugin.py
@@ -19,7 +19,7 @@ def test_conf_key() -> None:
class SlackPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> SlackPlugin:
return SlackPlugin()
@responses.activate
diff --git a/tests/sentry_plugins/splunk/test_plugin.py b/tests/sentry_plugins/splunk/test_plugin.py
index c4d86cebfd68d5..cd4581109bc259 100644
--- a/tests/sentry_plugins/splunk/test_plugin.py
+++ b/tests/sentry_plugins/splunk/test_plugin.py
@@ -15,7 +15,7 @@ def test_conf_key() -> None:
class SplunkPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> SplunkPlugin:
return SplunkPlugin()
@responses.activate
diff --git a/tests/sentry_plugins/trello/test_plugin.py b/tests/sentry_plugins/trello/test_plugin.py
index 0dfd5b4dcf8988..48942736a006b3 100644
--- a/tests/sentry_plugins/trello/test_plugin.py
+++ b/tests/sentry_plugins/trello/test_plugin.py
@@ -5,6 +5,7 @@
import responses
from sentry.testutils.cases import PluginTestCase
+from sentry.testutils.requests import drf_request_from_request
from sentry_plugins.trello.plugin import TrelloPlugin
@@ -14,7 +15,7 @@ def test_conf_key() -> None:
class TrelloPluginTestBase(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> TrelloPlugin:
return TrelloPlugin()
@@ -45,7 +46,7 @@ def test_is_configured(self) -> None:
class TrelloPluginApiTests(TrelloPluginTestBase):
- def setUp(self):
+ def setUp(self) -> None:
self.group = self.create_group(message="Hello world", culprit="foo.bar")
self.plugin.set_option("token", "7c8951d1", self.project)
self.plugin.set_option("key", "39g", self.project)
@@ -122,7 +123,7 @@ def test_create_issue(self) -> None:
"board": "ads23f",
"list": "23tds",
}
- request = self.make_request(user=self.user, method="POST")
+ request = drf_request_from_request(self.make_request(user=self.user, method="POST"))
assert self.plugin.create_issue(request, self.group, form_data) == "rds43"
responses_request = responses.calls[0].request
@@ -142,7 +143,7 @@ def test_link_issue(self) -> None:
)
form_data = {"comment": "please fix this", "issue_id": "SstgnBIQ"}
- request = self.make_request(user=self.user, method="POST")
+ request = drf_request_from_request(self.make_request(user=self.user, method="POST"))
assert self.plugin.link_issue(request, self.group, form_data) == {
"title": "MyTitle",
@@ -169,8 +170,10 @@ def test_view_options(self) -> None:
json=[{"id": "8f3", "name": "list 1"}, {"id": "j8f", "name": "list 2"}],
)
- request = self.make_request(
- user=self.user, method="GET", GET={"option_field": "list", "board": "f34"}
+ request = drf_request_from_request(
+ self.make_request(
+ user=self.user, method="GET", GET={"option_field": "list", "board": "f34"}
+ )
)
response = self.plugin.view_options(request, self.group)
@@ -195,12 +198,13 @@ def test_view_autocomplete(self) -> None:
},
)
- request = self.make_request(
- user=self.user,
- method="GET",
- GET={"autocomplete_field": "issue_id", "autocomplete_query": "Key"},
+ request = drf_request_from_request(
+ self.make_request(
+ user=self.user,
+ method="GET",
+ GET={"autocomplete_field": "issue_id", "autocomplete_query": "Key"},
+ )
)
-
response = self.plugin.view_autocomplete(request, self.group)
assert response.data == {
"issue_id": [
@@ -240,12 +244,13 @@ def test_view_autocomplete_no_org(self) -> None:
},
)
- request = self.make_request(
- user=self.user,
- method="GET",
- GET={"autocomplete_field": "issue_id", "autocomplete_query": "Key"},
+ request = drf_request_from_request(
+ self.make_request(
+ user=self.user,
+ method="GET",
+ GET={"autocomplete_field": "issue_id", "autocomplete_query": "Key"},
+ )
)
-
response = self.plugin.view_autocomplete(request, self.group)
assert response.data == {
"issue_id": [
diff --git a/tests/sentry_plugins/twilio/test_plugin.py b/tests/sentry_plugins/twilio/test_plugin.py
index 11c8952dddf3e2..5ccd3eb467abdb 100644
--- a/tests/sentry_plugins/twilio/test_plugin.py
+++ b/tests/sentry_plugins/twilio/test_plugin.py
@@ -107,7 +107,7 @@ def test_conf_key() -> None:
class TwilioPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> TwilioPlugin:
return TwilioPlugin()
def test_is_configured(self) -> None:
diff --git a/tests/sentry_plugins/victorops/test_plugin.py b/tests/sentry_plugins/victorops/test_plugin.py
index 8188b08e11d779..427dc3012ebef3 100644
--- a/tests/sentry_plugins/victorops/test_plugin.py
+++ b/tests/sentry_plugins/victorops/test_plugin.py
@@ -6,6 +6,7 @@
from sentry.interfaces.base import Interface
from sentry.models.rule import Rule
from sentry.plugins.base import Notification
+from sentry.services.eventstore.models import Event
from sentry.testutils.cases import PluginTestCase
from sentry_plugins.victorops.plugin import VictorOpsPlugin
@@ -16,10 +17,10 @@
class UnicodeTestInterface(Interface):
- def to_string(self, event) -> str:
+ def to_string(self, event: Event) -> str:
return self.body
- def get_title(self):
+ def get_title(self) -> str:
return self.title
@@ -29,7 +30,7 @@ def test_conf_key() -> None:
class VictorOpsPluginTest(PluginTestCase):
@cached_property
- def plugin(self):
+ def plugin(self) -> VictorOpsPlugin:
return VictorOpsPlugin()
def test_is_configured(self) -> None:
diff --git a/tests/snuba/api/endpoints/test_discover_homepage_query.py b/tests/snuba/api/endpoints/test_discover_homepage_query.py
index 7a64c052d06455..a0ff1866f902ec 100644
--- a/tests/snuba/api/endpoints/test_discover_homepage_query.py
+++ b/tests/snuba/api/endpoints/test_discover_homepage_query.py
@@ -14,7 +14,7 @@
class DiscoverHomepageQueryTest(DiscoverSavedQueryBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse("sentry-api-0-discover-homepage-query", args=[self.org.slug])
self.query = {"fields": ["test"], "conditions": [], "limit": 10}
diff --git a/tests/snuba/api/endpoints/test_discover_key_transactions.py b/tests/snuba/api/endpoints/test_discover_key_transactions.py
index 027ed15c0d71b7..eab2fc271c6b88 100644
--- a/tests/snuba/api/endpoints/test_discover_key_transactions.py
+++ b/tests/snuba/api/endpoints/test_discover_key_transactions.py
@@ -14,7 +14,7 @@
class TeamKeyTransactionTestBase(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user, superuser=False)
@@ -32,7 +32,7 @@ def __call__(
class TeamKeyTransactionTest(TeamKeyTransactionTestBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse("sentry-api-0-organization-key-transactions", args=[self.org.slug])
@@ -673,7 +673,7 @@ def test_delete_key_transaction_partially_existing_teams(self) -> None:
class TeamKeyTransactionListTest(TeamKeyTransactionTestBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse("sentry-api-0-organization-key-transactions-list", args=[self.org.slug])
diff --git a/tests/snuba/api/endpoints/test_discover_saved_queries.py b/tests/snuba/api/endpoints/test_discover_saved_queries.py
index ea0080cf79807c..ecf8a9dcce6da4 100644
--- a/tests/snuba/api/endpoints/test_discover_saved_queries.py
+++ b/tests/snuba/api/endpoints/test_discover_saved_queries.py
@@ -6,7 +6,7 @@
class DiscoverSavedQueryBase(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.org = self.create_organization(owner=self.user)
@@ -32,7 +32,7 @@ def setUp(self):
class DiscoverSavedQueriesTest(DiscoverSavedQueryBase):
feature_name = "organizations:discover"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse("sentry-api-0-discover-saved-queries", args=[self.org.slug])
@@ -358,7 +358,7 @@ def test_post_cannot_use_version_two_fields(self) -> None:
class DiscoverSavedQueriesVersion2Test(DiscoverSavedQueryBase):
feature_name = "organizations:discover-query"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse("sentry-api-0-discover-saved-queries", args=[self.org.slug])
diff --git a/tests/snuba/api/endpoints/test_discover_saved_query_detail.py b/tests/snuba/api/endpoints/test_discover_saved_query_detail.py
index 532ea206235ba5..5fe9b8fc82bf09 100644
--- a/tests/snuba/api/endpoints/test_discover_saved_query_detail.py
+++ b/tests/snuba/api/endpoints/test_discover_saved_query_detail.py
@@ -13,7 +13,7 @@
class DiscoverSavedQueryDetailTest(APITestCase, SnubaTestCase):
feature_name = "organizations:discover"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.org = self.create_organization(owner=self.user)
@@ -504,7 +504,7 @@ def test_disallow_delete_all_projects_savedquery_when_no_open_membership(self) -
class OrganizationDiscoverQueryVisitTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.org = self.create_organization(owner=self.user)
diff --git a/tests/snuba/api/endpoints/test_group_event_details.py b/tests/snuba/api/endpoints/test_group_event_details.py
index 9ae12c6b0b1d4f..29ca1159b9683b 100644
--- a/tests/snuba/api/endpoints/test_group_event_details.py
+++ b/tests/snuba/api/endpoints/test_group_event_details.py
@@ -4,7 +4,7 @@
class GroupEventDetailsTest(APITestCase, SnubaTestCase, PerformanceIssueTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_event_details.py b/tests/snuba/api/endpoints/test_organization_event_details.py
index 4c8ad7cb9e45c4..15412639aae033 100644
--- a/tests/snuba/api/endpoints/test_organization_event_details.py
+++ b/tests/snuba/api/endpoints/test_organization_event_details.py
@@ -14,7 +14,7 @@
class OrganizationEventDetailsEndpointTest(APITestCase, SnubaTestCase, OccurrenceTestMixin):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
min_ago = before_now(minutes=1).isoformat()
two_min_ago = before_now(minutes=2).isoformat()
@@ -310,7 +310,7 @@ def test_generic_event(self) -> None:
class EventComparisonTest(MetricsEnhancedPerformanceTestCase):
endpoint = "sentry-api-0-organization-event-details"
- def setUp(self):
+ def setUp(self) -> None:
self.init_snuba()
self.ten_mins_ago = before_now(minutes=10)
self.transaction_data = load_data("transaction", timestamp=self.ten_mins_ago)
diff --git a/tests/snuba/api/endpoints/test_organization_eventid.py b/tests/snuba/api/endpoints/test_organization_eventid.py
index bb25cb9a54bf52..ec22540293b08e 100644
--- a/tests/snuba/api/endpoints/test_organization_eventid.py
+++ b/tests/snuba/api/endpoints/test_organization_eventid.py
@@ -7,7 +7,7 @@
class EventIdLookupEndpointTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
min_ago = before_now(minutes=1).isoformat()
self.org = self.create_organization(owner=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_events.py b/tests/snuba/api/endpoints/test_organization_events.py
index 5fe3afde3c5f09..af63c92edca102 100644
--- a/tests/snuba/api/endpoints/test_organization_events.py
+++ b/tests/snuba/api/endpoints/test_organization_events.py
@@ -60,7 +60,7 @@ class OrganizationEventsEndpointTestBase(
viewname = "sentry-api-0-organization-events"
referrer = "api.organization-events"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.nine_mins_ago = before_now(minutes=9)
self.ten_mins_ago = before_now(minutes=10)
@@ -68,7 +68,7 @@ def setUp(self):
self.eleven_mins_ago = before_now(minutes=11)
self.eleven_mins_ago_iso = self.eleven_mins_ago.isoformat()
self.transaction_data = load_data("transaction", timestamp=self.ten_mins_ago)
- self.features = {}
+ self.features: dict[str, bool] = {}
def client_get(self, *args, **kwargs):
return self.client.get(*args, **kwargs)
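Several setUp methods in the hunks above and below gain self.features: dict[str, bool] = {} annotations; with an untyped empty literal, mypy typically cannot infer the element types and asks for an explicit annotation ("Need type annotation"). A tiny example of the fix outside the Sentry test classes:

class FeatureFlags:
    def __init__(self) -> None:
        # without the annotation, mypy cannot infer the key/value types of the
        # empty dict and reports "Need type annotation"
        self.features: dict[str, bool] = {}

    def enable(self, flag: str) -> None:
        self.features[flag] = True


flags = FeatureFlags()
flags.enable("organizations:discover-basic")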
diff --git a/tests/snuba/api/endpoints/test_organization_events_facets.py b/tests/snuba/api/endpoints/test_organization_events_facets.py
index 1d3895fa397b84..c93410aaf4d8f3 100644
--- a/tests/snuba/api/endpoints/test_organization_events_facets.py
+++ b/tests/snuba/api/endpoints/test_organization_events_facets.py
@@ -12,7 +12,7 @@
class OrganizationEventsFacetsEndpointTest(SnubaTestCase, APITestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1).replace(microsecond=0)
self.day_ago = before_now(days=1).replace(microsecond=0)
diff --git a/tests/snuba/api/endpoints/test_organization_events_facets_performance.py b/tests/snuba/api/endpoints/test_organization_events_facets_performance.py
index 2a6ed238850f73..7f593c6e69360d 100644
--- a/tests/snuba/api/endpoints/test_organization_events_facets_performance.py
+++ b/tests/snuba/api/endpoints/test_organization_events_facets_performance.py
@@ -15,7 +15,7 @@ class BaseOrganizationEventsFacetsPerformanceEndpointTest(SnubaTestCase, APITest
"organizations:performance-view",
)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1).replace(microsecond=0)
self.two_mins_ago = before_now(minutes=2).replace(microsecond=0)
@@ -38,7 +38,7 @@ def do_request(self, query=None, features=None):
class OrganizationEventsFacetsPerformanceEndpointTest(
BaseOrganizationEventsFacetsPerformanceEndpointTest
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self._transaction_count = 0
diff --git a/tests/snuba/api/endpoints/test_organization_events_facets_performance_histogram.py b/tests/snuba/api/endpoints/test_organization_events_facets_performance_histogram.py
index e1a8cde9d8864b..80cb4fa7299eb0 100644
--- a/tests/snuba/api/endpoints/test_organization_events_facets_performance_histogram.py
+++ b/tests/snuba/api/endpoints/test_organization_events_facets_performance_histogram.py
@@ -18,7 +18,7 @@ class OrganizationEventsFacetsPerformanceHistogramEndpointTest(
"organizations:performance-view",
)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self._transaction_count = 0
diff --git a/tests/snuba/api/endpoints/test_organization_events_has_measurements.py b/tests/snuba/api/endpoints/test_organization_events_has_measurements.py
index 2d10e5f0fc3de7..b2a8ec3be913e7 100644
--- a/tests/snuba/api/endpoints/test_organization_events_has_measurements.py
+++ b/tests/snuba/api/endpoints/test_organization_events_has_measurements.py
@@ -7,12 +7,12 @@
class OrganizationEventsHasMeasurementsTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.two_min_ago = before_now(minutes=2)
self.transaction_data = load_data("transaction", timestamp=before_now(minutes=1))
- self.features = {}
+ self.features: dict[str, bool] = {}
def do_request(self, query, features=None):
if features is None:
diff --git a/tests/snuba/api/endpoints/test_organization_events_histogram.py b/tests/snuba/api/endpoints/test_organization_events_histogram.py
index 7389828ccdbfa7..456e7f0c41dfc2 100644
--- a/tests/snuba/api/endpoints/test_organization_events_histogram.py
+++ b/tests/snuba/api/endpoints/test_organization_events_histogram.py
@@ -25,11 +25,11 @@
class OrganizationEventsHistogramEndpointTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.data = load_data("transaction")
- self.features = {}
+ self.features: dict[str, bool] = {}
def populate_events(self, specs):
start = before_now(minutes=5)
@@ -1030,10 +1030,10 @@ def test_histogram_outlier_filtering_with_no_rows(self) -> None:
class OrganizationEventsMetricsEnhancedPerformanceHistogramEndpointTest(
MetricsEnhancedPerformanceTestCase
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
- self.features = {}
+ self.features: dict[str, bool] = {}
def populate_events(self, specs):
start = before_now(minutes=5)
@@ -1162,6 +1162,6 @@ def test_histogram_exclude_outliers_data_filter(self) -> None:
class OrganizationEventsMetricsEnhancedPerformanceHistogramEndpointTestWithMetricLayer(
OrganizationEventsMetricsEnhancedPerformanceHistogramEndpointTest
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features["organizations:use-metrics-layer"] = True
diff --git a/tests/snuba/api/endpoints/test_organization_events_mep.py b/tests/snuba/api/endpoints/test_organization_events_mep.py
index e85d2e2e0a6a34..d3ed2c7d9b20c6 100644
--- a/tests/snuba/api/endpoints/test_organization_events_mep.py
+++ b/tests/snuba/api/endpoints/test_organization_events_mep.py
@@ -55,7 +55,7 @@ class OrganizationEventsMetricsEnhancedPerformanceEndpointTest(MetricsEnhancedPe
"d:transactions/measurements.custom_type@somethingcustom",
]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.transaction_data = load_data("transaction", timestamp=before_now(minutes=1))
self.features = {
@@ -4070,7 +4070,7 @@ def test_metrics_enhanced_request_made_for_saved_transaction_like_dashboard_widg
class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer(
OrganizationEventsMetricsEnhancedPerformanceEndpointTest
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features["organizations:use-metrics-layer"] = True
diff --git a/tests/snuba/api/endpoints/test_organization_events_meta.py b/tests/snuba/api/endpoints/test_organization_events_meta.py
index 2a90e0b47819f3..b8ef20dcc4fe4f 100644
--- a/tests/snuba/api/endpoints/test_organization_events_meta.py
+++ b/tests/snuba/api/endpoints/test_organization_events_meta.py
@@ -28,7 +28,7 @@ class OrganizationEventsMetaEndpoint(
SpanTestCase,
OurLogTestCase,
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.login_as(user=self.user)
@@ -317,7 +317,7 @@ def test_quantize_dates(self, mock_quantize: mock.MagicMock) -> None:
class OrganizationEventsRelatedIssuesEndpoint(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
def test_find_related_issue(self) -> None:
diff --git a/tests/snuba/api/endpoints/test_organization_events_ourlogs.py b/tests/snuba/api/endpoints/test_organization_events_ourlogs.py
index f9c34c4514b079..eff7b8abe82f3f 100644
--- a/tests/snuba/api/endpoints/test_organization_events_ourlogs.py
+++ b/tests/snuba/api/endpoints/test_organization_events_ourlogs.py
@@ -14,7 +14,7 @@ class OrganizationEventsOurLogsEndpointTest(OrganizationEventsEndpointTestBase):
def do_request(self, query, features=None, **kwargs):
return super().do_request(query, features, **kwargs)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features = {
"organizations:ourlogs-enabled": True,
diff --git a/tests/snuba/api/endpoints/test_organization_events_span_indexed.py b/tests/snuba/api/endpoints/test_organization_events_span_indexed.py
index 83ee388e19b3a4..abc8ff6e5d8878 100644
--- a/tests/snuba/api/endpoints/test_organization_events_span_indexed.py
+++ b/tests/snuba/api/endpoints/test_organization_events_span_indexed.py
@@ -22,7 +22,7 @@ class OrganizationEventsSpansEndpointTest(OrganizationEventsEndpointTestBase):
def do_request(self, query, features=None, **kwargs):
return super().do_request(query, features, **kwargs)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features = {
"organizations:starfish-view": True,
diff --git a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py
index 46eb107bf4294e..d2fe7b3fedfff3 100644
--- a/tests/snuba/api/endpoints/test_organization_events_span_metrics.py
+++ b/tests/snuba/api/endpoints/test_organization_events_span_metrics.py
@@ -22,7 +22,7 @@ class OrganizationEventsMetricsEnhancedPerformanceEndpointTest(MetricsEnhancedPe
"bar_transaction",
]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.six_min_ago = before_now(minutes=6)
@@ -2354,7 +2354,7 @@ def test_normalized_description(self) -> None:
class OrganizationEventsMetricsEnhancedPerformanceEndpointTestWithMetricLayer(
OrganizationEventsMetricsEnhancedPerformanceEndpointTest
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features["organizations:use-metrics-layer"] = True
diff --git a/tests/snuba/api/endpoints/test_organization_events_span_ops.py b/tests/snuba/api/endpoints/test_organization_events_span_ops.py
index 3c0bd0b7d904a0..c580817f7ea5a7 100644
--- a/tests/snuba/api/endpoints/test_organization_events_span_ops.py
+++ b/tests/snuba/api/endpoints/test_organization_events_span_ops.py
@@ -9,7 +9,7 @@
class OrganizationEventsSpanOpsEndpointBase(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_events_spans_histogram.py b/tests/snuba/api/endpoints/test_organization_events_spans_histogram.py
index c599dd48db34ed..e077591edd6822 100644
--- a/tests/snuba/api/endpoints/test_organization_events_spans_histogram.py
+++ b/tests/snuba/api/endpoints/test_organization_events_spans_histogram.py
@@ -12,9 +12,9 @@ class OrganizationEventsSpansHistogramEndpointTest(APITestCase, SnubaTestCase):
FEATURES = ["organizations:performance-span-histogram-view"]
URL = "sentry-api-0-organization-events-spans-histogram"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
- self.features = {}
+ self.features: dict[str, bool] = {}
self.login_as(user=self.user)
self.org = self.create_organization(owner=self.user)
self.project = self.create_project(organization=self.org)
@@ -58,7 +58,7 @@ def create_event(self, **kwargs):
return self.store_event(data, project_id=self.project.id)
- def format_span(self, op, group):
+ def format_span(self, op, group) -> str:
return f"{op}:{group}"
def do_request(self, query, with_feature=True):
diff --git a/tests/snuba/api/endpoints/test_organization_events_spans_performance.py b/tests/snuba/api/endpoints/test_organization_events_spans_performance.py
index 65ce5450cf1a3e..69a2ef6e6e9772 100644
--- a/tests/snuba/api/endpoints/test_organization_events_spans_performance.py
+++ b/tests/snuba/api/endpoints/test_organization_events_spans_performance.py
@@ -20,7 +20,7 @@ class OrganizationEventsSpansEndpointTestBase(APITestCase, SnubaTestCase):
"organizations:global-views",
]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats.py b/tests/snuba/api/endpoints/test_organization_events_stats.py
index b446b28f2f9517..2454dc6f4e17f7 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats.py
@@ -36,7 +36,7 @@ class _EventDataDict(TypedDict):
class OrganizationEventsStatsEndpointTest(APITestCase, SnubaTestCase, SearchIssueTestMixin):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.authed_user = self.user
@@ -81,7 +81,7 @@ def setUp(self):
"sentry-api-0-organization-events-stats",
kwargs={"organization_id_or_slug": self.project.organization.slug},
)
- self.features = {}
+ self.features: dict[str, bool] = {}
def do_request(self, data, url=None, features=None):
if features is None:
@@ -1248,7 +1248,7 @@ def test_group_id_tag_simple(self) -> None:
class OrganizationEventsStatsTopNEventsSpans(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -2827,7 +2827,7 @@ class OrganizationEventsStatsProfileFunctionDatasetEndpointTest(
):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -2934,7 +2934,7 @@ class OrganizationEventsStatsTopNEventsProfileFunctionDatasetEndpointTest(
):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -3020,7 +3020,7 @@ def test_functions_dataset_simple(self) -> None:
class OrganizationEventsStatsTopNEventsLogs(APITestCase, SnubaTestCase, OurLogTestCase):
# This is implemented almost exactly the same as spans, add a simple test case for a sanity check
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -3125,7 +3125,7 @@ def test_simple_top_events(self) -> None:
class OrganizationEventsStatsTopNEventsErrors(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -3577,7 +3577,7 @@ def test_top_events_with_error_unhandled(self) -> None:
class OrganizationEventsStatsErrorUpsamplingTest(APITestCase, SnubaTestCase):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.authed_user = self.user
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py
index 2ef5b1a76810c3..7329d2317f3227 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats_mep.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats_mep.py
@@ -29,7 +29,7 @@ class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTest(
"d:transactions/measurements.datacenter_memory@pebibyte",
]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
@@ -43,7 +43,7 @@ def setUp(self):
"organizations:performance-use-metrics": True,
}
- self.additional_params = dict()
+ self.additional_params: dict[str, Any] = dict()
# These throughput tests should roughly match the ones in OrganizationEventsStatsEndpointTest
@pytest.mark.querybuilder
@@ -1126,7 +1126,7 @@ class OrganizationEventsStatsMetricsEnhancedPerformanceEndpointTestWithOnDemandW
):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py b/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py
index 7e4d469725cd9b..6e0d98eac0259d 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats_ourlogs.py
@@ -9,7 +9,7 @@
class OrganizationEventsStatsOurlogsEndpointTest(OrganizationEventsEndpointTestBase):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.start = self.day_ago = before_now(days=1).replace(
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py b/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py
index 2e506db919176b..29fa81164661ed 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats_span_indexed.py
@@ -14,7 +14,7 @@
class OrganizationEventsStatsSpansEndpointTest(OrganizationEventsEndpointTestBase):
endpoint = "sentry-api-0-organization-events-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py b/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py
index 1f33cdac58af58..4b65ba554c5b52 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats_span_metrics.py
@@ -18,7 +18,7 @@ class OrganizationEventsStatsSpansMetricsEndpointTest(MetricsEnhancedPerformance
]
features = {"organizations:discover-basic": True}
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
@@ -366,7 +366,7 @@ def test_messaging_receive_latency(self) -> None:
class OrganizationEventsStatsSpansMetricsEndpointTestWithMetricLayer(
OrganizationEventsStatsSpansMetricsEndpointTest
):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features["organizations:use-metrics-layer"] = True
diff --git a/tests/snuba/api/endpoints/test_organization_events_timeseries.py b/tests/snuba/api/endpoints/test_organization_events_timeseries.py
index b12db4b241de9f..351b15c791206d 100644
--- a/tests/snuba/api/endpoints/test_organization_events_timeseries.py
+++ b/tests/snuba/api/endpoints/test_organization_events_timeseries.py
@@ -14,7 +14,7 @@
class OrganizationEventsTimeseriesEndpointTest(APITestCase, SnubaTestCase, SearchIssueTestMixin):
endpoint = "sentry-api-0-organization-events-timeseries"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.authed_user = self.user
diff --git a/tests/snuba/api/endpoints/test_organization_events_timeseries_logs.py b/tests/snuba/api/endpoints/test_organization_events_timeseries_logs.py
index fa42bd828a841f..b2533bbb419e7d 100644
--- a/tests/snuba/api/endpoints/test_organization_events_timeseries_logs.py
+++ b/tests/snuba/api/endpoints/test_organization_events_timeseries_logs.py
@@ -15,7 +15,7 @@
class OrganizationEventsStatsOurlogsMetricsEndpointTest(OrganizationEventsEndpointTestBase):
endpoint = "sentry-api-0-organization-events-timeseries"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.start = self.day_ago = before_now(days=1).replace(
diff --git a/tests/snuba/api/endpoints/test_organization_events_timeseries_spans.py b/tests/snuba/api/endpoints/test_organization_events_timeseries_spans.py
index 757aa2584d9dd3..0f2dc138a9a4b5 100644
--- a/tests/snuba/api/endpoints/test_organization_events_timeseries_spans.py
+++ b/tests/snuba/api/endpoints/test_organization_events_timeseries_spans.py
@@ -64,7 +64,7 @@ def __eq__(self, o: object):
class OrganizationEventsStatsSpansMetricsEndpointTest(OrganizationEventsEndpointTestBase):
endpoint = "sentry-api-0-organization-events-timeseries"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.start = self.day_ago = before_now(days=1).replace(
diff --git a/tests/snuba/api/endpoints/test_organization_events_trace.py b/tests/snuba/api/endpoints/test_organization_events_trace.py
index f23d6526fb29d1..da591b2707faca 100644
--- a/tests/snuba/api/endpoints/test_organization_events_trace.py
+++ b/tests/snuba/api/endpoints/test_organization_events_trace.py
@@ -18,7 +18,7 @@ class OrganizationEventsTraceEndpointBase(OrganizationEventsEndpointTestBase, Tr
"organizations:trace-view-load-more",
]
- def setUp(self):
+ def setUp(self) -> None:
"""
Span structure:
diff --git a/tests/snuba/api/endpoints/test_organization_events_trends.py b/tests/snuba/api/endpoints/test_organization_events_trends.py
index 4acbfce192bd24..4e38fed9d906a5 100644
--- a/tests/snuba/api/endpoints/test_organization_events_trends.py
+++ b/tests/snuba/api/endpoints/test_organization_events_trends.py
@@ -9,7 +9,7 @@
class OrganizationEventsTrendsBase(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -48,7 +48,7 @@ def assert_event(self, data):
class OrganizationEventsTrendsEndpointTest(OrganizationEventsTrendsBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse(
"sentry-api-0-organization-events-trends",
@@ -401,7 +401,7 @@ def test_auto_aggregation(self) -> None:
class OrganizationEventsTrendsStatsEndpointTest(OrganizationEventsTrendsBase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.url = reverse(
"sentry-api-0-organization-events-trends-stats",
@@ -830,7 +830,7 @@ def test_divide_by_zero(self) -> None:
class OrganizationEventsTrendsPagingTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.url = reverse(
diff --git a/tests/snuba/api/endpoints/test_organization_events_uptime_results.py b/tests/snuba/api/endpoints/test_organization_events_uptime_results.py
index 12645ce519bb7e..d261511e9bd755 100644
--- a/tests/snuba/api/endpoints/test_organization_events_uptime_results.py
+++ b/tests/snuba/api/endpoints/test_organization_events_uptime_results.py
@@ -12,7 +12,7 @@ class OrganizationEventsUptimeResultsEndpointTest(
):
dataset = "uptime_results"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features = {
"organizations:uptime-eap-enabled": True,
diff --git a/tests/snuba/api/endpoints/test_organization_events_vitals.py b/tests/snuba/api/endpoints/test_organization_events_vitals.py
index 98607a83ae27b5..d6ccd7a9fed8f0 100644
--- a/tests/snuba/api/endpoints/test_organization_events_vitals.py
+++ b/tests/snuba/api/endpoints/test_organization_events_vitals.py
@@ -12,7 +12,7 @@
class OrganizationEventsVitalsEndpointTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.start = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
self.end = self.start + timedelta(hours=6)
@@ -22,7 +22,7 @@ def setUp(self):
"start": self.start.isoformat(),
"end": self.end.isoformat(),
}
- self.features = {}
+ self.features: dict[str, bool] = {}
def store_event(self, data, measurements=None, **kwargs):
if measurements:
@@ -303,7 +303,7 @@ def test_edges_of_vital_thresholds(self) -> None:
class OrganizationEventsMetricsEnhancedPerformanceEndpointTest(MetricsEnhancedPerformanceTestCase):
METRIC_STRINGS = ["measurement_rating"]
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.start = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
self.end = self.start + timedelta(hours=6)
diff --git a/tests/snuba/api/endpoints/test_organization_group_index_stats.py b/tests/snuba/api/endpoints/test_organization_group_index_stats.py
index e6472cf829bef7..d4015ea46654ae 100644
--- a/tests/snuba/api/endpoints/test_organization_group_index_stats.py
+++ b/tests/snuba/api/endpoints/test_organization_group_index_stats.py
@@ -9,7 +9,7 @@
class GroupListTest(APITestCase, SnubaTestCase, OccurrenceTestMixin):
endpoint = "sentry-api-0-organization-group-index-stats"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
diff --git a/tests/snuba/api/endpoints/test_organization_issues_resolved_in_release.py b/tests/snuba/api/endpoints/test_organization_issues_resolved_in_release.py
index 56576beb9a607c..016715d16f81de 100644
--- a/tests/snuba/api/endpoints/test_organization_issues_resolved_in_release.py
+++ b/tests/snuba/api/endpoints/test_organization_issues_resolved_in_release.py
@@ -12,7 +12,7 @@ class OrganizationIssuesResolvedInReleaseEndpointTest(APITestCase, SnubaTestCase
endpoint = "sentry-api-0-organization-release-resolved"
method = "get"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.user = self.create_user()
self.org = self.create_organization()
diff --git a/tests/snuba/api/endpoints/test_organization_measurements_meta.py b/tests/snuba/api/endpoints/test_organization_measurements_meta.py
index 50c91b96f7403b..dee844284e6487 100644
--- a/tests/snuba/api/endpoints/test_organization_measurements_meta.py
+++ b/tests/snuba/api/endpoints/test_organization_measurements_meta.py
@@ -16,7 +16,7 @@ class OrganizationMeasurementsMetaEndpoint(MetricsEnhancedPerformanceTestCase):
]
features = {"organizations:discover-basic": True}
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.day_ago = before_now(days=1).replace(hour=10, minute=0, second=0, microsecond=0)
diff --git a/tests/snuba/api/endpoints/test_organization_metrics_meta.py b/tests/snuba/api/endpoints/test_organization_metrics_meta.py
index 6884ee2f3735d0..af34323903bb7e 100644
--- a/tests/snuba/api/endpoints/test_organization_metrics_meta.py
+++ b/tests/snuba/api/endpoints/test_organization_metrics_meta.py
@@ -8,7 +8,7 @@
class OrganizationMetricsCompatiblity(MetricsEnhancedPerformanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.two_min_ago = before_now(minutes=2)
@@ -122,7 +122,7 @@ def test_multiple_projects(self) -> None:
class OrganizationEventsMetricsSums(MetricsEnhancedPerformanceTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.two_min_ago = before_now(minutes=2)
diff --git a/tests/snuba/api/endpoints/test_organization_sessions.py b/tests/snuba/api/endpoints/test_organization_sessions.py
index a09f31d5f01a46..a70dc64d68339d 100644
--- a/tests/snuba/api/endpoints/test_organization_sessions.py
+++ b/tests/snuba/api/endpoints/test_organization_sessions.py
@@ -86,7 +86,7 @@ def adjust_end(end: datetime.datetime, interval: int) -> datetime.datetime:
class OrganizationSessionsEndpointTest(APITestCase, BaseMetricsTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.setup_fixture()
diff --git a/tests/snuba/api/endpoints/test_organization_stats_summary.py b/tests/snuba/api/endpoints/test_organization_stats_summary.py
index ec7f4b19b42664..ed6ce6ee9b9427 100644
--- a/tests/snuba/api/endpoints/test_organization_stats_summary.py
+++ b/tests/snuba/api/endpoints/test_organization_stats_summary.py
@@ -16,7 +16,7 @@
class OrganizationStatsSummaryTest(APITestCase, OutcomesSnubaTest):
_now = datetime.now(UTC).replace(hour=12, minute=27, second=28, microsecond=0)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_stats_v2.py b/tests/snuba/api/endpoints/test_organization_stats_v2.py
index f544f64273acc3..9bf670cd9bf938 100644
--- a/tests/snuba/api/endpoints/test_organization_stats_v2.py
+++ b/tests/snuba/api/endpoints/test_organization_stats_v2.py
@@ -12,7 +12,7 @@ class OrganizationStatsTestV2(APITestCase, OutcomesSnubaTest):
_now = datetime.now(UTC).replace(hour=12, minute=27, second=28, microsecond=0)
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_tagkey_values.py b/tests/snuba/api/endpoints/test_organization_tagkey_values.py
index 532847cd503d2e..52b1b3e0c3ab66 100644
--- a/tests/snuba/api/endpoints/test_organization_tagkey_values.py
+++ b/tests/snuba/api/endpoints/test_organization_tagkey_values.py
@@ -17,7 +17,7 @@
class OrganizationTagKeyTestCase(APITestCase, SnubaTestCase):
endpoint = "sentry-api-0-organization-tagkey-values"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.day_ago = before_now(days=1)
@@ -491,7 +491,7 @@ def test_simple_flags(self) -> None:
class TransactionTagKeyValues(OrganizationTagKeyTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
data = load_data("transaction", timestamp=before_now(minutes=1))
data.update(
@@ -591,7 +591,7 @@ def test_boolean_fields(self) -> None:
class ReplayOrganizationTagKeyValuesTest(OrganizationTagKeyTestCase, ReplaysSnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
replay1_id = uuid.uuid4().hex
replay2_id = uuid.uuid4().hex
@@ -839,7 +839,7 @@ def test_schema(self) -> None:
class DatasetParamOrganizationTagKeyValuesTest(OrganizationTagKeyTestCase, OccurrenceTestMixin):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
def run_dataset_test(self, key, expected, dataset: Dataset, **kwargs):
diff --git a/tests/snuba/api/endpoints/test_organization_tags.py b/tests/snuba/api/endpoints/test_organization_tags.py
index b4c301512f516b..6f86744422ce0c 100644
--- a/tests/snuba/api/endpoints/test_organization_tags.py
+++ b/tests/snuba/api/endpoints/test_organization_tags.py
@@ -12,7 +12,7 @@
class OrganizationTagsTest(APITestCase, OccurrenceTestMixin, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1).isoformat()
diff --git a/tests/snuba/api/endpoints/test_organization_trace.py b/tests/snuba/api/endpoints/test_organization_trace.py
index 03054b8c6968db..f9c103205d1c31 100644
--- a/tests/snuba/api/endpoints/test_organization_trace.py
+++ b/tests/snuba/api/endpoints/test_organization_trace.py
@@ -24,7 +24,7 @@
class TestSerializeColumnarUptimeItem(TestCase):
"""Test serialization of columnar uptime data to span format."""
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project_slugs = {1: "test-project", 2: "another-project"}
self.snuba_params = mock.MagicMock(spec=SnubaParams)
diff --git a/tests/snuba/api/endpoints/test_organization_trace_item_attributes.py b/tests/snuba/api/endpoints/test_organization_trace_item_attributes.py
index 58c7ca039c97b9..b62628248648ec 100644
--- a/tests/snuba/api/endpoints/test_organization_trace_item_attributes.py
+++ b/tests/snuba/api/endpoints/test_organization_trace_item_attributes.py
@@ -26,7 +26,7 @@ class OrganizationTraceItemAttributesEndpointTestBase(APITestCase, SnubaTestCase
viewname = "sentry-api-0-organization-trace-item-attributes"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
@@ -565,7 +565,7 @@ class OrganizationTraceItemAttributeValuesEndpointBaseTest(APITestCase, SnubaTes
viewname = "sentry-api-0-organization-trace-item-attribute-values"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_organization_trace_item_attributes_ranked.py b/tests/snuba/api/endpoints/test_organization_trace_item_attributes_ranked.py
index 31dece1730acc7..60e201b0e69ed7 100644
--- a/tests/snuba/api/endpoints/test_organization_trace_item_attributes_ranked.py
+++ b/tests/snuba/api/endpoints/test_organization_trace_item_attributes_ranked.py
@@ -11,7 +11,7 @@ class OrganizationTraceItemsAttributesRankedEndpointTest(
):
view = "sentry-api-0-organization-trace-item-attributes-ranked"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.features = {
diff --git a/tests/snuba/api/endpoints/test_organization_trace_logs.py b/tests/snuba/api/endpoints/test_organization_trace_logs.py
index 21421fdbd8e5f8..448643b7de9424 100644
--- a/tests/snuba/api/endpoints/test_organization_trace_logs.py
+++ b/tests/snuba/api/endpoints/test_organization_trace_logs.py
@@ -11,7 +11,7 @@
class OrganizationEventsTraceEndpointTest(OrganizationEventsEndpointTestBase):
url_name = "sentry-api-0-organization-trace-logs"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.features = {
"organizations:ourlogs-enabled": True,
diff --git a/tests/snuba/api/endpoints/test_project_event_details.py b/tests/snuba/api/endpoints/test_project_event_details.py
index 59c6e47a81077d..a163a4bbec3139 100644
--- a/tests/snuba/api/endpoints/test_project_event_details.py
+++ b/tests/snuba/api/endpoints/test_project_event_details.py
@@ -7,7 +7,7 @@
class ProjectEventDetailsTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.setup_data()
@@ -238,7 +238,7 @@ def test_generic_event_with_occurrence(self) -> None:
class ProjectEventDetailsTransactionTest(APITestCase, SnubaTestCase, PerformanceIssueTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
project = self.create_project()
@@ -362,7 +362,7 @@ def test_no_group_id(self) -> None:
class ProjectEventJsonEndpointTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.event_id = "c" * 32
@@ -434,7 +434,7 @@ def test_project_not_associated_with_event(self) -> None:
class ProjectEventDetailsTransactionTestScrubbed(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
data = load_data("transaction")
diff --git a/tests/snuba/api/endpoints/test_project_group_index.py b/tests/snuba/api/endpoints/test_project_group_index.py
index ab5796b13ce6c7..8f2474ad7f61de 100644
--- a/tests/snuba/api/endpoints/test_project_group_index.py
+++ b/tests/snuba/api/endpoints/test_project_group_index.py
@@ -43,7 +43,7 @@
class GroupListTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
@@ -56,7 +56,7 @@ def _parse_links(self, header):
return links
@cached_property
- def path(self):
+ def path(self) -> str:
return f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/issues/"
def test_sort_by_date_with_tag(self) -> None:
@@ -395,12 +395,12 @@ def test_multiple_groups_by_hashes(self) -> None:
class GroupUpdateTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = timezone.now() - timedelta(minutes=1)
@cached_property
- def path(self):
+ def path(self) -> str:
return f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/issues/"
def assertNoResolution(self, group):
@@ -1495,7 +1495,7 @@ def test_discard_requires_events_admin(self, mock_get_scopes: MagicMock) -> None
class GroupDeleteTest(APITestCase, SnubaTestCase):
@cached_property
- def path(self):
+ def path(self) -> str:
return f"/api/0/projects/{self.project.organization.slug}/{self.project.slug}/issues/"
def create_groups(
diff --git a/tests/snuba/api/endpoints/test_project_tags.py b/tests/snuba/api/endpoints/test_project_tags.py
index bb70bd7cc2381d..ea402ef4d0c10f 100644
--- a/tests/snuba/api/endpoints/test_project_tags.py
+++ b/tests/snuba/api/endpoints/test_project_tags.py
@@ -6,7 +6,7 @@
class ProjectTagsTest(APITestCase, SnubaTestCase):
endpoint = "sentry-api-0-project-tags"
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
diff --git a/tests/snuba/api/endpoints/test_project_trace_item_details.py b/tests/snuba/api/endpoints/test_project_trace_item_details.py
index 5c4961cefacdcb..e8fa56d3d90385 100644
--- a/tests/snuba/api/endpoints/test_project_trace_item_details.py
+++ b/tests/snuba/api/endpoints/test_project_trace_item_details.py
@@ -8,7 +8,7 @@
class ProjectTraceItemDetailsEndpointTest(APITestCase, SnubaTestCase, OurLogTestCase, SpanTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.login_as(user=self.user)
self.features = {
diff --git a/tests/snuba/api/serializers/test_group.py b/tests/snuba/api/serializers/test_group.py
index 16a90c8f52a35a..b9610cf3c5bb16 100644
--- a/tests/snuba/api/serializers/test_group.py
+++ b/tests/snuba/api/serializers/test_group.py
@@ -31,7 +31,7 @@
class GroupSerializerSnubaTest(APITestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.min_ago = before_now(minutes=1)
self.day_ago = before_now(days=1)
diff --git a/tests/snuba/incidents/test_tasks.py b/tests/snuba/incidents/test_tasks.py
index 66121bc056166d..3d3f9b7a93214b 100644
--- a/tests/snuba/incidents/test_tasks.py
+++ b/tests/snuba/incidents/test_tasks.py
@@ -43,7 +43,7 @@
@freeze_time()
class HandleSnubaQueryUpdateTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.topic = Topic.METRICS_SUBSCRIPTIONS_RESULTS
self.orig_registry = deepcopy(subscriber_registry)
@@ -59,7 +59,7 @@ def setUp(self):
create_topics(self.cluster, [self.real_topic])
- def tearDown(self):
+ def tearDown(self) -> None:
super().tearDown()
subscriber_registry.clear()
subscriber_registry.update(self.orig_registry)
diff --git a/tests/snuba/models/test_group.py b/tests/snuba/models/test_group.py
index 75a636efa3b162..cf93a7908e534f 100644
--- a/tests/snuba/models/test_group.py
+++ b/tests/snuba/models/test_group.py
@@ -185,7 +185,7 @@ def _get_oldest(
@freeze_time()
class GroupTestSnubaErrorIssue(TestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project = self.create_project()
self.event_a = self.store_event(
@@ -333,7 +333,7 @@ def test_oldest_event(self) -> None:
@freeze_time()
class GroupTestSnubaPerformanceIssue(TestCase, SnubaTestCase, PerformanceIssueTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project = self.create_project()
group_fingerprint = f"{PerformanceNPlusOneGroupType.type_id}-group1"
@@ -481,7 +481,7 @@ def test_oldest_event(self) -> None:
@freeze_time()
class GroupTestSnubaOccurrenceIssue(TestCase, SnubaTestCase, OccurrenceTestMixin):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.project = self.create_project()
diff --git a/tests/snuba/rules/conditions/test_event_frequency.py b/tests/snuba/rules/conditions/test_event_frequency.py
index b026b8a7e22fd0..ccbdaf53110f45 100644
--- a/tests/snuba/rules/conditions/test_event_frequency.py
+++ b/tests/snuba/rules/conditions/test_event_frequency.py
@@ -68,7 +68,7 @@ def make_session(i):
class EventFrequencyQueryTestBase(SnubaTestCase, RuleTestCase, PerformanceIssueTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.start = before_now(minutes=1)
diff --git a/tests/snuba/search/test_backend.py b/tests/snuba/search/test_backend.py
index cfdd776b2a35aa..fb52887128c702 100644
--- a/tests/snuba/search/test_backend.py
+++ b/tests/snuba/search/test_backend.py
@@ -110,7 +110,7 @@ class EventsDatasetTestSetup(SharedSnubaMixin):
def backend(self):
return EventsDatasetSnubaSearchBackend()
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.base_datetime = before_now(days=3).replace(microsecond=0)
@@ -3009,7 +3009,7 @@ class EventsTransactionsSnubaSearchTest(TestCase, SharedSnubaMixin):
def backend(self):
return EventsDatasetSnubaSearchBackend()
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.base_datetime = before_now(days=3)
@@ -3376,7 +3376,7 @@ class EventsGenericSnubaSearchTest(TestCase, SharedSnubaMixin, OccurrenceTestMix
def backend(self):
return EventsDatasetSnubaSearchBackend()
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.base_datetime = before_now(days=3)
diff --git a/tests/snuba/sessions/test_sessions.py b/tests/snuba/sessions/test_sessions.py
index 6890f1006694ec..2ac00bdcadc8bf 100644
--- a/tests/snuba/sessions/test_sessions.py
+++ b/tests/snuba/sessions/test_sessions.py
@@ -1030,7 +1030,7 @@ class GetCrashFreeRateTestCase(TestCase, BaseMetricsTestCase):
backend = MetricsReleaseHealthBackend()
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.session_started = time.time() // 60 * 60
self.session_started_gt_24_lt_48 = self.session_started - 30 * 60 * 60
@@ -1312,7 +1312,7 @@ def test(self) -> None:
class CheckNumberOfSessions(TestCase, BaseMetricsTestCase):
backend = MetricsReleaseHealthBackend()
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
# now_dt should be set to 17:40 of some day not in the future and (system time - now_dt)
# must be less than 90 days for the metrics DB TTL
diff --git a/tests/snuba/tagstore/test_tagstore_backend.py b/tests/snuba/tagstore/test_tagstore_backend.py
index 31e40e14b5b546..6f25d353395662 100644
--- a/tests/snuba/tagstore/test_tagstore_backend.py
+++ b/tests/snuba/tagstore/test_tagstore_backend.py
@@ -46,7 +46,7 @@
class TagStorageTest(TestCase, SnubaTestCase, SearchIssueTestMixin, PerformanceIssueTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.ts = SnubaTagStorage()
@@ -1195,7 +1195,7 @@ def test_error_upsampling_tag_value_counts(self) -> None:
class ProfilingTagStorageTest(TestCase, SnubaTestCase, SearchIssueTestMixin):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.ts = SnubaTagStorage()
@@ -1311,7 +1311,7 @@ class BaseSemverTest(TestCase, SnubaTestCase):
KEY: str
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.ts = SnubaTagStorage()
@@ -1463,7 +1463,7 @@ def test_semver_package(self) -> None:
class GetTagValuePaginatorForProjectsReleaseStageTest(TestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.ts = SnubaTagStorage()
diff --git a/tests/snuba/tsdb/test_tsdb_backend.py b/tests/snuba/tsdb/test_tsdb_backend.py
index c0455d11b8cbb2..01acf45dfb931b 100644
--- a/tests/snuba/tsdb/test_tsdb_backend.py
+++ b/tests/snuba/tsdb/test_tsdb_backend.py
@@ -52,7 +52,7 @@ def has_shape(data, shape):
class SnubaTSDBTest(TestCase, SnubaTestCase):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.db = SnubaTSDB()
@@ -606,7 +606,7 @@ def test_tsdb_with_consistent(self) -> None:
class SnubaTSDBGroupProfilingTest(TestCase, SnubaTestCase, SearchIssueTestMixin):
- def setUp(self):
+ def setUp(self) -> None:
super().setUp()
self.db = SnubaTSDB()
@@ -884,7 +884,7 @@ def test_get_data_or_conditions_parsed(self) -> None:
class AddJitterToSeriesTest(TestCase):
- def setUp(self):
+ def setUp(self) -> None:
self.db = SnubaTSDB()
def run_test(self, end, interval, jitter, expected_start, expected_end):
diff --git a/tests/tools/test_docker_memory_check.py b/tests/tools/test_docker_memory_check.py
index 83308dc8dacebd..6edb8aa1789523 100644
--- a/tests/tools/test_docker_memory_check.py
+++ b/tests/tools/test_docker_memory_check.py
@@ -13,7 +13,7 @@
("never", False),
),
)
-def test_should_use_color_forced(option, expected) -> None:
+def test_should_use_color_forced(option: str, expected: bool) -> None:
assert docker_memory_check.should_use_color(option) is expected
diff --git a/tests/tools/test_pin_github_action.py b/tests/tools/test_pin_github_action.py
index f5f2ed2e150865..73e1cfa5133a5e 100644
--- a/tests/tools/test_pin_github_action.py
+++ b/tests/tools/test_pin_github_action.py
@@ -13,7 +13,7 @@
("uses: actions/cache@v1.0.0 # after\n", ("actions/cache", "v1.0.0")),
),
)
-def test_matches(s, expected) -> None:
+def test_matches(s: str, expected: str) -> None:
match = ACTION_VERSION_RE.search(s)
assert match
assert (match[1], match[2]) == expected