| hash (string, 40 chars) | date (2017-08-30 22:37:25 – 2025-03-22 03:08:47) | author (173 classes) | commit_message (string, 15–151 chars) | is_merge (bool, 1 class) | masked_commit_message (string, 6–126 chars) | type (17 classes) | git_diff (string, 182–1.51M chars, nullable ⌀) |
|---|---|---|---|---|---|---|---|
2d0bcbd2b879b0b0f5063f4d3504c198503caf6e
|
2022-09-27 01:25:19
|
Andrew Xue
|
feat(alert-email): convert alert email datetimes to local timezones (#39255)
| false
|
convert alert email datetimes to local timezones (#39255)
|
feat
|
diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py
index dc1106450cc11b..d245d3111c608d 100644
--- a/src/sentry/incidents/action_handlers.py
+++ b/src/sentry/incidents/action_handlers.py
@@ -4,6 +4,7 @@
import logging
from typing import Sequence, Set, Tuple
+from django.conf import settings
from django.template.defaultfilters import pluralize
from django.urls import reverse
@@ -20,6 +21,8 @@
TriggerStatus,
)
from sentry.models.notificationsetting import NotificationSetting
+from sentry.models.options.user_option import UserOption
+from sentry.models.user import User
from sentry.types.integrations import ExternalProviders
from sentry.utils import json
from sentry.utils.email import MessageBuilder, get_email_addresses
@@ -88,14 +91,16 @@ def resolve(self, metric_value: int | float, new_status: IncidentStatus):
self.email_users(TriggerStatus.RESOLVED, new_status)
def email_users(self, trigger_status: TriggerStatus, incident_status: IncidentStatus) -> None:
- email_context = generate_incident_trigger_email_context(
- self.project,
- self.incident,
- self.action.alert_rule_trigger,
- trigger_status,
- incident_status,
- )
for user_id, email in self.get_targets():
+ user = User.objects.get_from_cache(id=user_id)
+ email_context = generate_incident_trigger_email_context(
+ self.project,
+ self.incident,
+ self.action.alert_rule_trigger,
+ trigger_status,
+ incident_status,
+ user,
+ )
self.build_message(email_context, trigger_status, user_id).send_async(to=[email])
def build_message(self, context, status, user_id):
@@ -185,7 +190,12 @@ def format_duration(minutes):
def generate_incident_trigger_email_context(
- project, incident, alert_rule_trigger, trigger_status, incident_status
+ project,
+ incident,
+ alert_rule_trigger,
+ trigger_status,
+ incident_status,
+ user=None,
):
trigger = alert_rule_trigger
incident_trigger = IncidentTrigger.objects.get(incident=incident, alert_rule_trigger=trigger)
@@ -225,6 +235,12 @@ def generate_incident_trigger_email_context(
except Exception:
logging.exception("Error while attempting to build_metric_alert_chart")
+ tz = settings.SENTRY_DEFAULT_TIME_ZONE
+ if user is not None:
+ user_option_tz = UserOption.objects.get_value(user=user, key="timezone")
+ if user_option_tz is not None:
+ tz = user_option_tz
+
return {
"link": absolute_uri(
reverse(
@@ -263,4 +279,5 @@ def generate_incident_trigger_email_context(
"is_warning": incident_status == IncidentStatus.WARNING,
"unsubscribe_link": None,
"chart_url": chart_url,
+ "timezone": tz,
}
diff --git a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
index 3d5f43cc34c389..463893261be6bc 100644
--- a/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
+++ b/src/sentry/web/frontend/debug/debug_incident_trigger_email.py
@@ -1,5 +1,7 @@
from unittest import mock
+from django.utils import timezone
+
from sentry.incidents.action_handlers import generate_incident_trigger_email_context
from sentry.incidents.models import (
AlertRule,
@@ -8,23 +10,25 @@
IncidentStatus,
TriggerStatus,
)
-from sentry.models import Organization, Project
+from sentry.models import Organization, Project, User
from sentry.snuba.models import SnubaQuery
from .mail import MailPreviewView
class MockedIncidentTrigger:
- date_added = "Some date"
+ date_added = timezone.now()
class DebugIncidentTriggerEmailView(MailPreviewView):
@mock.patch(
"sentry.incidents.models.IncidentTrigger.objects.get", return_value=MockedIncidentTrigger()
)
- def get_context(self, request, mock):
+ @mock.patch("sentry.models.UserOption.objects.get_value", return_value="US/Pacific")
+ def get_context(self, request, incident_trigger_mock, user_option_mock):
organization = Organization(slug="myorg")
project = Project(slug="myproject", organization=organization)
+ user = User()
query = SnubaQuery(
time_window=60, query="transaction:/some/transaction", aggregate="count()"
@@ -41,7 +45,7 @@ def get_context(self, request, mock):
trigger = AlertRuleTrigger(alert_rule=alert_rule)
return generate_incident_trigger_email_context(
- project, incident, trigger, TriggerStatus.ACTIVE, IncidentStatus(incident.status)
+ project, incident, trigger, TriggerStatus.ACTIVE, IncidentStatus(incident.status), user
)
@property
diff --git a/src/sentry/web/helpers.py b/src/sentry/web/helpers.py
index 8ee849a85929c4..25edcf0f754012 100644
--- a/src/sentry/web/helpers.py
+++ b/src/sentry/web/helpers.py
@@ -1,9 +1,11 @@
import logging
+import pytz
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.http import HttpResponse
from django.template import loader
+from django.utils import timezone
from sentry.auth import access
from sentry.models import Team
@@ -84,7 +86,13 @@ def render_to_string(template, context=None, request=None):
context = dict(context)
context.update(default_context)
- return loader.render_to_string(template, context=context, request=request)
+ if "timezone" in context and context["timezone"] in pytz.all_timezones_set:
+ timezone.activate(context["timezone"])
+
+ rendered = loader.render_to_string(template, context=context, request=request)
+ timezone.deactivate()
+
+ return rendered
def render_to_response(template, context=None, request=None, status=200, content_type="text/html"):
diff --git a/tests/sentry/incidents/action_handlers/test_email.py b/tests/sentry/incidents/action_handlers/test_email.py
index 6932e8139eba13..8416d1087b5673 100644
--- a/tests/sentry/incidents/action_handlers/test_email.py
+++ b/tests/sentry/incidents/action_handlers/test_email.py
@@ -1,6 +1,7 @@
from unittest.mock import patch
import responses
+from django.conf import settings
from django.core import mail
from django.urls import reverse
from django.utils import timezone
@@ -210,6 +211,7 @@ def test(self):
"project_slug": self.project.slug,
"unsubscribe_link": None,
"chart_url": None,
+ "timezone": settings.SENTRY_DEFAULT_TIME_ZONE,
}
assert expected == generate_incident_trigger_email_context(
self.project,
@@ -382,3 +384,35 @@ def test_metric_chart_mep(self, mock_generate_chart, mock_fetch_metric_alert_eve
series_data = chart_data["timeseriesData"][0]["data"]
assert len(series_data) > 0
assert mock_generate_chart.call_args[1]["size"] == {"width": 600, "height": 200}
+
+ def test_timezones(self):
+ trigger_status = TriggerStatus.ACTIVE
+ alert_rule = self.create_alert_rule(
+ query_type=SnubaQuery.Type.PERFORMANCE, dataset=Dataset.PerformanceMetrics
+ )
+ incident = self.create_incident(alert_rule=alert_rule)
+ action = self.create_alert_rule_trigger_action(triggered_for_incident=incident)
+
+ est = "America/New_York"
+ pst = "US/Pacific"
+ UserOption.objects.set_value(user=self.user, key="timezone", value=est)
+ result = generate_incident_trigger_email_context(
+ self.project,
+ incident,
+ action.alert_rule_trigger,
+ trigger_status,
+ IncidentStatus(incident.status),
+ self.user,
+ )
+ assert result["timezone"] == est
+
+ UserOption.objects.set_value(user=self.user, key="timezone", value=pst)
+ result = generate_incident_trigger_email_context(
+ self.project,
+ incident,
+ action.alert_rule_trigger,
+ trigger_status,
+ IncidentStatus(incident.status),
+ self.user,
+ )
+ assert result["timezone"] == pst
|
4056bd349ca6acb10d3d837b13a230a1ad523e89
|
2023-10-18 21:39:33
|
Matt Quinn
|
fix(statistical-detectors): only consider backend transactions (#58277)
| false
|
only consider backend transactions (#58277)
|
fix
|
diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py
index 90c172196365ba..4b6321b14e2a8b 100644
--- a/src/sentry/tasks/statistical_detectors.py
+++ b/src/sentry/tasks/statistical_detectors.py
@@ -694,6 +694,31 @@ def all_function_timeseries(
continue
+BACKEND_TRANSACTION_OPS = [
+ # Common
+ "function.aws",
+ "function.aws.lambda",
+ "http.server",
+ "queue.process",
+ "serverless.function",
+ "task",
+ "websocket.server",
+ # Python
+ "asgi.server",
+ "celery.task",
+ "queue.task.celery",
+ "queue.task.rq",
+ "rq.task",
+ # Ruby
+ "queue.active_job",
+ "queue.delayed_job",
+ "queue.sidekiq",
+ "rails.action_cable",
+ "rails.request",
+ "sidekiq",
+]
+
+
def query_transactions(
org_ids: List[int],
project_ids: List[int],
@@ -713,6 +738,11 @@ def query_transactions(
org_ids[0],
"transaction",
)
+ transaction_op_metric_id = indexer.resolve(
+ use_case_id,
+ org_ids[0],
+ "transaction.op",
+ )
# if our time range is more than an hour, use the hourly granularity
granularity = 3600 if int(end.timestamp()) - int(start.timestamp()) >= 3600 else 60
@@ -765,6 +795,11 @@ def query_transactions(
Condition(Column("timestamp"), Op.GTE, start),
Condition(Column("timestamp"), Op.LT, end),
Condition(Column("metric_id"), Op.EQ, duration_metric_id),
+ Condition(
+ Column(f"tags_raw[{transaction_op_metric_id}]"),
+ Op.IN,
+ list(BACKEND_TRANSACTION_OPS),
+ ),
],
limitby=LimitBy([Column("project_id")], transactions_per_project),
orderby=[
diff --git a/tests/sentry/tasks/test_statistical_detectors.py b/tests/sentry/tasks/test_statistical_detectors.py
index 5de06ab313a548..accf7b4257d72e 100644
--- a/tests/sentry/tasks/test_statistical_detectors.py
+++ b/tests/sentry/tasks/test_statistical_detectors.py
@@ -461,12 +461,13 @@ def setUp(self):
for project in self.projects:
for i in range(self.num_transactions):
+ # Store metrics for a backend transaction
self.store_metric(
self.org.id,
project.id,
"distribution",
TransactionMRI.DURATION.value,
- {"transaction": f"transaction_{i}"},
+ {"transaction": f"transaction_{i}", "transaction.op": "http.server"},
self.hour_ago_seconds,
1.0,
UseCaseID.TRANSACTIONS,
@@ -476,7 +477,30 @@ def setUp(self):
project.id,
"distribution",
TransactionMRI.DURATION.value,
- {"transaction": f"transaction_{i}"},
+ {"transaction": f"transaction_{i}", "transaction.op": "http.server"},
+ self.hour_ago_seconds,
+ 9.5,
+ UseCaseID.TRANSACTIONS,
+ )
+
+ # Store metrics for a frontend transaction, which should be
+ # ignored by the query
+ self.store_metric(
+ self.org.id,
+ project.id,
+ "distribution",
+ TransactionMRI.DURATION.value,
+ {"transaction": f"fe_transaction_{i}", "transaction.op": "navigation"},
+ self.hour_ago_seconds,
+ 1.0,
+ UseCaseID.TRANSACTIONS,
+ )
+ self.store_metric(
+ self.org.id,
+ project.id,
+ "distribution",
+ TransactionMRI.DURATION.value,
+ {"transaction": f"fe_transaction_{i}", "transaction.op": "navigation"},
self.hour_ago_seconds,
9.5,
UseCaseID.TRANSACTIONS,
@@ -492,7 +516,7 @@ def test_transactions_query(self) -> None:
[p.id for p in self.projects],
self.hour_ago,
self.now,
- self.num_transactions,
+ self.num_transactions + 1, # detect if any extra transactions are returned
)
assert len(res) == len(self.projects) * self.num_transactions
for trend_payload in res:
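
Conceptually the fix is an allowlist on the `transaction.op` tag: only ops known to come from backend SDKs survive the query. A self-contained sketch of that classification follows; the real change pushes this filter into the Snuba query via the indexer-resolved tag column, as shown above, and `is_backend_transaction` here is a hypothetical helper with an abbreviated op list.

```python
# Hedged sketch: op list abbreviated from BACKEND_TRANSACTION_OPS above.
BACKEND_TRANSACTION_OPS = frozenset([
    "http.server", "task", "celery.task", "queue.task.celery",
    "rails.request", "sidekiq",
])


def is_backend_transaction(tags: dict) -> bool:
    # A transaction counts as backend only when its op is on the allowlist.
    return tags.get("transaction.op") in BACKEND_TRANSACTION_OPS


assert is_backend_transaction({"transaction.op": "http.server"})
# Frontend ops such as "navigation" fall through, matching the new test data.
assert not is_backend_transaction({"transaction.op": "navigation"})
```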
|
1c7b9cf01cb5685b3f9b3c07b819fbf1f1b9529d
|
2022-11-18 02:42:26
|
anthony sottile
|
ref: type src/sentry/web/frontend/base.py (#41514)
| false
|
type src/sentry/web/frontend/base.py (#41514)
|
ref
|
diff --git a/mypy.ini b/mypy.ini
index 60b0d9bf805c95..4b5a05b745afd6 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -165,6 +165,7 @@ files = fixtures/mypy-stubs,
src/sentry/utils/time_window.py,
src/sentry/utils/yaml.py,
src/sentry/web/decorators.py,
+ src/sentry/web/frontend/base.py,
src/sentry/web/helpers.py,
tests/sentry/eventstream/kafka/test_consumer_strategy.py,
tests/sentry/eventstream/kafka/test_synchronized.py,
@@ -196,6 +197,8 @@ no_implicit_reexport=True
follow_imports = skip
[mypy-bitfield.*]
follow_imports = silent
+[mypy-sudo.*]
+follow_imports = silent
[mypy-celery.*]
ignore_missing_imports = True
diff --git a/src/sentry/auth/view.py b/src/sentry/auth/view.py
index a988f9c8ebed55..60679927de012c 100644
--- a/src/sentry/auth/view.py
+++ b/src/sentry/auth/view.py
@@ -12,7 +12,7 @@
from sentry.models.organization import Organization
-class AuthView(BaseView): # type: ignore[misc]
+class AuthView(BaseView):
"""
A segment of Provider's auth pipeline.
diff --git a/src/sentry/integrations/slack/views/link_identity.py b/src/sentry/integrations/slack/views/link_identity.py
index e2e82b350f4099..7d34b51989b811 100644
--- a/src/sentry/integrations/slack/views/link_identity.py
+++ b/src/sentry/integrations/slack/views/link_identity.py
@@ -32,7 +32,7 @@ def build_linking_url(
)
-class SlackLinkIdentityView(BaseView): # type: ignore
+class SlackLinkIdentityView(BaseView):
@transaction_start("SlackLinkIdentityView")
@never_cache
def handle(self, request: Request, signed_params: str) -> Response:
diff --git a/src/sentry/integrations/slack/views/link_team.py b/src/sentry/integrations/slack/views/link_team.py
index e1898c7238e2b6..d660dfef68683c 100644
--- a/src/sentry/integrations/slack/views/link_team.py
+++ b/src/sentry/integrations/slack/views/link_team.py
@@ -59,7 +59,7 @@ def __init__(self, teams: Sequence[Team], *args: Any, **kwargs: Any):
self.fields["team"].widget.choices = self.fields["team"].choices
-class SlackLinkTeamView(BaseView): # type: ignore
+class SlackLinkTeamView(BaseView):
@transaction_start("SlackLinkTeamView")
@never_cache
def handle(self, request: Request, signed_params: str) -> HttpResponse:
diff --git a/src/sentry/integrations/slack/views/unlink_identity.py b/src/sentry/integrations/slack/views/unlink_identity.py
index 7269fb737e20e9..115affbdcbe2aa 100644
--- a/src/sentry/integrations/slack/views/unlink_identity.py
+++ b/src/sentry/integrations/slack/views/unlink_identity.py
@@ -31,7 +31,7 @@ def build_unlinking_url(
)
-class SlackUnlinkIdentityView(BaseView): # type: ignore
+class SlackUnlinkIdentityView(BaseView):
@transaction_start("SlackUnlinkIdentityView")
@never_cache
def handle(self, request: Request, signed_params: str) -> Response:
diff --git a/src/sentry/integrations/slack/views/unlink_team.py b/src/sentry/integrations/slack/views/unlink_team.py
index 1c3e82ef6769ef..a1d162d6efa2e2 100644
--- a/src/sentry/integrations/slack/views/unlink_team.py
+++ b/src/sentry/integrations/slack/views/unlink_team.py
@@ -34,7 +34,7 @@ def build_team_unlinking_url(
)
-class SlackUnlinkTeamView(BaseView): # type: ignore
+class SlackUnlinkTeamView(BaseView):
@transaction_start("SlackUnlinkIdentityView")
@never_cache
def handle(self, request: Request, signed_params: str) -> Response:
diff --git a/src/sentry/pipeline/views/base.py b/src/sentry/pipeline/views/base.py
index f8eda08dbd951f..024821567ca856 100644
--- a/src/sentry/pipeline/views/base.py
+++ b/src/sentry/pipeline/views/base.py
@@ -12,7 +12,7 @@
from sentry.pipeline.base import Pipeline
-class PipelineView(BaseView, abc.ABC): # type: ignore
+class PipelineView(BaseView, abc.ABC):
"""
A class implementing the PipelineView may be used in a PipelineProviders
get_pipeline_views list.
diff --git a/src/sentry/web/frontend/base.py b/src/sentry/web/frontend/base.py
index 0614071cbe9c2a..2b2591ee62032b 100644
--- a/src/sentry/web/frontend/base.py
+++ b/src/sentry/web/frontend/base.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import logging
-from typing import List, NoReturn, Optional, Tuple
+from typing import Any, Mapping, Protocol
from django.http import (
HttpResponse,
@@ -20,6 +22,8 @@
from sentry.auth import access
from sentry.auth.superuser import is_active_superuser
from sentry.models import Authenticator, Organization, Project, ProjectStatus, Team, TeamStatus
+from sentry.models.avatars.base import AvatarBase
+from sentry.models.user import User
from sentry.services.hybrid_cloud.organization import (
ApiOrganization,
ApiUserOrganizationContext,
@@ -38,15 +42,26 @@
audit_logger = logging.getLogger("sentry.audit.ui")
+class _HasRespond(Protocol):
+ active_organization: ApiUserOrganizationContext | None
+
+ def respond(
+ self, template: str, context: dict[str, Any] | None = None, status: int = 200
+ ) -> HttpResponse:
+ ...
+
+
class OrganizationMixin:
# This attribute will only be set once determine_active_organization is called. Subclasses should likely invoke
# that method, passing along the organization_slug context that might exist (or might not).
- active_organization: Optional[ApiUserOrganizationContext]
+ active_organization: ApiUserOrganizationContext | None
# TODO(dcramer): move the implicit organization logic into its own class
# as it's only used in a single location and over complicates the rest of
# the code
- def determine_active_organization(self, request: Request, organization_slug=None) -> NoReturn:
+ def determine_active_organization(
+ self, request: Request, organization_slug: str | None = None
+ ) -> None:
"""
Using the current request and potentially optional organization_slug, 'determines'
the current session for this mixin object's scope, placing it into the active_organization attribute.
@@ -84,14 +99,13 @@ def determine_active_organization(self, request: Request, organization_slug=None
self.active_organization = active_organization
def _lookup_organizations(
- self, is_implicit: bool, organization_slug: Optional[str], request: Request
- ) -> Tuple[Optional[ApiUserOrganizationContext], Optional[ApiOrganization]]:
- active_organization: Optional[ApiUserOrganizationContext] = self._try_superuser_org_lookup(
+ self, is_implicit: bool, organization_slug: str | None, request: Request
+ ) -> tuple[ApiUserOrganizationContext | None, ApiOrganization | None]:
+ active_organization: ApiUserOrganizationContext | None = self._try_superuser_org_lookup(
organization_slug, request
)
- backup_organization: Optional[ApiOrganization] = None
+ backup_organization: ApiOrganization | None = None
if active_organization is None:
- organizations: List[ApiOrganization]
organizations = organization_service.get_organizations(
user_id=request.user.id, scope=None, only_visible=True
)
@@ -108,11 +122,11 @@ def _try_finding_org_from_slug(
self,
is_implicit: bool,
organization_slug: str,
- organizations: List[ApiOrganization],
+ organizations: list[ApiOrganization],
request: Request,
- ) -> Optional[ApiUserOrganizationContext]:
+ ) -> ApiUserOrganizationContext | None:
try:
- backup_org: Optional[ApiOrganization] = next(
+ backup_org: ApiOrganization | None = next(
o for o in organizations if o.slug == organization_slug
)
except StopIteration:
@@ -133,9 +147,9 @@ def _try_finding_org_from_slug(
return None
def _try_superuser_org_lookup(
- self, organization_slug: str, request: Request
- ) -> Optional[ApiUserOrganizationContext]:
- active_organization: Optional[ApiUserOrganizationContext] = None
+ self, organization_slug: str | None, request: Request
+ ) -> ApiUserOrganizationContext | None:
+ active_organization: ApiUserOrganizationContext | None = None
if organization_slug is not None:
if is_active_superuser(request):
active_organization = organization_service.get_organization_by_slug(
@@ -143,24 +157,28 @@ def _try_superuser_org_lookup(
)
return active_organization
- def _find_implicit_slug(self, request):
+ def _find_implicit_slug(self, request: Request) -> str | None:
organization_slug = request.session.get("activeorg")
if request.subdomain is not None and request.subdomain != organization_slug:
# Customer domain is being used, set the subdomain as the requesting org slug.
organization_slug = request.subdomain
- return organization_slug
+ return organization_slug # type: ignore[no-any-return]
- def is_not_2fa_compliant(self, request: Request, organization):
+ def is_not_2fa_compliant(self, request: Request, organization: ApiOrganization) -> bool:
return (
organization.flags.require_2fa
and not Authenticator.objects.user_has_2fa(request.user)
and not is_active_superuser(request)
)
- def is_member_disabled_from_limit(self, request: Request, organization):
+ def is_member_disabled_from_limit(
+ self, request: Request, organization: ApiOrganization
+ ) -> bool:
return is_member_disabled_from_limit(request, organization)
- def get_active_team(self, request: Request, organization, team_slug):
+ def get_active_team(
+ self, request: Request, organization: ApiOrganization, team_slug: str
+ ) -> Team | None:
"""
Returns the currently selected team for the request or None
if no match.
@@ -175,7 +193,9 @@ def get_active_team(self, request: Request, organization, team_slug):
return team
- def get_active_project(self, request: Request, organization, project_slug):
+ def get_active_project(
+ self, request: Request, organization: ApiOrganization, project_slug: str
+ ) -> Project | None:
try:
project = Project.objects.get(slug=project_slug, organization=organization)
except Project.DoesNotExist:
@@ -186,7 +206,7 @@ def get_active_project(self, request: Request, organization, project_slug):
return project
- def redirect_to_org(self, request: Request):
+ def redirect_to_org(self: _HasRespond, request: Request) -> HttpResponse:
from sentry import features
using_customer_domain = request and is_using_customer_domain(request)
@@ -221,14 +241,21 @@ def redirect_to_org(self, request: Request):
return HttpResponseRedirect(url)
-class BaseView(View, OrganizationMixin):
+class BaseView(View, OrganizationMixin): # type: ignore[misc]
auth_required = True
# TODO(dcramer): change sudo so it can be required only on POST
sudo_required = False
csrf_protect = True
- def __init__(self, auth_required=None, sudo_required=None, csrf_protect=None, *args, **kwargs):
+ def __init__(
+ self,
+ auth_required: bool | None = None,
+ sudo_required: bool | None = None,
+ csrf_protect: bool | None = None,
+ *args: Any,
+ **kwargs: Any,
+ ) -> None:
if auth_required is not None:
self.auth_required = auth_required
if sudo_required is not None:
@@ -237,8 +264,8 @@ def __init__(self, auth_required=None, sudo_required=None, csrf_protect=None, *a
self.csrf_protect = csrf_protect
super().__init__(*args, **kwargs)
- @csrf_exempt
- def dispatch(self, request, *args, **kwargs):
+ @csrf_exempt # type: ignore[misc]
+ def dispatch(self, request: Request, *args: Any, **kwargs: Any) -> Response:
"""
A note on the CSRF protection process.
@@ -294,23 +321,25 @@ def dispatch(self, request, *args, **kwargs):
return self.handle(request, *args, **kwargs)
- def test_csrf(self, request: Request):
+ def test_csrf(self, request: Request) -> HttpResponse:
middleware = CsrfViewMiddleware()
return middleware.process_view(request, self.dispatch, [request], {})
- def get_access(self, request: Request, *args, **kwargs):
+ def get_access(self, request: Request, *args: Any, **kwargs: Any) -> access.Access:
return access.DEFAULT
- def convert_args(self, request: Request, *args, **kwargs):
+ def convert_args(
+ self, request: Request, *args: Any, **kwargs: Any
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
return (args, kwargs)
- def handle(self, request: Request, *args, **kwargs) -> Response:
+ def handle(self, request: Request, *args: Any, **kwargs: Any) -> Response:
return super().dispatch(request, *args, **kwargs)
- def is_auth_required(self, request: Request, *args, **kwargs):
+ def is_auth_required(self, request: Request, *args: Any, **kwargs: Any) -> bool:
return self.auth_required and not (request.user.is_authenticated and request.user.is_active)
- def handle_auth_required(self, request: Request, *args, **kwargs):
+ def handle_auth_required(self, request: Request, *args: Any, **kwargs: Any) -> HttpResponse:
auth.initiate_login(request, next_url=request.get_full_path())
if "organization_slug" in kwargs:
redirect_to = reverse("sentry-auth-organization", args=[kwargs["organization_slug"]])
@@ -318,54 +347,60 @@ def handle_auth_required(self, request: Request, *args, **kwargs):
redirect_to = auth.get_login_url()
return self.redirect(redirect_to, headers={"X-Robots-Tag": "noindex, nofollow"})
- def is_sudo_required(self, request: Request, *args, **kwargs):
+ def is_sudo_required(self, request: Request, *args: Any, **kwargs: Any) -> bool:
return self.sudo_required and not request.is_sudo()
- def handle_sudo_required(self, request: Request, *args, **kwargs):
+ def handle_sudo_required(self, request: Request, *args: Any, **kwargs: Any) -> HttpResponse:
return redirect_to_sudo(request.get_full_path())
- def has_permission(self, request: Request, *args, **kwargs):
+ def has_permission(self, request: Request, *args: Any, **kwargs: Any) -> bool:
return True
- def handle_permission_required(self, request: Request, *args, **kwargs):
+ def handle_permission_required(
+ self, request: Request, *args: Any, **kwargs: Any
+ ) -> HttpResponse:
redirect_uri = self.get_no_permission_url(request, *args, **kwargs)
return self.redirect(redirect_uri)
- def handle_not_2fa_compliant(self, request: Request, *args, **kwargs):
+ def handle_not_2fa_compliant(self, request: Request, *args: Any, **kwargs: Any) -> HttpResponse:
redirect_uri = self.get_not_2fa_compliant_url(request, *args, **kwargs)
return self.redirect(redirect_uri)
- def get_no_permission_url(self, request: Request, *args, **kwargs):
- return reverse("sentry-login")
+ def get_no_permission_url(self, request: Request, *args: Any, **kwargs: Any) -> str:
+ return reverse("sentry-login") # type: ignore[no-any-return]
- def get_not_2fa_compliant_url(self, request: Request, *args, **kwargs):
- return reverse("sentry-account-settings-security")
+ def get_not_2fa_compliant_url(self, request: Request, *args: Any, **kwargs: Any) -> str:
+ return reverse("sentry-account-settings-security") # type: ignore[no-any-return]
- def get_context_data(self, request: Request, **kwargs):
+ def get_context_data(self, request: Request, **kwargs: Any) -> dict[str, Any]:
context = csrf(request)
- return context
+ return context # type: ignore[no-any-return]
- def respond(self, template, context=None, status=200):
+ def respond(
+ self, template: str, context: dict[str, Any] | None = None, status: int = 200
+ ) -> HttpResponse:
default_context = self.default_context
if context:
default_context.update(context)
return render_to_response(template, default_context, self.request, status=status)
- def redirect(self, url, headers=None):
+ def redirect(self, url: str, headers: Mapping[str, str] | None = None) -> HttpResponse:
res = HttpResponseRedirect(url)
if headers:
for k, v in headers.items():
res[k] = v
return res
- def get_team_list(self, user, organization):
- return Team.objects.get_for_user(organization=organization, user=user, with_projects=True)
+ def get_team_list(self, user: User, organization: Organization) -> list[Team]:
+ return Team.objects.get_for_user(organization=organization, user=user, with_projects=True) # type: ignore[no-any-return]
- def create_audit_entry(self, request: Request, transaction_id=None, **kwargs):
+ def create_audit_entry(
+ self, request: Request, transaction_id: int | None = None, **kwargs: Any
+ ) -> object:
return create_audit_entry(request, transaction_id, audit_logger, **kwargs)
- def handle_disabled_member(self, organization):
+ def handle_disabled_member(self, organization: Organization) -> HttpResponse:
redirect_uri = reverse("sentry-organization-disabled-member", args=[organization.slug])
return self.redirect(redirect_uri)
@@ -378,22 +413,22 @@ class OrganizationView(BaseView):
resulting dispatch.
"""
- required_scope = None
+ required_scope: str | None = None
valid_sso_required = True
- def get_access(self, request: Request, organization, *args, **kwargs):
+ def get_access(self, request: Request, organization: Organization, *args: Any, **kwargs: Any) -> access.Access: # type: ignore[override]
if organization is None:
return access.DEFAULT
return access.from_request(request, organization)
- def get_context_data(self, request: Request, organization, **kwargs):
+ def get_context_data(self, request: Request, organization: Organization, **kwargs: Any) -> dict[str, Any]: # type: ignore[override]
context = super().get_context_data(request)
context["organization"] = organization
context["TEAM_LIST"] = self.get_team_list(request.user, organization)
context["ACCESS"] = request.access.to_django_context()
return context
- def has_permission(self, request: Request, organization, *args, **kwargs):
+ def has_permission(self, request: Request, organization: Organization, *args: Any, **kwargs: Any) -> bool: # type: ignore[override]
if organization is None:
return False
if self.valid_sso_required:
@@ -411,7 +446,9 @@ def has_permission(self, request: Request, organization, *args, **kwargs):
return False
return True
- def is_auth_required(self, request: Request, organization_slug=None, *args, **kwargs):
+ def is_auth_required(
+ self, request: Request, organization_slug: str | None = None, *args: Any, **kwargs: Any
+ ) -> bool:
result = super().is_auth_required(request, *args, **kwargs)
if result:
return result
@@ -434,7 +471,7 @@ def is_auth_required(self, request: Request, organization_slug=None, *args, **kw
return False
- def handle_permission_required(self, request: Request, organization, *args, **kwargs):
+ def handle_permission_required(self, request: Request, organization: Organization, *args: Any, **kwargs: Any) -> HttpResponse: # type: ignore[override]
if self.needs_sso(request, organization):
logger.info(
"access.must-sso",
@@ -456,7 +493,7 @@ def handle_permission_required(self, request: Request, organization, *args, **kw
redirect_uri = self.get_no_permission_url(request, *args, **kwargs)
return self.redirect(redirect_uri)
- def needs_sso(self, request: Request, organization):
+ def needs_sso(self, request: Request, organization: Organization) -> bool:
if not organization:
return False
# XXX(dcramer): this branch should really never hit
@@ -472,13 +509,16 @@ def needs_sso(self, request: Request, organization):
return True
return False
- def convert_args(self, request: Request, organization_slug=None, *args, **kwargs):
+ def convert_args(
+ self, request: Request, organization_slug: str | None = None, *args: Any, **kwargs: Any
+ ) -> tuple[tuple[Any, ...], dict[str, Any]]:
# TODO: Extract separate view base classes based on control vs region / monolith,
# with distinct convert_args implementation.
if SiloMode.get_current_mode() == SiloMode.CONTROL:
+ assert self.active_organization is not None
kwargs["organization"] = self.active_organization.organization
else:
- organization: Optional[Organization] = None
+ organization: Organization | None = None
if self.active_organization:
for org in Organization.objects.filter(id=self.active_organization.organization.id):
organization = org
@@ -499,13 +539,13 @@ class ProjectView(OrganizationView):
- project
"""
- def get_context_data(self, request: Request, organization, project, **kwargs):
+ def get_context_data(self, request: Request, organization: Organization, project: Project, **kwargs: Any) -> dict[str, Any]: # type: ignore[override]
context = super().get_context_data(request, organization)
context["project"] = project
context["processing_issues"] = serialize(project).get("processingIssues", 0)
return context
- def has_permission(self, request: Request, organization, project, *args, **kwargs):
+ def has_permission(self, request: Request, organization: Organization, project: Project, *args: Any, **kwargs: Any) -> bool: # type: ignore[override]
if project is None:
return False
rv = super().has_permission(request, organization)
@@ -528,9 +568,9 @@ def has_permission(self, request: Request, organization, project, *args, **kwarg
return False
return True
- def convert_args(self, request: Request, organization_slug, project_slug, *args, **kwargs):
- organization: Optional[Organization] = None
- active_project: Optional[Project] = None
+ def convert_args(self, request: Request, organization_slug: str, project_slug: str, *args: Any, **kwargs: Any) -> tuple[tuple[Any, ...], dict[str, Any]]: # type: ignore[override]
+ organization: Organization | None = None
+ active_project: Project | None = None
if self.active_organization:
for org in Organization.objects.filter(id=self.active_organization.organization.id):
organization = org
@@ -548,10 +588,10 @@ def convert_args(self, request: Request, organization_slug, project_slug, *args,
return (args, kwargs)
-class AvatarPhotoView(View):
- model = None
+class AvatarPhotoView(View): # type: ignore[misc]
+ model: type[AvatarBase]
- def get(self, request: Request, *args, **kwargs) -> Response:
+ def get(self, request: Request, *args: Any, **kwargs: Any) -> Response:
avatar_id = kwargs["avatar_id"]
try:
avatar = self.model.objects.get(ident=avatar_id)
diff --git a/src/sudo/views.py b/src/sudo/views.py
index 3b2395930dc1cc..d8d76bd71cb57e 100644
--- a/src/sudo/views.py
+++ b/src/sudo/views.py
@@ -5,6 +5,8 @@
:copyright: (c) 2020 by Matt Robenolt.
:license: BSD, see LICENSE for more details.
"""
+from __future__ import annotations
+
from urllib.parse import urlparse, urlunparse
from django.contrib.auth.decorators import login_required
@@ -81,7 +83,7 @@ def sudo(request, **kwargs):
return SudoView(**kwargs).dispatch(request)
-def redirect_to_sudo(next_url, sudo_url=None):
+def redirect_to_sudo(next_url: str, sudo_url: str | None = None) -> HttpResponseRedirect:
"""
Redirects the user to the login page, passing the given 'next' page
"""
|
61c9a17f80ea1ed54f500cea13b1850bd800f14f
|
2020-10-30 02:48:22
|
Evan Purkhiser
|
feat(dev): Add pretty formatting option to devserver (#18803)
| false
|
Add pretty formatting option to devserver (#18803)
|
feat
|
diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py
index e13bc51fcc1af4..4eb335e9c693dd 100644
--- a/src/sentry/runner/commands/devserver.py
+++ b/src/sentry/runner/commands/devserver.py
@@ -2,6 +2,7 @@
import click
import six
+import types
from six.moves.urllib.parse import urlparse
import threading
@@ -36,6 +37,9 @@ def _get_daemon(name):
@click.option(
"--prefix/--no-prefix", default=True, help="Show the service name prefix and timestamp"
)
[email protected](
+ "--pretty/--no-pretty", default=False, help="Styleize various outputs from the devserver"
+)
@click.option(
"--styleguide/--no-styleguide",
default=False,
@@ -59,7 +63,16 @@ def _get_daemon(name):
@log_options()
@configuration
def devserver(
- reload, watchers, workers, experimental_spa, styleguide, prefix, environment, debug_server, bind
+ reload,
+ watchers,
+ workers,
+ experimental_spa,
+ styleguide,
+ prefix,
+ pretty,
+ environment,
+ debug_server,
+ bind,
):
"Starts a lightweight web server for development."
@@ -221,9 +234,9 @@ def devserver(
# A better log-format for local dev when running through honcho,
# but if there aren't any other daemons, we don't want to override.
if daemons:
- uwsgi_overrides["log-format"] = '"%(method) %(status) %(uri) %(proto)" %(size)'
+ uwsgi_overrides["log-format"] = "%(method) %(status) %(uri) %(proto) %(size)"
else:
- uwsgi_overrides["log-format"] = '[%(ltime)] "%(method) %(status) %(uri) %(proto)" %(size)'
+ uwsgi_overrides["log-format"] = "[%(ltime)] %(method) %(status) %(uri) %(proto) %(size)"
server = SentryHTTPServer(
host=host, port=port, workers=1, extra_options=uwsgi_overrides, debug=debug_server
@@ -254,7 +267,14 @@ def devserver(
cwd = os.path.realpath(os.path.join(settings.PROJECT_ROOT, os.pardir, os.pardir))
- manager = Manager(Printer(prefix=prefix))
+ honcho_printer = Printer(prefix=prefix)
+
+ if pretty:
+ from sentry.runner.formatting import monkeypatch_honcho_write
+
+ honcho_printer.write = types.MethodType(monkeypatch_honcho_write, honcho_printer)
+
+ manager = Manager(honcho_printer)
for name, cmd in daemons:
manager.add_process(name, list2cmdline(cmd), quiet=False, cwd=cwd)
diff --git a/src/sentry/runner/formatting.py b/src/sentry/runner/formatting.py
new file mode 100644
index 00000000000000..0e03aa85a9a076
--- /dev/null
+++ b/src/sentry/runner/formatting.py
@@ -0,0 +1,106 @@
+# -*- coding: utf8 -*-
+
+from __future__ import absolute_import, print_function
+
+import re
+
+# Sentry colors taken from our design system. Might not look good on all
+# terminal themes tbh
+COLORS = {
+ "white": (255, 255, 255),
+ "green": (77, 199, 13),
+ "orange": (255, 119, 56),
+ "red": (250, 71, 71),
+}
+
+SERVICE_COLORS = {
+ "server": (108, 95, 199),
+ "worker": (255, 194, 39),
+ "webpack": (61, 116, 219),
+ "cron": (255, 86, 124),
+ "relay": (250, 71, 71),
+}
+
+
+def colorize_code(pattern):
+ code = int(pattern.group("code"))
+ method = pattern.group("method")
+
+ style = (COLORS["red"], COLORS["white"])
+
+ if code >= 200 and code < 300:
+ style = (COLORS["green"], COLORS["white"])
+ if code >= 400 and code < 500:
+ style = (COLORS["orange"], COLORS["white"])
+ if code >= 500:
+ style = (COLORS["red"], COLORS["white"])
+
+ return u"{bg}{fg} {code} {reset} {method:4}".format(
+ bg="\x1b[48;2;%s;%s;%sm" % (style[0]),
+ fg="\x1b[38;2;%s;%s;%sm" % (style[1]),
+ reset="\x1b[0m",
+ code=code,
+ method=method,
+ )
+
+
+def colorize_reboot(pattern):
+ return u"{bg}{fg}[ RELOADING ]{reset} {info_fg}{info}".format(
+ bg="\x1b[48;2;%s;%s;%sm" % COLORS["red"],
+ fg="\x1b[38;2;%s;%s;%sm" % COLORS["white"],
+ info_fg="\x1b[38;2;%s;%s;%sm" % COLORS["white"],
+ reset="\x1b[0m",
+ info=pattern.group(0),
+ )
+
+
+def colorize_booted(pattern):
+ return u"{bg}{fg}[ UWSGI READY ]{reset} {info_fg}{info}".format(
+ bg="\x1b[48;2;%s;%s;%sm" % COLORS["green"],
+ fg="\x1b[38;2;%s;%s;%sm" % COLORS["white"],
+ info_fg="\x1b[38;2;%s;%s;%sm" % COLORS["white"],
+ reset="\x1b[0m",
+ info=pattern.group(0),
+ )
+
+
+def colorize_traceback(pattern):
+ return u"{bg} {reset} {info_fg}{info}".format(
+ bg="\x1b[48;2;%s;%s;%sm" % COLORS["red"],
+ info_fg="\x1b[38;2;%s;%s;%sm" % COLORS["red"],
+ reset="\x1b[0m",
+ info=pattern.group(0),
+ )
+
+
+def monkeypatch_honcho_write(self, message):
+ name = message.name if message.name is not None else ""
+ name = name.rjust(self.width)
+
+ if isinstance(message.data, bytes):
+ string = message.data.decode("utf-8", "replace")
+ else:
+ string = message.data
+
+ # Colorize requests
+ string = re.sub(
+ r"(?P<method>GET|POST|PUT|HEAD|DELETE) (?P<code>[0-9]{3})", colorize_code, string
+ )
+ # Colorize reboots
+ string = re.sub(r"Gracefully killing worker [0-9]+ .*\.\.\.", colorize_reboot, string)
+ # Colorize reboot complete
+ string = re.sub(r"WSGI app [0-9]+ \(.*\) ready in [0-9]+ seconds .*", colorize_booted, string)
+ # Mark python tracebacks
+ string = re.sub(r"Traceback \(most recent call last\).*", colorize_traceback, string)
+
+ blank_color = (74, 62, 86)
+
+ prefix = u"{name_fg}{name}{reset} {indicator_bg} {reset} ".format(
+ name=name.ljust(self.width),
+ name_fg="\x1b[38;2;%s;%s;%sm" % SERVICE_COLORS.get(message.name, blank_color),
+ indicator_bg="\x1b[48;2;%s;%s;%sm" % SERVICE_COLORS.get(message.name, blank_color),
+ reset="\x1b[0m",
+ )
+
+ for line in string.splitlines():
+ self.output.write(u"{}{}\n".format(prefix, line))
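
The delivery mechanism for the pretty output is worth noting: rather than subclassing honcho's `Printer`, the commit binds a replacement `write` onto a single instance with `types.MethodType`, leaving the class and all other instances untouched. A self-contained sketch of that instance-level monkeypatch, where this `Printer` is an illustrative stand-in rather than honcho's:

```python
# Hedged sketch: Printer and fancy_write are illustrative stand-ins.
import types


class Printer:
    def write(self, message):
        print(message)


def fancy_write(self, message):
    # Bound like a real method, so `self` is the patched instance.
    print("\x1b[38;2;108;95;199m%s\x1b[0m" % message)  # 24-bit ANSI color


printer = Printer()
# Only this instance is patched; the class and other instances keep write().
printer.write = types.MethodType(fancy_write, printer)
printer.write("server ready")
```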
|
4f6ee73456d6eff4793a03ffa1a38d2c03214de5
|
2019-05-02 00:45:45
|
Lyn Nagara
|
fix(link): Fix link proptype (#13014)
| false
|
Fix link proptype (#13014)
|
fix
|
diff --git a/src/sentry/static/sentry/app/components/links/link.jsx b/src/sentry/static/sentry/app/components/links/link.jsx
index 0541e127194311..7eb22c9e3fc2f5 100644
--- a/src/sentry/static/sentry/app/components/links/link.jsx
+++ b/src/sentry/static/sentry/app/components/links/link.jsx
@@ -8,7 +8,7 @@ import {Link as RouterLink} from 'react-router';
*/
class Link extends React.Component {
static propTypes = {
- to: PropTypes.string,
+ to: PropTypes.oneOfType([PropTypes.string, PropTypes.object]),
href: PropTypes.string,
};
|
5c47a5bbd44793bfde31e2838caff3912d26ebda
|
2024-04-02 23:44:35
|
David Wang
|
ref(alerts): Remove unused project prop (#68035)
| false
|
Remove unused project prop (#68035)
|
ref
|
diff --git a/static/app/components/selectMembers/index.tsx b/static/app/components/selectMembers/index.tsx
index 59b9eb603efe49..47dd111eafca58 100644
--- a/static/app/components/selectMembers/index.tsx
+++ b/static/app/components/selectMembers/index.tsx
@@ -8,7 +8,7 @@ import IdBadge from 'sentry/components/idBadge';
import {Tooltip} from 'sentry/components/tooltip';
import {t} from 'sentry/locale';
import MemberListStore from 'sentry/stores/memberListStore';
-import type {Member, Organization, Project, User} from 'sentry/types';
+import type {Member, Organization, User} from 'sentry/types';
import withApi from 'sentry/utils/withApi';
const getSearchKeyForUser = (user: User) =>
@@ -34,7 +34,6 @@ type Props = {
disabled?: boolean;
onInputChange?: (value: any) => any;
placeholder?: string;
- project?: Project;
styles?: {control?: (provided: any) => any};
};
diff --git a/static/app/views/alerts/rules/issue/memberTeamFields.tsx b/static/app/views/alerts/rules/issue/memberTeamFields.tsx
index 0659d9cd9bbcbf..b6e0141b6d5c45 100644
--- a/static/app/views/alerts/rules/issue/memberTeamFields.tsx
+++ b/static/app/views/alerts/rules/issue/memberTeamFields.tsx
@@ -106,7 +106,6 @@ class MemberTeamFields extends Component<Props> {
<SelectMembers
disabled={disabled}
key={teamSelected ? teamValue : memberValue}
- project={project}
organization={organization}
// The value from the endpoint is of type `number`, `SelectMembers` require value to be of type `string`
value={`${ruleData.targetIdentifier}`}
diff --git a/static/app/views/alerts/rules/metric/triggers/actionsPanel/actionTargetSelector.tsx b/static/app/views/alerts/rules/metric/triggers/actionsPanel/actionTargetSelector.tsx
index 8c6694fea61e60..ad67b68f9751e1 100644
--- a/static/app/views/alerts/rules/metric/triggers/actionsPanel/actionTargetSelector.tsx
+++ b/static/app/views/alerts/rules/metric/triggers/actionsPanel/actionTargetSelector.tsx
@@ -66,7 +66,6 @@ export default function ActionTargetSelector(props: Props) {
<SelectMembers
disabled={disabled}
key="member"
- project={project}
organization={organization}
value={action.targetIdentifier}
onChange={handleChangeTargetIdentifier}
|
94766f889ecc270a3b0727754ca0dcad726dc7b4
|
2019-06-04 12:43:24
|
Radu Woinaroski
|
ref(store): CORS pre-flight cleanup (#13421)
| false
|
CORS pre-flight cleanup (#13421)
|
ref
|
diff --git a/src/sentry/web/api.py b/src/sentry/web/api.py
index 4037478bb980ad..9f081032415654 100644
--- a/src/sentry/web/api.py
+++ b/src/sentry/web/api.py
@@ -32,18 +32,23 @@
from sentry.attachments import CachedAttachment
from sentry.coreapi import (
Auth, APIError, APIForbidden, APIRateLimited, ClientApiHelper, ClientAuthHelper,
- SecurityAuthHelper, MinidumpAuthHelper, safely_load_json_string, logger as api_logger
+ SecurityAuthHelper, MinidumpAuthHelper, safely_load_json_string, logger as api_logger,
)
from sentry.event_manager import EventManager
from sentry.interfaces import schemas
from sentry.interfaces.base import get_interface
-from sentry.lang.native.unreal import process_unreal_crash, merge_apple_crash_report, \
- unreal_attachment_type, merge_unreal_context_event, merge_unreal_logs_event
-from sentry.lang.native.minidump import merge_attached_event, merge_attached_breadcrumbs, write_minidump_placeholder, \
- MINIDUMP_ATTACHMENT_TYPE
+from sentry.lang.native.unreal import (
+ process_unreal_crash, merge_apple_crash_report,
+ unreal_attachment_type, merge_unreal_context_event, merge_unreal_logs_event,
+)
+from sentry.lang.native.minidump import (
+ merge_attached_event, merge_attached_breadcrumbs, write_minidump_placeholder,
+ MINIDUMP_ATTACHMENT_TYPE,
+)
from sentry.models import Project, OrganizationOption, Organization, File, EventAttachment, Event
from sentry.signals import (
- event_accepted, event_dropped, event_filtered, event_received)
+ event_accepted, event_dropped, event_filtered, event_received,
+)
from sentry.quotas.base import RateLimit
from sentry.utils import json, metrics
from sentry.utils.data_filters import FilterStatKeys
@@ -52,6 +57,7 @@
is_valid_origin,
get_origins,
is_same_domain,
+ origin_from_request,
)
from sentry.utils.outcomes import Outcome, track_outcome
from sentry.utils.pubsub import QueuedPublisherService, KafkaPublisher
@@ -77,6 +83,47 @@
) if getattr(settings, 'KAFKA_RAW_EVENTS_PUBLISHER_ENABLED', False) else None
+def allow_cors_options(func):
+ """
+ Decorator that adds automatic handling of OPTIONS requests for CORS
+
+ If the request is OPTIONS (i.e. a CORS pre-flight) construct a 200 response
+ in which we explicitly allow the caller and add the custom headers that we support
+ :param func: the original request handler
+ :return: a request handler that shortcuts OPTIONS requests and just returns an OK (CORS allowed)
+ """
+
+ @wraps(func)
+ def allow_cors_options_wrapper(self, request, *args, **kwargs):
+
+ if request.method == 'OPTIONS':
+ response = HttpResponse(status=200)
+ response['Access-Control-Max-Age'] = '3600' # don't ask for options again for 1 hour
+ else:
+ response = func(self, request, *args, **kwargs)
+
+ allow = ', '.join(self._allowed_methods())
+ response['Allow'] = allow
+ response['Access-Control-Allow-Methods'] = allow
+ response['Access-Control-Allow-Headers'] = 'X-Sentry-Auth, X-Requested-With, Origin, Accept, ' \
+ 'Content-Type, Authentication'
+ response['Access-Control-Expose-Headers'] = 'X-Sentry-Error, Retry-After'
+
+ if request.META.get('HTTP_ORIGIN') == 'null':
+ origin = 'null' # if ORIGIN header is explicitly specified as 'null' leave it alone
+ else:
+ origin = origin_from_request(request)
+
+ if origin is None or origin == 'null':
+ response['Access-Control-Allow-Origin'] = '*'
+ else:
+ response['Access-Control-Allow-Origin'] = origin
+
+ return response
+
+ return allow_cors_options_wrapper
+
+
def api(func):
@wraps(func)
def wrapped(request, *args, **kwargs):
@@ -167,7 +214,7 @@ def process_event(event_manager, project, key, remote_addr, helper, attachments)
# TODO(dcramer): ideally we'd only validate this if the event_id was
# supplied by the user
- cache_key = 'ev:%s:%s' % (project.id, event_id, )
+ cache_key = 'ev:%s:%s' % (project.id, event_id,)
if cache.get(cache_key) is not None:
track_outcome(
@@ -179,7 +226,7 @@ def process_event(event_manager, project, key, remote_addr, helper, attachments)
event_id=event_id
)
raise APIForbidden(
- 'An event with the same ID already exists (%s)' % (event_id, ))
+ 'An event with the same ID already exists (%s)' % (event_id,))
scrub_ip_address = (org_options.get('sentry:require_scrub_ip_address', False) or
project.get_option('sentry:scrub_ip_address', False))
@@ -301,13 +348,13 @@ def _publish_to_kafka(self, request):
@csrf_exempt
@never_cache
+ @allow_cors_options
def dispatch(self, request, project_id=None, *args, **kwargs):
helper = ClientApiHelper(
agent=request.META.get('HTTP_USER_AGENT'),
project_id=project_id,
ip_address=request.META['REMOTE_ADDR'],
)
- origin = None
if kafka_publisher is not None:
self._publish_to_kafka(request)
@@ -349,7 +396,7 @@ def dispatch(self, request, project_id=None, *args, **kwargs):
# tsdb could optimize this
metrics.incr('client-api.all-versions.requests', skip_internal=False)
metrics.incr('client-api.all-versions.responses.%s' %
- (response.status_code, ), skip_internal=False)
+ (response.status_code,), skip_internal=False)
metrics.incr(
'client-api.all-versions.responses.%sxx' % (six.text_type(response.status_code)[0],),
skip_internal=False,
@@ -357,7 +404,7 @@ def dispatch(self, request, project_id=None, *args, **kwargs):
if helper.context.version:
metrics.incr(
- 'client-api.v%s.requests' % (helper.context.version, ),
+ 'client-api.v%s.requests' % (helper.context.version,),
skip_internal=False,
)
metrics.incr(
@@ -370,19 +417,6 @@ def dispatch(self, request, project_id=None, *args, **kwargs):
skip_internal=False,
)
- if response.status_code != 200 and origin:
- # We allow all origins on errors
- response['Access-Control-Allow-Origin'] = '*'
-
- if origin:
- response['Access-Control-Allow-Headers'] = \
- 'X-Sentry-Auth, X-Requested-With, Origin, Accept, ' \
- 'Content-Type, Authentication'
- response['Access-Control-Allow-Methods'] = \
- ', '.join(self._allowed_methods())
- response['Access-Control-Expose-Headers'] = \
- 'X-Sentry-Error, Retry-After'
-
return response
def _dispatch(self, request, helper, project_id=None, origin=None, *args, **kwargs):
@@ -403,47 +437,31 @@ def _dispatch(self, request, helper, project_id=None, origin=None, *args, **kwar
None,
Outcome.INVALID,
FilterStatKeys.CORS)
- raise APIForbidden('Invalid origin: %s' % (origin, ))
+ raise APIForbidden('Invalid origin: %s' % (origin,))
- # XXX: It seems that the OPTIONS call does not always include custom headers
- if request.method == 'OPTIONS':
- response = self.options(request, project)
- else:
- auth = self._parse_header(request, helper, project)
+ auth = self._parse_header(request, helper, project)
- key = helper.project_key_from_auth(auth)
+ key = helper.project_key_from_auth(auth)
- # Legacy API was /api/store/ and the project ID was only available elsewhere
- if not project:
- project = Project.objects.get_from_cache(id=key.project_id)
- helper.context.bind_project(project)
- elif key.project_id != project.id:
- raise APIError('Two different projects were specified')
-
- helper.context.bind_auth(auth)
-
- # Explicitly bind Organization so we don't implicitly query it later
- # this just allows us to comfortably assure that `project.organization` is safe.
- # This also allows us to pull the object from cache, instead of being
- # implicitly fetched from database.
- project.organization = Organization.objects.get_from_cache(
- id=project.organization_id)
-
- response = super(APIView, self).dispatch(
- request=request, project=project, auth=auth, helper=helper, key=key, **kwargs
- )
+ # Legacy API was /api/store/ and the project ID was only available elsewhere
+ if not project:
+ project = Project.objects.get_from_cache(id=key.project_id)
+ helper.context.bind_project(project)
+ elif key.project_id != project.id:
+ raise APIError('Two different projects were specified')
- if origin:
- if origin == 'null':
- # If an Origin is `null`, but we got this far, that means
- # we've gotten past our CORS check for some reason. But the
- # problem is that we can't return "null" as a valid response
- # to `Access-Control-Allow-Origin` and we don't have another
- # value to work with, so just allow '*' since they've gotten
- # this far.
- response['Access-Control-Allow-Origin'] = '*'
- else:
- response['Access-Control-Allow-Origin'] = origin
+ helper.context.bind_auth(auth)
+
+ # Explicitly bind Organization so we don't implicitly query it later
+ # this just allows us to comfortably assure that `project.organization` is safe.
+ # This also allows us to pull the object from cache, instead of being
+ # implicitly fetched from database.
+ project.organization = Organization.objects.get_from_cache(
+ id=project.organization_id)
+
+ response = super(APIView, self).dispatch(
+ request=request, project=project, auth=auth, helper=helper, key=key, **kwargs
+ )
return response
@@ -452,10 +470,15 @@ def _allowed_methods(self):
return [m.upper() for m in self.http_method_names if hasattr(self, m)]
def options(self, request, *args, **kwargs):
- response = HttpResponse()
- response['Allow'] = ', '.join(self._allowed_methods())
- response['Content-Length'] = '0'
- return response
+ """
+ Serves requests for OPTIONS
+
+ NOTE: This function is not called since it is short-circuited by the @allow_cors_options decorator.
+ It is nevertheless used to construct the allowed http methods and it should not be removed.
+ """
+ raise NotImplementedError("Options request should have been handled by @allow_cors_options.\n"
+ "If dispatch was overridden either decorate it with @allow_cors_options or provide "
+ "a valid implementation for options.")
class StoreView(APIView):
@@ -832,7 +855,7 @@ def post(self, request, project, **kwargs):
# Endpoint used by the Unreal Engine 4 (UE4) Crash Reporter.
class UnrealView(StoreView):
- content_types = ('application/octet-stream', )
+ content_types = ('application/octet-stream',)
def _dispatch(self, request, helper, sentry_key, project_id=None, origin=None, *args, **kwargs):
if request.method != 'POST':
diff --git a/tests/sentry/web/api/tests.py b/tests/sentry/web/api/tests.py
index 3da13546fa567d..a97c58e60fc153 100644
--- a/tests/sentry/web/api/tests.py
+++ b/tests/sentry/web/api/tests.py
@@ -192,40 +192,30 @@ def test_options_response(self, parse_header):
self.assertIn('Content-Length', resp)
self.assertEquals(resp['Content-Length'], '0')
- @mock.patch('sentry.web.api.is_valid_origin', mock.Mock(return_value=False))
- def test_options_response_with_invalid_origin(self):
- resp = self.client.options(self.path, HTTP_ORIGIN='http://foo.com')
- assert resp.status_code == 403, (resp.status_code, resp.content)
- self.assertIn('Access-Control-Allow-Origin', resp)
- self.assertEquals(resp['Access-Control-Allow-Origin'], '*')
- self.assertIn('X-Sentry-Error', resp)
- assert resp['X-Sentry-Error'] == "Invalid origin: http://foo.com"
- assert json.loads(resp.content)['error'] == resp['X-Sentry-Error']
-
- @mock.patch('sentry.web.api.is_valid_origin', mock.Mock(return_value=False))
- def test_options_response_with_invalid_referrer(self):
- resp = self.client.options(self.path, HTTP_REFERER='http://foo.com')
- assert resp.status_code == 403, (resp.status_code, resp.content)
+ def test_options_with_no_origin_or_referrer(self):
+ resp = self.client.options(self.path)
+ assert resp.status_code == 200, (resp.status_code, resp.content)
self.assertIn('Access-Control-Allow-Origin', resp)
self.assertEquals(resp['Access-Control-Allow-Origin'], '*')
- self.assertIn('X-Sentry-Error', resp)
- assert resp['X-Sentry-Error'] == "Invalid origin: http://foo.com"
- assert json.loads(resp.content)['error'] == resp['X-Sentry-Error']
- @mock.patch('sentry.web.api.is_valid_origin', mock.Mock(return_value=True))
def test_options_response_with_valid_origin(self):
resp = self.client.options(self.path, HTTP_ORIGIN='http://foo.com')
assert resp.status_code == 200, (resp.status_code, resp.content)
self.assertIn('Access-Control-Allow-Origin', resp)
self.assertEquals(resp['Access-Control-Allow-Origin'], 'http://foo.com')
- @mock.patch('sentry.web.api.is_valid_origin', mock.Mock(return_value=True))
def test_options_response_with_valid_referrer(self):
resp = self.client.options(self.path, HTTP_REFERER='http://foo.com')
assert resp.status_code == 200, (resp.status_code, resp.content)
self.assertIn('Access-Control-Allow-Origin', resp)
self.assertEquals(resp['Access-Control-Allow-Origin'], 'http://foo.com')
+ def test_options_response_origin_preferred_over_referrer(self):
+ resp = self.client.options(self.path, HTTP_REFERER='http://foo.com', HTTP_ORIGIN='http://bar.com')
+ assert resp.status_code == 200, (resp.status_code, resp.content)
+ self.assertIn('Access-Control-Allow-Origin', resp)
+ self.assertEquals(resp['Access-Control-Allow-Origin'], 'http://bar.com')
+
@mock.patch('sentry.event_manager.is_valid_ip', mock.Mock(return_value=False))
def test_request_with_blacklisted_ip(self):
resp = self._postWithHeader({})
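
The refactor's core idea is a decorator that short-circuits OPTIONS pre-flights and stamps CORS headers on every response on the way out, so individual handlers no longer deal with origins. A trimmed, hedged sketch of that shape follows; the real decorator above also sets `Allow`, the allow-headers list, and special-cases the literal `'null'` origin.

```python
# Minimal sketch of the OPTIONS-short-circuiting CORS decorator.
from functools import wraps

from django.http import HttpResponse


def allow_cors_options(func):
    @wraps(func)
    def wrapper(self, request, *args, **kwargs):
        if request.method == "OPTIONS":
            # Pre-flight: answer immediately without calling the handler.
            response = HttpResponse(status=200)
            response["Access-Control-Max-Age"] = "3600"
        else:
            response = func(self, request, *args, **kwargs)

        # Echo the caller's origin when present, otherwise allow any origin.
        origin = request.META.get("HTTP_ORIGIN")
        response["Access-Control-Allow-Origin"] = origin or "*"
        return response

    return wrapper
```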
|
f1e0408e4561b648b22d7fc155890e91db6304cb
|
2022-07-26 01:24:00
|
Scott Cooper
|
feat(issues): Remove issue id breadcrumbs flag (#36940)
| false
|
Remove issue id breadcrumbs flag (#36940)
|
feat
|
diff --git a/static/app/views/organizationGroupDetails/header.tsx b/static/app/views/organizationGroupDetails/header.tsx
index a59504c549127f..5520ba21e3fc54 100644
--- a/static/app/views/organizationGroupDetails/header.tsx
+++ b/static/app/views/organizationGroupDetails/header.tsx
@@ -18,10 +18,8 @@ import EventAnnotation from 'sentry/components/events/eventAnnotation';
import EventMessage from 'sentry/components/events/eventMessage';
import InboxReason from 'sentry/components/group/inboxBadges/inboxReason';
import UnhandledInboxTag from 'sentry/components/group/inboxBadges/unhandledTag';
-import IdBadge from 'sentry/components/idBadge';
import ProjectBadge from 'sentry/components/idBadge/projectBadge';
import * as Layout from 'sentry/components/layouts/thirds';
-import ExternalLink from 'sentry/components/links/externalLink';
import Link from 'sentry/components/links/link';
import ListLink from 'sentry/components/links/listLink';
import NavTabs from 'sentry/components/navTabs';
@@ -151,7 +149,6 @@ class GroupHeader extends Component<Props, State> {
const hasGroupingTreeUI = organizationFeatures.has('grouping-tree-ui');
const hasSimilarView = projectFeatures.has('similarity-view');
const hasEventAttachments = organizationFeatures.has('event-attachments');
- const hasIssueIdBreadcrumbs = organizationFeatures.has('issue-id-breadcrumbs');
let className = 'group-detail';
@@ -164,7 +161,6 @@ class GroupHeader extends Component<Props, State> {
}
const {memberList} = this.state;
- const orgId = organization.slug;
const message = getMessage(group);
const searchTermWithoutQuery = omit(location.query, 'query');
@@ -203,23 +199,18 @@ class GroupHeader extends Component<Props, State> {
<div className={className}>
<StyledBreadcrumbs
crumbs={[
- {label: 'Issues', to: `/organizations/${orgId}/issues/${location.search}`},
{
- label: hasIssueIdBreadcrumbs ? shortIdBreadCrumb : t('Issue Details'),
+ label: 'Issues',
+ to: `/organizations/${organization.slug}/issues/${location.search}`,
+ },
+ {
+ label: shortIdBreadCrumb,
},
]}
/>
<div className="row">
<div className="col-sm-7">
<TitleWrapper>
- {!hasIssueIdBreadcrumbs && (
- <StyledIdBadge
- project={project}
- avatarSize={24}
- hideName
- avatarProps={{hasTooltip: true, tooltip: project.slug}}
- />
- )}
<h3>
<EventOrGroupTitle hasGuideAnchor data={group} />
</h3>
@@ -240,7 +231,7 @@ class GroupHeader extends Component<Props, State> {
<EventAnnotationWithSpace>
<Link
to={{
- pathname: `/organizations/${orgId}/issues/`,
+ pathname: `/organizations/${organization.slug}/issues/`,
query: {query: 'logger:' + group.logger},
}}
>
@@ -254,111 +245,41 @@ class GroupHeader extends Component<Props, State> {
</StyledTagAndMessageWrapper>
</div>
- {hasIssueIdBreadcrumbs ? (
- <StatsWrapper>
- <div className="count align-right m-l-1">
- <h6 className="nav-header">{t('Events')}</h6>
- {disableActions ? (
+ <StatsWrapper>
+ <div className="count align-right m-l-1">
+ <h6 className="nav-header">{t('Events')}</h6>
+ {disableActions ? (
+ <Count className="count" value={group.count} />
+ ) : (
+ <Link to={eventRouteToObject}>
<Count className="count" value={group.count} />
+ </Link>
+ )}
+ </div>
+ <div className="count align-right m-l-1">
+ <h6 className="nav-header">{t('Users')}</h6>
+ {userCount !== 0 ? (
+ disableActions ? (
+ <Count className="count" value={userCount} />
) : (
- <Link to={eventRouteToObject}>
- <Count className="count" value={group.count} />
- </Link>
- )}
- </div>
- <div className="count align-right m-l-1">
- <h6 className="nav-header">{t('Users')}</h6>
- {userCount !== 0 ? (
- disableActions ? (
+ <Link to={`${baseUrl}tags/user/${location.search}`}>
<Count className="count" value={userCount} />
- ) : (
- <Link to={`${baseUrl}tags/user/${location.search}`}>
- <Count className="count" value={userCount} />
- </Link>
- )
- ) : (
- <span>0</span>
- )}
- </div>
- <div className="assigned-to m-l-1">
- <h6 className="nav-header">{t('Assignee')}</h6>
- <AssigneeSelector
- id={group.id}
- memberList={memberList}
- disabled={disableActions}
- onAssign={this.trackAssign}
- />
- </div>
- </StatsWrapper>
- ) : (
- <div className="col-sm-5 stats">
- <div className="flex flex-justify-right">
- {group.shortId && (
- <GuideAnchor target="issue_number" position="bottom">
- <div className="short-id-box count align-right">
- <h6 className="nav-header">
- <Tooltip
- className="help-link"
- showUnderline
- title={t(
- 'This identifier is unique across your organization, and can be used to reference an issue in various places, like commit messages.'
- )}
- position="bottom"
- >
- <ExternalLink href="https://docs.sentry.io/product/integrations/source-code-mgmt/github/#resolve-via-commit-or-pull-request">
- {t('Issue #')}
- </ExternalLink>
- </Tooltip>
- </h6>
- <ShortId
- shortId={group.shortId}
- avatar={
- <StyledProjectBadge
- project={project}
- avatarSize={20}
- hideName
- />
- }
- />
- </div>
- </GuideAnchor>
- )}
- <div className="count align-right m-l-1">
- <h6 className="nav-header">{t('Events')}</h6>
- {disableActions ? (
- <Count className="count" value={group.count} />
- ) : (
- <Link to={eventRouteToObject}>
- <Count className="count" value={group.count} />
- </Link>
- )}
- </div>
- <div className="count align-right m-l-1">
- <h6 className="nav-header">{t('Users')}</h6>
- {userCount !== 0 ? (
- disableActions ? (
- <Count className="count" value={userCount} />
- ) : (
- <Link to={`${baseUrl}tags/user/${location.search}`}>
- <Count className="count" value={userCount} />
- </Link>
- )
- ) : (
- <span>0</span>
- )}
- </div>
- <div className="assigned-to m-l-1">
- <h6 className="nav-header">{t('Assignee')}</h6>
- <AssigneeSelector
- id={group.id}
- memberList={memberList}
- disabled={disableActions}
- onAssign={this.trackAssign}
- />
- </div>
- </div>
+ </Link>
+ )
+ ) : (
+ <span>0</span>
+ )}
</div>
- )}
+ <div className="assigned-to m-l-1">
+ <h6 className="nav-header">{t('Assignee')}</h6>
+ <AssigneeSelector
+ id={group.id}
+ memberList={memberList}
+ disabled={disableActions}
+ onAssign={this.trackAssign}
+ />
+ </div>
+ </StatsWrapper>
</div>
<SeenByList
seenBy={group.seenBy}
@@ -476,10 +397,6 @@ const IssueBreadcrumbWrapper = styled('div')`
align-items: center;
`;
-const StyledIdBadge = styled(IdBadge)`
- margin-right: ${space(1)};
-`;
-
const StyledTooltip = styled(Tooltip)`
display: flex;
`;
|
40a563d9300c94da609269e145b33ce0a41428d4
|
2024-01-12 23:49:59
|
Ryan Albrecht
|
feat(feedback): UI update to drop the quote mark for design (#63126)
| false
|
UI update to drop the quote mark for design (#63126)
|
feat
|
diff --git a/static/app/components/feedback/feedbackItem/feedbackItem.tsx b/static/app/components/feedback/feedbackItem/feedbackItem.tsx
index c03625d3c29c54..73dfa01bc5f62e 100644
--- a/static/app/components/feedback/feedbackItem/feedbackItem.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackItem.tsx
@@ -130,14 +130,6 @@ const Blockquote = styled('blockquote')`
margin: 0 ${space(4)};
position: relative;
- &::before {
- position: absolute;
- color: ${p => p.theme.purple300};
- content: '❝';
- font-size: ${space(4)};
- left: -${space(4)};
- top: -0.4rem;
- }
&::after {
position: absolute;
border: 1px solid ${p => p.theme.purple300};
|
11ddeab3784a755b1d3f03370eb589b7218adafc
|
2024-03-22 00:38:10
|
Seiji Chew
|
chore(staff): Remove logs and separate u2f state in session (#67405)
| false
|
Remove logs and separate u2f state in session (#67405)
|
chore
|
diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py
index 75722486e094d9..cbd830a8210855 100644
--- a/src/sentry/api/endpoints/auth_index.py
+++ b/src/sentry/api/endpoints/auth_index.py
@@ -27,7 +27,6 @@
from sentry.utils.settings import is_self_hosted
logger: logging.Logger = logging.getLogger(__name__)
-getsentry_logger = logging.getLogger("getsentry.staff_auth_index")
PREFILLED_SU_MODAL_KEY = "prefilled_su_modal"
@@ -80,14 +79,6 @@ def _verify_user_via_inputs(validator: AuthVerifyValidator, request: Request) ->
challenge = json.loads(validator.validated_data["challenge"])
response = json.loads(validator.validated_data["response"])
authenticated = interface.validate_response(request, challenge, response)
- getsentry_logger.info(
- "verify.user.inputs",
- extra={
- "user": request.user.id,
- "authenticated": authenticated,
- "validator": validator.validated_data,
- },
- )
if not authenticated:
logger.warning(
"u2f_authentication.verification_failed",
@@ -114,10 +105,6 @@ def _verify_user_via_inputs(validator: AuthVerifyValidator, request: Request) ->
if authenticated:
metrics.incr("auth.password.success", sample_rate=1.0, skip_internal=False)
return authenticated
- getsentry_logger.info(
- "verify.user.inputs.failed",
- extra={"user": request.user.id, "validator": validator.validated_data},
- )
return False
diff --git a/src/sentry/auth/authenticators/u2f.py b/src/sentry/auth/authenticators/u2f.py
index fb3f24240ab5bf..e28ecea9605ea4 100644
--- a/src/sentry/auth/authenticators/u2f.py
+++ b/src/sentry/auth/authenticators/u2f.py
@@ -1,6 +1,4 @@
-import logging
from base64 import urlsafe_b64encode
-from datetime import UTC, datetime, timedelta
from functools import cached_property
from time import time
from urllib.parse import urlparse
@@ -19,7 +17,6 @@
from sentry import options
from sentry.auth.authenticators.base import EnrollmentStatus
-from sentry.silo.base import SiloMode
from sentry.utils import json
from sentry.utils.dates import to_datetime
from sentry.utils.decorators import classproperty
@@ -27,11 +24,6 @@
from .base import ActivationChallengeResult, AuthenticatorInterface
-logger = logging.getLogger("sentry.auth.u2f")
-
-# The maximum time the staff auth flow flag can stay alive on the request session
-STAFF_AUTH_FLOW_MAX_AGE = timedelta(minutes=2)
-
def decode_credential_id(device):
return urlsafe_b64encode(device["binding"].credential_data.credential_id).decode("ascii")
@@ -48,35 +40,6 @@ def _get_url_prefix() -> str:
return options.get("system.url-prefix")
-def _valid_staff_timestamp(request, limit: timedelta = STAFF_AUTH_FLOW_MAX_AGE) -> bool:
- """
- Returns whether or not the staff timestamp exists and is valid within the
- timedelta. If the timestamp is invalid, it is removed from the session.
- """
- timestamp = request.session.get("staff_auth_flow")
- if not timestamp:
- return False
-
- flag_datetime = datetime.fromtimestamp(timestamp, UTC)
- current_time = datetime.now(UTC)
- time_difference = flag_datetime - current_time
- logger.info(
- "Validating staff timestamp",
- extra={
- "user": request.user.id,
- "current_time": current_time,
- "flag_datetime": flag_datetime,
- "time_difference": flag_datetime - current_time,
- "boolean_check": timedelta(0) < time_difference < limit,
- "active_silo": SiloMode.get_current_mode(),
- },
- )
-
- # For a valid timestamp, the time difference must be positive (timestamp is in the future)
- # and less than the limit (timestamp is within the valid limit, e.g. within the last 2 minutes)
- return timedelta(0) < time_difference < limit
-
-
class U2fInterface(AuthenticatorInterface):
type = 3
interface_id = "u2f"
@@ -239,76 +202,14 @@ def activate(self, request: HttpRequest) -> ActivationChallengeResult:
challenge, state = self.webauthn_authentication_server.authenticate_begin(
credentials=credentials
)
- logger.info(
- "U2F activate",
- extra={
- "user": request.user.id,
- "staff_flag": (
- datetime.fromtimestamp(request.session["staff_auth_flow"], UTC)
- if "staff_auth_flow" in request.session
- else "missing"
- ),
- "active_silo": SiloMode.get_current_mode(),
- },
- )
- # It is an intentional decision to not check whether or not the staff
- # timestamp is valid here if it exists. The reason for this is we prefer
- # the failure to occur and present itself when tapping the U2F device,
- # not immediately upon generating the challenge/response.
- if request.session.get("staff_auth_flow"):
- request.session["staff_webauthn_authentication_state"] = state
- else:
- request.session["webauthn_authentication_state"] = state
-
- logger.info(
- "U2F activate after setting state",
- extra={
- "user": request.user.id,
- "staff_flag": (
- datetime.fromtimestamp(request.session["staff_auth_flow"], UTC)
- if "staff_auth_flow" in request.session
- else "missing"
- ),
- "has_state": "webauthn_authentication_state" in request.session,
- "has_staff_state": "staff_webauthn_authentication_state" in request.session,
- "active_silo": SiloMode.get_current_mode(),
- },
- )
+ request.session["webauthn_authentication_state"] = state
return ActivationChallengeResult(challenge=cbor.encode(challenge["publicKey"]))
def validate_response(self, request: HttpRequest, challenge, response):
try:
credentials = self.credentials()
- if hasattr(request, "user") and request.user.is_staff:
- logger.info(
- "Validating U2F for staff",
- extra={
- "user": request.user.id,
- "staff_flag": (
- datetime.fromtimestamp(request.session["staff_auth_flow"], UTC)
- if "staff_auth_flow" in request.session
- else "missing"
- ),
- "has_state": "webauthn_authentication_state" in request.session,
- "has_staff_state": "staff_webauthn_authentication_state" in request.session,
- "active_silo": SiloMode.get_current_mode(),
- },
- )
- if _valid_staff_timestamp(request):
- state = request.session["staff_webauthn_authentication_state"]
- else:
- state = request.session["webauthn_authentication_state"]
- if request.session.get("staff_webauthn_authentication_state") and request.session.get(
- "webauthn_authentication_state"
- ):
- logger.info(
- "Both staff and non-staff U2F states are set",
- extra={
- "user": request.user.id,
- },
- )
self.webauthn_authentication_server.authenticate_complete(
- state=state,
+ state=request.session.get("webauthn_authentication_state"),
credentials=credentials,
credential_id=websafe_decode(response["keyHandle"]),
client_data=ClientData(websafe_decode(response["clientData"])),
@@ -320,6 +221,4 @@ def validate_response(self, request: HttpRequest, challenge, response):
finally:
# Cleanup the U2F state from the session
request.session.pop("webauthn_authentication_state", None)
- request.session.pop("staff_webauthn_authentication_state", None)
- request.session.pop("staff_auth_flow", None)
return True
diff --git a/tests/sentry/auth/authenticators/test_u2f.py b/tests/sentry/auth/authenticators/test_u2f.py
index dd5067c90713f7..16eda50c9b435b 100644
--- a/tests/sentry/auth/authenticators/test_u2f.py
+++ b/tests/sentry/auth/authenticators/test_u2f.py
@@ -1,4 +1,3 @@
-from datetime import datetime, timedelta
from unittest.mock import Mock
from fido2 import cbor
@@ -10,17 +9,11 @@
from sentry.auth.authenticators.base import ActivationChallengeResult
from sentry.auth.authenticators.u2f import U2fInterface
from sentry.testutils.cases import TestCase
-from sentry.testutils.helpers.datetime import freeze_time
from sentry.testutils.silo import control_silo_test
@control_silo_test
class U2FInterfaceTest(TestCase):
- CURRENT_TIME = datetime(2024, 3, 11, 0, 0)
- VALID_TIMESTAMP = (CURRENT_TIME + timedelta(minutes=1)).timestamp()
- INVALID_EXPIRED_TIMESTAMP = (CURRENT_TIME - timedelta(minutes=3)).timestamp()
- INVALID_FUTURE_TIMESTAMP = (CURRENT_TIME + timedelta(minutes=3)).timestamp()
-
def setUp(self):
self.u2f = U2fInterface()
self.login_as(user=self.user)
@@ -80,37 +73,7 @@ def test_activate_webauthn(self):
assert len(self.request.session["webauthn_authentication_state"]["challenge"]) == 43
assert self.request.session["webauthn_authentication_state"]["user_verification"] is None
- @freeze_time(CURRENT_TIME)
- def test_activate_staff_webauthn_valid_timestamp(self):
- self.test_try_enroll_webauthn()
-
- self.request.session["staff_auth_flow"] = self.VALID_TIMESTAMP
-
- result = self.u2f.activate(self.request)
-
- assert isinstance(result, ActivationChallengeResult)
- assert "webauthn_authentication_state" not in self.request.session
- assert len(self.request.session["staff_webauthn_authentication_state"]["challenge"]) == 43
- assert (
- self.request.session["staff_webauthn_authentication_state"]["user_verification"] is None
- )
-
- @freeze_time(CURRENT_TIME)
- def test_activate_staff_webauthn_invalid_timestamp(self):
- self.test_try_enroll_webauthn()
-
- self.request.session["staff_auth_flow"] = self.INVALID_EXPIRED_TIMESTAMP
-
- result = self.u2f.activate(self.request)
-
- assert isinstance(result, ActivationChallengeResult)
- assert "webauthn_authentication_state" not in self.request.session
- assert len(self.request.session["staff_webauthn_authentication_state"]["challenge"]) == 43
- assert (
- self.request.session["staff_webauthn_authentication_state"]["user_verification"] is None
- )
-
- def test_validate_response_normal_state(self):
+ def test_validate_response_state(self):
self.test_try_enroll_webauthn()
mock_state = Mock()
self.u2f.webauthn_authentication_server.authenticate_complete = mock_state
@@ -122,57 +85,15 @@ def test_validate_response_normal_state(self):
assert kwargs.get("state") == "normal state"
assert "webauthn_authentication_state" not in self.request.session
- @freeze_time(CURRENT_TIME)
- def test_validate_response_staff_state_valid_timestamp(self):
- self.test_try_enroll_webauthn()
- mock_state = Mock()
- self.u2f.webauthn_authentication_server.authenticate_complete = mock_state
-
- self.request.session["staff_webauthn_authentication_state"] = "staff state"
- self.request.session["staff_auth_flow"] = self.VALID_TIMESTAMP
-
- assert self.u2f.validate_response(self.request, None, self.response)
- _, kwargs = mock_state.call_args
- assert kwargs.get("state") == "staff state"
- assert "staff_webauthn_authentication_state" not in self.request.session
-
- @freeze_time(CURRENT_TIME)
- def test_validate_response_staff_state_invalid_timestamp(self):
- self.test_try_enroll_webauthn()
- mock_state = Mock()
- self.u2f.webauthn_authentication_server.authenticate_complete = mock_state
-
- # Test expired timestamp
- self.request.session["webauthn_authentication_state"] = "non-staff state"
- self.request.session["staff_auth_flow"] = self.INVALID_EXPIRED_TIMESTAMP
-
- assert self.u2f.validate_response(self.request, None, self.response)
- _, kwargs = mock_state.call_args
- assert kwargs.get("state") == "non-staff state"
- assert "webauthn_authentication_state" not in self.request.session
-
- # Test timestamp too far in the future
- self.request.session["webauthn_authentication_state"] = "non-staff state"
- self.request.session["staff_auth_flow"] = self.INVALID_FUTURE_TIMESTAMP
- assert self.u2f.validate_response(self.request, None, self.response)
- _, kwargs = mock_state.call_args
- assert kwargs.get("state") == "non-staff state"
- assert "webauthn_authentication_state" not in self.request.session
-
- @freeze_time(CURRENT_TIME)
- def test_validate_response_failing_still_clears_all_states(self):
+ def test_validate_response_failing_still_clears_state(self):
self.test_try_enroll_webauthn()
mock_state = Mock(side_effect=ValueError("test"))
self.u2f.webauthn_authentication_server.authenticate_complete = mock_state
- self.request.session["webauthn_authentication_state"] = "non-staff state"
- self.request.session["staff_webauthn_authentication_state"] = "staff state"
- self.request.session["staff_auth_flow"] = self.VALID_TIMESTAMP
+ self.request.session["webauthn_authentication_state"] = "state"
with raises(ValueError):
self.u2f.validate_response(self.request, None, self.response)
_, kwargs = mock_state.call_args
- assert kwargs.get("state") == "staff state"
+ assert kwargs.get("state") == "state"
assert "webauthn_authentication_state" not in self.request.session
- assert "staff_webauthn_authentication_state" not in self.request.session
- assert "staff_auth_flow" not in self.request.session
|
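For reference, the deleted _valid_staff_timestamp helper enforced a narrow validity window: the session timestamp had to lie in the future, but no more than STAFF_AUTH_FLOW_MAX_AGE (two minutes) ahead. A standalone sketch of that check, reconstructed from the removed code above:

from datetime import UTC, datetime, timedelta

MAX_AGE = timedelta(minutes=2)  # mirrors the removed STAFF_AUTH_FLOW_MAX_AGE

def timestamp_is_valid(ts: float, limit: timedelta = MAX_AGE) -> bool:
    # A positive difference means the timestamp is still in the future;
    # the upper bound rejects timestamps set implausibly far ahead.
    remaining = datetime.fromtimestamp(ts, UTC) - datetime.now(UTC)
    return timedelta(0) < remaining < limit

now = datetime.now(UTC)
assert timestamp_is_valid((now + timedelta(minutes=1)).timestamp())
assert not timestamp_is_valid((now - timedelta(minutes=3)).timestamp())  # expired
assert not timestamp_is_valid((now + timedelta(minutes=3)).timestamp())  # too far out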
b78d495a31e53ad52cf55e4c1adfada298879873
|
2023-11-27 23:01:00
|
Tony Xiao
|
ref(stats-detectors): Optimize hourly projects query (#60561)
| false
|
Optimize hourly projects query (#60561)
|
ref
|
diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py
index ecee14f9282eb8..66b28f6197d7d5 100644
--- a/src/sentry/tasks/statistical_detectors.py
+++ b/src/sentry/tasks/statistical_detectors.py
@@ -113,30 +113,24 @@ def run_detection() -> None:
performance_projects_count = 0
profiling_projects_count = 0
- for project in RangeQuerySetWrapper(
- Project.objects.filter(status=ObjectStatus.ACTIVE).select_related("organization"),
- step=100,
+ for project_id, flags in RangeQuerySetWrapper(
+ Project.objects.filter(status=ObjectStatus.ACTIVE).values_list("id", "flags"),
+ result_value_getter=lambda item: item[0],
):
- if project.flags.has_transactions and (
- features.has(
- "organizations:performance-statistical-detectors-ema", project.organization
- )
- ):
- performance_projects.append(project)
+ if flags & Project.flags.has_transactions:
+ performance_projects.append(project_id)
performance_projects_count += 1
if len(performance_projects) >= PROJECTS_PER_BATCH:
detect_transaction_trends.delay(
- list({p.organization_id for p in performance_projects}),
- [p.id for p in performance_projects],
+ [],
+ performance_projects,
now,
)
performance_projects = []
- if project.flags.has_profiles and (
- features.has("organizations:profiling-statistical-detectors-ema", project.organization)
- ):
- profiling_projects.append(project.id)
+ if flags & Project.flags.has_profiles:
+ profiling_projects.append(project_id)
profiling_projects_count += 1
if len(profiling_projects) >= PROJECTS_PER_BATCH:
@@ -146,8 +140,8 @@ def run_detection() -> None:
# make sure to dispatch a task to handle the remaining projects
if performance_projects:
detect_transaction_trends.delay(
- list({p.organization_id for p in performance_projects}),
- [p.id for p in performance_projects],
+ [],
+ performance_projects,
now,
)
if profiling_projects:
@@ -281,7 +275,11 @@ def detect_transaction_trends(
#
# If we filter this in the earlier step, it makes the initial dispatch
# task take longer than necessary.
- projects = Project.objects.filter(id__in=project_ids)
+ projects = [
+ project
+ for project in Project.objects.filter(id__in=project_ids).select_related("organization")
+ if features.has("organizations:performance-statistical-detectors-ema", project.organization)
+ ]
settings = get_performance_issue_settings(projects)
projects = [
project
@@ -570,8 +568,11 @@ def detect_function_trends(project_ids: List[int], start: datetime, *args, **kwa
if not options.get("statistical_detectors.enable"):
return
- projects = Project.objects.filter(id__in=project_ids)
-
+ projects = [
+ project
+ for project in Project.objects.filter(id__in=project_ids).select_related("organization")
+ if features.has("organizations:profiling-statistical-detectors-ema", project.organization)
+ ]
ratelimit = options.get("statistical_detectors.ratelimit.ema")
trends = FunctionRegressionDetector.detect_trends(projects, start)
regressions = limit_regressions_by_project(trends, ratelimit)
diff --git a/tests/sentry/tasks/test_statistical_detectors.py b/tests/sentry/tasks/test_statistical_detectors.py
index b4d2daedf77059..1539797666b4e8 100644
--- a/tests/sentry/tasks/test_statistical_detectors.py
+++ b/tests/sentry/tasks/test_statistical_detectors.py
@@ -5,6 +5,7 @@
import pytest
from django.db.models import F
+from sentry.api.endpoints.project_performance_issue_settings import InternalProjectOptions
from sentry.models.options.project_option import ProjectOption
from sentry.models.project import Project
from sentry.seer.utils import BreakpointData
@@ -57,50 +58,29 @@ def project(organization):
[
"project_flags",
"enable",
- "performance_project_option_enabled",
- "performance_project",
"expected_performance_project",
- "profiling_project",
"expected_profiling_project",
],
[
- pytest.param(None, False, True, True, False, True, False, id="disabled"),
- pytest.param(None, True, True, False, False, False, False, id="no projects"),
- pytest.param(None, True, True, True, False, False, False, id="no transactions"),
- pytest.param(None, True, True, False, False, True, False, id="no profiles"),
+ pytest.param(None, False, False, False, id="disabled"),
+ pytest.param(None, True, False, False, id="no projects"),
+ pytest.param(None, True, False, False, id="no transactions"),
+ pytest.param(None, True, False, False, id="no profiles"),
pytest.param(
Project.flags.has_transactions,
True,
True,
- True,
- True,
- False,
False,
id="performance only",
),
- pytest.param(
- Project.flags.has_profiles, True, True, False, False, True, True, id="profiling only"
- ),
+ pytest.param(Project.flags.has_profiles, True, False, True, id="profiling only"),
pytest.param(
Project.flags.has_transactions | Project.flags.has_profiles,
True,
True,
- False,
- False,
- True,
True,
id="performance + profiling",
),
- pytest.param(
- Project.flags.has_transactions,
- True,
- False,
- False,
- False,
- False,
- False,
- id="performance project option disabled",
- ),
],
)
@mock.patch("sentry.tasks.statistical_detectors.detect_transaction_trends")
@@ -111,9 +91,6 @@ def test_run_detection_options(
detect_transaction_trends,
project_flags,
enable,
- performance_project,
- profiling_project,
- performance_project_option_enabled,
expected_performance_project,
expected_profiling_project,
project,
@@ -126,32 +103,12 @@ def test_run_detection_options(
"statistical_detectors.enable": enable,
}
- features = {
- "organizations:performance-statistical-detectors-ema": [project.organization.slug]
- if performance_project
- else [],
- "organizations:profiling-statistical-detectors-ema": [project.organization.slug]
- if profiling_project
- else [],
- }
-
- if performance_project_option_enabled:
- ProjectOption.objects.set_value(
- project=project,
- key="sentry:performance_issue_settings",
- value={
- "transaction_duration_regression_detection_enabled": performance_project_option_enabled
- },
- )
-
- with freeze_time(timestamp), override_options(options), Feature(features):
+ with freeze_time(timestamp), override_options(options):
run_detection()
if expected_performance_project:
assert detect_transaction_trends.delay.called
- detect_transaction_trends.delay.assert_has_calls(
- [mock.call([project.organization_id], [project.id], timestamp)]
- )
+ detect_transaction_trends.delay.assert_has_calls([mock.call([], [project.id], timestamp)])
else:
assert not detect_transaction_trends.delay.called
@@ -198,12 +155,12 @@ def test_run_detection_options_multiple_batches(
detect_transaction_trends.delay.assert_has_calls(
[
mock.call(
- [organization.id],
+ [],
[project.id for project in projects[:5]],
timestamp,
),
mock.call(
- [organization.id],
+ [],
[project.id for project in projects[5:]],
timestamp,
),
@@ -219,23 +176,40 @@ def test_run_detection_options_multiple_batches(
@pytest.mark.parametrize(
- ["enabled"],
+ ["task_enabled", "option_enabled"],
[
- pytest.param(False, id="disabled"),
- pytest.param(True, id="enabled"),
+ pytest.param(True, True, id="both enabled"),
+ pytest.param(False, False, id="both disabled"),
+ pytest.param(True, False, id="option disabled"),
+ pytest.param(False, True, id="task disabled"),
],
)
@mock.patch("sentry.tasks.statistical_detectors.query_transactions")
@django_db_all
def test_detect_transaction_trends_options(
query_transactions,
- enabled,
+ task_enabled,
+ option_enabled,
timestamp,
project,
):
- with override_options({"statistical_detectors.enable": enabled}):
+ ProjectOption.objects.set_value(
+ project=project,
+ key="sentry:performance_issue_settings",
+ value={InternalProjectOptions.TRANSACTION_DURATION_REGRESSION.value: option_enabled},
+ )
+
+ options = {
+ "statistical_detectors.enable": task_enabled,
+ }
+
+ features = {
+ "organizations:performance-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
detect_transaction_trends([project.organization_id], [project.id], timestamp)
- assert query_transactions.called == enabled
+ assert query_transactions.called == (task_enabled and option_enabled)
@pytest.mark.parametrize(
@@ -253,7 +227,15 @@ def test_detect_function_trends_options(
timestamp,
project,
):
- with override_options({"statistical_detectors.enable": enabled}):
+ options = {
+ "statistical_detectors.enable": enabled,
+ }
+
+ features = {
+ "organizations:profiling-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
detect_function_trends([project.id], timestamp)
assert query_functions.called == enabled
@@ -261,7 +243,15 @@ def test_detect_function_trends_options(
@mock.patch("sentry.snuba.functions.query")
@django_db_all
def test_detect_function_trends_query_timerange(functions_query, timestamp, project):
- with override_options({"statistical_detectors.enable": True}):
+ options = {
+ "statistical_detectors.enable": True,
+ }
+
+ features = {
+ "organizations:profiling-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
detect_function_trends([project.id], timestamp)
assert functions_query.called
@@ -296,7 +286,15 @@ def test_detect_transaction_trends(
for i, ts in enumerate(timestamps)
]
- with override_options({"statistical_detectors.enable": True}):
+ options = {
+ "statistical_detectors.enable": True,
+ }
+
+ features = {
+ "organizations:performance-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
for ts in timestamps:
detect_transaction_trends([project.organization.id], [project.id], ts)
assert detect_transaction_change_points.apply_async.called
@@ -351,12 +349,16 @@ def test_detect_transaction_trends_ratelimit(
for i, ts in enumerate(timestamps)
]
- with override_options(
- {
- "statistical_detectors.enable": True,
- "statistical_detectors.ratelimit.ema": ratelimit,
- }
- ):
+ options = {
+ "statistical_detectors.enable": True,
+ "statistical_detectors.ratelimit.ema": ratelimit,
+ }
+
+ features = {
+ "organizations:performance-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
for ts in timestamps:
detect_transaction_trends([project.organization.id], [project.id], ts)
@@ -447,7 +449,15 @@ def test_detect_function_trends(
for i, ts in enumerate(timestamps)
]
- with override_options({"statistical_detectors.enable": True}):
+ options = {
+ "statistical_detectors.enable": True,
+ }
+
+ features = {
+ "organizations:profiling-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
for ts in timestamps:
detect_function_trends([project.id], ts)
assert detect_function_change_points.apply_async.called
@@ -501,12 +511,16 @@ def test_detect_function_trends_ratelimit(
for i, ts in enumerate(timestamps)
]
- with override_options(
- {
- "statistical_detectors.enable": True,
- "statistical_detectors.ratelimit.ema": ratelimit,
- }
- ):
+ options = {
+ "statistical_detectors.enable": True,
+ "statistical_detectors.ratelimit.ema": ratelimit,
+ }
+
+ features = {
+ "organizations:profiling-statistical-detectors-ema": [project.organization.slug],
+ }
+
+ with override_options(options), Feature(features):
for ts in timestamps:
detect_function_trends([project.id], ts)
|
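The core of this optimization is visible in run_detection: instead of hydrating full Project objects (with a select_related on organization) just to read two attributes, the loop now fetches bare (id, flags) tuples and tests the flags bitmask directly, while the per-organization feature checks move into the batched tasks. The bitmask pattern in isolation, with made-up bit values standing in for Project.flags (a BitField):

# Illustrative bit positions only; the real values live on Project.flags.
HAS_TRANSACTIONS = 1 << 0
HAS_PROFILES = 1 << 1

rows = [(1, HAS_TRANSACTIONS), (2, HAS_PROFILES), (3, HAS_TRANSACTIONS | HAS_PROFILES)]

performance, profiling = [], []
for project_id, flags in rows:  # plain integers, no ORM instances
    if flags & HAS_TRANSACTIONS:
        performance.append(project_id)
    if flags & HAS_PROFILES:
        profiling.append(project_id)

assert performance == [1, 3]
assert profiling == [2, 3]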
456c953bdd42231eb52c4204c18c21946324e7ad
|
2020-01-22 04:02:17
|
Lyn Nagara
|
fix(deletion): Fix group deletion (#16564)
| false
|
Fix group deletion (#16564)
|
fix
|
diff --git a/src/sentry/runner/commands/cleanup.py b/src/sentry/runner/commands/cleanup.py
index d0b24e11b5230d..2c2ceb0f10787c 100644
--- a/src/sentry/runner/commands/cleanup.py
+++ b/src/sentry/runner/commands/cleanup.py
@@ -182,7 +182,7 @@ def is_filtered(model):
# Deletions that use the `deletions` code path (which handles their child relations)
# (model, datetime_field, order_by)
- DELETES = (models.Group, "last_seen", "last_seen")
+ DELETES = ((models.Group, "last_seen", "last_seen"),)
if not silent:
click.echo("Removing expired values for LostPasswordHash")
|
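The one-character fix above matters because the cleanup loop iterates DELETES. A bare 3-tuple iterates as three separate strings, whereas a tuple of tuples yields one (model, datetime_field, order_by) triple per entry, which is what the unpacking downstream expects:

broken = ('Group', 'last_seen', 'last_seen')     # iterates as three strings
fixed = (('Group', 'last_seen', 'last_seen'),)   # iterates as one triple

assert list(broken) == ['Group', 'last_seen', 'last_seen']
assert list(fixed) == [('Group', 'last_seen', 'last_seen')]

for model, dtfield, order_by in fixed:  # unpacking only works with the fixed shape
    assert (model, dtfield, order_by) == ('Group', 'last_seen', 'last_seen')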
d0a35b73e7b96c956b68bbdc1c92ebe8d51d7fb3
|
2020-02-18 23:58:33
|
Leander Rodrigues
|
chore(ts): Convert groupTagValues (#16986)
| false
|
Convert groupTagValues (#16986)
|
chore
|
diff --git a/src/sentry/static/sentry/app/types/index.tsx b/src/sentry/static/sentry/app/types/index.tsx
index e45c28a2ffb6ec..627c2e4244b7f0 100644
--- a/src/sentry/static/sentry/app/types/index.tsx
+++ b/src/sentry/static/sentry/app/types/index.tsx
@@ -834,6 +834,20 @@ export type Tag = {
predefined?: boolean;
};
+export type TagValue = {
+ count: number;
+ name: string;
+ value: string;
+ lastSeen: string;
+ key: string;
+ firstSeen: string;
+ query?: string;
+ email?: string;
+ username?: string;
+ identifier?: string;
+ ipAddress?: string;
+} & AvatarUser;
+
export type Level = 'error' | 'fatal' | 'info' | 'warning' | 'sample';
export type Meta = {
diff --git a/src/sentry/static/sentry/app/views/organizationGroupDetails/groupTagValues.jsx b/src/sentry/static/sentry/app/views/organizationGroupDetails/groupTagValues.jsx
deleted file mode 100644
index 8e9aad9792aebe..00000000000000
--- a/src/sentry/static/sentry/app/views/organizationGroupDetails/groupTagValues.jsx
+++ /dev/null
@@ -1,195 +0,0 @@
-import sortBy from 'lodash/sortBy';
-import property from 'lodash/property';
-import isEqual from 'lodash/isEqual';
-import PropTypes from 'prop-types';
-import React from 'react';
-
-import {isUrl, percent} from 'app/utils';
-import {t} from 'app/locale';
-import withApi from 'app/utils/withApi';
-import UserAvatar from 'app/components/avatar/userAvatar';
-import DeviceName from 'app/components/deviceName';
-import ExternalLink from 'app/components/links/externalLink';
-import GlobalSelectionLink from 'app/components/globalSelectionLink';
-import LoadingError from 'app/components/loadingError';
-import LoadingIndicator from 'app/components/loadingIndicator';
-import Pagination from 'app/components/pagination';
-import SentryTypes from 'app/sentryTypes';
-import TimeSince from 'app/components/timeSince';
-import withOrganization from 'app/utils/withOrganization';
-
-class GroupTagValues extends React.Component {
- static propTypes = {
- api: PropTypes.object,
- group: SentryTypes.Group.isRequired,
- location: PropTypes.shape({
- query: PropTypes.object,
- }),
- };
-
- state = {
- tagKey: null,
- tagValueList: null,
- loading: true,
- error: false,
- pageLinks: '',
- };
-
- componentDidMount() {
- this.fetchData();
- }
-
- componentDidUpdate(prevProps) {
- const queryHasChanged = !isEqual(prevProps.location.query, this.props.location.query);
- if (queryHasChanged || prevProps.params.tagKey !== this.props.params.tagKey) {
- this.fetchData();
- }
- }
-
- fetchData = async () => {
- const {
- params,
- location: {query},
- } = this.props;
- this.setState({
- loading: true,
- error: false,
- });
-
- const promises = [
- this.props.api.requestPromise(`/issues/${params.groupId}/tags/${params.tagKey}/`, {
- query,
- }),
- this.props.api.requestPromise(
- `/issues/${params.groupId}/tags/${params.tagKey}/values/`,
- {
- query,
- includeAllArgs: true,
- }
- ),
- ];
-
- try {
- const [tagKey, tagValueResponse] = await Promise.all(promises);
- const [tagValueList, , jqXHR] = tagValueResponse;
-
- this.setState({
- tagKey,
- tagValueList,
- loading: false,
- pageLinks: jqXHR.getResponseHeader('Link'),
- });
- } catch (rejections) {
- // eslint-disable-next-line no-console
- console.error(rejections);
- this.setState({
- error: true,
- loading: false,
- });
- }
- };
-
- getUserDisplayName(item) {
- return item.email || item.username || item.identifier || item.ipAddress || item.value;
- }
-
- render() {
- if (this.state.loading) {
- return <LoadingIndicator />;
- } else if (this.state.error) {
- return <LoadingError onRetry={this.fetchData} />;
- }
-
- const {
- group,
- params: {orgId},
- } = this.props;
- const tagKey = this.state.tagKey;
-
- const sortedTagValueList = sortBy(
- this.state.tagValueList,
- property('count')
- ).reverse();
-
- const issuesPath = `/organizations/${orgId}/issues/`;
-
- const children = sortedTagValueList.map((tagValue, tagValueIdx) => {
- const pct = percent(tagValue.count, tagKey.totalValues).toFixed(2);
- const query = tagValue.query || `${tagKey.key}:"${tagValue.value}"`;
- return (
- <tr key={tagValueIdx}>
- <td className="bar-cell">
- <span className="bar" style={{width: pct + '%'}} />
- <span className="label">{pct}%</span>
- </td>
- <td>
- <GlobalSelectionLink
- to={{
- pathname: issuesPath,
- query: {query},
- }}
- >
- {tagKey.key === 'user' ? (
- <React.Fragment>
- <UserAvatar user={tagValue} size={20} className="avatar" />
- <span style={{marginLeft: 10}}>
- {this.getUserDisplayName(tagValue)}
- </span>
- </React.Fragment>
- ) : (
- <DeviceName>{tagValue.name}</DeviceName>
- )}
- </GlobalSelectionLink>
- {tagValue.email && (
- <ExternalLink href={`mailto:${tagValue.email}`} className="external-icon">
- <em className="icon-envelope" />
- </ExternalLink>
- )}
- {isUrl(tagValue.value) && (
- <a href={tagValue.value} className="external-icon">
- <em className="icon-open" />
- </a>
- )}
- </td>
- <td>
- <TimeSince date={tagValue.lastSeen} />
- </td>
- </tr>
- );
- });
-
- return (
- <div>
- <h3>
- {tagKey.key === 'user' ? t('Affected Users') : tagKey.name}
- <a
- href={`/${orgId}/${group.project.slug}/issues/${group.id}/tags/${this.props.params.tagKey}/export/`}
- className="btn btn-default btn-sm"
- style={{marginLeft: 10}}
- >
- {t('Export to CSV')}
- </a>
- </h3>
- <table className="table table-striped">
- <thead>
- <tr>
- <th style={{width: 30}}>%</th>
- <th />
- <th style={{width: 200}}>{t('Last Seen')}</th>
- </tr>
- </thead>
- <tbody>{children}</tbody>
- </table>
- <Pagination pageLinks={this.state.pageLinks} />
- <p>
- <small>
- {t('Note: Percentage of issue is based on events seen in the last 7 days.')}
- </small>
- </p>
- </div>
- );
- }
-}
-
-export {GroupTagValues};
-export default withApi(withOrganization(GroupTagValues));
diff --git a/src/sentry/static/sentry/app/views/organizationGroupDetails/groupTagValues.tsx b/src/sentry/static/sentry/app/views/organizationGroupDetails/groupTagValues.tsx
new file mode 100644
index 00000000000000..7a36550717611e
--- /dev/null
+++ b/src/sentry/static/sentry/app/views/organizationGroupDetails/groupTagValues.tsx
@@ -0,0 +1,151 @@
+import sortBy from 'lodash/sortBy';
+import property from 'lodash/property';
+import React from 'react';
+import {RouteComponentProps} from 'react-router/lib/Router';
+import styled from '@emotion/styled';
+
+import {isUrl, percent} from 'app/utils';
+import {t} from 'app/locale';
+import AsyncComponent from 'app/components/asyncComponent';
+import UserAvatar from 'app/components/avatar/userAvatar';
+import DeviceName from 'app/components/deviceName';
+import ExternalLink from 'app/components/links/externalLink';
+import GlobalSelectionLink from 'app/components/globalSelectionLink';
+import Pagination from 'app/components/pagination';
+import TimeSince from 'app/components/timeSince';
+import space from 'app/styles/space';
+import {Group, Tag, TagValue} from 'app/types';
+
+type RouteParams = {
+ groupId: string;
+ orgId: string;
+ tagKey: string;
+};
+
+type Props = {
+ group: Group;
+} & RouteComponentProps<RouteParams, {}>;
+
+type State = {
+ tag: Tag;
+ tagValueList: TagValue[];
+ tagValueListPageLinks: string;
+};
+
+class GroupTagValues extends AsyncComponent<
+ Props & AsyncComponent['props'],
+ State & AsyncComponent['state']
+> {
+ getEndpoints(): [string, string][] {
+ const {groupId, tagKey} = this.props.params;
+ return [
+ ['tag', `/issues/${groupId}/tags/${tagKey}/`],
+ ['tagValueList', `/issues/${groupId}/tags/${tagKey}/values/`],
+ ];
+ }
+
+ getUserDisplayName(item: TagValue): string {
+ return item.email || item.username || item.identifier || item.ipAddress || item.value;
+ }
+
+ renderBody() {
+ const {
+ group,
+ params: {orgId, tagKey},
+ } = this.props;
+ const {tag, tagValueList, tagValueListPageLinks} = this.state;
+ const sortedTagValueList: TagValue[] = sortBy(
+ tagValueList,
+ property('count')
+ ).reverse();
+
+ const issuesPath = `/organizations/${orgId}/issues/`;
+
+ const children = sortedTagValueList.map((tagValue, tagValueIdx) => {
+ const pct = tag.totalValues
+ ? `${percent(tagValue.count, tag.totalValues).toFixed(2)}%`
+ : '--';
+ const query = tagValue.query || `${tag.key}:"${tagValue.value}"`;
+ return (
+ <tr key={tagValueIdx}>
+ <td className="bar-cell">
+ <span className="label">{pct}</span>
+ </td>
+ <td>
+ <GlobalSelectionLink
+ to={{
+ pathname: issuesPath,
+ query: {query},
+ }}
+ >
+ {tag.key === 'user' ? (
+ <React.Fragment>
+ <UserAvatar user={tagValue} size={20} className="avatar" />
+ <span className="m-left">{this.getUserDisplayName(tagValue)}</span>
+ </React.Fragment>
+ ) : (
+ <DeviceName>{tagValue.name}</DeviceName>
+ )}
+ </GlobalSelectionLink>
+ {tagValue.email && (
+ <ExternalLink href={`mailto:${tagValue.email}`} className="external-icon">
+ <em className="icon-envelope" />
+ </ExternalLink>
+ )}
+ {isUrl(tagValue.value) && (
+ <a href={tagValue.value} className="external-icon">
+ <em className="icon-open" />
+ </a>
+ )}
+ </td>
+ <td>
+ <TimeSince date={tagValue.lastSeen} />
+ </td>
+ </tr>
+ );
+ });
+
+ return (
+ <TableWrapper>
+ <h3>
+ {tag.key === 'user' ? t('Affected Users') : tag.name}
+ <a
+ href={`/${orgId}/${group.project.slug}/issues/${group.id}/tags/${tagKey}/export/`}
+ className="btn btn-default btn-sm m-left"
+ >
+ {t('Export to CSV')}
+ </a>
+ </h3>
+ <table className="table table-striped">
+ <thead>
+ <tr>
+ <TableHeader width={20}>%</TableHeader>
+ <th />
+ <TableHeader width={300}>{t('Last Seen')}</TableHeader>
+ </tr>
+ </thead>
+ <tbody>{children}</tbody>
+ </table>
+ <Pagination pageLinks={tagValueListPageLinks} />
+ <p>
+ <small>
+ {t('Note: Percentage of issue is based on events seen in the last 7 days.')}
+ </small>
+ </p>
+ </TableWrapper>
+ );
+ }
+}
+
+const TableWrapper = styled('div')`
+ .m-left {
+ margin-left: ${space(1.5)};
+ }
+`;
+
+const TableHeader = styled('th')<{width: number}>`
+ width: ${p => p.width}px;
+`;
+
+export {GroupTagValues};
+export default GroupTagValues;
|
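Beyond the TypeScript conversion, one behavioral change in the rewrite is worth noting: the percentage cell now guards against a missing or zero tag.totalValues instead of dividing unconditionally. The same guard expressed as a small Python sketch:

def percent_label(count: int, total: int | None) -> str:
    # Render '--' when there is no denominator, matching the new TSX behavior.
    return f'{count / total * 100:.2f}%' if total else '--'

assert percent_label(5, 20) == '25.00%'
assert percent_label(5, 0) == '--'
assert percent_label(5, None) == '--'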
a695c3a2a5a01e4adfce66de5d5f6754a3d751c2
|
2021-11-25 19:18:55
|
Joris Bayer
|
feat(metrics): Get metrics metadata from Snuba (#30163)
| false
|
Get metrics metadata from Snuba (#30163)
|
feat
|
diff --git a/src/sentry/api/endpoints/organization_metrics.py b/src/sentry/api/endpoints/organization_metrics.py
index 31bea2d203731b..bf8c84a64bf582 100644
--- a/src/sentry/api/endpoints/organization_metrics.py
+++ b/src/sentry/api/endpoints/organization_metrics.py
@@ -36,7 +36,6 @@ class OrganizationMetricDetailsEndpoint(OrganizationEndpoint):
"""Get metric name, available operations, metric unit and available tags"""
def get(self, request, organization, metric_name):
-
if not features.has("organizations:metrics", organization, actor=request.user):
return Response(status=404)
diff --git a/src/sentry/snuba/metrics.py b/src/sentry/snuba/metrics.py
index 0b4a996b457b19..07b7cd9f8fd95b 100644
--- a/src/sentry/snuba/metrics.py
+++ b/src/sentry/snuba/metrics.py
@@ -3,10 +3,23 @@
import random
import re
from abc import ABC, abstractmethod
-from collections import OrderedDict
+from collections import OrderedDict, defaultdict
from dataclasses import dataclass
from datetime import datetime, timedelta
-from typing import List, Literal, Optional, Protocol, Sequence, Tuple, TypedDict, Union
+from operator import itemgetter
+from typing import (
+ Any,
+ Collection,
+ List,
+ Literal,
+ Mapping,
+ Optional,
+ Protocol,
+ Sequence,
+ Tuple,
+ TypedDict,
+ Union,
+)
from snuba_sdk import (
And,
@@ -26,7 +39,7 @@
from sentry.models import Project
from sentry.sentry_metrics import indexer
-from sentry.snuba.dataset import Dataset
+from sentry.snuba.dataset import Dataset, EntityKey
from sentry.snuba.sessions_v2 import ( # TODO: unite metrics and sessions_v2
AllowedResolution,
InvalidField,
@@ -63,6 +76,14 @@
TS_COL_GROUP = "bucketed_time"
+def reverse_resolve(index: int) -> str:
+ resolved = indexer.reverse_resolve(index)
+ # If we cannot find a string for an integer index, that's a bug:
+ assert resolved is not None, index
+
+ return resolved
+
+
def parse_field(field: str) -> Tuple[str, str]:
matches = FIELD_REGEX.match(field)
try:
@@ -231,8 +252,31 @@ def get_intervals(query: TimeRange):
start += delta
+#: The type of metric, which determines the snuba entity to query
+MetricType = Literal["counter", "set", "distribution"]
+
+#: A function that can be applied to a metric
+MetricOperation = Literal["avg", "count", "max", "min", "p50", "p75", "p90", "p95", "p99"]
+
+MetricUnit = Literal["seconds"]
+
+
+METRIC_TYPE_TO_ENTITY: Mapping[MetricType, EntityKey] = {
+ "counter": EntityKey.MetricsCounters,
+ "set": EntityKey.MetricsSets,
+ "distribution": EntityKey.MetricsDistributions,
+}
+
+
+class MetricMeta(TypedDict):
+ name: str
+ type: MetricType
+ operations: Collection[MetricOperation]
+ unit: Optional[MetricUnit]
+
+
class Tag(TypedDict):
- key: str
+ key: str # Called key here to be consistent with JS type
class TagValue(TypedDict):
@@ -240,15 +284,21 @@ class TagValue(TypedDict):
value: str
+class MetricMetaWithTagKeys(MetricMeta):
+ tags: Sequence[Tag]
+
+
class DataSource(ABC):
"""Base class for metrics data sources"""
@abstractmethod
- def get_metrics(self, projects: Sequence[Project]) -> List[dict]:
+ def get_metrics(self, projects: Sequence[Project]) -> Sequence[MetricMeta]:
"""Get metrics metadata, without tags"""
@abstractmethod
- def get_single_metric(self, projects: Sequence[Project], metric_name: str) -> dict:
+ def get_single_metric(
+ self, projects: Sequence[Project], metric_name: str
+ ) -> MetricMetaWithTagKeys:
"""Get metadata for a single metric, without tag values"""
@abstractmethod
@@ -371,17 +421,19 @@ def _get_metric(metric_name: str) -> dict:
class IndexMockingDataSource(DataSource):
- def get_metrics(self, projects: Sequence[Project]) -> List[dict]:
+ def get_metrics(self, projects: Sequence[Project]) -> Sequence[MetricMeta]:
"""Get metrics metadata, without tags"""
return [
- dict(
+ MetricMeta(
name=name,
**{key: value for key, value in metric.items() if key != "tags"},
)
for name, metric in _METRICS.items()
]
- def get_single_metric(self, projects: Sequence[Project], metric_name: str) -> dict:
+ def get_single_metric(
+ self, projects: Sequence[Project], metric_name: str
+ ) -> MetricMetaWithTagKeys:
"""Get metadata for a single metric, without tag values"""
try:
metric = _METRICS[metric_name]
@@ -391,7 +443,7 @@ def get_single_metric(self, projects: Sequence[Project], metric_name: str) -> di
return dict(
name=metric_name,
**{
- # Only return metric names
+ # Only return tag names
key: (sorted(value.keys()) if key == "tags" else value)
for key, value in metric.items()
},
@@ -540,13 +592,6 @@ class Percentile(enum.Enum):
class SnubaQueryBuilder:
- _entity_map = {
- "counter": "metrics_counters",
- "distribution": "metrics_distributions",
- "gauge": "metrics_gauges",
- "set": "metrics_sets",
- }
-
#: Datasets actually implemented in snuba:
_implemented_datasets = {
"metrics_counters",
@@ -683,9 +728,9 @@ def _build_queries_for_entity(self, query_definition, entity, fields, where, gro
def get_snuba_queries(self):
return self._queries
- def _get_entity(self, metric_type: str) -> str:
+ def _get_entity(self, metric_type: MetricType) -> str:
- entity = self._entity_map[metric_type]
+ entity = METRIC_TYPE_TO_ENTITY[metric_type].value
if entity not in self._implemented_datasets:
raise NotImplementedError(f"Dataset not yet implemented: {entity}")
@@ -730,12 +775,12 @@ def __init__(
def _parse_tag(self, tag_string: str) -> str:
tag_key = int(tag_string.replace("tags[", "").replace("]", ""))
- return indexer.reverse_resolve(tag_key)
+ return reverse_resolve(tag_key)
def _extract_data(self, entity, data, groups):
tags = tuple((key, data[key]) for key in sorted(data.keys()) if key.startswith("tags["))
- metric_name = indexer.reverse_resolve(data["metric_id"])
+ metric_name = reverse_resolve(data["metric_id"])
ops = self._ops_by_metric[metric_name]
tag_data = groups.setdefault(
@@ -783,7 +828,7 @@ def translate_results(self):
groups = [
dict(
- by={self._parse_tag(key): indexer.reverse_resolve(value) for key, value in tags},
+ by={self._parse_tag(key): reverse_resolve(value) for key, value in tags},
**data,
)
for tags, data in groups.items()
@@ -792,12 +837,235 @@ def translate_results(self):
return groups
-class SnubaDataSource(IndexMockingDataSource):
- """Mocks metrics metadata and string indexing, but fetches real time series"""
+class MetaFromSnuba:
+ """Fetch metrics metadata (metric names, tag names, tag values, ...) from snuba.
+ This is not intended for production use, but rather as an intermediate solution
+ until we have a proper metadata store set up.
+
+ To keep things simple, and hopefully reasonably efficient, we only look at
+ the past 24 hours.
+ """
+
+ _granularity = 24 * 60 * 60 # coarsest granularity
+
+ def __init__(self, projects: Sequence[Project]):
+ assert projects
+ self._org_id = projects[0].organization_id
+ self._projects = projects
+
+ def _get_data(
+ self,
+ *,
+ entity_key: EntityKey,
+ select: List[Column],
+ where: List[Condition],
+ groupby: List[Column],
+ referrer: str,
+ ) -> Mapping[str, Any]:
+ # Round timestamp to minute to get cache efficiency:
+ now = datetime.now().replace(second=0, microsecond=0)
+
+ query = Query(
+ dataset=Dataset.Metrics.value,
+ match=Entity(entity_key.value),
+ select=select,
+ groupby=groupby,
+ where=[
+ Condition(Column("org_id"), Op.EQ, self._org_id),
+ Condition(Column("project_id"), Op.IN, [p.id for p in self._projects]),
+ Condition(Column(TS_COL_QUERY), Op.GTE, now - timedelta(hours=24)),
+ Condition(Column(TS_COL_QUERY), Op.LT, now),
+ ]
+ + where,
+ granularity=Granularity(self._granularity),
+ )
+ result = raw_snql_query(query, referrer, use_cache=True)
+ return result["data"]
+
+ def _get_metrics_for_entity(self, entity_key: EntityKey) -> Mapping[str, Any]:
+ return self._get_data(
+ entity_key=entity_key,
+ select=[Column("metric_id")],
+ groupby=[Column("metric_id")],
+ where=[],
+ referrer="snuba.metrics.get_metrics_names_for_entity",
+ )
+
+ def get_metrics(self) -> Sequence[MetricMeta]:
+ metric_names = (
+ (metric_type, row)
+ for metric_type in ("counter", "set", "distribution")
+ for row in self._get_metrics_for_entity(METRIC_TYPE_TO_ENTITY[metric_type])
+ )
+
+ return [
+ MetricMeta(
+ name=reverse_resolve(row["metric_id"]),
+ type=metric_type,
+ operations=_AVAILABLE_OPERATIONS[METRIC_TYPE_TO_ENTITY[metric_type].value],
+ unit=None, # snuba does not know the unit
+ )
+ for metric_type, row in metric_names
+ ]
+
+ def get_single_metric(self, metric_name: str) -> MetricMetaWithTagKeys:
+ """Get metadata for a single metric, without tag values"""
+ metric_id = indexer.resolve(metric_name)
+ if metric_id is None:
+ raise InvalidParams
+
+ for metric_type in ("counter", "set", "distribution"):
+ # TODO: What if metric_id exists for multiple types / units?
+ entity_key = METRIC_TYPE_TO_ENTITY[metric_type]
+ data = self._get_data(
+ entity_key=entity_key,
+ select=[Column("metric_id"), Column("tags.key")],
+ where=[Condition(Column("metric_id"), Op.EQ, metric_id)],
+ groupby=[Column("metric_id"), Column("tags.key")],
+ referrer="snuba.metrics.meta.get_single_metric",
+ )
+ if data:
+ tag_ids = {tag_id for row in data for tag_id in row["tags.key"]}
+ return {
+ "name": metric_name,
+ "type": metric_type,
+ "operations": _AVAILABLE_OPERATIONS[entity_key.value],
+ "tags": sorted(
+ ({"key": reverse_resolve(tag_id)} for tag_id in tag_ids),
+ key=itemgetter("key"),
+ ),
+ "unit": None,
+ }
+
+ raise InvalidParams
+
+ def _get_metrics_filter(
+ self, metric_names: Optional[Sequence[str]]
+ ) -> Optional[List[Condition]]:
+ """Add a condition to filter by metrics. Return None if a name cannot be resolved."""
+ where = []
+ if metric_names is not None:
+ metric_ids = []
+ for name in metric_names:
+ resolved = indexer.resolve(name)
+ if resolved is None:
+ # We are looking for tags that appear in all given metrics.
+ # A tag cannot appear in a metric if the metric is not even indexed.
+ return None
+ metric_ids.append(resolved)
+ where.append(Condition(Column("metric_id"), Op.IN, metric_ids))
+
+ return where
+
+ def get_tags(self, metric_names: Optional[Sequence[str]]) -> Sequence[Tag]:
+ """Get all metric tags for the given projects and metric_names"""
+ where = self._get_metrics_filter(metric_names)
+ if where is None:
+ return []
+
+ tag_ids_per_metric_id = defaultdict(list)
+
+ for metric_type in ("counter", "set", "distribution"):
+ # TODO: What if metric_id exists for multiple types / units?
+ entity_key = METRIC_TYPE_TO_ENTITY[metric_type]
+ rows = self._get_data(
+ entity_key=entity_key,
+ select=[Column("metric_id"), Column("tags.key")],
+ where=where,
+ groupby=[Column("metric_id"), Column("tags.key")],
+ referrer="snuba.metrics.meta.get_tags",
+ )
+ for row in rows:
+ tag_ids_per_metric_id[row["metric_id"]].extend(row["tags.key"])
+
+ tag_id_lists = tag_ids_per_metric_id.values()
+ if metric_names is not None:
+ # Only return tags that occur in all metrics
+ tag_ids = set.intersection(*map(set, tag_id_lists))
+ else:
+ tag_ids = {tag_id for ids in tag_id_lists for tag_id in ids}
+
+ tags = [{"key": reverse_resolve(tag_id)} for tag_id in tag_ids]
+ tags.sort(key=itemgetter("key"))
+
+ return tags
+
+ def get_tag_values(
+ self, tag_name: str, metric_names: Optional[Sequence[str]]
+ ) -> Sequence[TagValue]:
+ """Get all known values for a specific tag"""
+ tag_id = indexer.resolve(tag_name)
+ if tag_id is None:
+ raise InvalidParams
+
+ where = self._get_metrics_filter(metric_names)
+ if where is None:
+ return []
+
+ tags = defaultdict(list)
+
+ column_name = f"tags[{tag_id}]"
+ for metric_type in ("counter", "set", "distribution"):
+ # TODO: What if metric_id exists for multiple types / units?
+ entity_key = METRIC_TYPE_TO_ENTITY[metric_type]
+ rows = self._get_data(
+ entity_key=entity_key,
+ select=[Column("metric_id"), Column(column_name)],
+ where=where,
+ groupby=[Column("metric_id"), Column(column_name)],
+ referrer="snuba.metrics.meta.get_tag_values",
+ )
+ for row in rows:
+ value_id = row[column_name]
+ if value_id > 0:
+ metric_id = row["metric_id"]
+ tags[metric_id].append(value_id)
+
+ value_id_lists = tags.values()
+ if metric_names is not None:
+ # Only return tags that occur in all metrics
+ value_ids = set.intersection(*[set(ids) for ids in value_id_lists])
+ else:
+ value_ids = {value_id for ids in value_id_lists for value_id in ids}
+
+ tags = [{"key": tag_name, "value": reverse_resolve(value_id)} for value_id in value_ids]
+ tags.sort(key=itemgetter("key"))
+
+ return tags
+
+
+class SnubaDataSource(DataSource):
+ """Get both metadata and time series from Snuba"""
+
+ def get_metrics(self, projects: Sequence[Project]) -> Sequence[MetricMeta]:
+ meta = MetaFromSnuba(projects)
+ return meta.get_metrics()
+
+ def get_single_metric(
+ self, projects: Sequence[Project], metric_name: str
+ ) -> MetricMetaWithTagKeys:
+ """Get metadata for a single metric, without tag values"""
+ meta = MetaFromSnuba(projects)
+ return meta.get_single_metric(metric_name)
+
+ def get_tags(self, projects: Sequence[Project], metric_names=None) -> Sequence[Tag]:
+ """Get all available tag names for this project
+
+ If ``metric_names`` is provided, the list of available tag names will
+ only contain tags that appear in *all* these metrics.
+ """
+ meta = MetaFromSnuba(projects)
+ return meta.get_tags(metric_names)
+
+ def get_tag_values(
+ self, projects: Sequence[Project], tag_name: str, metric_names=None
+ ) -> Sequence[TagValue]:
+ """Get all known values for a specific tag"""
+ meta = MetaFromSnuba(projects)
+ return meta.get_tag_values(tag_name, metric_names)
def get_series(self, projects: Sequence[Project], query: QueryDefinition) -> dict:
"""Get time series for the given query"""
-
intervals = list(get_intervals(query))
snuba_queries = SnubaQueryBuilder(projects, query).get_snuba_queries()
diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py
index 50fc3de41db4d0..7d2a6b817bea00 100644
--- a/tests/sentry/api/endpoints/test_organization_metrics.py
+++ b/tests/sentry/api/endpoints/test_organization_metrics.py
@@ -467,3 +467,226 @@ def test_orderby(self):
assert group["by"] == {"transaction": expected_transaction}
totals = group["totals"]
assert totals == {"count(measurement.lcp)": expected_count}
+
+
+class OrganizationMetricMetaIntegrationTest(SessionMetricsTestCase, APITestCase):
+ def setUp(self):
+ super().setUp()
+ self.login_as(user=self.user)
+
+ now = int(time.time())
+
+ # TODO: move _send to SnubaMetricsTestCase
+ self._send_buckets(
+ [
+ {
+ "org_id": self.organization.id,
+ "project_id": self.project.id,
+ "metric_id": indexer.record("metric1"),
+ "timestamp": now,
+ "tags": {
+ indexer.record("tag1"): indexer.record("value1"),
+ indexer.record("tag2"): indexer.record("value2"),
+ },
+ "type": "c",
+ "value": 1,
+ "retention_days": 90,
+ },
+ {
+ "org_id": self.organization.id,
+ "project_id": self.project.id,
+ "metric_id": indexer.record("metric1"),
+ "timestamp": now,
+ "tags": {
+ indexer.record("tag3"): indexer.record("value3"),
+ },
+ "type": "c",
+ "value": 1,
+ "retention_days": 90,
+ },
+ ],
+ entity="metrics_counters",
+ )
+ self._send_buckets(
+ [
+ {
+ "org_id": self.organization.id,
+ "project_id": self.project.id,
+ "metric_id": indexer.record("metric2"),
+ "timestamp": now,
+ "tags": {
+ indexer.record("tag4"): indexer.record("value3"),
+ indexer.record("tag1"): indexer.record("value2"),
+ indexer.record("tag2"): indexer.record("value1"),
+ },
+ "type": "s",
+ "value": [123],
+ "retention_days": 90,
+ },
+ {
+ "org_id": self.organization.id,
+ "project_id": self.project.id,
+ "metric_id": indexer.record("metric3"),
+ "timestamp": now,
+ "tags": {},
+ "type": "s",
+ "value": [123],
+ "retention_days": 90,
+ },
+ ],
+ entity="metrics_sets",
+ )
+
+
+class OrganizationMetricsIndexIntegrationTest(OrganizationMetricMetaIntegrationTest):
+
+ endpoint = "sentry-api-0-organization-metrics-index"
+
+ @with_feature(FEATURE_FLAG)
+ def test_metrics_index(self):
+ """
+
+ Note that this test will fail once we have a metrics meta store,
+ because the setUp bypasses it.
+ """
+
+ response = self.get_success_response(
+ self.organization.slug,
+ datasource="snuba", # TODO: remove datasource arg
+ )
+
+ assert response.data == [
+ {"name": "metric1", "type": "counter", "operations": ["sum"], "unit": None},
+ {"name": "metric2", "type": "set", "operations": ["count_unique"], "unit": None},
+ {"name": "metric3", "type": "set", "operations": ["count_unique"], "unit": None},
+ ]
+
+
+class OrganizationMetricDetailsIntegrationTest(OrganizationMetricMetaIntegrationTest):
+
+ endpoint = "sentry-api-0-organization-metric-details"
+
+ @with_feature(FEATURE_FLAG)
+ def test_metric_details(self):
+ # metric1:
+ response = self.get_success_response(
+ self.organization.slug,
+ "metric1",
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == {
+ "name": "metric1",
+ "type": "counter",
+ "operations": ["sum"],
+ "unit": None,
+ "tags": [
+ {"key": "tag1"},
+ {"key": "tag2"},
+ {"key": "tag3"},
+ ],
+ }
+
+ # metric2:
+ response = self.get_success_response(
+ self.organization.slug,
+ "metric2",
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == {
+ "name": "metric2",
+ "type": "set",
+ "operations": ["count_unique"],
+ "unit": None,
+ "tags": [
+ {"key": "tag1"},
+ {"key": "tag2"},
+ {"key": "tag4"},
+ ],
+ }
+
+ # metric3:
+ response = self.get_success_response(
+ self.organization.slug,
+ "metric3",
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == {
+ "name": "metric3",
+ "type": "set",
+ "operations": ["count_unique"],
+ "unit": None,
+ "tags": [],
+ }
+
+
+class OrganizationMetricsTagsIntegrationTest(OrganizationMetricMetaIntegrationTest):
+
+ endpoint = "sentry-api-0-organization-metrics-tags"
+
+ @with_feature(FEATURE_FLAG)
+ def test_metric_tags(self):
+ response = self.get_success_response(
+ self.organization.slug,
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == [
+ {"key": "tag1"},
+ {"key": "tag2"},
+ {"key": "tag3"},
+ {"key": "tag4"},
+ ]
+
+ # When metric names are supplied, get intersection of tag names:
+ response = self.get_success_response(
+ self.organization.slug,
+ datasource="snuba", # TODO: remove datasource arg
+ metric=["metric1", "metric2"],
+ )
+ assert response.data == [
+ {"key": "tag1"},
+ {"key": "tag2"},
+ ]
+
+ response = self.get_success_response(
+ self.organization.slug,
+ datasource="snuba", # TODO: remove datasource arg
+ metric=["metric1", "metric2", "metric3"],
+ )
+ assert response.data == []
+
+
+class OrganizationMetricsTagDetailsIntegrationTest(OrganizationMetricMetaIntegrationTest):
+
+ endpoint = "sentry-api-0-organization-metrics-tag-details"
+
+ @with_feature(FEATURE_FLAG)
+ def test_metric_tag_details(self):
+ response = self.get_success_response(
+ self.organization.slug,
+ "tag1",
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == [
+ {"key": "tag1", "value": "value1"},
+ {"key": "tag1", "value": "value2"},
+ ]
+
+ # When single metric_name is supplied, get only tag values for that metric:
+ response = self.get_success_response(
+ self.organization.slug,
+ "tag1",
+ metric=["metric1"],
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == [
+ {"key": "tag1", "value": "value1"},
+ ]
+
+ # When metric names are supplied, get intersection of tags:
+ response = self.get_success_response(
+ self.organization.slug,
+ "tag1",
+ metric=["metric1", "metric2"],
+ datasource="snuba", # TODO: remove datasource arg
+ )
+ assert response.data == []
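
The tag endpoints exercised above use intersection semantics: when several metric names are supplied, only tag keys present in every one of those metrics survive. A minimal sketch of that rule, assuming a purely in-memory stand-in for the metadata (the METRIC_TAGS dict below is illustrative, not the real meta store):

# Illustrative stand-in: metric name -> tag keys, mirroring the setUp fixtures above.
METRIC_TAGS = {
    "metric1": {"tag1", "tag2", "tag3"},
    "metric2": {"tag1", "tag2", "tag4"},
    "metric3": set(),
}


def get_tag_keys(metric_names=None):
    # No filter: union of tag keys across all known metrics.
    if not metric_names:
        return sorted(set.union(*METRIC_TAGS.values()))
    # Filtered: keep only tag keys that appear in *all* requested metrics.
    return sorted(set.intersection(*(METRIC_TAGS[name] for name in metric_names)))


assert get_tag_keys() == ["tag1", "tag2", "tag3", "tag4"]
assert get_tag_keys(["metric1", "metric2"]) == ["tag1", "tag2"]
assert get_tag_keys(["metric1", "metric2", "metric3"]) == []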
|
ac03519051e58c2ae0867271f543d2252c4596f9
|
2024-01-17 17:42:52
|
ArthurKnaus
|
fix(ddm): Default query overwriting links (#63333)
| false
|
Default query overwriting links (#63333)
|
fix
|
diff --git a/static/app/views/ddm/context.tsx b/static/app/views/ddm/context.tsx
index 24935c24a860df..29893421fcd6a9 100644
--- a/static/app/views/ddm/context.tsx
+++ b/static/app/views/ddm/context.tsx
@@ -176,7 +176,7 @@ const useDefaultQuery = () => {
> | null>('ddm:default-query', null);
useEffect(() => {
- if (defaultQuery) {
+ if (defaultQuery && router.location.query.widgets === undefined) {
router.replace({...router.location, query: defaultQuery});
}
// Only call on page load
|
474251d93d8079bcfccb1e7f981c57084f15f8de
|
2023-01-24 18:58:47
|
Jodi Jang
|
feat(codecov): Add FE analytics for Codecov link CTR (#43517)
| false
|
Add FE analytics for Codecov link CTR (#43517)
|
feat
|
diff --git a/static/app/components/events/interfaces/frame/stacktraceLink.spec.tsx b/static/app/components/events/interfaces/frame/stacktraceLink.spec.tsx
index 8435b4d0436414..abda2cb351e295 100644
--- a/static/app/components/events/interfaces/frame/stacktraceLink.spec.tsx
+++ b/static/app/components/events/interfaces/frame/stacktraceLink.spec.tsx
@@ -218,10 +218,17 @@ describe('StacktraceLink', function () {
context: TestStubs.routerContext(),
organization,
});
+
expect(await screen.findByText('View Coverage Tests on Codecov')).toHaveAttribute(
'href',
'https://app.codecov.io/gh/path/to/file.py'
);
+
+ userEvent.click(await screen.findByText('View Coverage Tests on Codecov'));
+ expect(analyticsSpy).toHaveBeenCalledWith(
+ 'integrations.stacktrace_codecov_link_clicked',
+ expect.anything()
+ );
});
it('renders the missing coverage warning', async function () {
diff --git a/static/app/components/events/interfaces/frame/stacktraceLink.tsx b/static/app/components/events/interfaces/frame/stacktraceLink.tsx
index f9455f01c8ccee..046218f0a63a9e 100644
--- a/static/app/components/events/interfaces/frame/stacktraceLink.tsx
+++ b/static/app/components/events/interfaces/frame/stacktraceLink.tsx
@@ -87,7 +87,7 @@ function StacktraceLinkSetup({organization, project, event}: StacktraceLinkSetup
}
);
- trackIntegrationAnalytics('integrations.stacktrace_link_cta_dismissed', {
+ trackIntegrationAnalytics(StacktraceLinkEvents.DISMISS_CTA, {
view: 'stacktrace_issue_details',
organization,
...getAnalyicsDataForEvent(event),
@@ -124,11 +124,18 @@ function shouldshowCodecovFeatures(
}
interface CodecovLinkProps {
+ event: Event;
+ organization: Organization;
codecovStatusCode?: CodecovStatusCode;
codecovUrl?: string;
}
-function CodecovLink({codecovUrl, codecovStatusCode}: CodecovLinkProps) {
+function CodecovLink({
+ codecovUrl,
+ codecovStatusCode,
+ organization,
+ event,
+}: CodecovLinkProps) {
if (codecovStatusCode === CodecovStatusCode.NO_COVERAGE_DATA) {
return (
<CodecovWarning>
@@ -142,8 +149,17 @@ function CodecovLink({codecovUrl, codecovStatusCode}: CodecovLinkProps) {
if (!codecovUrl) {
return null;
}
+
+ const onOpenCodecovLink = () => {
+ trackIntegrationAnalytics(StacktraceLinkEvents.CODECOV_LINK_CLICKED, {
+ view: 'stacktrace_issue_details',
+ organization,
+ ...getAnalyicsDataForEvent(event),
+ });
+ };
+
return (
- <OpenInLink href={codecovUrl} openInNewTab>
+ <OpenInLink href={codecovUrl} openInNewTab onClick={onOpenCodecovLink}>
{t('View Coverage Tests on Codecov')}
<StyledIconWrapper>{getIntegrationIcon('codecov', 'sm')}</StyledIconWrapper>
</OpenInLink>
@@ -208,7 +224,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) {
return;
}
- trackIntegrationAnalytics('integrations.stacktrace_link_viewed', {
+ trackIntegrationAnalytics(StacktraceLinkEvents.LINK_VIEWED, {
view: 'stacktrace_issue_details',
organization,
platform: project?.platform,
@@ -279,6 +295,8 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) {
<CodecovLink
codecovUrl={match.codecovUrl}
codecovStatusCode={match.codecovStatusCode}
+ organization={organization}
+ event={event}
/>
)}
</CodeMappingButtonContainer>
@@ -314,7 +332,7 @@ export function StacktraceLink({frame, event, line}: StacktraceLinkProps) {
}
onClick={() => {
trackIntegrationAnalytics(
- 'integrations.stacktrace_start_setup',
+ StacktraceLinkEvents.START_SETUP,
{
view: 'stacktrace_issue_details',
platform: event.platform,
diff --git a/static/app/utils/analytics/integrations/stacktraceLinkAnalyticsEvents.ts b/static/app/utils/analytics/integrations/stacktraceLinkAnalyticsEvents.ts
index e6af5c20f68f6e..9ca8fd2c91f155 100644
--- a/static/app/utils/analytics/integrations/stacktraceLinkAnalyticsEvents.ts
+++ b/static/app/utils/analytics/integrations/stacktraceLinkAnalyticsEvents.ts
@@ -13,6 +13,7 @@ export enum StacktraceLinkEvents {
START_SETUP = 'integrations.stacktrace_start_setup',
SUBMIT = 'integrations.stacktrace_submit_config',
LINK_VIEWED = 'integrations.stacktrace_link_viewed',
+ CODECOV_LINK_CLICKED = 'integrations.stacktrace_codecov_link_clicked',
}
// This type allows analytics functions to use the string literal or enum.KEY
@@ -39,4 +40,6 @@ export const stacktraceLinkEventMap: Record<StacktraceLinkEventsLiterals, string
[StacktraceLinkEvents.MANUAL_OPTION]: 'Integrations: Stacktrace Manual Option Clicked',
[StacktraceLinkEvents.START_SETUP]: 'Integrations: Stacktrace Start Setup',
[StacktraceLinkEvents.SUBMIT]: 'Integrations: Stacktrace Submit Config',
+ [StacktraceLinkEvents.CODECOV_LINK_CLICKED]:
+ 'Integrations: Stacktrace Codecov Link Clicked',
};
diff --git a/static/app/utils/analytics/workflowAnalyticsEvents.tsx b/static/app/utils/analytics/workflowAnalyticsEvents.tsx
index bc4e38c5c7494f..a8b480bcdafd56 100644
--- a/static/app/utils/analytics/workflowAnalyticsEvents.tsx
+++ b/static/app/utils/analytics/workflowAnalyticsEvents.tsx
@@ -76,6 +76,7 @@ export type TeamInsightsEventParameters = {
'issue_details.attachment_tab.screenshot_modal_download': {};
'issue_details.attachment_tab.screenshot_modal_opened': {};
'issue_details.attachment_tab.screenshot_title_clicked': {};
+ 'issue_details.codecov_link_clicked': {};
'issue_details.event_json_clicked': {group_id: number};
'issue_details.event_navigation_clicked': {button: string; project_id: number};
'issue_details.issue_tab.screenshot_dropdown_deleted': {};
@@ -153,4 +154,5 @@ export const workflowEventMap: Record<TeamInsightsEventKey, string | null> = {
'project_detail.performance_tour.close': 'Project Detail: Performance Tour Close',
'project_detail.releases_tour.advance': 'Project Detail: Releases Tour Advance',
'project_detail.releases_tour.close': 'Project Detail: Releases Tour Close',
+ 'issue_details.codecov_link_clicked': 'Issue Details: Codecov Link Clicked',
};
|
6347a8e521dd304ca95b08782dc2c7cf3544e1b7
|
2023-01-19 01:44:59
|
Evan Purkhiser
|
fix(ui): Correct breadcrumb scrollbar header padding (#43395)
| false
|
Correct breadcrumb scrollbar header padding (#43395)
|
fix
|
diff --git a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx
index 42e77f59318e51..e0638e6a1ea4c7 100644
--- a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx
+++ b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.tsx
@@ -167,6 +167,11 @@ const StyledPanelTable = styled(PanelTable)<{scrollbarSize: number}>`
}
}
+ /* Scroll bar header */
+ :nth-child(6) {
+ padding: 0;
+ }
+
/* Content */
:nth-child(n + 7) {
grid-column: 1/-1;
@@ -196,8 +201,6 @@ const StyledPanelTable = styled(PanelTable)<{scrollbarSize: number}>`
}
}
}
-
- overflow: hidden;
`;
const Time = styled('div')`
|
25a88ba34ad2fc7682ccfdcf80184c27a802b111
|
2024-03-02 02:03:56
|
Michelle Zhang
|
feat(feedback): add LazyServiceWrapper for spam detection (#66119)
| false
|
add LazyServiceWrapper for spam detection (#66119)
|
feat
|
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index 4b4b59b67bc93e..f6a0734f0f5f03 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -2338,6 +2338,11 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_CHART_RENDERER = "sentry.charts.chartcuterie.Chartcuterie"
SENTRY_CHART_RENDERER_OPTIONS: dict[str, Any] = {}
+# User Feedback Spam Detection
+SENTRY_USER_FEEDBACK_SPAM = "sentry.feedback.spam.stub.StubFeedbackSpamDetection"
+SENTRY_USER_FEEDBACK_SPAM_OPTIONS: dict[str, str] = {}
+
+
# URI Prefixes for generating DSN URLs
# (Defaults to URL_PREFIX by default)
SENTRY_ENDPOINT: str | None = None
diff --git a/src/sentry/feedback/spam/__init__.py b/src/sentry/feedback/spam/__init__.py
new file mode 100644
index 00000000000000..31829c5cc1c7be
--- /dev/null
+++ b/src/sentry/feedback/spam/__init__.py
@@ -0,0 +1,13 @@
+from django.conf import settings
+
+from sentry.utils.services import LazyServiceWrapper
+
+from .base import FeedbackSpamDetectionBase
+
+backend = LazyServiceWrapper(
+ FeedbackSpamDetectionBase,
+ settings.SENTRY_USER_FEEDBACK_SPAM,
+ settings.SENTRY_USER_FEEDBACK_SPAM_OPTIONS,
+)
+
+backend.expose(locals())
diff --git a/src/sentry/feedback/spam/base.py b/src/sentry/feedback/spam/base.py
new file mode 100644
index 00000000000000..19760a23575c21
--- /dev/null
+++ b/src/sentry/feedback/spam/base.py
@@ -0,0 +1,9 @@
+from sentry.utils.services import Service
+
+
+class FeedbackSpamDetectionBase(Service):
+ def __init__(self, **options):
+ pass
+
+ def spam_detection(self, text: str):
+ raise NotImplementedError
diff --git a/src/sentry/feedback/spam/stub.py b/src/sentry/feedback/spam/stub.py
new file mode 100644
index 00000000000000..dd8b0a34884c7d
--- /dev/null
+++ b/src/sentry/feedback/spam/stub.py
@@ -0,0 +1,9 @@
+from sentry.feedback.spam.base import FeedbackSpamDetectionBase
+
+
+class StubFeedbackSpamDetection(FeedbackSpamDetectionBase):
+ def __init__(self, **options):
+ pass
+
+ def spam_detection(self, text):
+ return False
diff --git a/tests/sentry/feedback/spam/test_stub.py b/tests/sentry/feedback/spam/test_stub.py
new file mode 100644
index 00000000000000..d5d22487e9b0e2
--- /dev/null
+++ b/tests/sentry/feedback/spam/test_stub.py
@@ -0,0 +1,9 @@
+from fixtures.sudo_testutils import BaseTestCase
+from sentry.feedback.spam.stub import StubFeedbackSpamDetection
+
+
+class TestStubFeedbackSpamDetection(BaseTestCase):
+ def test_spam_detection(self):
+ stub = StubFeedbackSpamDetection()
+ res = stub.spam_detection("great website!")
+ assert res is False
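
For orientation, the LazyServiceWrapper configured above is a service locator: the dotted path in SENTRY_USER_FEEDBACK_SPAM is resolved to a class at first use, so a deployment can swap the stub for a real detector without touching call sites. A rough sketch of just the resolution step, under the assumption that the named class accepts its options as keyword arguments (the real wrapper in sentry.utils.services does more, e.g. lazy attribute proxying):

from importlib import import_module


def resolve_backend(dotted_path, **options):
    # Split "pkg.module.ClassName" into module path and class name, then instantiate.
    module_path, _, class_name = dotted_path.rpartition(".")
    cls = getattr(import_module(module_path), class_name)
    return cls(**options)


# With the default setting this yields the stub, which never flags anything as spam:
backend = resolve_backend("sentry.feedback.spam.stub.StubFeedbackSpamDetection")
assert backend.spam_detection("great website!") is False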
|
759185e5a8bab68f69418e218a3eeb8e847015f1
|
2023-04-26 20:09:16
|
Priscila Oliveira
|
ref(unit-test): Replace RTL query according to the recommended list (#47970)
| false
|
Replace RTL query according to the recommended list (#47970)
|
ref
|
diff --git a/static/app/views/settings/project/projectKeys/details/loaderSettings.spec.tsx b/static/app/views/settings/project/projectKeys/details/loaderSettings.spec.tsx
index e8984de73ca2da..90ecca6dc9d551 100644
--- a/static/app/views/settings/project/projectKeys/details/loaderSettings.spec.tsx
+++ b/static/app/views/settings/project/projectKeys/details/loaderSettings.spec.tsx
@@ -224,8 +224,9 @@ describe('Loader Script Settings', function () {
}
}
- const infos = screen.getAllByText('Only available in SDK version 7.x and above');
- expect(infos.length).toBe(2);
+ expect(
+ screen.getAllByText('Only available in SDK version 7.x and above')
+ ).toHaveLength(2);
});
it('shows replay message when it is enabled', function () {
@@ -259,7 +260,7 @@ describe('Loader Script Settings', function () {
);
expect(
- screen.queryByText(
+ screen.getByText(
'When using Replay, the loader will load the ES6 bundle instead of the ES5 bundle.'
)
).toBeInTheDocument();
|
c599e2777d852ab7b5cac8fbd62b85c47bdfebe0
|
2024-03-11 20:58:19
|
Jonas
|
feat(trace): use textContent setter and avoid html parsing (#66663)
| false
|
use textContent setter and avoid html parsing (#66663)
|
feat
|
diff --git a/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx b/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx
index f40a23c4957710..9707c634936648 100644
--- a/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx
+++ b/static/app/views/performance/newTraceDetails/virtualizedViewManager.tsx
@@ -1145,7 +1145,7 @@ export class VirtualizedViewManager {
const label = indicator.children[0] as HTMLElement | undefined;
if (label) {
- label.innerHTML = getDuration(interval / 1000, 2, true);
+ label.textContent = getDuration(interval / 1000, 2, true);
}
}
}
|
207ca8dacc7db72a54f7f774f597c7f2f8d903b3
|
2023-05-09 02:31:38
|
Ryan Skonnord
|
ref(hc): Split HC services' init modules (#48671)
| false
|
Split HC services' init modules (#48671)
|
ref
|
diff --git a/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py b/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py
index 800439494606f4..7235d3cb485643 100644
--- a/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py
+++ b/src/sentry/api/endpoints/integrations/sentry_apps/installation/external_issue/actions.py
@@ -7,7 +7,7 @@
from sentry.mediators.external_issues import IssueLinkCreator
from sentry.models import Group, Project
from sentry.models.user import User
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.utils.functional import extract_lazy_object
diff --git a/src/sentry/auth/access.py b/src/sentry/auth/access.py
index d1236099970d32..c6f4dff698b922 100644
--- a/src/sentry/auth/access.py
+++ b/src/sentry/auth/access.py
@@ -43,7 +43,7 @@
RpcUserOrganizationContext,
organization_service,
)
-from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+from sentry.services.hybrid_cloud.organization.serial import summarize_member
from sentry.services.hybrid_cloud.user import RpcUser, user_service
from sentry.utils import metrics
from sentry.utils.request_cache import request_cache
@@ -604,7 +604,7 @@ def __init__(
auth_state = auth_service.get_user_auth_state(
organization_id=member.organization_id,
is_superuser=False,
- org_member=DatabaseBackedOrganizationService.summarize_member(member),
+ org_member=summarize_member(member),
user_id=member.user_id,
)
sso_state = auth_state.sso_state
@@ -997,11 +997,7 @@ def from_request(
user_id=request.user.id,
organization_id=organization.id,
is_superuser=is_superuser,
- org_member=(
- DatabaseBackedOrganizationService.summarize_member(member)
- if member is not None
- else None
- ),
+ org_member=(summarize_member(member) if member is not None else None),
).sso_state
return OrganizationGlobalAccess(
diff --git a/src/sentry/auth/helper.py b/src/sentry/auth/helper.py
index 97eefc811d036e..73e568e5979959 100644
--- a/src/sentry/auth/helper.py
+++ b/src/sentry/auth/helper.py
@@ -41,7 +41,7 @@
RpcOrganizationMember,
organization_service,
)
-from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+from sentry.services.hybrid_cloud.organization.serial import serialize_organization
from sentry.signals import sso_enabled, user_signup
from sentry.tasks.auth import email_missing_links
from sentry.utils import auth, json, metrics
@@ -737,7 +737,7 @@ def finish_pipeline(self) -> HttpResponseBase:
def auth_handler(self, identity: Mapping[str, Any]) -> AuthIdentityHandler:
# This is a temporary step to keep test_helper integrated
# TODO: Move this conversion further upstream
- rpc_org = DatabaseBackedOrganizationService.serialize_organization(self.organization)
+ rpc_org = serialize_organization(self.organization)
return AuthIdentityHandler(
self.provider_model, self.provider, rpc_org, self.request, identity
diff --git a/src/sentry/receivers/outbox/region.py b/src/sentry/receivers/outbox/region.py
index 0d86bea0b863ab..9f7b31e5382ae7 100644
--- a/src/sentry/receivers/outbox/region.py
+++ b/src/sentry/receivers/outbox/region.py
@@ -23,8 +23,8 @@
from sentry.services.hybrid_cloud.identity import identity_service
from sentry.services.hybrid_cloud.log import AuditLogEvent, UserIpEvent
from sentry.services.hybrid_cloud.log.impl import DatabaseBackedLogService
-from sentry.services.hybrid_cloud.organization_mapping import (
- organization_mapping_service,
+from sentry.services.hybrid_cloud.organization_mapping import organization_mapping_service
+from sentry.services.hybrid_cloud.organization_mapping.serial import (
update_organization_mapping_from_instance,
)
from sentry.services.hybrid_cloud.organizationmember_mapping import (
diff --git a/src/sentry/rules/actions/notify_event_service.py b/src/sentry/rules/actions/notify_event_service.py
index e8ad5866df2e4c..bd7edef839a3a9 100644
--- a/src/sentry/rules/actions/notify_event_service.py
+++ b/src/sentry/rules/actions/notify_event_service.py
@@ -19,7 +19,7 @@
from sentry.rules.base import CallbackFuture
from sentry.services.hybrid_cloud.app import RpcSentryAppService, app_service
from sentry.services.hybrid_cloud.integration import integration_service
-from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+from sentry.services.hybrid_cloud.organization.serial import serialize_organization
from sentry.tasks.sentry_apps import notify_sentry_app
from sentry.utils import metrics
from sentry.utils.safe import safe_execute
@@ -63,7 +63,7 @@ def send_incident_alert_notification(
fire. If not provided we'll attempt to calculate this ourselves.
:return:
"""
- organization = DatabaseBackedOrganizationService.serialize_organization(incident.organization)
+ organization = serialize_organization(incident.organization)
incident_attachment = build_incident_attachment(incident, new_status, metric_value)
integration_service.send_incident_alert_notification(
diff --git a/src/sentry/sentry_apps/components.py b/src/sentry/sentry_apps/components.py
index c422c2e455b1ee..fb6f830355369f 100644
--- a/src/sentry/sentry_apps/components.py
+++ b/src/sentry/sentry_apps/components.py
@@ -10,7 +10,7 @@
from sentry.mediators.external_requests import SelectRequester
from sentry.models import SentryAppComponent, SentryAppInstallation
-from sentry.services.hybrid_cloud.app import app_service
+from sentry.services.hybrid_cloud.app.serial import serialize_sentry_app_installation
from sentry.utils import json
@@ -101,9 +101,7 @@ def _get_select_choices(
field.update(self._request(field["uri"], dependent_data=dependant_data))
def _request(self, uri: str, dependent_data: str | None = None) -> Any:
- install = app_service.serialize_sentry_app_installation(
- self.install, self.install.sentry_app
- )
+ install = serialize_sentry_app_installation(self.install, self.install.sentry_app)
return SelectRequester.run(
install=install,
project_slug=self.project_slug,
diff --git a/src/sentry/services/hybrid_cloud/actor.py b/src/sentry/services/hybrid_cloud/actor.py
index 47a1b13119a8a7..d0226f3026a244 100644
--- a/src/sentry/services/hybrid_cloud/actor.py
+++ b/src/sentry/services/hybrid_cloud/actor.py
@@ -1,6 +1,6 @@
# Please do not use
# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
+# in modules such as this one where hybrid cloud data models or service classes are
# defined, because we want to reflect on type annotations and avoid forward references.
from enum import Enum
diff --git a/src/sentry/services/hybrid_cloud/app/__init__.py b/src/sentry/services/hybrid_cloud/app/__init__.py
index 5412ff0f035734..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/app/__init__.py
+++ b/src/sentry/services/hybrid_cloud/app/__init__.py
@@ -1,289 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-import abc
-import datetime
-import hmac
-from dataclasses import dataclass
-from hashlib import sha256
-from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Protocol, cast
-
-from pydantic.fields import Field
-from typing_extensions import TypedDict
-
-from sentry.constants import SentryAppInstallationStatus, SentryAppStatus
-from sentry.models import SentryApp, SentryAppInstallation
-from sentry.models.apiapplication import ApiApplication
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.filter_query import OpaqueSerializedResponse
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.services.hybrid_cloud.user import RpcUser
-from sentry.silo import SiloMode
-
-if TYPE_CHECKING:
- from sentry.mediators.external_requests.alert_rule_action_requester import AlertRuleActionResult
- from sentry.services.hybrid_cloud.auth import AuthenticationContext
-
-
-class RpcApiApplication(RpcModel):
- id: int = -1
- client_id: str = ""
- client_secret: str = ""
-
-
-class RpcSentryAppService(RpcModel):
- """
- A `SentryAppService` (a notification service) wrapped up and serializable via the
- rpc interface.
- """
-
- title: str = ""
- slug: str = ""
- service_type: str = "sentry_app"
-
-
-class RpcSentryApp(RpcModel):
- id: int = -1
- scope_list: List[str] = Field(default_factory=list)
- application_id: int = -1
- application: RpcApiApplication = Field(default_factory=RpcApiApplication)
- proxy_user_id: Optional[int] = None # can be null on deletion.
- owner_id: int = -1 # relation to an organization
- name: str = ""
- slug: str = ""
- uuid: str = ""
- events: List[str] = Field(default_factory=list)
- webhook_url: Optional[str] = None
- is_published: bool = False
- is_unpublished: bool = False
- is_internal: bool = True
- is_publish_request_inprogress: bool = False
- status: str = ""
-
- def show_auth_info(self, access: Any) -> bool:
- encoded_scopes = set({"%s" % scope for scope in list(access.scopes)})
- return set(self.scope_list).issubset(encoded_scopes)
-
- def build_signature(self, body: str) -> str:
- secret = self.application.client_secret
- return hmac.new(
- key=secret.encode("utf-8"), msg=body.encode("utf-8"), digestmod=sha256
- ).hexdigest()
-
- # Properties are copied from the sentry app ORM model.
- @property
- def slug_for_metrics(self) -> str:
- if self.is_internal:
- return "internal"
- if self.is_unpublished:
- return "unpublished"
- return self.slug
-
-
-class RpcSentryAppInstallation(RpcModel):
- id: int = -1
- organization_id: int = -1
- status: int = SentryAppInstallationStatus.PENDING
- sentry_app: RpcSentryApp = Field(default_factory=lambda: RpcSentryApp())
- date_deleted: Optional[datetime.datetime] = None
- uuid: str = ""
-
-
-class RpcSentryAppComponent(RpcModel):
- uuid: str = ""
- sentry_app_id: int = -1
- type: str = ""
- app_schema: Mapping[str, Any] = Field(default_factory=dict)
-
-
-class SentryAppEventDataInterface(Protocol):
- """
- Protocol making RpcSentryAppEvents capable of consuming from various sources, keeping only
- the minimum required properties.
- """
-
- id: str
- label: str
-
- @property
- def actionType(self) -> str:
- pass
-
- def is_enabled(self) -> bool:
- pass
-
-
-@dataclass # TODO: Make compatible with RpcModel
-class RpcSentryAppEventData(SentryAppEventDataInterface):
- id: str = ""
- label: str = ""
- action_type: str = ""
- enabled: bool = True
-
- @property
- def actionType(self) -> str:
- return self.action_type
-
- def is_enabled(self) -> bool:
- return self.enabled
-
- @classmethod
- def from_event(cls, data_interface: SentryAppEventDataInterface) -> "RpcSentryAppEventData":
- return RpcSentryAppEventData(
- id=data_interface.id,
- label=data_interface.label,
- action_type=data_interface.actionType,
- enabled=data_interface.is_enabled(),
- )
-
-
-class SentryAppInstallationFilterArgs(TypedDict, total=False):
- installation_ids: List[int]
- app_ids: List[int]
- organization_id: int
- uuids: List[str]
-
-
-class AppService(RpcService):
- key = "app"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.app.impl import DatabaseBackedAppService
-
- return DatabaseBackedAppService()
-
- @rpc_method
- @abc.abstractmethod
- def serialize_many(
- self,
- *,
- filter: SentryAppInstallationFilterArgs,
- as_user: Optional[RpcUser] = None,
- auth_context: Optional["AuthenticationContext"] = None,
- ) -> List[OpaqueSerializedResponse]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_many(
- self, *, filter: SentryAppInstallationFilterArgs
- ) -> List[RpcSentryAppInstallation]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def find_installation_by_proxy_user(
- self, *, proxy_user_id: int, organization_id: int
- ) -> Optional[RpcSentryAppInstallation]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_installed_for_organization(
- self,
- *,
- organization_id: int,
- ) -> List[RpcSentryAppInstallation]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_sentry_app_by_slug(self, *, slug: str) -> Optional[RpcSentryApp]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def find_alertable_services(self, *, organization_id: int) -> List[RpcSentryAppService]:
- pass
-
- @classmethod
- def serialize_sentry_app(cls, app: SentryApp) -> RpcSentryApp:
- return RpcSentryApp(
- id=app.id,
- scope_list=app.scope_list,
- application_id=app.application_id,
- application=cls.serialize_api_application(app.application),
- proxy_user_id=app.proxy_user_id,
- owner_id=app.owner_id,
- name=app.name,
- slug=app.slug,
- uuid=app.uuid,
- events=app.events,
- webhook_url=app.webhook_url,
- is_published=app.status == SentryAppStatus.PUBLISHED,
- is_unpublished=app.status == SentryAppStatus.UNPUBLISHED,
- is_internal=app.status == SentryAppStatus.INTERNAL,
- is_publish_request_inprogress=app.status == SentryAppStatus.PUBLISH_REQUEST_INPROGRESS,
- status=app.status,
- )
-
- @classmethod
- def serialize_api_application(self, api_app: ApiApplication) -> RpcApiApplication:
- return RpcApiApplication(
- id=api_app.id,
- client_id=api_app.client_id,
- client_secret=api_app.client_secret,
- )
-
- @rpc_method
- @abc.abstractmethod
- def find_service_hook_sentry_app(self, *, api_application_id: int) -> Optional[RpcSentryApp]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_custom_alert_rule_actions(
- self,
- *,
- event_data: RpcSentryAppEventData,
- organization_id: int,
- project_slug: Optional[str],
- ) -> List[Mapping[str, Any]]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def find_app_components(self, *, app_id: int) -> List[RpcSentryAppComponent]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_related_sentry_app_components(
- self,
- *,
- organization_ids: List[int],
- sentry_app_ids: List[int],
- type: str,
- group_by: str = "sentry_app_id",
- ) -> Mapping[str, Any]:
- pass
-
- @classmethod
- def serialize_sentry_app_installation(
- cls, installation: SentryAppInstallation, app: Optional[SentryApp] = None
- ) -> RpcSentryAppInstallation:
- if app is None:
- app = installation.sentry_app
-
- return RpcSentryAppInstallation(
- id=installation.id,
- organization_id=installation.organization_id,
- status=installation.status,
- sentry_app=cls.serialize_sentry_app(app),
- date_deleted=installation.date_deleted,
- uuid=installation.uuid,
- )
-
- @rpc_method
- @abc.abstractmethod
- def trigger_sentry_app_action_creators(
- self, *, fields: List[Mapping[str, Any]], install_uuid: Optional[str]
- ) -> "AlertRuleActionResult":
- pass
-
-
-app_service = cast(AppService, AppService.create_delegation())
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/app/impl.py b/src/sentry/services/hybrid_cloud/app/impl.py
index 21a34e821300d0..f93c172ade9ea7 100644
--- a/src/sentry/services/hybrid_cloud/app/impl.py
+++ b/src/sentry/services/hybrid_cloud/app/impl.py
@@ -19,6 +19,10 @@
RpcSentryAppService,
SentryAppInstallationFilterArgs,
)
+from sentry.services.hybrid_cloud.app.serial import (
+ serialize_sentry_app,
+ serialize_sentry_app_installation,
+)
from sentry.services.hybrid_cloud.auth import AuthenticationContext
from sentry.services.hybrid_cloud.filter_query import (
FilterQueryDatabaseImpl,
@@ -56,7 +60,7 @@ def find_app_components(self, *, app_id: int) -> List[RpcSentryAppComponent]:
def get_sentry_app_by_slug(self, *, slug: str) -> Optional[RpcSentryApp]:
try:
sentry_app = SentryApp.objects.get(slug=slug)
- return self.serialize_sentry_app(sentry_app)
+ return serialize_sentry_app(sentry_app)
except SentryApp.DoesNotExist:
return None
@@ -156,7 +160,7 @@ def apply_filters(
return query
def serialize_rpc(self, object: SentryAppInstallation) -> RpcSentryAppInstallation:
- return AppService.serialize_sentry_app_installation(object)
+ return serialize_sentry_app_installation(object)
_FQ = _AppServiceFilterQuery()
@@ -175,7 +179,7 @@ def find_installation_by_proxy_user(
except SentryAppInstallation.DoesNotExist:
return None
- return self.serialize_sentry_app_installation(installation, sentry_app)
+ return serialize_sentry_app_installation(installation, sentry_app)
def trigger_sentry_app_action_creators(
self, *, fields: List[Mapping[str, Any]], install_uuid: str | None
@@ -192,9 +196,7 @@ def trigger_sentry_app_action_creators(
def find_service_hook_sentry_app(self, *, api_application_id: int) -> Optional[RpcSentryApp]:
try:
- return self.serialize_sentry_app(
- SentryApp.objects.get(application_id=api_application_id)
- )
+ return serialize_sentry_app(SentryApp.objects.get(application_id=api_application_id))
except SentryApp.DoesNotExist:
return None
diff --git a/src/sentry/services/hybrid_cloud/app/model.py b/src/sentry/services/hybrid_cloud/app/model.py
new file mode 100644
index 00000000000000..d590d2513361c9
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/app/model.py
@@ -0,0 +1,135 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import datetime
+import hmac
+from dataclasses import dataclass
+from hashlib import sha256
+from typing import Any, List, Mapping, Optional, Protocol
+
+from pydantic.fields import Field
+from typing_extensions import TypedDict
+
+from sentry.constants import SentryAppInstallationStatus
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcApiApplication(RpcModel):
+ id: int = -1
+ client_id: str = ""
+ client_secret: str = ""
+
+
+class RpcSentryAppService(RpcModel):
+ """
+ A `SentryAppService` (a notification service) wrapped up and serializable via the
+ rpc interface.
+ """
+
+ title: str = ""
+ slug: str = ""
+ service_type: str = "sentry_app"
+
+
+class RpcSentryApp(RpcModel):
+ id: int = -1
+ scope_list: List[str] = Field(default_factory=list)
+ application_id: int = -1
+ application: RpcApiApplication = Field(default_factory=RpcApiApplication)
+ proxy_user_id: Optional[int] = None # can be null on deletion.
+ owner_id: int = -1 # relation to an organization
+ name: str = ""
+ slug: str = ""
+ uuid: str = ""
+ events: List[str] = Field(default_factory=list)
+ webhook_url: Optional[str] = None
+ is_published: bool = False
+ is_unpublished: bool = False
+ is_internal: bool = True
+ is_publish_request_inprogress: bool = False
+ status: str = ""
+
+ def show_auth_info(self, access: Any) -> bool:
+ encoded_scopes = set({"%s" % scope for scope in list(access.scopes)})
+ return set(self.scope_list).issubset(encoded_scopes)
+
+ def build_signature(self, body: str) -> str:
+ secret = self.application.client_secret
+ return hmac.new(
+ key=secret.encode("utf-8"), msg=body.encode("utf-8"), digestmod=sha256
+ ).hexdigest()
+
+ # Properties are copied from the sentry app ORM model.
+ @property
+ def slug_for_metrics(self) -> str:
+ if self.is_internal:
+ return "internal"
+ if self.is_unpublished:
+ return "unpublished"
+ return self.slug
+
+
+class RpcSentryAppInstallation(RpcModel):
+ id: int = -1
+ organization_id: int = -1
+ status: int = SentryAppInstallationStatus.PENDING
+ sentry_app: RpcSentryApp = Field(default_factory=lambda: RpcSentryApp())
+ date_deleted: Optional[datetime.datetime] = None
+ uuid: str = ""
+
+
+class RpcSentryAppComponent(RpcModel):
+ uuid: str = ""
+ sentry_app_id: int = -1
+ type: str = ""
+ app_schema: Mapping[str, Any] = Field(default_factory=dict)
+
+
+class SentryAppEventDataInterface(Protocol):
+ """
+ Protocol making RpcSentryAppEvents capable of consuming from various sources, keeping only
+ the minimum required properties.
+ """
+
+ id: str
+ label: str
+
+ @property
+ def actionType(self) -> str:
+ pass
+
+ def is_enabled(self) -> bool:
+ pass
+
+
+@dataclass # TODO: Make compatible with RpcModel
+class RpcSentryAppEventData(SentryAppEventDataInterface):
+ id: str = ""
+ label: str = ""
+ action_type: str = ""
+ enabled: bool = True
+
+ @property
+ def actionType(self) -> str:
+ return self.action_type
+
+ def is_enabled(self) -> bool:
+ return self.enabled
+
+ @classmethod
+ def from_event(cls, data_interface: SentryAppEventDataInterface) -> "RpcSentryAppEventData":
+ return RpcSentryAppEventData(
+ id=data_interface.id,
+ label=data_interface.label,
+ action_type=data_interface.actionType,
+ enabled=data_interface.is_enabled(),
+ )
+
+
+class SentryAppInstallationFilterArgs(TypedDict, total=False):
+ installation_ids: List[int]
+ app_ids: List[int]
+ organization_id: int
+ uuids: List[str]
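
The RpcSentryApp.build_signature method above (moved verbatim into model.py) produces an HMAC-SHA256 digest of the request body keyed by the app's client secret. A receiver can check it with the mirror-image computation; a hedged sketch follows, where the helper name and the constant-time compare are conventional choices, not taken from this diff:

import hmac
from hashlib import sha256


def verify_signature(body: str, received_signature: str, client_secret: str) -> bool:
    # Recompute the digest exactly as RpcSentryApp.build_signature produces it...
    expected = hmac.new(
        key=client_secret.encode("utf-8"), msg=body.encode("utf-8"), digestmod=sha256
    ).hexdigest()
    # ...then compare in constant time to avoid timing side channels.
    return hmac.compare_digest(expected, received_signature)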
diff --git a/src/sentry/services/hybrid_cloud/app/serial.py b/src/sentry/services/hybrid_cloud/app/serial.py
new file mode 100644
index 00000000000000..ab1821d6958308
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/app/serial.py
@@ -0,0 +1,54 @@
+from typing import Optional
+
+from sentry.constants import SentryAppStatus
+from sentry.models import ApiApplication, SentryApp, SentryAppInstallation
+from sentry.services.hybrid_cloud.app import (
+ RpcApiApplication,
+ RpcSentryApp,
+ RpcSentryAppInstallation,
+)
+
+
+def serialize_api_application(api_app: ApiApplication) -> RpcApiApplication:
+ return RpcApiApplication(
+ id=api_app.id,
+ client_id=api_app.client_id,
+ client_secret=api_app.client_secret,
+ )
+
+
+def serialize_sentry_app(app: SentryApp) -> RpcSentryApp:
+ return RpcSentryApp(
+ id=app.id,
+ scope_list=app.scope_list,
+ application_id=app.application_id,
+ application=serialize_api_application(app.application),
+ proxy_user_id=app.proxy_user_id,
+ owner_id=app.owner_id,
+ name=app.name,
+ slug=app.slug,
+ uuid=app.uuid,
+ events=app.events,
+ webhook_url=app.webhook_url,
+ is_published=app.status == SentryAppStatus.PUBLISHED,
+ is_unpublished=app.status == SentryAppStatus.UNPUBLISHED,
+ is_internal=app.status == SentryAppStatus.INTERNAL,
+ is_publish_request_inprogress=app.status == SentryAppStatus.PUBLISH_REQUEST_INPROGRESS,
+ status=app.status,
+ )
+
+
+def serialize_sentry_app_installation(
+ installation: SentryAppInstallation, app: Optional[SentryApp] = None
+) -> RpcSentryAppInstallation:
+ if app is None:
+ app = installation.sentry_app
+
+ return RpcSentryAppInstallation(
+ id=installation.id,
+ organization_id=installation.organization_id,
+ status=installation.status,
+ sentry_app=serialize_sentry_app(app),
+ date_deleted=installation.date_deleted,
+ uuid=installation.uuid,
+ )
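
The practical effect of serial.py is that call sites import the serializer functions directly instead of reaching through the service class, which is exactly what the impl.py and components.py hunks in this commit switch to. Roughly, with `installation` as an illustrative SentryAppInstallation instance:

# Before: classmethod access via the service object.
# install = app_service.serialize_sentry_app_installation(installation, installation.sentry_app)

# After: a plain module-level function, importable without the RPC service machinery.
from sentry.services.hybrid_cloud.app.serial import serialize_sentry_app_installation

install = serialize_sentry_app_installation(installation)  # app defaults to installation.sentry_app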
diff --git a/src/sentry/services/hybrid_cloud/app/service.py b/src/sentry/services/hybrid_cloud/app/service.py
new file mode 100644
index 00000000000000..69d3e27eaf2b7d
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/app/service.py
@@ -0,0 +1,122 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import abc
+from typing import TYPE_CHECKING, Any, List, Mapping, Optional, cast
+
+from sentry.services.hybrid_cloud.app import (
+ RpcSentryApp,
+ RpcSentryAppComponent,
+ RpcSentryAppEventData,
+ RpcSentryAppInstallation,
+ RpcSentryAppService,
+ SentryAppInstallationFilterArgs,
+)
+from sentry.services.hybrid_cloud.filter_query import OpaqueSerializedResponse
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.services.hybrid_cloud.user import RpcUser
+from sentry.silo import SiloMode
+
+if TYPE_CHECKING:
+ from sentry.mediators.external_requests.alert_rule_action_requester import AlertRuleActionResult
+ from sentry.services.hybrid_cloud.auth import AuthenticationContext
+
+
+class AppService(RpcService):
+ key = "app"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.app.impl import DatabaseBackedAppService
+
+ return DatabaseBackedAppService()
+
+ @rpc_method
+ @abc.abstractmethod
+ def serialize_many(
+ self,
+ *,
+ filter: SentryAppInstallationFilterArgs,
+ as_user: Optional[RpcUser] = None,
+ auth_context: Optional["AuthenticationContext"] = None,
+ ) -> List[OpaqueSerializedResponse]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_many(
+ self, *, filter: SentryAppInstallationFilterArgs
+ ) -> List[RpcSentryAppInstallation]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def find_installation_by_proxy_user(
+ self, *, proxy_user_id: int, organization_id: int
+ ) -> Optional[RpcSentryAppInstallation]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_installed_for_organization(
+ self,
+ *,
+ organization_id: int,
+ ) -> List[RpcSentryAppInstallation]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_sentry_app_by_slug(self, *, slug: str) -> Optional[RpcSentryApp]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def find_alertable_services(self, *, organization_id: int) -> List[RpcSentryAppService]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def find_service_hook_sentry_app(self, *, api_application_id: int) -> Optional[RpcSentryApp]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_custom_alert_rule_actions(
+ self,
+ *,
+ event_data: RpcSentryAppEventData,
+ organization_id: int,
+ project_slug: Optional[str],
+ ) -> List[Mapping[str, Any]]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def find_app_components(self, *, app_id: int) -> List[RpcSentryAppComponent]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_related_sentry_app_components(
+ self,
+ *,
+ organization_ids: List[int],
+ sentry_app_ids: List[int],
+ type: str,
+ group_by: str = "sentry_app_id",
+ ) -> Mapping[str, Any]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def trigger_sentry_app_action_creators(
+ self, *, fields: List[Mapping[str, Any]], install_uuid: Optional[str]
+ ) -> "AlertRuleActionResult":
+ pass
+
+
+app_service = cast(AppService, AppService.create_delegation())
diff --git a/src/sentry/services/hybrid_cloud/auth/__init__.py b/src/sentry/services/hybrid_cloud/auth/__init__.py
index 3d56a4af475474..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/auth/__init__.py
+++ b/src/sentry/services/hybrid_cloud/auth/__init__.py
@@ -1,385 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-import abc
-import base64
-import contextlib
-from dataclasses import dataclass, field
-from enum import IntEnum
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- Generator,
- List,
- Mapping,
- Optional,
- Tuple,
- Type,
- Union,
- cast,
-)
-
-from pydantic.fields import Field
-from rest_framework.authentication import BaseAuthentication
-from rest_framework.request import Request
-
-from sentry.relay.utils import get_header_relay_id, get_header_relay_signature
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.organization import (
- RpcOrganization,
- RpcOrganizationMember,
- RpcOrganizationMemberSummary,
-)
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.services.hybrid_cloud.user import RpcUser
-from sentry.silo import SiloMode
-
-if TYPE_CHECKING:
- from django.contrib.auth.models import AnonymousUser
-
-
-class RpcAuthenticatorType(IntEnum):
- API_KEY_AUTHENTICATION = 0
- TOKEN_AUTHENTICATION = 1
- SESSION_AUTHENTICATION = 2
-
- @classmethod
- def from_authenticator(
- self, auth: Type[BaseAuthentication]
- ) -> Optional["RpcAuthenticatorType"]:
- from sentry.api.authentication import ApiKeyAuthentication, TokenAuthentication
-
- if auth == ApiKeyAuthentication:
- return RpcAuthenticatorType.API_KEY_AUTHENTICATION
- if auth == TokenAuthentication:
- return RpcAuthenticatorType.TOKEN_AUTHENTICATION
- return None
-
- def as_authenticator(self) -> BaseAuthentication:
- from sentry.api.authentication import ApiKeyAuthentication, TokenAuthentication
-
- if self == self.API_KEY_AUTHENTICATION:
- return ApiKeyAuthentication()
- if self == self.TOKEN_AUTHENTICATION:
- return TokenAuthentication()
- else:
-            raise ValueError(f"{self!r} has no authenticator associated with it.")
-
-
-def _normalize_to_b64(input: Optional[Union[str, bytes]]) -> Optional[str]:
- if input is None:
- return None
- if isinstance(input, str):
- input = input.encode("utf8")
- return base64.b64encode(input).decode("utf8")
-
-
-class RpcAuthentication(BaseAuthentication): # type: ignore
- www_authenticate_realm = "api"
- types: List[RpcAuthenticatorType]
-
- def __init__(self, types: List[RpcAuthenticatorType]):
- self.types = types
-
- def authenticate(self, request: Request) -> Optional[Tuple[Any, Any]]:
- response = auth_service.authenticate_with(
- request=authentication_request_from(request), authenticator_types=self.types
- )
-
- if response.user is not None:
- return response.user, response.auth
-
- return None
-
-    # What does this do, you may ask? It tricks django rest_framework into returning the correct 401
-    # instead of 403 in unauthenticated cases, due to some deep library code nonsense. Tests fail if you remove it.
- # Otherwise, this authenticate header value means absolutely nothing to clients.
- def authenticate_header(self, request: Request) -> str:
- return 'xBasic realm="%s"' % self.www_authenticate_realm
-
-
-class RpcMemberSsoState(RpcModel):
- is_required: bool = False
- is_valid: bool = False
-
-
-class RpcAuthState(RpcModel):
- sso_state: RpcMemberSsoState
- permissions: List[str]
-
-
-@dataclass
-class AuthenticationRequest:
- # HTTP_X_SENTRY_RELAY_ID
- sentry_relay_id: Optional[str] = None
- # HTTP_X_SENTRY_RELAY_SIGNATURE
- sentry_relay_signature: Optional[str] = None
- backend: Optional[str] = None
- user_id: Optional[str] = None
- user_hash: Optional[str] = None
- nonce: Optional[str] = None
- remote_addr: Optional[str] = None
- signature: Optional[str] = None
- absolute_url: str = ""
- absolute_url_root: str = ""
- path: str = ""
- authorization_b64: Optional[str] = None
-
-
-def authentication_request_from(request: Request) -> AuthenticationRequest:
- from sentry.utils.linksign import find_signature
-
- return AuthenticationRequest(
- sentry_relay_id=get_header_relay_id(request),
- sentry_relay_signature=get_header_relay_signature(request),
- backend=request.session.get("_auth_user_backend", None),
- user_id=request.session.get("_auth_user_id", None),
- user_hash=request.session.get("_auth_user_hash", None),
- nonce=request.session.get("_nonce", None),
- remote_addr=request.META["REMOTE_ADDR"],
- signature=find_signature(request),
- absolute_url=request.build_absolute_uri(),
- absolute_url_root=request.build_absolute_uri("/"),
- path=request.path,
- authorization_b64=_normalize_to_b64(request.META.get("HTTP_AUTHORIZATION")),
- )
-
-
-@dataclass(eq=True)
-class AuthenticatedToken:
- allowed_origins: List[str] = field(default_factory=list)
- audit_log_data: Dict[str, Any] = field(default_factory=dict)
- scopes: List[str] = field(default_factory=list)
- entity_id: Optional[int] = None
- kind: str = "system"
- user_id: Optional[int] = None # only relevant for ApiToken
- organization_id: Optional[int] = None
- application_id: Optional[int] = None # only relevant for ApiToken
-
- @classmethod
- def from_token(cls, token: Any) -> Optional["AuthenticatedToken"]:
- if token is None:
- return None
-
- if isinstance(token, AuthenticatedToken):
- return token
-
- for kind, kind_cls in cls.get_kinds().items():
- if isinstance(token, kind_cls):
- break
- else:
- raise KeyError(f"Token {token} is a not a registered AuthenticatedToken type!")
-
- return cls(
- allowed_origins=token.get_allowed_origins(),
- scopes=token.get_scopes(),
- audit_log_data=token.get_audit_log_data(),
- entity_id=getattr(token, "id", None),
- kind=kind,
- user_id=getattr(token, "user_id", None),
- organization_id=getattr(token, "organization_id", None),
- application_id=getattr(token, "application_id", None),
- )
-
- @classmethod
- def get_kinds(cls) -> Mapping[str, Type[Any]]:
- return getattr(cls, "_kinds", {})
-
- @classmethod
- def register_kind(cls, kind_name: str, t: Type[Any]) -> None:
- kind_map = getattr(cls, "_kinds", {})
- if kind_name in kind_map:
- raise ValueError(f"Conflict detected, kind {kind_name} registered twice!")
- kind_map[kind_name] = t
- setattr(cls, "_kinds", kind_map)
-
- def get_audit_log_data(self) -> Mapping[str, Any]:
- return self.audit_log_data
-
- def get_allowed_origins(self) -> List[str]:
- return self.allowed_origins
-
- def get_scopes(self) -> List[str]:
- return self.scopes
-
- def has_scope(self, scope: str) -> bool:
- if self.kind == "system":
- return True
- return scope in self.get_scopes()
-
-
-@dataclass
-class AuthenticationContext:
- """
- The default of all values should be a valid, non authenticated context.
- """
-
- auth: Optional[AuthenticatedToken] = None
- user: Optional[RpcUser] = None
-
- def _get_user(self) -> Union[RpcUser, "AnonymousUser"]:
- """
- Helper function to avoid importing AnonymousUser when `applied_to_request` is run on startup
- """
- from django.contrib.auth.models import AnonymousUser
-
- return self.user or AnonymousUser()
-
- @contextlib.contextmanager
- def applied_to_request(self, request: Any = None) -> Generator[None, None, None]:
- """
- Some code still reaches for the global 'env' object when determining user or auth behaviors. This bleeds the
- current request context into that code, but makes it difficult to carry RPC authentication context in an
- isolated, controlled way. This method allows for a context handling an RPC or inter silo behavior to assume
- the correct user and auth context provided explicitly in a context.
- """
- from sentry.app import env
-
- if request is None:
- request = env.request
-
- if request is None:
- # Contexts that lack a request
-            # Note -- if a request is set up in the env after this context manager, you run the risk of bugs.
- yield
- return
-
- has_user = hasattr(request, "user")
- has_auth = hasattr(request, "auth")
-
- old_user = getattr(request, "user", None)
- old_auth = getattr(request, "auth", None)
- request.user = self._get_user()
- request.auth = self.auth
-
- try:
- yield
- finally:
- if has_user:
- request.user = old_user
- else:
- delattr(request, "user")
-
- if has_auth:
- request.auth = old_auth
- else:
- delattr(request, "auth")
-
-
-@dataclass
-class MiddlewareAuthenticationResponse(AuthenticationContext):
- expired: bool = False
- user_from_signed_request: bool = False
-
-
-class RpcAuthProviderFlags(RpcModel):
- allow_unlinked: bool = False
- scim_enabled: bool = False
-
-
-class RpcAuthProvider(RpcModel):
- id: int = -1
- organization_id: int = -1
- provider: str = ""
- flags: RpcAuthProviderFlags = Field(default_factory=lambda: RpcAuthProviderFlags())
-
- def __hash__(self) -> int:
- return hash((self.id, self.organization_id, self.provider))
-
-
-class RpcAuthIdentity(RpcModel):
- id: int = -1
- user_id: int = -1
- provider_id: int = -1
- ident: str = ""
-
-
-class RpcOrganizationAuthConfig(RpcModel):
- organization_id: int = -1
- auth_provider: Optional[RpcAuthProvider] = None
- has_api_key: bool = False
-
-
-class AuthService(RpcService):
- key = "auth"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.auth.impl import DatabaseBackedAuthService
-
- return DatabaseBackedAuthService()
-
- @rpc_method
- @abc.abstractmethod
- def authenticate(self, *, request: AuthenticationRequest) -> MiddlewareAuthenticationResponse:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def authenticate_with(
- self, *, request: AuthenticationRequest, authenticator_types: List[RpcAuthenticatorType]
- ) -> AuthenticationContext:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_org_auth_config(
- self, *, organization_ids: List[int]
- ) -> List[RpcOrganizationAuthConfig]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_user_auth_state(
- self,
- *,
- user_id: int,
- is_superuser: bool,
- organization_id: Optional[int],
- org_member: Optional[RpcOrganizationMemberSummary],
- ) -> RpcAuthState:
- pass
-
- # TODO: Denormalize this scim enabled flag onto organizations?
- # This is potentially a large list
- @rpc_method
- @abc.abstractmethod
- def get_org_ids_with_scim(self) -> List[int]:
- """
- This method returns a list of org ids that have scim enabled
- :return:
- """
- pass
-
- @rpc_method
- @abc.abstractmethod
- def get_auth_providers(self, *, organization_id: int) -> List[RpcAuthProvider]:
- """
- This method returns a list of auth providers for an org
- :return:
- """
- pass
-
- @rpc_method
- @abc.abstractmethod
- def handle_new_membership(
- self,
- *,
- request: Request,
- organization: RpcOrganization,
- auth_identity: RpcAuthIdentity,
- auth_provider: RpcAuthProvider,
- ) -> Tuple[RpcUser, RpcOrganizationMember]:
- pass
-
- @rpc_method
- @abc.abstractmethod
- def token_has_org_access(self, *, token: AuthenticatedToken, organization_id: int) -> bool:
- pass
-
-
-auth_service: AuthService = cast(AuthService, AuthService.create_delegation())
+from .model import * # noqa
+from .service import * # noqa
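
The star re-exports above keep the public import surface stable while the definitions move: existing `from sentry.services.hybrid_cloud.auth import ...` statements keep resolving, and new code can target the split modules directly. A sketch of both forms, assuming this commit is applied:

# Legacy-style imports still resolve through the __init__ re-exports:
from sentry.services.hybrid_cloud.auth import RpcAuthState, authentication_request_from

# New code can import from the split modules explicitly:
from sentry.services.hybrid_cloud.auth.model import RpcMemberSsoState
from sentry.services.hybrid_cloud.auth.service import auth_service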
diff --git a/src/sentry/services/hybrid_cloud/auth/impl.py b/src/sentry/services/hybrid_cloud/auth/impl.py
index 3934b815ff8cb1..4678901570cfc2 100644
--- a/src/sentry/services/hybrid_cloud/auth/impl.py
+++ b/src/sentry/services/hybrid_cloud/auth/impl.py
@@ -1,7 +1,7 @@
from __future__ import annotations
import base64
-from typing import List, Mapping, Tuple, cast
+from typing import List, Mapping, Tuple
from django.contrib.auth.models import AnonymousUser
from django.db.models import Count, F, Q
@@ -30,11 +30,11 @@
RpcAuthenticatorType,
RpcAuthIdentity,
RpcAuthProvider,
- RpcAuthProviderFlags,
RpcAuthState,
RpcMemberSsoState,
RpcOrganizationAuthConfig,
)
+from sentry.services.hybrid_cloud.auth.serial import serialize_auth_provider
from sentry.services.hybrid_cloud.organization import (
RpcOrganization,
RpcOrganizationMember,
@@ -42,9 +42,9 @@
RpcOrganizationMemberSummary,
organization_service,
)
-from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+from sentry.services.hybrid_cloud.organization.serial import serialize_member
from sentry.services.hybrid_cloud.user import RpcUser
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.silo import SiloMode
from sentry.utils.auth import AuthUserPasswordExpired
from sentry.utils.types import Any
@@ -131,20 +131,6 @@ def get_user_ids(org_id: int, mem_id: int) -> Any:
class DatabaseBackedAuthService(AuthService):
- def _serialize_auth_provider_flags(self, ap: AuthProvider) -> RpcAuthProviderFlags:
- return cast(
- RpcAuthProviderFlags,
- RpcAuthProviderFlags.serialize_by_field_name(ap.flags, value_transform=bool),
- )
-
- def _serialize_auth_provider(self, ap: AuthProvider) -> RpcAuthProvider:
- return RpcAuthProvider(
- id=ap.id,
- organization_id=ap.organization_id,
- provider=ap.provider,
- flags=self._serialize_auth_provider_flags(ap),
- )
-
def get_org_auth_config(
self, *, organization_ids: List[int]
) -> List[RpcOrganizationAuthConfig]:
@@ -161,7 +147,7 @@ def get_org_auth_config(
return [
RpcOrganizationAuthConfig(
organization_id=oid,
- auth_provider=self._serialize_auth_provider(aps[oid]) if oid in aps else None,
+ auth_provider=serialize_auth_provider(aps[oid]) if oid in aps else None,
has_api_key=qs.get(oid, 0) > 0,
)
for oid in organization_ids
@@ -284,7 +270,7 @@ def handle_new_membership(
if invite_helper:
if invite_helper.invite_approved:
om = invite_helper.accept_invite(user)
- return serial_user, DatabaseBackedOrganizationService.serialize_member(om)
+ return serial_user, serialize_member(om)
# It's possible the user has an _invite request_ that hasn't been approved yet,
# and is able to join the organization without an invite through the SSO flow.
diff --git a/src/sentry/services/hybrid_cloud/auth/model.py b/src/sentry/services/hybrid_cloud/auth/model.py
new file mode 100644
index 00000000000000..12a462f6ca44e4
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/auth/model.py
@@ -0,0 +1,285 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import base64
+import contextlib
+from dataclasses import dataclass, field
+from enum import IntEnum
+from typing import TYPE_CHECKING, Any, Dict, Generator, List, Mapping, Optional, Tuple, Type, Union
+
+from pydantic.fields import Field
+from rest_framework.authentication import BaseAuthentication
+from rest_framework.request import Request
+
+from sentry.relay.utils import get_header_relay_id, get_header_relay_signature
+from sentry.services.hybrid_cloud import RpcModel
+from sentry.services.hybrid_cloud.user import RpcUser
+
+if TYPE_CHECKING:
+ from django.contrib.auth.models import AnonymousUser
+
+
+class RpcAuthenticatorType(IntEnum):
+ API_KEY_AUTHENTICATION = 0
+ TOKEN_AUTHENTICATION = 1
+ SESSION_AUTHENTICATION = 2
+
+ @classmethod
+ def from_authenticator(
+        cls, auth: Type[BaseAuthentication]
+ ) -> Optional["RpcAuthenticatorType"]:
+ from sentry.api.authentication import ApiKeyAuthentication, TokenAuthentication
+
+ if auth == ApiKeyAuthentication:
+ return RpcAuthenticatorType.API_KEY_AUTHENTICATION
+ if auth == TokenAuthentication:
+ return RpcAuthenticatorType.TOKEN_AUTHENTICATION
+ return None
+
+ def as_authenticator(self) -> BaseAuthentication:
+ from sentry.api.authentication import ApiKeyAuthentication, TokenAuthentication
+
+ if self == self.API_KEY_AUTHENTICATION:
+ return ApiKeyAuthentication()
+ if self == self.TOKEN_AUTHENTICATION:
+ return TokenAuthentication()
+ else:
+            raise ValueError(f"{self!r} has no authenticator associated with it.")
+
+
+def _normalize_to_b64(input: Optional[Union[str, bytes]]) -> Optional[str]:
+ if input is None:
+ return None
+ if isinstance(input, str):
+ input = input.encode("utf8")
+ return base64.b64encode(input).decode("utf8")
+
+
+class RpcAuthentication(BaseAuthentication): # type: ignore
+ www_authenticate_realm = "api"
+ types: List[RpcAuthenticatorType]
+
+ def __init__(self, types: List[RpcAuthenticatorType]):
+ self.types = types
+
+ def authenticate(self, request: Request) -> Optional[Tuple[Any, Any]]:
+ from sentry.services.hybrid_cloud.auth.service import auth_service
+
+ response = auth_service.authenticate_with(
+ request=authentication_request_from(request), authenticator_types=self.types
+ )
+
+ if response.user is not None:
+ return response.user, response.auth
+
+ return None
+
+    # Why does this exist? It tricks django rest_framework into returning the correct 401
+    # instead of 403 in unauthenticated cases, due to some deep library code. Tests fail if you remove it.
+    # Otherwise, this authenticate header value means nothing to clients.
+ def authenticate_header(self, request: Request) -> str:
+ return 'xBasic realm="%s"' % self.www_authenticate_realm
+
+
+class RpcMemberSsoState(RpcModel):
+ is_required: bool = False
+ is_valid: bool = False
+
+
+class RpcAuthState(RpcModel):
+ sso_state: RpcMemberSsoState
+ permissions: List[str]
+
+
+@dataclass
+class AuthenticationRequest:
+ # HTTP_X_SENTRY_RELAY_ID
+ sentry_relay_id: Optional[str] = None
+ # HTTP_X_SENTRY_RELAY_SIGNATURE
+ sentry_relay_signature: Optional[str] = None
+ backend: Optional[str] = None
+ user_id: Optional[str] = None
+ user_hash: Optional[str] = None
+ nonce: Optional[str] = None
+ remote_addr: Optional[str] = None
+ signature: Optional[str] = None
+ absolute_url: str = ""
+ absolute_url_root: str = ""
+ path: str = ""
+ authorization_b64: Optional[str] = None
+
+
+def authentication_request_from(request: Request) -> AuthenticationRequest:
+ from sentry.utils.linksign import find_signature
+
+ return AuthenticationRequest(
+ sentry_relay_id=get_header_relay_id(request),
+ sentry_relay_signature=get_header_relay_signature(request),
+ backend=request.session.get("_auth_user_backend", None),
+ user_id=request.session.get("_auth_user_id", None),
+ user_hash=request.session.get("_auth_user_hash", None),
+ nonce=request.session.get("_nonce", None),
+ remote_addr=request.META["REMOTE_ADDR"],
+ signature=find_signature(request),
+ absolute_url=request.build_absolute_uri(),
+ absolute_url_root=request.build_absolute_uri("/"),
+ path=request.path,
+ authorization_b64=_normalize_to_b64(request.META.get("HTTP_AUTHORIZATION")),
+ )
+
+
+@dataclass(eq=True)
+class AuthenticatedToken:
+ allowed_origins: List[str] = field(default_factory=list)
+ audit_log_data: Dict[str, Any] = field(default_factory=dict)
+ scopes: List[str] = field(default_factory=list)
+ entity_id: Optional[int] = None
+ kind: str = "system"
+ user_id: Optional[int] = None # only relevant for ApiToken
+ organization_id: Optional[int] = None
+ application_id: Optional[int] = None # only relevant for ApiToken
+
+ @classmethod
+ def from_token(cls, token: Any) -> Optional["AuthenticatedToken"]:
+ if token is None:
+ return None
+
+ if isinstance(token, AuthenticatedToken):
+ return token
+
+ for kind, kind_cls in cls.get_kinds().items():
+ if isinstance(token, kind_cls):
+ break
+ else:
+            raise KeyError(f"Token {token} is not a registered AuthenticatedToken type!")
+
+ return cls(
+ allowed_origins=token.get_allowed_origins(),
+ scopes=token.get_scopes(),
+ audit_log_data=token.get_audit_log_data(),
+ entity_id=getattr(token, "id", None),
+ kind=kind,
+ user_id=getattr(token, "user_id", None),
+ organization_id=getattr(token, "organization_id", None),
+ application_id=getattr(token, "application_id", None),
+ )
+
+ @classmethod
+ def get_kinds(cls) -> Mapping[str, Type[Any]]:
+ return getattr(cls, "_kinds", {})
+
+ @classmethod
+ def register_kind(cls, kind_name: str, t: Type[Any]) -> None:
+ kind_map = getattr(cls, "_kinds", {})
+ if kind_name in kind_map:
+ raise ValueError(f"Conflict detected, kind {kind_name} registered twice!")
+ kind_map[kind_name] = t
+ setattr(cls, "_kinds", kind_map)
+
+ def get_audit_log_data(self) -> Mapping[str, Any]:
+ return self.audit_log_data
+
+ def get_allowed_origins(self) -> List[str]:
+ return self.allowed_origins
+
+ def get_scopes(self) -> List[str]:
+ return self.scopes
+
+ def has_scope(self, scope: str) -> bool:
+ if self.kind == "system":
+ return True
+ return scope in self.get_scopes()
+
+
+@dataclass
+class AuthenticationContext:
+ """
+    The defaults of all fields should form a valid, unauthenticated context.
+ """
+
+ auth: Optional[AuthenticatedToken] = None
+ user: Optional[RpcUser] = None
+
+ def _get_user(self) -> Union[RpcUser, "AnonymousUser"]:
+ """
+ Helper function to avoid importing AnonymousUser when `applied_to_request` is run on startup
+ """
+ from django.contrib.auth.models import AnonymousUser
+
+ return self.user or AnonymousUser()
+
+ @contextlib.contextmanager
+ def applied_to_request(self, request: Any = None) -> Generator[None, None, None]:
+ """
+        Some code still reaches for the global 'env' object when determining user or auth behaviors. This bleeds the
+        current request context into that code and makes it difficult to carry RPC authentication context in an
+        isolated, controlled way. This method lets code handling an RPC or inter-silo behavior assume the user and
+        auth context provided explicitly on this object instead.
+ """
+ from sentry.app import env
+
+ if request is None:
+ request = env.request
+
+ if request is None:
+            # Contexts that lack a request
+            # Note -- if a request is set up in the env after this context manager is entered, you run the risk of bugs.
+ yield
+ return
+
+ has_user = hasattr(request, "user")
+ has_auth = hasattr(request, "auth")
+
+ old_user = getattr(request, "user", None)
+ old_auth = getattr(request, "auth", None)
+ request.user = self._get_user()
+ request.auth = self.auth
+
+ try:
+ yield
+ finally:
+ if has_user:
+ request.user = old_user
+ else:
+ delattr(request, "user")
+
+ if has_auth:
+ request.auth = old_auth
+ else:
+ delattr(request, "auth")
+
+
+@dataclass
+class MiddlewareAuthenticationResponse(AuthenticationContext):
+ expired: bool = False
+ user_from_signed_request: bool = False
+
+
+class RpcAuthProviderFlags(RpcModel):
+ allow_unlinked: bool = False
+ scim_enabled: bool = False
+
+
+class RpcAuthProvider(RpcModel):
+ id: int = -1
+ organization_id: int = -1
+ provider: str = ""
+ flags: RpcAuthProviderFlags = Field(default_factory=lambda: RpcAuthProviderFlags())
+
+ def __hash__(self) -> int:
+ return hash((self.id, self.organization_id, self.provider))
+
+
+class RpcAuthIdentity(RpcModel):
+ id: int = -1
+ user_id: int = -1
+ provider_id: int = -1
+ ident: str = ""
+
+
+class RpcOrganizationAuthConfig(RpcModel):
+ organization_id: int = -1
+ auth_provider: Optional[RpcAuthProvider] = None
+ has_api_key: bool = False
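
A small sketch of how the pieces of model.py compose. The token class below is a stand-in invented for illustration, not a real Sentry model; only the register_kind/from_token machinery comes from the code above:

from sentry.services.hybrid_cloud.auth import AuthenticatedToken

class FakeApiToken:  # hypothetical token type, for illustration only
    id = 7
    user_id = 42

    def get_allowed_origins(self):
        return ["*"]

    def get_scopes(self):
        return ["org:read"]

    def get_audit_log_data(self):
        return {}

AuthenticatedToken.register_kind("fake_api_token", FakeApiToken)
token = AuthenticatedToken.from_token(FakeApiToken())
assert token.kind == "fake_api_token"
assert token.has_scope("org:read") and not token.has_scope("org:write")
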
diff --git a/src/sentry/services/hybrid_cloud/auth/serial.py b/src/sentry/services/hybrid_cloud/auth/serial.py
new file mode 100644
index 00000000000000..d80aeba7d572bf
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/auth/serial.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+from typing import cast
+
+from sentry.models import AuthProvider
+from sentry.services.hybrid_cloud.auth import RpcAuthProvider, RpcAuthProviderFlags
+
+
+def _serialize_auth_provider_flags(ap: AuthProvider) -> RpcAuthProviderFlags:
+ return cast(
+ RpcAuthProviderFlags,
+ RpcAuthProviderFlags.serialize_by_field_name(ap.flags, value_transform=bool),
+ )
+
+
+def serialize_auth_provider(ap: AuthProvider) -> RpcAuthProvider:
+ return RpcAuthProvider(
+ id=ap.id,
+ organization_id=ap.organization_id,
+ provider=ap.provider,
+ flags=_serialize_auth_provider_flags(ap),
+ )
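
serialize_by_field_name is defined on RpcModel elsewhere in the codebase; judging from this call site, it copies same-named attributes off the source object and passes each through value_transform. A hand-rolled equivalent of the flags serialization, assuming AuthProvider.flags exposes the two attributes named on the model:

def serialize_flags_by_hand(flags) -> RpcAuthProviderFlags:
    # Equivalent in spirit to serialize_by_field_name(..., value_transform=bool)
    return RpcAuthProviderFlags(
        allow_unlinked=bool(flags.allow_unlinked),
        scim_enabled=bool(flags.scim_enabled),
    )
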
diff --git a/src/sentry/services/hybrid_cloud/auth/service.py b/src/sentry/services/hybrid_cloud/auth/service.py
new file mode 100644
index 00000000000000..ddf0396fbe81b4
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/auth/service.py
@@ -0,0 +1,109 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import abc
+from typing import List, Optional, Tuple, cast
+
+from rest_framework.request import Request
+
+from sentry.services.hybrid_cloud.auth import (
+ AuthenticatedToken,
+ AuthenticationContext,
+ AuthenticationRequest,
+ MiddlewareAuthenticationResponse,
+ RpcAuthenticatorType,
+ RpcAuthIdentity,
+ RpcAuthProvider,
+ RpcAuthState,
+ RpcOrganizationAuthConfig,
+)
+from sentry.services.hybrid_cloud.organization import (
+ RpcOrganization,
+ RpcOrganizationMember,
+ RpcOrganizationMemberSummary,
+)
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.services.hybrid_cloud.user import RpcUser
+from sentry.silo import SiloMode
+
+
+class AuthService(RpcService):
+ key = "auth"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.auth.impl import DatabaseBackedAuthService
+
+ return DatabaseBackedAuthService()
+
+ @rpc_method
+ @abc.abstractmethod
+ def authenticate(self, *, request: AuthenticationRequest) -> MiddlewareAuthenticationResponse:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def authenticate_with(
+ self, *, request: AuthenticationRequest, authenticator_types: List[RpcAuthenticatorType]
+ ) -> AuthenticationContext:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_org_auth_config(
+ self, *, organization_ids: List[int]
+ ) -> List[RpcOrganizationAuthConfig]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_user_auth_state(
+ self,
+ *,
+ user_id: int,
+ is_superuser: bool,
+ organization_id: Optional[int],
+ org_member: Optional[RpcOrganizationMemberSummary],
+ ) -> RpcAuthState:
+ pass
+
+ # TODO: Denormalize this scim enabled flag onto organizations?
+ # This is potentially a large list
+ @rpc_method
+ @abc.abstractmethod
+ def get_org_ids_with_scim(self) -> List[int]:
+ """
+ This method returns a list of org ids that have scim enabled
+ :return:
+ """
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def get_auth_providers(self, *, organization_id: int) -> List[RpcAuthProvider]:
+ """
+ This method returns a list of auth providers for an org
+ :return:
+ """
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def handle_new_membership(
+ self,
+ *,
+ request: Request,
+ organization: RpcOrganization,
+ auth_identity: RpcAuthIdentity,
+ auth_provider: RpcAuthProvider,
+ ) -> Tuple[RpcUser, RpcOrganizationMember]:
+ pass
+
+ @rpc_method
+ @abc.abstractmethod
+ def token_has_org_access(self, *, token: AuthenticatedToken, organization_id: int) -> bool:
+ pass
+
+
+auth_service: AuthService = cast(AuthService, AuthService.create_delegation())
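
Callers never construct DatabaseBackedAuthService themselves: create_delegation() returns a proxy that runs the local implementation inside the control silo and goes over RPC everywhere else. A hypothetical call site (ids invented):

configs = auth_service.get_org_auth_config(organization_ids=[1, 2])
for config in configs:
    if config.auth_provider is not None:
        print(config.organization_id, config.auth_provider.provider)
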
diff --git a/src/sentry/services/hybrid_cloud/hook/__init__.py b/src/sentry/services/hybrid_cloud/hook/__init__.py
index 4bbff1baefce84..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/hook/__init__.py
+++ b/src/sentry/services/hybrid_cloud/hook/__init__.py
@@ -1,82 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-import abc
-import dataclasses
-from typing import Any, List, Mapping, Optional
-
-from sentry.models import ServiceHook
-from sentry.services.hybrid_cloud import InterfaceWithLifecycle, silo_mode_delegation, stubbed
-from sentry.silo import SiloMode
-
-
[email protected]
-class RpcServiceHook:
- id: int = -1
- guid: str = ""
- application_id: int = -1
- installation_id: Optional[int] = None
- project_id: Optional[int] = None
- organization_id: Optional[int] = None
- url: str = ""
- events: List[str] = dataclasses.field(default_factory=list)
- status: int = 0
-
- def get_audit_log_data(self) -> Mapping[str, Any]:
- return {"url": self.url}
-
-
-class HookService(InterfaceWithLifecycle):
- def serialize_service_hook(self, hook: ServiceHook) -> RpcServiceHook:
- return RpcServiceHook(
- id=hook.id,
- guid=hook.guid,
- application_id=hook.application_id,
- installation_id=hook.installation_id,
- project_id=hook.project_id,
- organization_id=hook.organization_id,
- url=hook.url,
- events=hook.events,
- status=hook.status,
- )
-
- @abc.abstractmethod
- def create_service_hook(
- self,
- *,
- application_id: Optional[int] = None,
- actor_id: int = -1,
- installation_id: Optional[int] = None,
- organization_id: int = -1,
- project_ids: Optional[List[int]] = None,
- events: Optional[List[str]] = None,
- url: str = "",
- ) -> RpcServiceHook:
- pass
-
- @abc.abstractmethod
- def update_webhook_and_events(
- self,
- *,
- application_id: Optional[int] = None,
- webhook_url: Optional[str] = None,
- events: List[str],
- ) -> List[RpcServiceHook]:
- pass
-
-
-def impl_with_db() -> HookService:
- from sentry.services.hybrid_cloud.hook.impl import DatabaseBackedAppService
-
- return DatabaseBackedAppService()
-
-
-hook_service: HookService = silo_mode_delegation(
- {
- SiloMode.MONOLITH: impl_with_db,
- SiloMode.REGION: impl_with_db,
- SiloMode.CONTROL: stubbed(impl_with_db, SiloMode.REGION),
- }
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/hook/impl.py b/src/sentry/services/hybrid_cloud/hook/impl.py
index 2e177865fe21b2..ad015dc4073732 100644
--- a/src/sentry/services/hybrid_cloud/hook/impl.py
+++ b/src/sentry/services/hybrid_cloud/hook/impl.py
@@ -8,11 +8,10 @@
from sentry.models import ServiceHook
from sentry.sentry_apps.apps import expand_events
from sentry.services.hybrid_cloud.hook import HookService, RpcServiceHook
+from sentry.services.hybrid_cloud.hook.serial import serialize_service_hook
-class DatabaseBackedAppService(
- HookService,
-):
+class DatabaseBackedAppService(HookService):
def update_webhook_and_events(
self,
*,
@@ -27,7 +26,7 @@ def update_webhook_and_events(
hook.url = webhook_url
hook.events = expand_events(events)
hook.save()
- return [self.serialize_service_hook(h) for h in hooks]
+ return [serialize_service_hook(h) for h in hooks]
else:
deletions.exec_sync_many(list(hooks))
return []
@@ -60,7 +59,7 @@ def create_service_hook(
for project_id in project_ids:
hook.add_project(project_id)
- return self.serialize_service_hook(hook)
+ return serialize_service_hook(hook)
def close(self) -> None:
pass
diff --git a/src/sentry/services/hybrid_cloud/hook/model.py b/src/sentry/services/hybrid_cloud/hook/model.py
new file mode 100644
index 00000000000000..8256e0d71def44
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/hook/model.py
@@ -0,0 +1,25 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from typing import Any, List, Mapping, Optional
+
+from pydantic.fields import Field
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcServiceHook(RpcModel):
+ id: int = -1
+ guid: str = ""
+ application_id: int = -1
+ installation_id: Optional[int] = None
+ project_id: Optional[int] = None
+ organization_id: Optional[int] = None
+ url: str = ""
+ events: List[str] = Field(default_factory=list)
+ status: int = 0
+
+ def get_audit_log_data(self) -> Mapping[str, Any]:
+ return {"url": self.url}
diff --git a/src/sentry/services/hybrid_cloud/hook/serial.py b/src/sentry/services/hybrid_cloud/hook/serial.py
new file mode 100644
index 00000000000000..5b7ede3142ec33
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/hook/serial.py
@@ -0,0 +1,16 @@
+from sentry.models import ServiceHook
+from sentry.services.hybrid_cloud.hook import RpcServiceHook
+
+
+def serialize_service_hook(hook: ServiceHook) -> RpcServiceHook:
+ return RpcServiceHook(
+ id=hook.id,
+ guid=hook.guid,
+ application_id=hook.application_id,
+ installation_id=hook.installation_id,
+ project_id=hook.project_id,
+ organization_id=hook.organization_id,
+ url=hook.url,
+ events=hook.events,
+ status=hook.status,
+ )
diff --git a/src/sentry/services/hybrid_cloud/hook/service.py b/src/sentry/services/hybrid_cloud/hook/service.py
new file mode 100644
index 00000000000000..fbbc89683e108d
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/hook/service.py
@@ -0,0 +1,52 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import abc
+from typing import List, Optional
+
+from sentry.services.hybrid_cloud import InterfaceWithLifecycle, silo_mode_delegation, stubbed
+from sentry.services.hybrid_cloud.hook import RpcServiceHook
+from sentry.silo import SiloMode
+
+
+class HookService(InterfaceWithLifecycle):
+ @abc.abstractmethod
+ def create_service_hook(
+ self,
+ *,
+ application_id: Optional[int] = None,
+ actor_id: int = -1,
+ installation_id: Optional[int] = None,
+ organization_id: int = -1,
+ project_ids: Optional[List[int]] = None,
+ events: Optional[List[str]] = None,
+ url: str = "",
+ ) -> RpcServiceHook:
+ pass
+
+ @abc.abstractmethod
+ def update_webhook_and_events(
+ self,
+ *,
+ application_id: Optional[int] = None,
+ webhook_url: Optional[str] = None,
+ events: List[str],
+ ) -> List[RpcServiceHook]:
+ pass
+
+
+def impl_with_db() -> HookService:
+ from sentry.services.hybrid_cloud.hook.impl import DatabaseBackedAppService
+
+ return DatabaseBackedAppService()
+
+
+hook_service: HookService = silo_mode_delegation(
+ {
+ SiloMode.MONOLITH: impl_with_db,
+ SiloMode.REGION: impl_with_db,
+ SiloMode.CONTROL: stubbed(impl_with_db, SiloMode.REGION),
+ }
+)
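
Unlike the auth service above, HookService stays on the older silo_mode_delegation mechanism: the mapping picks the concrete implementation by the current silo mode, with the control silo stubbed out to the region implementation. A hypothetical call through the delegate (ids and event name invented):

created = hook_service.create_service_hook(
    application_id=1,
    actor_id=10,
    organization_id=100,
    project_ids=[1000],
    events=["event.alert"],
    url="https://example.com/hook",
)
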
diff --git a/src/sentry/services/hybrid_cloud/identity/__init__.py b/src/sentry/services/hybrid_cloud/identity/__init__.py
index 370078ba80d7ae..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/identity/__init__.py
+++ b/src/sentry/services/hybrid_cloud/identity/__init__.py
@@ -1,115 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from typing import TYPE_CHECKING, List, Optional, cast
-
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.silo import SiloMode
-
-if TYPE_CHECKING:
- from sentry.models.identity import Identity, IdentityProvider
-
-
-class RpcIdentityProvider(RpcModel):
- id: int
- type: str
- external_id: str
-
-
-class RpcIdentity(RpcModel):
- id: int
- idp_id: int
- user_id: int
- external_id: str
-
-
-class IdentityService(RpcService):
- key = "identity"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.identity.impl import DatabaseBackedIdentityService
-
- return DatabaseBackedIdentityService()
-
- def _serialize_identity_provider(
- self, identity_provider: "IdentityProvider"
- ) -> RpcIdentityProvider:
- return RpcIdentityProvider(
- id=identity_provider.id,
- type=identity_provider.type,
- external_id=identity_provider.external_id,
- )
-
- def _serialize_identity(self, identity: "Identity") -> RpcIdentity:
- return RpcIdentity(
- id=identity.id,
- idp_id=identity.idp_id,
- user_id=identity.user_id,
- external_id=identity.external_id,
- )
-
- @rpc_method
- @abstractmethod
- def get_provider(
- self,
- *,
- provider_id: Optional[int] = None,
- provider_type: Optional[str] = None,
- provider_ext_id: Optional[str] = None,
- ) -> Optional[RpcIdentityProvider]:
- """
- Returns an RpcIdentityProvider either by using the idp.id (provider_id), or a combination
- of idp.type (provider_type) and idp.external_id (provider_ext_id)
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_identity(
- self,
- *,
- provider_id: int,
- user_id: Optional[int] = None,
- identity_ext_id: Optional[str] = None,
- ) -> Optional[RpcIdentity]:
- """
- Returns an RpcIdentity using the idp.id (provider_id) and either the user.id (user_id)
- or identity.external_id (identity_ext_id)
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_user_identities_by_provider_type(
- self,
- *,
- user_id: int,
- provider_type: str,
- exclude_matching_external_ids: bool = False,
- ) -> List[RpcIdentity]:
- """
- Returns a list of APIIdentities for a given user based on idp.type (provider_type).
- If exclude_matching_external_ids is True, excludes entries with
- identity.external_id == idp.external_id
- """
- pass
-
- @rpc_method
- @abstractmethod
- def delete_identities(self, user_id: int, organization_id: int) -> None:
- """
- Deletes the set of identities associated with a user and organization context.
- :param user_id:
- :param organization_id:
- :return:
- """
- pass
-
-
-identity_service: IdentityService = cast(IdentityService, IdentityService.create_delegation())
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/identity/impl.py b/src/sentry/services/hybrid_cloud/identity/impl.py
index 87b1fde63baaf5..5b98e9c8fce464 100644
--- a/src/sentry/services/hybrid_cloud/identity/impl.py
+++ b/src/sentry/services/hybrid_cloud/identity/impl.py
@@ -4,6 +4,10 @@
from sentry.models import AuthIdentity
from sentry.services.hybrid_cloud.identity import IdentityService, RpcIdentity, RpcIdentityProvider
+from sentry.services.hybrid_cloud.identity.serial import (
+ serialize_identity,
+ serialize_identity_provider,
+)
class DatabaseBackedIdentityService(IdentityService):
@@ -28,7 +32,7 @@ def get_provider(
idp = IdentityProvider.objects.filter(**idp_kwargs).first()
- return self._serialize_identity_provider(idp) if idp else None
+ return serialize_identity_provider(idp) if idp else None
def get_identity(
self,
@@ -44,7 +48,7 @@ def get_identity(
identity = Identity.objects.filter(**identity_kwargs, idp_id=provider_id).first()
- return self._serialize_identity(identity) if identity else None
+ return serialize_identity(identity) if identity else None
def get_user_identities_by_provider_type(
self,
@@ -65,7 +69,7 @@ def get_user_identities_by_provider_type(
# We need to exclude rows where this is NOT updated to the user_id later.
identities = identities.exclude(external_id=F("idp__external_id"))
- return [self._serialize_identity(identity) for identity in identities]
+ return [serialize_identity(identity) for identity in identities]
def delete_identities(self, user_id: int, organization_id: int) -> None:
"""
diff --git a/src/sentry/services/hybrid_cloud/identity/model.py b/src/sentry/services/hybrid_cloud/identity/model.py
new file mode 100644
index 00000000000000..3f52660dc8fac2
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/identity/model.py
@@ -0,0 +1,19 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcIdentityProvider(RpcModel):
+ id: int
+ type: str
+ external_id: str
+
+
+class RpcIdentity(RpcModel):
+ id: int
+ idp_id: int
+ user_id: int
+ external_id: str
diff --git a/src/sentry/services/hybrid_cloud/identity/serial.py b/src/sentry/services/hybrid_cloud/identity/serial.py
new file mode 100644
index 00000000000000..2b1bc46663cb82
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/identity/serial.py
@@ -0,0 +1,23 @@
+from typing import TYPE_CHECKING
+
+from sentry.services.hybrid_cloud.identity import RpcIdentity, RpcIdentityProvider
+
+if TYPE_CHECKING:
+ from sentry.models.identity import Identity, IdentityProvider
+
+
+def serialize_identity_provider(identity_provider: "IdentityProvider") -> RpcIdentityProvider:
+ return RpcIdentityProvider(
+ id=identity_provider.id,
+ type=identity_provider.type,
+ external_id=identity_provider.external_id,
+ )
+
+
+def serialize_identity(identity: "Identity") -> RpcIdentity:
+ return RpcIdentity(
+ id=identity.id,
+ idp_id=identity.idp_id,
+ user_id=identity.user_id,
+ external_id=identity.external_id,
+ )
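
The serializers are duck-typed at runtime, which makes them easy to exercise without a database; the stand-in object below is for illustration only:

from types import SimpleNamespace

fake_identity = SimpleNamespace(id=5, idp_id=2, user_id=42, external_id="U123ABC")
rpc = serialize_identity(fake_identity)
assert (rpc.idp_id, rpc.external_id) == (2, "U123ABC")
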
diff --git a/src/sentry/services/hybrid_cloud/identity/service.py b/src/sentry/services/hybrid_cloud/identity/service.py
new file mode 100644
index 00000000000000..5c7868c3f06b67
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/identity/service.py
@@ -0,0 +1,79 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import List, Optional, cast
+
+from sentry.services.hybrid_cloud.identity import RpcIdentity, RpcIdentityProvider
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.silo import SiloMode
+
+
+class IdentityService(RpcService):
+ key = "identity"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.identity.impl import DatabaseBackedIdentityService
+
+ return DatabaseBackedIdentityService()
+
+ @rpc_method
+ @abstractmethod
+ def get_provider(
+ self,
+ *,
+ provider_id: Optional[int] = None,
+ provider_type: Optional[str] = None,
+ provider_ext_id: Optional[str] = None,
+ ) -> Optional[RpcIdentityProvider]:
+ """
+ Returns an RpcIdentityProvider either by using the idp.id (provider_id), or a combination
+ of idp.type (provider_type) and idp.external_id (provider_ext_id)
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_identity(
+ self,
+ *,
+ provider_id: int,
+ user_id: Optional[int] = None,
+ identity_ext_id: Optional[str] = None,
+ ) -> Optional[RpcIdentity]:
+ """
+ Returns an RpcIdentity using the idp.id (provider_id) and either the user.id (user_id)
+ or identity.external_id (identity_ext_id)
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_user_identities_by_provider_type(
+ self,
+ *,
+ user_id: int,
+ provider_type: str,
+ exclude_matching_external_ids: bool = False,
+ ) -> List[RpcIdentity]:
+ """
+        Returns a list of RpcIdentities for a given user based on idp.type (provider_type).
+ If exclude_matching_external_ids is True, excludes entries with
+ identity.external_id == idp.external_id
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def delete_identities(self, user_id: int, organization_id: int) -> None:
+ """
+ Deletes the set of identities associated with a user and organization context.
+ """
+ pass
+
+
+identity_service: IdentityService = cast(IdentityService, IdentityService.create_delegation())
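
A hypothetical lookup flow against the delegate: resolve a provider by type and external id, then the identity itself (all values invented):

provider = identity_service.get_provider(provider_type="slack", provider_ext_id="T0123")
if provider is not None:
    identity = identity_service.get_identity(provider_id=provider.id, user_id=42)
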
diff --git a/src/sentry/services/hybrid_cloud/integration/__init__.py b/src/sentry/services/hybrid_cloud/integration/__init__.py
index 16ae2ce9fc08f4..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/integration/__init__.py
+++ b/src/sentry/services/hybrid_cloud/integration/__init__.py
@@ -1,356 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from datetime import datetime
-from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast
-
-from sentry.constants import ObjectStatus
-from sentry.integrations.base import (
- IntegrationFeatures,
- IntegrationInstallation,
- IntegrationProvider,
-)
-from sentry.models.integrations import Integration, OrganizationIntegration
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary
-from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.silo import SiloMode
-
-
-class RpcIntegration(RpcModel):
- id: int
- provider: str
- external_id: str
- name: str
- metadata: Dict[str, Any]
- status: int
-
- def __hash__(self) -> int:
- return hash(self.id)
-
- def get_provider(self) -> IntegrationProvider:
- from sentry import integrations
-
- return integrations.get(self.provider) # type: ignore
-
- def get_status_display(self) -> str:
- for status_id, display in ObjectStatus.as_choices():
- if status_id == self.status:
- return display
- return "disabled"
-
-
-class RpcOrganizationIntegration(RpcModel):
- id: int
- default_auth_id: Optional[int]
- organization_id: int
- integration_id: int
- config: Dict[str, Any]
- status: int # As ObjectStatus
- grace_period_end: Optional[datetime]
-
- def __hash__(self) -> int:
- return hash(self.id)
-
- def get_status_display(self) -> str:
- for status_id, display in ObjectStatus.as_choices():
- if status_id == self.status:
- return display
- return "disabled"
-
-
-class IntegrationService(RpcService):
- key = "integration"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.integration.impl import DatabaseBackedIntegrationService
-
- return DatabaseBackedIntegrationService()
-
- def _serialize_integration(self, integration: Integration) -> RpcIntegration:
- return RpcIntegration(
- id=integration.id,
- provider=integration.provider,
- external_id=integration.external_id,
- name=integration.name,
- metadata=integration.metadata,
- status=integration.status,
- )
-
- def _serialize_organization_integration(
- self, oi: OrganizationIntegration
- ) -> RpcOrganizationIntegration:
- return RpcOrganizationIntegration(
- id=oi.id,
- default_auth_id=oi.default_auth_id,
- organization_id=oi.organization_id,
- integration_id=oi.integration_id,
- config=oi.config,
- status=oi.status,
- grace_period_end=oi.grace_period_end,
- )
-
- @rpc_method
- @abstractmethod
- def page_integration_ids(
- self,
- *,
- provider_keys: List[str],
- organization_id: int,
- args: RpcPaginationArgs,
- ) -> RpcPaginationResult:
- pass
-
- @rpc_method
- @abstractmethod
- def send_message(
- self,
- *,
- integration_id: int,
- organization_id: int,
- channel: str,
- message: str,
- ) -> bool:
- pass
-
- @rpc_method
- @abstractmethod
- def page_organization_integrations_ids(
- self,
- *,
- organization_id: int,
- statuses: List[int],
- provider_key: Optional[str] = None,
- args: RpcPaginationArgs,
- ) -> RpcPaginationResult:
- pass
-
- @rpc_method
- @abstractmethod
- def get_integrations(
- self,
- *,
- integration_ids: Optional[Iterable[int]] = None,
- organization_id: Optional[int] = None,
- status: Optional[int] = None,
- providers: Optional[List[str]] = None,
- org_integration_status: Optional[int] = None,
- limit: Optional[int] = None,
- organization_integration_id: Optional[int] = None,
- ) -> List[RpcIntegration]:
- """
- Returns all APIIntegrations matching the provided kwargs.
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_integration(
- self,
- *,
- integration_id: Optional[int] = None,
- provider: Optional[str] = None,
- external_id: Optional[str] = None,
- organization_integration_id: Optional[int] = None,
- ) -> Optional[RpcIntegration]:
- """
- Returns an RpcIntegration using either the id or a combination of the provider and external_id
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_organization_integrations(
- self,
- *,
- org_integration_ids: Optional[List[int]] = None,
- integration_id: Optional[int] = None,
- organization_id: Optional[int] = None,
- organization_ids: Optional[List[int]] = None,
- status: Optional[int] = None,
- providers: Optional[List[str]] = None,
- has_grace_period: Optional[bool] = None,
- limit: Optional[int] = None,
- ) -> List[RpcOrganizationIntegration]:
- """
- Returns all APIOrganizationIntegrations from the matching kwargs.
- If providers is set, it will also be filtered by the integration providers set in the list.
- If has_grace_period is set, it will filter by whether the grace_period is null or not.
- """
- pass
-
- @rpc_method
- def get_organization_integration(
- self, *, integration_id: int, organization_id: int
- ) -> Optional[RpcOrganizationIntegration]:
- """
- Returns an RpcOrganizationIntegration from the integration and organization ids.
- """
- ois = self.get_organization_integrations(
- integration_id=integration_id, organization_id=organization_id, limit=1
- )
- return self._serialize_organization_integration(ois[0]) if len(ois) > 0 else None
-
- @rpc_method
- @abstractmethod
- def get_organization_context(
- self,
- *,
- organization_id: int,
- integration_id: Optional[int] = None,
- provider: Optional[str] = None,
- external_id: Optional[str] = None,
- ) -> Tuple[Optional[RpcIntegration], Optional[RpcOrganizationIntegration]]:
- """
- Returns a tuple of RpcIntegration and RpcOrganizationIntegration. The integration is selected
- by either integration_id, or a combination of provider and external_id.
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_organization_contexts(
- self,
- *,
- organization_id: Optional[int] = None,
- integration_id: Optional[int] = None,
- provider: Optional[str] = None,
- external_id: Optional[str] = None,
- ) -> Tuple[Optional[RpcIntegration], List[RpcOrganizationIntegration]]:
- """
- Returns a tuple of RpcIntegration and RpcOrganizationIntegrations. The integrations are selected
- by either integration_id, or a combination of provider and external_id.
- """
- pass
-
- @rpc_method
- @abstractmethod
- def update_integrations(
- self,
- *,
- integration_ids: List[int],
- name: Optional[str] = None,
- metadata: Optional[Dict[str, Any]] = None,
- status: Optional[int] = None,
- ) -> List[RpcIntegration]:
- """
- Returns a list of APIIntegrations after updating the fields provided.
- To set a field as null, use the `set_{FIELD}_null` keyword argument.
- """
- pass
-
- @rpc_method
- @abstractmethod
- def update_integration(
- self,
- *,
- integration_id: int,
- name: Optional[str] = None,
- metadata: Optional[Dict[str, Any]] = None,
- status: Optional[int] = None,
- ) -> Optional[RpcIntegration]:
- """
- Returns an RpcIntegration after updating the fields provided.
- To set a field as null, use the `set_{FIELD}_null` keyword argument.
- """
- pass
-
- @rpc_method
- @abstractmethod
- def update_organization_integrations(
- self,
- *,
- org_integration_ids: List[int],
- config: Optional[Dict[str, Any]] = None,
- status: Optional[int] = None,
- grace_period_end: Optional[datetime] = None,
- set_grace_period_end_null: Optional[bool] = None,
- ) -> List[RpcOrganizationIntegration]:
- """
- Returns a list of APIOrganizationIntegrations after updating the fields provided.
- To set a field as null, use the `set_{FIELD}_null` keyword argument.
- """
- pass
-
- @rpc_method
- @abstractmethod
- def update_organization_integration(
- self,
- *,
- org_integration_id: int,
- config: Optional[Dict[str, Any]] = None,
- status: Optional[int] = None,
- grace_period_end: Optional[datetime] = None,
- set_grace_period_end_null: Optional[bool] = None,
- ) -> Optional[RpcOrganizationIntegration]:
- """
- Returns an RpcOrganizationIntegration after updating the fields provided.
- To set a field as null, use the `set_{FIELD}_null` keyword argument.
- """
- pass
-
- # The following methods replace instance methods of the ORM objects!
-
- def get_installation(
- self,
- *,
- integration: Union[RpcIntegration, Integration],
- organization_id: int,
- ) -> IntegrationInstallation:
- """
- Returns the IntegrationInstallation class for a given integration.
- Intended to replace calls of `integration.get_installation`.
- See src/sentry/models/integrations/integration.py
- """
- from sentry import integrations
-
- provider = integrations.get(integration.provider)
- installation: IntegrationInstallation = provider.get_installation(
- model=integration,
- organization_id=organization_id,
- )
- return installation
-
- def has_feature(self, *, provider: str, feature: IntegrationFeatures) -> bool:
- """
- Returns True if the IntegrationProvider subclass contains a given feature
- Intended to replace calls of `integration.has_feature`.
- See src/sentry/models/integrations/integration.py
- """
- from sentry import integrations
-
- int_provider: IntegrationProvider = integrations.get(provider)
- return feature in int_provider.features
-
- @rpc_method
- @abstractmethod
- def send_incident_alert_notification(
- self,
- *,
- sentry_app_id: int,
- action_id: int,
- incident_id: int,
- organization: RpcOrganizationSummary,
- new_status: int,
- incident_attachment: Mapping[str, str],
- metric_value: Optional[str] = None,
- ) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def send_msteams_incident_alert_notification(
- self, *, integration_id: int, channel: Optional[str], attachment: Dict[str, Any]
- ) -> None:
- raise NotImplementedError
-
-
-integration_service: IntegrationService = cast(
- IntegrationService, IntegrationService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/integration/impl.py b/src/sentry/services/hybrid_cloud/integration/impl.py
index e1dae4896ab4c6..c1695940a76baa 100644
--- a/src/sentry/services/hybrid_cloud/integration/impl.py
+++ b/src/sentry/services/hybrid_cloud/integration/impl.py
@@ -18,6 +18,10 @@
RpcIntegration,
RpcOrganizationIntegration,
)
+from sentry.services.hybrid_cloud.integration.serial import (
+ serialize_integration,
+ serialize_organization_integration,
+)
from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary
from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult
from sentry.shared_integrations.exceptions import ApiError
@@ -121,7 +125,7 @@ def get_integrations(
if limit is not None:
integrations = integrations[:limit]
- return [self._serialize_integration(integration) for integration in integrations]
+ return [serialize_integration(integration) for integration in integrations]
def get_integration(
self,
@@ -151,7 +155,7 @@ def get_integration(
integration = Integration.objects.get(**integration_kwargs)
except Integration.DoesNotExist:
return None
- return self._serialize_integration(integration)
+ return serialize_integration(integration)
def get_organization_integrations(
self,
@@ -189,7 +193,7 @@ def get_organization_integrations(
if limit is not None:
ois = ois[:limit]
- return [self._serialize_organization_integration(oi) for oi in ois]
+ return [serialize_organization_integration(oi) for oi in ois]
def get_organization_context(
self,
@@ -229,8 +233,8 @@ def get_organization_contexts(
organization_id=organization_id,
)
return (
- self._serialize_integration(integration),
- [self._serialize_organization_integration(oi) for oi in organization_integrations],
+ serialize_integration(integration),
+ [serialize_organization_integration(oi) for oi in organization_integrations],
)
def update_integrations(
@@ -258,7 +262,7 @@ def update_integrations(
integrations.update(**integration_kwargs)
- return [self._serialize_integration(integration) for integration in integrations]
+ return [serialize_integration(integration) for integration in integrations]
def update_integration(
self,
@@ -274,7 +278,7 @@ def update_integration(
status=status,
metadata=metadata,
)
- return self._serialize_integration(integrations[0]) if len(integrations) > 0 else None
+ return serialize_integration(integrations[0]) if len(integrations) > 0 else None
def update_organization_integrations(
self,
@@ -304,7 +308,7 @@ def update_organization_integrations(
ois.update(**oi_kwargs)
- return [self._serialize_organization_integration(oi) for oi in ois]
+ return [serialize_organization_integration(oi) for oi in ois]
def update_organization_integration(
self,
@@ -322,7 +326,7 @@ def update_organization_integration(
grace_period_end=grace_period_end,
set_grace_period_end_null=set_grace_period_end_null,
)
- return self._serialize_organization_integration(ois[0]) if len(ois) > 0 else None
+ return serialize_organization_integration(ois[0]) if len(ois) > 0 else None
def send_incident_alert_notification(
self,
diff --git a/src/sentry/services/hybrid_cloud/integration/model.py b/src/sentry/services/hybrid_cloud/integration/model.py
new file mode 100644
index 00000000000000..e26f9cafbe63fa
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/integration/model.py
@@ -0,0 +1,53 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from datetime import datetime
+from typing import Any, Dict, Optional
+
+from sentry.constants import ObjectStatus
+from sentry.integrations.base import IntegrationProvider
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcIntegration(RpcModel):
+ id: int
+ provider: str
+ external_id: str
+ name: str
+ metadata: Dict[str, Any]
+ status: int
+
+ def __hash__(self) -> int:
+ return hash(self.id)
+
+ def get_provider(self) -> IntegrationProvider:
+ from sentry import integrations
+
+ return integrations.get(self.provider) # type: ignore
+
+ def get_status_display(self) -> str:
+ for status_id, display in ObjectStatus.as_choices():
+ if status_id == self.status:
+ return display
+ return "disabled"
+
+
+class RpcOrganizationIntegration(RpcModel):
+ id: int
+ default_auth_id: Optional[int]
+ organization_id: int
+ integration_id: int
+ config: Dict[str, Any]
+ status: int # As ObjectStatus
+ grace_period_end: Optional[datetime]
+
+ def __hash__(self) -> int:
+ return hash(self.id)
+
+ def get_status_display(self) -> str:
+ for status_id, display in ObjectStatus.as_choices():
+ if status_id == self.status:
+ return display
+ return "disabled"
diff --git a/src/sentry/services/hybrid_cloud/integration/serial.py b/src/sentry/services/hybrid_cloud/integration/serial.py
new file mode 100644
index 00000000000000..f12fcebae8f5b5
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/integration/serial.py
@@ -0,0 +1,25 @@
+from sentry.models import Integration, OrganizationIntegration
+from sentry.services.hybrid_cloud.integration import RpcIntegration, RpcOrganizationIntegration
+
+
+def serialize_integration(integration: Integration) -> RpcIntegration:
+ return RpcIntegration(
+ id=integration.id,
+ provider=integration.provider,
+ external_id=integration.external_id,
+ name=integration.name,
+ metadata=integration.metadata,
+ status=integration.status,
+ )
+
+
+def serialize_organization_integration(oi: OrganizationIntegration) -> RpcOrganizationIntegration:
+ return RpcOrganizationIntegration(
+ id=oi.id,
+ default_auth_id=oi.default_auth_id,
+ organization_id=oi.organization_id,
+ integration_id=oi.integration_id,
+ config=oi.config,
+ status=oi.status,
+ grace_period_end=oi.grace_period_end,
+ )
diff --git a/src/sentry/services/hybrid_cloud/integration/service.py b/src/sentry/services/hybrid_cloud/integration/service.py
new file mode 100644
index 00000000000000..9a8e346d54e756
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/integration/service.py
@@ -0,0 +1,291 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from datetime import datetime
+from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union, cast
+
+from sentry.integrations.base import (
+ IntegrationFeatures,
+ IntegrationInstallation,
+ IntegrationProvider,
+)
+from sentry.models.integrations import Integration
+from sentry.services.hybrid_cloud.integration import RpcIntegration, RpcOrganizationIntegration
+from sentry.services.hybrid_cloud.integration.serial import serialize_organization_integration
+from sentry.services.hybrid_cloud.organization import RpcOrganizationSummary
+from sentry.services.hybrid_cloud.pagination import RpcPaginationArgs, RpcPaginationResult
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.silo import SiloMode
+
+
+class IntegrationService(RpcService):
+ key = "integration"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.integration.impl import DatabaseBackedIntegrationService
+
+ return DatabaseBackedIntegrationService()
+
+ @rpc_method
+ @abstractmethod
+ def page_integration_ids(
+ self,
+ *,
+ provider_keys: List[str],
+ organization_id: int,
+ args: RpcPaginationArgs,
+ ) -> RpcPaginationResult:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def send_message(
+ self,
+ *,
+ integration_id: int,
+ organization_id: int,
+ channel: str,
+ message: str,
+ ) -> bool:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def page_organization_integrations_ids(
+ self,
+ *,
+ organization_id: int,
+ statuses: List[int],
+ provider_key: Optional[str] = None,
+ args: RpcPaginationArgs,
+ ) -> RpcPaginationResult:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_integrations(
+ self,
+ *,
+ integration_ids: Optional[Iterable[int]] = None,
+ organization_id: Optional[int] = None,
+ status: Optional[int] = None,
+ providers: Optional[List[str]] = None,
+ org_integration_status: Optional[int] = None,
+ limit: Optional[int] = None,
+ organization_integration_id: Optional[int] = None,
+ ) -> List[RpcIntegration]:
+ """
+        Returns all RpcIntegrations matching the provided kwargs.
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_integration(
+ self,
+ *,
+ integration_id: Optional[int] = None,
+ provider: Optional[str] = None,
+ external_id: Optional[str] = None,
+ organization_integration_id: Optional[int] = None,
+ ) -> Optional[RpcIntegration]:
+ """
+ Returns an RpcIntegration using either the id or a combination of the provider and external_id
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_organization_integrations(
+ self,
+ *,
+ org_integration_ids: Optional[List[int]] = None,
+ integration_id: Optional[int] = None,
+ organization_id: Optional[int] = None,
+ organization_ids: Optional[List[int]] = None,
+ status: Optional[int] = None,
+ providers: Optional[List[str]] = None,
+ has_grace_period: Optional[bool] = None,
+ limit: Optional[int] = None,
+ ) -> List[RpcOrganizationIntegration]:
+ """
+        Returns all RpcOrganizationIntegrations matching the provided kwargs.
+ If providers is set, it will also be filtered by the integration providers set in the list.
+ If has_grace_period is set, it will filter by whether the grace_period is null or not.
+ """
+ pass
+
+ @rpc_method
+ def get_organization_integration(
+ self, *, integration_id: int, organization_id: int
+ ) -> Optional[RpcOrganizationIntegration]:
+ """
+ Returns an RpcOrganizationIntegration from the integration and organization ids.
+ """
+ ois = self.get_organization_integrations(
+ integration_id=integration_id, organization_id=organization_id, limit=1
+ )
+ return serialize_organization_integration(ois[0]) if len(ois) > 0 else None
+
+ @rpc_method
+ @abstractmethod
+ def get_organization_context(
+ self,
+ *,
+ organization_id: int,
+ integration_id: Optional[int] = None,
+ provider: Optional[str] = None,
+ external_id: Optional[str] = None,
+ ) -> Tuple[Optional[RpcIntegration], Optional[RpcOrganizationIntegration]]:
+ """
+ Returns a tuple of RpcIntegration and RpcOrganizationIntegration. The integration is selected
+ by either integration_id, or a combination of provider and external_id.
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_organization_contexts(
+ self,
+ *,
+ organization_id: Optional[int] = None,
+ integration_id: Optional[int] = None,
+ provider: Optional[str] = None,
+ external_id: Optional[str] = None,
+ ) -> Tuple[Optional[RpcIntegration], List[RpcOrganizationIntegration]]:
+ """
+ Returns a tuple of RpcIntegration and RpcOrganizationIntegrations. The integrations are selected
+ by either integration_id, or a combination of provider and external_id.
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update_integrations(
+ self,
+ *,
+ integration_ids: List[int],
+ name: Optional[str] = None,
+ metadata: Optional[Dict[str, Any]] = None,
+ status: Optional[int] = None,
+ ) -> List[RpcIntegration]:
+ """
+        Returns a list of RpcIntegrations after updating the fields provided.
+ To set a field as null, use the `set_{FIELD}_null` keyword argument.
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update_integration(
+ self,
+ *,
+ integration_id: int,
+ name: Optional[str] = None,
+ metadata: Optional[Dict[str, Any]] = None,
+ status: Optional[int] = None,
+ ) -> Optional[RpcIntegration]:
+ """
+ Returns an RpcIntegration after updating the fields provided.
+ To set a field as null, use the `set_{FIELD}_null` keyword argument.
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update_organization_integrations(
+ self,
+ *,
+ org_integration_ids: List[int],
+ config: Optional[Dict[str, Any]] = None,
+ status: Optional[int] = None,
+ grace_period_end: Optional[datetime] = None,
+ set_grace_period_end_null: Optional[bool] = None,
+ ) -> List[RpcOrganizationIntegration]:
+ """
+        Returns a list of RpcOrganizationIntegrations after updating the fields provided.
+ To set a field as null, use the `set_{FIELD}_null` keyword argument.
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update_organization_integration(
+ self,
+ *,
+ org_integration_id: int,
+ config: Optional[Dict[str, Any]] = None,
+ status: Optional[int] = None,
+ grace_period_end: Optional[datetime] = None,
+ set_grace_period_end_null: Optional[bool] = None,
+ ) -> Optional[RpcOrganizationIntegration]:
+ """
+ Returns an RpcOrganizationIntegration after updating the fields provided.
+ To set a field as null, use the `set_{FIELD}_null` keyword argument.
+ """
+ pass
+
+ # The following methods replace instance methods of the ORM objects!
+
+ def get_installation(
+ self,
+ *,
+ integration: Union[RpcIntegration, Integration],
+ organization_id: int,
+ ) -> IntegrationInstallation:
+ """
+ Returns the IntegrationInstallation class for a given integration.
+ Intended to replace calls of `integration.get_installation`.
+ See src/sentry/models/integrations/integration.py
+ """
+ from sentry import integrations
+
+ provider = integrations.get(integration.provider)
+ installation: IntegrationInstallation = provider.get_installation(
+ model=integration,
+ organization_id=organization_id,
+ )
+ return installation
+
+ def has_feature(self, *, provider: str, feature: IntegrationFeatures) -> bool:
+ """
+ Returns True if the IntegrationProvider subclass contains a given feature
+ Intended to replace calls of `integration.has_feature`.
+ See src/sentry/models/integrations/integration.py
+ """
+ from sentry import integrations
+
+ int_provider: IntegrationProvider = integrations.get(provider)
+ return feature in int_provider.features
+
+ @rpc_method
+ @abstractmethod
+ def send_incident_alert_notification(
+ self,
+ *,
+ sentry_app_id: int,
+ action_id: int,
+ incident_id: int,
+ organization: RpcOrganizationSummary,
+ new_status: int,
+ incident_attachment: Mapping[str, str],
+ metric_value: Optional[str] = None,
+ ) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def send_msteams_incident_alert_notification(
+ self, *, integration_id: int, channel: Optional[str], attachment: Dict[str, Any]
+ ) -> None:
+ raise NotImplementedError
+
+
+integration_service: IntegrationService = cast(
+ IntegrationService, IntegrationService.create_delegation()
+)
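
The two non-RPC helpers (get_installation, has_feature) run entirely locally, consulting only the provider registry. A hypothetical guard around fetching an installation; the feature name and ids are assumptions for illustration:

from sentry.integrations.base import IntegrationFeatures

integration = integration_service.get_integration(provider="slack", external_id="T0123")
if integration is not None and integration_service.has_feature(
    provider=integration.provider, feature=IntegrationFeatures.CHAT_UNFURL
):
    installation = integration_service.get_installation(
        integration=integration, organization_id=123
    )
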
diff --git a/src/sentry/services/hybrid_cloud/lost_password_hash/__init__.py b/src/sentry/services/hybrid_cloud/lost_password_hash/__init__.py
index 00cc13befeba2c..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/lost_password_hash/__init__.py
+++ b/src/sentry/services/hybrid_cloud/lost_password_hash/__init__.py
@@ -1,60 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-import datetime
-from abc import abstractmethod
-from typing import cast
-
-from sentry.models import LostPasswordHash
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.silo import SiloMode
-
-
-class LostPasswordHashService(RpcService):
- key = "lost_password_hash"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.lost_password_hash.impl import (
- DatabaseLostPasswordHashService,
- )
-
- return DatabaseLostPasswordHashService()
-
- # TODO: Denormalize this scim enabled flag onto organizations?
- # This is potentially a large list
- @rpc_method
- @abstractmethod
- def get_or_create(
- self,
- *,
- user_id: int,
- ) -> "RpcLostPasswordHash":
- """
- This method returns a valid RpcLostPasswordHash for a user
- :return:
- """
- pass
-
- @classmethod
- def serialize_lostpasswordhash(cls, lph: LostPasswordHash) -> "RpcLostPasswordHash":
- return cast(RpcLostPasswordHash, RpcLostPasswordHash.serialize_by_field_name(lph))
-
-
-class RpcLostPasswordHash(RpcModel):
- id: int = -1
- user_id: int = -1
- hash: str = ""
- date_added = datetime.datetime
-
- def get_absolute_url(self, mode: str = "recover") -> str:
- return cast(str, LostPasswordHash.get_lostpassword_url(self.user_id, self.hash, mode))
-
-
-lost_password_hash_service: LostPasswordHashService = cast(
- LostPasswordHashService, LostPasswordHashService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/lost_password_hash/impl.py b/src/sentry/services/hybrid_cloud/lost_password_hash/impl.py
index f891fc890619e1..e26fd1f52c202d 100644
--- a/src/sentry/services/hybrid_cloud/lost_password_hash/impl.py
+++ b/src/sentry/services/hybrid_cloud/lost_password_hash/impl.py
@@ -5,6 +5,7 @@
LostPasswordHashService,
RpcLostPasswordHash,
)
+from sentry.services.hybrid_cloud.lost_password_hash.serial import serialize_lostpasswordhash
class DatabaseLostPasswordHashService(LostPasswordHashService):
@@ -23,7 +24,7 @@ def get_or_create(
password_hash.date_added = datetime.datetime.now()
password_hash.set_hash()
password_hash.save()
- return self.serialize_lostpasswordhash(password_hash)
+ return serialize_lostpasswordhash(password_hash)
def close(self) -> None:
pass
diff --git a/src/sentry/services/hybrid_cloud/lost_password_hash/model.py b/src/sentry/services/hybrid_cloud/lost_password_hash/model.py
new file mode 100644
index 00000000000000..5f18bf7694ba58
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/lost_password_hash/model.py
@@ -0,0 +1,20 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import datetime
+from typing import cast
+
+from sentry.models import LostPasswordHash
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcLostPasswordHash(RpcModel):
+ id: int = -1
+ user_id: int = -1
+ hash: str = ""
+ date_added = datetime.datetime
+
+ def get_absolute_url(self, mode: str = "recover") -> str:
+ return cast(str, LostPasswordHash.get_lostpassword_url(self.user_id, self.hash, mode))
diff --git a/src/sentry/services/hybrid_cloud/lost_password_hash/serial.py b/src/sentry/services/hybrid_cloud/lost_password_hash/serial.py
new file mode 100644
index 00000000000000..4ba787bdcd1aaf
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/lost_password_hash/serial.py
@@ -0,0 +1,8 @@
+from typing import cast
+
+from sentry.models import LostPasswordHash
+from sentry.services.hybrid_cloud.lost_password_hash import RpcLostPasswordHash
+
+
+def serialize_lostpasswordhash(lph: LostPasswordHash) -> RpcLostPasswordHash:
+ return cast(RpcLostPasswordHash, RpcLostPasswordHash.serialize_by_field_name(lph))
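
Because the serializer is now a free function rather than a classmethod on the service, it can be reused without going through the RPC surface. A short sketch, assuming a LostPasswordHash row already exists for the user:

    from sentry.models import LostPasswordHash
    from sentry.services.hybrid_cloud.lost_password_hash.serial import (
        serialize_lostpasswordhash,
    )

    lph = LostPasswordHash.objects.get(user_id=user_id)  # assumes the row exists
    rpc_lph = serialize_lostpasswordhash(lph)  # field-by-field copy of id, user_id, hash, date_added
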
diff --git a/src/sentry/services/hybrid_cloud/lost_password_hash/service.py b/src/sentry/services/hybrid_cloud/lost_password_hash/service.py
new file mode 100644
index 00000000000000..d8e31ba7fce3b8
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/lost_password_hash/service.py
@@ -0,0 +1,44 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import cast
+
+from sentry.services.hybrid_cloud.lost_password_hash import RpcLostPasswordHash
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.silo import SiloMode
+
+
+class LostPasswordHashService(RpcService):
+ key = "lost_password_hash"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.lost_password_hash.impl import (
+ DatabaseLostPasswordHashService,
+ )
+
+ return DatabaseLostPasswordHashService()
+
+ @rpc_method
+ @abstractmethod
+ def get_or_create(
+ self,
+ *,
+ user_id: int,
+ ) -> RpcLostPasswordHash:
+ """
+        Returns a valid RpcLostPasswordHash for the given user, creating one if it
+        does not already exist.
+ """
+ pass
+
+
+lost_password_hash_service: LostPasswordHashService = cast(
+ LostPasswordHashService, LostPasswordHashService.create_delegation()
+)
diff --git a/src/sentry/services/hybrid_cloud/notifications/__init__.py b/src/sentry/services/hybrid_cloud/notifications/__init__.py
index edcb7f49e3553e..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/notifications/__init__.py
+++ b/src/sentry/services/hybrid_cloud/notifications/__init__.py
@@ -1,136 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from typing import TYPE_CHECKING, List, Mapping, Optional, Sequence, cast
-
-from sentry.notifications.types import (
- NotificationScopeType,
- NotificationSettingOptionValues,
- NotificationSettingTypes,
-)
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.actor import RpcActor
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.services.hybrid_cloud.user import RpcUser
-from sentry.silo import SiloMode
-from sentry.types.integrations import ExternalProviders
-
-if TYPE_CHECKING:
- from sentry.models import NotificationSetting
-
-
-class RpcNotificationSetting(RpcModel):
- scope_type: NotificationScopeType = NotificationScopeType.USER
- scope_identifier: int = -1
- target_id: int = -1
- provider: ExternalProviders = ExternalProviders.EMAIL
- type: NotificationSettingTypes = NotificationSettingTypes.WORKFLOW
- value: NotificationSettingOptionValues = NotificationSettingOptionValues.DEFAULT
-
-
-class NotificationsService(RpcService):
- key = "notifications"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.notifications.impl import (
- DatabaseBackedNotificationsService,
- )
-
- return DatabaseBackedNotificationsService()
-
- @rpc_method
- @abstractmethod
- def get_settings_for_recipient_by_parent(
- self,
- *,
- type: NotificationSettingTypes,
- parent_id: int,
- recipients: Sequence[RpcActor],
- ) -> List[RpcNotificationSetting]:
- pass
-
- @rpc_method
- @abstractmethod
- def get_settings_for_users(
- self,
- *,
- types: List[NotificationSettingTypes],
- users: List[RpcUser],
- value: NotificationSettingOptionValues,
- ) -> List[RpcNotificationSetting]:
- pass
-
- @rpc_method
- @abstractmethod
- def get_settings_for_user_by_projects(
- self, *, type: NotificationSettingTypes, user_id: int, parent_ids: List[int]
- ) -> List[RpcNotificationSetting]:
- pass
-
- @classmethod
- def serialize_notification_setting(
- self, setting: "NotificationSetting"
- ) -> RpcNotificationSetting:
- return RpcNotificationSetting(
- scope_type=setting.scope_type,
- scope_identifier=setting.scope_identifier,
- target_id=setting.target_id,
- provider=setting.provider,
- type=setting.type,
- value=setting.value,
- )
-
- @rpc_method
- @abstractmethod
- def update_settings(
- self,
- *,
- external_provider: ExternalProviders,
- notification_type: NotificationSettingTypes,
- setting_option: NotificationSettingOptionValues,
- actor: RpcActor,
- project_id: Optional[int] = None,
- organization_id: Optional[int] = None,
- ) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def bulk_update_settings(
- self,
- *,
- notification_type_to_value_map: Mapping[
- NotificationSettingTypes, NotificationSettingOptionValues
- ],
- external_provider: ExternalProviders,
- actor: RpcActor,
- ) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def uninstall_slack_settings(
- self,
- organization_id: int,
- project_ids: List[int],
- ) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def remove_notification_settings(self, *, actor_id: int, provider: ExternalProviders) -> None:
- """
- Delete notification settings based on an actor_id
- There is no foreign key relationship so we have to manually cascade.
- """
- pass
-
-
-notifications_service: NotificationsService = cast(
- NotificationsService, NotificationsService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/notifications/impl.py b/src/sentry/services/hybrid_cloud/notifications/impl.py
index 8a2f2a8c76e2de..6133eb2d1ed6c2 100644
--- a/src/sentry/services/hybrid_cloud/notifications/impl.py
+++ b/src/sentry/services/hybrid_cloud/notifications/impl.py
@@ -14,6 +14,7 @@
)
from sentry.services.hybrid_cloud.actor import ActorType, RpcActor
from sentry.services.hybrid_cloud.notifications import NotificationsService, RpcNotificationSetting
+from sentry.services.hybrid_cloud.notifications.serial import serialize_notification_setting
from sentry.services.hybrid_cloud.user import RpcUser
from sentry.types.integrations import ExternalProviders
@@ -81,7 +82,7 @@ def get_settings_for_users(
value=value.value,
scope_type=NotificationScopeType.USER.value,
)
- return [self.serialize_notification_setting(u) for u in settings]
+ return [serialize_notification_setting(u) for u in settings]
def get_settings_for_recipient_by_parent(
self, *, type: NotificationSettingTypes, parent_id: int, recipients: Sequence[RpcActor]
@@ -108,7 +109,7 @@ def get_settings_for_recipient_by_parent(
target_id__in=actor_ids,
)
- return [self.serialize_notification_setting(s) for s in notification_settings]
+ return [serialize_notification_setting(s) for s in notification_settings]
def get_settings_for_user_by_projects(
self, *, type: NotificationSettingTypes, user_id: int, parent_ids: List[int]
@@ -120,7 +121,7 @@ def get_settings_for_user_by_projects(
scope_type = get_scope_type(type)
return [
- self.serialize_notification_setting(s)
+ serialize_notification_setting(s)
for s in NotificationSetting.objects.filter(
Q(
scope_type=scope_type.value,
diff --git a/src/sentry/services/hybrid_cloud/notifications/model.py b/src/sentry/services/hybrid_cloud/notifications/model.py
new file mode 100644
index 00000000000000..4a6d7bd4acfb16
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/notifications/model.py
@@ -0,0 +1,21 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from sentry.notifications.types import (
+ NotificationScopeType,
+ NotificationSettingOptionValues,
+ NotificationSettingTypes,
+)
+from sentry.services.hybrid_cloud import RpcModel
+from sentry.types.integrations import ExternalProviders
+
+
+class RpcNotificationSetting(RpcModel):
+ scope_type: NotificationScopeType = NotificationScopeType.USER
+ scope_identifier: int = -1
+ target_id: int = -1
+ provider: ExternalProviders = ExternalProviders.EMAIL
+ type: NotificationSettingTypes = NotificationSettingTypes.WORKFLOW
+ value: NotificationSettingOptionValues = NotificationSettingOptionValues.DEFAULT
diff --git a/src/sentry/services/hybrid_cloud/notifications/serial.py b/src/sentry/services/hybrid_cloud/notifications/serial.py
new file mode 100644
index 00000000000000..bb4362c25a4f86
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/notifications/serial.py
@@ -0,0 +1,13 @@
+from sentry.models import NotificationSetting
+from sentry.services.hybrid_cloud.notifications import RpcNotificationSetting
+
+
+def serialize_notification_setting(setting: NotificationSetting) -> RpcNotificationSetting:
+ return RpcNotificationSetting(
+ scope_type=setting.scope_type,
+ scope_identifier=setting.scope_identifier,
+ target_id=setting.target_id,
+ provider=setting.provider,
+ type=setting.type,
+ value=setting.value,
+ )
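
The extracted serializer is called the same way impl.py now calls it: as a plain function mapped over a queryset. A hypothetical example (the filter values are assumed):

    from sentry.models import NotificationSetting
    from sentry.services.hybrid_cloud.notifications.serial import (
        serialize_notification_setting,
    )

    notification_settings = NotificationSetting.objects.filter(target_id__in=[1, 2, 3])
    rpc_settings = [serialize_notification_setting(s) for s in notification_settings]
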
diff --git a/src/sentry/services/hybrid_cloud/notifications/service.py b/src/sentry/services/hybrid_cloud/notifications/service.py
new file mode 100644
index 00000000000000..3cc935fdc77824
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/notifications/service.py
@@ -0,0 +1,107 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import List, Mapping, Optional, Sequence, cast
+
+from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes
+from sentry.services.hybrid_cloud.actor import RpcActor
+from sentry.services.hybrid_cloud.notifications import RpcNotificationSetting
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.services.hybrid_cloud.user import RpcUser
+from sentry.silo import SiloMode
+from sentry.types.integrations import ExternalProviders
+
+
+class NotificationsService(RpcService):
+ key = "notifications"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.notifications.impl import (
+ DatabaseBackedNotificationsService,
+ )
+
+ return DatabaseBackedNotificationsService()
+
+ @rpc_method
+ @abstractmethod
+ def get_settings_for_recipient_by_parent(
+ self,
+ *,
+ type: NotificationSettingTypes,
+ parent_id: int,
+ recipients: Sequence[RpcActor],
+ ) -> List[RpcNotificationSetting]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_settings_for_users(
+ self,
+ *,
+ types: List[NotificationSettingTypes],
+ users: List[RpcUser],
+ value: NotificationSettingOptionValues,
+ ) -> List[RpcNotificationSetting]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_settings_for_user_by_projects(
+ self, *, type: NotificationSettingTypes, user_id: int, parent_ids: List[int]
+ ) -> List[RpcNotificationSetting]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update_settings(
+ self,
+ *,
+ external_provider: ExternalProviders,
+ notification_type: NotificationSettingTypes,
+ setting_option: NotificationSettingOptionValues,
+ actor: RpcActor,
+ project_id: Optional[int] = None,
+ organization_id: Optional[int] = None,
+ ) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def bulk_update_settings(
+ self,
+ *,
+ notification_type_to_value_map: Mapping[
+ NotificationSettingTypes, NotificationSettingOptionValues
+ ],
+ external_provider: ExternalProviders,
+ actor: RpcActor,
+ ) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def uninstall_slack_settings(
+ self,
+ organization_id: int,
+ project_ids: List[int],
+ ) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def remove_notification_settings(self, *, actor_id: int, provider: ExternalProviders) -> None:
+ """
+        Delete notification settings for the given actor_id.
+        There is no foreign key relationship, so we have to cascade manually.
+ """
+ pass
+
+
+notifications_service: NotificationsService = cast(
+ NotificationsService, NotificationsService.create_delegation()
+)
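
For reference, a sketch of a call against the service facade defined above; the actor and project id are assumed to come from the caller's context:

    from sentry.notifications.types import (
        NotificationSettingOptionValues,
        NotificationSettingTypes,
    )
    from sentry.services.hybrid_cloud.notifications import notifications_service
    from sentry.types.integrations import ExternalProviders

    notifications_service.update_settings(
        external_provider=ExternalProviders.EMAIL,
        notification_type=NotificationSettingTypes.WORKFLOW,
        setting_option=NotificationSettingOptionValues.ALWAYS,
        actor=actor,            # an RpcActor supplied by the caller
        project_id=project_id,  # assumed to be in scope
    )
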
diff --git a/src/sentry/services/hybrid_cloud/organization/__init__.py b/src/sentry/services/hybrid_cloud/organization/__init__.py
index 2633fa6603c268..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/organization/__init__.py
+++ b/src/sentry/services/hybrid_cloud/organization/__init__.py
@@ -1,338 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from typing import Any, Iterable, List, Mapping, Optional, cast
-
-from pydantic import Field
-
-from sentry.constants import ObjectStatus
-from sentry.models.organization import OrganizationStatus
-from sentry.models.organizationmember import InviteStatus
-from sentry.roles import team_roles
-from sentry.roles.manager import TeamRole
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.region import (
- ByOrganizationId,
- ByOrganizationIdAttribute,
- ByOrganizationSlug,
- UnimplementedRegionResolution,
-)
-from sentry.services.hybrid_cloud.rpc import RpcService, regional_rpc_method
-from sentry.silo import SiloMode
-
-
-def team_status_visible() -> int:
- from sentry.models import TeamStatus
-
- return int(TeamStatus.ACTIVE)
-
-
-class RpcTeam(RpcModel):
- id: int = -1
- status: int = Field(default_factory=team_status_visible)
- organization_id: int = -1
- slug: str = ""
- actor_id: Optional[int] = None
- org_role: Optional[str] = None
-
- def class_name(self) -> str:
- return "Team"
-
-
-class RpcTeamMember(RpcModel):
- id: int = -1
- is_active: bool = False
- role_id: str = ""
- project_ids: List[int] = Field(default_factory=list)
- scopes: List[str] = Field(default_factory=list)
- team_id: int = -1
-
- @property
- def role(self) -> Optional[TeamRole]:
- return team_roles.get(self.role_id) if self.role_id else None
-
-
-def project_status_visible() -> int:
- return int(ObjectStatus.ACTIVE)
-
-
-class RpcProject(RpcModel):
- id: int = -1
- slug: str = ""
- name: str = ""
- organization_id: int = -1
- status: int = Field(default_factory=project_status_visible)
-
-
-class RpcOrganizationMemberFlags(RpcModel):
- sso__linked: bool = False
- sso__invalid: bool = False
- member_limit__restricted: bool = False
-
- def __getattr__(self, item: str) -> bool:
- from sentry.services.hybrid_cloud.organization.impl import escape_flag_name
-
- item = escape_flag_name(item)
- return bool(getattr(self, item))
-
- def __getitem__(self, item: str) -> bool:
- return bool(getattr(self, item))
-
-
-class RpcOrganizationMemberSummary(RpcModel):
- id: int = -1
- organization_id: int = -1
- user_id: Optional[int] = None # This can be null when the user is deleted.
- flags: RpcOrganizationMemberFlags = Field(default_factory=lambda: RpcOrganizationMemberFlags())
-
-
-class RpcOrganizationMember(RpcOrganizationMemberSummary):
- member_teams: List[RpcTeamMember] = Field(default_factory=list)
- role: str = ""
- has_global_access: bool = False
- project_ids: List[int] = Field(default_factory=list)
- scopes: List[str] = Field(default_factory=list)
- invite_status: int = InviteStatus.APPROVED.value
-
- def get_audit_log_metadata(self, user_email: str) -> Mapping[str, Any]:
- team_ids = [mt.team_id for mt in self.member_teams]
-
- return {
- "email": user_email,
- "teams": team_ids,
- "has_global_access": self.has_global_access,
- "role": self.role,
- "invite_status": self.invite_status,
- }
-
-
-class RpcOrganizationFlags(RpcModel):
- allow_joinleave: bool = False
- enhanced_privacy: bool = False
- disable_shared_issues: bool = False
- early_adopter: bool = False
- require_2fa: bool = False
- disable_new_visibility_features: bool = False
- require_email_verification: bool = False
-
-
-class RpcOrganizationInvite(RpcModel):
- id: int = -1
- token: str = ""
- email: str = ""
-
-
-class RpcOrganizationSummary(RpcModel):
- """
- The subset of organization metadata available from the control silo specifically.
- """
-
- slug: str = ""
- id: int = -1
- name: str = ""
-
- def __hash__(self) -> int:
- # Mimic the behavior of hashing a Django ORM entity, for compatibility with
- # serializers, as this organization summary object is often used for that.
- return hash((self.id, self.slug))
-
-
-class RpcOrganization(RpcOrganizationSummary):
- # Represents the full set of teams and projects associated with the org. Note that these are not filtered by
- # visibility, but you can apply a manual filter on the status attribute.
- teams: List[RpcTeam] = Field(default_factory=list)
- projects: List[RpcProject] = Field(default_factory=list)
-
- flags: RpcOrganizationFlags = Field(default_factory=lambda: RpcOrganizationFlags())
- status: OrganizationStatus = OrganizationStatus.ACTIVE
-
- default_role: str = ""
-
-
-class RpcUserOrganizationContext(RpcModel):
- """
- This object wraps an organization result inside of its membership context in terms of an (optional) user id.
- This is due to the large number of callsites that require an organization and a user's membership at the
- same time and in a consistency state. This object allows a nice envelop for both of these ideas from a single
- transactional query. Used by access, determine_active_organization, and others.
- """
-
- # user_id is None iff the get_organization_by_id call is not provided a user_id context.
- user_id: Optional[int] = None
- # The organization is always non-null because the null wrapping is around this object instead.
- # A None organization => a None RpcUserOrganizationContext
- organization: RpcOrganization = Field(default_factory=lambda: RpcOrganization())
- # member can be None when the given user_id does not have membership with the given organization.
- # Note that all related fields of this organization member are filtered by visibility and is_active=True.
- member: Optional[RpcOrganizationMember] = None
-
- def __post_init__(self) -> None:
- # Ensures that outer user_id always agrees with the inner member object.
- if self.user_id is not None and self.member is not None:
- assert self.user_id == self.member.user_id
-
-
-class OrganizationService(RpcService):
- key = "organization"
- local_mode = SiloMode.REGION
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
-
- return DatabaseBackedOrganizationService()
-
- @regional_rpc_method(resolve=ByOrganizationId("id"))
- @abstractmethod
- def get_organization_by_id(
- self, *, id: int, user_id: Optional[int] = None, slug: Optional[str] = None
- ) -> Optional[RpcUserOrganizationContext]:
- """
- Fetches the organization, team, and project data given by an organization id, regardless of its visibility
- status. When user_id is provided, membership data related to that user from the organization
- is also given in the response. See RpcUserOrganizationContext for more info.
- """
- pass
-
- @regional_rpc_method(resolve=ByOrganizationSlug())
- @abstractmethod
- def get_org_by_slug(
- self,
- *,
- slug: str,
- user_id: Optional[int] = None,
- ) -> Optional[RpcOrganizationSummary]:
- """
- Fetches the organization, by an organization slug. If user_id is passed, it will enforce visibility
- rules. This method is differentiated from get_organization_by_slug by not being cached and returning
- RpcOrganizationSummary instead of org contexts
- """
- pass
-
- # TODO: This should return RpcOrganizationSummary objects, since we cannot realistically span out requests and
- # capture full org objects / teams / permissions. But we can gather basic summary data from the control silo.
- @regional_rpc_method(resolve=UnimplementedRegionResolution())
- @abstractmethod
- def get_organizations(
- self,
- *,
- user_id: Optional[int],
- scope: Optional[str],
- only_visible: bool,
- organization_ids: Optional[List[int]] = None,
- ) -> List[RpcOrganizationSummary]:
- """
- When user_id is set, returns all organizations associated with that user id given
- a scope and visibility requirement. When user_id is not set, but organization_ids is, provides the
- set of organizations matching those ids, ignore scope and user_id.
-
- When only_visible set, the organization object is only returned if it's status is Visible, otherwise any
- organization will be returned.
-
- Because this endpoint fetches not from region silos, but the control silo organization membership table,
- only a subset of all organization metadata is available. Spanning out and querying multiple organizations
- for their full metadata is greatly discouraged for performance reasons.
- """
- pass
-
- @regional_rpc_method(resolve=ByOrganizationId())
- @abstractmethod
- def check_membership_by_email(
- self, *, organization_id: int, email: str
- ) -> Optional[RpcOrganizationMember]:
- """
- Used to look up an organization membership by an email
- """
- pass
-
- @regional_rpc_method(resolve=ByOrganizationId())
- @abstractmethod
- def check_membership_by_id(
- self, *, organization_id: int, user_id: int
- ) -> Optional[RpcOrganizationMember]:
- """
- Used to look up an organization membership by a user id
- """
- pass
-
- @regional_rpc_method(resolve=ByOrganizationSlug())
- @abstractmethod
- def check_organization_by_slug(self, *, slug: str, only_visible: bool) -> Optional[int]:
- """
- If exists and matches the only_visible requirement, returns an organization's id by the slug.
- """
- pass
-
- def get_organization_by_slug(
- self, *, user_id: Optional[int], slug: str, only_visible: bool
- ) -> Optional[RpcUserOrganizationContext]:
- """
- Defers to check_organization_by_slug -> get_organization_by_id
- """
- org_id = self.check_organization_by_slug(slug=slug, only_visible=only_visible)
- if org_id is None:
- return None
-
- return self.get_organization_by_id(id=org_id, user_id=user_id)
-
- @regional_rpc_method(resolve=ByOrganizationId())
- @abstractmethod
- def add_organization_member(
- self,
- *,
- organization_id: int,
- default_org_role: str,
- user_id: Optional[int] = None,
- email: Optional[str] = None,
- flags: Optional[RpcOrganizationMemberFlags] = None,
- role: Optional[str] = None,
- inviter_id: Optional[int] = None,
- invite_status: Optional[int] = InviteStatus.APPROVED.value,
- ) -> RpcOrganizationMember:
- pass
-
- @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member"))
- @abstractmethod
- def add_team_member(self, *, team_id: int, organization_member: RpcOrganizationMember) -> None:
- pass
-
- @regional_rpc_method(resolve=UnimplementedRegionResolution())
- @abstractmethod
- def get_team_members(self, *, team_id: int) -> Iterable[RpcOrganizationMember]:
- pass
-
- @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member"))
- @abstractmethod
- def update_membership_flags(self, *, organization_member: RpcOrganizationMember) -> None:
- pass
-
- @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member"))
- @abstractmethod
- def get_all_org_roles(
- self,
- *,
- organization_member: Optional[RpcOrganizationMember] = None,
- member_id: Optional[int] = None,
- ) -> List[str]:
- pass
-
- @regional_rpc_method(resolve=ByOrganizationId())
- @abstractmethod
- def get_top_dog_team_member_ids(self, *, organization_id: int) -> List[int]:
- pass
-
- @regional_rpc_method(resolve=ByOrganizationId())
- @abstractmethod
- def remove_user(self, *, organization_id: int, user_id: int) -> RpcOrganizationMember:
- pass
-
- @regional_rpc_method(resolve=ByOrganizationId())
- @abstractmethod
- def reset_idp_flags(self, *, organization_id: int) -> None:
- pass
-
-
-organization_service = cast(OrganizationService, OrganizationService.create_delegation())
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/organization/impl.py b/src/sentry/services/hybrid_cloud/organization/impl.py
index 755267d3f80ab0..b161e63214279d 100644
--- a/src/sentry/services/hybrid_cloud/organization/impl.py
+++ b/src/sentry/services/hybrid_cloud/organization/impl.py
@@ -1,175 +1,37 @@
from __future__ import annotations
-from collections import defaultdict
-from typing import Iterable, List, MutableMapping, Optional, Set, cast
+from typing import Iterable, List, Optional, Set, cast
from django.db import models, transaction
from sentry import roles
-from sentry.constants import ObjectStatus
from sentry.db.postgres.roles import in_test_psql_role_override
from sentry.models import (
Organization,
OrganizationMember,
OrganizationMemberTeam,
OrganizationStatus,
- Project,
- ProjectTeam,
Team,
- TeamStatus,
)
from sentry.models.organizationmember import InviteStatus
from sentry.services.hybrid_cloud import logger
from sentry.services.hybrid_cloud.organization import (
OrganizationService,
- RpcOrganization,
- RpcOrganizationFlags,
RpcOrganizationInvite,
RpcOrganizationMember,
RpcOrganizationMemberFlags,
- RpcOrganizationMemberSummary,
RpcOrganizationSummary,
- RpcProject,
- RpcTeam,
- RpcTeamMember,
RpcUserOrganizationContext,
)
+from sentry.services.hybrid_cloud.organization.serial import (
+ serialize_member,
+ serialize_organization,
+ serialize_organization_summary,
+)
from sentry.services.hybrid_cloud.util import flags_to_bits
-def escape_flag_name(flag_name: str) -> str:
- return flag_name.replace(":", "__").replace("-", "_")
-
-
-def unescape_flag_name(flag_name: str) -> str:
- return flag_name.replace("__", ":").replace("_", "-")
-
-
class DatabaseBackedOrganizationService(OrganizationService):
- @classmethod
- def _serialize_member_flags(cls, member: OrganizationMember) -> RpcOrganizationMemberFlags:
- return cast(
- RpcOrganizationMemberFlags,
- RpcOrganizationMemberFlags.serialize_by_field_name(
- member.flags, name_transform=unescape_flag_name, value_transform=bool
- ),
- )
-
- @classmethod
- def serialize_member(
- cls,
- member: OrganizationMember,
- ) -> RpcOrganizationMember:
- rpc_member = RpcOrganizationMember(
- id=member.id,
- organization_id=member.organization_id,
- user_id=member.user.id if member.user is not None else None,
- role=member.role,
- has_global_access=member.has_global_access,
- scopes=list(member.get_scopes()),
- flags=cls._serialize_member_flags(member),
- invite_status=member.invite_status,
- )
-
- omts = OrganizationMemberTeam.objects.filter(
- organizationmember=member, is_active=True, team__status=TeamStatus.ACTIVE
- )
-
- all_project_ids: Set[int] = set()
- project_ids_by_team_id: MutableMapping[int, List[int]] = defaultdict(list)
- for pt in ProjectTeam.objects.filter(
- project__status=ObjectStatus.ACTIVE, team_id__in={omt.team_id for omt in omts}
- ):
- all_project_ids.add(pt.project_id)
- project_ids_by_team_id[pt.team_id].append(pt.project_id)
-
- for omt in omts:
- omt.organizationmember = member
- rpc_member.member_teams.append(
- cls._serialize_team_member(omt, project_ids_by_team_id[omt.team_id])
- )
- rpc_member.project_ids = list(all_project_ids)
-
- return rpc_member
-
- @classmethod
- def summarize_member(
- cls,
- member: OrganizationMember,
- ) -> RpcOrganizationMemberSummary:
- return RpcOrganizationMemberSummary(
- id=member.id,
- organization_id=member.organization_id,
- user_id=member.user_id,
- flags=cls._serialize_member_flags(member),
- )
-
- @classmethod
- def _serialize_flags(cls, org: Organization) -> RpcOrganizationFlags:
- return cast(
- RpcOrganizationFlags,
- RpcOrganizationFlags.serialize_by_field_name(org.flags, value_transform=bool),
- )
-
- @classmethod
- def _serialize_team(cls, team: Team) -> RpcTeam:
- return RpcTeam(
- id=team.id,
- status=team.status,
- organization_id=team.organization_id,
- slug=team.slug,
- org_role=team.org_role,
- )
-
- @classmethod
- def _serialize_team_member(
- cls, team_member: OrganizationMemberTeam, project_ids: Iterable[int]
- ) -> RpcTeamMember:
- result = RpcTeamMember(
- id=team_member.id,
- is_active=team_member.is_active,
- role_id=team_member.get_team_role().id,
- team_id=team_member.team_id,
- project_ids=list(project_ids),
- scopes=list(team_member.get_scopes()),
- )
-
- return result
-
- @classmethod
- def _serialize_project(cls, project: Project) -> RpcProject:
- return RpcProject(
- id=project.id,
- slug=project.slug,
- name=project.name,
- organization_id=project.organization_id,
- status=project.status,
- )
-
- def _serialize_organization_summary(self, org: Organization) -> RpcOrganizationSummary:
- return RpcOrganizationSummary(
- slug=org.slug,
- id=org.id,
- name=org.name,
- )
-
- @classmethod
- def serialize_organization(cls, org: Organization) -> RpcOrganization:
- rpc_org: RpcOrganization = RpcOrganization(
- slug=org.slug,
- id=org.id,
- flags=cls._serialize_flags(org),
- name=org.name,
- status=org.status,
- default_role=org.default_role,
- )
-
- projects: List[Project] = Project.objects.filter(organization=org)
- teams: List[Team] = Team.objects.filter(organization=org)
- rpc_org.projects.extend(cls._serialize_project(project) for project in projects)
- rpc_org.teams.extend(cls._serialize_team(team) for team in teams)
- return rpc_org
-
def check_membership_by_id(
self, organization_id: int, user_id: int
) -> Optional[RpcOrganizationMember]:
@@ -182,7 +44,7 @@ def check_membership_by_id(
except OrganizationMember.DoesNotExist:
return None
- return self.serialize_member(member)
+ return serialize_member(member)
def get_organization_by_id(
self, *, id: int, user_id: Optional[int] = None, slug: Optional[str] = None
@@ -200,7 +62,7 @@ def get_organization_by_id(
return None
return RpcUserOrganizationContext(
- user_id=user_id, organization=self.serialize_organization(org), member=membership
+ user_id=user_id, organization=serialize_organization(org), member=membership
)
def get_org_by_slug(
@@ -216,7 +78,7 @@ def get_org_by_slug(
member_set__user_id=user_id,
)
try:
- return self._serialize_organization_summary(query.get())
+ return serialize_organization_summary(query.get())
except Organization.DoesNotExist:
return None
@@ -228,7 +90,7 @@ def check_membership_by_email(
except OrganizationMember.DoesNotExist:
return None
- return self.serialize_member(member)
+ return serialize_member(member)
def check_organization_by_slug(self, *, slug: str, only_visible: bool) -> Optional[int]:
try:
@@ -263,7 +125,7 @@ def get_organizations(
organizations = list(qs)
else:
organizations = []
- return [self._serialize_organization_summary(o) for o in organizations]
+ return [serialize_organization_summary(o) for o in organizations]
def _query_organizations(
self, user_id: int, scope: Optional[str], only_visible: bool
@@ -323,7 +185,7 @@ def add_organization_member(
region_outbox.save()
if region_outbox:
region_outbox.drain_shard(max_updates_to_drain=10)
- return self.serialize_member(org_member)
+ return serialize_member(org_member)
def add_team_member(self, *, team_id: int, organization_member: RpcOrganizationMember) -> None:
OrganizationMemberTeam.objects.create(
@@ -335,9 +197,7 @@ def add_team_member(self, *, team_id: int, organization_member: RpcOrganizationM
def get_team_members(self, *, team_id: int) -> Iterable[RpcOrganizationMember]:
team_members = OrganizationMemberTeam.objects.filter(team_id=team_id)
- return [
- self.serialize_member(team_member.organizationmember) for team_member in team_members
- ]
+ return [serialize_member(team_member.organizationmember) for team_member in team_members]
def update_membership_flags(self, *, organization_member: RpcOrganizationMember) -> None:
model = OrganizationMember.objects.get(id=organization_member.id)
@@ -355,7 +215,7 @@ def get_all_org_roles(
) -> List[str]:
if member_id:
member = OrganizationMember.objects.get(id=member_id)
- organization_member = self.serialize_member(member)
+ organization_member = serialize_member(member)
org_roles: List[str] = []
if organization_member:
@@ -393,7 +253,7 @@ def remove_user(self, *, organization_id: int, user_id: int) -> RpcOrganizationM
region_outbox.save()
if region_outbox:
region_outbox.drain_shard(max_updates_to_drain=10)
- return self.serialize_member(org_member)
+ return serialize_member(org_member)
def reset_idp_flags(self, *, organization_id: int) -> None:
OrganizationMember.objects.filter(
diff --git a/src/sentry/services/hybrid_cloud/organization/model.py b/src/sentry/services/hybrid_cloud/organization/model.py
new file mode 100644
index 00000000000000..3cee80d6923402
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organization/model.py
@@ -0,0 +1,166 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from typing import Any, List, Mapping, Optional
+
+from pydantic import Field
+
+from sentry.constants import ObjectStatus
+from sentry.models.organization import OrganizationStatus
+from sentry.models.organizationmember import InviteStatus
+from sentry.roles import team_roles
+from sentry.roles.manager import TeamRole
+from sentry.services.hybrid_cloud import RpcModel
+
+
+def team_status_visible() -> int:
+ from sentry.models import TeamStatus
+
+ return int(TeamStatus.ACTIVE)
+
+
+class RpcTeam(RpcModel):
+ id: int = -1
+ status: int = Field(default_factory=team_status_visible)
+ organization_id: int = -1
+ slug: str = ""
+ actor_id: Optional[int] = None
+ org_role: Optional[str] = None
+
+ def class_name(self) -> str:
+ return "Team"
+
+
+class RpcTeamMember(RpcModel):
+ id: int = -1
+ is_active: bool = False
+ role_id: str = ""
+ project_ids: List[int] = Field(default_factory=list)
+ scopes: List[str] = Field(default_factory=list)
+ team_id: int = -1
+
+ @property
+ def role(self) -> Optional[TeamRole]:
+ return team_roles.get(self.role_id) if self.role_id else None
+
+
+def project_status_visible() -> int:
+ return int(ObjectStatus.ACTIVE)
+
+
+class RpcProject(RpcModel):
+ id: int = -1
+ slug: str = ""
+ name: str = ""
+ organization_id: int = -1
+ status: int = Field(default_factory=project_status_visible)
+
+
+class RpcOrganizationMemberFlags(RpcModel):
+ sso__linked: bool = False
+ sso__invalid: bool = False
+ member_limit__restricted: bool = False
+
+ def __getattr__(self, item: str) -> bool:
+ from sentry.services.hybrid_cloud.organization.serial import escape_flag_name
+
+ item = escape_flag_name(item)
+ return bool(getattr(self, item))
+
+ def __getitem__(self, item: str) -> bool:
+ return bool(getattr(self, item))
+
+
+class RpcOrganizationMemberSummary(RpcModel):
+ id: int = -1
+ organization_id: int = -1
+ user_id: Optional[int] = None # This can be null when the user is deleted.
+ flags: RpcOrganizationMemberFlags = Field(default_factory=lambda: RpcOrganizationMemberFlags())
+
+
+class RpcOrganizationMember(RpcOrganizationMemberSummary):
+ member_teams: List[RpcTeamMember] = Field(default_factory=list)
+ role: str = ""
+ has_global_access: bool = False
+ project_ids: List[int] = Field(default_factory=list)
+ scopes: List[str] = Field(default_factory=list)
+ invite_status: int = InviteStatus.APPROVED.value
+
+ def get_audit_log_metadata(self, user_email: str) -> Mapping[str, Any]:
+ team_ids = [mt.team_id for mt in self.member_teams]
+
+ return {
+ "email": user_email,
+ "teams": team_ids,
+ "has_global_access": self.has_global_access,
+ "role": self.role,
+ "invite_status": self.invite_status,
+ }
+
+
+class RpcOrganizationFlags(RpcModel):
+ allow_joinleave: bool = False
+ enhanced_privacy: bool = False
+ disable_shared_issues: bool = False
+ early_adopter: bool = False
+ require_2fa: bool = False
+ disable_new_visibility_features: bool = False
+ require_email_verification: bool = False
+
+
+class RpcOrganizationInvite(RpcModel):
+ id: int = -1
+ token: str = ""
+ email: str = ""
+
+
+class RpcOrganizationSummary(RpcModel):
+ """
+ The subset of organization metadata available from the control silo specifically.
+ """
+
+ slug: str = ""
+ id: int = -1
+ name: str = ""
+
+ def __hash__(self) -> int:
+ # Mimic the behavior of hashing a Django ORM entity, for compatibility with
+ # serializers, as this organization summary object is often used for that.
+ return hash((self.id, self.slug))
+
+
+class RpcOrganization(RpcOrganizationSummary):
+ # Represents the full set of teams and projects associated with the org. Note that these are not filtered by
+ # visibility, but you can apply a manual filter on the status attribute.
+ teams: List[RpcTeam] = Field(default_factory=list)
+ projects: List[RpcProject] = Field(default_factory=list)
+
+ flags: RpcOrganizationFlags = Field(default_factory=lambda: RpcOrganizationFlags())
+ status: OrganizationStatus = OrganizationStatus.ACTIVE
+
+ default_role: str = ""
+
+
+class RpcUserOrganizationContext(RpcModel):
+ """
+    This object wraps an organization result inside its membership context in terms of an (optional) user id.
+    This is due to the large number of callsites that require an organization and a user's membership at the
+    same time and in a consistent state. This object provides a nice envelope for both of these ideas from a
+    single transactional query. Used by access, determine_active_organization, and others.
+ """
+
+ # user_id is None iff the get_organization_by_id call is not provided a user_id context.
+ user_id: Optional[int] = None
+ # The organization is always non-null because the null wrapping is around this object instead.
+ # A None organization => a None RpcUserOrganizationContext
+ organization: RpcOrganization = Field(default_factory=lambda: RpcOrganization())
+ # member can be None when the given user_id does not have membership with the given organization.
+ # Note that all related fields of this organization member are filtered by visibility and is_active=True.
+ member: Optional[RpcOrganizationMember] = None
+
+ def __post_init__(self) -> None:
+ # Ensures that outer user_id always agrees with the inner member object.
+ if self.user_id is not None and self.member is not None:
+ assert self.user_id == self.member.user_id
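
A small sketch of the flag-name indirection in RpcOrganizationMemberFlags: attribute access uses the escaped name, while item access accepts the original Django-style flag name and escapes it via escape_flag_name from serial.py (added below):

    flags = RpcOrganizationMemberFlags(sso__linked=True)
    assert flags.sso__linked is True
    assert flags["sso:linked"] is True  # escape_flag_name("sso:linked") == "sso__linked"
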
diff --git a/src/sentry/services/hybrid_cloud/organization/serial.py b/src/sentry/services/hybrid_cloud/organization/serial.py
new file mode 100644
index 00000000000000..d8271a09bee97c
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organization/serial.py
@@ -0,0 +1,153 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import Iterable, List, MutableMapping, Set, cast
+
+from sentry.constants import ObjectStatus
+from sentry.models import (
+ Organization,
+ OrganizationMember,
+ OrganizationMemberTeam,
+ Project,
+ ProjectTeam,
+ Team,
+ TeamStatus,
+)
+from sentry.services.hybrid_cloud.organization import (
+ RpcOrganization,
+ RpcOrganizationFlags,
+ RpcOrganizationMember,
+ RpcOrganizationMemberFlags,
+ RpcOrganizationMemberSummary,
+ RpcOrganizationSummary,
+ RpcProject,
+ RpcTeam,
+ RpcTeamMember,
+)
+
+
+def escape_flag_name(flag_name: str) -> str:
+ return flag_name.replace(":", "__").replace("-", "_")
+
+
+def unescape_flag_name(flag_name: str) -> str:
+ return flag_name.replace("__", ":").replace("_", "-")
+
+
+def _serialize_member_flags(member: OrganizationMember) -> RpcOrganizationMemberFlags:
+ return cast(
+ RpcOrganizationMemberFlags,
+ RpcOrganizationMemberFlags.serialize_by_field_name(
+ member.flags, name_transform=unescape_flag_name, value_transform=bool
+ ),
+ )
+
+
+def serialize_member(member: OrganizationMember) -> RpcOrganizationMember:
+ rpc_member = RpcOrganizationMember(
+ id=member.id,
+ organization_id=member.organization_id,
+ user_id=member.user.id if member.user is not None else None,
+ role=member.role,
+ has_global_access=member.has_global_access,
+ scopes=list(member.get_scopes()),
+ flags=_serialize_member_flags(member),
+ invite_status=member.invite_status,
+ )
+
+ omts = OrganizationMemberTeam.objects.filter(
+ organizationmember=member, is_active=True, team__status=TeamStatus.ACTIVE
+ )
+
+ all_project_ids: Set[int] = set()
+ project_ids_by_team_id: MutableMapping[int, List[int]] = defaultdict(list)
+ for pt in ProjectTeam.objects.filter(
+ project__status=ObjectStatus.ACTIVE, team_id__in={omt.team_id for omt in omts}
+ ):
+ all_project_ids.add(pt.project_id)
+ project_ids_by_team_id[pt.team_id].append(pt.project_id)
+
+ for omt in omts:
+ omt.organizationmember = member
+ rpc_member.member_teams.append(
+ _serialize_team_member(omt, project_ids_by_team_id[omt.team_id])
+ )
+ rpc_member.project_ids = list(all_project_ids)
+
+ return rpc_member
+
+
+def summarize_member(member: OrganizationMember) -> RpcOrganizationMemberSummary:
+ return RpcOrganizationMemberSummary(
+ id=member.id,
+ organization_id=member.organization_id,
+ user_id=member.user_id,
+ flags=_serialize_member_flags(member),
+ )
+
+
+def _serialize_flags(org: Organization) -> RpcOrganizationFlags:
+ return cast(
+ RpcOrganizationFlags,
+ RpcOrganizationFlags.serialize_by_field_name(org.flags, value_transform=bool),
+ )
+
+
+def _serialize_team(team: Team) -> RpcTeam:
+ return RpcTeam(
+ id=team.id,
+ status=team.status,
+ organization_id=team.organization_id,
+ slug=team.slug,
+ org_role=team.org_role,
+ )
+
+
+def _serialize_team_member(
+ team_member: OrganizationMemberTeam, project_ids: Iterable[int]
+) -> RpcTeamMember:
+ result = RpcTeamMember(
+ id=team_member.id,
+ is_active=team_member.is_active,
+ role_id=team_member.get_team_role().id,
+ team_id=team_member.team_id,
+ project_ids=list(project_ids),
+ scopes=list(team_member.get_scopes()),
+ )
+
+ return result
+
+
+def _serialize_project(project: Project) -> RpcProject:
+ return RpcProject(
+ id=project.id,
+ slug=project.slug,
+ name=project.name,
+ organization_id=project.organization_id,
+ status=project.status,
+ )
+
+
+def serialize_organization_summary(org: Organization) -> RpcOrganizationSummary:
+ return RpcOrganizationSummary(
+ slug=org.slug,
+ id=org.id,
+ name=org.name,
+ )
+
+
+def serialize_organization(org: Organization) -> RpcOrganization:
+ rpc_org: RpcOrganization = RpcOrganization(
+ slug=org.slug,
+ id=org.id,
+ flags=_serialize_flags(org),
+ name=org.name,
+ status=org.status,
+ default_role=org.default_role,
+ )
+
+ projects: List[Project] = Project.objects.filter(organization=org)
+ teams: List[Team] = Team.objects.filter(organization=org)
+ rpc_org.projects.extend(_serialize_project(project) for project in projects)
+ rpc_org.teams.extend(_serialize_team(team) for team in teams)
+ return rpc_org
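
escape_flag_name and unescape_flag_name are inverse transforms between Django flag names and attribute-safe field names, at least for the flag names used here:

    from sentry.services.hybrid_cloud.organization.serial import (
        escape_flag_name,
        unescape_flag_name,
    )

    assert escape_flag_name("member-limit:restricted") == "member_limit__restricted"
    assert unescape_flag_name("member_limit__restricted") == "member-limit:restricted"
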
diff --git a/src/sentry/services/hybrid_cloud/organization/service.py b/src/sentry/services/hybrid_cloud/organization/service.py
new file mode 100644
index 00000000000000..73f45e4f234d4d
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organization/service.py
@@ -0,0 +1,186 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import Iterable, List, Optional, cast
+
+from sentry.models.organizationmember import InviteStatus
+from sentry.services.hybrid_cloud.organization import (
+ RpcOrganizationMember,
+ RpcOrganizationMemberFlags,
+ RpcOrganizationSummary,
+ RpcUserOrganizationContext,
+)
+from sentry.services.hybrid_cloud.region import (
+ ByOrganizationId,
+ ByOrganizationIdAttribute,
+ ByOrganizationSlug,
+ UnimplementedRegionResolution,
+)
+from sentry.services.hybrid_cloud.rpc import RpcService, regional_rpc_method
+from sentry.silo import SiloMode
+
+
+class OrganizationService(RpcService):
+ key = "organization"
+ local_mode = SiloMode.REGION
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+
+ return DatabaseBackedOrganizationService()
+
+ @regional_rpc_method(resolve=ByOrganizationId("id"))
+ @abstractmethod
+ def get_organization_by_id(
+ self, *, id: int, user_id: Optional[int] = None, slug: Optional[str] = None
+ ) -> Optional[RpcUserOrganizationContext]:
+ """
+ Fetches the organization, team, and project data given by an organization id, regardless of its visibility
+ status. When user_id is provided, membership data related to that user from the organization
+ is also given in the response. See RpcUserOrganizationContext for more info.
+ """
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationSlug())
+ @abstractmethod
+ def get_org_by_slug(
+ self,
+ *,
+ slug: str,
+ user_id: Optional[int] = None,
+ ) -> Optional[RpcOrganizationSummary]:
+ """
+        Fetches the organization by an organization slug. If user_id is passed, it will enforce visibility
+        rules. This method is differentiated from get_organization_by_slug by not being cached and by
+        returning an RpcOrganizationSummary instead of an org context.
+ """
+ pass
+
+ # TODO: This should return RpcOrganizationSummary objects, since we cannot realistically span out requests and
+ # capture full org objects / teams / permissions. But we can gather basic summary data from the control silo.
+ @regional_rpc_method(resolve=UnimplementedRegionResolution())
+ @abstractmethod
+ def get_organizations(
+ self,
+ *,
+ user_id: Optional[int],
+ scope: Optional[str],
+ only_visible: bool,
+ organization_ids: Optional[List[int]] = None,
+ ) -> List[RpcOrganizationSummary]:
+ """
+        When user_id is set, returns all organizations associated with that user id given
+        a scope and visibility requirement. When user_id is not set but organization_ids is, provides the
+        set of organizations matching those ids, ignoring scope and user_id.
+
+        When only_visible is set, the organization object is only returned if its status is Visible;
+        otherwise any organization will be returned.
+
+        Because this endpoint fetches not from region silos but from the control silo's organization
+        membership table, only a subset of all organization metadata is available. Spanning out and querying
+        multiple organizations for their full metadata is greatly discouraged for performance reasons.
+ """
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def check_membership_by_email(
+ self, *, organization_id: int, email: str
+ ) -> Optional[RpcOrganizationMember]:
+ """
+ Used to look up an organization membership by an email
+ """
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def check_membership_by_id(
+ self, *, organization_id: int, user_id: int
+ ) -> Optional[RpcOrganizationMember]:
+ """
+ Used to look up an organization membership by a user id
+ """
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationSlug())
+ @abstractmethod
+ def check_organization_by_slug(self, *, slug: str, only_visible: bool) -> Optional[int]:
+ """
+        If the organization exists and matches the only_visible requirement, returns its id by the slug.
+ """
+ pass
+
+ def get_organization_by_slug(
+ self, *, user_id: Optional[int], slug: str, only_visible: bool
+ ) -> Optional[RpcUserOrganizationContext]:
+ """
+ Defers to check_organization_by_slug -> get_organization_by_id
+ """
+ org_id = self.check_organization_by_slug(slug=slug, only_visible=only_visible)
+ if org_id is None:
+ return None
+
+ return self.get_organization_by_id(id=org_id, user_id=user_id)
+
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def add_organization_member(
+ self,
+ *,
+ organization_id: int,
+ default_org_role: str,
+ user_id: Optional[int] = None,
+ email: Optional[str] = None,
+ flags: Optional[RpcOrganizationMemberFlags] = None,
+ role: Optional[str] = None,
+ inviter_id: Optional[int] = None,
+ invite_status: Optional[int] = InviteStatus.APPROVED.value,
+ ) -> RpcOrganizationMember:
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member"))
+ @abstractmethod
+ def add_team_member(self, *, team_id: int, organization_member: RpcOrganizationMember) -> None:
+ pass
+
+ @regional_rpc_method(resolve=UnimplementedRegionResolution())
+ @abstractmethod
+ def get_team_members(self, *, team_id: int) -> Iterable[RpcOrganizationMember]:
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member"))
+ @abstractmethod
+ def update_membership_flags(self, *, organization_member: RpcOrganizationMember) -> None:
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationIdAttribute("organization_member"))
+ @abstractmethod
+ def get_all_org_roles(
+ self,
+ *,
+ organization_member: Optional[RpcOrganizationMember] = None,
+ member_id: Optional[int] = None,
+ ) -> List[str]:
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def get_top_dog_team_member_ids(self, *, organization_id: int) -> List[int]:
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def remove_user(self, *, organization_id: int, user_id: int) -> RpcOrganizationMember:
+ pass
+
+ @regional_rpc_method(resolve=ByOrganizationId())
+ @abstractmethod
+ def reset_idp_flags(self, *, organization_id: int) -> None:
+ pass
+
+
+organization_service = cast(OrganizationService, OrganizationService.create_delegation())
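
A sketch of the slug-based lookup that the non-abstract get_organization_by_slug helper chains together; the slug and user id are assumed values:

    from sentry.services.hybrid_cloud.organization import organization_service

    context = organization_service.get_organization_by_slug(
        user_id=user_id, slug="acme", only_visible=True
    )
    if context is not None:
        org = context.organization  # full RpcOrganization, including teams and projects
        member = context.member     # None when the user is not a member
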
diff --git a/src/sentry/services/hybrid_cloud/organization_mapping/__init__.py b/src/sentry/services/hybrid_cloud/organization_mapping/__init__.py
index ccfc182472d221..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/organization_mapping/__init__.py
+++ b/src/sentry/services/hybrid_cloud/organization_mapping/__init__.py
@@ -1,114 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from datetime import datetime
-from typing import Optional, cast
-
-from django.utils import timezone
-from pydantic.fields import Field
-from typing_extensions import TypedDict
-
-from sentry.models import Organization
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.silo import SiloMode
-
-
-class RpcOrganizationMapping(RpcModel):
- organization_id: int = -1
- slug: str = ""
- name: str = ""
- region_name: str = ""
- date_created: datetime = Field(default_factory=timezone.now)
- verified: bool = False
- customer_id: Optional[str] = None
-
-
-class RpcOrganizationMappingUpdate(TypedDict):
- """A set of values to be updated on an OrganizationMapping.
-
- An absent key indicates that the attribute should not be updated. (Compare to a
- `"customer_id": None` entry, which indicates that `customer_id` should be
- overwritten with a null value.)
- """
-
- name: str
- customer_id: Optional[str]
-
-
-def update_organization_mapping_from_instance(
- organization: Organization,
-) -> RpcOrganizationMappingUpdate:
- attributes = {
- attr_name: getattr(organization, attr_name)
- for attr_name in RpcOrganizationMappingUpdate.__annotations__.keys()
- }
- return RpcOrganizationMappingUpdate(**attributes) # type: ignore
-
-
-class OrganizationMappingService(RpcService):
- key = "organization_mapping"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.organization_mapping.impl import (
- DatabaseBackedOrganizationMappingService,
- )
-
- return DatabaseBackedOrganizationMappingService()
-
- @rpc_method
- @abstractmethod
- def create(
- self,
- *,
- organization_id: int,
- slug: str,
- name: str,
- region_name: str,
- idempotency_key: Optional[str] = "",
- customer_id: Optional[str],
- user: Optional[int] = None,
- ) -> RpcOrganizationMapping:
- """
- This method returns a new or recreated OrganizationMapping object.
- If a record already exists with the same slug, the organization_id can only be
- updated IF the idempotency key is identical.
- Will raise IntegrityError if the slug already exists.
-
- :param organization_id:
- The org id to create the slug for
- :param slug:
- A slug to reserve for this organization
- :param customer_id:
- A unique per customer billing identifier
- :return:
- """
- pass
-
- def close(self) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def update(self, *, organization_id: int, update: RpcOrganizationMappingUpdate) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def verify_mappings(self, *, organization_id: int, slug: str) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def delete(self, *, organization_id: int) -> None:
- pass
-
-
-organization_mapping_service: OrganizationMappingService = cast(
- OrganizationMappingService, OrganizationMappingService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/organization_mapping/impl.py b/src/sentry/services/hybrid_cloud/organization_mapping/impl.py
index 8f2deb0a605231..649dc042a2d289 100644
--- a/src/sentry/services/hybrid_cloud/organization_mapping/impl.py
+++ b/src/sentry/services/hybrid_cloud/organization_mapping/impl.py
@@ -1,4 +1,4 @@
-from typing import Optional, cast
+from typing import Optional
from django.db import transaction
@@ -8,6 +8,7 @@
RpcOrganizationMapping,
RpcOrganizationMappingUpdate,
)
+from sentry.services.hybrid_cloud.organization_mapping.serial import serialize_organization_mapping
class DatabaseBackedOrganizationMappingService(OrganizationMappingService):
@@ -45,14 +46,7 @@ def create(
customer_id=customer_id,
)
- return self.serialize_organization_mapping(org_mapping)
-
- def serialize_organization_mapping(
- self, org_mapping: OrganizationMapping
- ) -> RpcOrganizationMapping:
- return cast(
- RpcOrganizationMapping, RpcOrganizationMapping.serialize_by_field_name(org_mapping)
- )
+ return serialize_organization_mapping(org_mapping)
def update(self, organization_id: int, update: RpcOrganizationMappingUpdate) -> None:
with transaction.atomic():
diff --git a/src/sentry/services/hybrid_cloud/organization_mapping/model.py b/src/sentry/services/hybrid_cloud/organization_mapping/model.py
new file mode 100644
index 00000000000000..42e020ccf35974
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organization_mapping/model.py
@@ -0,0 +1,35 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from datetime import datetime
+from typing import Optional
+
+from django.utils import timezone
+from pydantic.fields import Field
+from typing_extensions import TypedDict
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcOrganizationMapping(RpcModel):
+ organization_id: int = -1
+ slug: str = ""
+ name: str = ""
+ region_name: str = ""
+ date_created: datetime = Field(default_factory=timezone.now)
+ verified: bool = False
+ customer_id: Optional[str] = None
+
+
+class RpcOrganizationMappingUpdate(TypedDict):
+ """A set of values to be updated on an OrganizationMapping.
+
+ An absent key indicates that the attribute should not be updated. (Compare to a
+ `"customer_id": None` entry, which indicates that `customer_id` should be
+ overwritten with a null value.)
+ """
+
+ name: str
+ customer_id: Optional[str]
diff --git a/src/sentry/services/hybrid_cloud/organization_mapping/serial.py b/src/sentry/services/hybrid_cloud/organization_mapping/serial.py
new file mode 100644
index 00000000000000..b7da92a0c085a7
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organization_mapping/serial.py
@@ -0,0 +1,21 @@
+from typing import cast
+
+from sentry.models import Organization, OrganizationMapping
+from sentry.services.hybrid_cloud.organization_mapping import (
+ RpcOrganizationMapping,
+ RpcOrganizationMappingUpdate,
+)
+
+
+def update_organization_mapping_from_instance(
+ organization: Organization,
+) -> RpcOrganizationMappingUpdate:
+ attributes = {
+ attr_name: getattr(organization, attr_name)
+ for attr_name in RpcOrganizationMappingUpdate.__annotations__.keys()
+ }
+ return RpcOrganizationMappingUpdate(**attributes) # type: ignore
+
+
+def serialize_organization_mapping(org_mapping: OrganizationMapping) -> RpcOrganizationMapping:
+ return cast(RpcOrganizationMapping, RpcOrganizationMapping.serialize_by_field_name(org_mapping))
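For reference, `update_organization_mapping_from_instance` above is annotation-driven: it copies exactly the keys declared on the TypedDict from the Organization instance, so extending RpcOrganizationMappingUpdate automatically extends the payload. A small self-check under that assumption:

    from sentry.services.hybrid_cloud.organization_mapping import RpcOrganizationMappingUpdate

    # Per model.py in this diff, only these two fields participate in updates.
    assert set(RpcOrganizationMappingUpdate.__annotations__) == {"name", "customer_id"}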
diff --git a/src/sentry/services/hybrid_cloud/organization_mapping/service.py b/src/sentry/services/hybrid_cloud/organization_mapping/service.py
new file mode 100644
index 00000000000000..a2b903eb354c89
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organization_mapping/service.py
@@ -0,0 +1,79 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import Optional, cast
+
+from sentry.services.hybrid_cloud.organization_mapping import (
+ RpcOrganizationMapping,
+ RpcOrganizationMappingUpdate,
+)
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.silo import SiloMode
+
+
+class OrganizationMappingService(RpcService):
+ key = "organization_mapping"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.organization_mapping.impl import (
+ DatabaseBackedOrganizationMappingService,
+ )
+
+ return DatabaseBackedOrganizationMappingService()
+
+ @rpc_method
+ @abstractmethod
+ def create(
+ self,
+ *,
+ organization_id: int,
+ slug: str,
+ name: str,
+ region_name: str,
+ idempotency_key: Optional[str] = "",
+ customer_id: Optional[str],
+ user: Optional[int] = None,
+ ) -> RpcOrganizationMapping:
+ """
+        Create or recreate an OrganizationMapping and return it.
+        If a record already exists with the same slug, its organization_id can only
+        be updated when the idempotency key matches; otherwise an IntegrityError is
+        raised for the conflicting slug.
+
+ :param organization_id:
+ The org id to create the slug for
+ :param slug:
+ A slug to reserve for this organization
+ :param customer_id:
+ A unique per customer billing identifier
+ :return:
+ """
+ pass
+
+ def close(self) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update(self, *, organization_id: int, update: RpcOrganizationMappingUpdate) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def verify_mappings(self, *, organization_id: int, slug: str) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def delete(self, *, organization_id: int) -> None:
+ pass
+
+
+organization_mapping_service: OrganizationMappingService = cast(
+ OrganizationMappingService, OrganizationMappingService.create_delegation()
+)
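A hedged usage sketch of the create contract documented above (all values invented; assumes the RPC resolves to the control-silo implementation):

    mapping = organization_mapping_service.create(
        organization_id=42,
        slug="acme",
        name="Acme",
        region_name="us",
        idempotency_key="provision-42",  # reusing the same key makes retries safe
        customer_id=None,
    )
    assert mapping.slug == "acme"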
diff --git a/src/sentry/services/hybrid_cloud/organizationmember_mapping/__init__.py b/src/sentry/services/hybrid_cloud/organizationmember_mapping/__init__.py
index e51284f3f021f9..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/organizationmember_mapping/__init__.py
+++ b/src/sentry/services/hybrid_cloud/organizationmember_mapping/__init__.py
@@ -1,119 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from datetime import datetime
-from typing import Optional, cast
-
-from django.utils import timezone
-from pydantic.fields import Field
-
-from sentry.models import OrganizationMember
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.silo import SiloMode
-
-
-class RpcOrganizationMemberMapping(RpcModel):
- organizationmember_id: int = -1
- organization_id: int = -1
- date_added: datetime = Field(default_factory=timezone.now)
-
- role: str = ""
- user_id: Optional[int] = None
- email: Optional[str] = None
- inviter_id: Optional[int] = None
- invite_status: Optional[int] = None
-
-
-class RpcOrganizationMemberMappingUpdate(RpcModel):
- """
- A set of values to be updated on an OrganizationMemberMapping.
-
- An omitted key indicates that the attribute should not be updated. (Compare to a
- `"user_id": None` entry, which indicates that `user_id` should be
- overwritten with a null value.)
- """
-
- role: str
- user_id: Optional[int]
- email: Optional[str]
- inviter_id: Optional[int]
- invite_status: Optional[int]
-
-
-class OrganizationMemberMappingService(RpcService):
- key = "organizationmember_mapping"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.organizationmember_mapping.impl import (
- DatabaseBackedOrganizationMemberMappingService,
- )
-
- return DatabaseBackedOrganizationMemberMappingService()
-
- @rpc_method
- @abstractmethod
- def create_mapping(
- self,
- *,
- organizationmember_id: int,
- organization_id: int,
- role: str,
- user_id: Optional[int] = None,
- email: Optional[str] = None,
- inviter_id: Optional[int] = None,
- invite_status: Optional[int] = None,
- ) -> RpcOrganizationMemberMapping:
- pass
-
- def create_with_organization_member(
- self, *, org_member: OrganizationMember
- ) -> RpcOrganizationMemberMapping:
- return self.create_mapping(
- organizationmember_id=org_member.id,
- organization_id=org_member.organization_id,
- role=org_member.role,
- user_id=org_member.user_id,
- email=org_member.email,
- inviter_id=org_member.inviter_id,
- invite_status=org_member.invite_status,
- )
-
- @rpc_method
- @abstractmethod
- def update_with_organization_member(
- self,
- *,
- organizationmember_id: int,
- organization_id: int,
- rpc_update_org_member: RpcOrganizationMemberMappingUpdate,
- ) -> RpcOrganizationMemberMapping:
- pass
-
- @rpc_method
- @abstractmethod
- def delete_with_organization_member(
- self,
- *,
- organizationmember_id: int,
- organization_id: int,
- ) -> None:
- pass
-
-
-def impl_with_db() -> OrganizationMemberMappingService:
- from sentry.services.hybrid_cloud.organizationmember_mapping.impl import (
- DatabaseBackedOrganizationMemberMappingService,
- )
-
- return DatabaseBackedOrganizationMemberMappingService()
-
-
-organizationmember_mapping_service: OrganizationMemberMappingService = cast(
- OrganizationMemberMappingService, OrganizationMemberMappingService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py b/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py
index 358416c5adefde..a072ed1ea3c24f 100644
--- a/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py
+++ b/src/sentry/services/hybrid_cloud/organizationmember_mapping/impl.py
@@ -1,9 +1,9 @@
# Please do not use
# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
+# in modules such as this one where hybrid cloud data models or service classes are
# defined, because we want to reflect on type annotations and avoid forward references.
-from typing import Optional, cast
+from typing import Optional
from django.db import transaction
@@ -13,6 +13,9 @@
RpcOrganizationMemberMapping,
RpcOrganizationMemberMappingUpdate,
)
+from sentry.services.hybrid_cloud.organizationmember_mapping.serial import (
+ serialize_org_member_mapping,
+)
class DatabaseBackedOrganizationMemberMappingService(OrganizationMemberMappingService):
@@ -58,7 +61,7 @@ def create_mapping(
inviter_id=inviter_id,
invite_status=invite_status,
)
- return self._serialize_rpc(org_member_mapping)
+ return serialize_org_member_mapping(org_member_mapping)
def update_with_organization_member(
self,
@@ -73,7 +76,7 @@ def update_with_organization_member(
organizationmember_id=organizationmember_id,
)
org_member_map.update(**rpc_update_org_member.dict())
- return self._serialize_rpc(org_member_map)
+ return serialize_org_member_mapping(org_member_map)
except OrganizationMemberMapping.DoesNotExist:
return self.create_mapping(
organizationmember_id=organizationmember_id,
@@ -94,11 +97,3 @@ def delete_with_organization_member(
def close(self) -> None:
pass
-
- def _serialize_rpc(
- self, org_member_mapping: OrganizationMemberMapping
- ) -> RpcOrganizationMemberMapping:
- return cast(
- RpcOrganizationMemberMapping,
- RpcOrganizationMemberMapping.serialize_by_field_name(org_member_mapping),
- )
diff --git a/src/sentry/services/hybrid_cloud/organizationmember_mapping/model.py b/src/sentry/services/hybrid_cloud/organizationmember_mapping/model.py
new file mode 100644
index 00000000000000..d1039c2fbea1fd
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organizationmember_mapping/model.py
@@ -0,0 +1,40 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from datetime import datetime
+from typing import Optional
+
+from django.utils import timezone
+from pydantic.fields import Field
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcOrganizationMemberMapping(RpcModel):
+ organizationmember_id: int = -1
+ organization_id: int = -1
+ date_added: datetime = Field(default_factory=timezone.now)
+
+ role: str = ""
+ user_id: Optional[int] = None
+ email: Optional[str] = None
+ inviter_id: Optional[int] = None
+ invite_status: Optional[int] = None
+
+
+class RpcOrganizationMemberMappingUpdate(RpcModel):
+ """
+ A set of values to be updated on an OrganizationMemberMapping.
+
+ An omitted key indicates that the attribute should not be updated. (Compare to a
+ `"user_id": None` entry, which indicates that `user_id` should be
+ overwritten with a null value.)
+ """
+
+ role: str
+ user_id: Optional[int]
+ email: Optional[str]
+ inviter_id: Optional[int]
+ invite_status: Optional[int]
diff --git a/src/sentry/services/hybrid_cloud/organizationmember_mapping/serial.py b/src/sentry/services/hybrid_cloud/organizationmember_mapping/serial.py
new file mode 100644
index 00000000000000..70b2a024f26562
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organizationmember_mapping/serial.py
@@ -0,0 +1,13 @@
+from typing import cast
+
+from sentry.models import OrganizationMemberMapping
+from sentry.services.hybrid_cloud.organizationmember_mapping import RpcOrganizationMemberMapping
+
+
+def serialize_org_member_mapping(
+ org_member_mapping: OrganizationMemberMapping,
+) -> RpcOrganizationMemberMapping:
+ return cast(
+ RpcOrganizationMemberMapping,
+ RpcOrganizationMemberMapping.serialize_by_field_name(org_member_mapping),
+ )
diff --git a/src/sentry/services/hybrid_cloud/organizationmember_mapping/service.py b/src/sentry/services/hybrid_cloud/organizationmember_mapping/service.py
new file mode 100644
index 00000000000000..7d68e34eb0cf2e
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/organizationmember_mapping/service.py
@@ -0,0 +1,90 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import Optional, cast
+
+from sentry.models import OrganizationMember
+from sentry.services.hybrid_cloud.organizationmember_mapping import (
+ RpcOrganizationMemberMapping,
+ RpcOrganizationMemberMappingUpdate,
+)
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.silo import SiloMode
+
+
+class OrganizationMemberMappingService(RpcService):
+ key = "organizationmember_mapping"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.organizationmember_mapping.impl import (
+ DatabaseBackedOrganizationMemberMappingService,
+ )
+
+ return DatabaseBackedOrganizationMemberMappingService()
+
+ @rpc_method
+ @abstractmethod
+ def create_mapping(
+ self,
+ *,
+ organizationmember_id: int,
+ organization_id: int,
+ role: str,
+ user_id: Optional[int] = None,
+ email: Optional[str] = None,
+ inviter_id: Optional[int] = None,
+ invite_status: Optional[int] = None,
+ ) -> RpcOrganizationMemberMapping:
+ pass
+
+ def create_with_organization_member(
+ self, *, org_member: OrganizationMember
+ ) -> RpcOrganizationMemberMapping:
+ return self.create_mapping(
+ organizationmember_id=org_member.id,
+ organization_id=org_member.organization_id,
+ role=org_member.role,
+ user_id=org_member.user_id,
+ email=org_member.email,
+ inviter_id=org_member.inviter_id,
+ invite_status=org_member.invite_status,
+ )
+
+ @rpc_method
+ @abstractmethod
+ def update_with_organization_member(
+ self,
+ *,
+ organizationmember_id: int,
+ organization_id: int,
+ rpc_update_org_member: RpcOrganizationMemberMappingUpdate,
+ ) -> RpcOrganizationMemberMapping:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def delete_with_organization_member(
+ self,
+ *,
+ organizationmember_id: int,
+ organization_id: int,
+ ) -> None:
+ pass
+
+
+def impl_with_db() -> OrganizationMemberMappingService:
+ from sentry.services.hybrid_cloud.organizationmember_mapping.impl import (
+ DatabaseBackedOrganizationMemberMappingService,
+ )
+
+ return DatabaseBackedOrganizationMemberMappingService()
+
+
+organizationmember_mapping_service: OrganizationMemberMappingService = cast(
+ OrganizationMemberMappingService, OrganizationMemberMappingService.create_delegation()
+)
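`create_with_organization_member` above is a plain convenience wrapper: it unpacks an ORM OrganizationMember into the keyword arguments of the `create_mapping` RPC. A sketch, where `member` is assumed to be an existing OrganizationMember:

    rpc_mapping = organizationmember_mapping_service.create_with_organization_member(
        org_member=member
    )
    assert rpc_mapping.organizationmember_id == member.id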
diff --git a/src/sentry/services/hybrid_cloud/pagination.py b/src/sentry/services/hybrid_cloud/pagination.py
index 18926bbd8c4276..8ea44a99545906 100644
--- a/src/sentry/services/hybrid_cloud/pagination.py
+++ b/src/sentry/services/hybrid_cloud/pagination.py
@@ -1,6 +1,6 @@
# Please do not use
# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
+# in modules such as this one where hybrid cloud data models or service classes are
# defined, because we want to reflect on type annotations and avoid forward references.
diff --git a/src/sentry/services/hybrid_cloud/project_key/__init__.py b/src/sentry/services/hybrid_cloud/project_key/__init__.py
index eb1700e290b2e4..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/project_key/__init__.py
+++ b/src/sentry/services/hybrid_cloud/project_key/__init__.py
@@ -1,53 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from enum import Enum
-from typing import Any, Optional, cast
-
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.region import UnimplementedRegionResolution
-from sentry.services.hybrid_cloud.rpc import RpcService, regional_rpc_method
-from sentry.silo import SiloMode
-
-
-class ProjectKeyRole(Enum):
- store = "store"
- api = "api"
-
- def as_orm_role(self) -> Any:
- from sentry.models import ProjectKey
-
- if self == ProjectKeyRole.store:
- return ProjectKey.roles.store
- elif self == ProjectKeyRole.api:
- return ProjectKey.roles.api
- else:
- raise ValueError("Unexpected project key role enum")
-
-
-class RpcProjectKey(RpcModel):
- dsn_public: str = ""
-
-
-class ProjectKeyService(RpcService):
- key = "project_key"
- local_mode = SiloMode.REGION
-
- @classmethod
- def get_local_implementation(cls) -> "RpcService":
- from sentry.services.hybrid_cloud.project_key.impl import DatabaseBackedProjectKeyService
-
- return DatabaseBackedProjectKeyService()
-
- @regional_rpc_method(resolve=UnimplementedRegionResolution())
- @abstractmethod
- def get_project_key(self, *, project_id: str, role: ProjectKeyRole) -> Optional[RpcProjectKey]:
- pass
-
-
-project_key_service: ProjectKeyService = cast(
- ProjectKeyService, ProjectKeyService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/project_key/model.py b/src/sentry/services/hybrid_cloud/project_key/model.py
new file mode 100644
index 00000000000000..1f9162f56d7eff
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/project_key/model.py
@@ -0,0 +1,28 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from enum import Enum
+from typing import Any
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class ProjectKeyRole(Enum):
+ store = "store"
+ api = "api"
+
+ def as_orm_role(self) -> Any:
+ from sentry.models import ProjectKey
+
+ if self == ProjectKeyRole.store:
+ return ProjectKey.roles.store
+ elif self == ProjectKeyRole.api:
+ return ProjectKey.roles.api
+ else:
+ raise ValueError("Unexpected project key role enum")
+
+
+class RpcProjectKey(RpcModel):
+ dsn_public: str = ""
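A small sketch of how `as_orm_role` bridges the RPC-safe enum to the ORM's role flags (import path as used in the method above):

    from sentry.models import ProjectKey

    assert ProjectKeyRole.store.as_orm_role() == ProjectKey.roles.store
    assert ProjectKeyRole.api.as_orm_role() == ProjectKey.roles.api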
diff --git a/src/sentry/services/hybrid_cloud/project_key/service.py b/src/sentry/services/hybrid_cloud/project_key/service.py
new file mode 100644
index 00000000000000..fe692e57c69625
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/project_key/service.py
@@ -0,0 +1,33 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import Optional, cast
+
+from sentry.services.hybrid_cloud.project_key import ProjectKeyRole, RpcProjectKey
+from sentry.services.hybrid_cloud.region import UnimplementedRegionResolution
+from sentry.services.hybrid_cloud.rpc import RpcService, regional_rpc_method
+from sentry.silo import SiloMode
+
+
+class ProjectKeyService(RpcService):
+ key = "project_key"
+ local_mode = SiloMode.REGION
+
+ @classmethod
+ def get_local_implementation(cls) -> "RpcService":
+ from sentry.services.hybrid_cloud.project_key.impl import DatabaseBackedProjectKeyService
+
+ return DatabaseBackedProjectKeyService()
+
+ @regional_rpc_method(resolve=UnimplementedRegionResolution())
+ @abstractmethod
+ def get_project_key(self, *, project_id: str, role: ProjectKeyRole) -> Optional[RpcProjectKey]:
+ pass
+
+
+project_key_service: ProjectKeyService = cast(
+ ProjectKeyService, ProjectKeyService.create_delegation()
+)
diff --git a/src/sentry/services/hybrid_cloud/tombstone/__init__.py b/src/sentry/services/hybrid_cloud/tombstone/__init__.py
index 73e31c17e3b8e9..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/tombstone/__init__.py
+++ b/src/sentry/services/hybrid_cloud/tombstone/__init__.py
@@ -1,51 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-
-from sentry.services.hybrid_cloud import InterfaceWithLifecycle, RpcModel, silo_mode_delegation
-from sentry.silo import SiloMode
-
-
-class RpcTombstone(RpcModel):
- table_name: str = ""
- identifier: int = -1
-
-
-# the tombstone service itself is unaware of model mapping, that is the responsibility of the caller and the outbox
-# logic. Basically, if you record a remote tombstone, you are implying the destination table_name exists, remotely.
-# Implementors should, thus, _not_ constraint these entries and gracefully handle version drift cases when the "mapping"
-# of who owns what models changes independent of the rollout of logic.
-class TombstoneService(InterfaceWithLifecycle):
- @abstractmethod
- def record_remote_tombstone(self, tombstone: RpcTombstone) -> None:
- pass
-
-
-def control_impl() -> TombstoneService:
- from sentry.services.hybrid_cloud.tombstone.impl import ControlTombstoneService
-
- return ControlTombstoneService()
-
-
-def region_impl() -> TombstoneService:
- from sentry.services.hybrid_cloud.tombstone.impl import RegionTombstoneService
-
- return RegionTombstoneService()
-
-
-def monolith_impl() -> TombstoneService:
- from sentry.services.hybrid_cloud.tombstone.impl import MonolithTombstoneService
-
- return MonolithTombstoneService()
-
-
-tombstone_service: TombstoneService = silo_mode_delegation(
- {
- SiloMode.MONOLITH: monolith_impl,
- SiloMode.REGION: region_impl,
- SiloMode.CONTROL: control_impl,
- }
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/tombstone/model.py b/src/sentry/services/hybrid_cloud/tombstone/model.py
new file mode 100644
index 00000000000000..1619c5be8b88a4
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/tombstone/model.py
@@ -0,0 +1,11 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcTombstone(RpcModel):
+ table_name: str = ""
+ identifier: int = -1
diff --git a/src/sentry/services/hybrid_cloud/tombstone/service.py b/src/sentry/services/hybrid_cloud/tombstone/service.py
new file mode 100644
index 00000000000000..d5c1e1afa0c93e
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/tombstone/service.py
@@ -0,0 +1,47 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+
+from sentry.services.hybrid_cloud import InterfaceWithLifecycle, silo_mode_delegation
+from sentry.services.hybrid_cloud.tombstone import RpcTombstone
+from sentry.silo import SiloMode
+
+
+# The tombstone service itself is unaware of model mapping; that is the responsibility of the caller and the outbox
+# logic. Basically, if you record a remote tombstone, you are implying that the destination table_name exists remotely.
+# Implementors should, thus, _not_ constrain these entries, and should gracefully handle version drift cases when the
+# "mapping" of who owns which models changes independently of the rollout of logic.
+class TombstoneService(InterfaceWithLifecycle):
+ @abstractmethod
+ def record_remote_tombstone(self, tombstone: RpcTombstone) -> None:
+ pass
+
+
+def control_impl() -> TombstoneService:
+ from sentry.services.hybrid_cloud.tombstone.impl import ControlTombstoneService
+
+ return ControlTombstoneService()
+
+
+def region_impl() -> TombstoneService:
+ from sentry.services.hybrid_cloud.tombstone.impl import RegionTombstoneService
+
+ return RegionTombstoneService()
+
+
+def monolith_impl() -> TombstoneService:
+ from sentry.services.hybrid_cloud.tombstone.impl import MonolithTombstoneService
+
+ return MonolithTombstoneService()
+
+
+tombstone_service: TombstoneService = silo_mode_delegation(
+ {
+ SiloMode.MONOLITH: monolith_impl,
+ SiloMode.REGION: region_impl,
+ SiloMode.CONTROL: control_impl,
+ }
+)
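Unlike the RpcService subclasses elsewhere in this diff, TombstoneService is wired through `silo_mode_delegation`: the factory registered for the process's current silo mode is invoked lazily, so a region process transparently gets RegionTombstoneService, and so on. A hedged sketch of calling through the delegate (table name and id invented):

    from sentry.services.hybrid_cloud.tombstone import RpcTombstone, tombstone_service

    # The delegate picks control/region/monolith at call time based on SILO_MODE;
    # callers never select an implementation themselves.
    tombstone_service.record_remote_tombstone(
        RpcTombstone(table_name="sentry_somemodel", identifier=123)
    )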
diff --git a/src/sentry/services/hybrid_cloud/user/__init__.py b/src/sentry/services/hybrid_cloud/user/__init__.py
index bb1e6899263145..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/user/__init__.py
+++ b/src/sentry/services/hybrid_cloud/user/__init__.py
@@ -1,225 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-import datetime
-from abc import abstractmethod
-from enum import IntEnum
-from typing import TYPE_CHECKING, Any, FrozenSet, List, Optional, cast
-
-from pydantic.fields import Field
-from typing_extensions import TypedDict
-
-from sentry.services.hybrid_cloud import DEFAULT_DATE, RpcModel
-from sentry.services.hybrid_cloud.filter_query import OpaqueSerializedResponse
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.silo import SiloMode
-
-if TYPE_CHECKING:
- from sentry.models import Group
- from sentry.services.hybrid_cloud.auth import AuthenticationContext
-
-
-class RpcAvatar(RpcModel):
- id: int = 0
- file_id: Optional[int] = None
- ident: str = ""
- avatar_type: str = "letter_avatar"
-
-
-class RpcUserEmail(RpcModel):
- id: int = 0
- email: str = ""
- is_verified: bool = False
-
-
-class RpcAuthenticator(RpcModel):
- id: int = 0
- user_id: int = -1
- created_at: datetime.datetime = DEFAULT_DATE
- last_used_at: Optional[datetime.datetime] = None
- type: int = -1
- config: Any = None
-
-
-class RpcUser(RpcModel):
- id: int = -1
- pk: int = -1
- name: str = ""
- email: str = ""
- emails: FrozenSet[str] = frozenset()
- username: str = ""
- actor_id: Optional[int] = None
- display_name: str = ""
- label: str = ""
- is_superuser: bool = False
- is_authenticated: bool = False
- is_anonymous: bool = False
- is_active: bool = False
- is_staff: bool = False
- last_active: Optional[datetime.datetime] = None
- is_sentry_app: bool = False
- password_usable: bool = False
- is_password_expired: bool = False
- session_nonce: Optional[str] = None
-
- roles: FrozenSet[str] = frozenset()
- permissions: FrozenSet[str] = frozenset()
- avatar: Optional[RpcAvatar] = None
- useremails: List[RpcUserEmail] = Field(default_factory=list)
- authenticators: List[RpcAuthenticator] = Field(default_factory=list)
-
- def __hash__(self) -> int:
- # Mimic the behavior of hashing a Django ORM entity, for compatibility with
- # legacy code that treats User entities as dict keys.
- # TODO: Remove the need for this
- return hash((self.id, self.pk))
-
- def has_usable_password(self) -> bool:
- return self.password_usable
-
- def get_display_name(self) -> str: # API compatibility with ORM User
- return self.display_name
-
- def get_label(self) -> str: # API compatibility with ORM User
- return self.label
-
- def get_full_name(self) -> str:
- return self.name
-
- def get_salutation_name(self) -> str:
- name = self.name or self.username.split("@", 1)[0].split(".", 1)[0]
- first_name = name.split(" ", 1)[0]
- return first_name.capitalize()
-
- def get_avatar_type(self) -> str:
- if self.avatar is not None:
- return self.avatar.avatar_type
- return "letter_avatar"
-
- def class_name(self) -> str:
- return "User"
-
- def has_2fa(self) -> bool:
- return len(self.authenticators) > 0
-
-
-class UserSerializeType(IntEnum): # annoying
- SIMPLE = 0
- DETAILED = 1
- SELF_DETAILED = 2
-
-
-class UserFilterArgs(TypedDict, total=False):
- user_ids: List[int]
- is_active: bool
- organization_id: int
- project_ids: List[int]
- team_ids: List[int]
- is_active_memberteam: bool
- emails: List[str]
-
-
-class UserUpdateArgs(TypedDict, total=False):
- avatar_url: str
- avatar_type: int
- actor_id: int # TODO(hybrid-cloud): Remove this after the actor migration is complete
-
-
-class UserService(RpcService):
- key = "user"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.user.impl import DatabaseBackedUserService
-
- return DatabaseBackedUserService()
-
- @rpc_method
- @abstractmethod
- def serialize_many(
- self,
- *,
- filter: UserFilterArgs,
- as_user: Optional[RpcUser] = None,
- auth_context: Optional["AuthenticationContext"] = None,
- serializer: Optional[UserSerializeType] = None,
- ) -> List[OpaqueSerializedResponse]:
- pass
-
- @rpc_method
- @abstractmethod
- def get_many(self, *, filter: UserFilterArgs) -> List[RpcUser]:
- pass
-
- @rpc_method
- @abstractmethod
- def get_many_by_email(
- self,
- *,
- emails: List[str],
- is_active: bool = True,
- is_verified: bool = True,
- is_project_member: bool = False,
- project_id: Optional[int] = None,
- ) -> List[RpcUser]:
- """
- Return a list of users matching the filters
- :param email:
- A case insensitive email to match
- :return:
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_by_username(
- self, *, username: str, with_valid_password: bool = True, is_active: Optional[bool] = None
- ) -> List[RpcUser]:
- """
- Return a list of users that match a username and falling back to email
- :param username:
- A case insensitive username/email to match
- :param with_valid_password:
- filter to ensure a password is set
- :param is_active:
- filter for only active users
- :return:
- """
- pass
-
- @rpc_method
- @abstractmethod
- def get_from_group(self, *, group: "Group") -> List[RpcUser]:
- """Get all users in all teams in a given Group's project."""
- pass
-
- @rpc_method
- @abstractmethod
- def get_by_actor_ids(self, *, actor_ids: List[int]) -> List[RpcUser]:
- pass
-
- @rpc_method
- @abstractmethod
- def update_user(self, *, user_id: int, attrs: UserUpdateArgs) -> Any:
- # Returns a serialized user
- pass
-
- @rpc_method
- def get_user(self, user_id: int) -> Optional[RpcUser]:
- """
- This method returns a User object given an ID
- :param user_id:
- A user ID to fetch
- :return:
- """
- users = self.get_many(filter=dict(user_ids=[user_id]))
- if len(users) > 0:
- return users[0]
- else:
- return None
-
-
-user_service: UserService = cast(UserService, UserService.create_delegation())
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/user/impl.py b/src/sentry/services/hybrid_cloud/user/impl.py
index e3603591911c73..a1be0f9dc9613e 100644
--- a/src/sentry/services/hybrid_cloud/user/impl.py
+++ b/src/sentry/services/hybrid_cloud/user/impl.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Any, Callable, FrozenSet, Iterable, List, Optional
+from typing import Any, Callable, List, Optional
from django.db.models import QuerySet
@@ -12,7 +12,6 @@
from sentry.api.serializers.base import Serializer
from sentry.db.models import BaseQuerySet
from sentry.db.models.query import in_iexact
-from sentry.models.avatars.user_avatar import UserAvatar
from sentry.models.group import Group
from sentry.models.user import User
from sentry.services.hybrid_cloud.auth import AuthenticationContext
@@ -21,15 +20,13 @@
OpaqueSerializedResponse,
)
from sentry.services.hybrid_cloud.user import (
- RpcAuthenticator,
- RpcAvatar,
RpcUser,
- RpcUserEmail,
UserFilterArgs,
UserSerializeType,
UserService,
UserUpdateArgs,
)
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
class DatabaseBackedUserService(UserService):
@@ -191,87 +188,3 @@ def serialize_rpc(self, user: User) -> RpcUser:
return serialize_rpc_user(user)
_FQ = _UserFilterQuery()
-
-
-def serialize_rpc_user(user: User) -> RpcUser:
- args = {
- field_name: getattr(user, field_name)
- for field_name in RpcUser.__fields__
- if hasattr(user, field_name)
- }
- args["pk"] = user.pk
- args["display_name"] = user.get_display_name()
- args["label"] = user.get_label()
- args["is_superuser"] = user.is_superuser
- args["is_sentry_app"] = user.is_sentry_app or False
- args["password_usable"] = user.has_usable_password()
-
- # Prefer eagerloaded attributes from _base_query
- if hasattr(user, "useremails") and user.useremails is not None:
- args["emails"] = frozenset([e["email"] for e in user.useremails if e["is_verified"]])
- else:
- args["emails"] = frozenset([email.email for email in user.get_verified_emails()])
- args["session_nonce"] = user.session_nonce
-
- # And process the _base_query special data additions
- args["permissions"] = frozenset(getattr(user, "permissions", None) or ())
-
- if args["name"] is None:
- # This field is non-nullable according to the Django schema, but may be null
- # on some servers due to migration history
- args["name"] = ""
-
- roles: FrozenSet[str] = frozenset()
- if hasattr(user, "roles") and user.roles is not None:
- roles = frozenset(flatten(user.roles))
- args["roles"] = roles
-
- args["useremails"] = [
- RpcUserEmail(id=e["id"], email=e["email"], is_verified=e["is_verified"])
- for e in (getattr(user, "useremails", None) or ())
- ]
-
- avatar = None
- # Use eagerloaded attributes from _base_query() if available.
- if hasattr(user, "useravatar"):
- if user.useravatar is not None:
- avatar_dict = user.useravatar[0]
- avatar_type_map = dict(UserAvatar.AVATAR_TYPES)
- avatar = RpcAvatar(
- id=avatar_dict["id"],
- file_id=avatar_dict["file_id"],
- ident=avatar_dict["ident"],
- avatar_type=avatar_type_map.get(avatar_dict["avatar_type"], "letter_avatar"),
- )
- else:
- orm_avatar = user.avatar.first()
- if orm_avatar is not None:
- avatar = RpcAvatar(
- id=orm_avatar.id,
- file_id=orm_avatar.file_id,
- ident=orm_avatar.ident,
- avatar_type=orm_avatar.get_avatar_type_display(),
- )
- args["avatar"] = avatar
-
- args["authenticators"] = [
- RpcAuthenticator(
- id=a["id"],
- user_id=a["user_id"],
- created_at=a["created_at"],
- last_used_at=a["last_used_at"],
- type=a["type"],
- config=a["config"],
- )
- for a in (getattr(user, "authenticators", None) or ())
- ]
-
- return RpcUser(**args)
-
-
-def flatten(iter: Iterable[Any]) -> List[Any]:
- return (
- ((flatten(iter[0]) + flatten(iter[1:])) if len(iter) > 0 else [])
- if type(iter) is list or isinstance(iter, BaseQuerySet)
- else [iter]
- )
diff --git a/src/sentry/services/hybrid_cloud/user/model.py b/src/sentry/services/hybrid_cloud/user/model.py
new file mode 100644
index 00000000000000..dbdad0504653f9
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/user/model.py
@@ -0,0 +1,119 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+import datetime
+from enum import IntEnum
+from typing import Any, FrozenSet, List, Optional
+
+from pydantic.fields import Field
+from typing_extensions import TypedDict
+
+from sentry.services.hybrid_cloud import DEFAULT_DATE, RpcModel
+
+
+class RpcAvatar(RpcModel):
+ id: int = 0
+ file_id: Optional[int] = None
+ ident: str = ""
+ avatar_type: str = "letter_avatar"
+
+
+class RpcUserEmail(RpcModel):
+ id: int = 0
+ email: str = ""
+ is_verified: bool = False
+
+
+class RpcAuthenticator(RpcModel):
+ id: int = 0
+ user_id: int = -1
+ created_at: datetime.datetime = DEFAULT_DATE
+ last_used_at: Optional[datetime.datetime] = None
+ type: int = -1
+ config: Any = None
+
+
+class RpcUser(RpcModel):
+ id: int = -1
+ pk: int = -1
+ name: str = ""
+ email: str = ""
+ emails: FrozenSet[str] = frozenset()
+ username: str = ""
+ actor_id: Optional[int] = None
+ display_name: str = ""
+ label: str = ""
+ is_superuser: bool = False
+ is_authenticated: bool = False
+ is_anonymous: bool = False
+ is_active: bool = False
+ is_staff: bool = False
+ last_active: Optional[datetime.datetime] = None
+ is_sentry_app: bool = False
+ password_usable: bool = False
+ is_password_expired: bool = False
+ session_nonce: Optional[str] = None
+
+ roles: FrozenSet[str] = frozenset()
+ permissions: FrozenSet[str] = frozenset()
+ avatar: Optional[RpcAvatar] = None
+ useremails: List[RpcUserEmail] = Field(default_factory=list)
+ authenticators: List[RpcAuthenticator] = Field(default_factory=list)
+
+ def __hash__(self) -> int:
+ # Mimic the behavior of hashing a Django ORM entity, for compatibility with
+ # legacy code that treats User entities as dict keys.
+ # TODO: Remove the need for this
+ return hash((self.id, self.pk))
+
+ def has_usable_password(self) -> bool:
+ return self.password_usable
+
+ def get_display_name(self) -> str: # API compatibility with ORM User
+ return self.display_name
+
+ def get_label(self) -> str: # API compatibility with ORM User
+ return self.label
+
+ def get_full_name(self) -> str:
+ return self.name
+
+ def get_salutation_name(self) -> str:
+ name = self.name or self.username.split("@", 1)[0].split(".", 1)[0]
+ first_name = name.split(" ", 1)[0]
+ return first_name.capitalize()
+
+ def get_avatar_type(self) -> str:
+ if self.avatar is not None:
+ return self.avatar.avatar_type
+ return "letter_avatar"
+
+ def class_name(self) -> str:
+ return "User"
+
+ def has_2fa(self) -> bool:
+ return len(self.authenticators) > 0
+
+
+class UserSerializeType(IntEnum): # annoying
+ SIMPLE = 0
+ DETAILED = 1
+ SELF_DETAILED = 2
+
+
+class UserFilterArgs(TypedDict, total=False):
+ user_ids: List[int]
+ is_active: bool
+ organization_id: int
+ project_ids: List[int]
+ team_ids: List[int]
+ is_active_memberteam: bool
+ emails: List[str]
+
+
+class UserUpdateArgs(TypedDict, total=False):
+ avatar_url: str
+ avatar_type: int
+ actor_id: int # TODO(hybrid-cloud): Remove this after the actor migration is complete
diff --git a/src/sentry/services/hybrid_cloud/user/serial.py b/src/sentry/services/hybrid_cloud/user/serial.py
new file mode 100644
index 00000000000000..cf3bb82d3e9d54
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/user/serial.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+from typing import Any, FrozenSet, Iterable, List
+
+from sentry.db.models import BaseQuerySet
+from sentry.models.avatars.user_avatar import UserAvatar
+from sentry.models.user import User
+from sentry.services.hybrid_cloud.user import RpcAuthenticator, RpcAvatar, RpcUser, RpcUserEmail
+
+
+def serialize_rpc_user(user: User) -> RpcUser:
+ args = {
+ field_name: getattr(user, field_name)
+ for field_name in RpcUser.__fields__
+ if hasattr(user, field_name)
+ }
+ args["pk"] = user.pk
+ args["display_name"] = user.get_display_name()
+ args["label"] = user.get_label()
+ args["is_superuser"] = user.is_superuser
+ args["is_sentry_app"] = user.is_sentry_app or False
+ args["password_usable"] = user.has_usable_password()
+
+ # Prefer eagerloaded attributes from _base_query
+ if hasattr(user, "useremails") and user.useremails is not None:
+ args["emails"] = frozenset([e["email"] for e in user.useremails if e["is_verified"]])
+ else:
+ args["emails"] = frozenset([email.email for email in user.get_verified_emails()])
+ args["session_nonce"] = user.session_nonce
+
+ # And process the _base_query special data additions
+ args["permissions"] = frozenset(getattr(user, "permissions", None) or ())
+
+ if args["name"] is None:
+ # This field is non-nullable according to the Django schema, but may be null
+ # on some servers due to migration history
+ args["name"] = ""
+
+ roles: FrozenSet[str] = frozenset()
+ if hasattr(user, "roles") and user.roles is not None:
+ roles = frozenset(_flatten(user.roles))
+ args["roles"] = roles
+
+ args["useremails"] = [
+ RpcUserEmail(id=e["id"], email=e["email"], is_verified=e["is_verified"])
+ for e in (getattr(user, "useremails", None) or ())
+ ]
+
+ avatar = None
+ # Use eagerloaded attributes from _base_query() if available.
+ if hasattr(user, "useravatar"):
+ if user.useravatar is not None:
+ avatar_dict = user.useravatar[0]
+ avatar_type_map = dict(UserAvatar.AVATAR_TYPES)
+ avatar = RpcAvatar(
+ id=avatar_dict["id"],
+ file_id=avatar_dict["file_id"],
+ ident=avatar_dict["ident"],
+ avatar_type=avatar_type_map.get(avatar_dict["avatar_type"], "letter_avatar"),
+ )
+ else:
+ orm_avatar = user.avatar.first()
+ if orm_avatar is not None:
+ avatar = RpcAvatar(
+ id=orm_avatar.id,
+ file_id=orm_avatar.file_id,
+ ident=orm_avatar.ident,
+ avatar_type=orm_avatar.get_avatar_type_display(),
+ )
+ args["avatar"] = avatar
+
+ args["authenticators"] = [
+ RpcAuthenticator(
+ id=a["id"],
+ user_id=a["user_id"],
+ created_at=a["created_at"],
+ last_used_at=a["last_used_at"],
+ type=a["type"],
+ config=a["config"],
+ )
+ for a in (getattr(user, "authenticators", None) or ())
+ ]
+
+ return RpcUser(**args)
+
+
+def _flatten(iter: Iterable[Any]) -> List[Any]:
+ return (
+ ((_flatten(iter[0]) + _flatten(iter[1:])) if len(iter) > 0 else [])
+ if type(iter) is list or isinstance(iter, BaseQuerySet)
+ else [iter]
+ )
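`_flatten` treats lists and BaseQuerySets as nested containers (recursing head-first) and everything else as a leaf, so arbitrarily nested role collections collapse to a flat list. For example:

    assert _flatten([["a", "b"], "c", [["d"]]]) == ["a", "b", "c", "d"]
    assert _flatten("a") == ["a"]  # non-container leaves are wrapped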
diff --git a/src/sentry/services/hybrid_cloud/user/service.py b/src/sentry/services/hybrid_cloud/user/service.py
new file mode 100644
index 00000000000000..c726f739de0e71
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/user/service.py
@@ -0,0 +1,119 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import TYPE_CHECKING, Any, List, Optional, cast
+
+from sentry.services.hybrid_cloud.filter_query import OpaqueSerializedResponse
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.services.hybrid_cloud.user import (
+ RpcUser,
+ UserFilterArgs,
+ UserSerializeType,
+ UserUpdateArgs,
+)
+from sentry.silo import SiloMode
+
+if TYPE_CHECKING:
+ from sentry.models import Group
+ from sentry.services.hybrid_cloud.auth import AuthenticationContext
+
+
+class UserService(RpcService):
+ key = "user"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.user.impl import DatabaseBackedUserService
+
+ return DatabaseBackedUserService()
+
+ @rpc_method
+ @abstractmethod
+ def serialize_many(
+ self,
+ *,
+ filter: UserFilterArgs,
+ as_user: Optional[RpcUser] = None,
+ auth_context: Optional["AuthenticationContext"] = None,
+ serializer: Optional[UserSerializeType] = None,
+ ) -> List[OpaqueSerializedResponse]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_many(self, *, filter: UserFilterArgs) -> List[RpcUser]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_many_by_email(
+ self,
+ *,
+ emails: List[str],
+ is_active: bool = True,
+ is_verified: bool = True,
+ is_project_member: bool = False,
+ project_id: Optional[int] = None,
+ ) -> List[RpcUser]:
+ """
+        Return a list of users matching the filters.
+        :param emails:
+            A list of case-insensitive emails to match
+ :return:
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_by_username(
+ self, *, username: str, with_valid_password: bool = True, is_active: Optional[bool] = None
+ ) -> List[RpcUser]:
+ """
+        Return a list of users matching a username, falling back to email.
+        :param username:
+            A case-insensitive username/email to match
+        :param with_valid_password:
+            Filter to ensure a password is set
+        :param is_active:
+            Filter for only active users
+ :return:
+ """
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_from_group(self, *, group: "Group") -> List[RpcUser]:
+ """Get all users in all teams in a given Group's project."""
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_by_actor_ids(self, *, actor_ids: List[int]) -> List[RpcUser]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def update_user(self, *, user_id: int, attrs: UserUpdateArgs) -> Any:
+ # Returns a serialized user
+ pass
+
+ @rpc_method
+ def get_user(self, user_id: int) -> Optional[RpcUser]:
+ """
+        Return the user with the given ID, or None if no matching user exists.
+ :param user_id:
+ A user ID to fetch
+ :return:
+ """
+ users = self.get_many(filter=dict(user_ids=[user_id]))
+ if len(users) > 0:
+ return users[0]
+ else:
+ return None
+
+
+user_service: UserService = cast(UserService, UserService.create_delegation())
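`get_user` is defined concretely on the base service in terms of the abstract `get_many`, so the local and remote delegates share the same fallback-to-None behavior. A sketch (user id invented):

    maybe_user = user_service.get_user(user_id=42)
    display = maybe_user.get_display_name() if maybe_user is not None else "unknown"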
diff --git a/src/sentry/services/hybrid_cloud/user_option/__init__.py b/src/sentry/services/hybrid_cloud/user_option/__init__.py
index ef1384d4e3b802..2a9746c30ef42c 100644
--- a/src/sentry/services/hybrid_cloud/user_option/__init__.py
+++ b/src/sentry/services/hybrid_cloud/user_option/__init__.py
@@ -1,99 +1,2 @@
-# Please do not use
-# from __future__ import annotations
-# in modules such as this one where hybrid cloud service classes and data models are
-# defined, because we want to reflect on type annotations and avoid forward references.
-
-from abc import abstractmethod
-from typing import Any, Iterable, List, Optional, cast
-
-from typing_extensions import TypedDict
-
-from sentry.services.hybrid_cloud import RpcModel
-from sentry.services.hybrid_cloud.auth import AuthenticationContext
-from sentry.services.hybrid_cloud.filter_query import OpaqueSerializedResponse
-from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
-from sentry.services.hybrid_cloud.user import RpcUser
-from sentry.silo import SiloMode
-
-
-class RpcUserOption(RpcModel):
- id: int = -1
- user_id: int = -1
- value: Any = None
- key: str = ""
- project_id: Optional[int] = None
- organization_id: Optional[int] = None
-
-
-def get_option_from_list(
- options: List[RpcUserOption],
- *,
- key: Optional[str] = None,
- user_id: Optional[int] = None,
- default: Any = None,
-) -> Any:
- for option in options:
- if key is not None and option.key != key:
- continue
- if user_id is not None and option.user_id != user_id:
- continue
- return option.value
- return default
-
-
-class UserOptionFilterArgs(TypedDict, total=False):
- user_ids: Iterable[int]
- keys: List[str]
- key: str
- project_id: Optional[int]
- organization_id: Optional[int]
-
-
-class UserOptionService(RpcService):
- key = "user_option"
- local_mode = SiloMode.CONTROL
-
- @classmethod
- def get_local_implementation(cls) -> RpcService:
- from sentry.services.hybrid_cloud.user_option.impl import DatabaseBackedUserOptionService
-
- return DatabaseBackedUserOptionService()
-
- @rpc_method
- @abstractmethod
- def serialize_many(
- self,
- *,
- filter: UserOptionFilterArgs,
- as_user: Optional[RpcUser] = None,
- auth_context: Optional[AuthenticationContext] = None,
- ) -> List[OpaqueSerializedResponse]:
- pass
-
- @rpc_method
- @abstractmethod
- def get_many(self, *, filter: UserOptionFilterArgs) -> List[RpcUserOption]:
- pass
-
- @rpc_method
- @abstractmethod
- def delete_options(self, *, option_ids: List[int]) -> None:
- pass
-
- @rpc_method
- @abstractmethod
- def set_option(
- self,
- *,
- user_id: int,
- value: Any,
- key: str,
- project_id: Optional[int] = None,
- organization_id: Optional[int] = None,
- ) -> None:
- pass
-
-
-user_option_service: UserOptionService = cast(
- UserOptionService, UserOptionService.create_delegation()
-)
+from .model import * # noqa
+from .service import * # noqa
diff --git a/src/sentry/services/hybrid_cloud/user_option/model.py b/src/sentry/services/hybrid_cloud/user_option/model.py
new file mode 100644
index 00000000000000..5a7f25afe6a23b
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/user_option/model.py
@@ -0,0 +1,27 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from typing import Any, Iterable, List, Optional
+
+from typing_extensions import TypedDict
+
+from sentry.services.hybrid_cloud import RpcModel
+
+
+class RpcUserOption(RpcModel):
+ id: int = -1
+ user_id: int = -1
+ value: Any = None
+ key: str = ""
+ project_id: Optional[int] = None
+ organization_id: Optional[int] = None
+
+
+class UserOptionFilterArgs(TypedDict, total=False):
+ user_ids: Iterable[int]
+ keys: List[str]
+ key: str
+ project_id: Optional[int]
+ organization_id: Optional[int]
diff --git a/src/sentry/services/hybrid_cloud/user_option/service.py b/src/sentry/services/hybrid_cloud/user_option/service.py
new file mode 100644
index 00000000000000..1d5fdf1ba217dc
--- /dev/null
+++ b/src/sentry/services/hybrid_cloud/user_option/service.py
@@ -0,0 +1,80 @@
+# Please do not use
+# from __future__ import annotations
+# in modules such as this one where hybrid cloud data models or service classes are
+# defined, because we want to reflect on type annotations and avoid forward references.
+
+from abc import abstractmethod
+from typing import Any, List, Optional, cast
+
+from sentry.services.hybrid_cloud.auth import AuthenticationContext
+from sentry.services.hybrid_cloud.filter_query import OpaqueSerializedResponse
+from sentry.services.hybrid_cloud.rpc import RpcService, rpc_method
+from sentry.services.hybrid_cloud.user import RpcUser
+from sentry.services.hybrid_cloud.user_option import RpcUserOption, UserOptionFilterArgs
+from sentry.silo import SiloMode
+
+
+def get_option_from_list(
+ options: List[RpcUserOption],
+ *,
+ key: Optional[str] = None,
+ user_id: Optional[int] = None,
+ default: Any = None,
+) -> Any:
+ for option in options:
+ if key is not None and option.key != key:
+ continue
+ if user_id is not None and option.user_id != user_id:
+ continue
+ return option.value
+ return default
+
+
+class UserOptionService(RpcService):
+ key = "user_option"
+ local_mode = SiloMode.CONTROL
+
+ @classmethod
+ def get_local_implementation(cls) -> RpcService:
+ from sentry.services.hybrid_cloud.user_option.impl import DatabaseBackedUserOptionService
+
+ return DatabaseBackedUserOptionService()
+
+ @rpc_method
+ @abstractmethod
+ def serialize_many(
+ self,
+ *,
+ filter: UserOptionFilterArgs,
+ as_user: Optional[RpcUser] = None,
+ auth_context: Optional[AuthenticationContext] = None,
+ ) -> List[OpaqueSerializedResponse]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def get_many(self, *, filter: UserOptionFilterArgs) -> List[RpcUserOption]:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def delete_options(self, *, option_ids: List[int]) -> None:
+ pass
+
+ @rpc_method
+ @abstractmethod
+ def set_option(
+ self,
+ *,
+ user_id: int,
+ value: Any,
+ key: str,
+ project_id: Optional[int] = None,
+ organization_id: Optional[int] = None,
+ ) -> None:
+ pass
+
+
+user_option_service: UserOptionService = cast(
+ UserOptionService, UserOptionService.create_delegation()
+)
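`get_option_from_list` filters client-side over options already fetched with `get_many`, saving a second RPC round trip when several keys or users were loaded at once. A hedged sketch (option key and ids invented):

    options = user_option_service.get_many(
        filter={"user_ids": [42], "keys": ["theme"]}
    )
    theme = get_option_from_list(options, key="theme", user_id=42, default="light")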
diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py
index 21a04d84e55502..eb19713f8b45ea 100644
--- a/src/sentry/testutils/factories.py
+++ b/src/sentry/testutils/factories.py
@@ -102,7 +102,7 @@
from sentry.models.releasefile import update_artifact_index
from sentry.sentry_apps import SentryAppInstallationCreator, SentryAppInstallationTokenCreator
from sentry.sentry_apps.apps import SentryAppCreator
-from sentry.services.hybrid_cloud.app import app_service
+from sentry.services.hybrid_cloud.app.serial import serialize_sentry_app_installation
from sentry.services.hybrid_cloud.hook import hook_service
from sentry.services.hybrid_cloud.organizationmember_mapping import (
organizationmember_mapping_service,
@@ -944,7 +944,7 @@ def create_sentry_app_installation(
install.status = SentryAppInstallationStatus.INSTALLED if status is None else status
install.save()
- rpc_install = app_service.serialize_sentry_app_installation(install, install.sentry_app)
+ rpc_install = serialize_sentry_app_installation(install, install.sentry_app)
if not prevent_token_exchange and (install.sentry_app.status != SentryAppStatus.INTERNAL):
token_exchange.GrantExchanger.run(
diff --git a/tests/sentry/auth/test_helper.py b/tests/sentry/auth/test_helper.py
index 36815171c834fb..8f92feb7c3cf4b 100644
--- a/tests/sentry/auth/test_helper.py
+++ b/tests/sentry/auth/test_helper.py
@@ -19,7 +19,7 @@
OrganizationMember,
UserEmail,
)
-from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+from sentry.services.hybrid_cloud.organization.serial import serialize_organization
from sentry.testutils import TestCase
from sentry.testutils.hybrid_cloud import HybridCloudTestMixin
from sentry.testutils.silo import control_silo_test, exempt_from_silo_limits
@@ -59,9 +59,7 @@ def handler(self):
def _handler_with(self, identity):
with exempt_from_silo_limits():
- rpc_organization = DatabaseBackedOrganizationService.serialize_organization(
- self.organization
- )
+ rpc_organization = serialize_organization(self.organization)
return AuthIdentityHandler(
self.auth_provider,
DummyProvider(self.provider),
@@ -210,9 +208,7 @@ def test_no_invite_members_flag(self, mock_auth):
assert getattr(persisted_om.flags, "sso:linked")
assert getattr(persisted_om.flags, "member-limit:restricted")
assert not getattr(persisted_om.flags, "sso:invalid")
- expected_rpc_org = DatabaseBackedOrganizationService.serialize_organization(
- self.organization
- )
+ expected_rpc_org = serialize_organization(self.organization)
features_has.assert_any_call("organizations:invite-members", expected_rpc_org)
self.assert_org_member_mapping(org_member=persisted_om)
@@ -335,7 +331,7 @@ def _test_simple(self, mock_render, expected_template):
assert request is self.request
assert status == 200
- expected_org = DatabaseBackedOrganizationService.serialize_organization(self.organization)
+ expected_org = serialize_organization(self.organization)
assert context["organization"] == expected_org
assert context["identity"] == self.identity
diff --git a/tests/sentry/hybrid_cloud/test_integration.py b/tests/sentry/hybrid_cloud/test_integration.py
index 47a7c4da0874e4..5f25d0e059fa99 100644
--- a/tests/sentry/hybrid_cloud/test_integration.py
+++ b/tests/sentry/hybrid_cloud/test_integration.py
@@ -15,6 +15,10 @@
RpcOrganizationIntegration,
integration_service,
)
+from sentry.services.hybrid_cloud.integration.serial import (
+ serialize_integration,
+ serialize_organization_integration,
+)
from sentry.testutils import TestCase
from sentry.testutils.silo import all_silo_test, exempt_from_silo_limits
@@ -93,7 +97,7 @@ def verify_org_integration_result(self, result: RpcIntegration, expected: Integr
@all_silo_test(stable=True)
class IntegrationServiceTest(BaseIntegrationServiceTest):
def test_serialize_integration(self):
- api_integration1 = integration_service._serialize_integration(self.integration1)
+ api_integration1 = serialize_integration(self.integration1)
self.verify_integration_result(result=api_integration1, expected=self.integration1)
def test_get_integrations(self):
@@ -161,7 +165,7 @@ def test_update_integrations(self):
assert i.metadata == new_metadata
def test_get_installation(self):
- api_integration1 = integration_service._serialize_integration(integration=self.integration1)
+ api_integration1 = serialize_integration(integration=self.integration1)
api_install = integration_service.get_installation(
integration=api_integration1, organization_id=self.organization.id
)
@@ -171,9 +175,7 @@ def test_get_installation(self):
def test_has_feature(self):
for feature in IntegrationFeatures:
- api_integration2 = integration_service._serialize_integration(
- integration=self.integration2
- )
+ api_integration2 = serialize_integration(integration=self.integration2)
integration_has_feature = self.integration2.has_feature(feature)
api_integration_has_feature = integration_service.has_feature(
provider=api_integration2.provider, feature=feature
@@ -184,9 +186,7 @@ def test_has_feature(self):
@all_silo_test(stable=True)
class OrganizationIntegrationServiceTest(BaseIntegrationServiceTest):
def test_serialize_org_integration(self):
- rpc_org_integration1 = integration_service._serialize_organization_integration(
- self.org_integration1
- )
+ rpc_org_integration1 = serialize_organization_integration(self.org_integration1)
self.verify_org_integration_result(
result=rpc_org_integration1, expected=self.org_integration1
)
diff --git a/tests/sentry/hybrid_cloud/test_organization.py b/tests/sentry/hybrid_cloud/test_organization.py
index c8b8ab76bc2d4a..1b86baf493f650 100644
--- a/tests/sentry/hybrid_cloud/test_organization.py
+++ b/tests/sentry/hybrid_cloud/test_organization.py
@@ -20,7 +20,7 @@
RpcTeamMember,
organization_service,
)
-from sentry.services.hybrid_cloud.organization.impl import unescape_flag_name
+from sentry.services.hybrid_cloud.organization.serial import unescape_flag_name
from sentry.testutils.factories import Factories
from sentry.testutils.hybrid_cloud import use_real_service
from sentry.testutils.silo import all_silo_test
diff --git a/tests/sentry/hybrid_cloud/test_rpc.py b/tests/sentry/hybrid_cloud/test_rpc.py
index e16896dbf2a952..11ddf88e75b218 100644
--- a/tests/sentry/hybrid_cloud/test_rpc.py
+++ b/tests/sentry/hybrid_cloud/test_rpc.py
@@ -11,14 +11,14 @@
RpcOrganizationMemberFlags,
RpcUserOrganizationContext,
)
-from sentry.services.hybrid_cloud.organization.impl import DatabaseBackedOrganizationService
+from sentry.services.hybrid_cloud.organization.serial import serialize_organization
from sentry.services.hybrid_cloud.rpc import (
RpcSendException,
dispatch_remote_call,
dispatch_to_local_service,
)
from sentry.services.hybrid_cloud.user import RpcUser
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.silo import SiloMode
from sentry.testutils import TestCase
from sentry.testutils.region import override_regions
@@ -46,7 +46,7 @@ def test_remote_service(self, mock_dispatch_remote_call):
)
serial_user = RpcUser(id=user.id)
- serial_org = DatabaseBackedOrganizationService.serialize_organization(organization)
+ serial_org = serialize_organization(organization)
service = OrganizationService.create_delegation()
with override_regions(_REGIONS), override_settings(SILO_MODE=SiloMode.CONTROL):
@@ -100,7 +100,7 @@ def test_dispatch_to_local_service(self):
user = self.create_user()
organization = self.create_organization()
- serial_org = DatabaseBackedOrganizationService.serialize_organization(organization)
+ serial_org = serialize_organization(organization)
serial_arguments = dict(
organization_id=serial_org.id,
default_org_role=serial_org.default_role,
@@ -139,9 +139,7 @@ def _set_up_mock_response(mock_urlopen, response_value):
@mock.patch("sentry.services.hybrid_cloud.rpc.urlopen")
def test_region_to_control_happy_path(self, mock_urlopen):
org = self.create_organization()
- response_value = RpcUserOrganizationContext(
- organization=DatabaseBackedOrganizationService.serialize_organization(org)
- )
+ response_value = RpcUserOrganizationContext(organization=serialize_organization(org))
self._set_up_mock_response(mock_urlopen, response_value.dict())
result = dispatch_remote_call(
diff --git a/tests/sentry/incidents/endpoints/test_serializers.py b/tests/sentry/incidents/endpoints/test_serializers.py
index bcb295fa6985bd..9d9b3c8c47e436 100644
--- a/tests/sentry/incidents/endpoints/test_serializers.py
+++ b/tests/sentry/incidents/endpoints/test_serializers.py
@@ -27,7 +27,7 @@
from sentry.models import ACTOR_TYPES, Environment, Integration
from sentry.models.actor import get_actor_for_user
from sentry.models.user import User
-from sentry.services.hybrid_cloud.integration import integration_service
+from sentry.services.hybrid_cloud.integration.serial import serialize_integration
from sentry.snuba.dataset import Dataset
from sentry.snuba.models import SnubaQuery, SnubaQueryEventType
from sentry.testutils import TestCase
@@ -570,7 +570,7 @@ def test_invalid_team_with_channel_timeout(self, mock_get_channel_id):
serializer.save()
assert excinfo.value.detail == {"nonFieldErrors": ["Team does not exist"]}
mock_get_channel_id.assert_called_with(
- integration_service._serialize_integration(self.integration), "my-channel", 10
+ serialize_integration(self.integration), "my-channel", 10
)
def test_event_types(self):
diff --git a/tests/sentry/integrations/jira/test_integration.py b/tests/sentry/integrations/jira/test_integration.py
index 46d2f6e805ec40..514f737599c72d 100644
--- a/tests/sentry/integrations/jira/test_integration.py
+++ b/tests/sentry/integrations/jira/test_integration.py
@@ -20,7 +20,7 @@
OrganizationIntegration,
)
from sentry.services.hybrid_cloud.integration import integration_service
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.shared_integrations.exceptions import IntegrationError
from sentry.testutils import APITestCase, IntegrationTestCase
from sentry.testutils.factories import DEFAULT_EVENT_DATA
diff --git a/tests/sentry/integrations/jira_server/test_integration.py b/tests/sentry/integrations/jira_server/test_integration.py
index 59e53133d3c50e..ce6d40eb4d53f1 100644
--- a/tests/sentry/integrations/jira_server/test_integration.py
+++ b/tests/sentry/integrations/jira_server/test_integration.py
@@ -18,7 +18,7 @@
OrganizationIntegration,
)
from sentry.services.hybrid_cloud.integration import integration_service
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.shared_integrations.exceptions import IntegrationError
from sentry.testutils import APITestCase
from sentry.testutils.factories import DEFAULT_EVENT_DATA
diff --git a/tests/sentry/integrations/slack/test_tasks.py b/tests/sentry/integrations/slack/test_tasks.py
index 3a97c5c79b1096..6d430e2483622b 100644
--- a/tests/sentry/integrations/slack/test_tasks.py
+++ b/tests/sentry/integrations/slack/test_tasks.py
@@ -9,7 +9,7 @@
from sentry.integrations.slack.utils import SLACK_RATE_LIMITED_MESSAGE, RedisRuleStatus
from sentry.models import Rule
from sentry.receivers.rules import DEFAULT_RULE_LABEL
-from sentry.services.hybrid_cloud.integration import integration_service
+from sentry.services.hybrid_cloud.integration.serial import serialize_integration
from sentry.tasks.integrations.slack import (
find_channel_id_for_alert_rule,
find_channel_id_for_rule,
@@ -255,7 +255,7 @@ def test_task_new_alert_rule(self, mock_get_channel_id, mock_set_value):
assert rule.created_by_id == self.user.id
mock_set_value.assert_called_with("success", rule.id)
mock_get_channel_id.assert_called_with(
- integration_service._serialize_integration(self.integration), "my-channel", 180
+ serialize_integration(self.integration), "my-channel", 180
)
trigger_action = AlertRuleTriggerAction.objects.get(integration_id=self.integration.id)
@@ -282,7 +282,7 @@ def test_task_failed_id_lookup(self, mock_get_channel_id, mock_set_value):
assert not AlertRule.objects.filter(name="New Rule").exists()
mock_set_value.assert_called_with("failed")
mock_get_channel_id.assert_called_with(
- integration_service._serialize_integration(self.integration), "my-channel", 180
+ serialize_integration(self.integration), "my-channel", 180
)
@patch.object(RedisRuleStatus, "set_value", return_value=None)
@@ -306,7 +306,7 @@ def test_task_timeout_id_lookup(self, mock_get_channel_id, mock_set_value):
assert not AlertRule.objects.filter(name="New Rule").exists()
mock_set_value.assert_called_with("failed")
mock_get_channel_id.assert_called_with(
- integration_service._serialize_integration(self.integration), "my-channel", 180
+ serialize_integration(self.integration), "my-channel", 180
)
@patch.object(RedisRuleStatus, "set_value", return_value=None)
@@ -334,7 +334,7 @@ def test_task_existing_metric_alert(self, mock_get_channel_id, mock_set_value):
rule = AlertRule.objects.get(name="New Rule")
mock_set_value.assert_called_with("success", rule.id)
mock_get_channel_id.assert_called_with(
- integration_service._serialize_integration(self.integration), "my-channel", 180
+ serialize_integration(self.integration), "my-channel", 180
)
trigger_action = AlertRuleTriggerAction.objects.get(integration_id=self.integration.id)
diff --git a/tests/sentry/integrations/slack/test_unfurl.py b/tests/sentry/integrations/slack/test_unfurl.py
index 422a0d6b1f977a..49f2f7090b0be6 100644
--- a/tests/sentry/integrations/slack/test_unfurl.py
+++ b/tests/sentry/integrations/slack/test_unfurl.py
@@ -14,7 +14,7 @@
from sentry.integrations.slack.message_builder.issues import SlackIssuesMessageBuilder
from sentry.integrations.slack.message_builder.metric_alerts import SlackMetricAlertMessageBuilder
from sentry.integrations.slack.unfurl import LinkType, UnfurlableUrl, link_handlers, match_link
-from sentry.services.hybrid_cloud.integration import integration_service
+from sentry.services.hybrid_cloud.integration.serial import serialize_integration
from sentry.snuba.dataset import Dataset
from sentry.testutils import TestCase
from sentry.testutils.helpers import install_slack
@@ -175,7 +175,7 @@ def setUp(self):
# Sharing project ids across tests could result in some race conditions
self.project = self.create_project()
self._integration = install_slack(self.organization)
- self.integration = integration_service._serialize_integration(self._integration)
+ self.integration = serialize_integration(self._integration)
self.request = RequestFactory().get("slack/event")
self.frozen_time = freezegun.freeze_time(datetime.now() - timedelta(days=1))
diff --git a/tests/sentry/integrations/test_notification_utilities.py b/tests/sentry/integrations/test_notification_utilities.py
index 0e23f4515a8843..19323fd48042c9 100644
--- a/tests/sentry/integrations/test_notification_utilities.py
+++ b/tests/sentry/integrations/test_notification_utilities.py
@@ -5,7 +5,8 @@
from sentry.integrations.notifications import get_integrations_by_channel_by_recipient
from sentry.models import Integration, User
from sentry.services.hybrid_cloud.actor import RpcActor
-from sentry.services.hybrid_cloud.integration import RpcIntegration, integration_service
+from sentry.services.hybrid_cloud.integration import RpcIntegration
+from sentry.services.hybrid_cloud.integration.serial import serialize_integration
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers.notifications import DummyNotification
from sentry.types.integrations import ExternalProviders
@@ -18,7 +19,7 @@ def setUp(self):
self.external_user_id_1 = "UXXXXXXX1"
self.integration = self.create_slack_integration(self.notification.organization)
- self.api_integration = integration_service._serialize_integration(self.integration)
+ self.api_integration = serialize_integration(self.integration)
self.user_2 = self.create_user()
self.external_team_id_2 = "TXXXXXXX2"
@@ -28,7 +29,7 @@ def setUp(self):
user=self.user_2,
identity_external_id=self.external_team_id_2,
)
- self.api_integration2 = integration_service._serialize_integration(self.integration2)
+ self.api_integration2 = serialize_integration(self.integration2)
def _assert_integrations_are(
self,
diff --git a/tests/sentry/manager/test_external_issue_manager.py b/tests/sentry/manager/test_external_issue_manager.py
index bce2b3043a055e..20f44f54cb445b 100644
--- a/tests/sentry/manager/test_external_issue_manager.py
+++ b/tests/sentry/manager/test_external_issue_manager.py
@@ -1,5 +1,5 @@
from sentry.models.integrations.external_issue import ExternalIssue
-from sentry.services.hybrid_cloud.integration import integration_service
+from sentry.services.hybrid_cloud.integration.serial import serialize_integration
from sentry.testutils import TestCase
from sentry.testutils.silo import region_silo_test
@@ -11,15 +11,15 @@ def setUp(self):
self.integration1 = self.create_integration(
organization=self.organization, external_id="example:1", provider="example"
)
- self.api_integration1 = integration_service._serialize_integration(self.integration1)
+ self.api_integration1 = serialize_integration(self.integration1)
self.integration2 = self.create_integration(
organization=self.organization, external_id="example:2", provider="example"
)
- self.api_integration2 = integration_service._serialize_integration(self.integration2)
+ self.api_integration2 = serialize_integration(self.integration2)
self.integration3 = self.create_integration(
organization=self.create_organization(), external_id="example:3", provider="example"
)
- self.api_integration3 = integration_service._serialize_integration(self.integration3)
+ self.api_integration3 = serialize_integration(self.integration3)
self.event1 = self.store_event(
data={"event_id": "a" * 32, "message": "ooop"},
diff --git a/tests/sentry/manager/test_group_manager.py b/tests/sentry/manager/test_group_manager.py
index db75db2505346c..2c51eae17470f4 100644
--- a/tests/sentry/manager/test_group_manager.py
+++ b/tests/sentry/manager/test_group_manager.py
@@ -1,5 +1,5 @@
from sentry.models import Group, Integration
-from sentry.services.hybrid_cloud.integration import integration_service
+from sentry.services.hybrid_cloud.integration.serial import serialize_integration
from sentry.testutils import TestCase
@@ -20,7 +20,7 @@ def test_get_groups_by_external_issue(self):
metadata={"base_url": "https://example.com"},
)
integration_model.add_organization(group.organization, self.user)
- integration = integration_service._serialize_integration(integration=integration_model)
+ integration = serialize_integration(integration=integration_model)
self.create_integration_external_issue(
group=group, integration=integration, key=external_issue_key
)
diff --git a/tests/sentry/mediators/external_issues/test_issue_link_creator.py b/tests/sentry/mediators/external_issues/test_issue_link_creator.py
index 2527411608c847..d324793a198674 100644
--- a/tests/sentry/mediators/external_issues/test_issue_link_creator.py
+++ b/tests/sentry/mediators/external_issues/test_issue_link_creator.py
@@ -5,7 +5,7 @@
from sentry.mediators.external_issues import IssueLinkCreator
from sentry.models import PlatformExternalIssue
from sentry.services.hybrid_cloud.app import app_service
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.testutils import TestCase
from sentry.testutils.silo import region_silo_test
diff --git a/tests/sentry/mediators/external_requests/test_issue_link_requester.py b/tests/sentry/mediators/external_requests/test_issue_link_requester.py
index afcf2ebdf19b5c..e0c3821ed37113 100644
--- a/tests/sentry/mediators/external_requests/test_issue_link_requester.py
+++ b/tests/sentry/mediators/external_requests/test_issue_link_requester.py
@@ -4,7 +4,7 @@
from sentry.coreapi import APIError
from sentry.mediators.external_requests import IssueLinkRequester
from sentry.services.hybrid_cloud.app import app_service
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.testutils import TestCase
from sentry.testutils.silo import region_silo_test
from sentry.utils import json
diff --git a/tests/sentry/middleware/test_auth.py b/tests/sentry/middleware/test_auth.py
index ce9dd31c70c568..0de06f98b59107 100644
--- a/tests/sentry/middleware/test_auth.py
+++ b/tests/sentry/middleware/test_auth.py
@@ -7,7 +7,7 @@
from sentry.middleware.auth import AuthenticationMiddleware
from sentry.models import ApiKey, ApiToken, UserIP
from sentry.services.hybrid_cloud.auth import AuthenticatedToken
-from sentry.services.hybrid_cloud.user.impl import serialize_rpc_user
+from sentry.services.hybrid_cloud.user.serial import serialize_rpc_user
from sentry.silo import SiloMode
from sentry.testutils import TestCase
from sentry.testutils.outbox import outbox_runner
diff --git a/tests/sentry/notifications/utils/test_transforms.py b/tests/sentry/notifications/utils/test_transforms.py
index 8eeb1efb8257fd..5becf1fc83c1f4 100644
--- a/tests/sentry/notifications/utils/test_transforms.py
+++ b/tests/sentry/notifications/utils/test_transforms.py
@@ -10,7 +10,7 @@
NotificationSettingTypes,
)
from sentry.services.hybrid_cloud.actor import RpcActor
-from sentry.services.hybrid_cloud.notifications import NotificationsService
+from sentry.services.hybrid_cloud.notifications.serial import serialize_notification_setting
from sentry.testutils import TestCase
from sentry.testutils.silo import control_silo_test
from sentry.types.integrations import ExternalProviders
@@ -42,8 +42,7 @@ def setUp(self) -> None:
self.user_actor = RpcActor.from_orm_user(self.user)
self.rpc_notification_settings = [
- NotificationsService.serialize_notification_setting(setting)
- for setting in self.notification_settings
+ serialize_notification_setting(setting) for setting in self.notification_settings
]
diff --git a/tests/sentry/sentry_apps/test_sentry_app_component_preparer.py b/tests/sentry/sentry_apps/test_sentry_app_component_preparer.py
index f85277259ee36f..2235b893f24753 100644
--- a/tests/sentry/sentry_apps/test_sentry_app_component_preparer.py
+++ b/tests/sentry/sentry_apps/test_sentry_app_component_preparer.py
@@ -2,7 +2,7 @@
from sentry.models import Organization
from sentry.sentry_apps.components import SentryAppComponentPreparer
-from sentry.services.hybrid_cloud.app import app_service
+from sentry.services.hybrid_cloud.app.serial import serialize_sentry_app_installation
from sentry.testutils import TestCase
from sentry.testutils.silo import control_silo_test, exempt_from_silo_limits
from sentry.utils import json
@@ -58,9 +58,7 @@ def test_prepares_components_requiring_requests(self, run):
self.preparer.run()
- install = app_service.serialize_sentry_app_installation(
- self.install, self.install.sentry_app
- )
+ install = serialize_sentry_app_installation(self.install, self.install.sentry_app)
assert (
call(
install=install,
@@ -194,9 +192,7 @@ def test_prepares_components_requiring_requests(self, run):
self.preparer.run()
- install = app_service.serialize_sentry_app_installation(
- self.install, self.install.sentry_app
- )
+ install = serialize_sentry_app_installation(self.install, self.install.sentry_app)
assert (
call(
|
e85c19d47d06c5a34cdfd8ba5d8e43be53b507fd
|
2019-06-03 22:48:10
|
Jan Michael Auer
|
ref(native): Remove the legacy stacktrace processor (#13500)
| false
|
Remove the legacy stacktrace processor (#13500)
|
ref
|
diff --git a/src/sentry/lang/native/minidump.py b/src/sentry/lang/native/minidump.py
index eebfe030861ac1..e18af41bd54833 100644
--- a/src/sentry/lang/native/minidump.py
+++ b/src/sentry/lang/native/minidump.py
@@ -5,14 +5,9 @@
import dateutil.parser as dp
from msgpack import unpack, Unpacker, UnpackException, ExtraData
-from sentry.event_manager import validate_and_set_timestamp
-from sentry.lang.native.utils import get_sdk_from_event, merge_symbolicated_frame
-from sentry.lang.native.symbolicator import merge_symbolicator_image
-from sentry.lang.native.error import SymbolicationFailed, write_error
-from sentry.models.eventerror import EventError
from sentry.attachments import attachment_cache
from sentry.coreapi import cache_key_for_event
-from sentry.utils.safe import get_path, set_path, setdefault_path
+from sentry.utils.safe import setdefault_path
minidumps_logger = logging.getLogger('sentry.minidumps')
@@ -23,11 +18,6 @@
MAX_MSGPACK_EVENT_SIZE_BYTES = 100000
-def is_minidump_event(data):
- exceptions = get_path(data, 'exception', 'values', filter=True)
- return get_path(exceptions, 0, 'mechanism', 'type') in ('minidump', 'unreal')
-
-
def write_minidump_placeholder(data):
# Minidump events must be native platform.
data['platform'] = 'native'
@@ -113,80 +103,3 @@ def get_attached_minidump(data):
cache_key = cache_key_for_event(data)
attachments = attachment_cache.get(cache_key) or []
return next((a for a in attachments if a.type == MINIDUMP_ATTACHMENT_TYPE), None)
-
-
-def merge_symbolicator_minidump_system_info(data, system_info):
- set_path(data, 'contexts', 'os', 'type', value='os') # Required by "get_sdk_from_event"
- setdefault_path(data, 'contexts', 'os', 'name', value=system_info.get('os_name'))
- setdefault_path(data, 'contexts', 'os', 'version', value=system_info.get('os_version'))
- setdefault_path(data, 'contexts', 'os', 'build', value=system_info.get('os_build'))
-
- set_path(data, 'contexts', 'device', 'type', value='device')
- setdefault_path(data, 'contexts', 'device', 'arch', value=system_info.get('cpu_arch'))
-
-
-def merge_symbolicator_minidump_response(data, response):
- data['platform'] = 'native'
- if response.get('crashed') is not None:
- data['level'] = 'fatal' if response['crashed'] else 'info'
-
- validate_and_set_timestamp(data, response.get('timestamp'))
-
- if response.get('system_info'):
- merge_symbolicator_minidump_system_info(data, response['system_info'])
-
- sdk_info = get_sdk_from_event(data)
-
- images = []
- set_path(data, 'debug_meta', 'images', value=images)
-
- for complete_image in response['modules']:
- image = {}
- merge_symbolicator_image(
- image, complete_image, sdk_info,
- lambda e: write_error(e, data)
- )
- images.append(image)
-
- # Extract the crash reason and infos
- data_exception = get_path(data, 'exception', 'values', 0)
- exc_value = (
- 'Assertion Error: %s' % response.get('assertion')
- if response.get('assertion')
- else 'Fatal Error: %s' % response.get('crash_reason')
- )
- data_exception['value'] = exc_value
- data_exception['type'] = response.get('crash_reason')
-
- data_threads = []
- if response['stacktraces']:
- data['threads'] = {'values': data_threads}
- else:
- error = SymbolicationFailed(message='minidump has no thread list',
- type=EventError.NATIVE_SYMBOLICATOR_FAILED)
- write_error(error, data)
-
- for complete_stacktrace in response['stacktraces']:
- is_requesting = complete_stacktrace.get('is_requesting')
- thread_id = complete_stacktrace.get('thread_id')
-
- data_thread = {
- 'id': thread_id,
- 'crashed': is_requesting,
- }
- data_threads.append(data_thread)
-
- if is_requesting:
- data_exception['thread_id'] = thread_id
- data_stacktrace = data_exception.setdefault('stacktrace', {})
- data_stacktrace['frames'] = []
- else:
- data_thread['stacktrace'] = data_stacktrace = {'frames': []}
-
- if complete_stacktrace.get('registers'):
- data_stacktrace['registers'] = complete_stacktrace['registers']
-
- for complete_frame in reversed(complete_stacktrace['frames']):
- new_frame = {}
- merge_symbolicated_frame(new_frame, complete_frame)
- data_stacktrace['frames'].append(new_frame)
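The merge_symbolicator_minidump_system_info code removed above leans on set_path / setdefault_path from sentry.utils.safe. As a reading aid for this diff, here are simplified reimplementations of those two helpers — not the real API, which handles more edge cases:

def set_path(data, *path, value):
    # Simplified: create nested dicts as needed and overwrite the leaf.
    for key in path[:-1]:
        data = data.setdefault(key, {})
    data[path[-1]] = value


def setdefault_path(data, *path, value):
    # Simplified: like set_path, but keep an existing leaf value.
    for key in path[:-1]:
        data = data.setdefault(key, {})
    data.setdefault(path[-1], value)


ctx = {}
set_path(ctx, 'contexts', 'os', 'type', value='os')
setdefault_path(ctx, 'contexts', 'os', 'name', value='macOS')
setdefault_path(ctx, 'contexts', 'os', 'name', value='ignored')
assert ctx['contexts']['os']['name'] == 'macOS'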
diff --git a/src/sentry/lang/native/plugin.py b/src/sentry/lang/native/plugin.py
index 6f36dade0b1d24..7bc35a0e81d0db 100644
--- a/src/sentry/lang/native/plugin.py
+++ b/src/sentry/lang/native/plugin.py
@@ -1,378 +1,15 @@
from __future__ import absolute_import
-import uuid
-import logging
-
-from symbolic import LineInfo, parse_addr, find_best_instruction, arch_get_ip_reg_name, \
- ObjectLookup
-from symbolic.utils import make_buffered_slice_reader
-
-from sentry import options
+from sentry.lang.native.processing import process_minidump, process_payload
+from sentry.lang.native.utils import is_minidump_event, is_native_event
from sentry.plugins import Plugin2
-from sentry.lang.native.error import write_error
-from sentry.lang.native.minidump import get_attached_minidump, is_minidump_event, merge_symbolicator_minidump_response
-from sentry.lang.native.symbolicator import Symbolicator, merge_symbolicator_image, handle_symbolicator_response_status
-from sentry.lang.native.utils import get_sdk_from_event, cpu_name_from_data, \
- merge_symbolicated_frame, native_images_from_data, rebase_addr, signal_from_data, \
- is_native_platform, is_native_event
-from sentry.lang.native.systemsymbols import lookup_system_symbols
-from sentry.models import Project
-from sentry.utils.in_app import is_known_third_party
-from sentry.utils.safe import get_path, trim
-from sentry.stacktraces.processing import StacktraceProcessor, find_stacktraces_in_data
-
-logger = logging.getLogger(__name__)
-
-FRAME_CACHE_VERSION = 6
-
-SYMBOLICATOR_FRAME_ATTRS = ("instruction_addr", "package", "lang", "symbol",
- "function", "symbol_addr", "filename", "lineno",
- "line_addr")
-
-
-def task_id_cache_key_for_event(data):
- return u'symbolicator:{1}:{0}'.format(data['project'], data['event_id'])
-
-
-class NativeStacktraceProcessor(StacktraceProcessor):
- supported_platforms = ('cocoa', 'native')
- # TODO(ja): Clean up all uses of image type "apple", "uuid", "id" and "name"
- supported_images = ('apple', 'symbolic', 'elf', 'macho', 'pe')
-
- def __init__(self, *args, **kwargs):
- StacktraceProcessor.__init__(self, *args, **kwargs)
-
- self.arch = cpu_name_from_data(self.data)
- self.signal = signal_from_data(self.data)
-
- self.sym = None
-
- images = get_path(self.data, 'debug_meta', 'images', default=(),
- filter=self._is_valid_image)
-
- if images:
- self.available = True
- self.sdk_info = get_sdk_from_event(self.data)
- self.object_lookup = ObjectLookup(images)
- self.images = images
- else:
- self.available = False
-
- def _is_valid_image(self, image):
- # TODO(ja): Deprecate this. The symbolicator should take care of
- # filtering valid images.
- return bool(image) \
- and image.get('type') in self.supported_images \
- and image.get('image_addr') is not None \
- and image.get('image_size') is not None \
- and (image.get('debug_id') or image.get('id') or image.get('uuid')) is not None
-
- def close(self):
- StacktraceProcessor.close(self)
-
- def find_best_instruction(self, processable_frame):
- """Given a frame, stacktrace info and frame index this returns the
- interpolated instruction address we then use for symbolication later.
- """
- if self.arch is None:
- return parse_addr(processable_frame['instruction_addr'])
-
- crashing_frame = False
- signal = None
- ip_reg = None
-
- # We only need to provide meta information for frame zero
- if processable_frame.idx == 0:
- # The signal is useful information for symbolic in some situations
- # to disambiguate the first frame. If we can get this information
- # from the mechanism we want to pass it onwards.
- signal = self.signal
-
- registers = processable_frame.stacktrace_info.stacktrace.get('registers')
- if registers:
- ip_reg_name = arch_get_ip_reg_name(self.arch)
- if ip_reg_name:
- ip_reg = registers.get(ip_reg_name)
- crashing_frame = True
-
- return find_best_instruction(
- processable_frame['instruction_addr'],
- arch=self.arch,
- crashing_frame=crashing_frame,
- signal=signal,
- ip_reg=ip_reg
- )
-
- def handles_frame(self, frame, stacktrace_info):
- if not self.available:
- return False
-
- platform = frame.get('platform') or self.data.get('platform')
- if platform not in self.supported_platforms:
- return False
-
- if frame.get('data', {}).get('symbolicator_status') == 'symbolicated':
- return False
-
- if 'instruction_addr' not in frame:
- return False
-
- return True
-
- def preprocess_frame(self, processable_frame):
- instr_addr = self.find_best_instruction(processable_frame)
- obj = self.object_lookup.find_object(instr_addr)
-
- processable_frame.data = {
- 'instruction_addr': instr_addr,
- 'obj': obj,
- 'debug_id': obj.debug_id if obj is not None else None,
- 'symbolserver_match': None,
- }
-
- def preprocess_step(self, processing_task):
- if not self.available:
- return False
-
- if options.get('symbolserver.enabled'):
- self.fetch_ios_system_symbols(processing_task)
-
- def fetch_ios_system_symbols(self, processing_task):
- to_lookup = []
- pf_list = []
- for pf in processing_task.iter_processable_frames(self):
- if pf.cache_value is not None:
- continue
-
- obj = pf.data['obj']
- package = obj and obj.code_file
- # TODO(ja): This should check for iOS specifically. Also
- # check in symbolicator.py for handle_symbolicator_status
- if not package or not is_known_third_party(package):
- continue
-
- # We can only look up objects in the symbol server that have a
- # uuid. If we encounter things with an age appended or
- # similar we need to skip.
- try:
- uuid.UUID(obj.debug_id)
- except (ValueError, TypeError):
- continue
-
- to_lookup.append(
- {
- 'object_uuid': obj.debug_id,
- 'object_name': obj.code_file or '<unknown>',
- 'addr': '0x%x' % rebase_addr(pf.data['instruction_addr'], obj)
- }
- )
- pf_list.append(pf)
-
- if not to_lookup:
- return
-
- rv = lookup_system_symbols(to_lookup, self.sdk_info, self.arch)
- if rv is not None:
- for symrv, pf in zip(rv, pf_list):
- if symrv is None:
- continue
- pf.data['symbolserver_match'] = symrv
-
- def process_frame(self, processable_frame, processing_task):
- frame = processable_frame.frame
- raw_frame = dict(frame)
-
- # Ensure that package is set in the raw frame, mapped from the
- # debug_images array in the payload. Grouping and UI can use this path
- # to infer in_app and exclude frames from grouping.
- if raw_frame.get('package') is None:
- obj = processable_frame.data['obj']
- raw_frame['package'] = obj and obj.code_file or None
-
- symbolicated_frames = convert_ios_symbolserver_match(
- processable_frame.data['instruction_addr'],
- processable_frame.data['symbolserver_match']
- )
-
- if not symbolicated_frames:
- if raw_frame.get('trust') == 'scan':
- return [], [raw_frame], []
- else:
- return None, [raw_frame], []
-
- new_frames = []
- for sfrm in symbolicated_frames:
- new_frame = dict(raw_frame)
- merge_symbolicated_frame(new_frame, sfrm)
- new_frames.append(new_frame)
-
- return new_frames, [raw_frame], []
-
-
-def reprocess_minidump(data):
- project = Project.objects.get_from_cache(id=data['project'])
-
- minidump = get_attached_minidump(data)
-
- if not minidump:
- logger.error("Missing minidump for minidump event")
- return
-
- task_id_cache_key = task_id_cache_key_for_event(data)
-
- symbolicator = Symbolicator(
- project=project,
- task_id_cache_key=task_id_cache_key
- )
-
- response = symbolicator.process_minidump(make_buffered_slice_reader(minidump.data, None))
-
- if handle_symbolicator_response_status(data, response):
- merge_symbolicator_minidump_response(data, response)
-
- return data
-
-
-def _handles_frame(data, frame):
- if not frame:
- return False
-
- if get_path(frame, 'data', 'symbolicator_status') is not None:
- return False
-
- # TODO: Consider ignoring platform
- platform = frame.get('platform') or data.get('platform')
- return is_native_platform(platform) and 'instruction_addr' in frame
-
-
-def process_payload(data):
- project = Project.objects.get_from_cache(id=data['project'])
- task_id_cache_key = task_id_cache_key_for_event(data)
-
- symbolicator = Symbolicator(
- project=project,
- task_id_cache_key=task_id_cache_key
- )
-
- stacktrace_infos = [
- stacktrace
- for stacktrace in find_stacktraces_in_data(data)
- if any(is_native_platform(x) for x in stacktrace.platforms)
- ]
-
- stacktraces = [
- {
- 'registers': sinfo.stacktrace.get('registers') or {},
- 'frames': [
- f for f in reversed(sinfo.stacktrace.get('frames') or ())
- if _handles_frame(data, f)
- ]
- }
- for sinfo in stacktrace_infos
- ]
-
- if not any(stacktrace['frames'] for stacktrace in stacktraces):
- return
-
- modules = native_images_from_data(data)
-
- response = symbolicator.process_payload(stacktraces=stacktraces, modules=modules)
-
- assert len(modules) == len(response['modules']), (modules, response)
-
- sdk_info = get_sdk_from_event(data)
-
- for raw_image, complete_image in zip(modules, response['modules']):
- merge_symbolicator_image(
- raw_image,
- complete_image,
- sdk_info,
- lambda e: write_error(
- e,
- data))
-
- assert len(stacktraces) == len(response['stacktraces']), (stacktraces, response)
-
- for sinfo, complete_stacktrace in zip(stacktrace_infos, response['stacktraces']):
- complete_frames_by_idx = {}
- for complete_frame in complete_stacktrace.get('frames') or ():
- complete_frames_by_idx \
- .setdefault(complete_frame['original_index'], []) \
- .append(complete_frame)
-
- new_frames = []
- native_frames_idx = 0
-
- for raw_frame in reversed(sinfo.stacktrace['frames']):
- if not _handles_frame(data, raw_frame):
- new_frames.append(raw_frame)
- continue
-
- for complete_frame in complete_frames_by_idx.get(native_frames_idx) or ():
- merged_frame = dict(raw_frame)
- merge_symbolicated_frame(merged_frame, complete_frame)
- if merged_frame.get('package'):
- raw_frame['package'] = merged_frame['package']
- new_frames.append(merged_frame)
-
- native_frames_idx += 1
-
- if sinfo.container is not None and native_frames_idx > 0:
- sinfo.container['raw_stacktrace'] = {
- 'frames': list(sinfo.stacktrace['frames']),
- 'registers': sinfo.stacktrace.get('registers')
- }
-
- new_frames.reverse()
- sinfo.stacktrace['frames'] = new_frames
-
- return data
-
-
-def convert_ios_symbolserver_match(instruction_addr, symbolserver_match):
- if not symbolserver_match:
- return []
-
- symbol = symbolserver_match['symbol']
- if symbol[:1] == '_':
- symbol = symbol[1:]
-
- # We still use this construct from symbolic for demangling (at least)
- line_info = LineInfo(
- sym_addr=parse_addr(symbolserver_match['addr']),
- instr_addr=parse_addr(instruction_addr),
- line=None,
- lang=None,
- symbol=symbol
- )
-
- function = line_info.function_name
- package = symbolserver_match['object_name']
-
- return [{
- 'sym_addr': '0x%x' % (line_info.sym_addr,),
- 'instruction_addr': '0x%x' % (line_info.instr_addr,),
- 'function': function,
- 'symbol': symbol if function != symbol else None,
- 'filename': trim(line_info.rel_path, 256),
- 'abs_path': trim(line_info.abs_path, 256),
- 'package': package,
- }]
class NativePlugin(Plugin2):
can_disable = False
def get_event_enhancers(self, data):
- rv = []
-
if is_minidump_event(data):
- rv.append(reprocess_minidump)
- if is_native_event(data):
- rv.append(process_payload)
-
- return rv
-
- def get_stacktrace_processors(self, data, stacktrace_infos, platforms, **kwargs):
- if any(platform in NativeStacktraceProcessor.supported_platforms for platform in platforms):
- return [NativeStacktraceProcessor]
+ return [process_minidump]
+ elif is_native_event(data):
+ return [process_payload]
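With the legacy stacktrace processor gone, the plugin reduces to a thin dispatcher: minidump events go to process_minidump, other native events to process_payload. A self-contained sketch of that dispatch; the stubs and the inline checks are simplified stand-ins for the real processing functions and for is_minidump_event / is_native_event (the latter also inspects stacktrace platforms):

def process_minidump(data):  # stub for the real processing function
    return data


def process_payload(data):  # stub for the real processing function
    return data


def get_event_enhancers(data):
    values = (data.get('exception') or {}).get('values') or []
    mechanism = next((v.get('mechanism') for v in values if v), None) or {}
    if mechanism.get('type') in ('minidump', 'unreal'):
        return [process_minidump]
    if data.get('platform') in ('cocoa', 'native'):
        return [process_payload]


assert get_event_enhancers({'platform': 'native'}) == [process_payload]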
diff --git a/src/sentry/lang/native/processing.py b/src/sentry/lang/native/processing.py
new file mode 100644
index 00000000000000..2e83bcadf11d7d
--- /dev/null
+++ b/src/sentry/lang/native/processing.py
@@ -0,0 +1,336 @@
+from __future__ import absolute_import
+
+import logging
+import posixpath
+import six
+
+from symbolic.utils import make_buffered_slice_reader
+
+from sentry.event_manager import validate_and_set_timestamp
+from sentry.lang.native.error import write_error, SymbolicationFailed
+from sentry.lang.native.minidump import get_attached_minidump
+from sentry.lang.native.symbolicator import Symbolicator
+from sentry.lang.native.utils import get_sdk_from_event, native_images_from_data, \
+ is_native_platform, image_name, signal_from_data
+from sentry.models import Project, EventError
+from sentry.utils.in_app import is_known_third_party, is_optional_package
+from sentry.utils.safe import get_path, set_path, setdefault_path, trim
+from sentry.stacktraces.functions import trim_function_name
+from sentry.stacktraces.processing import find_stacktraces_in_data
+
+logger = logging.getLogger(__name__)
+
+
+IMAGE_STATUS_FIELDS = frozenset((
+ 'unwind_status',
+ 'debug_status',
+))
+
+
+def task_id_cache_key_for_event(data):
+ return u'symbolicator:{1}:{0}'.format(data['project'], data['event_id'])
+
+
+def _merge_frame(new_frame, symbolicated):
+ if symbolicated.get('function'):
+ raw_func = trim(symbolicated['function'], 256)
+ func = trim(trim_function_name(symbolicated['function'], 'native'), 256)
+
+ # if function and raw function match, we can get away without
+ # storing a raw function
+ if func == raw_func:
+ new_frame['function'] = raw_func
+ # otherwise we store both
+ else:
+ new_frame['raw_function'] = raw_func
+ new_frame['function'] = func
+ if symbolicated.get('instruction_addr'):
+ new_frame['instruction_addr'] = symbolicated['instruction_addr']
+ if symbolicated.get('symbol'):
+ new_frame['symbol'] = symbolicated['symbol']
+ if symbolicated.get('abs_path'):
+ new_frame['abs_path'] = symbolicated['abs_path']
+ new_frame['filename'] = posixpath.basename(symbolicated['abs_path'])
+ if symbolicated.get('filename'):
+ new_frame['filename'] = symbolicated['filename']
+ if symbolicated.get('lineno'):
+ new_frame['lineno'] = symbolicated['lineno']
+ if symbolicated.get('colno'):
+ new_frame['colno'] = symbolicated['colno']
+ if symbolicated.get('package'):
+ new_frame['package'] = symbolicated['package']
+ if symbolicated.get('trust'):
+ new_frame['trust'] = symbolicated['trust']
+ if symbolicated.get('status'):
+ frame_meta = new_frame.setdefault('data', {})
+ frame_meta['symbolicator_status'] = symbolicated['status']
+
+
+def _handle_image_status(status, image, sdk_info, handle_symbolication_failed):
+ if status in ('found', 'unused'):
+ return
+ elif status == 'missing':
+ package = image.get('code_file')
+ # TODO(mitsuhiko): This check seems wrong? This call seems to
+ # mirror the one in the ios symbol server support. If we change
+ # one we need to change the other.
+ if not package or is_known_third_party(package, sdk_info=sdk_info):
+ return
+
+ if is_optional_package(package, sdk_info=sdk_info):
+ error = SymbolicationFailed(
+ type=EventError.NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM)
+ else:
+ error = SymbolicationFailed(type=EventError.NATIVE_MISSING_DSYM)
+ elif status == 'malformed':
+ error = SymbolicationFailed(type=EventError.NATIVE_BAD_DSYM)
+ elif status == 'too_large':
+ error = SymbolicationFailed(type=EventError.FETCH_TOO_LARGE)
+ elif status == 'fetching_failed':
+ error = SymbolicationFailed(type=EventError.FETCH_GENERIC_ERROR)
+ elif status == 'other':
+ error = SymbolicationFailed(type=EventError.UNKNOWN_ERROR)
+ else:
+ logger.error("Unknown status: %s", status)
+ return
+
+ error.image_arch = image.get('arch')
+ error.image_path = image.get('code_file')
+ error.image_name = image_name(image.get('code_file'))
+ error.image_uuid = image.get('debug_id')
+ handle_symbolication_failed(error)
+
+
+def _merge_image(raw_image, complete_image, sdk_info, handle_symbolication_failed):
+ statuses = set()
+
+ # Set image data from symbolicator as symbolicator might know more
+ # than the SDK, especially for minidumps
+ for k, v in six.iteritems(complete_image):
+ if k in IMAGE_STATUS_FIELDS:
+ statuses.add(v)
+ elif not (v is None or (k, v) == ('arch', 'unknown')):
+ raw_image[k] = v
+
+ for status in set(statuses):
+ _handle_image_status(status, raw_image, sdk_info, handle_symbolication_failed)
+
+
+def _handle_response_status(event_data, response_json):
+ if not response_json:
+ error = SymbolicationFailed(type=EventError.NATIVE_INTERNAL_FAILURE)
+ elif response_json['status'] == 'completed':
+ return True
+ elif response_json['status'] == 'failed':
+ error = SymbolicationFailed(message=response_json.get('message') or None,
+ type=EventError.NATIVE_SYMBOLICATOR_FAILED)
+ else:
+ logger.error('Unexpected symbolicator status: %s', response_json['status'])
+ error = SymbolicationFailed(type=EventError.NATIVE_INTERNAL_FAILURE)
+
+ write_error(error, event_data)
+
+
+def _merge_system_info(data, system_info):
+ set_path(data, 'contexts', 'os', 'type', value='os') # Required by "get_sdk_from_event"
+ setdefault_path(data, 'contexts', 'os', 'name', value=system_info.get('os_name'))
+ setdefault_path(data, 'contexts', 'os', 'version', value=system_info.get('os_version'))
+ setdefault_path(data, 'contexts', 'os', 'build', value=system_info.get('os_build'))
+
+ set_path(data, 'contexts', 'device', 'type', value='device')
+ setdefault_path(data, 'contexts', 'device', 'arch', value=system_info.get('cpu_arch'))
+
+
+def _merge_minidump_response(data, response):
+ data['platform'] = 'native'
+ if response.get('crashed') is not None:
+ data['level'] = 'fatal' if response['crashed'] else 'info'
+
+ validate_and_set_timestamp(data, response.get('timestamp'))
+
+ if response.get('system_info'):
+ _merge_system_info(data, response['system_info'])
+
+ sdk_info = get_sdk_from_event(data)
+
+ images = []
+ set_path(data, 'debug_meta', 'images', value=images)
+
+ for complete_image in response['modules']:
+ image = {}
+ _merge_image(
+ image, complete_image, sdk_info,
+ lambda e: write_error(e, data)
+ )
+ images.append(image)
+
+ # Extract the crash reason and infos
+ data_exception = get_path(data, 'exception', 'values', 0)
+ exc_value = (
+ 'Assertion Error: %s' % response.get('assertion')
+ if response.get('assertion')
+ else 'Fatal Error: %s' % response.get('crash_reason')
+ )
+ data_exception['value'] = exc_value
+ data_exception['type'] = response.get('crash_reason')
+
+ data_threads = []
+ if response['stacktraces']:
+ data['threads'] = {'values': data_threads}
+ else:
+ error = SymbolicationFailed(message='minidump has no thread list',
+ type=EventError.NATIVE_SYMBOLICATOR_FAILED)
+ write_error(error, data)
+
+ for complete_stacktrace in response['stacktraces']:
+ is_requesting = complete_stacktrace.get('is_requesting')
+ thread_id = complete_stacktrace.get('thread_id')
+
+ data_thread = {
+ 'id': thread_id,
+ 'crashed': is_requesting,
+ }
+ data_threads.append(data_thread)
+
+ if is_requesting:
+ data_exception['thread_id'] = thread_id
+ data_stacktrace = data_exception.setdefault('stacktrace', {})
+ data_stacktrace['frames'] = []
+ else:
+ data_thread['stacktrace'] = data_stacktrace = {'frames': []}
+
+ if complete_stacktrace.get('registers'):
+ data_stacktrace['registers'] = complete_stacktrace['registers']
+
+ for complete_frame in reversed(complete_stacktrace['frames']):
+ new_frame = {}
+ _merge_frame(new_frame, complete_frame)
+ data_stacktrace['frames'].append(new_frame)
+
+
+def process_minidump(data):
+ project = Project.objects.get_from_cache(id=data['project'])
+
+ minidump = get_attached_minidump(data)
+
+ if not minidump:
+ logger.error("Missing minidump for minidump event")
+ return
+
+ task_id_cache_key = task_id_cache_key_for_event(data)
+
+ symbolicator = Symbolicator(
+ project=project,
+ task_id_cache_key=task_id_cache_key
+ )
+
+ response = symbolicator.process_minidump(make_buffered_slice_reader(minidump.data, None))
+
+ if _handle_response_status(data, response):
+ _merge_minidump_response(data, response)
+
+ return data
+
+
+def _handles_frame(data, frame):
+ if not frame:
+ return False
+
+ if get_path(frame, 'data', 'symbolicator_status') is not None:
+ return False
+
+ # TODO: Consider ignoring platform
+ platform = frame.get('platform') or data.get('platform')
+ return is_native_platform(platform) and 'instruction_addr' in frame
+
+
+def process_payload(data):
+ project = Project.objects.get_from_cache(id=data['project'])
+ task_id_cache_key = task_id_cache_key_for_event(data)
+
+ symbolicator = Symbolicator(
+ project=project,
+ task_id_cache_key=task_id_cache_key
+ )
+
+ stacktrace_infos = [
+ stacktrace
+ for stacktrace in find_stacktraces_in_data(data)
+ if any(is_native_platform(x) for x in stacktrace.platforms)
+ ]
+
+ stacktraces = [
+ {
+ 'registers': sinfo.stacktrace.get('registers') or {},
+ 'frames': [
+ f for f in reversed(sinfo.stacktrace.get('frames') or ())
+ if _handles_frame(data, f)
+ ]
+ }
+ for sinfo in stacktrace_infos
+ ]
+
+ if not any(stacktrace['frames'] for stacktrace in stacktraces):
+ return
+
+ modules = native_images_from_data(data)
+ signal = signal_from_data(data)
+
+ response = symbolicator.process_payload(
+ stacktraces=stacktraces,
+ modules=modules,
+ signal=signal,
+ )
+
+ if not _handle_response_status(data, response):
+ return data
+
+ assert len(modules) == len(response['modules']), (modules, response)
+
+ sdk_info = get_sdk_from_event(data)
+
+ for raw_image, complete_image in zip(modules, response['modules']):
+ _merge_image(
+ raw_image,
+ complete_image,
+ sdk_info,
+ lambda e: write_error(
+ e,
+ data))
+
+ assert len(stacktraces) == len(response['stacktraces']), (stacktraces, response)
+
+ for sinfo, complete_stacktrace in zip(stacktrace_infos, response['stacktraces']):
+ complete_frames_by_idx = {}
+ for complete_frame in complete_stacktrace.get('frames') or ():
+ complete_frames_by_idx \
+ .setdefault(complete_frame['original_index'], []) \
+ .append(complete_frame)
+
+ new_frames = []
+ native_frames_idx = 0
+
+ for raw_frame in reversed(sinfo.stacktrace['frames']):
+ if not _handles_frame(data, raw_frame):
+ new_frames.append(raw_frame)
+ continue
+
+ for complete_frame in complete_frames_by_idx.get(native_frames_idx) or ():
+ merged_frame = dict(raw_frame)
+ _merge_frame(merged_frame, complete_frame)
+ if merged_frame.get('package'):
+ raw_frame['package'] = merged_frame['package']
+ new_frames.append(merged_frame)
+
+ native_frames_idx += 1
+
+ if sinfo.container is not None and native_frames_idx > 0:
+ sinfo.container['raw_stacktrace'] = {
+ 'frames': list(sinfo.stacktrace['frames']),
+ 'registers': sinfo.stacktrace.get('registers')
+ }
+
+ new_frames.reverse()
+ sinfo.stacktrace['frames'] = new_frames
+
+ return data
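One subtlety in process_payload above is the original_index bookkeeping: a single raw frame can expand into several symbolicated frames (e.g. through inline expansion), so the response frames are grouped by the raw frame index they came from before being spliced back in reverse order. A tiny worked illustration with made-up frames:

complete_frames = [
    {'original_index': 0, 'function': 'main'},
    {'original_index': 1, 'function': 'inlined_callee'},
    {'original_index': 1, 'function': 'outer_caller'},
]

complete_frames_by_idx = {}
for frame in complete_frames:
    complete_frames_by_idx.setdefault(frame['original_index'], []).append(frame)

# Raw frame 1 expanded into two frames; raw frame 0 stayed one-to-one.
assert [f['function'] for f in complete_frames_by_idx[1]] == ['inlined_callee', 'outer_caller']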
diff --git a/src/sentry/lang/native/symbolicator.py b/src/sentry/lang/native/symbolicator.py
index 853d8c49bd4a98..51b390debab2b7 100644
--- a/src/sentry/lang/native/symbolicator.py
+++ b/src/sentry/lang/native/symbolicator.py
@@ -15,11 +15,7 @@
from sentry import options
from sentry.auth.system import get_system_token
from sentry.cache import default_cache
-from sentry.lang.native.error import SymbolicationFailed, write_error
-from sentry.lang.native.utils import image_name
-from sentry.models.eventerror import EventError
from sentry.utils import json, metrics
-from sentry.utils.in_app import is_known_third_party, is_optional_package
from sentry.net.http import Session
from sentry.tasks.store import RetrySymbolication
@@ -112,13 +108,6 @@
}
-IMAGE_STATUS_FIELDS = frozenset((
- 'status', # TODO(markus): Legacy key. Remove after next deploy
- 'unwind_status',
- 'debug_status'
-))
-
-
class Symbolicator(object):
def __init__(self, project, task_id_cache_key):
symbolicator_options = options.get('symbolicator.options')
@@ -292,77 +281,6 @@ def get_sources_for_project(project):
return sources
-def handle_symbolicator_response_status(event_data, response_json):
- if not response_json:
- error = SymbolicationFailed(type=EventError.NATIVE_INTERNAL_FAILURE)
- elif response_json['status'] == 'completed':
- return True
- elif response_json['status'] == 'failed':
- error = SymbolicationFailed(message=response_json.get('message') or None,
- type=EventError.NATIVE_SYMBOLICATOR_FAILED)
- else:
- logger.error('Unexpected symbolicator status: %s', response_json['status'])
- error = SymbolicationFailed(type=EventError.NATIVE_INTERNAL_FAILURE)
-
- write_error(error, event_data)
-
-
-def merge_symbolicator_image(raw_image, complete_image, sdk_info, handle_symbolication_failed):
- statuses = set()
-
- # Set image data from symbolicator as symbolicator might know more
- # than the SDK, especially for minidumps
- for k, v in six.iteritems(complete_image):
- if k in IMAGE_STATUS_FIELDS:
- statuses.add(v)
- elif not (v is None or (k, v) == ('arch', 'unknown')):
- raw_image[k] = v
-
- for status in set(statuses):
- handle_symbolicator_status(status, raw_image, sdk_info, handle_symbolication_failed)
-
-
-def handle_symbolicator_status(status, image, sdk_info, handle_symbolication_failed):
- if status in ('found', 'unused'):
- return
- elif status in (
- 'missing_debug_file', # TODO(markus): Legacy key. Remove after next deploy
- 'missing'
- ):
- package = image.get('code_file')
- # TODO(mitsuhiko): This check seems wrong? This call seems to
- # mirror the one in the ios symbol server support. If we change
- # one we need to change the other.
- if not package or is_known_third_party(package, sdk_info=sdk_info):
- return
-
- if is_optional_package(package, sdk_info=sdk_info):
- error = SymbolicationFailed(
- type=EventError.NATIVE_MISSING_OPTIONALLY_BUNDLED_DSYM)
- else:
- error = SymbolicationFailed(type=EventError.NATIVE_MISSING_DSYM)
- elif status in (
- 'malformed_debug_file', # TODO(markus): Legacy key. Remove after next deploy
- 'malformed'
- ):
- error = SymbolicationFailed(type=EventError.NATIVE_BAD_DSYM)
- elif status == 'too_large':
- error = SymbolicationFailed(type=EventError.FETCH_TOO_LARGE)
- elif status == 'fetching_failed':
- error = SymbolicationFailed(type=EventError.FETCH_GENERIC_ERROR)
- elif status == 'other':
- error = SymbolicationFailed(type=EventError.UNKNOWN_ERROR)
- else:
- logger.error("Unknown status: %s", status)
- return
-
- error.image_arch = image.get('arch')
- error.image_path = image.get('code_file')
- error.image_name = image_name(image.get('code_file'))
- error.image_uuid = image.get('debug_id')
- handle_symbolication_failed(error)
-
-
class SymbolicatorSession(object):
def __init__(self, url=None, sources=None, project_id=None, timeout=None):
self.url = url
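The handle_symbolicator_status logic deleted here survives as _handle_image_status in the new processing module; most of it is a status-to-error mapping. A table-driven restatement for reference, with string constants standing in for the EventError types and the 'missing' case (which also consults SDK info and package heuristics) left out:

IMAGE_STATUS_ERRORS = {
    'malformed': 'native_bad_dsym',
    'too_large': 'fetch_too_large',
    'fetching_failed': 'fetch_generic_error',
    'other': 'unknown_error',
}


def error_for_image_status(status):
    if status in ('found', 'unused'):
        return None  # healthy statuses produce no event error
    # Unknown statuses yield None here; the real code only logs them.
    return IMAGE_STATUS_ERRORS.get(status)


assert error_for_image_status('found') is None
assert error_for_image_status('too_large') == 'fetch_too_large'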
diff --git a/src/sentry/lang/native/systemsymbols.py b/src/sentry/lang/native/systemsymbols.py
deleted file mode 100644
index 02c41d19014e10..00000000000000
--- a/src/sentry/lang/native/systemsymbols.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from __future__ import absolute_import
-
-import time
-import logging
-
-from requests.exceptions import RequestException
-
-from sentry import options
-from sentry.net.http import Session
-from sentry.lang.native.utils import sdk_info_to_sdk_id
-
-MAX_ATTEMPTS = 3
-
-logger = logging.getLogger(__name__)
-
-
-def lookup_system_symbols(symbols, sdk_info=None, cpu_name=None):
- """Looks for system symbols in the configured system server if
- enabled. If this fails or the server is disabled, `None` is
- returned.
- """
- if not options.get('symbolserver.enabled'):
- return
-
- url = '%s/lookup' % options.get('symbolserver.options')['url'].rstrip('/')
- sess = Session()
- symbol_query = {
- 'sdk_id': sdk_info_to_sdk_id(sdk_info),
- 'cpu_name': cpu_name,
- 'symbols': symbols,
- }
-
- attempts = 0
- wait = 0.5
-
- with sess:
- while 1:
- try:
- rv = sess.post(url, json=symbol_query)
- # If the symbols server does not know about the SDK at all
- # it will report a 404 here. In that case just assume
- # that we did not find a match and do not retry.
- if rv.status_code == 404:
- return None
- rv.raise_for_status()
- return rv.json()['symbols']
- except (IOError, RequestException):
- attempts += 1
- if attempts > MAX_ATTEMPTS:
- logger.error('Failed to contact system symbol server', exc_info=True)
- return
- time.sleep(wait)
- wait *= 2.0
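The deleted lookup_system_symbols wrapped its HTTP call in capped retries with exponential backoff. That loop pattern, extracted as a standalone sketch — do_request is a hypothetical stand-in for the session call:

import time

MAX_ATTEMPTS = 3


def call_with_backoff(do_request, wait=0.5):
    attempts = 0
    while True:
        try:
            return do_request()
        except IOError:
            attempts += 1
            if attempts > MAX_ATTEMPTS:
                raise  # the original logged the failure and gave up here
            time.sleep(wait)
            wait *= 2.0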
diff --git a/src/sentry/lang/native/unreal.py b/src/sentry/lang/native/unreal.py
index b5e657b77eb40f..4d3a6cc030b177 100644
--- a/src/sentry/lang/native/unreal.py
+++ b/src/sentry/lang/native/unreal.py
@@ -2,13 +2,7 @@
from symbolic import Unreal4Crash
from sentry.lang.native.minidump import MINIDUMP_ATTACHMENT_TYPE
from sentry.models import UserReport
-from sentry.utils.safe import set_path, setdefault_path, get_path
-
-
-def is_unreal_event(data):
- """Whether this event is an Unreal crash that should be processed in
- enhancers. For the legacy codepath this still returns False."""
- return get_path(data, 'contexts', 'unreal', 'type') == 'unreal'
+from sentry.utils.safe import set_path, setdefault_path
def process_unreal_crash(payload, user_id, environment, event):
diff --git a/src/sentry/lang/native/utils.py b/src/sentry/lang/native/utils.py
index 6389922cc1e290..e94f2783166fdb 100644
--- a/src/sentry/lang/native/utils.py
+++ b/src/sentry/lang/native/utils.py
@@ -3,15 +3,9 @@
import re
import six
import logging
-import posixpath
-from collections import namedtuple
-from symbolic import parse_addr
-
-from sentry.interfaces.contexts import DeviceContextType
-from sentry.stacktraces.functions import trim_function_name
from sentry.stacktraces.processing import find_stacktraces_in_data
-from sentry.utils.safe import get_path, trim
+from sentry.utils.safe import get_path
logger = logging.getLogger(__name__)
@@ -33,8 +27,6 @@
'pe' # Windows
)
-AppInfo = namedtuple('AppInfo', ['id', 'version', 'build', 'name'])
-
def is_native_platform(platform):
return platform in NATIVE_PLATFORMS
@@ -64,6 +56,11 @@ def is_native_event(data):
return False
+def is_minidump_event(data):
+ exceptions = get_path(data, 'exception', 'values', filter=True)
+ return get_path(exceptions, 0, 'mechanism', 'type') in ('minidump', 'unreal')
+
+
def image_name(pkg):
if not pkg:
return pkg
@@ -100,32 +97,6 @@ def get_sdk_from_os(data):
}
-def cpu_name_from_data(data):
- """Returns the CPU name from the given data if it exists."""
- device = DeviceContextType.primary_value_for_data(data)
- if device and device.get('arch'):
- return device['arch']
-
- return None
-
-
-def rebase_addr(instr_addr, obj):
- return parse_addr(instr_addr) - parse_addr(obj.addr)
-
-
-def sdk_info_to_sdk_id(sdk_info):
- if sdk_info is None:
- return None
- rv = '%s_%d.%d.%d' % (
- sdk_info['sdk_name'], sdk_info['version_major'], sdk_info['version_minor'],
- sdk_info['version_patchlevel'],
- )
- build = sdk_info.get('build')
- if build is not None:
- rv = '%s_%s' % (rv, build)
- return rv
-
-
def signal_from_data(data):
exceptions = get_path(data, 'exception', 'values', filter=True)
signal = get_path(exceptions, 0, 'mechanism', 'meta', 'signal', 'number')
@@ -133,38 +104,3 @@ def signal_from_data(data):
return int(signal)
return None
-
-
-def merge_symbolicated_frame(new_frame, sfrm):
- if sfrm.get('function'):
- raw_func = trim(sfrm['function'], 256)
- func = trim(trim_function_name(sfrm['function'], 'native'), 256)
-
- # if function and raw function match, we can get away without
- # storing a raw function
- if func == raw_func:
- new_frame['function'] = raw_func
- # otherwise we store both
- else:
- new_frame['raw_function'] = raw_func
- new_frame['function'] = func
- if sfrm.get('instruction_addr'):
- new_frame['instruction_addr'] = sfrm['instruction_addr']
- if sfrm.get('symbol'):
- new_frame['symbol'] = sfrm['symbol']
- if sfrm.get('abs_path'):
- new_frame['abs_path'] = sfrm['abs_path']
- new_frame['filename'] = posixpath.basename(sfrm['abs_path'])
- if sfrm.get('filename'):
- new_frame['filename'] = sfrm['filename']
- if sfrm.get('lineno'):
- new_frame['lineno'] = sfrm['lineno']
- if sfrm.get('colno'):
- new_frame['colno'] = sfrm['colno']
- if sfrm.get('package'):
- new_frame['package'] = sfrm['package']
- if sfrm.get('trust'):
- new_frame['trust'] = sfrm['trust']
- if sfrm.get('status'):
- frame_meta = new_frame.setdefault('data', {})
- frame_meta['symbolicator_status'] = sfrm['status']
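is_minidump_event now lives in utils (see the hunk above); its whole job is to read the first exception mechanism. A simplified equivalent that inlines the get_path behavior rather than importing sentry, exercising the two boundary cases the relocated tests below also cover:

def is_minidump_event(data):
    values = (data.get('exception') or {}).get('values') or []
    first = next((v for v in values if v), None) or {}
    mechanism = first.get('mechanism') or {}
    return mechanism.get('type') in ('minidump', 'unreal')


assert is_minidump_event({'exception': {'values': [{'mechanism': {'type': 'minidump'}}]}})
assert not is_minidump_event({'exception': None})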
diff --git a/tests/sentry/lang/native/test_minidump.py b/tests/sentry/lang/native/test_minidump.py
index 0342f6f7ed2305..11ead686a0a297 100644
--- a/tests/sentry/lang/native/test_minidump.py
+++ b/tests/sentry/lang/native/test_minidump.py
@@ -1,69 +1,7 @@
from __future__ import absolute_import
import io
import msgpack
-from sentry.lang.native.minidump import merge_attached_breadcrumbs, is_minidump_event, merge_attached_event
-
-
-def test_is_minidump():
- assert is_minidump_event({
- 'exception': {
- 'values': [{
- 'mechanism': {
- 'type': 'minidump'
- }
- }]
- }
- })
-
- assert not is_minidump_event({
- 'exception': {
- 'values': [{
- 'mechanism': {
- 'type': 'other'
- }
- }]
- }
- })
-
- assert not is_minidump_event({
- 'exception': {
- 'values': [{
- 'mechanism': {
- 'type': None
- }
- }]
- }
- })
-
- assert not is_minidump_event({
- 'exception': {
- 'values': [{
- 'mechanism': None
- }]
- }
- })
-
- assert not is_minidump_event({
- 'exception': {
- 'values': [None]
- }
- })
-
- assert not is_minidump_event({
- 'exception': {
- 'values': []
- }
- })
-
- assert not is_minidump_event({
- 'exception': {
- 'values': None
- }
- })
-
- assert not is_minidump_event({
- 'exception': None
- })
+from sentry.lang.native.minidump import merge_attached_breadcrumbs, merge_attached_event
class MockFile(object):
diff --git a/tests/sentry/lang/native/test_symbolicator.py b/tests/sentry/lang/native/test_processing.py
similarity index 84%
rename from tests/sentry/lang/native/test_symbolicator.py
rename to tests/sentry/lang/native/test_processing.py
index dc4e2af1d11518..3a700c08ce2b51 100644
--- a/tests/sentry/lang/native/test_symbolicator.py
+++ b/tests/sentry/lang/native/test_processing.py
@@ -9,12 +9,12 @@
from sentry.models.eventerror import EventError
-from sentry.lang.native.symbolicator import merge_symbolicator_image
+from sentry.lang.native.processing import _merge_image
def test_merge_symbolicator_image_empty():
errors = []
- merge_symbolicator_image({}, {}, None, errors.append)
+ _merge_image({}, {}, None, errors.append)
assert not errors
@@ -29,7 +29,7 @@ def test_merge_symbolicator_image_basic():
}
errors = []
- merge_symbolicator_image(raw_image, complete_image, sdk_info, errors.append)
+ _merge_image(raw_image, complete_image, sdk_info, errors.append)
assert not errors
assert raw_image == {"instruction_addr": 0xFEEBEE, "other": "foo", "other2": "bar"}
@@ -46,7 +46,7 @@ def test_merge_symbolicator_image_basic_success():
}
errors = []
- merge_symbolicator_image(raw_image, complete_image, sdk_info, errors.append)
+ _merge_image(raw_image, complete_image, sdk_info, errors.append)
assert not errors
assert raw_image == {
@@ -67,7 +67,7 @@ def test_merge_symbolicator_image_remove_unknown_arch():
}
errors = []
- merge_symbolicator_image(raw_image, complete_image, sdk_info, errors.append)
+ _merge_image(raw_image, complete_image, sdk_info, errors.append)
assert not errors
assert raw_image == {"instruction_addr": 0xFEEBEE}
@@ -92,7 +92,7 @@ def test_merge_symbolicator_image_errors(code_file, error):
}
errors = []
- merge_symbolicator_image(raw_image, complete_image, sdk_info, errors.append)
+ _merge_image(raw_image, complete_image, sdk_info, errors.append)
e, = errors
diff --git a/tests/sentry/lang/native/test_utils.py b/tests/sentry/lang/native/test_utils.py
index 358c1c3a075cb2..9258b7576205d1 100644
--- a/tests/sentry/lang/native/test_utils.py
+++ b/tests/sentry/lang/native/test_utils.py
@@ -1,6 +1,6 @@
from __future__ import absolute_import
-from sentry.lang.native.utils import get_sdk_from_event, cpu_name_from_data
+from sentry.lang.native.utils import get_sdk_from_event, is_minidump_event
def test_get_sdk_from_event():
@@ -39,35 +39,63 @@ def test_get_sdk_from_event():
assert sdk_info['version_patchlevel'] == 1
-def test_cpu_name_from_data():
- cpu_name = cpu_name_from_data(
- {
- 'contexts': {
- 'device': {
- 'type': 'device',
- 'arch': 'arm64'
- },
- 'device2': {
- 'type': 'device',
- 'arch': 'arm7'
- },
- }
+def test_is_minidump():
+ assert is_minidump_event({
+ 'exception': {
+ 'values': [{
+ 'mechanism': {
+ 'type': 'minidump'
+ }
+ }]
}
- )
-
- assert cpu_name == 'arm64'
+ })
+ assert not is_minidump_event({
+ 'exception': {
+ 'values': [{
+ 'mechanism': {
+ 'type': 'other'
+ }
+ }]
+ }
+ })
-def test_cpu_name_from_data_inferred_type():
- cpu_name = cpu_name_from_data(
- {
- 'contexts': {
- 'some_device': {
- 'type': 'device',
- 'arch': 'arm64'
+ assert not is_minidump_event({
+ 'exception': {
+ 'values': [{
+ 'mechanism': {
+ 'type': None
}
- }
+ }]
}
- )
+ })
+
+ assert not is_minidump_event({
+ 'exception': {
+ 'values': [{
+ 'mechanism': None
+ }]
+ }
+ })
+
+ assert not is_minidump_event({
+ 'exception': {
+ 'values': [None]
+ }
+ })
+
+ assert not is_minidump_event({
+ 'exception': {
+ 'values': []
+ }
+ })
+
+ assert not is_minidump_event({
+ 'exception': {
+ 'values': None
+ }
+ })
- assert cpu_name == 'arm64'
+ assert not is_minidump_event({
+ 'exception': None
+ })
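Taken together, these assertions pin down is_minidump_event as a defensive lookup: it is true only when the first exception value carries a mechanism of type 'minidump', and every level (the exception, its values list, each entry, and the mechanism) may be None or empty without raising. A minimal sketch consistent with the assertions above, as an illustration rather than the shipped implementation in sentry.lang.native.utils:

def is_minidump_event_sketch(data):
    # Guard every level: each of these may be absent, None, or empty.
    values = (data.get('exception') or {}).get('values') or []
    exc = values[0] if values else None
    mechanism = (exc or {}).get('mechanism') or {}
    return mechanism.get('type') == 'minidump'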
diff --git a/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_full_minidump.pysnap b/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_full_minidump.pysnap
index 8514f4961875e6..f5168b64c23b8c 100644
--- a/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_full_minidump.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_full_minidump.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-05-29T11:21:05.608393Z'
+created: '2019-06-03T14:50:05.902803Z'
creator: sentry
source: tests/symbolicator/test_minidump_full.py
---
@@ -144,59 +144,6 @@ exception:
handled: false
synthetic: true
type: minidump
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: cfi
- - abs_path: f:\dd\vctools\crt\vcstartup\src\startup\exe_common.inl
- data:
- symbolicator_status: symbolicated
- filename: exe_common.inl
- function: __scrt_common_main_seh
- in_app: false
- instruction_addr: '0x2a2d96'
- lineno: 283
- package: C:\projects\breakpad-tools\windows\Release\crash.exe
- symbol: __scrt_common_main_seh
- trust: cfi
- - abs_path: c:\projects\breakpad-tools\windows\crash\main.cpp
- data:
- symbolicator_status: symbolicated
- filename: main.cpp
- function: main
- in_app: false
- instruction_addr: '0x2a2a3d'
- lineno: 35
- package: C:\projects\breakpad-tools\windows\Release\crash.exe
- symbol: main
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x10ff670'
- ebx: '0xfe5000'
- ecx: '0x10ff670'
- edi: '0x13bfd78'
- edx: '0x7'
- eflags: '0x10246'
- eip: '0x2a2a3d'
- esi: '0x759c6314'
- esp: '0x10ff644'
stacktrace:
frames:
- data:
@@ -260,43 +207,6 @@ threads:
id: 1636
- crashed: false
id: 3580
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771e016c'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x159faa4'
- ebx: '0x13b0990'
- ecx: '0x0'
- edi: '0x13b4af0'
- edx: '0x0'
- eflags: '0x216'
- eip: '0x771e016c'
- esi: '0x13b4930'
- esp: '0x159f900'
stacktrace:
frames:
- data:
@@ -336,43 +246,6 @@ threads:
esp: '0x159f900'
- crashed: false
id: 2600
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771e016c'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x169fb98'
- ebx: '0x13b0990'
- ecx: '0x0'
- edi: '0x13b7c28'
- edx: '0x0'
- eflags: '0x202'
- eip: '0x771e016c'
- esi: '0x13b7a68'
- esp: '0x169f9f4'
stacktrace:
frames:
- data:
@@ -412,25 +285,6 @@ threads:
esp: '0x169f9f4'
- crashed: false
id: 2920
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771df3dc'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x179f2b8'
- ebx: '0x17b1aa0'
- ecx: '0x0'
- edi: '0x17b1a90'
- edx: '0x0'
- eflags: '0x206'
- eip: '0x771df3dc'
- esi: '0x2cc'
- esp: '0x179f2ac'
stacktrace:
frames:
- data:
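The test_full_minidump.pysnap hunk above and the snapshot-only hunks that follow record a single behavioral shift across three fixtures: minidump events no longer duplicate a raw_stacktrace next to the symbolicated stacktrace; an event where nothing symbolicates keeps one stacktrace instead of two identical copies; and raw frames that do survive (in the Unreal fixture further below) drop their per-frame symbolicator_status annotation. A hedged sketch of that last, mechanical part; the helper name and event shape are assumptions, not the actual code path:

def strip_raw_frame_status(event):
    # Remove per-frame symbolicator metadata from raw stacktraces; the
    # symbolicated 'stacktrace' keeps it. Mirrors the snapshot hunks here.
    for thread in (event.get('threads') or {}).get('values') or []:
        for frame in (thread.get('raw_stacktrace') or {}).get('frames') or []:
            data = frame.get('data') or {}
            data.pop('symbolicator_status', None)
            if not data:
                frame.pop('data', None)  # drop the now-empty container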
diff --git a/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_missing_dsym.pysnap b/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_missing_dsym.pysnap
index 23461afcc1b8ea..b9cb7c64d37138 100644
--- a/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_missing_dsym.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_missing_dsym.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-05-28T09:16:05.541318Z'
+created: '2019-06-03T14:50:09.219100Z'
creator: sentry
source: tests/symbolicator/test_minidump_full.py
---
@@ -147,7 +147,7 @@ exception:
handled: false
synthetic: true
type: minidump
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -232,49 +232,6 @@ exception:
eip: '0x2a2a3d'
esi: '0x759c6314'
esp: '0x10ff644'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x2a28d0'
- package: C:\projects\breakpad-tools\windows\Release\crash.exe
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x2a2a3d'
- package: C:\projects\breakpad-tools\windows\Release\crash.exe
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x10ff670'
- ebx: '0xfe5000'
- ecx: '0x10ff670'
- edi: '0x13bfd78'
- edx: '0x7'
- eflags: '0x10246'
- eip: '0x2a2a3d'
- esi: '0x759c6314'
- esp: '0x10ff644'
thread_id: 1636
type: EXCEPTION_ACCESS_VIOLATION_WRITE
value: 'Fatal Error: EXCEPTION_ACCESS_VIOLATION_WRITE'
@@ -285,43 +242,6 @@ threads:
id: 1636
- crashed: false
id: 3580
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771e016c'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x159faa4'
- ebx: '0x13b0990'
- ecx: '0x0'
- edi: '0x13b4af0'
- edx: '0x0'
- eflags: '0x216'
- eip: '0x771e016c'
- esi: '0x13b4930'
- esp: '0x159f900'
stacktrace:
frames:
- data:
@@ -361,43 +281,6 @@ threads:
esp: '0x159f900'
- crashed: false
id: 2600
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771e016c'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x169fb98'
- ebx: '0x13b0990'
- ecx: '0x0'
- edi: '0x13b7c28'
- edx: '0x0'
- eflags: '0x202'
- eip: '0x771e016c'
- esi: '0x13b7a68'
- esp: '0x169f9f4'
stacktrace:
frames:
- data:
@@ -437,25 +320,6 @@ threads:
esp: '0x169f9f4'
- crashed: false
id: 2920
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771df3dc'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x179f2b8'
- ebx: '0x17b1aa0'
- ecx: '0x0'
- edi: '0x17b1a90'
- edx: '0x0'
- eflags: '0x206'
- eip: '0x771df3dc'
- esi: '0x2cc'
- esp: '0x179f2ac'
stacktrace:
frames:
- data:
diff --git a/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_raw_minidump.pysnap b/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_raw_minidump.pysnap
index 9720fa8f12af00..891e87776ff5dc 100644
--- a/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_raw_minidump.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorMinidumpIntegrationTest/test_raw_minidump.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-06-03T10:14:01.816927Z'
+created: '2019-06-03T14:58:29.235537Z'
creator: sentry
source: tests/symbolicator/test_minidump_full.py
---
@@ -144,59 +144,6 @@ exception:
handled: false
synthetic: true
type: minidump
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: cfi
- - abs_path: f:\dd\vctools\crt\vcstartup\src\startup\exe_common.inl
- data:
- symbolicator_status: symbolicated
- filename: exe_common.inl
- function: __scrt_common_main_seh
- in_app: false
- instruction_addr: '0x2a2d96'
- lineno: 283
- package: C:\projects\breakpad-tools\windows\Release\crash.exe
- symbol: __scrt_common_main_seh
- trust: cfi
- - abs_path: c:\projects\breakpad-tools\windows\crash\main.cpp
- data:
- symbolicator_status: symbolicated
- filename: main.cpp
- function: main
- in_app: false
- instruction_addr: '0x2a2a3d'
- lineno: 35
- package: C:\projects\breakpad-tools\windows\Release\crash.exe
- symbol: main
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x10ff670'
- ebx: '0xfe5000'
- ecx: '0x10ff670'
- edi: '0x13bfd78'
- edx: '0x7'
- eflags: '0x10246'
- eip: '0x2a2a3d'
- esi: '0x759c6314'
- esp: '0x10ff644'
stacktrace:
frames:
- data:
@@ -260,43 +207,6 @@ threads:
id: 1636
- crashed: false
id: 3580
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771e016c'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x159faa4'
- ebx: '0x13b0990'
- ecx: '0x0'
- edi: '0x13b4af0'
- edx: '0x0'
- eflags: '0x216'
- eip: '0x771e016c'
- esi: '0x13b4930'
- esp: '0x159f900'
stacktrace:
frames:
- data:
@@ -336,43 +246,6 @@ threads:
esp: '0x159f900'
- crashed: false
id: 2600
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f44'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771d0f79'
- package: C:\Windows\System32\ntdll.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x750662c4'
- package: C:\Windows\System32\kernel32.dll
- trust: fp
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771e016c'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x169fb98'
- ebx: '0x13b0990'
- ecx: '0x0'
- edi: '0x13b7c28'
- edx: '0x0'
- eflags: '0x202'
- eip: '0x771e016c'
- esi: '0x13b7a68'
- esp: '0x169f9f4'
stacktrace:
frames:
- data:
@@ -412,25 +285,6 @@ threads:
esp: '0x169f9f4'
- crashed: false
id: 2920
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x771df3dc'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- eax: '0x0'
- ebp: '0x179f2b8'
- ebx: '0x17b1aa0'
- ecx: '0x0'
- edi: '0x17b1a90'
- edx: '0x0'
- eflags: '0x206'
- eip: '0x771df3dc'
- esi: '0x2cc'
- esp: '0x179f2ac'
stacktrace:
frames:
- data:
diff --git a/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_debug_images.pysnap b/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_debug_images.pysnap
index cf8400dd5e305d..8ec7f85e399186 100644
--- a/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_debug_images.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_debug_images.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-06-03T10:36:59.097805Z'
+created: '2019-06-03T15:00:16.825909Z'
creator: sentry
source: tests/symbolicator/test_payload_full.py
---
diff --git a/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_dsym.pysnap b/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_dsym.pysnap
index 64b07790c4a419..845f5acaa7aa23 100644
--- a/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_dsym.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_missing_dsym.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-06-03T10:37:01.851037Z'
+created: '2019-06-03T15:00:19.524785Z'
creator: sentry
source: tests/symbolicator/test_payload_full.py
---
@@ -31,9 +31,7 @@ exception:
- function: hi
in_app: false
platform: foobar
- - data:
- symbolicator_status: missing
- function: unknown
+ - function: unknown
in_app: true
instruction_addr: '0x100000fa0'
package: Foo.app/Contents/Foo
diff --git a/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_real_resolving.pysnap b/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_real_resolving.pysnap
index 932acc407903ac..b8481c4c2aa394 100644
--- a/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_real_resolving.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorResolvingIntegrationTest/test_real_resolving.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-06-03T10:37:04.436235Z'
+created: '2019-06-03T15:00:21.958338Z'
creator: sentry
source: tests/symbolicator/test_payload_full.py
---
diff --git a/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_apple_crash_with_attachments.pysnap b/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_apple_crash_with_attachments.pysnap
index f718f2f2ad99a4..885079c4d943cf 100644
--- a/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_apple_crash_with_attachments.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_apple_crash_with_attachments.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-05-27T11:48:28.187262Z'
+created: '2019-06-03T14:50:25.482449Z'
creator: sentry
source: tests/symbolicator/test_unreal_full.py
---
@@ -114,69 +114,43 @@ threads:
id: 0
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61a8e085'
package: /usr/lib/system/libdyld.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108b702a6'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108b7092b'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff31f4375d'
package: /System/Library/Frameworks/AppKit.framework/Versions/C/AppKit
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff31f496fa'
package: /System/Library/Frameworks/AppKit.framework/Versions/C/AppKit
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff31f4a95b'
package: /System/Library/Frameworks/AppKit.framework/Versions/C/AppKit
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff33c8d348'
package: /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/HIToolbox.framework/Versions/A/HIToolbox
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff33c8d5cb'
package: /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/HIToolbox.framework/Versions/A/HIToolbox
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff33c8d895'
package: /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/HIToolbox.framework/Versions/A/HIToolbox
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff349f3ce4'
package: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff349f45ad'
package: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff349f505e'
package: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc6c2a'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -250,18 +224,12 @@ threads:
id: 1
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x54485244'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -284,18 +252,12 @@ threads:
id: 2
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x54485244'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -318,14 +280,10 @@ threads:
id: 3
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -344,18 +302,12 @@ threads:
id: 4
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x54485244'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -378,99 +330,61 @@ threads:
id: 5
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff36d4a234'
package: /System/Library/Frameworks/Foundation.framework/Versions/C/Foundation
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1095aab27'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108b6fd8e'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108b65169'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108b55d76'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10c28647e'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10c2a6f41'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10ce48e4a'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10ce4f8f5'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10cfadb9e'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10c2e4621'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10cfe80be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10bc5eb21'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10bc5fc29'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10bc3b85b'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1090a0132'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
registers:
@@ -618,44 +532,28 @@ threads:
id: 6
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff31f53581'
package: /System/Library/Frameworks/AppKit.framework/Versions/C/AppKit
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff349f3ce4'
package: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff349f45ad'
package: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff349f505e'
package: /System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc6c2a'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -704,54 +602,34 @@ threads:
id: 7
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -810,54 +688,34 @@ threads:
id: 8
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -916,54 +774,34 @@ threads:
id: 9
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1022,54 +860,34 @@ threads:
id: 10
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1128,54 +946,34 @@ threads:
id: 11
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1234,54 +1032,34 @@ threads:
id: 12
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1340,54 +1118,34 @@ threads:
id: 13
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1446,54 +1204,34 @@ threads:
id: 14
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1552,54 +1290,34 @@ threads:
id: 15
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109432156'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094312e8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109433182'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1658,59 +1376,37 @@ threads:
id: 16
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1097a01a8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109430351'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109434103'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094358c3'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1774,44 +1470,28 @@ threads:
id: 17
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1860,44 +1540,28 @@ threads:
id: 18
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -1946,44 +1610,28 @@ threads:
id: 19
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2032,44 +1680,28 @@ threads:
id: 20
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2118,44 +1750,28 @@ threads:
id: 21
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2204,18 +1820,12 @@ threads:
id: 22
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x54485244'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2238,18 +1848,12 @@ threads:
id: 23
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x54485244'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2272,18 +1876,12 @@ threads:
id: 24
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x70001989cba0'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2306,18 +1904,12 @@ threads:
id: 25
raw_stacktrace:
frames:
- - data:
- symbolicator_status: unknown_image
- in_app: false
+ - in_app: false
instruction_addr: '0x54485244'
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f415'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc85be'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2340,44 +1932,28 @@ threads:
id: 26
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1096b1e4d'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61b55724'
package: /usr/lib/system/libsystem_c.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca876'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2426,44 +2002,28 @@ threads:
id: 27
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108e4c4c5'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2512,44 +2072,28 @@ threads:
id: 28
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2598,44 +2142,28 @@ threads:
id: 29
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2684,44 +2212,28 @@ threads:
id: 30
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2770,44 +2282,28 @@ threads:
id: 31
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2856,44 +2352,28 @@ threads:
id: 32
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10a44a1eb'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -2942,59 +2422,37 @@ threads:
id: 33
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10a874ab6'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109430351'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109434103'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094358c3'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3058,94 +2516,58 @@ threads:
id: 34
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10a875ba7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10a86402c'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109430351'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109434103'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109435e23'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10c7c83da'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10c7f8302'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1093cd746'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1093c61d9'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1093c3c67'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1092abd6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109266ff1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c74969'
package: /usr/lib/system/libsystem_platform.dylib
stacktrace:
@@ -3244,49 +2666,31 @@ threads:
id: 35
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10a87c821'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109481b6e'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61b55724'
package: /usr/lib/system/libsystem_c.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca876'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3340,44 +2744,28 @@ threads:
id: 36
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3426,44 +2814,28 @@ threads:
id: 37
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3512,44 +2884,28 @@ threads:
id: 38
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3598,44 +2954,28 @@ threads:
id: 39
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3684,44 +3024,28 @@ threads:
id: 40
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3770,44 +3094,28 @@ threads:
id: 41
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3856,44 +3164,28 @@ threads:
id: 42
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -3942,44 +3234,28 @@ threads:
id: 43
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x108ddca01'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4028,44 +3304,28 @@ threads:
id: 44
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10bbea8b8'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4114,44 +3374,28 @@ threads:
id: 45
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10dc214e1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4200,44 +3444,28 @@ threads:
id: 46
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4286,44 +3514,28 @@ threads:
id: 47
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4372,44 +3584,28 @@ threads:
id: 48
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4458,44 +3654,28 @@ threads:
id: 49
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4544,44 +3724,28 @@ threads:
id: 50
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4630,44 +3794,28 @@ threads:
id: 51
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10951d7f1'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482d68'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4716,34 +3864,22 @@ threads:
id: 52
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61a4db5a'
package: /usr/lib/system/libdispatch.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61a40b19'
package: /usr/lib/system/libdispatch.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc6c7e'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4782,49 +3918,31 @@ threads:
id: 53
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff344410d6'
package: /System/Library/Frameworks/CoreAudio.framework/Versions/A/CoreAudio
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff344415ee'
package: /System/Library/Frameworks/CoreAudio.framework/Versions/A/CoreAudio
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff34441b84'
package: /System/Library/Frameworks/CoreAudio.framework/Versions/A/CoreAudio
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff3444580d'
package: /System/Library/Frameworks/CoreAudio.framework/Versions/A/CoreAudio
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff3444589a'
package: /System/Library/Frameworks/CoreAudio.framework/Versions/A/CoreAudio
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bc6c2a'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
@@ -4878,59 +3996,37 @@ threads:
id: 54
raw_stacktrace:
frames:
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c7f425'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c832a7'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61c8033d'
package: /usr/lib/system/libsystem_pthread.dylib
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094ba0b7'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094dd7be'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x10be5f7ce'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109430351'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109434103'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x1094358c3'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: true
+ - in_app: true
instruction_addr: '0x109482f6f'
package: /Users/bruno/Documents/Unreal Projects/YetAnotherMac/MacNoEditor/YetAnotherMac.app/Contents/MacOS/YetAnotherMac
- - data:
- symbolicator_status: missing
- in_app: false
+ - in_app: false
instruction_addr: '0x7fff61bca1b2'
package: /usr/lib/system/libsystem_kernel.dylib
stacktrace:
diff --git a/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_crash_with_attachments.pysnap b/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_crash_with_attachments.pysnap
index c4e7bf44dcb16a..cc63cb7d8aff93 100644
--- a/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_crash_with_attachments.pysnap
+++ b/tests/symbolicator/snapshots/SymbolicatorUnrealIntegrationTest/test_unreal_crash_with_attachments.pysnap
@@ -1,5 +1,5 @@
---
-created: '2019-05-29T12:11:45.277288Z'
+created: '2019-06-03T14:50:28.969003Z'
creator: sentry
source: tests/symbolicator/test_unreal_full.py
---
@@ -143,7 +143,7 @@ threads:
values:
- crashed: false
id: 248
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -193,35 +193,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c3f2cc448'
rsp: '0x8c3f2cc418'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ff7544e0000'
- r11: '0x7ff7589ca88b'
- r12: '0x7ff7544e0000'
- r13: '0x8c3f2cf910'
- r14: '0x26c'
- r15: '0x8c3f2cc670'
- r8: '0x8c3f2cccc0'
- r9: '0x8c3f2cc670'
- rax: '0x4'
- rbp: '0x8c3f2cf910'
- rbx: '0x0'
- rcx: '0x26c'
- rdi: '0x26c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c3f2cc448'
- rsp: '0x8c3f2cc418'
- crashed: false
id: 9772
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -259,35 +233,9 @@ threads:
rip: '0x7ffe100fd854'
rsi: '0x7ffe10083140'
rsp: '0x8c3f79fb48'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fd854'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x6c'
- r11: '0x8c3f79f140'
- r12: '0x0'
- r13: '0x23df7323810'
- r14: '0x7ffe10080a60'
- r15: '0x7ffe10081350'
- r8: '0x8c3f79f3a0'
- r9: '0x23df732ca30'
- rax: '0x1cb'
- rbp: '0x0'
- rbx: '0x23df7325070'
- rcx: '0x50'
- rdi: '0x10'
- rdx: '0x23df7325070'
- rip: '0x7ffe100fd854'
- rsi: '0x7ffe10083140'
- rsp: '0x8c3f79fb48'
- crashed: false
id: 8188
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -325,35 +273,9 @@ threads:
rip: '0x7ffe100fd854'
rsi: '0x7ffe10083140'
rsp: '0x8c3fc6f838'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fd854'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23d833cc5b0'
- r11: '0x23d833ce4b0'
- r12: '0x0'
- r13: '0x23df7323810'
- r14: '0x7ffe10080a60'
- r15: '0x7ffe10081350'
- r8: '0x8'
- r9: '0x1'
- rax: '0x1cb'
- rbp: '0x0'
- rbx: '0x23df7326fd0'
- rcx: '0x50'
- rdi: '0x10'
- rdx: '0x23df7326fd0'
- rip: '0x7ffe100fd854'
- rsi: '0x7ffe10083140'
- rsp: '0x8c3fc6f838'
- crashed: false
id: 10188
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -391,37 +313,11 @@ threads:
rip: '0x7ffe100fd854'
rsi: '0x7ffe10083140'
rsp: '0x8c4013f6b8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fd854'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df7290cc0'
- r11: '0x7ffe10178b20'
- r12: '0x0'
- r13: '0x23df7323810'
- r14: '0x7ffe10080a60'
- r15: '0x7ffe10081350'
- r8: '0x23d83413970'
- r9: '0x1b0'
- rax: '0x1cb'
- rbp: '0x0'
- rbx: '0x23df7327370'
- rcx: '0x50'
- rdi: '0x10'
- rdx: '0x23df7327370'
- rip: '0x7ffe100fd854'
- rsi: '0x7ffe10083140'
- rsp: '0x8c4013f6b8'
- crashed: true
id: 6900
- crashed: false
id: 5200
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -471,35 +367,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c3ebbf438'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x1'
- r13: '0x23df8d19f20'
- r14: '0x2a0'
- r15: '0x23df9a304c8'
- r8: '0x1'
- r9: '0x8c3ebbf410'
- rax: '0x4'
- rbp: '0x8c3ebbf5a9'
- rbx: '0x0'
- rcx: '0x2a0'
- rdi: '0x2a0'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c3ebbf438'
- crashed: false
id: 9648
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -549,35 +419,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4068f738'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x1'
- r13: '0x23df8d19f60'
- r14: '0x2a4'
- r15: '0x23df9a304c8'
- r8: '0x8c4068f738'
- r9: '0x8c4068f8a9'
- rax: '0x4'
- rbp: '0x8c4068f8a9'
- rbx: '0x0'
- rcx: '0x2a4'
- rdi: '0x2a4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4068f738'
- crashed: false
id: 4372
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -627,35 +471,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4070f778'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x23d8e930000'
- r12: '0x1'
- r13: '0x23df8d19800'
- r14: '0x2a8'
- r15: '0x23df9a304c8'
- r8: '0x10000'
- r9: '0xcdcdcdcdcdcdcdcd'
- rax: '0x4'
- rbp: '0x8c4070f8e9'
- rbx: '0x0'
- rcx: '0x2a8'
- rdi: '0x2a8'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4070f778'
- crashed: false
id: 10628
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -699,35 +517,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4078f858'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ff758ce10a8'
- r11: '0x23d8d30edc0'
- r12: '0x1'
- r13: '0x23df8d197a0'
- r14: '0x2ac'
- r15: '0x23df9a304c8'
- r8: '0x1240'
- r9: '0xdddddddddddddddd'
- rax: '0x4'
- rbp: '0x8c4078f9c9'
- rbx: '0x0'
- rcx: '0x2ac'
- rdi: '0x2ac'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4078f858'
- crashed: false
id: 11280
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -783,35 +575,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4080f468'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ff758ce10a8'
- r11: '0x23d8d48edc0'
- r12: '0x1'
- r13: '0x23df8d197e0'
- r14: '0x2b0'
- r15: '0x23df9a304c8'
- r8: '0x1240'
- r9: '0xdddddddddddddddd'
- rax: '0x4'
- rbp: '0x8c4080f5d9'
- rbx: '0x0'
- rcx: '0x2b0'
- rdi: '0x2b0'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4080f468'
- crashed: false
id: 2432
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -861,40 +627,14 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4088f808'
+ - crashed: false
+ id: 6680
stacktrace:
frames:
- data:
symbolicator_status: missing
in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df732f648'
- r11: '0x23df732f638'
- r12: '0x1'
- r13: '0x23df8d19860'
- r14: '0x2b4'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0x0'
- rax: '0x4'
- rbp: '0x8c4088f979'
- rbx: '0x0'
- rcx: '0x2b4'
- rdi: '0x2b4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4088f808'
- - crashed: false
- id: 6680
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100d1471'
+ instruction_addr: '0x7ffe100d1471'
package: C:\Windows\System32\ntdll.dll
trust: scan
- data:
@@ -945,35 +685,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4090f958'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df7346090'
- r11: '0x23d829847a0'
- r12: '0x1'
- r13: '0x23df8d19cc0'
- r14: '0x2b8'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0x1'
- rax: '0x4'
- rbp: '0x8c4090fac9'
- rbx: '0x0'
- rcx: '0x2b8'
- rdi: '0x2b8'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4090f958'
- crashed: false
id: 6492
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1029,35 +743,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4098fa68'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x23d8d180000'
- r12: '0x1'
- r13: '0x23df8d19c80'
- r14: '0x2bc'
- r15: '0x23df9a304c8'
- r8: '0x60000'
- r9: '0xcdcdcdcdcdcdcdcd'
- rax: '0x4'
- rbp: '0x8c4098fbd9'
- rbx: '0x0'
- rcx: '0x2bc'
- rdi: '0x2bc'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4098fa68'
- crashed: false
id: 6080
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1113,35 +801,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40a0f2f8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x23d8d180000'
- r12: '0x1'
- r13: '0x23df8d19c40'
- r14: '0x2c0'
- r15: '0x23df9a304c8'
- r8: '0x23d8d077638'
- r9: '0x40'
- rax: '0x4'
- rbp: '0x8c40a0f469'
- rbx: '0x0'
- rcx: '0x2c0'
- rdi: '0x2c0'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40a0f2f8'
- crashed: false
id: 6984
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1197,35 +859,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40a8f868'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x1'
- r13: '0x23df8d19c00'
- r14: '0x2c4'
- r15: '0x23df9a304c8'
- r8: '0x23df8f10000'
- r9: '0x10000'
- rax: '0x4'
- rbp: '0x8c40a8f9d9'
- rbx: '0x0'
- rcx: '0x2c4'
- rdi: '0x2c4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40a8f868'
- crashed: false
id: 10192
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1263,35 +899,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40b0f5c8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df7346090'
- r11: '0x8c40b0eef0'
- r12: '0x1'
- r13: '0x23df8d19bc0'
- r14: '0x2c8'
- r15: '0x23df9a304c8'
- r8: '0x2'
- r9: '0x2'
- rax: '0x4'
- rbp: '0x8c40b0f739'
- rbx: '0x0'
- rcx: '0x2c8'
- rdi: '0x2c8'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40b0f5c8'
- crashed: false
id: 11120
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1329,35 +939,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40b8fa78'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x23d82351d80'
- r12: '0x1'
- r13: '0x23df8d19b80'
- r14: '0x2cc'
- r15: '0x23df9a304c8'
- r8: '0x1'
- r9: '0x18'
- rax: '0x4'
- rbp: '0x8c40b8fbe9'
- rbx: '0x0'
- rcx: '0x2cc'
- rdi: '0x2cc'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40b8fa78'
- crashed: false
id: 4872
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1389,35 +973,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40c0fa78'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ff758ce10a8'
- r11: '0x8c40c0f4a8'
- r12: '0x1'
- r13: '0x23df8d1a380'
- r14: '0x2d0'
- r15: '0x23df9a304c8'
- r8: '0x6'
- r9: '0x8'
- rax: '0x4'
- rbp: '0x8c40c0fbe9'
- rbx: '0x0'
- rcx: '0x2d0'
- rdi: '0x2d0'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40c0fa78'
- crashed: false
id: 4160
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1455,35 +1013,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40c8f838'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ff758ce10a8'
- r11: '0x8c40c8f268'
- r12: '0x1'
- r13: '0x23df8d1a340'
- r14: '0x2d4'
- r15: '0x23df9a304c8'
- r8: '0x23df8fb6b80'
- r9: '0x2'
- rax: '0x4'
- rbp: '0x8c40c8f9a9'
- rbx: '0x0'
- rcx: '0x2d4'
- rdi: '0x2d4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40c8f838'
- crashed: false
id: 11048
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1521,40 +1053,14 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40d0f8a8'
+ - crashed: false
+ id: 7512
stacktrace:
frames:
- data:
symbolicator_status: missing
in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ff758ce10a8'
- r11: '0x23d8d490080'
- r12: '0x1'
- r13: '0x23df8d1a300'
- r14: '0x2d8'
- r15: '0x23df9a304c8'
- r8: '0x1240'
- r9: '0xdddddddddddddddd'
- rax: '0x4'
- rbp: '0x8c40d0fa19'
- rbx: '0x0'
- rcx: '0x2d8'
- rdi: '0x2d8'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40d0f8a8'
- - crashed: false
- id: 7512
- raw_stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100d1471'
+ instruction_addr: '0x7ffe100d1471'
package: C:\Windows\System32\ntdll.dll
trust: scan
- data:
@@ -1599,35 +1105,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c40d8f9f8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x23d8e630000'
- r12: '0x0'
- r13: '0x23df8d19840'
- r14: '0x6c'
- r15: '0x23df9a304c8'
- r8: '0x10000'
- r9: '0xdddddddddddddddd'
- rax: '0x4'
- rbp: '0x8c40d8fb69'
- rbx: '0x0'
- rcx: '0x6c'
- rdi: '0x6c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c40d8f9f8'
- crashed: false
id: 8684
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1671,35 +1151,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c3ebdfab8'
rsp: '0x8c3ebdfa88'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23df918ca40'
- r14: '0x308'
- r15: '0x23df9a304c8'
- r8: '0x8c3ebdfa88'
- r9: '0x8c3ebdfbf9'
- rax: '0x4'
- rbp: '0x8c3ebdfbf9'
- rbx: '0x0'
- rcx: '0x308'
- rdi: '0x308'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c3ebdfab8'
- rsp: '0x8c3ebdfa88'
- crashed: false
id: 5444
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1743,35 +1197,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c3ebff488'
rsp: '0x8c3ebff458'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23df918cac0'
- r14: '0x310'
- r15: '0x23df9a304c8'
- r8: '0x8c3ebff458'
- r9: '0x8c3ebff5c9'
- rax: '0x4'
- rbp: '0x8c3ebff5c9'
- rbx: '0x0'
- rcx: '0x310'
- rdi: '0x310'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c3ebff488'
- rsp: '0x8c3ebff458'
- crashed: false
id: 12064
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1815,35 +1243,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c40dafb28'
rsp: '0x8c40dafaf8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23df918cbc0'
- r14: '0x318'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c40dafc69'
- rbx: '0x0'
- rcx: '0x318'
- rdi: '0x318'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c40dafb28'
- rsp: '0x8c40dafaf8'
- crashed: false
id: 468
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1887,35 +1289,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c40dcf6c8'
rsp: '0x8c40dcf698'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23df918cba0'
- r14: '0x320'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c40dcf809'
- rbx: '0x0'
- rcx: '0x320'
- rdi: '0x320'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c40dcf6c8'
- rsp: '0x8c40dcf698'
- crashed: false
id: 8276
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -1959,35 +1335,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c40def448'
rsp: '0x8c40def418'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x8c40def4b8'
- r12: '0x0'
- r13: '0x23df918cc40'
- r14: '0x328'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c40def589'
- rbx: '0x0'
- rcx: '0x328'
- rdi: '0x328'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c40def448'
- rsp: '0x8c40def418'
- crashed: false
id: 7604
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2031,35 +1381,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c40e0f588'
rsp: '0x8c40e0f558'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23df918cb00'
- r14: '0x330'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c40e0f6c9'
- rbx: '0x0'
- rcx: '0x330'
- rdi: '0x330'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c40e0f588'
- rsp: '0x8c40e0f558'
- crashed: false
id: 8056
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2103,35 +1427,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c40e2f868'
rsp: '0x8c40e2f838'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x8c40e2f8d8'
- r12: '0x0'
- r13: '0x23df918ccc0'
- r14: '0x338'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c40e2f9a9'
- rbx: '0x0'
- rcx: '0x338'
- rdi: '0x338'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c40e2f868'
- rsp: '0x8c40e2f838'
- crashed: false
id: 7540
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2175,35 +1473,9 @@ threads:
rip: '0x7ffe100fa584'
rsi: '0x0'
rsp: '0x8c412ff828'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fa584'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x0'
- r14: '0x0'
- r15: '0x0'
- r8: '0x8c412ff828'
- r9: '0x0'
- rax: '0x34'
- rbp: '0x0'
- rbx: '0x0'
- rcx: '0x0'
- rdi: '0xa'
- rdx: '0x8c412ff850'
- rip: '0x7ffe100fa584'
- rsi: '0x0'
- rsp: '0x8c412ff828'
- crashed: false
id: 9920
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2247,35 +1519,9 @@ threads:
rip: '0x7ffe100fa584'
rsi: '0x0'
rsp: '0x8c4131fd08'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fa584'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x0'
- r14: '0x0'
- r15: '0x0'
- r8: '0x8c4131fd08'
- r9: '0x0'
- rax: '0x34'
- rbp: '0x0'
- rbx: '0x0'
- rcx: '0x0'
- rdi: '0x21'
- rdx: '0x8c4131fd30'
- rip: '0x7ffe100fa584'
- rsi: '0x0'
- rsp: '0x8c4131fd08'
- crashed: false
id: 4264
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2319,35 +1565,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4133f728'
rsp: '0x8c4133f6f8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23df90dfa80'
- r14: '0x3d4'
- r15: '0x23df9a304c8'
- r8: '0x8c4133f6f8'
- r9: '0x8c4133f869'
- rax: '0x4'
- rbp: '0x8c4133f869'
- rbx: '0x0'
- rcx: '0x3d4'
- rdi: '0x3d4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4133f728'
- rsp: '0x8c4133f6f8'
- crashed: false
id: 2548
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2391,35 +1611,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4135fb98'
rsp: '0x8c4135fb68'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x23d8e5b0000'
- r12: '0x0'
- r13: '0x23df90decc0'
- r14: '0x460'
- r15: '0x23df9a304c8'
- r8: '0x10000'
- r9: '0xcdcdcdcdcdcdcdcd'
- rax: '0x4'
- rbp: '0x8c4135fcd9'
- rbx: '0x0'
- rcx: '0x460'
- rdi: '0x460'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4135fb98'
- rsp: '0x8c4135fb68'
- crashed: false
id: 3060
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2463,35 +1657,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4137fa18'
rsp: '0x8c4137f9e8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x8c4137fa88'
- r12: '0x0'
- r13: '0x23df90deee0'
- r14: '0x46c'
- r15: '0x23df9a304c8'
- r8: '0x8c4137fa18'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c4137fb59'
- rbx: '0x0'
- rcx: '0x46c'
- rdi: '0x46c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4137fa18'
- rsp: '0x8c4137f9e8'
- crashed: false
id: 664
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2535,35 +1703,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4139fa38'
rsp: '0x8c4139fa08'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23df90dd3e0'
- r14: '0x474'
- r15: '0x23df9a304c8'
- r8: '0x8c4139fa83'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c4139fb79'
- rbx: '0x0'
- rcx: '0x474'
- rdi: '0x474'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4139fa38'
- rsp: '0x8c4139fa08'
- crashed: false
id: 3028
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2607,35 +1749,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c413bf8f8'
rsp: '0x8c413bf8c8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23df90df340'
- r14: '0x47c'
- r15: '0x23df9a304c8'
- r8: '0x8c413bf8c8'
- r9: '0x8c413bfa39'
- rax: '0x4'
- rbp: '0x8c413bfa39'
- rbx: '0x0'
- rcx: '0x47c'
- rdi: '0x47c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c413bf8f8'
- rsp: '0x8c413bf8c8'
- crashed: false
id: 964
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2667,35 +1783,9 @@ threads:
rip: '0x7ffe100fd854'
rsi: '0x7ffe10083140'
rsp: '0x8c4223f9f8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fd854'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df7327fc8'
- r11: '0x7ffe10178b20'
- r12: '0x0'
- r13: '0x23df7320b30'
- r14: '0x7ffe10080a60'
- r15: '0x7ffe10081350'
- r8: '0x1'
- r9: '0x23d83380000'
- rax: '0x1cb'
- rbp: '0x0'
- rbx: '0x23dfa7338e0'
- rcx: '0x10'
- rdi: '0x10'
- rdx: '0x23dfa7338e0'
- rip: '0x7ffe100fd854'
- rsi: '0x7ffe10083140'
- rsp: '0x8c4223f9f8'
- crashed: false
id: 9124
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2739,35 +1829,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c418bfc08'
rsp: '0x8c418bfbd8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23d8235b880'
- r14: '0x694'
- r15: '0x23df9a304c8'
- r8: '0x8c418bfb83'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c418bfd49'
- rbx: '0x0'
- rcx: '0x694'
- rdi: '0x694'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c418bfc08'
- rsp: '0x8c418bfbd8'
- crashed: false
id: 9264
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2811,35 +1875,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4270fa48'
rsp: '0x8c4270fa18'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23d821716c0'
- r14: '0x4f4'
- r15: '0x23df9a304c8'
- r8: '0x8c4270fc08'
- r9: '0x0'
- rax: '0x4'
- rbp: '0x8c4270fb89'
- rbx: '0x0'
- rcx: '0x4f4'
- rdi: '0x4f4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4270fa48'
- rsp: '0x8c4270fa18'
- crashed: false
id: 7528
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -2877,35 +1915,9 @@ threads:
rip: '0x7ffe100fd854'
rsi: '0x7ffe10083140'
rsp: '0x8c42bdfa78'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fd854'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x7ffe00d599b6'
- r12: '0x0'
- r13: '0x23df7320b30'
- r14: '0x7ffe10080a60'
- r15: '0x7ffe10081350'
- r8: '0x50'
- r9: '0x0'
- rax: '0x1cb'
- rbp: '0x0'
- rbx: '0x23d833a5830'
- rcx: '0x10'
- rdi: '0x10'
- rdx: '0x23d833a5830'
- rip: '0x7ffe100fd854'
- rsi: '0x7ffe10083140'
- rsp: '0x8c42bdfa78'
- crashed: false
id: 4136
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3105,35 +2117,9 @@ threads:
rip: '0x7ffe100faa54'
rsi: '0x0'
rsp: '0x8c430af608'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100faa54'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x0'
- r12: '0x0'
- r13: '0x8c430af960'
- r14: '0x0'
- r15: '0x0'
- r8: '0x97'
- r9: '0x0'
- rax: '0x5b'
- rbp: '0x650'
- rbx: '0x1'
- rcx: '0x1'
- rdi: '0x1'
- rdx: '0x8c430af960'
- rip: '0x7ffe100faa54'
- rsi: '0x0'
- rsp: '0x8c430af608'
- crashed: false
id: 10520
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3177,35 +2163,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c418df688'
rsp: '0x8c418df658'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23d8235c4e0'
- r14: '0x5a0'
- r15: '0x23df9a304c8'
- r8: '0x8c418df683'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c418df7c9'
- rbx: '0x0'
- rcx: '0x5a0'
- rdi: '0x5a0'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c418df688'
- rsp: '0x8c418df658'
- crashed: false
id: 10828
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3249,35 +2209,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c418ff998'
rsp: '0x8c418ff968'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23d823598a0'
- r14: '0x4e0'
- r15: '0x23df9a304c8'
- r8: '0x8c418ff983'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c418ffad9'
- rbx: '0x0'
- rcx: '0x4e0'
- rdi: '0x4e0'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c418ff998'
- rsp: '0x8c418ff968'
- crashed: false
id: 6428
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3321,35 +2255,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4191f678'
rsp: '0x8c4191f648'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23d82359820'
- r14: '0x4d4'
- r15: '0x23df9a304c8'
- r8: '0x8c4191f683'
- r9: '0x8c4191f7b9'
- rax: '0x4'
- rbp: '0x8c4191f7b9'
- rbx: '0x0'
- rcx: '0x4d4'
- rdi: '0x4d4'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4191f678'
- rsp: '0x8c4191f648'
- crashed: false
id: 11276
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3393,80 +2301,28 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4193f7b8'
rsp: '0x8c4193f788'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23d823597e0'
- r14: '0x64c'
- r15: '0x23df9a304c8'
- r8: '0x8c4193f788'
- r9: '0x8c4193f8f9'
- rax: '0x4'
- rbp: '0x8c4193f8f9'
- rbx: '0x0'
- rcx: '0x64c'
- rdi: '0x64c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4193f7b8'
- rsp: '0x8c4193f788'
- crashed: false
id: 11076
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
in_app: false
instruction_addr: '0x7ffe100d1471'
package: C:\Windows\System32\ntdll.dll
- trust: scan
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe0fd53034'
- package: C:\Windows\System32\kernel32.dll
- trust: scan
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe0ca99252'
- package: C:\Windows\System32\KERNELBASE.dll
- trust: scan
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x8c4195f7a8'
- r12: '0x0'
- r13: '0x23d823597a0'
- r14: '0x804'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c4195f879'
- rbx: '0x0'
- rcx: '0x804'
- rdi: '0x804'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4195f738'
- rsp: '0x8c4195f708'
- stacktrace:
- frames:
+ trust: scan
+ - data:
+ symbolicator_status: missing
+ in_app: false
+ instruction_addr: '0x7ffe0fd53034'
+ package: C:\Windows\System32\kernel32.dll
+ trust: scan
+ - data:
+ symbolicator_status: missing
+ in_app: false
+ instruction_addr: '0x7ffe0ca99252'
+ package: C:\Windows\System32\KERNELBASE.dll
+ trust: scan
- data:
symbolicator_status: missing
in_app: false
@@ -3493,7 +2349,7 @@ threads:
rsp: '0x8c4195f708'
- crashed: false
id: 9748
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3537,35 +2393,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4197f9f8'
rsp: '0x8c4197f9c8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x23d82359000'
- r14: '0x80c'
- r15: '0x23df9a304c8'
- r8: '0x8c4197f9c8'
- r9: '0x8c4197fb39'
- rax: '0x4'
- rbp: '0x8c4197fb39'
- rbx: '0x0'
- rcx: '0x80c'
- rdi: '0x80c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4197f9f8'
- rsp: '0x8c4197f9c8'
- crashed: false
id: 6820
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3609,35 +2439,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c4199fac8'
rsp: '0x8c4199fa98'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23d82359760'
- r14: '0x814'
- r15: '0x23df9a304c8'
- r8: '0x8c4199fa83'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c4199fc09'
- rbx: '0x0'
- rcx: '0x814'
- rdi: '0x814'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c4199fac8'
- rsp: '0x8c4199fa98'
- crashed: false
id: 5932
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3681,35 +2485,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c419bf6d8'
rsp: '0x8c419bf6a8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23d823592c0'
- r14: '0x81c'
- r15: '0x23df9a304c8'
- r8: '0x8c419bf683'
- r9: '0xfffffffffe7f6361'
- rax: '0x4'
- rbp: '0x8c419bf819'
- rbx: '0x0'
- rcx: '0x81c'
- rdi: '0x81c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c419bf6d8'
- rsp: '0x8c419bf6a8'
- crashed: false
id: 10672
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3741,35 +2519,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x8c419df628'
rsp: '0x8c419df5f8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df7346090'
- r11: '0x8c419df480'
- r12: '0x0'
- r13: '0x23dfc965440'
- r14: '0x54c'
- r15: '0x23df9a304c8'
- r8: '0x23d82170c60'
- r9: '0x20'
- rax: '0x4'
- rbp: '0x8c419df769'
- rbx: '0x0'
- rcx: '0x54c'
- rdi: '0x54c'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x8c419df628'
- rsp: '0x8c419df5f8'
- crashed: false
id: 3096
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3813,35 +2565,9 @@ threads:
rip: '0x7ffe100fa584'
rsi: '0x0'
rsp: '0x8c419efb58'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fa584'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x0'
- r11: '0x246'
- r12: '0x0'
- r13: '0x0'
- r14: '0x0'
- r15: '0x23df9a31050'
- r8: '0x8c419efd38'
- r9: '0x0'
- rax: '0x34'
- rbp: '0x8c419efcc9'
- rbx: '0x0'
- rcx: '0x0'
- rdi: '0x3e8'
- rdx: '0x8c419efb80'
- rip: '0x7ffe100fa584'
- rsi: '0x0'
- rsp: '0x8c419efb58'
- crashed: false
id: 10944
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -3963,35 +2689,9 @@ threads:
rip: '0x7ffe100faa54'
rsi: '0x0'
rsp: '0x8c4357f4f8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100faa54'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x23d8d4b3848'
- r12: '0x0'
- r13: '0x23d8d4b1088'
- r14: '0x0'
- r15: '0x0'
- r8: '0x0'
- r9: '0x8c4357f730'
- rax: '0x5b'
- rbp: '0x8c4357f940'
- rbx: '0x3'
- rcx: '0x3'
- rdi: '0x3'
- rdx: '0x23d8d4b1088'
- rip: '0x7ffe100faa54'
- rsi: '0x0'
- rsp: '0x8c4357f4f8'
- crashed: false
id: 7648
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -4023,35 +2723,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c43a4f4d8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x23df7346090'
- r11: '0x8c43a4eff0'
- r12: '0x1'
- r13: '0x23df8d19d60'
- r14: '0x288'
- r15: '0x23df9a304c8'
- r8: '0x0'
- r9: '0x1'
- rax: '0x4'
- rbp: '0x8c43a4f649'
- rbx: '0x0'
- rcx: '0x288'
- rdi: '0x288'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c43a4f4d8'
- crashed: false
id: 612
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -4095,35 +2769,9 @@ threads:
rip: '0x7ffe100f9f84'
rsi: '0x0'
rsp: '0x8c4188f7a8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100f9f84'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x23df8d19ea0'
- r14: '0x298'
- r15: '0x23df9a304c8'
- r8: '0x1'
- r9: '0x7ff75a3e20d0'
- rax: '0x4'
- rbp: '0x8c4188f919'
- rbx: '0x0'
- rcx: '0x298'
- rdi: '0x298'
- rdx: '0x0'
- rip: '0x7ffe100f9f84'
- rsi: '0x0'
- rsp: '0x8c4188f7a8'
- crashed: false
id: 10900
- raw_stacktrace:
+ stacktrace:
frames:
- data:
symbolicator_status: missing
@@ -4167,29 +2815,3 @@ threads:
rip: '0x7ffe100fa584'
rsi: '0x0'
rsp: '0x8c4189fba8'
- stacktrace:
- frames:
- - data:
- symbolicator_status: missing
- in_app: false
- instruction_addr: '0x7ffe100fa584'
- package: C:\Windows\System32\ntdll.dll
- trust: context
- registers:
- r10: '0x7ffef000'
- r11: '0x1'
- r12: '0x0'
- r13: '0x7ff758a67a48'
- r14: '0x0'
- r15: '0x23df9a31050'
- r8: '0xff'
- r9: '0xfffffffffe7f6361'
- rax: '0x34'
- rbp: '0x8c4189fd19'
- rbx: '0x0'
- rcx: '0x0'
- rdi: '0x6'
- rdx: '0x8c4189fbd0'
- rip: '0x7ffe100fa584'
- rsi: '0x0'
- rsp: '0x8c4189fba8'
|
1511d8fcf7530a8ab890bc3b6290eebfbb22d908
|
2023-05-17 00:18:25
|
Malachi Willey
|
fix(issue-details): Fix memoization function so additional data can change (#49215)
| false
|
Fix memoization function so additional data can change (#49215)
|
fix
|
diff --git a/static/app/components/events/eventExtraData/index.tsx b/static/app/components/events/eventExtraData/index.tsx
index 878ebc459bcca0..61bd7ebbda29a2 100644
--- a/static/app/components/events/eventExtraData/index.tsx
+++ b/static/app/components/events/eventExtraData/index.tsx
@@ -57,5 +57,5 @@ export const EventExtraData = memo(
</EventDataSection>
);
},
- (prevProps: Props, nextProps: Props) => prevProps.event.id !== nextProps.event.id
+ (prevProps: Props, nextProps: Props) => prevProps.event.id === nextProps.event.id
);
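
React.memo's second argument is an equality check: returning true tells React the props are equal and the re-render can be skipped. The original `!==` comparator inverted that contract, so the component re-rendered only when the event id stayed the same and skipped updates when the event actually changed. A minimal sketch of that contract in Python, using a hypothetical memo helper rather than React's actual implementation:

def memo(render, are_equal):
    # Cache the last props/output pair; reuse the output when are_equal
    # reports the props as equal (i.e. "skip the re-render").
    last = {}
    def wrapped(props):
        if "props" in last and are_equal(last["props"], props):
            return last["out"]
        last["props"], last["out"] = props, render(props)
        return last["out"]
    return wrapped

render = memo(lambda p: f"event {p['id']}", lambda a, b: a["id"] == b["id"])
assert render({"id": 1}) is render({"id": 1})  # equal ids -> cached output reused
assert render({"id": 2}) == "event 2"          # changed id -> recomputed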
|
e60e27729ac47c7edd5507a13dcc3fcc6a2fb7e4
|
2023-12-06 02:12:12
|
anthony sottile
|
ref: fix typing for AggregateFilter.key (#61129)
| false
|
fix typing for AggregateFilter.key (#61129)
|
ref
|
diff --git a/pyproject.toml b/pyproject.toml
index 61b2fd853a2422..784366041a85b7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -705,7 +705,6 @@ module = [
"tests.sentry.api.test_authentication",
"tests.sentry.api.test_base",
"tests.sentry.api.test_event_search",
- "tests.sentry.api.test_issue_search",
"tests.sentry.digests.test_notifications",
"tests.sentry.eventstore.test_base",
"tests.sentry.grouping.test_result",
diff --git a/src/sentry/api/event_search.py b/src/sentry/api/event_search.py
index 13b9be7a84f4a9..eb7150f2a78d46 100644
--- a/src/sentry/api/event_search.py
+++ b/src/sentry/api/event_search.py
@@ -437,8 +437,12 @@ def is_in_filter(self) -> bool:
return self.operator in ("IN", "NOT IN")
+class AggregateKey(NamedTuple):
+ name: str
+
+
class AggregateFilter(NamedTuple):
- key: SearchKey
+ key: AggregateKey
operator: str
value: SearchValue
@@ -446,10 +450,6 @@ def __str__(self):
return f"{self.key.name}{self.operator}{self.value.raw_value}"
-class AggregateKey(NamedTuple):
- name: str
-
-
@dataclass
class SearchConfig:
"""
|
cc6e75f118a8cac5f2818043f19cfbad9faecf4b
|
2023-08-02 23:19:15
|
Alex Zaslavsky
|
meta(gitignore): Ignore user-specific VSCode settings (#54036)
| false
|
Ignore user-specific VSCode settings (#54036)
|
meta
|
diff --git a/.gitignore b/.gitignore
index 5f3cf7146a1d6f..3186d2769492a8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
.env
.cache/
+.code-workspace
.coverage*
.DS_Store
.venv
|
705f001366618545f4868de69b3613d933af6e84
|
2022-10-25 02:11:23
|
Megan Heskett
|
ref(ui): Remove processed transaction data category (#40462)
| false
|
Remove processed transaction data category (#40462)
|
ref
|
diff --git a/static/app/constants/index.tsx b/static/app/constants/index.tsx
index 14a6408a261da7..a81094fd7c72e8 100644
--- a/static/app/constants/index.tsx
+++ b/static/app/constants/index.tsx
@@ -216,7 +216,6 @@ export const DATA_CATEGORY_NAMES = {
[DataCategory.ERRORS]: t('Errors'),
[DataCategory.TRANSACTIONS]: t('Transactions'),
[DataCategory.ATTACHMENTS]: t('Attachments'),
- [DataCategory.TRANSACTIONS_PROCESSED]: t('Processed Transactions'),
[DataCategory.PROFILES]: t('Profiles'),
};
diff --git a/static/app/types/core.tsx b/static/app/types/core.tsx
index 4e83c7b6628b36..9075b58c5b78cc 100644
--- a/static/app/types/core.tsx
+++ b/static/app/types/core.tsx
@@ -68,7 +68,6 @@ export enum DataCategory {
ERRORS = 'errors',
TRANSACTIONS = 'transactions',
ATTACHMENTS = 'attachments',
- TRANSACTIONS_PROCESSED = 'transactionsProcessed',
PROFILES = 'profiles',
}
|
23e53a88486789b6e8d6144c17f214c3135e538c
|
2023-06-01 22:32:32
|
Scott Cooper
|
fix(escalating): Update archive/resolve tooltip copy (#50168)
| false
|
Update archive/resolve tooltip copy (#50168)
|
fix
|
diff --git a/static/app/components/actions/archive.tsx b/static/app/components/actions/archive.tsx
index 2702667e43c143..009c92e7aabe9a 100644
--- a/static/app/components/actions/archive.tsx
+++ b/static/app/components/actions/archive.tsx
@@ -116,7 +116,7 @@ function ArchiveActions({
<ArchiveButton
size={size}
tooltipProps={{delay: 1000, disabled}}
- title={t('Hides the issue until the sh*t hits the fan and events escalate.')}
+ title={t('Archive issue until a high number of events are seen.')}
onClick={() => onArchive(ARCHIVE_UNTIL_ESCALATING)}
disabled={disabled}
>
diff --git a/static/app/components/actions/resolve.tsx b/static/app/components/actions/resolve.tsx
index c4ad847676e6da..afac9053edd4d5 100644
--- a/static/app/components/actions/resolve.tsx
+++ b/static/app/components/actions/resolve.tsx
@@ -239,7 +239,7 @@ function ResolveActions({
<ResolveButton
priority={priority}
size={size}
- title={t("We'll nag you with a notification if the issue's seen again.")}
+ title={t("We'll nag you with a notification if another event is seen.")}
tooltipProps={{delay: 1000, disabled}}
onClick={() =>
openConfirmModal({
|
15af691eed501b63a4073977dc9b470564f1d5b9
|
2023-10-26 21:24:28
|
John
|
feat(metrics): Add ability for indexer cache to write new schema (#58414)
| false
|
Add ability for indexer cache to write new schema (#58414)
|
feat
|
diff --git a/src/sentry/sentry_metrics/indexer/cache.py b/src/sentry/sentry_metrics/indexer/cache.py
index c7577157ff4095..9806b1056dd0b9 100644
--- a/src/sentry/sentry_metrics/indexer/cache.py
+++ b/src/sentry/sentry_metrics/indexer/cache.py
@@ -2,6 +2,7 @@
import logging
import random
+from datetime import datetime
from typing import Collection, Iterable, Mapping, MutableMapping, Optional, Sequence, Set
from django.conf import settings
@@ -33,7 +34,10 @@
_INDEXER_CACHE_FETCH_METRIC = "sentry_metrics.indexer.memcache.fetch"
-BULK_RECORD_CACHE_NAME_SPACE = "br"
+NAMESPACED_WRITE_FEAT_FLAG = "sentry-metrics.indexer.write-new-cache-namespace"
+NAMESPACED_READ_FEAT_FLAG = "sentry-metrics.indexer.read-new-cache-namespace"
+
+BULK_RECORD_CACHE_NAMESPACE = "br"
RESOLVE_CACHE_NAMESPACE = "res"
@@ -67,6 +71,9 @@ def _make_namespaced_cache_key(self, namespace: str, key: str) -> str:
return f"indexer:{self.partition_key}:{namespace}:org:str:{use_case_id}:{hashed}"
+ def _make_cache_val(self, val: int, timestamp: int):
+ return f"{val}:{timestamp}"
+
def _format_results(
self, keys: Iterable[str], results: Mapping[str, Optional[int]]
) -> MutableMapping[str, Optional[int]]:
@@ -109,11 +116,10 @@ def _validate_result(self, result: Optional[str]) -> Optional[int]:
if result is None:
return None
result, _ = result.split(":")
-
return int(result)
def get(self, namespace: str, key: str) -> Optional[int]:
- if options.get("sentry-metrics.indexer.read-new-cache-namespace"):
+ if options.get(NAMESPACED_READ_FEAT_FLAG):
result = self.cache.get(
self._make_namespaced_cache_key(namespace, key), version=self.version
)
@@ -127,9 +133,16 @@ def set(self, namespace: str, key: str, value: int) -> None:
timeout=self.randomized_ttl,
version=self.version,
)
+ if options.get(NAMESPACED_WRITE_FEAT_FLAG):
+ self.cache.set(
+ key=self._make_namespaced_cache_key(namespace, key),
+ value=self._make_cache_val(value, int(datetime.utcnow().timestamp())),
+ timeout=self.randomized_ttl,
+ version=self.version,
+ )
def get_many(self, namespace: str, keys: Iterable[str]) -> MutableMapping[str, Optional[int]]:
- if options.get("sentry-metrics.indexer.read-new-cache-namespace"):
+ if options.get(NAMESPACED_READ_FEAT_FLAG):
cache_keys = {self._make_namespaced_cache_key(namespace, key): key for key in keys}
namespaced_results: MutableMapping[str, Optional[int]] = {
k: self._validate_result(v)
@@ -150,14 +163,28 @@ def get_many(self, namespace: str, keys: Iterable[str]) -> MutableMapping[str, O
def set_many(self, namespace: str, key_values: Mapping[str, int]) -> None:
cache_key_values = {self._make_cache_key(k): v for k, v in key_values.items()}
self.cache.set_many(cache_key_values, timeout=self.randomized_ttl, version=self.version)
+ if options.get(NAMESPACED_WRITE_FEAT_FLAG):
+ timestamp = int(datetime.utcnow().timestamp())
+ namespaced_cache_key_values = {
+ self._make_namespaced_cache_key(namespace, k): self._make_cache_val(v, timestamp)
+ for k, v in key_values.items()
+ }
+ self.cache.set_many(
+ namespaced_cache_key_values, timeout=self.randomized_ttl, version=self.version
+ )
def delete(self, namespace: str, key: str) -> None:
- cache_key = self._make_cache_key(key)
- self.cache.delete(cache_key, version=self.version)
+ self.cache.delete(self._make_cache_key(key), version=self.version)
+ if options.get(NAMESPACED_WRITE_FEAT_FLAG):
+ self.cache.delete(self._make_namespaced_cache_key(namespace, key), version=self.version)
def delete_many(self, namespace: str, keys: Sequence[str]) -> None:
- cache_keys = [self._make_cache_key(key) for key in keys]
- self.cache.delete_many(cache_keys, version=self.version)
+ self.cache.delete_many([self._make_cache_key(key) for key in keys], version=self.version)
+ if options.get(NAMESPACED_WRITE_FEAT_FLAG):
+ self.cache.delete_many(
+ [self._make_namespaced_cache_key(namespace, key) for key in keys],
+ version=self.version,
+ )
class CachingIndexer(StringIndexer):
@@ -172,7 +199,7 @@ def bulk_record(
cache_keys = UseCaseKeyCollection(strings)
metrics.gauge("sentry_metrics.indexer.lookups_per_batch", value=cache_keys.size)
cache_key_strs = cache_keys.as_strings()
- cache_results = self.cache.get_many(BULK_RECORD_CACHE_NAME_SPACE, cache_key_strs)
+ cache_results = self.cache.get_many(BULK_RECORD_CACHE_NAMESPACE, cache_key_strs)
hits = [k for k, v in cache_results.items() if v is not None]
@@ -213,7 +240,7 @@ def bulk_record(
)
self.cache.set_many(
- BULK_RECORD_CACHE_NAME_SPACE, db_record_key_results.get_mapped_strings_to_ints()
+ BULK_RECORD_CACHE_NAMESPACE, db_record_key_results.get_mapped_strings_to_ints()
)
return cache_key_results.merge(db_record_key_results)
diff --git a/tests/sentry/sentry_metrics/test_all_indexers.py b/tests/sentry/sentry_metrics/test_all_indexers.py
index 46617f0ebe497a..9e2d4c128d7f0b 100644
--- a/tests/sentry/sentry_metrics/test_all_indexers.py
+++ b/tests/sentry/sentry_metrics/test_all_indexers.py
@@ -202,7 +202,12 @@ def test_static_and_non_static_strings_generic_metrics(indexer):
def test_indexer(indexer, indexer_cache, use_case_id):
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
org1_id = 1
org2_id = 2
strings = {"hello", "hey", "hi"}
@@ -257,7 +262,12 @@ def test_resolve_and_reverse_resolve(indexer, indexer_cache, use_case_id):
"""
Test `resolve` and `reverse_resolve` methods
"""
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
org1_id = 1
strings = {"hello", "hey", "hi"}
@@ -285,7 +295,12 @@ def test_already_created_plus_written_results(indexer, indexer_cache, use_case_i
Test that we correctly combine db read results with db write results
for the same organization.
"""
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
org_id = 1234
raw_indexer = indexer
@@ -332,7 +347,12 @@ def test_already_cached_plus_read_results(indexer, indexer_cache, use_case_id) -
Test that we correctly combine cached results with read results
for the same organization.
"""
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
org_id = 8
cached = {
f"{use_case_id.value}:{org_id}:beep": 10,
@@ -371,7 +391,12 @@ def test_already_cached_plus_read_results(indexer, indexer_cache, use_case_id) -
def test_read_when_bulk_record(indexer, use_case_id):
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
strings = {
use_case_id: {
1: {"a"},
@@ -483,7 +508,12 @@ def test_bulk_reverse_resolve(indexer):
Tests reverse resolve properly returns the corresponding strings
in the proper order when given a combination of shared and non-shared ids.
"""
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
org_id = 7
use_case_id = UseCaseID.SESSIONS # any use case would do
static_indexer = StaticStringIndexer(indexer)
diff --git a/tests/sentry/sentry_metrics/test_indexer_cache.py b/tests/sentry/sentry_metrics/test_indexer_cache.py
index b1532453776156..314375ea79f9bc 100644
--- a/tests/sentry/sentry_metrics/test_indexer_cache.py
+++ b/tests/sentry/sentry_metrics/test_indexer_cache.py
@@ -22,7 +22,12 @@ def use_case_id() -> str:
def test_cache(use_case_id: str) -> None:
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
cache.clear()
namespace = "test"
assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") is None
@@ -32,9 +37,67 @@ def test_cache(use_case_id: str) -> None:
indexer_cache.delete(namespace, f"{use_case_id}:1:blah:123")
assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") is None
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": True,
+ }
+ ):
+ cache.clear()
+ namespace = "test"
+ assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") is None
+ indexer_cache.set(namespace, f"{use_case_id}:1:blah:123", 1)
+ assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") == 1
+
+ indexer_cache.delete(namespace, f"{use_case_id}:1:blah:123")
+ assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") is None
+
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": True,
+ "sentry-metrics.indexer.write-new-cache-namespace": True,
+ }
+ ):
+ cache.clear()
+ namespace = "test"
+ assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") is None
+ indexer_cache.set(namespace, f"{use_case_id}:1:blah:123", 1)
+ assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") == 1
+
+ indexer_cache.delete(namespace, f"{use_case_id}:1:blah:123")
+ assert indexer_cache.get(namespace, f"{use_case_id}:1:blah:123") is None
+
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": True,
+ "sentry-metrics.indexer.write-new-cache-namespace": True,
+ }
+ ):
+ cache.clear()
+ namespace_1 = "1"
+ namespace_2 = "2"
+ assert indexer_cache.get(namespace_1, f"{use_case_id}:1:blah:123") is None
+ indexer_cache.set(namespace_1, f"{use_case_id}:1:blah:123", 1)
+ assert indexer_cache.get(namespace_1, f"{use_case_id}:1:blah:123") == 1
+
+ indexer_cache.delete(namespace_1, f"{use_case_id}:1:blah:123")
+ assert indexer_cache.get(namespace_1, f"{use_case_id}:1:blah:123") is None
+
+ assert indexer_cache.get(namespace_2, f"{use_case_id}:1:blah:123") is None
+ indexer_cache.set(namespace_2, f"{use_case_id}:1:blah:123", 2)
+ assert indexer_cache.get(namespace_2, f"{use_case_id}:1:blah:123") == 2
+
+ indexer_cache.delete(namespace_2, f"{use_case_id}:1:blah:123")
+ assert indexer_cache.get(namespace_2, f"{use_case_id}:1:blah:123") is None
+
def test_cache_many(use_case_id: str) -> None:
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
cache.clear()
namespace = "test"
values = {f"{use_case_id}:100:hello": 2, f"{use_case_id}:100:bye": 3}
@@ -53,7 +116,12 @@ def test_cache_many(use_case_id: str) -> None:
def test_make_cache_key(use_case_id: str) -> None:
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
cache.clear()
namespace = "test"
orgId = 1
@@ -64,7 +132,12 @@ def test_make_cache_key(use_case_id: str) -> None:
assert key == f"indexer:test:org:str:{use_case_id}:{hashed}"
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": True}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": True,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
cache.clear()
namespace = "test"
orgId = 1
@@ -77,14 +150,24 @@ def test_make_cache_key(use_case_id: str) -> None:
def test_formatted_results(use_case_id: str) -> None:
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
cache.clear()
namespace = "test"
values = {f"{use_case_id}:1:::hello": 2, f"{use_case_id}:1:::bye": 3}
results = {indexer_cache._make_cache_key(k): v for k, v in values.items()}
assert indexer_cache._format_results(list(values.keys()), results) == values
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": True}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": True,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
cache.clear()
namespace = "test"
values = {
@@ -114,7 +197,12 @@ def test_ttl_jitter() -> None:
def test_separate_namespacing() -> None:
- with override_options({"sentry-metrics.indexer.read-new-cache-namespace": False}):
+ with override_options(
+ {
+ "sentry-metrics.indexer.read-new-cache-namespace": False,
+ "sentry-metrics.indexer.write-new-cache-namespace": False,
+ }
+ ):
namespace = "test"
indexer_cache.set(namespace, "sessions:3:what", 1)
assert indexer_cache.get(namespace, "sessions:3:what") == 1
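
The cache change follows a flag-gated dual-write pattern: every write still lands in the legacy key space, and when the write flag is on it also lands under the new namespaced key, with the value packed as "<id>:<timestamp>" so reads can strip the timestamp back off. A self-contained sketch of that shape (plain dicts stand in for the Django cache; the flag constants are the ones introduced in the diff):

import time

WRITE_NEW = True   # stands in for options.get(NAMESPACED_WRITE_FEAT_FLAG)
READ_NEW = True    # stands in for options.get(NAMESPACED_READ_FEAT_FLAG)

old_cache = {}
new_cache = {}

def make_val(val, timestamp):
    # mirrors _make_cache_val: pack the id together with a write timestamp
    return f"{val}:{timestamp}"

def set_key(namespace, key, value):
    old_cache[key] = str(value)                      # legacy write, unconditional
    if WRITE_NEW:                                    # gated dual write
        new_cache[f"{namespace}:{key}"] = make_val(value, int(time.time()))

def get_key(namespace, key):
    raw = new_cache.get(f"{namespace}:{key}") if READ_NEW else old_cache.get(key)
    if raw is None:
        return None
    return int(raw.split(":")[0])                    # mirrors _validate_result

set_key("br", "sessions:3:what", 1)
assert get_key("br", "sessions:3:what") == 1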
|
c204da6a2ecb6c85652af145b58e3498c46f8b9c
|
2021-07-13 00:07:52
|
Mark Story
|
chore: Clean up migration history (#26714)
| false
|
Clean up migration history (#26714)
|
chore
|
diff --git a/src/sentry/migrations/0001_initial.py b/src/sentry/migrations/0001_initial.py
deleted file mode 100644
index e1d6bd1c7e2eb8..00000000000000
--- a/src/sentry/migrations/0001_initial.py
+++ /dev/null
@@ -1,4891 +0,0 @@
-import django.db.models.deletion
-import django.utils.timezone
-from django.conf import settings
-from django.db import migrations, models
-
-import bitfield.models
-import sentry.db.mixin
-import sentry.db.models.fields.array
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.citext
-import sentry.db.models.fields.encrypted
-import sentry.db.models.fields.foreignkey
-import sentry.db.models.fields.gzippeddict
-import sentry.db.models.fields.jsonfield
-import sentry.db.models.fields.node
-import sentry.db.models.fields.uuid
-import sentry.models.apiapplication
-import sentry.models.apigrant
-import sentry.models.apitoken
-import sentry.models.broadcast
-import sentry.models.event
-import sentry.models.groupshare
-import sentry.models.scheduledeletion
-import sentry.models.sentryapp
-import sentry.models.sentryappinstallation
-import sentry.models.servicehook
-import sentry.models.user
-import sentry.models.useremail
-
-
-class Migration(migrations.Migration):
-
- dependencies = []
-
- operations = [
- migrations.CreateModel(
- name="User",
- fields=[
- ("password", models.CharField(max_length=128, verbose_name="password")),
- (
- "last_login",
- models.DateTimeField(null=True, verbose_name="last login", blank=True),
- ),
- (
- "id",
- sentry.db.models.fields.bounded.BoundedAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "username",
- models.CharField(unique=True, max_length=128, verbose_name="username"),
- ),
- (
- "name",
- models.CharField(
- max_length=200, verbose_name="name", db_column="first_name", blank=True
- ),
- ),
- (
- "email",
- models.EmailField(max_length=75, verbose_name="email address", blank=True),
- ),
- (
- "is_staff",
- models.BooleanField(
- default=False,
- help_text="Designates whether the user can log into this admin site.",
- verbose_name="staff status",
- ),
- ),
- (
- "is_active",
- models.BooleanField(
- default=True,
- help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
- verbose_name="active",
- ),
- ),
- (
- "is_superuser",
- models.BooleanField(
- default=False,
- help_text="Designates that this user has all permissions without explicitly assigning them.",
- verbose_name="superuser status",
- ),
- ),
- (
- "is_managed",
- models.BooleanField(
- default=False,
- help_text="Designates whether this user should be treated as managed. Select this to disallow the user from modifying their account (username, password, etc).",
- verbose_name="managed",
- ),
- ),
- (
- "is_sentry_app",
- models.NullBooleanField(
- default=None,
-                        help_text="Designates whether this user is the entity used for Permissions on behalf of a Sentry App. Cannot login or use Sentry like a normal User would.",
- verbose_name="is sentry app",
- ),
- ),
- (
- "is_password_expired",
- models.BooleanField(
- default=False,
- help_text="If set to true then the user needs to change the password on next sign in.",
- verbose_name="password expired",
- ),
- ),
- (
- "last_password_change",
- models.DateTimeField(
- help_text="The date the password was changed last.",
- null=True,
- verbose_name="date of last password change",
- ),
- ),
- (
- "flags",
- bitfield.models.BitField(
- (
- (
- "newsletter_consent_prompt",
- "Do we need to ask this user for newsletter consent?",
- ),
- ),
- default=0,
- null=True,
- ),
- ),
- ("session_nonce", models.CharField(max_length=12, null=True)),
- (
- "date_joined",
- models.DateTimeField(
- default=django.utils.timezone.now, verbose_name="date joined"
- ),
- ),
- (
- "last_active",
- models.DateTimeField(
- default=django.utils.timezone.now, null=True, verbose_name="last active"
- ),
- ),
- ],
- options={
- "db_table": "auth_user",
- "verbose_name": "user",
- "verbose_name_plural": "users",
- },
- managers=[("objects", sentry.models.user.UserManager(cache_fields=["pk"]))],
- ),
- migrations.CreateModel(
- name="Activity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "set_resolved"),
- (15, "set_resolved_by_age"),
- (13, "set_resolved_in_release"),
- (16, "set_resolved_in_commit"),
- (21, "set_resolved_in_pull_request"),
- (2, "set_unresolved"),
- (3, "set_ignored"),
- (4, "set_public"),
- (5, "set_private"),
- (6, "set_regression"),
- (7, "create_issue"),
- (8, "note"),
- (9, "first_seen"),
- (10, "release"),
- (11, "assigned"),
- (12, "unassigned"),
- (14, "merge"),
- (17, "deploy"),
- (18, "new_processing_issues"),
- (19, "unmerge_source"),
- (20, "unmerge_destination"),
- ]
- ),
- ),
- ("ident", models.CharField(max_length=64, null=True)),
- ("datetime", models.DateTimeField(default=django.utils.timezone.now)),
- ("data", sentry.db.models.fields.gzippeddict.GzippedDictField(null=True)),
- ],
- options={"db_table": "sentry_activity"},
- ),
- migrations.CreateModel(
- name="ApiApplication",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "client_id",
- models.CharField(
- default=sentry.models.apiapplication.generate_token,
- unique=True,
- max_length=64,
- ),
- ),
- (
- "client_secret",
- sentry.db.models.fields.encrypted.EncryptedTextField(
- default=sentry.models.apiapplication.generate_token
- ),
- ),
- (
- "name",
- models.CharField(
- default=sentry.models.apiapplication.generate_name,
- max_length=64,
- blank=True,
- ),
- ),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, db_index=True, choices=[(0, "Active"), (1, "Inactive")]
- ),
- ),
- ("allowed_origins", models.TextField(null=True, blank=True)),
- ("redirect_uris", models.TextField()),
- ("homepage_url", models.URLField(null=True)),
- ("privacy_url", models.URLField(null=True)),
- ("terms_url", models.URLField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "owner",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_apiapplication"},
- ),
- migrations.CreateModel(
- name="ApiAuthorization",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "scopes",
- bitfield.models.BitField(
- (
- ("project:read", "project:read"),
- ("project:write", "project:write"),
- ("project:admin", "project:admin"),
- ("project:releases", "project:releases"),
- ("team:read", "team:read"),
- ("team:write", "team:write"),
- ("team:admin", "team:admin"),
- ("event:read", "event:read"),
- ("event:write", "event:write"),
- ("event:admin", "event:admin"),
- ("org:read", "org:read"),
- ("org:write", "org:write"),
- ("org:admin", "org:admin"),
- ("member:read", "member:read"),
- ("member:write", "member:write"),
- ("member:admin", "member:admin"),
- ),
- default=None,
- ),
- ),
- ("scope_list", sentry.db.models.fields.array.ArrayField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "application",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.ApiApplication", null=True
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_apiauthorization"},
- ),
- migrations.CreateModel(
- name="ApiGrant",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "code",
- models.CharField(
- default=sentry.models.apigrant.generate_code, max_length=64, db_index=True
- ),
- ),
- (
- "expires_at",
- models.DateTimeField(
- default=sentry.models.apigrant.default_expiration, db_index=True
- ),
- ),
- ("redirect_uri", models.CharField(max_length=255)),
- (
- "scopes",
- bitfield.models.BitField(
- (
- ("project:read", "project:read"),
- ("project:write", "project:write"),
- ("project:admin", "project:admin"),
- ("project:releases", "project:releases"),
- ("team:read", "team:read"),
- ("team:write", "team:write"),
- ("team:admin", "team:admin"),
- ("event:read", "event:read"),
- ("event:write", "event:write"),
- ("event:admin", "event:admin"),
- ("org:read", "org:read"),
- ("org:write", "org:write"),
- ("org:admin", "org:admin"),
- ("member:read", "member:read"),
- ("member:write", "member:write"),
- ("member:admin", "member:admin"),
- ),
- default=None,
- ),
- ),
- ("scope_list", sentry.db.models.fields.array.ArrayField(null=True)),
- (
- "application",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.ApiApplication"
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_apigrant"},
- ),
- migrations.CreateModel(
- name="ApiKey",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("label", models.CharField(default="Default", max_length=64, blank=True)),
- ("key", models.CharField(unique=True, max_length=32)),
- (
- "scopes",
- bitfield.models.BitField(
- (
- ("project:read", "project:read"),
- ("project:write", "project:write"),
- ("project:admin", "project:admin"),
- ("project:releases", "project:releases"),
- ("team:read", "team:read"),
- ("team:write", "team:write"),
- ("team:admin", "team:admin"),
- ("event:read", "event:read"),
- ("event:write", "event:write"),
- ("event:admin", "event:admin"),
- ("org:read", "org:read"),
- ("org:write", "org:write"),
- ("org:admin", "org:admin"),
- ("member:read", "member:read"),
- ("member:write", "member:write"),
- ("member:admin", "member:admin"),
- ),
- default=None,
- ),
- ),
- ("scope_list", sentry.db.models.fields.array.ArrayField(null=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, db_index=True, choices=[(0, "Active"), (1, "Inactive")]
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("allowed_origins", models.TextField(null=True, blank=True)),
- ],
- options={"db_table": "sentry_apikey"},
- ),
- migrations.CreateModel(
- name="ApiToken",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "scopes",
- bitfield.models.BitField(
- (
- ("project:read", "project:read"),
- ("project:write", "project:write"),
- ("project:admin", "project:admin"),
- ("project:releases", "project:releases"),
- ("team:read", "team:read"),
- ("team:write", "team:write"),
- ("team:admin", "team:admin"),
- ("event:read", "event:read"),
- ("event:write", "event:write"),
- ("event:admin", "event:admin"),
- ("org:read", "org:read"),
- ("org:write", "org:write"),
- ("org:admin", "org:admin"),
- ("member:read", "member:read"),
- ("member:write", "member:write"),
- ("member:admin", "member:admin"),
- ),
- default=None,
- ),
- ),
- ("scope_list", sentry.db.models.fields.array.ArrayField(null=True)),
- (
- "token",
- models.CharField(
- default=sentry.models.apitoken.generate_token, unique=True, max_length=64
- ),
- ),
- (
- "refresh_token",
- models.CharField(
- default=sentry.models.apitoken.generate_token,
- max_length=64,
- unique=True,
- null=True,
- ),
- ),
- (
- "expires_at",
- models.DateTimeField(
- default=sentry.models.apitoken.default_expiration, null=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "application",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.ApiApplication", null=True
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_apitoken"},
- ),
- migrations.CreateModel(
- name="AssistantActivity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("guide_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("viewed_ts", models.DateTimeField(null=True)),
- ("dismissed_ts", models.DateTimeField(null=True)),
- ("useful", models.NullBooleanField()),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_assistant_activity"},
- ),
- migrations.CreateModel(
- name="AuditLogEntry",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("actor_label", models.CharField(max_length=64, null=True, blank=True)),
- (
- "target_object",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- (
- "event",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "member.invite"),
- (2, "member.add"),
- (3, "member.accept-invite"),
- (5, "member.remove"),
- (4, "member.edit"),
- (6, "member.join-team"),
- (7, "member.leave-team"),
- (8, "member.pending"),
- (20, "team.create"),
- (21, "team.edit"),
- (22, "team.remove"),
- (30, "project.create"),
- (31, "project.edit"),
- (32, "project.remove"),
- (33, "project.set-public"),
- (34, "project.set-private"),
- (35, "project.request-transfer"),
- (36, "project.accept-transfer"),
- (10, "org.create"),
- (11, "org.edit"),
- (12, "org.remove"),
- (13, "org.restore"),
- (40, "tagkey.remove"),
- (50, "projectkey.create"),
- (51, "projectkey.edit"),
- (52, "projectkey.remove"),
- (53, "projectkey.enable"),
- (53, "projectkey.disable"),
- (60, "sso.enable"),
- (61, "sso.disable"),
- (62, "sso.edit"),
- (63, "sso-identity.link"),
- (70, "api-key.create"),
- (71, "api-key.edit"),
- (72, "api-key.remove"),
- (80, "rule.create"),
- (81, "rule.edit"),
- (82, "rule.remove"),
- (100, "serivcehook.create"),
- (101, "serivcehook.edit"),
- (102, "serivcehook.remove"),
- (103, "serivcehook.enable"),
- (104, "serivcehook.disable"),
- (110, "integration.add"),
- (111, "integration.edit"),
- (112, "integration.remove"),
- (113, "sentry-app.add"),
- (115, "sentry-app.remove"),
- (116, "sentry-app.install"),
- (117, "sentry-app.uninstall"),
- (90, "ondemand.edit"),
- (91, "trial.started"),
- (92, "plan.changed"),
- ]
- ),
- ),
- ("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
- ("data", sentry.db.models.fields.gzippeddict.GzippedDictField()),
- ("datetime", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "actor",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="audit_actors",
- blank=True,
- to=settings.AUTH_USER_MODEL,
- null=True,
- ),
- ),
- (
- "actor_key",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- blank=True, to="sentry.ApiKey", null=True
- ),
- ),
- ],
- options={"db_table": "sentry_auditlogentry"},
- ),
- migrations.CreateModel(
- name="Authenticator",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "created_at",
- models.DateTimeField(
- default=django.utils.timezone.now, verbose_name="created at"
- ),
- ),
- ("last_used_at", models.DateTimeField(null=True, verbose_name="last used at")),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "Recovery Codes"),
- (1, "Authenticator App"),
- (2, "Text Message"),
- (3, "U2F (Universal 2nd Factor)"),
- ]
- ),
- ),
- (
- "config",
- sentry.db.models.fields.encrypted.EncryptedPickledObjectField(editable=False),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={
- "db_table": "auth_authenticator",
- "verbose_name": "authenticator",
- "verbose_name_plural": "authenticators",
- },
- ),
- migrations.CreateModel(
- name="AuthIdentity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ident", models.CharField(max_length=128)),
- ("data", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- ("last_verified", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_synced", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_authidentity"},
- ),
- migrations.CreateModel(
- name="AuthProvider",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("provider", models.CharField(max_length=128)),
- ("config", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "sync_time",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("last_sync", models.DateTimeField(null=True)),
- (
- "default_role",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=50),
- ),
- ("default_global_access", models.BooleanField(default=True)),
- (
- "flags",
- bitfield.models.BitField(
- (
- (
- "allow_unlinked",
- "Grant access to members who have not linked SSO accounts.",
- ),
- ),
- default=0,
- ),
- ),
- ],
- options={"db_table": "sentry_authprovider"},
- ),
- migrations.CreateModel(
- name="Broadcast",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("upstream_id", models.CharField(max_length=32, null=True, blank=True)),
- ("title", models.CharField(max_length=32)),
- ("message", models.CharField(max_length=256)),
- ("link", models.URLField(null=True, blank=True)),
- ("is_active", models.BooleanField(default=True, db_index=True)),
- (
- "date_expires",
- models.DateTimeField(
- default=sentry.models.broadcast.default_expiration, null=True, blank=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_broadcast"},
- ),
- migrations.CreateModel(
- name="BroadcastSeen",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_seen", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "broadcast",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Broadcast"),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_broadcastseen"},
- ),
- migrations.CreateModel(
- name="Commit",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("repository_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("key", models.CharField(max_length=64)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("message", models.TextField(null=True)),
- ],
- options={"db_table": "sentry_commit"},
- ),
- migrations.CreateModel(
- name="CommitAuthor",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("name", models.CharField(max_length=128, null=True)),
- ("email", models.EmailField(max_length=75)),
- ("external_id", models.CharField(max_length=164, null=True)),
- ],
- options={"db_table": "sentry_commitauthor"},
- ),
- migrations.CreateModel(
- name="CommitFileChange",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("filename", models.TextField()),
- (
- "type",
- models.CharField(
- max_length=1, choices=[("A", "Added"), ("D", "Deleted"), ("M", "Modified")]
- ),
- ),
- (
- "commit",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Commit"),
- ),
- ],
- options={"db_table": "sentry_commitfilechange"},
- ),
- migrations.CreateModel(
- name="Counter",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("value", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ],
- options={"db_table": "sentry_projectcounter"},
- ),
- migrations.CreateModel(
- name="Dashboard",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("title", models.CharField(max_length=255)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- (
- "created_by",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_dashboard"},
- ),
- migrations.CreateModel(
- name="DeletedOrganization",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("actor_label", models.CharField(max_length=64, null=True)),
- ("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("actor_key", models.CharField(max_length=32, null=True)),
- ("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
- ("date_deleted", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_created", models.DateTimeField(null=True)),
- ("reason", models.TextField(null=True, blank=True)),
- ("name", models.CharField(max_length=64, null=True)),
- ("slug", models.CharField(max_length=50, null=True)),
- ],
- options={"db_table": "sentry_deletedorganization"},
- ),
- migrations.CreateModel(
- name="DeletedProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("actor_label", models.CharField(max_length=64, null=True)),
- ("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("actor_key", models.CharField(max_length=32, null=True)),
- ("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
- ("date_deleted", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_created", models.DateTimeField(null=True)),
- ("reason", models.TextField(null=True, blank=True)),
- ("slug", models.CharField(max_length=50, null=True)),
- ("name", models.CharField(max_length=200, null=True)),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- ("organization_name", models.CharField(max_length=64, null=True)),
- ("organization_slug", models.CharField(max_length=50, null=True)),
- ("platform", models.CharField(max_length=64, null=True)),
- ],
- options={"db_table": "sentry_deletedproject"},
- ),
- migrations.CreateModel(
- name="DeletedTeam",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("actor_label", models.CharField(max_length=64, null=True)),
- ("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("actor_key", models.CharField(max_length=32, null=True)),
- ("ip_address", models.GenericIPAddressField(null=True, unpack_ipv4=True)),
- ("date_deleted", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_created", models.DateTimeField(null=True)),
- ("reason", models.TextField(null=True, blank=True)),
- ("name", models.CharField(max_length=64, null=True)),
- ("slug", models.CharField(max_length=50, null=True)),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- ("organization_name", models.CharField(max_length=64, null=True)),
- ("organization_slug", models.CharField(max_length=50, null=True)),
- ],
- options={"db_table": "sentry_deletedteam"},
- ),
- migrations.CreateModel(
- name="Deploy",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "environment_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("date_finished", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_started", models.DateTimeField(null=True, blank=True)),
- ("name", models.CharField(max_length=64, null=True, blank=True)),
- ("url", models.URLField(null=True, blank=True)),
- ("notified", models.NullBooleanField(default=False, db_index=True)),
- ],
- options={"db_table": "sentry_deploy"},
- ),
- migrations.CreateModel(
- name="DiscoverSavedQuery",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("name", models.CharField(max_length=255)),
- ("query", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- ("date_created", models.DateTimeField(auto_now_add=True)),
- ("date_updated", models.DateTimeField(auto_now=True)),
- (
- "created_by",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- null=True,
- ),
- ),
- ],
- options={"db_table": "sentry_discoversavedquery"},
- ),
- migrations.CreateModel(
- name="DiscoverSavedQueryProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "discover_saved_query",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.DiscoverSavedQuery"
- ),
- ),
- ],
- options={"db_table": "sentry_discoversavedqueryproject"},
- ),
- migrations.CreateModel(
- name="Distribution",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("name", models.CharField(max_length=64)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_distribution"},
- ),
- migrations.CreateModel(
- name="Email",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "email",
- sentry.db.models.fields.citext.CIEmailField(
- unique=True, max_length=75, verbose_name="email address"
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_email"},
- ),
- migrations.CreateModel(
- name="Environment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("organization_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("name", models.CharField(max_length=64)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_environment"},
- ),
- migrations.CreateModel(
- name="EnvironmentProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("is_hidden", models.NullBooleanField()),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Environment"),
- ),
- ],
- options={"db_table": "sentry_environmentproject"},
- ),
- migrations.CreateModel(
- name="Event",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "group_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, blank=True),
- ),
- ("event_id", models.CharField(max_length=32, null=True, db_column="message_id")),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, blank=True),
- ),
- ("message", models.TextField()),
- ("platform", models.CharField(max_length=64, null=True)),
- (
- "datetime",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ("time_spent", sentry.db.models.fields.bounded.BoundedIntegerField(null=True)),
- ("data", sentry.db.models.fields.node.NodeField(null=True, blank=True)),
- ],
- options={
- "db_table": "sentry_message",
- "verbose_name": "message",
- "verbose_name_plural": "messages",
- },
- ),
- migrations.CreateModel(
- name="EventAttachment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "group_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(
- null=True, db_index=True
- ),
- ),
- ("event_id", models.CharField(max_length=32, db_index=True)),
- ("name", models.TextField()),
- (
- "date_added",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_eventattachment"},
- ),
- migrations.CreateModel(
- name="EventMapping",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("event_id", models.CharField(max_length=32)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_eventmapping"},
- ),
- migrations.CreateModel(
- name="EventProcessingIssue",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- )
- ],
- options={"db_table": "sentry_eventprocessingissue"},
- ),
- migrations.CreateModel(
- name="EventTag",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("event_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("key_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("value_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "date_added",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_eventtag"},
- ),
- migrations.CreateModel(
- name="EventUser",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("hash", models.CharField(max_length=32)),
- ("ident", models.CharField(max_length=128, null=True)),
- ("email", models.EmailField(max_length=75, null=True)),
- ("username", models.CharField(max_length=128, null=True)),
- ("name", models.CharField(max_length=128, null=True)),
- ("ip_address", models.GenericIPAddressField(null=True)),
- (
- "date_added",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_eventuser"},
- ),
- migrations.CreateModel(
- name="ExternalIssue",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("organization_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("integration_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("key", models.CharField(max_length=128)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("title", models.TextField(null=True)),
- ("description", models.TextField(null=True)),
- ("metadata", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- ],
- options={"db_table": "sentry_externalissue"},
- ),
- migrations.CreateModel(
- name="FeatureAdoption",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "feature_id",
- models.PositiveIntegerField(
- choices=[
- (0, "Python"),
- (1, "JavaScript"),
- (2, "Node.js"),
- (3, "Ruby"),
- (4, "Java"),
- (5, "Cocoa"),
- (6, "Objective-C"),
- (7, "PHP"),
- (8, "Go"),
- (9, "C#"),
- (10, "Perl"),
- (11, "Elixir"),
- (12, "CFML"),
- (13, "Groovy"),
- (14, "CSP Reports"),
- (20, "Flask"),
- (21, "Django"),
- (22, "Celery"),
- (23, "Bottle"),
- (24, "Pylons"),
- (25, "Tornado"),
- (26, "web.py"),
- (27, "Zope"),
- (40, "First Event"),
- (41, "Release Tracking"),
- (42, "Environment Tracking"),
- (43, "User Tracking"),
- (44, "Custom Tags"),
- (45, "Source Maps"),
- (46, "User Feedback"),
- (48, "Breadcrumbs"),
- (49, "Resolve with Commit"),
- (60, "First Project"),
- (61, "Invite Team"),
- (62, "Assign Issue"),
- (63, "Resolve in Next Release"),
- (64, "Advanced Search"),
- (65, "Saved Search"),
- (66, "Inbound Filters"),
- (67, "Alert Rules"),
- (68, "Issue Tracker Integration"),
- (69, "Notification Integration"),
- (70, "Delete and Discard Future Events"),
- (71, "Link a Repository"),
- (72, "Ownership Rules"),
- (73, "Ignore Issue"),
- (80, "SSO"),
- (81, "Data Scrubbers"),
- (90, "Create Release Using API"),
- (91, "Create Deploy Using API"),
- ]
- ),
- ),
- ("date_completed", models.DateTimeField(default=django.utils.timezone.now)),
- ("complete", models.BooleanField(default=False)),
- ("applicable", models.BooleanField(default=True)),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- ],
- options={"db_table": "sentry_featureadoption"},
- ),
- migrations.CreateModel(
- name="File",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("name", models.TextField()),
- ("type", models.CharField(max_length=64)),
- (
- "timestamp",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ("headers", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- ("size", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
- ("checksum", models.CharField(max_length=40, null=True, db_index=True)),
- ("path", models.TextField(null=True)),
- ],
- options={"db_table": "sentry_file"},
- ),
- migrations.CreateModel(
- name="FileBlob",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("path", models.TextField(null=True)),
- ("size", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
- ("checksum", models.CharField(unique=True, max_length=40)),
- (
- "timestamp",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_fileblob"},
- ),
- migrations.CreateModel(
- name="FileBlobIndex",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("offset", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "blob",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.FileBlob"),
- ),
- ("file", sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.File")),
- ],
- options={"db_table": "sentry_fileblobindex"},
- ),
- migrations.CreateModel(
- name="FileBlobOwner",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "blob",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.FileBlob"),
- ),
- ],
- options={"db_table": "sentry_fileblobowner"},
- ),
- migrations.CreateModel(
- name="Group",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("logger", models.CharField(default="", max_length=64, db_index=True, blank=True)),
- (
- "level",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=40,
- blank=True,
- db_index=True,
- choices=[
- (0, "sample"),
- (10, "debug"),
- (20, "info"),
- (30, "warning"),
- (40, "error"),
- (50, "fatal"),
- ],
- ),
- ),
- ("message", models.TextField()),
- (
- "culprit",
- models.CharField(max_length=200, null=True, db_column="view", blank=True),
- ),
- (
- "num_comments",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, null=True
- ),
- ),
- ("platform", models.CharField(max_length=64, null=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- db_index=True,
- choices=[(0, "Unresolved"), (1, "Resolved"), (2, "Ignored")],
- ),
- ),
- (
- "times_seen",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=1, db_index=True
- ),
- ),
- (
- "last_seen",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- (
- "first_seen",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ("resolved_at", models.DateTimeField(null=True, db_index=True)),
- ("active_at", models.DateTimeField(null=True, db_index=True)),
- (
- "time_spent_total",
- sentry.db.models.fields.bounded.BoundedIntegerField(default=0),
- ),
- (
- "time_spent_count",
- sentry.db.models.fields.bounded.BoundedIntegerField(default=0),
- ),
- ("score", sentry.db.models.fields.bounded.BoundedIntegerField(default=0)),
- ("is_public", models.NullBooleanField(default=False)),
- (
- "data",
- sentry.db.models.fields.gzippeddict.GzippedDictField(null=True, blank=True),
- ),
- ("short_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ],
- options={
- "verbose_name_plural": "grouped messages",
- "db_table": "sentry_groupedmessage",
- "verbose_name": "grouped message",
- "permissions": (("can_view", "Can view"),),
- },
- ),
- migrations.CreateModel(
- name="GroupAssignee",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="assignee_set", to="sentry.Group", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_groupasignee"},
- ),
- migrations.CreateModel(
- name="GroupBookmark",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="bookmark_set", to="sentry.Group"
- ),
- ),
- ],
- options={"db_table": "sentry_groupbookmark"},
- ),
- migrations.CreateModel(
- name="GroupCommitResolution",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "commit_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "datetime",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_groupcommitresolution"},
- ),
- migrations.CreateModel(
- name="GroupEmailThread",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("email", models.EmailField(max_length=75)),
- ("msgid", models.CharField(max_length=100)),
- ("date", models.DateTimeField(default=django.utils.timezone.now, db_index=True)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="groupemail_set", to="sentry.Group"
- ),
- ),
- ],
- options={"db_table": "sentry_groupemailthread"},
- ),
- migrations.CreateModel(
- name="GroupEnvironment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "first_seen",
- models.DateTimeField(
- default=django.utils.timezone.now, null=True, db_index=True
- ),
- ),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Environment", db_constraint=False
- ),
- ),
- ],
- options={"db_table": "sentry_groupenvironment"},
- ),
- migrations.CreateModel(
- name="GroupHash",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("hash", models.CharField(max_length=32)),
- (
- "group_tombstone_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, db_index=True
- ),
- ),
- (
- "state",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, choices=[(1, "Locked (Migration in Progress)")]
- ),
- ),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", null=True
- ),
- ),
- ],
- options={"db_table": "sentry_grouphash"},
- ),
- migrations.CreateModel(
- name="GroupLink",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
- ),
- (
- "linked_type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=1,
- choices=[(1, "Commit"), (2, "Pull Request"), (3, "Tracker Issue")],
- ),
- ),
- ("linked_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "relationship",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=2, choices=[(1, "Resolves"), (2, "Linked")]
- ),
- ),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- (
- "datetime",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_grouplink"},
- ),
- migrations.CreateModel(
- name="GroupMeta",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("key", models.CharField(max_length=64)),
- ("value", models.TextField()),
- ("group", sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Group")),
- ],
- options={"db_table": "sentry_groupmeta"},
- ),
- migrations.CreateModel(
- name="GroupRedirect",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)),
- (
- "previous_group_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(unique=True),
- ),
- ],
- options={"db_table": "sentry_groupredirect"},
- ),
- migrations.CreateModel(
- name="GroupRelease",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "release_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("environment", models.CharField(default="", max_length=64)),
- ("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "last_seen",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- ],
- options={"db_table": "sentry_grouprelease"},
- ),
- migrations.CreateModel(
- name="GroupResolution",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, choices=[(1, "in_next_release"), (0, "in_release")]
- ),
- ),
- (
- "actor_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- (
- "datetime",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, choices=[(0, "Pending"), (1, "Resolved")]
- ),
- ),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_groupresolution"},
- ),
- migrations.CreateModel(
- name="GroupRuleStatus",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("status", models.PositiveSmallIntegerField(default=0)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_active", models.DateTimeField(null=True)),
- ("group", sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Group")),
- ],
- options={"db_table": "sentry_grouprulestatus"},
- ),
- migrations.CreateModel(
- name="GroupSeen",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ("group", sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Group")),
- ],
- options={"db_table": "sentry_groupseen"},
- ),
- migrations.CreateModel(
- name="GroupShare",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "uuid",
- models.CharField(
- default=sentry.models.groupshare.default_uuid, unique=True, max_length=32
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_groupshare"},
- ),
- migrations.CreateModel(
- name="GroupSnooze",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("until", models.DateTimeField(null=True)),
- ("count", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
- ("window", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True)),
- (
- "user_count",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- (
- "user_window",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("state", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- (
- "actor_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_groupsnooze"},
- ),
- migrations.CreateModel(
- name="GroupSubscription",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("is_active", models.BooleanField(default=True)),
- ("reason", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="subscription_set", to="sentry.Group"
- ),
- ),
- ],
- options={"db_table": "sentry_groupsubscription"},
- ),
- migrations.CreateModel(
- name="GroupTagKey",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(
- null=True, db_index=True
- ),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)),
- ("key", models.CharField(max_length=32)),
- (
- "values_seen",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
- ),
- ],
- options={"db_table": "sentry_grouptagkey"},
- ),
- migrations.CreateModel(
- name="GroupTagValue",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(
- null=True, db_index=True
- ),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True)),
- (
- "times_seen",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
- ),
- ("key", models.CharField(max_length=32)),
- ("value", models.CharField(max_length=200)),
- (
- "last_seen",
- models.DateTimeField(
- default=django.utils.timezone.now, null=True, db_index=True
- ),
- ),
- (
- "first_seen",
- models.DateTimeField(
- default=django.utils.timezone.now, null=True, db_index=True
- ),
- ),
- ],
- options={"db_table": "sentry_messagefiltervalue"},
- ),
- migrations.CreateModel(
- name="GroupTombstone",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "previous_group_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(unique=True),
- ),
- (
- "level",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=40,
- blank=True,
- choices=[
- (0, "sample"),
- (10, "debug"),
- (20, "info"),
- (30, "warning"),
- (40, "error"),
- (50, "fatal"),
- ],
- ),
- ),
- ("message", models.TextField()),
- ("culprit", models.CharField(max_length=200, null=True, blank=True)),
- (
- "data",
- sentry.db.models.fields.gzippeddict.GzippedDictField(null=True, blank=True),
- ),
- (
- "actor_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ],
- options={"db_table": "sentry_grouptombstone"},
- ),
- migrations.CreateModel(
- name="Identity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("external_id", models.CharField(max_length=64)),
- ("data", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- ("status", sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0)),
- ("scopes", sentry.db.models.fields.array.ArrayField(null=True)),
- ("date_verified", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_identity"},
- ),
- migrations.CreateModel(
- name="IdentityProvider",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("type", models.CharField(max_length=64)),
- ("config", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- ("external_id", models.CharField(max_length=64, null=True)),
- ],
- options={"db_table": "sentry_identityprovider"},
- ),
- migrations.CreateModel(
- name="Integration",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("provider", models.CharField(max_length=64)),
- ("external_id", models.CharField(max_length=64)),
- ("name", models.CharField(max_length=200)),
- ("metadata", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- null=True,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- ],
- options={"db_table": "sentry_integration"},
- ),
- migrations.CreateModel(
- name="IntegrationExternalProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_integration_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("name", models.CharField(max_length=128)),
- ("external_id", models.CharField(max_length=64)),
- ("resolved_status", models.CharField(max_length=64)),
- ("unresolved_status", models.CharField(max_length=64)),
- ],
- options={"db_table": "sentry_integrationexternalproject"},
- ),
- migrations.CreateModel(
- name="LatestRelease",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("repository_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("environment_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("release_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("deploy_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("commit_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ],
- options={"db_table": "sentry_latestrelease"},
- ),
- migrations.CreateModel(
- name="LostPasswordHash",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("hash", models.CharField(max_length=32)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_lostpasswordhash"},
- ),
- migrations.CreateModel(
- name="Monitor",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "guid",
- sentry.db.models.fields.uuid.UUIDField(
- unique=True, max_length=32, editable=False
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("name", models.CharField(max_length=128)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- (4, "ok"),
- (5, "error"),
- ],
- ),
- ),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "unknown"),
- (1, "health_check"),
- (2, "heartbeat"),
- (3, "cron_job"),
- ],
- ),
- ),
- ("config", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- ("next_checkin", models.DateTimeField(null=True)),
- ("last_checkin", models.DateTimeField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_monitor"},
- ),
- migrations.CreateModel(
- name="MonitorCheckIn",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "guid",
- sentry.db.models.fields.uuid.UUIDField(
- unique=True, max_length=32, editable=False
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[(0, "unknown"), (1, "ok"), (2, "error"), (3, "in_progress")],
- ),
- ),
- ("config", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- (
- "duration",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_monitorcheckin"},
- ),
- migrations.CreateModel(
- name="MonitorLocation",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "guid",
- sentry.db.models.fields.uuid.UUIDField(
- unique=True, max_length=32, editable=False
- ),
- ),
- ("name", models.CharField(max_length=128)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_monitorlocation"},
- ),
- migrations.CreateModel(
- name="Option",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("key", models.CharField(unique=True, max_length=64)),
- (
- "value",
- sentry.db.models.fields.encrypted.EncryptedPickledObjectField(editable=False),
- ),
- ("last_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_option"},
- ),
- migrations.CreateModel(
- name="Organization",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("name", models.CharField(max_length=64)),
- ("slug", models.SlugField(unique=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "active"),
- (1, "pending deletion"),
- (2, "deletion in progress"),
- ],
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "default_role",
- models.CharField(
- default="member",
- max_length=32,
- choices=[
- ("member", "Member"),
- ("admin", "Admin"),
- ("manager", "Manager"),
- ("owner", "Owner"),
- ],
- ),
- ),
- (
- "flags",
- bitfield.models.BitField(
- (
- (
- "allow_joinleave",
- "Allow members to join and leave teams without requiring approval.",
- ),
- (
- "enhanced_privacy",
- "Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.",
- ),
- (
- "disable_shared_issues",
- "Disable sharing of limited details on issues to anonymous users.",
- ),
- (
- "early_adopter",
- "Enable early adopter status, gaining access to features prior to public release.",
- ),
- (
- "require_2fa",
- "Require and enforce two-factor authentication for all members.",
- ),
- (
- "disable_new_visibility_features",
- "Temporarily opt out of new visibility features and ui",
- ),
- ),
- default=1,
- ),
- ),
- ],
- options={"db_table": "sentry_organization"},
- ),
- migrations.CreateModel(
- name="OrganizationAccessRequest",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- )
- ],
- options={"db_table": "sentry_organizationaccessrequest"},
- ),
- migrations.CreateModel(
- name="OrganizationAvatar",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ident", models.CharField(unique=True, max_length=32, db_index=True)),
- (
- "avatar_type",
- models.PositiveSmallIntegerField(
- default=0, choices=[(0, "letter_avatar"), (1, "upload")]
- ),
- ),
- (
- "file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.File",
- unique=True,
- db_constraint=False,
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="avatar", to="sentry.Organization", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_organizationavatar"},
- ),
- migrations.CreateModel(
- name="OrganizationIntegration",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("config", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- (
- "default_auth_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, db_index=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- (
- "integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Integration"),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- ],
- options={"db_table": "sentry_organizationintegration"},
- ),
- migrations.CreateModel(
- name="OrganizationMember",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("email", models.EmailField(max_length=75, null=True, blank=True)),
- (
- "role",
- models.CharField(
- default="member",
- max_length=32,
- choices=[
- ("member", "Member"),
- ("admin", "Admin"),
- ("manager", "Manager"),
- ("owner", "Owner"),
- ],
- ),
- ),
- (
- "flags",
- bitfield.models.BitField(
- (("sso:linked", "sso:linked"), ("sso:invalid", "sso:invalid")), default=0
- ),
- ),
- ("token", models.CharField(max_length=64, unique=True, null=True, blank=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("token_expires_at", models.DateTimeField(default=None, null=True)),
- ("has_global_access", models.BooleanField(default=True)),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=50, blank=True
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="member_set", to="sentry.Organization"
- ),
- ),
- ],
- options={"db_table": "sentry_organizationmember"},
- ),
- migrations.CreateModel(
- name="OrganizationMemberTeam",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("is_active", models.BooleanField(default=True)),
- (
- "organizationmember",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.OrganizationMember"
- ),
- ),
- ],
- options={"db_table": "sentry_organizationmember_teams"},
- ),
- migrations.CreateModel(
- name="OrganizationOnboardingTask",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "task",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (2, "First event"),
- (3, "Invite member"),
- (9, "Issue tracker"),
- (10, "Notification services"),
- (4, "Second platform"),
- (5, "User context"),
- (7, "Upload sourcemaps"),
- (6, "Release tracking"),
- (8, "User reports"),
- ]
- ),
- ),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(1, "Complete"), (2, "Pending"), (3, "Skipped")]
- ),
- ),
- ("date_completed", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True, blank=True),
- ),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- ],
- options={"db_table": "sentry_organizationonboardingtask"},
- ),
- migrations.CreateModel(
- name="OrganizationOption",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("key", models.CharField(max_length=64)),
- (
- "value",
- sentry.db.models.fields.encrypted.EncryptedPickledObjectField(editable=False),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- ],
- options={"db_table": "sentry_organizationoptions"},
- ),
- migrations.CreateModel(
- name="PlatformExternalIssue",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("group_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("service_type", models.CharField(max_length=64)),
- ("display_name", models.TextField()),
- ("web_url", models.URLField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_platformexternalissue"},
- ),
- migrations.CreateModel(
- name="ProcessingIssue",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("checksum", models.CharField(max_length=40, db_index=True)),
- ("type", models.CharField(max_length=30)),
- ("data", sentry.db.models.fields.gzippeddict.GzippedDictField()),
- ("datetime", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_processingissue"},
- ),
- migrations.CreateModel(
- name="Project",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("slug", models.SlugField(null=True)),
- ("name", models.CharField(max_length=200)),
- ("forced_color", models.CharField(max_length=6, null=True, blank=True)),
- ("public", models.BooleanField(default=False)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- db_index=True,
- choices=[
- (0, "Active"),
- (2, "Pending Deletion"),
- (3, "Deletion in Progress"),
- ],
- ),
- ),
- ("first_event", models.DateTimeField(null=True)),
- (
- "flags",
- bitfield.models.BitField(
- (("has_releases", "This Project has sent release data"),),
- default=0,
- null=True,
- ),
- ),
- ("platform", models.CharField(max_length=64, null=True)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- ],
- options={"db_table": "sentry_project"},
- bases=(models.Model, sentry.db.mixin.PendingDeletionMixin),
- ),
- migrations.CreateModel(
- name="ProjectAvatar",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ident", models.CharField(unique=True, max_length=32, db_index=True)),
- (
- "avatar_type",
- models.PositiveSmallIntegerField(
- default=0, choices=[(0, "letter_avatar"), (1, "upload")]
- ),
- ),
- (
- "file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.File",
- unique=True,
- db_constraint=False,
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="avatar", to="sentry.Project", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_projectavatar"},
- ),
- migrations.CreateModel(
- name="ProjectBookmark",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False, blank=True, to="sentry.Project", null=True
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_projectbookmark"},
- ),
- migrations.CreateModel(
- name="ProjectCfiCacheFile",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("checksum", models.CharField(max_length=40)),
- ("version", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "cache_file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.File"),
- ),
- ],
- options={"abstract": False, "db_table": "sentry_projectcficachefile"},
- ),
- migrations.CreateModel(
- name="ProjectDebugFile",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("object_name", models.TextField()),
- ("cpu_name", models.CharField(max_length=40)),
- ("debug_id", models.CharField(max_length=64, db_column="uuid")),
- ("code_id", models.CharField(max_length=64, null=True)),
- ("data", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- ("file", sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.File")),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project",
- null=True,
- db_constraint=False,
- ),
- ),
- ],
- options={"db_table": "sentry_projectdsymfile"},
- ),
- migrations.CreateModel(
- name="ProjectIntegration",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("config", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- (
- "integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Integration"),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_projectintegration"},
- ),
- migrations.CreateModel(
- name="ProjectKey",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("label", models.CharField(max_length=64, null=True, blank=True)),
- ("public_key", models.CharField(max_length=32, unique=True, null=True)),
- ("secret_key", models.CharField(max_length=32, unique=True, null=True)),
- (
- "roles",
- bitfield.models.BitField(
- (("store", "Event API access"), ("api", "Web API access")), default=1
- ),
- ),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, db_index=True, choices=[(0, "Active"), (1, "Inactive")]
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "rate_limit_count",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- (
- "rate_limit_window",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="key_set", to="sentry.Project"
- ),
- ),
- ],
- options={"db_table": "sentry_projectkey"},
- ),
- migrations.CreateModel(
- name="ProjectOption",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("key", models.CharField(max_length=64)),
- (
- "value",
- sentry.db.models.fields.encrypted.EncryptedPickledObjectField(editable=False),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_projectoptions"},
- ),
- migrations.CreateModel(
- name="ProjectOwnership",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("raw", models.TextField(null=True)),
- ("schema", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- ("fallthrough", models.BooleanField(default=True)),
- ("date_created", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("is_active", models.BooleanField(default=True)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_projectownership"},
- ),
- migrations.CreateModel(
- name="ProjectPlatform",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("platform", models.CharField(max_length=64)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_projectplatform"},
- ),
- migrations.CreateModel(
- name="ProjectRedirect",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("redirect_slug", models.SlugField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_projectredirect"},
- ),
- migrations.CreateModel(
- name="ProjectSymCacheFile",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("checksum", models.CharField(max_length=40)),
- ("version", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "cache_file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.File"),
- ),
- (
- "debug_file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_column="dsym_file_id",
- on_delete=django.db.models.deletion.DO_NOTHING,
- to="sentry.ProjectDebugFile",
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", null=True
- ),
- ),
- ],
- options={"abstract": False, "db_table": "sentry_projectsymcachefile"},
- ),
- migrations.CreateModel(
- name="ProjectTeam",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_projectteam"},
- ),
- migrations.CreateModel(
- name="PromptsActivity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("feature", models.CharField(max_length=64)),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_promptsactivity"},
- ),
- migrations.CreateModel(
- name="PullRequest",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("repository_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("key", models.CharField(max_length=64)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("title", models.TextField(null=True)),
- ("message", models.TextField(null=True)),
- ("merge_commit_sha", models.CharField(max_length=64, null=True)),
- (
- "author",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.CommitAuthor", null=True
- ),
- ),
- ],
- options={"db_table": "sentry_pull_request"},
- ),
- migrations.CreateModel(
- name="PullRequestCommit",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "commit",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Commit"),
- ),
- (
- "pull_request",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.PullRequest"),
- ),
- ],
- options={"db_table": "sentry_pullrequest_commit"},
- ),
- migrations.CreateModel(
- name="RawEvent",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("event_id", models.CharField(max_length=32, null=True)),
- ("datetime", models.DateTimeField(default=django.utils.timezone.now)),
- ("data", sentry.db.models.fields.node.NodeField(null=True, blank=True)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_rawevent"},
- ),
- migrations.CreateModel(
- name="RecentSearch",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("type", models.PositiveSmallIntegerField()),
- ("query", models.TextField()),
- ("query_hash", models.CharField(max_length=32)),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, db_index=False
- ),
- ),
- ],
- options={"db_table": "sentry_recentsearch"},
- ),
- migrations.CreateModel(
- name="Relay",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("relay_id", models.CharField(unique=True, max_length=64)),
- ("public_key", models.CharField(max_length=200)),
- ("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ("is_internal", models.BooleanField(default=False)),
- ],
- options={"db_table": "sentry_relay"},
- ),
- migrations.CreateModel(
- name="Release",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("version", models.CharField(max_length=250)),
- ("ref", models.CharField(max_length=250, null=True, blank=True)),
- ("url", models.URLField(null=True, blank=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_started", models.DateTimeField(null=True, blank=True)),
- ("date_released", models.DateTimeField(null=True, blank=True)),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
- (
- "new_groups",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
- ),
- (
- "commit_count",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, null=True
- ),
- ),
- (
- "last_commit_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("authors", sentry.db.models.fields.array.ArrayField(null=True)),
- (
- "total_deploys",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, null=True
- ),
- ),
- (
- "last_deploy_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- (
- "owner",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.SET_NULL,
- blank=True,
- to=settings.AUTH_USER_MODEL,
- null=True,
- ),
- ),
- ],
- options={"db_table": "sentry_release"},
- ),
- migrations.CreateModel(
- name="ReleaseCommit",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "commit",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Commit"),
- ),
- (
- "release",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- ],
- options={"db_table": "sentry_releasecommit"},
- ),
- migrations.CreateModel(
- name="ReleaseEnvironment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "last_seen",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Environment", db_constraint=False
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", db_constraint=False
- ),
- ),
- (
- "release",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Release", db_constraint=False
- ),
- ),
- ],
- options={"db_table": "sentry_environmentrelease"},
- ),
- migrations.CreateModel(
- name="ReleaseFile",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("project_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("ident", models.CharField(max_length=40)),
- ("name", models.TextField()),
- (
- "dist",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Distribution", null=True, db_constraint=False
- ),
- ),
- ("file", sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.File")),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", db_constraint=False
- ),
- ),
- (
- "release",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Release", db_constraint=False
- ),
- ),
- ],
- options={"db_table": "sentry_releasefile"},
- ),
- migrations.CreateModel(
- name="ReleaseHeadCommit",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("repository_id", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- (
- "commit",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Commit"),
- ),
- (
- "release",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- ],
- options={"db_table": "sentry_releaseheadcommit"},
- ),
- migrations.CreateModel(
- name="ReleaseProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "new_groups",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, null=True
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- (
- "release",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- ],
- options={"db_table": "sentry_release_project"},
- ),
- migrations.CreateModel(
- name="ReleaseProjectEnvironment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "new_issues_count",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
- ),
- ("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "last_seen",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- (
- "last_deploy_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, db_index=True
- ),
- ),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Environment"),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- (
- "release",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- ],
- options={"db_table": "sentry_releaseprojectenvironment"},
- ),
- migrations.CreateModel(
- name="Repository",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- ("name", models.CharField(max_length=200)),
- ("url", models.URLField(null=True)),
- ("provider", models.CharField(max_length=64, null=True)),
- ("external_id", models.CharField(max_length=64, null=True)),
- ("config", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- db_index=True,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "integration_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, db_index=True
- ),
- ),
- ],
- options={"db_table": "sentry_repository"},
- bases=(models.Model, sentry.db.mixin.PendingDeletionMixin),
- ),
- migrations.CreateModel(
- name="ReprocessingReport",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("event_id", models.CharField(max_length=32, null=True)),
- ("datetime", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_reprocessingreport"},
- ),
- migrations.CreateModel(
- name="Rule",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "environment_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- ("label", models.CharField(max_length=64)),
- ("data", sentry.db.models.fields.gzippeddict.GzippedDictField()),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, db_index=True, choices=[(0, "Active"), (1, "Inactive")]
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_rule"},
- ),
- migrations.CreateModel(
- name="SavedSearch",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("type", models.PositiveSmallIntegerField(default=0, null=True)),
- ("name", models.CharField(max_length=128)),
- ("query", models.TextField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("is_default", models.BooleanField(default=False)),
- ("is_global", models.NullBooleanField(default=False, db_index=True)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", null=True
- ),
- ),
- (
- "owner",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", null=True
- ),
- ),
- ],
- options={"db_table": "sentry_savedsearch"},
- ),
- migrations.CreateModel(
- name="SavedSearchUserDefault",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- (
- "savedsearch",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.SavedSearch"),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_savedsearch_userdefault"},
- ),
- migrations.CreateModel(
- name="ScheduledDeletion",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "guid",
- models.CharField(
- default=sentry.models.scheduledeletion.default_guid,
- unique=True,
- max_length=32,
- ),
- ),
- ("app_label", models.CharField(max_length=64)),
- ("model_name", models.CharField(max_length=64)),
- ("object_id", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "date_scheduled",
- models.DateTimeField(
- default=sentry.models.scheduledeletion.default_date_schedule
- ),
- ),
- ("actor_id", sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True)),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
- ("in_progress", models.BooleanField(default=False)),
- ("aborted", models.BooleanField(default=False)),
- ],
- options={"db_table": "sentry_scheduleddeletion"},
- ),
- migrations.CreateModel(
- name="ScheduledJob",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("name", models.CharField(max_length=128)),
- ("payload", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_scheduled", models.DateTimeField()),
- ],
- options={"db_table": "sentry_scheduledjob"},
- ),
- migrations.CreateModel(
- name="SentryApp",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_deleted", models.DateTimeField(null=True, blank=True)),
- (
- "scopes",
- bitfield.models.BitField(
- (
- ("project:read", "project:read"),
- ("project:write", "project:write"),
- ("project:admin", "project:admin"),
- ("project:releases", "project:releases"),
- ("team:read", "team:read"),
- ("team:write", "team:write"),
- ("team:admin", "team:admin"),
- ("event:read", "event:read"),
- ("event:write", "event:write"),
- ("event:admin", "event:admin"),
- ("org:read", "org:read"),
- ("org:write", "org:write"),
- ("org:admin", "org:admin"),
- ("member:read", "member:read"),
- ("member:write", "member:write"),
- ("member:admin", "member:admin"),
- ),
- default=None,
- ),
- ),
- ("scope_list", sentry.db.models.fields.array.ArrayField(null=True)),
- ("name", models.TextField()),
- ("slug", models.CharField(unique=True, max_length=64)),
- ("author", models.TextField(null=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, db_index=True, choices=[(0, "unpublished"), (1, "published")]
- ),
- ),
- (
- "uuid",
- models.CharField(default=sentry.models.sentryapp.default_uuid, max_length=64),
- ),
- ("redirect_url", models.URLField(null=True)),
- ("webhook_url", models.URLField()),
- ("is_alertable", models.BooleanField(default=False)),
- ("events", sentry.db.models.fields.array.ArrayField(null=True)),
- ("overview", models.TextField(null=True)),
- ("schema", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "application",
- models.OneToOneField(
- related_name="sentry_app",
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.ApiApplication",
- ),
- ),
- (
- "owner",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="owned_sentry_apps", to="sentry.Organization"
- ),
- ),
- (
- "proxy_user",
- models.OneToOneField(
- related_name="sentry_app",
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- ],
- options={"db_table": "sentry_sentryapp"},
- ),
- migrations.CreateModel(
- name="SentryAppAvatar",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ident", models.CharField(unique=True, max_length=32, db_index=True)),
- (
- "avatar_type",
- models.PositiveSmallIntegerField(
- default=0, choices=[(0, "letter_avatar"), (1, "upload")]
- ),
- ),
- (
- "file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.File",
- unique=True,
- db_constraint=False,
- ),
- ),
- (
- "sentry_app",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="avatar", to="sentry.SentryApp", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_sentryappavatar"},
- ),
- migrations.CreateModel(
- name="SentryAppComponent",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "uuid",
- sentry.db.models.fields.uuid.UUIDField(
- unique=True, max_length=32, editable=False
- ),
- ),
- ("type", models.CharField(max_length=64)),
- ("schema", sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict)),
- (
- "sentry_app",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="components", to="sentry.SentryApp"
- ),
- ),
- ],
- options={"db_table": "sentry_sentryappcomponent"},
- ),
- migrations.CreateModel(
- name="SentryAppInstallation",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_deleted", models.DateTimeField(null=True, blank=True)),
- (
- "uuid",
- models.CharField(
- default=sentry.models.sentryappinstallation.default_uuid, max_length=64
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "api_grant",
- models.OneToOneField(
- related_name="sentry_app_installation",
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.ApiGrant",
- ),
- ),
- (
- "authorization",
- models.OneToOneField(
- related_name="sentry_app_installation",
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.ApiAuthorization",
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_app_installations", to="sentry.Organization"
- ),
- ),
- (
- "sentry_app",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="installations", to="sentry.SentryApp"
- ),
- ),
- ],
- options={"db_table": "sentry_sentryappinstallation"},
- ),
- migrations.CreateModel(
- name="ServiceHook",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("guid", models.CharField(max_length=32, unique=True, null=True)),
- (
- "actor_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "organization_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- null=True, db_index=True
- ),
- ),
- ("url", models.URLField(max_length=512)),
- (
- "secret",
- sentry.db.models.fields.encrypted.EncryptedTextField(
- default=sentry.models.servicehook.generate_secret
- ),
- ),
- ("events", sentry.db.models.fields.array.ArrayField(null=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- db_index=True,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- (
- "version",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, choices=[(0, "0")]
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "application",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.ApiApplication", null=True
- ),
- ),
- ],
- options={"db_table": "sentry_servicehook"},
- ),
- migrations.CreateModel(
- name="ServiceHookProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(db_index=True),
- ),
- (
- "service_hook",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.ServiceHook"),
- ),
- ],
- options={"db_table": "sentry_servicehookproject"},
- ),
- migrations.CreateModel(
- name="TagKey",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
- ),
- ("key", models.CharField(max_length=32)),
- (
- "values_seen",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
- ),
- ("label", models.CharField(max_length=64, null=True)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "Visible"),
- (1, "Pending Deletion"),
- (2, "Deletion in Progress"),
- ],
- ),
- ),
- ],
- options={"db_table": "sentry_filterkey"},
- ),
- migrations.CreateModel(
- name="TagValue",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "project_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(
- null=True, db_index=True
- ),
- ),
- ("key", models.CharField(max_length=32)),
- ("value", models.CharField(max_length=200)),
- (
- "data",
- sentry.db.models.fields.gzippeddict.GzippedDictField(null=True, blank=True),
- ),
- (
- "times_seen",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(default=0),
- ),
- (
- "last_seen",
- models.DateTimeField(
- default=django.utils.timezone.now, null=True, db_index=True
- ),
- ),
- (
- "first_seen",
- models.DateTimeField(
- default=django.utils.timezone.now, null=True, db_index=True
- ),
- ),
- ],
- options={"db_table": "sentry_filtervalue"},
- ),
- migrations.CreateModel(
- name="Team",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("slug", models.SlugField()),
- ("name", models.CharField(max_length=64)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "Active"),
- (1, "Pending Deletion"),
- (2, "Deletion in Progress"),
- ],
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- ],
- options={"db_table": "sentry_team"},
- ),
- migrations.CreateModel(
- name="TeamAvatar",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ident", models.CharField(unique=True, max_length=32, db_index=True)),
- (
- "avatar_type",
- models.PositiveSmallIntegerField(
- default=0, choices=[(0, "letter_avatar"), (1, "upload")]
- ),
- ),
- (
- "file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.File",
- unique=True,
- db_constraint=False,
- ),
- ),
- (
- "team",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="avatar", to="sentry.Team", unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_teamavatar"},
- ),
- migrations.CreateModel(
- name="UserAvatar",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ident", models.CharField(unique=True, max_length=32, db_index=True)),
- (
- "avatar_type",
- models.PositiveSmallIntegerField(
- default=0, choices=[(0, "letter_avatar"), (1, "upload"), (2, "gravatar")]
- ),
- ),
- (
- "file",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.File",
- unique=True,
- db_constraint=False,
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="avatar", to=settings.AUTH_USER_MODEL, unique=True
- ),
- ),
- ],
- options={"db_table": "sentry_useravatar"},
- ),
- migrations.CreateModel(
- name="UserEmail",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("email", models.EmailField(max_length=75, verbose_name="email address")),
- (
- "validation_hash",
- models.CharField(
- default=sentry.models.useremail.default_validation_hash, max_length=32
- ),
- ),
- ("date_hash_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "is_verified",
- models.BooleanField(
- default=False,
- help_text="Designates whether this user has confirmed their email.",
- verbose_name="verified",
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="emails", to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_useremail"},
- ),
- migrations.CreateModel(
- name="UserIP",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("ip_address", models.GenericIPAddressField()),
- ("country_code", models.CharField(max_length=16, null=True)),
- ("region_code", models.CharField(max_length=16, null=True)),
- ("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_userip"},
- ),
- migrations.CreateModel(
- name="UserOption",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("key", models.CharField(max_length=64)),
- (
- "value",
- sentry.db.models.fields.encrypted.EncryptedPickledObjectField(editable=False),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", null=True
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", null=True
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_useroption"},
- ),
- migrations.CreateModel(
- name="UserPermission",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("permission", models.CharField(max_length=32)),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_userpermission"},
- ),
- migrations.CreateModel(
- name="UserReport",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "event_user_id",
- sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- ("event_id", models.CharField(max_length=32)),
- ("name", models.CharField(max_length=128)),
- ("email", models.EmailField(max_length=75)),
- ("comments", models.TextField()),
- (
- "date_added",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Environment", null=True
- ),
- ),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", null=True
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- ],
- options={"db_table": "sentry_userreport"},
- ),
- migrations.CreateModel(
- name="Widget",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("title", models.CharField(max_length=255)),
- (
- "display_type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "line"),
- (1, "area"),
- (2, "stacked_area"),
- (3, "bar"),
- (4, "pie"),
- (5, "table"),
- (6, "world_map"),
- (7, "percentage_area_chart"),
- ]
- ),
- ),
- ("display_options", sentry.db.models.fields.jsonfield.JSONField(default={})),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- (
- "dashboard",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Dashboard"),
- ),
- ],
- options={"db_table": "sentry_widget"},
- ),
- migrations.CreateModel(
- name="WidgetDataSource",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(0, "discover_saved_search")]
- ),
- ),
- ("name", models.CharField(max_length=255)),
- ("data", sentry.db.models.fields.jsonfield.JSONField(default={})),
- ("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "status",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "active"),
- (1, "disabled"),
- (2, "pending_deletion"),
- (3, "deletion_in_progress"),
- ],
- ),
- ),
- (
- "widget",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Widget"),
- ),
- ],
- options={"db_table": "sentry_widgetdatasource"},
- ),
- migrations.AlterUniqueTogether(
- name="tagvalue", unique_together={("project_id", "key", "value")}
- ),
- migrations.AlterIndexTogether(
- name="tagvalue", index_together={("project_id", "key", "last_seen")}
- ),
- migrations.AlterUniqueTogether(name="tagkey", unique_together={("project_id", "key")}),
- migrations.AlterUniqueTogether(
- name="scheduleddeletion",
- unique_together={("app_label", "model_name", "object_id")},
- ),
- migrations.AlterUniqueTogether(
- name="repository",
- unique_together={
- ("organization_id", "provider", "external_id"),
- ("organization_id", "name"),
- },
- ),
- migrations.AddField(
- model_name="release",
- name="projects",
- field=models.ManyToManyField(
- related_name="releases", through="sentry.ReleaseProject", to="sentry.Project"
- ),
- ),
- migrations.AddField(
- model_name="projectteam",
- name="team",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Team"),
- ),
- migrations.AlterUniqueTogether(
- name="projectplatform", unique_together={("project_id", "platform")}
- ),
- migrations.AddField(
- model_name="projectcficachefile",
- name="debug_file",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_column="dsym_file_id",
- on_delete=django.db.models.deletion.DO_NOTHING,
- to="sentry.ProjectDebugFile",
- ),
- ),
- migrations.AddField(
- model_name="projectcficachefile",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", null=True
- ),
- ),
- migrations.AddField(
- model_name="project",
- name="teams",
- field=models.ManyToManyField(
- related_name="teams", through="sentry.ProjectTeam", to="sentry.Team"
- ),
- ),
- migrations.AddField(
- model_name="processingissue",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AlterUniqueTogether(
- name="platformexternalissue", unique_together={("group_id", "service_type")}
- ),
- migrations.AddField(
- model_name="organizationmemberteam",
- name="team",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Team"),
- ),
- migrations.AddField(
- model_name="organizationmember",
- name="teams",
- field=models.ManyToManyField(
- to="sentry.Team", through="sentry.OrganizationMemberTeam", blank=True
- ),
- ),
- migrations.AddField(
- model_name="organizationmember",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_orgmember_set",
- blank=True,
- to=settings.AUTH_USER_MODEL,
- null=True,
- ),
- ),
- migrations.AddField(
- model_name="organizationaccessrequest",
- name="member",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.OrganizationMember"
- ),
- ),
- migrations.AddField(
- model_name="organizationaccessrequest",
- name="team",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Team"),
- ),
- migrations.AddField(
- model_name="organization",
- name="members",
- field=models.ManyToManyField(
- related_name="org_memberships",
- through="sentry.OrganizationMember",
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AddField(
- model_name="monitorcheckin",
- name="location",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.MonitorLocation", null=True
- ),
- ),
- migrations.AddField(
- model_name="monitorcheckin",
- name="monitor",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Monitor"),
- ),
- migrations.AlterIndexTogether(name="monitor", index_together={("type", "next_checkin")}),
- migrations.AlterUniqueTogether(
- name="latestrelease", unique_together={("repository_id", "environment_id")}
- ),
- migrations.AlterUniqueTogether(
- name="integrationexternalproject",
- unique_together={("organization_integration_id", "external_id")},
- ),
- migrations.AddField(
- model_name="integration",
- name="organizations",
- field=models.ManyToManyField(
- related_name="integrations",
- through="sentry.OrganizationIntegration",
- to="sentry.Organization",
- ),
- ),
- migrations.AddField(
- model_name="integration",
- name="projects",
- field=models.ManyToManyField(
- related_name="integrations",
- through="sentry.ProjectIntegration",
- to="sentry.Project",
- ),
- ),
- migrations.AlterUniqueTogether(
- name="identityprovider", unique_together={("type", "external_id")}
- ),
- migrations.AddField(
- model_name="identity",
- name="idp",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.IdentityProvider"
- ),
- ),
- migrations.AddField(
- model_name="identity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- migrations.AddField(
- model_name="grouptombstone",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AlterUniqueTogether(
- name="grouptagvalue", unique_together={("group_id", "key", "value")}
- ),
- migrations.AlterIndexTogether(
- name="grouptagvalue", index_together={("project_id", "key", "value", "last_seen")}
- ),
- migrations.AlterUniqueTogether(
- name="grouptagkey", unique_together={("project_id", "group_id", "key")}
- ),
- migrations.AddField(
- model_name="groupsubscription",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="subscription_set", to="sentry.Project"
- ),
- ),
- migrations.AddField(
- model_name="groupsubscription",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- migrations.AddField(
- model_name="groupshare",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AddField(
- model_name="groupshare",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- migrations.AddField(
- model_name="groupseen",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AddField(
- model_name="groupseen",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, db_index=False
- ),
- ),
- migrations.AddField(
- model_name="grouprulestatus",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AddField(
- model_name="grouprulestatus",
- name="rule",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Rule"),
- ),
- migrations.AddField(
- model_name="groupresolution",
- name="release",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- migrations.AlterUniqueTogether(
- name="grouprelease", unique_together={("group_id", "release_id", "environment")}
- ),
- migrations.AlterUniqueTogether(
- name="grouplink", unique_together={("group_id", "linked_type", "linked_id")}
- ),
- migrations.AddField(
- model_name="grouphash",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", null=True
- ),
- ),
- migrations.AddField(
- model_name="groupenvironment",
- name="first_release",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.DO_NOTHING,
- to="sentry.Release",
- null=True,
- ),
- ),
- migrations.AddField(
- model_name="groupenvironment",
- name="group",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", db_constraint=False
- ),
- ),
- migrations.AddField(
- model_name="groupemailthread",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="groupemail_set", to="sentry.Project"
- ),
- ),
- migrations.AlterUniqueTogether(
- name="groupcommitresolution", unique_together={("group_id", "commit_id")}
- ),
- migrations.AddField(
- model_name="groupbookmark",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="bookmark_set", to="sentry.Project"
- ),
- ),
- migrations.AddField(
- model_name="groupbookmark",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_bookmark_set", to=settings.AUTH_USER_MODEL
- ),
- ),
- migrations.AddField(
- model_name="groupassignee",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="assignee_set", to="sentry.Project"
- ),
- ),
- migrations.AddField(
- model_name="groupassignee",
- name="team",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_assignee_set", to="sentry.Team", null=True
- ),
- ),
- migrations.AddField(
- model_name="groupassignee",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_assignee_set", to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- migrations.AddField(
- model_name="group",
- name="first_release",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.PROTECT, to="sentry.Release", null=True
- ),
- ),
- migrations.AddField(
- model_name="group",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", null=True
- ),
- ),
- migrations.AddField(
- model_name="fileblobowner",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", db_constraint=False
- ),
- ),
- migrations.AddField(
- model_name="file",
- name="blob",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="legacy_blob", to="sentry.FileBlob", null=True
- ),
- ),
- migrations.AddField(
- model_name="file",
- name="blobs",
- field=models.ManyToManyField(to="sentry.FileBlob", through="sentry.FileBlobIndex"),
- ),
- migrations.AddField(
- model_name="featureadoption",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- migrations.AlterUniqueTogether(
- name="externalissue",
- unique_together={("organization_id", "integration_id", "key")},
- ),
- migrations.AlterUniqueTogether(
- name="eventuser", unique_together={("project_id", "hash"), ("project_id", "ident")}
- ),
- migrations.AlterIndexTogether(
- name="eventuser",
- index_together={
- ("project_id", "username"),
- ("project_id", "ip_address"),
- ("project_id", "email"),
- },
- ),
- migrations.AlterUniqueTogether(
- name="eventtag", unique_together={("event_id", "key_id", "value_id")}
- ),
- migrations.AlterIndexTogether(
- name="eventtag", index_together={("group_id", "key_id", "value_id")}
- ),
- migrations.AddField(
- model_name="eventprocessingissue",
- name="processing_issue",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.ProcessingIssue"
- ),
- ),
- migrations.AddField(
- model_name="eventprocessingissue",
- name="raw_event",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.RawEvent"),
- ),
- migrations.AlterUniqueTogether(
- name="eventmapping", unique_together={("project_id", "event_id")}
- ),
- migrations.AddField(
- model_name="eventattachment",
- name="file",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.File", db_constraint=False
- ),
- ),
- migrations.AlterUniqueTogether(name="event", unique_together={("project_id", "event_id")}),
- migrations.AlterIndexTogether(name="event", index_together={("group_id", "datetime")}),
- migrations.AddField(
- model_name="environmentproject",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AddField(
- model_name="environment",
- name="projects",
- field=models.ManyToManyField(to="sentry.Project", through="sentry.EnvironmentProject"),
- ),
- migrations.AddField(
- model_name="distribution",
- name="release",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- migrations.AddField(
- model_name="discoversavedqueryproject",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AddField(
- model_name="discoversavedquery",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- migrations.AddField(
- model_name="discoversavedquery",
- name="projects",
- field=models.ManyToManyField(
- to="sentry.Project", through="sentry.DiscoverSavedQueryProject"
- ),
- ),
- migrations.AddField(
- model_name="deploy",
- name="release",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Release"),
- ),
- migrations.AddField(
- model_name="dashboard",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- migrations.AddField(
- model_name="counter",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", unique=True
- ),
- ),
- migrations.AlterUniqueTogether(
- name="commitauthor",
- unique_together={("organization_id", "email"), ("organization_id", "external_id")},
- ),
- migrations.AddField(
- model_name="commit",
- name="author",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.CommitAuthor", null=True
- ),
- ),
- migrations.AddField(
- model_name="authprovider",
- name="default_teams",
- field=models.ManyToManyField(to="sentry.Team", blank=True),
- ),
- migrations.AddField(
- model_name="authprovider",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", unique=True
- ),
- ),
- migrations.AddField(
- model_name="authidentity",
- name="auth_provider",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.AuthProvider"),
- ),
- migrations.AddField(
- model_name="authidentity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- migrations.AddField(
- model_name="auditlogentry",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- migrations.AddField(
- model_name="auditlogentry",
- name="target_user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="audit_targets", blank=True, to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- migrations.AddField(
- model_name="apikey",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="key_set", to="sentry.Organization"
- ),
- ),
- migrations.AddField(
- model_name="activity",
- name="group",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Group", null=True
- ),
- ),
- migrations.AddField(
- model_name="activity",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AddField(
- model_name="activity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- migrations.AlterUniqueTogether(
- name="widgetdatasource", unique_together={("widget", "name"), ("widget", "order")}
- ),
- migrations.AlterUniqueTogether(
- name="widget", unique_together={("dashboard", "title"), ("dashboard", "order")}
- ),
- migrations.AlterUniqueTogether(
- name="userreport", unique_together={("project", "event_id")}
- ),
- migrations.AlterIndexTogether(
- name="userreport",
- index_together={("project", "date_added"), ("project", "event_id")},
- ),
- migrations.AlterUniqueTogether(
- name="userpermission", unique_together={("user", "permission")}
- ),
- migrations.AlterUniqueTogether(
- name="useroption",
- unique_together={("user", "project", "key"), ("user", "organization", "key")},
- ),
- migrations.AlterUniqueTogether(name="userip", unique_together={("user", "ip_address")}),
- migrations.AlterUniqueTogether(name="useremail", unique_together={("user", "email")}),
- migrations.AlterUniqueTogether(name="team", unique_together={("organization", "slug")}),
- migrations.AlterUniqueTogether(
- name="servicehookproject", unique_together={("service_hook", "project_id")}
- ),
- migrations.AlterUniqueTogether(
- name="savedsearchuserdefault", unique_together={("project", "user")}
- ),
- migrations.AlterUniqueTogether(
- name="savedsearch",
- unique_together={("organization", "owner", "type"), ("project", "name")},
- ),
- migrations.AlterUniqueTogether(
- name="reprocessingreport", unique_together={("project", "event_id")}
- ),
- migrations.AlterUniqueTogether(
- name="releaseprojectenvironment",
- unique_together={("project", "release", "environment")},
- ),
- migrations.AlterUniqueTogether(
- name="releaseproject", unique_together={("project", "release")}
- ),
- migrations.AlterUniqueTogether(
- name="releaseheadcommit", unique_together={("repository_id", "release")}
- ),
- migrations.AlterUniqueTogether(name="releasefile", unique_together={("release", "ident")}),
- migrations.AlterIndexTogether(name="releasefile", index_together={("release", "name")}),
- migrations.AlterUniqueTogether(
- name="releaseenvironment",
- unique_together={("organization", "release", "environment")},
- ),
- migrations.AlterUniqueTogether(
- name="releasecommit", unique_together={("release", "commit"), ("release", "order")}
- ),
- migrations.AlterUniqueTogether(
- name="release", unique_together={("organization", "version")}
- ),
- migrations.AlterUniqueTogether(
- name="recentsearch",
- unique_together={("user", "organization", "type", "query_hash")},
- ),
- migrations.AlterUniqueTogether(name="rawevent", unique_together={("project", "event_id")}),
- migrations.AlterUniqueTogether(
- name="pullrequestcommit", unique_together={("pull_request", "commit")}
- ),
- migrations.AlterUniqueTogether(
- name="pullrequest", unique_together={("repository_id", "key")}
- ),
- migrations.AlterIndexTogether(
- name="pullrequest",
- index_together={
- ("repository_id", "date_added"),
- ("organization_id", "merge_commit_sha"),
- },
- ),
- migrations.AlterUniqueTogether(
- name="promptsactivity",
- unique_together={("user", "feature", "organization_id", "project_id")},
- ),
- migrations.AlterUniqueTogether(name="projectteam", unique_together={("project", "team")}),
- migrations.AlterUniqueTogether(
- name="projectsymcachefile", unique_together={("project", "debug_file")}
- ),
- migrations.AlterUniqueTogether(
- name="projectredirect", unique_together={("organization", "redirect_slug")}
- ),
- migrations.AlterUniqueTogether(name="projectoption", unique_together={("project", "key")}),
- migrations.AlterUniqueTogether(
- name="projectintegration", unique_together={("project", "integration")}
- ),
- migrations.AlterIndexTogether(
- name="projectdebugfile",
- index_together={("project", "code_id"), ("project", "debug_id")},
- ),
- migrations.AlterUniqueTogether(
- name="projectcficachefile", unique_together={("project", "debug_file")}
- ),
- migrations.AlterUniqueTogether(
- name="projectbookmark", unique_together={("project", "user")}
- ),
- migrations.AlterUniqueTogether(name="project", unique_together={("organization", "slug")}),
- migrations.AlterUniqueTogether(
- name="processingissue", unique_together={("project", "checksum", "type")}
- ),
- migrations.AlterUniqueTogether(
- name="organizationoption", unique_together={("organization", "key")}
- ),
- migrations.AlterUniqueTogether(
- name="organizationonboardingtask", unique_together={("organization", "task")}
- ),
- migrations.AlterUniqueTogether(
- name="organizationmemberteam", unique_together={("team", "organizationmember")}
- ),
- migrations.AlterUniqueTogether(
- name="organizationmember",
- unique_together={("organization", "user"), ("organization", "email")},
- ),
- migrations.AlterUniqueTogether(
- name="organizationintegration", unique_together={("organization", "integration")}
- ),
- migrations.AlterUniqueTogether(
- name="organizationaccessrequest", unique_together={("team", "member")}
- ),
- migrations.AlterUniqueTogether(
- name="integration", unique_together={("provider", "external_id")}
- ),
- migrations.AlterUniqueTogether(
- name="identity", unique_together={("idp", "external_id"), ("idp", "user")}
- ),
- migrations.AlterUniqueTogether(
- name="groupsubscription", unique_together={("group", "user")}
- ),
- migrations.AlterUniqueTogether(name="groupseen", unique_together={("user", "group")}),
- migrations.AlterUniqueTogether(name="grouprulestatus", unique_together={("rule", "group")}),
- migrations.AlterUniqueTogether(name="groupmeta", unique_together={("group", "key")}),
- migrations.AlterUniqueTogether(name="grouphash", unique_together={("project", "hash")}),
- migrations.AlterUniqueTogether(
- name="groupenvironment", unique_together={("group", "environment")}
- ),
- migrations.AlterIndexTogether(
- name="groupenvironment", index_together={("environment", "first_release")}
- ),
- migrations.AlterUniqueTogether(
- name="groupemailthread", unique_together={("email", "msgid"), ("email", "group")}
- ),
- migrations.AlterUniqueTogether(
- name="groupbookmark", unique_together={("project", "user", "group")}
- ),
- migrations.AlterUniqueTogether(name="group", unique_together={("project", "short_id")}),
- migrations.AlterIndexTogether(name="group", index_together={("project", "first_release")}),
- migrations.AlterUniqueTogether(
- name="fileblobowner", unique_together={("blob", "organization")}
- ),
- migrations.AlterUniqueTogether(
- name="fileblobindex", unique_together={("file", "blob", "offset")}
- ),
- migrations.AlterUniqueTogether(
- name="featureadoption", unique_together={("organization", "feature_id")}
- ),
- migrations.AlterUniqueTogether(
- name="eventprocessingissue", unique_together={("raw_event", "processing_issue")}
- ),
- migrations.AlterUniqueTogether(
- name="eventattachment", unique_together={("project_id", "event_id", "file")}
- ),
- migrations.AlterIndexTogether(
- name="eventattachment", index_together={("project_id", "date_added")}
- ),
- migrations.AlterUniqueTogether(
- name="environmentproject", unique_together={("project", "environment")}
- ),
- migrations.AlterUniqueTogether(
- name="environment", unique_together={("organization_id", "name")}
- ),
- migrations.AlterUniqueTogether(name="distribution", unique_together={("release", "name")}),
- migrations.AlterUniqueTogether(
- name="discoversavedqueryproject",
- unique_together={("project", "discover_saved_query")},
- ),
- migrations.AlterUniqueTogether(
- name="dashboard", unique_together={("organization", "title")}
- ),
- migrations.AlterUniqueTogether(
- name="commitfilechange", unique_together={("commit", "filename")}
- ),
- migrations.AlterUniqueTogether(name="commit", unique_together={("repository_id", "key")}),
- migrations.AlterIndexTogether(
- name="commit", index_together={("repository_id", "date_added")}
- ),
- migrations.AlterUniqueTogether(
- name="broadcastseen", unique_together={("broadcast", "user")}
- ),
- migrations.AlterUniqueTogether(
- name="authidentity",
- unique_together={("auth_provider", "ident"), ("auth_provider", "user")},
- ),
- migrations.AlterUniqueTogether(name="authenticator", unique_together={("user", "type")}),
- migrations.AlterUniqueTogether(
- name="assistantactivity", unique_together={("user", "guide_id")}
- ),
- migrations.AlterUniqueTogether(
- name="apiauthorization", unique_together={("user", "application")}
- ),
- # XXX(dcramer): these are manually generated and ported from South
- migrations.RunSQL(
- """
- create or replace function sentry_increment_project_counter(
- project bigint, delta int) returns int as $$
- declare
- new_val int;
- begin
- loop
- update sentry_projectcounter set value = value + delta
- where project_id = project
- returning value into new_val;
- if found then
- return new_val;
- end if;
- begin
- insert into sentry_projectcounter(project_id, value)
- values (project, delta)
- returning value into new_val;
- return new_val;
- exception when unique_violation then
- end;
- end loop;
- end
- $$ language plpgsql;
- """,
- hints={"tables": ["sentry_projectcounter"]},
- ),
- migrations.RunSQL(
- """
- CREATE UNIQUE INDEX sentry_savedsearch_is_global_6793a2f9e1b59b95
- ON sentry_savedsearch USING btree (is_global, name)
- WHERE is_global
- """,
- hints={"tables": ["sentry_savedsearch"]},
- ),
- migrations.RunSQL(
- """
- CREATE UNIQUE INDEX sentry_savedsearch_organization_id_313a24e907cdef99
- ON sentry_savedsearch USING btree (organization_id, name, type)
- WHERE (owner_id IS NULL);
- """,
- hints={"tables": ["sentry_savedsearch"]},
- ),
- ]
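The first RunSQL block above installs sentry_increment_project_counter, a PL/pgSQL increment-or-create loop: it first UPDATEs the existing counter row and, when no row is found, INSERTs one, swallowing a concurrent unique_violation and retrying the loop so the increment stays atomic under contention. A minimal sketch of calling it from Django follows; the helper name increment_project_counter is hypothetical, only the SQL function name comes from the migration above.

    from django.db import connection

    def increment_project_counter(project_id: int, delta: int = 1) -> int:
        # Hypothetical helper: delegates the atomic update-or-insert to the
        # sentry_increment_project_counter() function created by the RunSQL
        # operation above and returns the new counter value.
        with connection.cursor() as cursor:
            cursor.execute(
                "select sentry_increment_project_counter(%s, %s)",
                [project_id, delta],
            )
            return cursor.fetchone()[0]

The two partial CREATE UNIQUE INDEX statements after it enforce uniqueness only over rows matching their WHERE clauses (global saved searches by name, and org-level saved searches without an owner), a constraint Django's unique_together could not express at the time.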
diff --git a/src/sentry/migrations/0001_squashed_0200_release_indices.py b/src/sentry/migrations/0001_squashed_0200_release_indices.py
index 89cadbdfcd9556..8ce444ae5bd3dd 100644
--- a/src/sentry/migrations/0001_squashed_0200_release_indices.py
+++ b/src/sentry/migrations/0001_squashed_0200_release_indices.py
@@ -73,213 +73,6 @@ class Migration(migrations.Migration):
# transaction.
atomic = False
- replaces = [
- ("sentry", "0001_initial"),
- ("sentry", "0002_912_to_recent"),
- ("sentry", "0003_auto_20191022_0122"),
- (
- "sentry",
- "0004_bitfieldtestmodel_blankjsonfieldtestmodel_callabledefaultmodel_jsonfieldtestmodel_jsonfieldwithdefau",
- ),
- ("sentry", "0005_fix_content_types"),
- ("sentry", "0006_sentryapp_date_published"),
- ("sentry", "0007_auto_20191029_0131"),
- ("sentry", "0008_auto_20191030_0016"),
- ("sentry", "0009_auto_20191101_1608"),
- ("sentry", "0010_auto_20191104_1641"),
- ("sentry", "0011_remove_pagerdutyservice_service_id_from_state"),
- ("sentry", "0012_remove_pagerdutyservice_service_id"),
- ("sentry", "0013_auto_20191111_1829"),
- ("sentry", "0014_delete_sentryappwebhookerror"),
- ("sentry", "0015_delete_sentryappwebhookerror_db"),
- ("sentry", "0016_delete_alert_rule_deprecated_fields"),
- ("sentry", "0017_incident_aggregation"),
- ("sentry", "0018_discoversavedquery_version"),
- ("sentry", "0019_auto_20191114_2040"),
- ("sentry", "0020_auto_20191125_1420"),
- ("sentry", "0021_auto_20191203_1803"),
- ("sentry", "0021_auto_20191202_1716"),
- ("sentry", "0022_merge"),
- ("sentry", "0023_hide_environment_none_20191126"),
- ("sentry", "0024_auto_20191230_2052"),
- ("sentry", "0025_organizationaccessrequest_requester"),
- ("sentry", "0026_delete_event"),
- ("sentry", "0027_exporteddata"),
- ("sentry", "0028_user_reports"),
- ("sentry", "0029_discover_query_upgrade"),
- ("sentry", "0030_auto_20200201_0039"),
- ("sentry", "0031_delete_alert_rules_and_incidents"),
- ("sentry", "0032_delete_alert_email"),
- ("sentry", "0033_auto_20200210_2137"),
- ("sentry", "0034_auto_20200210_2311"),
- ("sentry", "0035_auto_20200127_1711"),
- ("sentry", "0036_auto_20200213_0106"),
- ("sentry", "0037_auto_20200213_0140"),
- ("sentry", "0038_auto_20200213_1904"),
- ("sentry", "0039_delete_incidentsuspectcommit"),
- ("sentry", "0040_remove_incidentsuspectcommittable"),
- ("sentry", "0041_incidenttrigger_date_modified"),
- ("sentry", "0042_auto_20200214_1607"),
- ("sentry", "0043_auto_20200218_1903"),
- ("sentry", "0044_auto_20200219_0018"),
- ("sentry", "0045_remove_incidentactivity_event_stats_snapshot"),
- ("sentry", "0046_auto_20200221_1735"),
- ("sentry", "0047_auto_20200224_2319"),
- ("sentry", "0048_auto_20200302_1825"),
- ("sentry", "0049_auto_20200304_0254"),
- ("sentry", "0050_auto_20200306_2346"),
- ("sentry", "0051_fix_auditlog_pickled_data"),
- ("sentry", "0052_organizationonboardingtask_completion_seen"),
- ("sentry", "0053_migrate_alert_task_onboarding"),
- ("sentry", "0054_create_key_transaction"),
- ("sentry", "0055_query_subscription_status"),
- ("sentry", "0056_remove_old_functions"),
- ("sentry", "0057_remove_unused_project_flag"),
- ("sentry", "0058_project_issue_alerts_targeting"),
- ("sentry", "0059_add_new_sentry_app_features"),
- ("sentry", "0060_add_file_eventattachment_index"),
- ("sentry", "0061_alertrule_partial_index"),
- ("sentry", "0062_key_transactions_unique_with_owner"),
- ("sentry", "0063_drop_alertrule_constraint"),
- ("sentry", "0064_project_has_transactions"),
- ("sentry", "0065_add_incident_status_method"),
- ("sentry", "0066_alertrule_manager"),
- ("sentry", "0067_migrate_rules_alert_targeting"),
- ("sentry", "0068_project_default_flags"),
- ("sentry", "0069_remove_tracked_superusers"),
- ("sentry", "0070_incident_snapshot_support"),
- ("sentry", "0071_add_default_fields_model_subclass"),
- ("sentry", "0072_alert_rules_query_changes"),
- ("sentry", "0073_migrate_alert_query_model"),
- ("sentry", "0074_add_metric_alert_feature"),
- ("sentry", "0075_metric_alerts_fix_releases"),
- ("sentry", "0076_alert_rules_disable_constraints"),
- ("sentry", "0077_alert_query_col_drop_state"),
- ("sentry", "0078_incident_field_updates"),
- ("sentry", "0079_incidents_remove_query_field_state"),
- ("sentry", "0080_alert_rules_drop_unused_tables_cols"),
- ("sentry", "0081_add_integraiton_upgrade_audit_log"),
- ("sentry", "0082_alert_rules_threshold_float"),
- ("sentry", "0083_add_max_length_webhook_url"),
- ("sentry", "0084_exported_data_blobs"),
- ("sentry", "0085_fix_error_rate_snuba_query"),
- ("sentry", "0086_sentry_app_installation_for_provider"),
- ("sentry", "0087_fix_time_series_data_type"),
- ("sentry", "0088_rule_level_resolve_threshold_type"),
- ("sentry", "0089_rule_level_fields_backfill"),
- ("sentry", "0090_fix_auditlog_pickled_data_take_2"),
- ("sentry", "0091_alertruleactivity"),
- ("sentry", "0092_remove_trigger_threshold_type_nullable"),
- ("sentry", "0093_make_identity_user_id_textfield"),
- ("sentry", "0094_cleanup_unreferenced_event_files"),
- ("sentry", "0095_ruleactivity"),
- ("sentry", "0096_sentry_app_component_skip_load_on_open"),
- ("sentry", "0097_add_sentry_app_id_to_sentry_alertruletriggeraction"),
- ("sentry", "0098_add-performance-onboarding"),
- ("sentry", "0099_fix_project_platforms"),
- ("sentry", "0100_file_type_on_event_attachment"),
- ("sentry", "0101_backfill_file_type_on_event_attachment"),
- ("sentry", "0102_collect_relay_analytics"),
- ("sentry", "0103_project_has_alert_filters"),
- ("sentry", "0104_collect_relay_public_key_usage"),
- ("sentry", "0105_remove_nullability_of_event_attachment_type"),
- ("sentry", "0106_service_hook_project_id_nullable"),
- ("sentry", "0107_remove_spaces_from_slugs"),
- ("sentry", "0108_update_fileblob_action"),
- ("sentry", "0109_sentry_app_creator"),
- ("sentry", "0110_sentry_app_creator_backill"),
- ("sentry", "0111_snuba_query_event_type"),
- ("sentry", "0112_groupinboxmodel"),
- ("sentry", "0113_add_repositoryprojectpathconfig"),
- ("sentry", "0114_add_unhandled_savedsearch"),
- ("sentry", "0115_add_checksum_to_debug_file"),
- ("sentry", "0116_backfill_debug_file_checksum"),
- ("sentry", "0117_dummy-activityupdate"),
- ("sentry", "0118_backfill_snuba_query_event_types"),
- ("sentry", "0119_fix_set_none"),
- ("sentry", "0120_commit_author_charfield"),
- ("sentry", "0121_obliterate_group_inbox"),
- ("sentry", "0122_add_release_status"),
- ("sentry", "0123_groupinbox_addprojandorg"),
- ("sentry", "0124_add_release_status_model"),
- ("sentry", "0125_add_platformexternalissue_project_id"),
- ("sentry", "0126_make_platformexternalissue_group_id_flexfk"),
- ("sentry", "0127_backfill_platformexternalissue_project_id"),
- ("sentry", "0128_change_dashboards"),
- ("sentry", "0129_remove_dashboard_keys"),
- ("sentry", "0130_remove_old_widget_models"),
- ("sentry", "0131_drop_widget_tables"),
- ("sentry", "0132_groupownermodel"),
- ("sentry", "0133_dashboard_delete_object_status"),
- ("sentry", "0134_dashboard_drop_object_status_column"),
- ("sentry", "0135_removinguniquegroupownerconstraint"),
- ("sentry", "0136_issue_alert_filter_all_orgs"),
- ("sentry", "0137_dashboard_widget_interval"),
- ("sentry", "0138_widget_query_remove_interval"),
- ("sentry", "0139_remove_widgetquery_interval"),
- ("sentry", "0140_subscription_checker"),
- ("sentry", "0141_remove_widget_constraints"),
- ("sentry", "0142_add_dashboard_tombstone"),
- ("sentry", "0143_add_alerts_integrationfeature"),
- ("sentry", "0144_add_publish_request_inprogress_status"),
- ("sentry", "0145_rename_alert_rule_feature"),
- ("sentry", "0146_backfill_members_alert_write"),
- ("sentry", "0147_add_groupinbox_date_added_index"),
- ("sentry", "0148_group_id_bigint"),
- ("sentry", "0149_bigint"),
- ("sentry", "0150_remove_userreport_eventattachment_constraints"),
- ("sentry", "0151_add_world_map_dashboard_widget_type"),
- ("sentry", "0152_remove_slack_workspace_orgintegrations"),
- ("sentry", "0153_add_big_number_dashboard_widget_type"),
- ("sentry", "0154_groupedmessage_inbox_sort"),
- ("sentry", "0155_add_dashboard_query_orderby"),
- ("sentry", "0156_add_mark_reviewed_activity"),
- ("sentry", "0157_make_repositoryprojectpathconfig_organization_integration_nullable"),
- ("sentry", "0158_create_externalteam_table"),
- ("sentry", "0159_create_externaluser_table"),
- ("sentry", "0160_create_projectcodeowners_table"),
- ("sentry", "0161_add_saved_search_sort"),
- ("sentry", "0162_backfill_saved_search_sort"),
- ("sentry", "0163_add_organizationmember_and_external_name"),
- ("sentry", "0164_add_protect_on_delete_codeowners"),
- ("sentry", "0165_metric_alerts_fix_group_ids"),
- ("sentry", "0166_create_notificationsetting_table"),
- ("sentry", "0167_rm_organization_integration_from_projectcodeowners"),
- ("sentry", "0168_demo_orgs_users"),
- ("sentry", "0169_delete_organization_integration_from_projectcodeowners"),
- ("sentry", "0170_actor_introduction"),
- ("sentry", "0171_backfill_actors"),
- ("sentry", "0172_rule_owner_fields"),
- ("sentry", "0173_remove_demo_flag"),
- ("sentry", "0174_my_issues_saved_search"),
- ("sentry", "0175_make_targets_nullable"),
- ("sentry", "0176_remove_targets"),
- ("sentry", "0177_drop_targets"),
- ("sentry", "0178_add_new_target_column"),
- ("sentry", "0179_update_legacy_discover_saved_query_timestamps"),
- ("sentry", "0180_add_saved_search_sorts"),
- ("sentry", "0181_copy_useroptions_to_notificationsettings"),
- ("sentry", "0182_update_user_misery_on_saved_queries"),
- ("sentry", "0183_make_codemapping_unique_on_projectcodeowners"),
- ("sentry", "0184_copy_useroptions_to_notificationsettings_2"),
- ("sentry", "0185_rm_copied_useroptions"),
- ("sentry", "0186_add_externalactor"),
- ("sentry", "0187_backfill_me_or_none"),
- ("sentry", "0188_remove_externalteam_externaluser_fk_constraints"),
- ("sentry", "0189_remove_externaluser_externalteam_models"),
- ("sentry", "0190_drop_external_user_table"),
- ("sentry", "0191_make_externalactor_integration_id_not_null"),
- ("sentry", "0192_remove_fileblobowner_org_fk"),
- ("sentry", "0193_grouprelease_indexes"),
- ("sentry", "0194_add_custom_scm_provider"),
- ("sentry", "0195_add_team_key_transactions"),
- ("sentry", "0196_add_restricted_member_limit"),
- ("sentry", "0197_add_scim_enabled_boolean"),
- ("sentry", "0198_add_project_transaction_threshold"),
- ("sentry", "0199_release_semver"),
- ("sentry", "0200_release_indices"),
- ]
-
initial = True
dependencies = []
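Dropping the replaces list is the standard final step of a Django squash: once no running deployment still needs the pre-squash history, the replaced migration files can be deleted (as the rest of this diff does) and the squashed migration becomes an ordinary initial migration. A sketch of the resulting skeleton, with the operations elided:

    from django.db import migrations

    class Migration(migrations.Migration):
        # With `replaces` removed, this is now the real initial migration.
        initial = True
        # Some operations cannot run inside a single transaction.
        atomic = False
        dependencies = []
        operations = [
            # CreateModel / AddField / RunSQL operations elided
        ]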
diff --git a/src/sentry/migrations/0002_912_to_recent.py b/src/sentry/migrations/0002_912_to_recent.py
deleted file mode 100644
index d8fb4bb75b600b..00000000000000
--- a/src/sentry/migrations/0002_912_to_recent.py
+++ /dev/null
@@ -1,997 +0,0 @@
-import django.db.models.deletion
-import django.utils.timezone
-from django.conf import settings
-from django.db import migrations, models
-
-import sentry.db.models.fields.array
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.encrypted
-import sentry.db.models.fields.foreignkey
-import sentry.db.models.fields.jsonfield
-import sentry.db.models.fields.uuid
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0001_initial")]
-
- operations = [
- migrations.CreateModel(
- name="AlertRule",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("name", models.TextField()),
- ("status", models.SmallIntegerField(default=0)),
- ("dataset", models.TextField()),
- ("query", models.TextField()),
- ("include_all_projects", models.BooleanField(default=False)),
- ("aggregation", models.IntegerField(default=0)),
- ("time_window", models.IntegerField()),
- ("resolution", models.IntegerField()),
- ("threshold_type", models.SmallIntegerField(null=True)),
- ("alert_threshold", models.IntegerField(null=True)),
- ("resolve_threshold", models.IntegerField(null=True)),
- ("threshold_period", models.IntegerField()),
- ("date_modified", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_alertrule"},
- ),
- migrations.CreateModel(
- name="AlertRuleExcludedProjects",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.AlertRule", db_index=False
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", db_constraint=False
- ),
- ),
- ],
- options={"db_table": "sentry_alertruleexcludedprojects"},
- ),
- migrations.CreateModel(
- name="AlertRuleQuerySubscription",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.AlertRule"),
- ),
- ],
- options={"db_table": "sentry_alertrulequerysubscription"},
- ),
- migrations.CreateModel(
- name="AlertRuleTrigger",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("label", models.TextField()),
- ("threshold_type", models.SmallIntegerField()),
- ("alert_threshold", models.IntegerField()),
- ("resolve_threshold", models.IntegerField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.AlertRule"),
- ),
- ],
- options={"db_table": "sentry_alertruletrigger"},
- ),
- migrations.CreateModel(
- name="AlertRuleTriggerExclusion",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "alert_rule_trigger",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="exclusions", to="sentry.AlertRuleTrigger"
- ),
- ),
- ],
- options={"db_table": "sentry_alertruletriggerexclusion"},
- ),
- migrations.CreateModel(
- name="Incident",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("identifier", models.IntegerField()),
- (
- "detection_uuid",
- sentry.db.models.fields.uuid.UUIDField(max_length=32, null=True, db_index=True),
- ),
- ("status", models.PositiveSmallIntegerField(default=1)),
- ("type", models.PositiveSmallIntegerField(default=1)),
- ("title", models.TextField()),
- ("query", models.TextField()),
- ("date_started", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_detected", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_closed", models.DateTimeField(null=True)),
- (
- "alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.AlertRule",
- null=True,
- ),
- ),
- ],
- options={"db_table": "sentry_incident"},
- ),
- migrations.CreateModel(
- name="IncidentActivity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("type", models.IntegerField()),
- ("value", models.TextField(null=True)),
- ("previous_value", models.TextField(null=True)),
- ("comment", models.TextField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_incidentactivity"},
- ),
- migrations.CreateModel(
- name="IncidentGroup",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- )
- ],
- options={"db_table": "sentry_incidentgroup"},
- ),
- migrations.CreateModel(
- name="IncidentProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "incident",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Incident"),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False, to="sentry.Project", db_index=False
- ),
- ),
- ],
- options={"db_table": "sentry_incidentproject"},
- ),
- migrations.CreateModel(
- name="IncidentSeen",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "incident",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Incident"),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, db_index=False
- ),
- ),
- ],
- options={"db_table": "sentry_incidentseen"},
- ),
- migrations.CreateModel(
- name="IncidentSnapshot",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("unique_users", models.IntegerField()),
- ("total_events", models.IntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_incidentsnapshot"},
- ),
- migrations.CreateModel(
- name="IncidentSubscription",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "incident",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Incident", db_index=False
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_incidentsubscription"},
- ),
- migrations.CreateModel(
- name="IncidentSuspectCommit",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("order", models.SmallIntegerField()),
- (
- "commit",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Commit", db_constraint=False
- ),
- ),
- (
- "incident",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Incident", db_index=False
- ),
- ),
- ],
- options={"db_table": "sentry_incidentsuspectcommit"},
- ),
- migrations.CreateModel(
- name="IncidentTrigger",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("status", models.SmallIntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "alert_rule_trigger",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.AlertRuleTrigger"
- ),
- ),
- (
- "incident",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Incident", db_index=False
- ),
- ),
- ],
- options={"db_table": "sentry_incidenttrigger"},
- ),
- migrations.CreateModel(
- name="IntegrationFeature",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("user_description", models.TextField(null=True)),
- (
- "feature",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- choices=[
- (0, "integrations-api"),
- (1, "integrations-issue-link"),
- (2, "integrations-stacktrace-link"),
- (3, "integrations-event-hooks"),
- ],
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_integrationfeature"},
- ),
- migrations.CreateModel(
- name="PagerDutyService",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("integration_key", models.CharField(max_length=255)),
- ("service_id", models.CharField(max_length=255)),
- ("service_name", models.CharField(max_length=255)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "organization_integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.OrganizationIntegration"
- ),
- ),
- ],
- options={"db_table": "sentry_pagerdutyservice"},
- ),
- migrations.CreateModel(
- name="PagerDutyServiceProject",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("integration_key", models.CharField(max_length=255, null=True)),
- ("service_id", models.CharField(max_length=255, null=True)),
- ("service_name", models.CharField(max_length=255, null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "organization_integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.OrganizationIntegration", null=True
- ),
- ),
- (
- "pagerduty_service",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.PagerDutyService"
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False, to="sentry.Project", db_index=False
- ),
- ),
- ],
- options={"db_table": "sentry_pagerdutyserviceproject"},
- ),
- migrations.CreateModel(
- name="QuerySubscription",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("type", models.TextField()),
- ("subscription_id", models.TextField(unique=True)),
- ("dataset", models.TextField()),
- ("query", models.TextField()),
- ("aggregation", models.IntegerField(default=0)),
- ("time_window", models.IntegerField()),
- ("resolution", models.IntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Project", db_constraint=False
- ),
- ),
- ],
- options={"db_table": "sentry_querysubscription"},
- ),
- migrations.CreateModel(
- name="SentryAppInstallationToken",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "api_token",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.ApiToken"),
- ),
- ],
- options={"db_table": "sentry_sentryappinstallationtoken"},
- ),
- migrations.CreateModel(
- name="SentryAppWebhookError",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- (
- "date_added",
- models.DateTimeField(default=django.utils.timezone.now, db_index=True),
- ),
- (
- "request_body",
- sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict),
- ),
- (
- "request_headers",
- sentry.db.models.fields.encrypted.EncryptedJsonField(default=dict),
- ),
- ("event_type", models.CharField(max_length=64)),
- ("webhook_url", models.URLField()),
- ("response_body", models.TextField()),
- ("response_code", models.PositiveSmallIntegerField()),
- ],
- options={"db_table": "sentry_sentryappwebhookerror"},
- ),
- migrations.CreateModel(
- name="TimeSeriesSnapshot",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("start", models.DateTimeField()),
- ("end", models.DateTimeField()),
- ("values", sentry.db.models.fields.array.ArrayField(null=True)),
- ("period", models.IntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_timeseriessnapshot"},
- ),
- migrations.DeleteModel(name="EventMapping"),
- migrations.DeleteModel(name="EventTag"),
- migrations.DeleteModel(name="GroupTagKey"),
- migrations.DeleteModel(name="GroupTagValue"),
- # migrations.AlterUniqueTogether(
- # name='projectcficachefile',
- # unique_together=set([]),
- # ),
- migrations.RemoveField(model_name="projectcficachefile", name="cache_file"),
- migrations.RemoveField(model_name="projectcficachefile", name="debug_file"),
- migrations.RemoveField(model_name="projectcficachefile", name="project"),
- migrations.AlterUniqueTogether(name="projectsymcachefile", unique_together=set()),
- migrations.RemoveField(model_name="projectsymcachefile", name="cache_file"),
- migrations.RemoveField(model_name="projectsymcachefile", name="debug_file"),
- migrations.RemoveField(model_name="projectsymcachefile", name="project"),
- migrations.DeleteModel(name="TagKey"),
- migrations.DeleteModel(name="TagValue"),
- migrations.RemoveField(model_name="sentryappinstallation", name="authorization"),
- migrations.AddField(
- model_name="broadcast",
- name="cta",
- field=models.CharField(max_length=256, null=True, blank=True),
- ),
- migrations.AddField(
- model_name="groupredirect",
- name="organization_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- migrations.AddField(
- model_name="groupredirect",
- name="previous_project_slug",
- field=models.SlugField(null=True),
- ),
- migrations.AddField(
- model_name="groupredirect",
- name="previous_short_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- migrations.AddField(
- model_name="organizationmember",
- name="invite_status",
- field=models.PositiveSmallIntegerField(
- default=0,
- null=True,
- choices=[
- (0, "Approved"),
- (1, "Organization member requested to invite user"),
- (2, "User requested to join organization"),
- ],
- ),
- ),
- migrations.AddField(
- model_name="organizationmember",
- name="inviter",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_inviter_set",
- blank=True,
- to=settings.AUTH_USER_MODEL,
- null=True,
- ),
- ),
- migrations.AddField(
- model_name="projectownership",
- name="auto_assignment",
- field=models.BooleanField(default=False),
- ),
- migrations.AddField(
- model_name="sentryapp", name="verify_install", field=models.BooleanField(default=True)
- ),
- migrations.AddField(
- model_name="sentryappinstallation",
- name="api_token",
- field=models.OneToOneField(
- related_name="sentry_app_installation",
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.ApiToken",
- ),
- ),
- migrations.AddField(
- model_name="sentryappinstallation",
- name="status",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0, db_index=True, choices=[(0, "pending"), (1, "installed")]
- ),
- ),
- migrations.AlterField(
- model_name="auditlogentry",
- name="event",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "member.invite"),
- (2, "member.add"),
- (3, "member.accept-invite"),
- (5, "member.remove"),
- (4, "member.edit"),
- (6, "member.join-team"),
- (7, "member.leave-team"),
- (8, "member.pending"),
- (20, "team.create"),
- (21, "team.edit"),
- (22, "team.remove"),
- (30, "project.create"),
- (31, "project.edit"),
- (32, "project.remove"),
- (33, "project.set-public"),
- (34, "project.set-private"),
- (35, "project.request-transfer"),
- (36, "project.accept-transfer"),
- (37, "project.enable"),
- (38, "project.disable"),
- (10, "org.create"),
- (11, "org.edit"),
- (12, "org.remove"),
- (13, "org.restore"),
- (40, "tagkey.remove"),
- (50, "projectkey.create"),
- (51, "projectkey.edit"),
- (52, "projectkey.remove"),
- (53, "projectkey.enable"),
- (53, "projectkey.disable"),
- (60, "sso.enable"),
- (61, "sso.disable"),
- (62, "sso.edit"),
- (63, "sso-identity.link"),
- (70, "api-key.create"),
- (71, "api-key.edit"),
- (72, "api-key.remove"),
- (80, "rule.create"),
- (81, "rule.edit"),
- (82, "rule.remove"),
- (100, "servicehook.create"),
- (101, "servicehook.edit"),
- (102, "servicehook.remove"),
- (103, "servicehook.enable"),
- (104, "servicehook.disable"),
- (110, "integration.add"),
- (111, "integration.edit"),
- (112, "integration.remove"),
- (113, "sentry-app.add"),
- (115, "sentry-app.remove"),
- (116, "sentry-app.install"),
- (117, "sentry-app.uninstall"),
- (130, "internal-integration.create"),
- (135, "internal-integration.add-token"),
- (136, "internal-integration.remove-token"),
- (90, "ondemand.edit"),
- (91, "trial.started"),
- (92, "plan.changed"),
- (93, "plan.cancelled"),
- ]
- ),
- ),
- migrations.AlterField(
- model_name="commitfilechange", name="filename", field=models.CharField(max_length=255)
- ),
- migrations.AlterField(
- model_name="discoversavedquery",
- name="query",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="externalissue",
- name="metadata",
- field=sentry.db.models.fields.jsonfield.JSONField(null=True),
- ),
- migrations.AlterField(
- model_name="featureadoption",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="file",
- name="headers",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="group",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Project"),
- ),
- migrations.AlterField(
- model_name="grouplink",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="groupsnooze",
- name="state",
- field=sentry.db.models.fields.jsonfield.JSONField(null=True),
- ),
- migrations.AlterField(
- model_name="organization",
- name="default_role",
- field=models.CharField(
- default="member",
- max_length=32,
- choices=[
- ("member", "Member"),
- ("admin", "Admin"),
- ("manager", "Manager"),
- ("owner", "Organization Owner"),
- ],
- ),
- ),
- migrations.AlterField(
- model_name="organizationmember",
- name="role",
- field=models.CharField(
- default="member",
- max_length=32,
- choices=[
- ("member", "Member"),
- ("admin", "Admin"),
- ("manager", "Manager"),
- ("owner", "Organization Owner"),
- ],
- ),
- ),
- migrations.AlterField(
- model_name="organizationonboardingtask",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="projectdebugfile",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(null=True),
- ),
- migrations.AlterField(
- model_name="projectkey",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="projectownership",
- name="schema",
- field=sentry.db.models.fields.jsonfield.JSONField(null=True),
- ),
- migrations.AlterField(
- model_name="promptsactivity",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default={}),
- ),
- migrations.AlterField(
- model_name="release",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default={}),
- ),
- migrations.AlterField(
- model_name="release",
- name="project_id",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- migrations.AlterField(
- model_name="releasefile",
- name="project_id",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(null=True),
- ),
- migrations.AlterField(
- model_name="repository",
- name="config",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="scheduleddeletion",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default={}),
- ),
- migrations.AlterField(
- model_name="scheduledjob",
- name="payload",
- field=sentry.db.models.fields.jsonfield.JSONField(default=dict),
- ),
- migrations.AlterField(
- model_name="sentryapp",
- name="status",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- default=0,
- db_index=True,
- choices=[(0, "unpublished"), (1, "published"), (2, "internal")],
- ),
- ),
- migrations.AlterField(
- model_name="sentryapp", name="webhook_url", field=models.URLField(null=True)
- ),
- migrations.AlterField(
- model_name="widget",
- name="display_options",
- field=sentry.db.models.fields.jsonfield.JSONField(default={}),
- ),
- migrations.AlterField(
- model_name="widgetdatasource",
- name="data",
- field=sentry.db.models.fields.jsonfield.JSONField(default={}),
- ),
- migrations.AlterUniqueTogether(
- name="groupassignee", unique_together={("project", "group")}
- ),
- migrations.AlterUniqueTogether(
- name="groupredirect",
- unique_together={("organization_id", "previous_short_id", "previous_project_slug")},
- ),
- migrations.AlterIndexTogether(
- name="group", index_together={("project", "first_release"), ("project", "id")}
- ),
- migrations.DeleteModel(name="ProjectCfiCacheFile"),
- migrations.DeleteModel(name="ProjectSymCacheFile"),
- migrations.AddField(
- model_name="sentryappwebhookerror",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="sentry_app_webhook_errors", to="sentry.Organization"
- ),
- ),
- migrations.AddField(
- model_name="sentryappwebhookerror",
- name="sentry_app",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- related_name="webhook_errors", to="sentry.SentryApp"
- ),
- ),
- migrations.AddField(
- model_name="sentryappinstallationtoken",
- name="sentry_app_installation",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.SentryAppInstallation"
- ),
- ),
- migrations.AddField(
- model_name="integrationfeature",
- name="sentry_app",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.SentryApp"),
- ),
- migrations.AddField(
- model_name="incidentsnapshot",
- name="event_stats_snapshot",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.TimeSeriesSnapshot"
- ),
- ),
- migrations.AddField(
- model_name="incidentsnapshot",
- name="incident",
- field=models.OneToOneField(to="sentry.Incident", on_delete=models.CASCADE),
- ),
- migrations.AddField(
- model_name="incidentgroup",
- name="group",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False, to="sentry.Group", db_index=False
- ),
- ),
- migrations.AddField(
- model_name="incidentgroup",
- name="incident",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Incident"),
- ),
- migrations.AddField(
- model_name="incidentactivity",
- name="event_stats_snapshot",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.TimeSeriesSnapshot", null=True
- ),
- ),
- migrations.AddField(
- model_name="incidentactivity",
- name="incident",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Incident"),
- ),
- migrations.AddField(
- model_name="incidentactivity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to=settings.AUTH_USER_MODEL, null=True
- ),
- ),
- migrations.AddField(
- model_name="incident",
- name="groups",
- field=models.ManyToManyField(
- related_name="incidents", through="sentry.IncidentGroup", to="sentry.Group"
- ),
- ),
- migrations.AddField(
- model_name="incident",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(to="sentry.Organization"),
- ),
- migrations.AddField(
- model_name="incident",
- name="projects",
- field=models.ManyToManyField(
- related_name="incidents", through="sentry.IncidentProject", to="sentry.Project"
- ),
- ),
- migrations.AddField(
- model_name="alertruletriggerexclusion",
- name="query_subscription",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.QuerySubscription"
- ),
- ),
- migrations.AddField(
- model_name="alertruletrigger",
- name="triggered_incidents",
- field=models.ManyToManyField(
- related_name="triggers", through="sentry.IncidentTrigger", to="sentry.Incident"
- ),
- ),
- migrations.AddField(
- model_name="alertrulequerysubscription",
- name="query_subscription",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.QuerySubscription", unique=True
- ),
- ),
- migrations.AddField(
- model_name="alertrule",
- name="excluded_projects",
- field=models.ManyToManyField(
- related_name="alert_rule_exclusions",
- through="sentry.AlertRuleExcludedProjects",
- to="sentry.Project",
- ),
- ),
- migrations.AddField(
- model_name="alertrule",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization", null=True, db_index=False
- ),
- ),
- migrations.AddField(
- model_name="alertrule",
- name="query_subscriptions",
- field=models.ManyToManyField(
- related_name="alert_rules",
- through="sentry.AlertRuleQuerySubscription",
- to="sentry.QuerySubscription",
- ),
- ),
- migrations.AlterUniqueTogether(
- name="sentryappinstallationtoken",
- unique_together={("sentry_app_installation", "api_token")},
- ),
- migrations.AlterUniqueTogether(
- name="pagerdutyserviceproject", unique_together={("project", "pagerduty_service")}
- ),
- migrations.AlterUniqueTogether(
- name="pagerdutyservice",
- unique_together={("service_id", "organization_integration")},
- ),
- migrations.AlterUniqueTogether(
- name="integrationfeature", unique_together={("sentry_app", "feature")}
- ),
- migrations.AlterUniqueTogether(
- name="incidenttrigger", unique_together={("incident", "alert_rule_trigger")}
- ),
- migrations.AlterUniqueTogether(
- name="incidentsuspectcommit", unique_together={("incident", "commit")}
- ),
- migrations.AlterUniqueTogether(
- name="incidentsubscription", unique_together={("incident", "user")}
- ),
- migrations.AlterUniqueTogether(name="incidentseen", unique_together={("user", "incident")}),
- migrations.AlterUniqueTogether(
- name="incidentproject", unique_together={("project", "incident")}
- ),
- migrations.AlterUniqueTogether(
- name="incidentgroup", unique_together={("group", "incident")}
- ),
- migrations.AlterUniqueTogether(
- name="incident", unique_together={("organization", "identifier")}
- ),
- migrations.AlterIndexTogether(
- name="incident", index_together={("alert_rule", "type", "status")}
- ),
- migrations.AlterUniqueTogether(
- name="alertruletriggerexclusion",
- unique_together={("alert_rule_trigger", "query_subscription")},
- ),
- migrations.AlterUniqueTogether(
- name="alertruletrigger", unique_together={("alert_rule", "label")}
- ),
- migrations.AlterUniqueTogether(
- name="alertruleexcludedprojects", unique_together={("alert_rule", "project")}
- ),
- migrations.AlterUniqueTogether(
- name="alertrule", unique_together={("organization", "name")}
- ),
- ]
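Several foreign keys above pass db_constraint=False and db_index=False: the relation exists only at the ORM level, with no database-enforced constraint and no dedicated single-column index. A model-level sketch of the IncidentProject declaration implied by the deleted migration (a reconstruction, not a copy from the source tree):

    from django.db import models

    from sentry.db.models.fields.foreignkey import FlexibleForeignKey

    class IncidentProject(models.Model):
        incident = FlexibleForeignKey("sentry.Incident")
        # No FK constraint in the database and no extra index; the composite
        # unique_together below already indexes (project, incident).
        project = FlexibleForeignKey(
            "sentry.Project", db_constraint=False, db_index=False
        )

        class Meta:
            db_table = "sentry_incidentproject"
            unique_together = (("project", "incident"),)

Skipping the constraint avoids foreign-key validation overhead on hot tables, at the cost of the ORM being the only thing enforcing integrity.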
diff --git a/src/sentry/migrations/0003_auto_20191022_0122.py b/src/sentry/migrations/0003_auto_20191022_0122.py
deleted file mode 100644
index 607d54af6ec1aa..00000000000000
--- a/src/sentry/migrations/0003_auto_20191022_0122.py
+++ /dev/null
@@ -1,128 +0,0 @@
-from django.db import migrations, models
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0002_912_to_recent")]
-
- operations = [
- migrations.CreateModel(
- name="AlertRuleTriggerAction",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- serialize=False, primary_key=True
- ),
- ),
- ("type", models.SmallIntegerField()),
- ("target_type", models.SmallIntegerField()),
- ("target_identifier", models.TextField(null=True)),
- ("target_display", models.TextField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "alert_rule_trigger",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.AlertRuleTrigger"
- ),
- ),
- (
- "integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Integration", null=True
- ),
- ),
- ],
- options={"db_table": "sentry_alertruletriggeraction"},
- ),
- migrations.AlterField(
- model_name="auditlogentry",
- name="event",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "member.invite"),
- (2, "member.add"),
- (3, "member.accept-invite"),
- (5, "member.remove"),
- (4, "member.edit"),
- (6, "member.join-team"),
- (7, "member.leave-team"),
- (8, "member.pending"),
- (20, "team.create"),
- (21, "team.edit"),
- (22, "team.remove"),
- (30, "project.create"),
- (31, "project.edit"),
- (32, "project.remove"),
- (33, "project.set-public"),
- (34, "project.set-private"),
- (35, "project.request-transfer"),
- (36, "project.accept-transfer"),
- (37, "project.enable"),
- (38, "project.disable"),
- (10, "org.create"),
- (11, "org.edit"),
- (12, "org.remove"),
- (13, "org.restore"),
- (40, "tagkey.remove"),
- (50, "projectkey.create"),
- (51, "projectkey.edit"),
- (52, "projectkey.remove"),
- (53, "projectkey.enable"),
- (53, "projectkey.disable"),
- (60, "sso.enable"),
- (61, "sso.disable"),
- (62, "sso.edit"),
- (63, "sso-identity.link"),
- (70, "api-key.create"),
- (71, "api-key.edit"),
- (72, "api-key.remove"),
- (80, "rule.create"),
- (81, "rule.edit"),
- (82, "rule.remove"),
- (100, "servicehook.create"),
- (101, "servicehook.edit"),
- (102, "servicehook.remove"),
- (103, "servicehook.enable"),
- (104, "servicehook.disable"),
- (110, "integration.add"),
- (111, "integration.edit"),
- (112, "integration.remove"),
- (113, "sentry-app.add"),
- (115, "sentry-app.remove"),
- (116, "sentry-app.install"),
- (117, "sentry-app.uninstall"),
- (130, "internal-integration.create"),
- (135, "internal-integration.add-token"),
- (136, "internal-integration.remove-token"),
- (90, "ondemand.edit"),
- (91, "trial.started"),
- (92, "plan.changed"),
- (93, "plan.cancelled"),
- (140, "invite-request.create"),
- (141, "invite-request.remove"),
- ]
- ),
- ),
- migrations.AlterField(
- model_name="sentryappwebhookerror",
- name="response_code",
- field=models.PositiveSmallIntegerField(null=True),
- ),
- ]
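The large AlterField on auditlogentry.event above only extends the choices list (adding the 140/141 invite-request events). For an integer column, choices is validation metadata on the Python side, so a choices-only AlterField issues no ALTER TABLE and rewrites no rows when applied. Its shape, abbreviated, with the standard Django field standing in for Sentry's bounded variant:

    from django.db import migrations, models

    # A choices-only AlterField: the column definition is unchanged, so
    # applying this operation performs no work at the database level.
    operation = migrations.AlterField(
        model_name="auditlogentry",
        name="event",
        field=models.PositiveIntegerField(
            choices=[
                (1, "member.invite"),
                # ... existing entries elided ...
                (140, "invite-request.create"),
                (141, "invite-request.remove"),
            ]
        ),
    )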
diff --git a/src/sentry/migrations/0004_bitfieldtestmodel_blankjsonfieldtestmodel_callabledefaultmodel_jsonfieldtestmodel_jsonfieldwithdefau.py b/src/sentry/migrations/0004_bitfieldtestmodel_blankjsonfieldtestmodel_callabledefaultmodel_jsonfieldtestmodel_jsonfieldwithdefau.py
deleted file mode 100644
index 84fd255b4edf7c..00000000000000
--- a/src/sentry/migrations/0004_bitfieldtestmodel_blankjsonfieldtestmodel_callabledefaultmodel_jsonfieldtestmodel_jsonfieldwithdefau.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from django.conf import settings
-from django.db import migrations, models
-import bitfield.models
-import sentry.models
-import sentry.db.models.fields.jsonfield
-
-
-def is_test_db():
- return settings.DATABASES.get("default", {}).get("NAME", "").startswith("test_")
-
-
-class Migration(migrations.Migration):
- """
- This is a hack to get these test models to work when we run the tests using
- migrations. We don't need to run this in dev or prod, and so we just check that the
- database name starts with `test_`.
- """
-
- is_dangerous = True
-
- dependencies = [("sentry", "0003_auto_20191022_0122")]
-
- if is_test_db():
- operations = [
- migrations.CreateModel(
- name="BitFieldTestModel",
- fields=[
- (
- "id",
- models.AutoField(
- verbose_name="ID", serialize=False, auto_created=True, primary_key=True
- ),
- ),
- (
- "flags",
- bitfield.models.BitField(
- ("FLAG_0", "FLAG_1", "FLAG_2", "FLAG_3"),
- default=3,
- db_column="another_name",
- ),
- ),
- ],
- ),
- migrations.CreateModel(
- name="BlankJSONFieldTestModel",
- fields=[
- (
- "id",
- models.AutoField(
- verbose_name="ID", serialize=False, auto_created=True, primary_key=True
- ),
- ),
- ("null_json", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- (
- "blank_json",
- sentry.db.models.fields.jsonfield.JSONField(default=dict, blank=True),
- ),
- ],
- ),
- migrations.CreateModel(
- name="CallableDefaultModel",
- fields=[
- (
- "id",
- models.AutoField(
- verbose_name="ID", serialize=False, auto_created=True, primary_key=True
- ),
- ),
- ("json", sentry.db.models.fields.jsonfield.JSONField()),
- ],
- ),
- migrations.CreateModel(
- name="JSONFieldTestModel",
- fields=[
- (
- "id",
- models.AutoField(
- verbose_name="ID", serialize=False, auto_created=True, primary_key=True
- ),
- ),
- (
- "json",
- sentry.db.models.fields.jsonfield.JSONField(
- null=True, verbose_name="test", blank=True
- ),
- ),
- ],
- ),
- migrations.CreateModel(
- name="JSONFieldWithDefaultTestModel",
- fields=[
- (
- "id",
- models.AutoField(
- verbose_name="ID", serialize=False, auto_created=True, primary_key=True
- ),
- ),
- (
- "json",
- sentry.db.models.fields.jsonfield.JSONField(
- default={"sukasuka": "YAAAAAZ"}
- ),
- ),
- ],
- ),
- ]
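The deleted migration above gates its operations on the database name: the if is_test_db(): check executes while the class body is evaluated at import time, so the test-only models are created for databases named test_* and the migration is a no-op everywhere else. The same guard pattern, reduced to a sketch (the model is a hypothetical stand-in):

    from django.conf import settings
    from django.db import migrations, models

    def is_test_db():
        return settings.DATABASES.get("default", {}).get("NAME", "").startswith("test_")

    class Migration(migrations.Migration):
        is_dangerous = True
        dependencies = [("sentry", "0003_auto_20191022_0122")]

        # The guard runs at import time; when it is false, `operations` falls
        # back to the empty default on the base Migration class and applying
        # this migration does nothing.
        if is_test_db():
            operations = [
                migrations.CreateModel(
                    name="ExampleTestModel",  # hypothetical stand-in
                    fields=[("id", models.AutoField(primary_key=True))],
                ),
            ]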
diff --git a/src/sentry/migrations/0005_fix_content_types.py b/src/sentry/migrations/0005_fix_content_types.py
deleted file mode 100644
index 25201f095e6ca8..00000000000000
--- a/src/sentry/migrations/0005_fix_content_types.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from django.db import migrations
-
-
-def fix_content_types(apps, schema_editor):
- # XXX: This is a gross hack. We missed removing this column a long time ago while
- # upgrading Django. Since different databases might be in different states depending
- # on which path they take to get to Django migrations, it's safest to just check
- # if the column exists for everyone, and remove it if so. This removal is safe,
- # since the column has been long removed from the Django model.
-
- c = schema_editor.connection.cursor()
- c.execute(
- """
- SELECT 1
- FROM information_schema.columns
- WHERE table_name='django_content_type' and column_name='name';
- """
- )
- results = c.fetchall()
- if len(results):
- c.execute('ALTER TABLE django_content_type DROP COLUMN "name";')
- c.close()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- (
- "sentry",
- "0004_bitfieldtestmodel_blankjsonfieldtestmodel_callabledefaultmodel_jsonfieldtestmodel_jsonfieldwithdefau",
- )
- ]
-
- operations = [
- migrations.RunPython(
- fix_content_types,
- migrations.RunPython.noop,
- hints={"tables": ["django_content_type"]},
- )
- ]
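
The hand-rolled check above is what makes 0005 idempotent: the column is only dropped when it is actually present, so databases that took different upgrade paths converge on the same schema. A minimal sketch of the same "check, then drop" pattern generalized to any table/column pair; the helper name is illustrative and not part of the migration:

def drop_column_if_exists(schema_editor, table, column):
    # Only issue the ALTER when the column exists, so the operation is
    # safe against databases in either state.
    with schema_editor.connection.cursor() as cursor:
        cursor.execute(
            """
            SELECT 1 FROM information_schema.columns
            WHERE table_name = %s AND column_name = %s;
            """,
            [table, column],
        )
        if cursor.fetchone():
            cursor.execute(
                "ALTER TABLE {} DROP COLUMN {};".format(
                    schema_editor.quote_name(table),
                    schema_editor.quote_name(column),
                )
            )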
diff --git a/src/sentry/migrations/0006_sentryapp_date_published.py b/src/sentry/migrations/0006_sentryapp_date_published.py
deleted file mode 100644
index ff5016b2640e9b..00000000000000
--- a/src/sentry/migrations/0006_sentryapp_date_published.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0005_fix_content_types"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="sentryapp",
- name="date_published",
- field=models.DateTimeField(null=True, blank=True),
- ),
- ]
diff --git a/src/sentry/migrations/0007_auto_20191029_0131.py b/src/sentry/migrations/0007_auto_20191029_0131.py
deleted file mode 100644
index e8f6c1c8c39042..00000000000000
--- a/src/sentry/migrations/0007_auto_20191029_0131.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0006_sentryapp_date_published")]
-
- operations = [
- migrations.AlterField(
- model_name="organization",
- name="default_role",
- field=models.CharField(default="member", max_length=32),
- ),
- migrations.AlterField(
- model_name="organizationmember",
- name="role",
- field=models.CharField(default="member", max_length=32),
- ),
- ]
diff --git a/src/sentry/migrations/0008_auto_20191030_0016.py b/src/sentry/migrations/0008_auto_20191030_0016.py
deleted file mode 100644
index a2f220ded33072..00000000000000
--- a/src/sentry/migrations/0008_auto_20191030_0016.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0007_auto_20191029_0131")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.RemoveField(model_name="alertrule", name="alert_threshold"),
- migrations.RemoveField(model_name="alertrule", name="resolve_threshold"),
- migrations.RemoveField(model_name="alertrule", name="threshold_type"),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0009_auto_20191101_1608.py b/src/sentry/migrations/0009_auto_20191101_1608.py
deleted file mode 100644
index 171f1c8ca6716f..00000000000000
--- a/src/sentry/migrations/0009_auto_20191101_1608.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0008_auto_20191030_0016"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="pagerdutyservice",
- name="service_id",
- field=models.CharField(max_length=255, null=True),
- )
- ]
diff --git a/src/sentry/migrations/0010_auto_20191104_1641.py b/src/sentry/migrations/0010_auto_20191104_1641.py
deleted file mode 100644
index 890e7b9d1d0e7f..00000000000000
--- a/src/sentry/migrations/0010_auto_20191104_1641.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0009_auto_20191101_1608"),
- ]
-
- operations = [
- migrations.AlterUniqueTogether(
- name="pagerdutyservice",
- unique_together=set(),
- ),
- ]
diff --git a/src/sentry/migrations/0011_remove_pagerdutyservice_service_id_from_state.py b/src/sentry/migrations/0011_remove_pagerdutyservice_service_id_from_state.py
deleted file mode 100644
index 2d384d4b502834..00000000000000
--- a/src/sentry/migrations/0011_remove_pagerdutyservice_service_id_from_state.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0010_auto_20191104_1641")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.RemoveField(model_name="pagerdutyservice", name="service_id"),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0012_remove_pagerdutyservice_service_id.py b/src/sentry/migrations/0012_remove_pagerdutyservice_service_id.py
deleted file mode 100644
index 275ad42d6f1583..00000000000000
--- a/src/sentry/migrations/0012_remove_pagerdutyservice_service_id.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0011_remove_pagerdutyservice_service_id_from_state"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_pagerdutyservice" DROP COLUMN "service_id";
- """,
- reverse_sql="""
- ALTER TABLE "sentry_pagerdutyservice" ADD COLUMN "service_id" varchar(255) NULL;
- """,
- hints={"tables": ["sentry_pagerdutyservice"]},
- )
- ],
- state_operations=[],
- )
- ]
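
Taken together, 0011 and 0012 illustrate the two-phase pattern for dropping a column safely: first detach the field from Django's model state, then drop it at the database level in a separate migration that can be deployed and monitored on its own. A condensed sketch of the pattern with placeholder model and column names:

from django.db import migrations

# Phase 1: Django forgets the field; the column stays in Postgres.
state_phase = migrations.SeparateDatabaseAndState(
    database_operations=[],
    state_operations=[migrations.RemoveField(model_name="mymodel", name="old_col")],
)

# Phase 2, in a later migration: drop the now-unreferenced column for real.
db_phase = migrations.SeparateDatabaseAndState(
    database_operations=[
        migrations.RunSQL(
            'ALTER TABLE "app_mymodel" DROP COLUMN "old_col";',
            reverse_sql='ALTER TABLE "app_mymodel" ADD COLUMN "old_col" varchar(255) NULL;',
        )
    ],
    state_operations=[],
)

Splitting the phases means code running mid-deploy never references a column that has already been dropped.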
diff --git a/src/sentry/migrations/0013_auto_20191111_1829.py b/src/sentry/migrations/0013_auto_20191111_1829.py
deleted file mode 100644
index 057419a5b1740f..00000000000000
--- a/src/sentry/migrations/0013_auto_20191111_1829.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0012_remove_pagerdutyservice_service_id")]
-
- operations = [
- migrations.AlterField(
- model_name="sentryappwebhookerror",
- name="organization",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_column="organization_id"
- ),
- ),
- migrations.AlterField(
- model_name="sentryappwebhookerror",
- name="sentry_app",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(db_column="sentry_app_id"),
- ),
- ]
diff --git a/src/sentry/migrations/0014_delete_sentryappwebhookerror.py b/src/sentry/migrations/0014_delete_sentryappwebhookerror.py
deleted file mode 100644
index 2e8429d2b109ad..00000000000000
--- a/src/sentry/migrations/0014_delete_sentryappwebhookerror.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0013_auto_20191111_1829"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.DeleteModel(
- name="SentryAppWebhookError",
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0015_delete_sentryappwebhookerror_db.py b/src/sentry/migrations/0015_delete_sentryappwebhookerror_db.py
deleted file mode 100644
index 07599df1c4de3a..00000000000000
--- a/src/sentry/migrations/0015_delete_sentryappwebhookerror_db.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0014_delete_sentryappwebhookerror"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- DROP TABLE "sentry_sentryappwebhookerror";
- """,
- hints={"tables": ["sentry_sentryappwebhookerror"]},
- )
- ],
- state_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0016_delete_alert_rule_deprecated_fields.py b/src/sentry/migrations/0016_delete_alert_rule_deprecated_fields.py
deleted file mode 100644
index 9c7e527feeb541..00000000000000
--- a/src/sentry/migrations/0016_delete_alert_rule_deprecated_fields.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0015_delete_sentryappwebhookerror_db")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_alertrule" DROP COLUMN "alert_threshold";
- ALTER TABLE "sentry_alertrule" DROP COLUMN "resolve_threshold";
- ALTER TABLE "sentry_alertrule" DROP COLUMN "threshold_type";
- """,
- reverse_sql="""
- ALTER TABLE "sentry_alertrule" ADD COLUMN "alert_threshold" smallint NULL;
- ALTER TABLE "sentry_alertrule" ADD COLUMN "resolve_threshold" int NULL;
- ALTER TABLE "sentry_alertrule" ADD COLUMN "threshold_type" int NULL;
-
- """,
- hints={"tables": ["sentry_alertrule"]},
- )
- ],
- state_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0017_incident_aggregation.py b/src/sentry/migrations/0017_incident_aggregation.py
deleted file mode 100644
index ff3931899a226f..00000000000000
--- a/src/sentry/migrations/0017_incident_aggregation.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0016_delete_alert_rule_deprecated_fields")]
-
- """
- Generated SQL:
-    This table has low hundreds of rows, so adding a default is fine.
- ALTER TABLE "sentry_incident" ADD COLUMN "aggregation" smallint DEFAULT 0 NOT NULL CHECK ("aggregation" >= 0);
- ALTER TABLE "sentry_incident" ALTER COLUMN "aggregation" DROP DEFAULT;
- """
-
- operations = [
- migrations.AddField(
- model_name="incident",
- name="aggregation",
- field=models.PositiveSmallIntegerField(default=0),
- )
- ]
diff --git a/src/sentry/migrations/0018_discoversavedquery_version.py b/src/sentry/migrations/0018_discoversavedquery_version.py
deleted file mode 100644
index a852a928892a0b..00000000000000
--- a/src/sentry/migrations/0018_discoversavedquery_version.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0017_incident_aggregation"),
- ]
-
- """
- Generated SQL:
- ALTER TABLE "sentry_discoversavedquery" ADD COLUMN "version" integer NULL;
- ALTER TABLE "sentry_discoversavedquery" ALTER COLUMN "version" DROP DEFAULT;
- """
-
- operations = [
- migrations.AddField(
- model_name="discoversavedquery",
- name="version",
- field=models.IntegerField(null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0019_auto_20191114_2040.py b/src/sentry/migrations/0019_auto_20191114_2040.py
deleted file mode 100644
index fd1d13d6b3ac7d..00000000000000
--- a/src/sentry/migrations/0019_auto_20191114_2040.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from django.db import migrations
-
-
-def forwards(apps, schema_editor):
- """
- Backfill the saved queries with their version.
- """
- DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
- for query in DiscoverSavedQuery.objects.filter(version__isnull=True).all():
- if "version" in query.query:
- query.version = query.query.get("version", 1)
- del query.query["version"]
- else:
- query.version = 1
- query.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # We are running many updates, so we don't want to be in a transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0018_discoversavedquery_version"),
- ]
-
- operations = [
- migrations.RunPython(
- forwards,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_discoversavedquery"]},
- )
- ]
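
For concreteness, 0019 hoists the version out of the JSON blob. An assumed example row, not taken from real data:

# Before forwards():
#   query.version is None
#   query.query == {"version": 2, "fields": ["title"]}
# After forwards():
#   query.version == 2
#   query.query == {"fields": ["title"]}
# Rows whose blob carries no "version" key default to version 1.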
diff --git a/src/sentry/migrations/0020_auto_20191125_1420.py b/src/sentry/migrations/0020_auto_20191125_1420.py
deleted file mode 100644
index 942814d77771a2..00000000000000
--- a/src/sentry/migrations/0020_auto_20191125_1420.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.9.13 on 2019-11-25 14:20
-
-from django.db import migrations
-
-from sentry import eventstore
-from sentry.utils.query import RangeQuerySetWrapper
-
-
-def backfill_group_ids(model):
- query = model.objects.filter(group_id__isnull=True)
-
- for attachment in RangeQuerySetWrapper(query, step=1000):
- event = eventstore.get_event_by_id(attachment.project_id, attachment.event_id)
- if event:
- model.objects.filter(id=attachment.id).update(group_id=event.group_id)
-
-
-def forwards(apps, schema_editor):
- EventAttachment = apps.get_model("sentry", "EventAttachment")
- backfill_group_ids(EventAttachment)
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- dependencies = [
- ("sentry", "0019_auto_20191114_2040"),
- ]
-
- operations = [
- migrations.RunPython(
- forwards,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_eventattachment"]},
- )
- ]
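
RangeQuerySetWrapper, used here and in several of the backfills below, walks a queryset in bounded, primary-key-ordered chunks rather than pulling one large result set, so only `step` rows are materialized at a time. Illustrative usage with an assumed handler:

from sentry.utils.query import RangeQuerySetWrapper

for attachment in RangeQuerySetWrapper(EventAttachment.objects.all(), step=1000):
    handle(attachment)  # at most `step` rows are fetched per chunk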
diff --git a/src/sentry/migrations/0021_auto_20191202_1716.py b/src/sentry/migrations/0021_auto_20191202_1716.py
deleted file mode 100644
index 87f4902663a552..00000000000000
--- a/src/sentry/migrations/0021_auto_20191202_1716.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Generated by Django 1.9.13 on 2019-12-02 17:16
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0020_auto_20191125_1420"),
- ]
-
- operations = [
- migrations.RenameModel(
- old_name="LatestRelease",
- new_name="LatestRepoReleaseEnvironment",
- ),
- ]
diff --git a/src/sentry/migrations/0021_auto_20191203_1803.py b/src/sentry/migrations/0021_auto_20191203_1803.py
deleted file mode 100644
index 131473f26ec34f..00000000000000
--- a/src/sentry/migrations/0021_auto_20191203_1803.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.9.13 on 2019-12-03 18:03
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.onetoone
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0020_auto_20191125_1420"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.AlterField(
- model_name="incidentsnapshot",
- name="incident",
- field=sentry.db.models.fields.onetoone.OneToOneCascadeDeletes(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Incident"
- ),
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0022_merge.py b/src/sentry/migrations/0022_merge.py
deleted file mode 100644
index 502481c951ad83..00000000000000
--- a/src/sentry/migrations/0022_merge.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Generated by Django 1.9.13 on 2019-12-04 19:46
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0021_auto_20191203_1803"),
- ("sentry", "0021_auto_20191202_1716"),
- ]
-
- operations = []
diff --git a/src/sentry/migrations/0023_hide_environment_none_20191126.py b/src/sentry/migrations/0023_hide_environment_none_20191126.py
deleted file mode 100644
index 77107a1e8e580d..00000000000000
--- a/src/sentry/migrations/0023_hide_environment_none_20191126.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from django.db import migrations
-
-
-def hide_environment_none(apps, schema_editor):
- """
-    Hide environments named "none", since the name is blacklisted and such environments can no longer be created.
-
-    We iterate over each environment row individually in Python, rather than updating in bulk,
-    so that we don't lock up the DB. This is far slower, but much safer.
- """
- EnvironmentProject = apps.get_model("sentry", "EnvironmentProject")
- for project in EnvironmentProject.objects.filter(environment__name="none"):
- project.is_hidden = True
- project.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0022_merge"),
- ]
-
- operations = [
- migrations.RunPython(
- hide_environment_none,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_environmentproject"]},
- )
- ]
diff --git a/src/sentry/migrations/0024_auto_20191230_2052.py b/src/sentry/migrations/0024_auto_20191230_2052.py
deleted file mode 100644
index 1421f97b490cdc..00000000000000
--- a/src/sentry/migrations/0024_auto_20191230_2052.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# Generated by Django 1.9.13 on 2019-12-30 20:52
-
-import os
-from datetime import timedelta
-
-from django.db import migrations
-from django.utils import timezone
-
-from sentry import nodestore, options
-from sentry.eventstore.models import Event as NewEvent
-from sentry.utils.dates import to_timestamp
-
-
-def backfill_eventstream(apps, schema_editor):
- """
- Inserts Postgres events into the eventstream if there are recent events in Postgres.
-
- This is for open source users migrating from 9.x who want to keep their events.
- If there are no recent events in Postgres, skip the backfill.
- """
- from sentry import eventstore, eventstream
- from sentry.utils.query import RangeQuerySetWrapper
-
- Event = apps.get_model("sentry", "Event")
- Group = apps.get_model("sentry", "Group")
- Project = apps.get_model("sentry", "Project")
-
- # Kill switch to skip this migration
- skip_backfill = os.environ.get("SENTRY_SKIP_EVENTS_BACKFILL_FOR_10", False)
-
- # Use 90 day retention if the option has not been set or set to 0
- DEFAULT_RETENTION = 90
- retention_days = options.get("system.event-retention-days") or DEFAULT_RETENTION
-
- def get_events(last_days):
- to_date = timezone.now()
- from_date = to_date - timedelta(days=last_days)
- return Event.objects.filter(
- datetime__gte=from_date, datetime__lte=to_date, group_id__isnull=False
- )
-
- def _attach_related(_events):
- project_ids = set()
- group_ids = set()
- for event in _events:
- project_ids.add(event.project_id)
- group_ids.add(event.group_id)
- projects = {p.id: p for p in Project.objects.filter(id__in=project_ids)}
- groups = {g.id: g for g in Group.objects.filter(id__in=group_ids)}
-
- for event in _events:
- event.project = projects.get(event.project_id)
- event.group = groups.get(event.group_id)
- # When migrating old data from Sentry 9.0.0 to 9.1.2 to 10 in rapid succession, the event timestamp may be
-            # missing. This adds it back.
- if "timestamp" not in event.data.data:
- event.data.data["timestamp"] = to_timestamp(event.datetime)
- eventstore.bind_nodes(_events, "data")
-
- if skip_backfill:
- print("Skipping backfill.\n") # noqa: B314
- return
-
- events = get_events(retention_days)
- count = events.count()
-
- if count == 0:
- print("Nothing to do, skipping migration.\n") # noqa: B314
- return
-
- print(f"Events to process: {count}\n") # noqa: B314
-
- processed = 0
- for e in RangeQuerySetWrapper(events, step=100, callbacks=(_attach_related,)):
- event_data = e.data.data
- if e.project is None or e.group is None or len(event_data) == 0:
- print( # noqa: B314
- f"Skipped {e} as group, project or node data information is invalid.\n"
- )
- continue
-
- event = NewEvent(
- project_id=e.project_id, event_id=e.event_id, group_id=e.group_id, data=event_data
- )
-
- event.group = e.group
- event.project = e.project
-
- try:
- eventstream.insert(
- group=event.group,
- event=event,
- is_new=False,
- is_regression=False,
- is_new_group_environment=False,
- primary_hash=event.get_primary_hash(),
- received_timestamp=event.data.get("received")
- or float(event.datetime.strftime("%s")),
- skip_consume=True,
- )
-
- # The node ID format was changed in Sentry 9.1.0
- # (https://github.com/getsentry/sentry/commit/f73a4039d16a5c4f88bde37f6464cac21deb50e1)
- # If we are migrating from older versions of Sentry (i.e. 9.0.0 and earlier)
- # we need to resave the node using the new node ID scheme and delete the old
- # node.
- old_node_id = e.data.id
- new_node_id = event.data.id
- if old_node_id != new_node_id:
- event.data.save()
- nodestore.delete(old_node_id)
-
- processed += 1
- except Exception as error:
- print( # noqa: B314
- f"An error occured while trying to migrate the following event: {event}\n.----\n{error}"
- )
-
- if processed == 0:
- raise Exception(
- "Cannot migrate any event. If this is okay, re-run migrations with SENTRY_SKIP_EVENTS_BACKFILL_FOR_10 environment variable set to skip this step."
- )
-
- print(f"Event migration done. Migrated {processed} of {count} events.\n") # noqa: B314
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- dependencies = [
- ("sentry", "0023_hide_environment_none_20191126"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_eventstream,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_event", "sentry_groupedmessage", "sentry_project"]},
- ),
- ]
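
Self-hosted operators who do not want their Postgres events carried into the eventstream can skip the backfill above via the kill switch the migration reads. The check is a plain truthiness test on os.environ.get(...), so any non-empty string works:

import os

# Set before invoking migrations (e.g. before `sentry upgrade`):
os.environ["SENTRY_SKIP_EVENTS_BACKFILL_FOR_10"] = "1"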
diff --git a/src/sentry/migrations/0025_organizationaccessrequest_requester.py b/src/sentry/migrations/0025_organizationaccessrequest_requester.py
deleted file mode 100644
index 940f228459c786..00000000000000
--- a/src/sentry/migrations/0025_organizationaccessrequest_requester.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.9.13 on 2020-01-08 06:35
-
-from django.conf import settings
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [
- ("sentry", "0024_auto_20191230_2052"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="organizationaccessrequest",
- name="requester",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
- ),
- ),
- ]
-
-
-# BEGIN;
-# --
-# -- Add field requester to organizationaccessrequest
-# --
-# ALTER TABLE "sentry_organizationaccessrequest" ADD COLUMN "requester_id" integer NULL;
-# ALTER TABLE "sentry_organizationaccessrequest" ALTER COLUMN "requester_id" DROP DEFAULT;
-# CREATE INDEX "sentry_organizationaccessrequest_573f8683" ON "sentry_organizationaccessrequest" ("requester_id");
-# ALTER TABLE "sentry_organizationaccessrequest" ADD CONSTRAINT "sentry_organizationaccess_requester_id_73de4b1e_fk_auth_user_id" FOREIGN KEY ("requester_id") REFERENCES "auth_user" ("id") DEFERRABLE INITIALLY DEFERRED;
-
-# COMMIT;
diff --git a/src/sentry/migrations/0026_delete_event.py b/src/sentry/migrations/0026_delete_event.py
deleted file mode 100644
index e18e2c7315ff6c..00000000000000
--- a/src/sentry/migrations/0026_delete_event.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Generated by Django 1.10.8 on 2020-01-21 19:40
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- dependencies = [
- ("sentry", "0025_organizationaccessrequest_requester"),
- ]
-
- operations = [
- migrations.DeleteModel(
- name="Event",
- ),
- ]
diff --git a/src/sentry/migrations/0027_exporteddata.py b/src/sentry/migrations/0027_exporteddata.py
deleted file mode 100644
index a1054fdaeb2482..00000000000000
--- a/src/sentry/migrations/0027_exporteddata.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Generated by Django 1.11.27 on 2020-01-24 19:19
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.db.models.fields.jsonfield
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- dependencies = [("sentry", "0026_delete_event")]
-
- operations = [
- migrations.CreateModel(
- name="ExportedData",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_finished", models.DateTimeField(null=True)),
- ("date_expired", models.DateTimeField(null=True)),
- ("storage_url", models.URLField(null=True)),
- (
- "query_type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(0, "DISCOVER_V1"), (1, "BILLING_REPORT"), (2, "ISSUE_BY_TAG")]
- ),
- ),
- ("query_info", sentry.db.models.fields.jsonfield.JSONField(default=dict)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={"db_table": "sentry_exporteddata"},
- )
- ]
diff --git a/src/sentry/migrations/0028_user_reports.py b/src/sentry/migrations/0028_user_reports.py
deleted file mode 100644
index d240c5969caf1c..00000000000000
--- a/src/sentry/migrations/0028_user_reports.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Generated by Django 1.11.27 on 2020-01-23 19:07
-
-import logging
-
-from django.db import migrations
-
-from sentry import eventstore
-from sentry.utils.query import RangeQuerySetWrapper
-from sentry.utils.snuba import (
- QueryOutsideGroupActivityError,
- QueryOutsideRetentionError,
- SnubaError,
-)
-
-logger = logging.getLogger(__name__)
-
-
-def backfill_user_reports(apps, schema_editor):
- """
- Processes user reports that are missing event data, and adds the appropriate data
- if the event exists in Clickhouse.
- """
- UserReport = apps.get_model("sentry", "UserReport")
-
- user_reports = UserReport.objects.filter(group_id__isnull=True, environment_id__isnull=True)
-
- for report in RangeQuerySetWrapper(user_reports, step=1000):
- try:
- event = eventstore.get_event_by_id(report.project_id, report.event_id)
- except (SnubaError, QueryOutsideGroupActivityError, QueryOutsideRetentionError) as se:
-            logger.warning(
-                "failed to fetch event %s for project %d: %s",
-                report.event_id,
-                report.project_id,
-                se,
-            )
- continue
-
- if event:
- report.group_id = event.group_id
- report.environment_id = event.get_environment().id
- report.save(update_fields=["group_id", "environment_id"])
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- dependencies = [
- ("sentry", "0027_exporteddata"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_user_reports,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_userreport"]},
- ),
- ]
diff --git a/src/sentry/migrations/0029_discover_query_upgrade.py b/src/sentry/migrations/0029_discover_query_upgrade.py
deleted file mode 100644
index 10af8cdbb93df1..00000000000000
--- a/src/sentry/migrations/0029_discover_query_upgrade.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# Generated by Hand
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-# SearchVisitor.numeric_keys + SearchVisitor.date_keys
-OPERATOR_KEYS = {
- "project_id",
- "project.id",
- "issue.id",
- "device.battery_level",
- "device.charging",
- "device.online",
- "device.simulator",
- "error.handled",
- "stack.colno",
- "stack.in_app",
- "stack.lineno",
- "stack.stack_level",
- "transaction.duration",
- "apdex",
- "impact",
- "p75",
- "p95",
- "p99",
- "error_rate",
- "start",
- "end",
- "first_seen",
- "last_seen",
- "time",
- "timestamp",
- "transaction.start_time",
- "transaction.end_time",
-}
-
-
-# Aggregates are now fields
-def convert_field(fieldname, unique, reverse):
- if fieldname == "count":
- fieldname = "count()"
- elif unique:
- fieldname = f"count_unique({fieldname})"
-
- fieldname = f"-{fieldname}" if reverse else fieldname
- return fieldname
-
-
-def prepare_value(value):
- value = value.replace("%", "*")
- if " " in value and not value.startswith('"'):
- value = f'"{value}"'
- return value
-
-
-def convert(
- DiscoverSavedQuery,
- DiscoverSavedQueryProject,
- saved_query,
- name_extra=" (migrated from legacy discover)",
-):
- """Create a v2 query from a v1 query"""
- if saved_query.version == 2:
- # nothing to do! Already v2 :)
- return saved_query
-
- updated_query = {
- "environment": [],
- "fields": saved_query.query.get("fields", []),
- "orderby": "",
- "query": [], # Will become a string later via join
- }
-
- if "range" in saved_query.query:
- updated_query["range"] = saved_query.query["range"]
- elif "start" in saved_query.query and "end" in saved_query.query:
- updated_query["start"] = saved_query.query["start"]
- updated_query["end"] = saved_query.query["end"]
- else:
- updated_query["range"] = "14d"
-
- for aggregate in saved_query.query.get("aggregations", []):
- if aggregate[0] == "uniq":
- field = convert_field(aggregate[1], True, False)
- else:
- field = convert_field(aggregate[0], False, False)
- if field:
- updated_query["fields"].append(field)
-
- # Order by
- orderby = saved_query.query.get("orderby", "")
- unique = reverse = False
- if orderby.startswith("-"):
- reverse = True
- orderby = orderby[1:]
- if orderby.startswith("uniq_"):
- unique = True
- orderby = orderby[5:].replace("_", ".")
- field = convert_field(orderby, unique, reverse)
-
- if field:
- updated_query["orderby"] = field
- if reverse:
- field = field[1:]
- if field not in updated_query["fields"]:
- updated_query["fields"].append(field)
-
- # Conditions become a query now
- for condition in saved_query.query.get("conditions", []):
- column, operator, value = condition
- if column in ["contexts.key"]:
- column = "tags[contexts.key]"
- if column == "environment" and operator == "=":
- updated_query["environment"].append(value.strip('"'))
- elif operator == "IS NOT NULL":
- updated_query["query"].append(f"has:{column}")
- elif operator == "IS NULL":
- updated_query["query"].append(f"!has:{column}")
- elif column in OPERATOR_KEYS:
- updated_query["query"].append(
- "{}:{}{}".format(column, operator if operator != "=" else "", value)
- )
- elif operator in ["LIKE", "="]:
- updated_query["query"].append(f"{column}:{prepare_value(value)}")
- elif operator in ["NOT LIKE", "!="]:
- updated_query["query"].append(f"!{column}:{prepare_value(value)}")
- updated_query["query"] = " ".join(updated_query["query"])
-
- # Create the version 2 query
- new_query = DiscoverSavedQuery.objects.create(
- organization=saved_query.organization,
- name=saved_query.name + name_extra,
- query=updated_query,
- version=2,
- )
-
- # Set project_ids
- saved_query_project_ids = DiscoverSavedQueryProject.objects.filter(
- discover_saved_query=saved_query
- ).values_list("project", flat=True)
-
- # This is DiscoverSavedQueryProject.set_projects
- DiscoverSavedQueryProject.objects.filter(discover_saved_query=new_query).exclude(
- project__in=saved_query_project_ids
- ).delete()
-
- existing_project_ids = DiscoverSavedQueryProject.objects.filter(
- discover_saved_query=new_query
- ).values_list("project", flat=True)
-
- new_project_ids = list(set(saved_query_project_ids) - set(existing_project_ids))
-
- DiscoverSavedQueryProject.objects.bulk_create(
- [
- DiscoverSavedQueryProject(project_id=project_id, discover_saved_query=new_query)
- for project_id in new_project_ids
- ]
- )
-
- return new_query
-
-
-def migrate_v1_queries(apps, schema_editor):
- """
- Creates v2 versions of existing v1 queries
- """
- DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
- DiscoverSavedQueryProject = apps.get_model("sentry", "DiscoverSavedQueryProject")
-
- """ Seq Scan on sentry_discoversavedquery
- (cost=0.00..102.86 rows=1601 width=284)
- (actual time=0.027..1.158 rows=1275 loops=1)
- Filter: (version = 1)
- Rows Removed by Filter: 69
- Planning time: 0.929 ms
- Execution time: 1.296 ms
- """
- queryset = DiscoverSavedQuery.objects.filter(version=1)
-
- for query in RangeQuerySetWrapperWithProgressBar(queryset):
- convert(DiscoverSavedQuery, DiscoverSavedQueryProject, query)
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
- atomic = False
- dependencies = [
- ("sentry", "0028_user_reports"),
- ]
-
- operations = [
- migrations.RunPython(
- migrate_v1_queries,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_discoversavedquery", "sentry_discoversavedqueryproject"]},
- ),
- ]
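
To make the 0029 conversion concrete, here is an assumed v1 saved query and the v2 payload that convert() derives from it, traced through the code above rather than taken from real data:

v1_query = {
    "fields": ["title"],
    "aggregations": [["uniq", "user", "uniq_user"]],
    "conditions": [["environment", "=", "prod"], ["message", "LIKE", "%boom%"]],
    "orderby": "-uniq_user",
    "range": "7d",
}

# convert() builds:
v2_query = {
    "environment": ["prod"],                    # environment conditions are pulled out
    "fields": ["title", "count_unique(user)"],  # the uniq aggregate becomes a field
    "orderby": "-count_unique(user)",
    "query": "message:*boom*",                  # LIKE with % wildcards -> search syntax
    "range": "7d",
}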
diff --git a/src/sentry/migrations/0030_auto_20200201_0039.py b/src/sentry/migrations/0030_auto_20200201_0039.py
deleted file mode 100644
index 1e8689390f7219..00000000000000
--- a/src/sentry/migrations/0030_auto_20200201_0039.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-01 00:39
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0029_discover_query_upgrade")]
-
- operations = [
- migrations.AlterField(
- model_name="incidentsuspectcommit",
- name="commit",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_column="commit_id", db_index=True
- ),
- ),
- migrations.AlterField(
- model_name="incidentsuspectcommit",
- name="incident",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(db_column="incident_id"),
- ),
- ]
diff --git a/src/sentry/migrations/0031_delete_alert_rules_and_incidents.py b/src/sentry/migrations/0031_delete_alert_rules_and_incidents.py
deleted file mode 100644
index d652b093329c07..00000000000000
--- a/src/sentry/migrations/0031_delete_alert_rules_and_incidents.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-05 22:26
-
-from django.db import migrations
-
-
-def delete_alert_rules_incidents(apps, schema_editor):
- from sentry.utils.query import RangeQuerySetWrapper
-
- Incident = apps.get_model("sentry", "Incident")
- AlertRule = apps.get_model("sentry", "AlertRule")
- TimeSeriesSnapshot = apps.get_model("sentry", "TimeSeriesSnapshot")
- QuerySubscription = apps.get_model("sentry", "QuerySubscription")
-
- for incident in RangeQuerySetWrapper(Incident.objects.all()):
- incident.delete()
-
- for alert_rule in RangeQuerySetWrapper(AlertRule.objects.all()):
- alert_rule.delete()
-
- for snapshot in RangeQuerySetWrapper(TimeSeriesSnapshot.objects.all()):
- snapshot.delete()
-
- for sub in RangeQuerySetWrapper(QuerySubscription.objects.all()):
- sub.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0030_auto_20200201_0039")]
-
- operations = [
- migrations.RunPython(
- delete_alert_rules_incidents,
- reverse_code=migrations.RunPython.noop,
- hints={
- "tables": [
- "sentry_alertrule",
- "sentry_incident",
- "sentry_timeseriessnapshot",
- "sentry_querysubscription",
- ]
- },
- )
- ]
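
Worth noting for readers skimming the deleted data migrations: `delete_alert_rules_incidents` walks each table with `RangeQuerySetWrapper` and deletes rows one by one instead of calling `.delete()` on the whole queryset, so each object's cascade logic runs and no single statement holds the table for long. Below is a minimal sketch of the range-iteration idea, assuming a plain Django queryset; it is illustrative, not the `sentry.utils.query` implementation:

```python
# Illustrative sketch of primary-key-range iteration: fetch small ordered
# batches so no single query materializes the whole table or holds a long
# transaction open. Step size is arbitrary.
def iterate_in_pk_ranges(queryset, step=1000):
    last_pk = 0
    while True:
        batch = list(queryset.filter(pk__gt=last_pk).order_by("pk")[:step])
        if not batch:
            return
        yield from batch
        last_pk = batch[-1].pk
```
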
diff --git a/src/sentry/migrations/0032_delete_alert_email.py b/src/sentry/migrations/0032_delete_alert_email.py
deleted file mode 100644
index 942e1ee1c8c93b..00000000000000
--- a/src/sentry/migrations/0032_delete_alert_email.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-06 21:02
-
-from django.db import migrations
-
-
-def delete_alert_email_user_options(apps, schema_editor):
- """
-    Deletes every stored `alert_email` UserOption row. The option is no longer
-    read anywhere, so these rows are dead data.
- """
- from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
- UserOption = apps.get_model("sentry", "UserOption")
-
- """
- Seq Scan on sentry_useroption (cost=0.00..40142.93 rows=42564 width=65) (actual time=30.690..9720.536 rows=42407 loops=1)
- Filter: ((key)::text = 'alert_email'::text)
- Rows Removed by Filter: 1692315
- Planning time: 234.778 ms
- Execution time: 9730.608 ms
- """
- for user_option in RangeQuerySetWrapperWithProgressBar(UserOption.objects.all()):
- if user_option.key == "alert_email":
- user_option.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0031_delete_alert_rules_and_incidents"),
- ]
-
- operations = [
- migrations.RunPython(
- delete_alert_email_user_options,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_useroption"]},
- ),
- ]
diff --git a/src/sentry/migrations/0033_auto_20200210_2137.py b/src/sentry/migrations/0033_auto_20200210_2137.py
deleted file mode 100644
index 0175f3c4ab29bc..00000000000000
--- a/src/sentry/migrations/0033_auto_20200210_2137.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-10 21:37
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0032_delete_alert_email"),
- ]
-
- operations = [
- migrations.RemoveField(
- model_name="exporteddata",
- name="storage_url",
- ),
- migrations.AddField(
- model_name="exporteddata",
- name="file",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.File",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0034_auto_20200210_2311.py b/src/sentry/migrations/0034_auto_20200210_2311.py
deleted file mode 100644
index 3e3259ffa228e0..00000000000000
--- a/src/sentry/migrations/0034_auto_20200210_2311.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-10 23:11
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- """
- BEGIN;
- --
- -- Alter field flags on project
- --
- COMMIT;
- """
-
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0033_auto_20200210_2137")]
-
- operations = [
- migrations.AlterField(
- model_name="project",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("has_releases", "This Project has sent release data"),
- ("has_sourcemaps", "This Project has processed source maps"),
- ),
- default=0,
- null=True,
- ),
- )
- ]
diff --git a/src/sentry/migrations/0035_auto_20200127_1711.py b/src/sentry/migrations/0035_auto_20200127_1711.py
deleted file mode 100644
index 4afb0943f98d9a..00000000000000
--- a/src/sentry/migrations/0035_auto_20200127_1711.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Generated by Django 1.11.27 on 2020-01-27 17:11
-
-from django.db import migrations, models
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- dependencies = [
- ("sentry", "0034_auto_20200210_2311"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="AlertRuleEnvironment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- (
- "alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.AlertRule"
- ),
- ),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Environment"
- ),
- ),
- ],
- options={
- "db_table": "sentry_alertruleenvironment",
- },
- ),
- migrations.AddField(
- model_name="alertrule",
- name="environment",
- field=models.ManyToManyField(
- related_name="alert_rule_environment",
- through="sentry.AlertRuleEnvironment",
- to="sentry.Environment",
- ),
- ),
- migrations.AlterUniqueTogether(
- name="alertruleenvironment",
- unique_together={("alert_rule", "environment")},
- ),
- ]
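
For readers mapping the operations in 0035 back to model code, the `CreateModel`/`AddField`/`AlterUniqueTogether` trio corresponds roughly to the declaration below. This is a sketch with simplified field types, not the historical model source:

```python
from django.db import models


class AlertRuleEnvironment(models.Model):
    # Sketch of the through table created above; the real model used
    # Sentry's FlexibleForeignKey and BoundedBigAutoField field types.
    # AlertRule also gained:
    #   environment = models.ManyToManyField(
    #       "sentry.Environment", through="sentry.AlertRuleEnvironment",
    #       related_name="alert_rule_environment")
    alert_rule = models.ForeignKey("sentry.AlertRule", on_delete=models.CASCADE)
    environment = models.ForeignKey("sentry.Environment", on_delete=models.CASCADE)

    class Meta:
        db_table = "sentry_alertruleenvironment"
        unique_together = (("alert_rule", "environment"),)
```
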
diff --git a/src/sentry/migrations/0036_auto_20200213_0106.py b/src/sentry/migrations/0036_auto_20200213_0106.py
deleted file mode 100644
index 756e5079e9a117..00000000000000
--- a/src/sentry/migrations/0036_auto_20200213_0106.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-13 01:06
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0035_auto_20200127_1711")]
-
- operations = [
- migrations.AlterField(
- model_name="pagerdutyserviceproject",
- name="organization_integration",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_column="organization_integration_id", db_index=True, null=True
- ),
- ),
- migrations.AlterField(
- model_name="pagerdutyserviceproject",
- name="pagerduty_service",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_column="pagerduty_service_id", db_index=True
- ),
- ),
- migrations.AlterField(
- model_name="pagerdutyserviceproject",
- name="project",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(db_column="project_id"),
- ),
- ]
diff --git a/src/sentry/migrations/0037_auto_20200213_0140.py b/src/sentry/migrations/0037_auto_20200213_0140.py
deleted file mode 100644
index e1d32f366bb257..00000000000000
--- a/src/sentry/migrations/0037_auto_20200213_0140.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-13 01:40
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0036_auto_20200213_0106"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[migrations.DeleteModel(name="PagerDutyServiceProject")],
- database_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0038_auto_20200213_1904.py b/src/sentry/migrations/0038_auto_20200213_1904.py
deleted file mode 100644
index c4407909cce71f..00000000000000
--- a/src/sentry/migrations/0038_auto_20200213_1904.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-13 19:04
-
-from django.db import migrations
-
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0037_auto_20200213_0140")]
-
- operations = [
- migrations.AlterField(
- model_name="exporteddata",
- name="query_type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(0, "DISCOVER_V2"), (1, "BILLING_REPORT"), (2, "ISSUE_BY_TAG")]
- ),
- ),
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- DROP TABLE "sentry_pagerdutyserviceproject";
- """,
- reverse_sql="",
- hints={"tables": ["sentry_pagerdutyserviceproject"]},
- )
- ],
- state_operations=[],
- ),
- ]
diff --git a/src/sentry/migrations/0039_delete_incidentsuspectcommit.py b/src/sentry/migrations/0039_delete_incidentsuspectcommit.py
deleted file mode 100644
index b47ccb896ccd9b..00000000000000
--- a/src/sentry/migrations/0039_delete_incidentsuspectcommit.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-03 22:32
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0038_auto_20200213_1904")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[migrations.DeleteModel(name="IncidentSuspectCommit")],
- database_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0040_remove_incidentsuspectcommittable.py b/src/sentry/migrations/0040_remove_incidentsuspectcommittable.py
deleted file mode 100644
index 892197b9834d68..00000000000000
--- a/src/sentry/migrations/0040_remove_incidentsuspectcommittable.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-03 22:58
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0039_delete_incidentsuspectcommit")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- DROP TABLE "sentry_incidentsuspectcommit";
- """,
- reverse_sql="",
- hints={"tables": ["sentry_incidentsuspectcommit"]},
- )
- ],
- state_operations=[],
- )
- ]
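
Taken together, 0039 and 0040 demonstrate the two-phase table drop: one migration edits only Django's migration state, and the next issues the real DDL with a routing hint. A condensed sketch of the pair, with a hypothetical model and table name:

```python
from django.db import migrations

# Phase 1 (as in 0039): remove the model from Django's state only;
# the underlying table is untouched.
drop_from_state = migrations.SeparateDatabaseAndState(
    state_operations=[migrations.DeleteModel(name="SomeModel")],
    database_operations=[],
)

# Phase 2 (as in 0040): drop the table for real, with no state change.
drop_table = migrations.SeparateDatabaseAndState(
    database_operations=[
        migrations.RunSQL(
            'DROP TABLE "sentry_somemodel";',
            reverse_sql="",  # effectively irreversible, as in 0040
            hints={"tables": ["sentry_somemodel"]},
        )
    ],
    state_operations=[],
)
```
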
diff --git a/src/sentry/migrations/0041_incidenttrigger_date_modified.py b/src/sentry/migrations/0041_incidenttrigger_date_modified.py
deleted file mode 100644
index 3f942220dc340a..00000000000000
--- a/src/sentry/migrations/0041_incidenttrigger_date_modified.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-14 02:12
-
-from django.db import migrations, models
-import django.utils.timezone
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0040_remove_incidentsuspectcommittable"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="incidenttrigger",
- name="date_modified",
- field=models.DateTimeField(default=django.utils.timezone.now, null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0042_auto_20200214_1607.py b/src/sentry/migrations/0042_auto_20200214_1607.py
deleted file mode 100644
index d4ba3599ab1ffb..00000000000000
--- a/src/sentry/migrations/0042_auto_20200214_1607.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-14 16:07
-
-from django.db import migrations, models
-import django.utils.timezone
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0041_incidenttrigger_date_modified"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="incidenttrigger",
- name="date_modified",
- field=models.DateTimeField(default=django.utils.timezone.now),
- ),
- ]
diff --git a/src/sentry/migrations/0043_auto_20200218_1903.py b/src/sentry/migrations/0043_auto_20200218_1903.py
deleted file mode 100644
index 56e8b91e2dfffb..00000000000000
--- a/src/sentry/migrations/0043_auto_20200218_1903.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-18 19:03
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0042_auto_20200214_1607"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="incident",
- name="type",
- field=models.PositiveSmallIntegerField(),
- ),
- ]
diff --git a/src/sentry/migrations/0044_auto_20200219_0018.py b/src/sentry/migrations/0044_auto_20200219_0018.py
deleted file mode 100644
index f7bed5c5e7f35f..00000000000000
--- a/src/sentry/migrations/0044_auto_20200219_0018.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.27 on 2020-02-19 00:18
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0043_auto_20200218_1903"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="exporteddata",
- name="date_expired",
- field=models.DateTimeField(db_index=True, null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0045_remove_incidentactivity_event_stats_snapshot.py b/src/sentry/migrations/0045_remove_incidentactivity_event_stats_snapshot.py
deleted file mode 100644
index 577e8f31c11995..00000000000000
--- a/src/sentry/migrations/0045_remove_incidentactivity_event_stats_snapshot.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-20 02:02
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0044_auto_20200219_0018")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.RemoveField(model_name="incidentactivity", name="event_stats_snapshot")
- ],
- database_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0046_auto_20200221_1735.py b/src/sentry/migrations/0046_auto_20200221_1735.py
deleted file mode 100644
index 1ba95ac00e65e8..00000000000000
--- a/src/sentry/migrations/0046_auto_20200221_1735.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-21 17:35
-
-from django.conf import settings
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0045_remove_incidentactivity_event_stats_snapshot"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="exporteddata",
- name="file",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to="sentry.File",
- ),
- ),
- migrations.AlterField(
- model_name="exporteddata",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0047_auto_20200224_2319.py b/src/sentry/migrations/0047_auto_20200224_2319.py
deleted file mode 100644
index 30bac5252eea54..00000000000000
--- a/src/sentry/migrations/0047_auto_20200224_2319.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Generated by Django 1.11.28 on 2020-02-24 23:19
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0046_auto_20200221_1735")]
-
- operations = [
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_incidentactivity" DROP COLUMN "event_stats_snapshot_id";
- """,
- reverse_sql="""
- ALTER TABLE "sentry_incidentactivity" ADD COLUMN "event_stats_snapshot_id" bigint NULL;
- """,
- hints={"tables": ["sentry_incidentactivity"]},
- )
- ]
diff --git a/src/sentry/migrations/0048_auto_20200302_1825.py b/src/sentry/migrations/0048_auto_20200302_1825.py
deleted file mode 100644
index 867217fd7b8f8a..00000000000000
--- a/src/sentry/migrations/0048_auto_20200302_1825.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-02 19:55
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0047_auto_20200224_2319")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.RemoveField(model_name="organizationonboardingtask", name="project_id"),
- migrations.AddField(
- model_name="organizationonboardingtask",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- migrations.AlterField(
- model_name="organizationonboardingtask",
- name="status",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(1, "complete"), (2, "pending"), (3, "skipped")]
- ),
- ),
- migrations.AlterField(
- model_name="organizationonboardingtask",
- name="task",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "create_project"),
- (2, "send_first_event"),
- (3, "invite_member"),
- (4, "setup_second_platform"),
- (5, "setup_user_context"),
- (6, "setup_release_tracking"),
- (7, "setup_sourcemaps"),
- (8, "setup_user_reports"),
- (9, "setup_issue_tracker"),
- (10, "setup_alert_rules"),
- ]
- ),
- ),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0049_auto_20200304_0254.py b/src/sentry/migrations/0049_auto_20200304_0254.py
deleted file mode 100644
index 3e82af68b9292d..00000000000000
--- a/src/sentry/migrations/0049_auto_20200304_0254.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-04 02:54
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0048_auto_20200302_1825"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="QuerySubscriptionEnvironment",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Environment"
- ),
- ),
- (
- "query_subscription",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.QuerySubscription"
- ),
- ),
- ],
- options={
- "db_table": "sentry_querysubscriptionenvironment",
- },
- ),
- migrations.AddField(
- model_name="querysubscription",
- name="environments",
- field=models.ManyToManyField(
- through="sentry.QuerySubscriptionEnvironment", to="sentry.Environment"
- ),
- ),
- migrations.AlterUniqueTogether(
- name="querysubscriptionenvironment",
- unique_together={("query_subscription", "environment")},
- ),
- ]
diff --git a/src/sentry/migrations/0050_auto_20200306_2346.py b/src/sentry/migrations/0050_auto_20200306_2346.py
deleted file mode 100644
index e988dbe806d2c7..00000000000000
--- a/src/sentry/migrations/0050_auto_20200306_2346.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-06 23:46
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0049_auto_20200304_0254")]
-
- operations = [
- migrations.AlterField(
- model_name="exporteddata",
- name="query_type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(0, "Issues-by-Tag"), (1, "Discover")]
- ),
- )
- ]
diff --git a/src/sentry/migrations/0051_fix_auditlog_pickled_data.py b/src/sentry/migrations/0051_fix_auditlog_pickled_data.py
deleted file mode 100644
index c10604d971cf79..00000000000000
--- a/src/sentry/migrations/0051_fix_auditlog_pickled_data.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-06 00:49
-
-from django.db import migrations
-
-from bitfield.types import BitHandler
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def cleanup_audit_log_data(apps, schema_editor):
- """
-    Fixes `AuditLogEntry` rows that have pickled `Team` models in their `data` field. Also
-    fixes some rows where flags were serialized as a `BitHandler` rather than an int.
- See sentry.api.serializers.models.auditlogentry::fix for details.
- """
- AuditLogEntry = apps.get_model("sentry", "AuditLogEntry")
- for audit_log in RangeQuerySetWrapperWithProgressBar(AuditLogEntry.objects.all()):
- modified = False
- if "flags" in audit_log.data and isinstance(audit_log.data["flags"], BitHandler):
- audit_log.data["flags"] = int(audit_log.data["flags"])
- modified = True
- if audit_log.event == 3: # MEMBER_ACCEPT
- teams = audit_log.data.get("teams")
- if teams and hasattr(teams[0], "id"):
- # We have a team in here rather than just the expected data
- audit_log.data["teams"] = [team.id for team in teams]
- audit_log.data["teams_slugs"] = [team.slug for team in teams]
- modified = True
- if modified:
- audit_log.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0050_auto_20200306_2346")]
-
- operations = [
- migrations.RunPython(
- code=cleanup_audit_log_data,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_auditlogentry"]},
- )
- ]
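
The `BitHandler` branch above handles flags that were stored as pickled `bitfield` objects instead of plain ints. A small illustration of the coercion the migration applies, assuming django-bitfield's `BitHandler(value, keys)` constructor and made-up flag names:

```python
from bitfield.types import BitHandler

# Hypothetical flag names; only the int coercion matters to the migration.
flags = BitHandler(3, ["has_releases", "has_sourcemaps"])
assert int(flags) == 3  # this plain int is what gets stored back into `data`
```
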
diff --git a/src/sentry/migrations/0052_organizationonboardingtask_completion_seen.py b/src/sentry/migrations/0052_organizationonboardingtask_completion_seen.py
deleted file mode 100644
index 36d722dc3d24ac..00000000000000
--- a/src/sentry/migrations/0052_organizationonboardingtask_completion_seen.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-11 21:30
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0051_fix_auditlog_pickled_data"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="organizationonboardingtask",
- name="completion_seen",
- field=models.DateTimeField(null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0053_migrate_alert_task_onboarding.py b/src/sentry/migrations/0053_migrate_alert_task_onboarding.py
deleted file mode 100644
index af477a09b6b850..00000000000000
--- a/src/sentry/migrations/0053_migrate_alert_task_onboarding.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-12 21:02
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0052_organizationonboardingtask_completion_seen")]
-
- # NOTE: This migration previously was making changes that were only
- # applicable to the sentry.io SAAS project. We've changed this migration to
- # be a no-op before it had been run, and will be moving the migration to
- # getsentry.
- operations = []
diff --git a/src/sentry/migrations/0054_create_key_transaction.py b/src/sentry/migrations/0054_create_key_transaction.py
deleted file mode 100644
index e03a9bedf575fb..00000000000000
--- a/src/sentry/migrations/0054_create_key_transaction.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-13 20:05
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0053_migrate_alert_task_onboarding"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="KeyTransaction",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("transaction", models.CharField(max_length=200)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- (
- "owner",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- ],
- options={
- "db_table": "sentry_discoverkeytransaction",
- },
- ),
- migrations.AlterUniqueTogether(
- name="keytransaction",
- unique_together={("project", "transaction")},
- ),
- ]
diff --git a/src/sentry/migrations/0055_query_subscription_status.py b/src/sentry/migrations/0055_query_subscription_status.py
deleted file mode 100644
index 3deab08dbc4c1c..00000000000000
--- a/src/sentry/migrations/0055_query_subscription_status.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-17 00:30
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0054_create_key_transaction"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="querysubscription",
- name="status",
- field=models.SmallIntegerField(default=0),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="subscription_id",
- field=models.TextField(null=True, unique=True),
- ),
- ]
diff --git a/src/sentry/migrations/0056_remove_old_functions.py b/src/sentry/migrations/0056_remove_old_functions.py
deleted file mode 100644
index e731e70e9d1322..00000000000000
--- a/src/sentry/migrations/0056_remove_old_functions.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-11 15:29
-
-import re
-
-from django.db import migrations
-from django.db.models import Q
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-FIELDS_TO_CHANGE = {"orderby", "fields", "yAxis", "query"}
-FUNCTION_CHANGE = {
- "p75": "p75()",
- "p95": "p95()",
- "p99": "p99()",
- "apdex": "apdex(300)",
- "impact": "impact(300)",
- "last_seen": "last_seen()",
- "latest_event": "latest_event()",
-}
-COUNT_REGEX = re.compile(r".*(count\([a-zA-Z\._]+\)).*")
-FUNCTION_PATTERN = re.compile(r"^(?P<function>[^\(]+)\((?P<columns>[^\)]*)\)$")
-
-
-def get_function_alias_with_columns(function_name, columns):
- columns = "_".join(columns).replace(".", "_")
- return f"{function_name}_{columns}".rstrip("_")
-
-
-def get_function_alias(field):
- match = FUNCTION_PATTERN.search(field)
- columns = [c.strip() for c in match.group("columns").split(",") if len(c.strip()) > 0]
- return get_function_alias_with_columns(match.group("function"), columns)
-
-
-def convert_function(field, count_default="count()", transform=None):
- if transform is None:
- transform = lambda x: x
-
- if "count" in field and "count_unique" not in field:
- field = count_default
- return field
-
- for old_fn, new_fn in FUNCTION_CHANGE.items():
- if old_fn + "()" in field:
- field = field.replace(old_fn + "()", transform(new_fn))
- elif old_fn in field:
- field = field.replace(old_fn, transform(new_fn))
-
- return field
-
-
-def convert(DiscoverSavedQuery, saved_query):
- old_query = saved_query.query
- new_query = {}
-
- for key in old_query:
- if key in FIELDS_TO_CHANGE:
- continue
-
- new_query[key] = old_query[key]
-
- orderby = old_query.get("orderby")
- if orderby:
- new_query["orderby"] = convert_function(
- orderby, count_default="count", transform=get_function_alias
- )
-
- yAxis = old_query.get("yAxis")
- if yAxis:
- new_query["yAxis"] = convert_function(yAxis)
-
- fields = old_query.get("fields")
- new_fields = []
- for field in fields:
- new_fields.append(convert_function(field))
- new_query["fields"] = new_fields
-
- search = old_query.get("query")
- if search:
- match = COUNT_REGEX.match(search)
- if match:
- search = search.replace(match.groups()[0], "count()")
- for old_fn, new_fn in FUNCTION_CHANGE.items():
- if old_fn + "()" in search:
- search = search.replace(old_fn + "()", new_fn)
- elif old_fn in search:
- search = search.replace(old_fn, new_fn)
- new_query["query"] = search
-
- DiscoverSavedQuery.objects.filter(id=saved_query.id).update(query=new_query)
-
-
-def migrate_functions_in_queries(apps, schema_editor):
- """
-    Rewrites aggregate references in existing Discover v2 saved queries to the
-    new parenthesized function syntax (e.g. p75 -> p75()).
- """
- DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
-
- """
- Seq Scan on sentry_discoversavedquery (cost=0.00..225.15 rows=1077 width=200) (actual time=0.054..7.875 rows=1037 loops=1)
- Filter: ((version = 2) AND ((query ~~ '%p95%'::text) OR (query ~~ '%p99%'::text) OR (query ~~ '%p75%'::text) OR (query ~~ '%apdex%'::text) OR (query ~~ '%impact%'::text) OR (query ~~ '%last_seen%'::text) OR (query ~~ '%latest_event%'::text) OR (query ~~ '%count(%'::text)))
- Rows Removed by Filter: 2074
- Planning time: 2.305 ms
- Execution time: 8.694 ms
- """
- function_filter = Q(query__contains="count(")
- for key in FUNCTION_CHANGE:
- function_filter |= Q(query__contains=key)
-
- queryset = DiscoverSavedQuery.objects.filter(function_filter, version=2)
-
- for query in RangeQuerySetWrapperWithProgressBar(queryset):
- convert(DiscoverSavedQuery, query)
-
-
-class Migration(migrations.Migration):
- is_dangerous = False
- atomic = False
-
- dependencies = [
- ("sentry", "0055_query_subscription_status"),
- ]
-
- operations = [
- migrations.RunPython(
- migrate_functions_in_queries,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_discoversavedquery"]},
- ),
- ]
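
To make the rewrite rules concrete, here is how the helpers in this deleted migration behave on a few inputs, worked out from the code above rather than from any external documentation:

```python
# Bare aggregates gain parentheses; any plain count(...) collapses to the default.
assert convert_function("p75") == "p75()"
assert convert_function("count(id)") == "count()"

# orderby values are additionally rewritten to snake_case result aliases via
# get_function_alias, so the sort still points at the aggregated column.
assert (
    convert_function("-apdex", count_default="count", transform=get_function_alias)
    == "-apdex_300"
)
assert get_function_alias("count_unique(user.email)") == "count_unique_user_email"
```
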
diff --git a/src/sentry/migrations/0057_remove_unused_project_flag.py b/src/sentry/migrations/0057_remove_unused_project_flag.py
deleted file mode 100644
index a96a34654f6a18..00000000000000
--- a/src/sentry/migrations/0057_remove_unused_project_flag.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-27 03:34
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0056_remove_old_functions")]
-
- """
- Generated DDL:
- BEGIN;
- --
- -- Alter field flags on project
- --
- COMMIT;
- """
-
- operations = [
- migrations.AlterField(
- model_name="project",
- name="flags",
- field=bitfield.models.BitField(
- (("has_releases", "This Project has sent release data"),), default=0, null=True
- ),
- )
- ]
diff --git a/src/sentry/migrations/0058_project_issue_alerts_targeting.py b/src/sentry/migrations/0058_project_issue_alerts_targeting.py
deleted file mode 100644
index 35ab4a552a8c6c..00000000000000
--- a/src/sentry/migrations/0058_project_issue_alerts_targeting.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.11.28 on 2020-03-27 21:40
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0057_remove_unused_project_flag")]
-
- operations = [
- migrations.AlterField(
- model_name="project",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("has_releases", "This Project has sent release data"),
- ("has_issue_alerts_targeting", "This Project has issue alerts targeting"),
- ),
- default=0,
- null=True,
- ),
- )
- ]
diff --git a/src/sentry/migrations/0059_add_new_sentry_app_features.py b/src/sentry/migrations/0059_add_new_sentry_app_features.py
deleted file mode 100644
index ce34ee2682844a..00000000000000
--- a/src/sentry/migrations/0059_add_new_sentry_app_features.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Generated by Django 1.11.27 on 2020-03-30 21:35
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0058_project_issue_alerts_targeting")]
-
- operations = [
- migrations.AlterField(
- model_name="integrationfeature",
- name="feature",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "integrations-api"),
- (1, "integrations-issue-link"),
- (2, "integrations-stacktrace-link"),
- (3, "integrations-event-hooks"),
- (4, "integrations-project-management"),
- (5, "integrations-incident-management"),
- (6, "integrations-feature-flag"),
- ],
- default=0,
- ),
- )
- ]
diff --git a/src/sentry/migrations/0060_add_file_eventattachment_index.py b/src/sentry/migrations/0060_add_file_eventattachment_index.py
deleted file mode 100644
index 380c0eb6d59dae..00000000000000
--- a/src/sentry/migrations/0060_add_file_eventattachment_index.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.28 on 2020-04-01 01:34
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0059_add_new_sentry_app_features"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY "sentry_eventattachment_project_id_date_added_fi_f3b0597f_idx" ON "sentry_eventattachment" ("project_id", "date_added", "file_id");
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY "sentry_eventattachment_project_id_date_added_fi_f3b0597f_idx";
- """,
- hints={"tables": ["sentry_eventattachment"]},
- )
- ],
- state_operations=[
- migrations.AlterIndexTogether(
- name="eventattachment",
- index_together={
- ("project_id", "date_added", "file"),
- ("project_id", "date_added"),
- },
- ),
- ],
- )
- ]
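On Django 1.11 a concurrent index has to be written as raw SQL wrapped in SeparateDatabaseAndState, with atomic = False because CREATE INDEX CONCURRENTLY refuses to run inside a transaction. For reference, Django 3.0+ ships a first-class operation for the same pattern; a sketch, with an assumed index name:

    from django.contrib.postgres.operations import AddIndexConcurrently
    from django.db import migrations, models

    class Migration(migrations.Migration):
        atomic = False  # still required: CONCURRENTLY cannot run in a transaction

        dependencies = [("sentry", "0059_add_new_sentry_app_features")]

        operations = [
            AddIndexConcurrently(
                model_name="eventattachment",
                index=models.Index(
                    fields=["project_id", "date_added", "file"],
                    name="eventattach_proj_date_file",  # hypothetical name
                ),
            ),
        ]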
diff --git a/src/sentry/migrations/0061_alertrule_partial_index.py b/src/sentry/migrations/0061_alertrule_partial_index.py
deleted file mode 100644
index 4c1e781cdc3c82..00000000000000
--- a/src/sentry/migrations/0061_alertrule_partial_index.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Generated by Django 1.11.27 on 2020-04-08 01:07
-
-import django.db.models.deletion
-from django.db import migrations
-
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0060_add_file_eventattachment_index"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE sentry_alertrule DROP CONSTRAINT IF EXISTS sentry_alertrule_organization_id_name_12c48b37_uniq;
- """,
- reverse_sql="""DO $$
- BEGIN
- BEGIN
- ALTER TABLE sentry_alertrule ADD CONSTRAINT sentry_alertrule_organization_id_name_12c48b37_uniq UNIQUE (organization_id, name);
- EXCEPTION
- WHEN duplicate_table THEN
- END;
- END $$;
- """,
- hints={"tables": ["sentry_alertrule"]},
- ),
- migrations.RunSQL(
- """
- DROP INDEX CONCURRENTLY IF EXISTS sentry_alertrule_organization_id_name_12c48b37_uniq;
- """,
- reverse_sql="""
- CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS sentry_alertrule_organization_id_name_12c48b37_uniq
- ON sentry_alertrule USING btree (organization_id, name);
- """,
- hints={"tables": ["sentry_alertrule"]},
- ),
- migrations.RunSQL(
- """
- CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS sentry_alertrule_status_active
- ON sentry_alertrule USING btree (organization_id, name, status)
- WHERE status = 0;
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY IF EXISTS sentry_alertrule_status_active;
- """,
- hints={"tables": ["sentry_alertrule"]},
- ),
- ],
- state_operations=[
- migrations.AlterUniqueTogether(
- name="alertrule", unique_together={("organization", "name", "status")}
- )
- ],
- ),
- migrations.AlterField(
- model_name="alertrule",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- ]
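The hand-rolled partial unique index above is how Django 1.11 had to express "alert rule names are unique per organization, but only among active rows (status = 0)". From Django 2.2 the same constraint can be declared on the model itself; a sketch under that assumption:

    from django.db import models
    from django.db.models import Q

    class Meta:  # inside the AlertRule model
        constraints = [
            # Unique only where status = 0, i.e. among non-snapshot rules;
            # the declarative twin of sentry_alertrule_status_active above.
            models.UniqueConstraint(
                fields=["organization", "name", "status"],
                condition=Q(status=0),
                name="sentry_alertrule_status_active",
            ),
        ]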
diff --git a/src/sentry/migrations/0062_key_transactions_unique_with_owner.py b/src/sentry/migrations/0062_key_transactions_unique_with_owner.py
deleted file mode 100644
index 66bea9a49c28d0..00000000000000
--- a/src/sentry/migrations/0062_key_transactions_unique_with_owner.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Generated by Django 1.11.28 on 2020-04-09 22:30
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0061_alertrule_partial_index"),
- ]
-
- operations = [
- migrations.AlterUniqueTogether(
- name="keytransaction",
- unique_together={("project", "owner", "transaction")},
- ),
- ]
diff --git a/src/sentry/migrations/0063_drop_alertrule_constraint.py b/src/sentry/migrations/0063_drop_alertrule_constraint.py
deleted file mode 100644
index 6baf47e6d739d1..00000000000000
--- a/src/sentry/migrations/0063_drop_alertrule_constraint.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Generated by Django 1.11.27 on 2020-04-08 01:07
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0062_key_transactions_unique_with_owner"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE sentry_alertrule DROP CONSTRAINT IF EXISTS sentry_alertrule_organization_id_382634eccd5f9371_uniq;
- """,
- reverse_sql="""DO $$
- BEGIN
- BEGIN
- ALTER TABLE sentry_alertrule ADD CONSTRAINT sentry_alertrule_organization_id_382634eccd5f9371_uniq UNIQUE (organization_id, name);
- EXCEPTION
- WHEN duplicate_table THEN
- END;
- END $$;
- """,
- hints={"tables": ["sentry_alertrule"]},
- ),
- ],
- state_operations=[],
- ),
- ]
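Both this migration and 0061 guard their DROP with IF EXISTS and an exception-swallowing reverse so they can be re-run after a partial failure. When writing DDL like this it can help to probe the Postgres catalog directly; a small helper sketch:

    from django.db import connection

    def constraint_exists(name):
        # Look the constraint up in the Postgres catalog; useful both for
        # sanity checks before a deploy and for writing re-runnable DDL.
        with connection.cursor() as cursor:
            cursor.execute("SELECT 1 FROM pg_constraint WHERE conname = %s", [name])
            return cursor.fetchone() is not None

    # e.g. constraint_exists("sentry_alertrule_organization_id_382634eccd5f9371_uniq")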
diff --git a/src/sentry/migrations/0064_project_has_transactions.py b/src/sentry/migrations/0064_project_has_transactions.py
deleted file mode 100644
index cb01a36b77476a..00000000000000
--- a/src/sentry/migrations/0064_project_has_transactions.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Generated by Django 1.11.28 on 2020-04-15 22:18
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- """
- BEGIN;
- --
- -- Alter field flags on project
- --
- COMMIT;
- """
-
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0063_drop_alertrule_constraint")]
-
- operations = [
- migrations.AlterField(
- model_name="project",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("has_releases", "This Project has sent release data"),
- ("has_issue_alerts_targeting", "This Project has issue alerts targeting"),
- ("has_transactions", "This Project has sent transactions"),
- ),
- default=0,
- null=True,
- ),
- )
- ]
diff --git a/src/sentry/migrations/0065_add_incident_status_method.py b/src/sentry/migrations/0065_add_incident_status_method.py
deleted file mode 100644
index 9e851d7fc681ab..00000000000000
--- a/src/sentry/migrations/0065_add_incident_status_method.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-04-16 03:25
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0064_project_has_transactions"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="incident",
- name="status_method",
- field=models.PositiveSmallIntegerField(default=3),
- ),
- ]
diff --git a/src/sentry/migrations/0066_alertrule_manager.py b/src/sentry/migrations/0066_alertrule_manager.py
deleted file mode 100644
index eb7f60829d6251..00000000000000
--- a/src/sentry/migrations/0066_alertrule_manager.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.29 on 2020-04-15 23:27
-
-from django.db import migrations
-import django.db.models.manager
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0065_add_incident_status_method"),
- ]
-
- operations = [
- migrations.AlterModelOptions(
- name="alertrule",
- options={
- "base_manager_name": "objects_with_snapshots",
- "default_manager_name": "objects_with_snapshots",
- },
- ),
- migrations.AlterModelManagers(
- name="alertrule",
- managers=[
- ("objects_with_snapshots", django.db.models.manager.Manager()),
- ],
- ),
- ]
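Pointing base_manager_name and default_manager_name at objects_with_snapshots matters because the default AlertRule manager filters rows out: related-object traversal and later data migrations (0073 queries objects_with_snapshots directly) must see every row, snapshots included. A sketch of the shape; the exact snapshot filter is an assumption, not Sentry's real manager:

    from django.db import models

    SNAPSHOT_STATUS = 4  # assumed value for illustration

    class AlertRuleManager(models.Manager):
        def get_queryset(self):
            # Hide snapshot rows from everyday application queries.
            return super().get_queryset().exclude(status=SNAPSHOT_STATUS)

    class AlertRule(models.Model):  # illustrative stand-in, not sentry.AlertRule
        status = models.SmallIntegerField(default=0)

        objects = AlertRuleManager()               # filtered, for app code
        objects_with_snapshots = models.Manager()  # unfiltered, for internals

        class Meta:
            app_label = "sentry"
            base_manager_name = "objects_with_snapshots"
            default_manager_name = "objects_with_snapshots"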
diff --git a/src/sentry/migrations/0067_migrate_rules_alert_targeting.py b/src/sentry/migrations/0067_migrate_rules_alert_targeting.py
deleted file mode 100644
index c932e215445bc5..00000000000000
--- a/src/sentry/migrations/0067_migrate_rules_alert_targeting.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Generated by Django 1.11.29 on 2020-04-20 20:53
-
-import logging
-
-from django.db import migrations, transaction
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-mail_action = {
- "id": "sentry.mail.actions.NotifyEmailAction",
- "targetType": "IssueOwners",
- "targetIdentifier": "None",
-}
-
-
-def set_user_option(UserOption, user, key, value, project):
- inst, created = UserOption.objects.get_or_create(
- user=user, project=project, key=key, defaults={"value": value}
- )
- if not created and inst.value != value:
- inst.update(value=value)
-
-
-def migrate_project_to_issue_alert_targeting(project, ProjectOption, Rule, User, UserOption):
- if project.flags.has_issue_alerts_targeting:
- # Migration has already been run.
- return
- with transaction.atomic():
- # Determine whether this project actually has mail enabled
- try:
- mail_enabled = ProjectOption.objects.get(project=project, key="mail:enabled").value
- except ProjectOption.DoesNotExist:
- mail_enabled = True
- for rule in Rule.objects.filter(project=project, status=0):
- migrate_legacy_rule(rule, mail_enabled)
-
- if not mail_enabled:
- # If mail disabled, then we want to disable mail options for all
- # users associated with this project so that they don't suddenly start
- # getting mail via the `MailAdapter`, since it will always be enabled.
- for user in User.objects.filter(
- sentry_orgmember_set__teams__in=project.teams.all(), is_active=True
- ):
- set_user_option(UserOption, user, "mail:alert", 0, project)
- set_user_option(UserOption, user, "workflow:notifications", "2", project=project)
-
- # This marks the migration finished and shows the new UI
- project.flags.has_issue_alerts_targeting = True
- project.save()
-
-
-def migrate_legacy_rule(rule, mail_enabled):
- actions = rule.data.get("actions", [])
- new_actions = []
- has_mail_action = False
- for action in actions:
- action_id = action.get("id")
- if action_id == "sentry.rules.actions.notify_event.NotifyEventAction":
- # This is the "Send a notification (for all legacy integrations)" action.
- # When this action exists, we want to add the new `NotifyEmailAction` action
- # to the rule. We'll still leave `NotifyEventAction` in place, since it will
- # only notify non-mail plugins once we've migrated.
- new_actions.append(action)
- has_mail_action = True
- elif (
- action_id == "sentry.rules.actions.notify_event_service.NotifyEventServiceAction"
- and action.get("service") == "mail"
- ):
- # This is the "Send a notification via mail" action. When this action
- # exists, we want to add the new `NotifyEmailAction` action to the rule.
- # We'll drop this action from the rule, since all it does it send mail and
- # we don't want to double up.
- has_mail_action = True
- else:
- new_actions.append(action)
-
- # We only add the new action if the mail plugin is actually enabled, and there's an
- # action that sends by mail. We do this outside the loop to ensure we don't add it
- # more than once.
- if mail_enabled and has_mail_action:
- new_actions.append(mail_action)
-
- if actions != new_actions:
- rule.data["actions"] = new_actions
- rule.save()
-
-
-def migrate_to_issue_alert_targeting(apps, schema_editor):
- Project = apps.get_model("sentry", "Project")
- ProjectOption = apps.get_model("sentry", "ProjectOption")
- Organization = apps.get_model("sentry", "Organization")
- Rule = apps.get_model("sentry", "Rule")
- User = apps.get_model("sentry", "User")
- UserOption = apps.get_model("sentry", "UserOption")
-
- for org in RangeQuerySetWrapperWithProgressBar(Organization.objects.filter(status=0)):
- # We migrate a project at a time, but we prefer to group by org so that for the
- # most part an org will see the changes all at once.
- for project in Project.objects.filter(organization=org, status=0):
- try:
- migrate_project_to_issue_alert_targeting(
- project, ProjectOption, Rule, User, UserOption
- )
- except Exception:
- # If a project fails we'll just log and continue. We shouldn't see any
- # failures, but if we do we can analyze them and re-run this migration,
- # since it is idempotent.
- logging.exception(f"Error migrating project {project.id}")
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0066_alertrule_manager")]
-
- operations = [
- migrations.RunPython(
- migrate_to_issue_alert_targeting,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_useroption", "sentry_rule", "sentry_project"]},
- )
- ]
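This migration shows the general shape of a safe data migration: models come from apps.get_model() (historical state, never live imports), each unit of work checks a "done" marker first so the whole run is idempotent, and per-project failures are logged rather than aborting everything. A stripped-down skeleton of the same pattern:

    import logging

    from django.db import migrations

    def forwards(apps, schema_editor):
        # Historical model state -- never import sentry.models directly here.
        Project = apps.get_model("sentry", "Project")
        for project in Project.objects.filter(status=0).iterator():
            if project.flags.has_issue_alerts_targeting:
                continue  # marker already set: re-runs skip finished rows
            try:
                ...  # per-project work, ending by setting the marker flag
            except Exception:
                logging.exception("Error migrating project %s", project.id)

    class Migration(migrations.Migration):
        atomic = False  # don't hold one transaction across the whole backfill

        dependencies = [("sentry", "0066_alertrule_manager")]

        operations = [
            migrations.RunPython(forwards, reverse_code=migrations.RunPython.noop),
        ]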
diff --git a/src/sentry/migrations/0068_project_default_flags.py b/src/sentry/migrations/0068_project_default_flags.py
deleted file mode 100644
index 547abadb5eaf21..00000000000000
--- a/src/sentry/migrations/0068_project_default_flags.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.11.29 on 2020-04-21 21:08
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0067_migrate_rules_alert_targeting")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterField(
- model_name="project",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("has_releases", "This Project has sent release data"),
- (
- "has_issue_alerts_targeting",
- "This Project has issue alerts targeting",
- ),
- ("has_transactions", "This Project has sent transactions"),
- ),
- default=2,
- null=True,
- ),
- )
- ]
- )
- ]
diff --git a/src/sentry/migrations/0069_remove_tracked_superusers.py b/src/sentry/migrations/0069_remove_tracked_superusers.py
deleted file mode 100644
index 67f2071bef0ac3..00000000000000
--- a/src/sentry/migrations/0069_remove_tracked_superusers.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Generated by Django 1.11.29 on 2020-04-20 20:03
-
-from django.db import migrations
-
-
-def has_access(org, user, access=None):
- queryset = org.member_set.filter(user=user)
- if access is not None:
- queryset = queryset.filter(type__lte=access)
-
- return queryset.exists()
-
-
-def remove_tracked_superuser_views(apps, schema_editor):
- """
-    We recently added code to only track alert views from people who are in the org and members of the associated alert projects.
-    This migration removes all of the views tracked before that change landed (i.e. superuser views of orgs the viewer is not a part of).
- """
- IncidentSeen = apps.get_model("sentry", "IncidentSeen")
- tracked_views = IncidentSeen.objects.all().select_related("user", "incident")
- for tracked_view in tracked_views:
- org_member = has_access(tracked_view.incident.organization, tracked_view.user)
- if not org_member:
- tracked_view.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0068_project_default_flags"),
- ]
-
- operations = [
- migrations.RunPython(
- remove_tracked_superuser_views,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_incidentseen"]},
- ),
- ]
diff --git a/src/sentry/migrations/0070_incident_snapshot_support.py b/src/sentry/migrations/0070_incident_snapshot_support.py
deleted file mode 100644
index 3482aec1c4714b..00000000000000
--- a/src/sentry/migrations/0070_incident_snapshot_support.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Generated by Django 1.11.29 on 2020-04-28 23:00
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.onetoone
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0069_remove_tracked_superusers"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="PendingIncidentSnapshot",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- (
- "target_run_date",
- models.DateTimeField(db_index=True, default=django.utils.timezone.now),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "incident",
- sentry.db.models.fields.onetoone.OneToOneCascadeDeletes(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Incident"
- ),
- ),
- ],
- options={
- "db_table": "sentry_pendingincidentsnapshot",
- },
- ),
- ]
diff --git a/src/sentry/migrations/0071_add_default_fields_model_subclass.py b/src/sentry/migrations/0071_add_default_fields_model_subclass.py
deleted file mode 100644
index 68757d3a514f8a..00000000000000
--- a/src/sentry/migrations/0071_add_default_fields_model_subclass.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-01 16:14
-
-from django.db import migrations, models
-import django.utils.timezone
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0070_incident_snapshot_support"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="integration",
- name="date_updated",
- field=models.DateTimeField(default=django.utils.timezone.now),
- ),
- migrations.AddField(
- model_name="integrationexternalproject",
- name="date_updated",
- field=models.DateTimeField(default=django.utils.timezone.now),
- ),
- migrations.AddField(
- model_name="organizationintegration",
- name="date_updated",
- field=models.DateTimeField(default=django.utils.timezone.now),
- ),
- migrations.AddField(
- model_name="pagerdutyservice",
- name="date_updated",
- field=models.DateTimeField(default=django.utils.timezone.now),
- ),
- ]
diff --git a/src/sentry/migrations/0072_alert_rules_query_changes.py b/src/sentry/migrations/0072_alert_rules_query_changes.py
deleted file mode 100644
index 5991b6479d53ff..00000000000000
--- a/src/sentry/migrations/0072_alert_rules_query_changes.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-01 23:36
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0071_add_default_fields_model_subclass"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="SnubaQuery",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("dataset", models.TextField()),
- ("query", models.TextField()),
- ("aggregate", models.TextField()),
- ("time_window", models.IntegerField()),
- ("resolution", models.IntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "environment",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Environment",
- ),
- ),
- ],
- options={
- "db_table": "sentry_snubaquery",
- },
- ),
- migrations.AlterField(
- model_name="alertrule",
- name="aggregation",
- field=models.IntegerField(default=0, null=True),
- ),
- migrations.AlterField(
- model_name="alertrule",
- name="dataset",
- field=models.TextField(null=True),
- ),
- migrations.AlterField(
- model_name="alertrule",
- name="query",
- field=models.TextField(null=True),
- ),
- migrations.AlterField(
- model_name="alertrule",
- name="resolution",
- field=models.IntegerField(null=True),
- ),
- migrations.AlterField(
- model_name="alertrule",
- name="time_window",
- field=models.IntegerField(null=True),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="aggregation",
- field=models.IntegerField(default=0, null=True),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="dataset",
- field=models.TextField(null=True),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="query",
- field=models.TextField(null=True),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="resolution",
- field=models.IntegerField(null=True),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="time_window",
- field=models.IntegerField(null=True),
- ),
- migrations.AddField(
- model_name="alertrule",
- name="snuba_query",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.SnubaQuery",
- unique=True,
- ),
- ),
- migrations.AddField(
- model_name="querysubscription",
- name="snuba_query",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- related_name="subscriptions",
- to="sentry.SnubaQuery",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0073_migrate_alert_query_model.py b/src/sentry/migrations/0073_migrate_alert_query_model.py
deleted file mode 100644
index 96b34732d551f1..00000000000000
--- a/src/sentry/migrations/0073_migrate_alert_query_model.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-06 21:19
-
-from datetime import timedelta
-from enum import Enum
-
-from django.db import migrations, transaction
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-class QueryAggregations(Enum):
- TOTAL = 0
- UNIQUE_USERS = 1
-
-
-aggregation_function_translations = {
- QueryAggregations.TOTAL: "count()",
- QueryAggregations.UNIQUE_USERS: "count_unique(user)",
-}
-
-
-def convert_alert_rule_to_snuba_query(alert_rule, SnubaQuery):
- if alert_rule.snuba_query:
- return
-
- with transaction.atomic():
- try:
- environment = alert_rule.environment.all()[0]
- except IndexError:
- environment = None
- snuba_query = SnubaQuery.objects.create(
- dataset=alert_rule.dataset,
- query=alert_rule.query,
- aggregate=aggregation_function_translations[QueryAggregations(alert_rule.aggregation)],
- time_window=int(timedelta(minutes=alert_rule.time_window).total_seconds()),
- resolution=int(timedelta(minutes=alert_rule.resolution).total_seconds()),
- environment=environment,
- )
- alert_rule.snuba_query = snuba_query
- alert_rule.save()
- alert_rule.query_subscriptions.all().update(snuba_query=snuba_query)
-
-
-def migrate_alert_query_model(apps, schema_editor):
- AlertRule = apps.get_model("sentry", "AlertRule")
- SnubaQuery = apps.get_model("sentry", "SnubaQuery")
- for alert_rule in RangeQuerySetWrapperWithProgressBar(
- AlertRule.objects_with_snapshots.filter(snuba_query__isnull=True)
- ):
- convert_alert_rule_to_snuba_query(alert_rule, SnubaQuery)
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0072_alert_rules_query_changes")]
-
- operations = [
- migrations.RunPython(
- migrate_alert_query_model,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_alertrule", "sentry_snubaquery"]},
- )
- ]
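RangeQuerySetWrapperWithProgressBar (from sentry.utils.query) exists because OFFSET pagination and whole-table querysets both fall over on large tables; it walks rows in primary-key order, one bounded chunk at a time. A generic sketch of that keyset-pagination idea:

    def iterate_by_pk(queryset, chunk=1000):
        # Keyset pagination: remember the last id seen and fetch the next
        # chunk above it, so every query hits the primary-key index and no
        # OFFSET scan grows over time. Rows created behind the cursor are
        # skipped, which is acceptable for a one-off backfill.
        last_pk = 0
        while True:
            rows = list(queryset.filter(pk__gt=last_pk).order_by("pk")[:chunk])
            if not rows:
                return
            yield from rows
            last_pk = rows[-1].pk

    # for alert_rule in iterate_by_pk(AlertRule.objects_with_snapshots.all()): ...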
diff --git a/src/sentry/migrations/0074_add_metric_alert_feature.py b/src/sentry/migrations/0074_add_metric_alert_feature.py
deleted file mode 100644
index c8685e672ae8cf..00000000000000
--- a/src/sentry/migrations/0074_add_metric_alert_feature.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-07 22:15
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0073_migrate_alert_query_model")]
-
- operations = [
- migrations.AlterField(
- model_name="featureadoption",
- name="feature_id",
- field=models.PositiveIntegerField(
- choices=[
- (0, "Python"),
- (1, "JavaScript"),
- (2, "Node.js"),
- (3, "Ruby"),
- (4, "Java"),
- (5, "Cocoa"),
- (6, "Objective-C"),
- (7, "PHP"),
- (8, "Go"),
- (9, "C#"),
- (10, "Perl"),
- (11, "Elixir"),
- (12, "CFML"),
- (13, "Groovy"),
- (14, "CSP Reports"),
- (20, "Flask"),
- (21, "Django"),
- (22, "Celery"),
- (23, "Bottle"),
- (24, "Pylons"),
- (25, "Tornado"),
- (26, "web.py"),
- (27, "Zope"),
- (40, "First Event"),
- (41, "Release Tracking"),
- (42, "Environment Tracking"),
- (43, "User Tracking"),
- (44, "Custom Tags"),
- (45, "Source Maps"),
- (46, "User Feedback"),
- (48, "Breadcrumbs"),
- (49, "Resolve with Commit"),
- (60, "First Project"),
- (61, "Invite Team"),
- (62, "Assign Issue"),
- (63, "Resolve in Next Release"),
- (64, "Advanced Search"),
- (65, "Saved Search"),
- (66, "Inbound Filters"),
- (67, "Alert Rules"),
- (68, "Issue Tracker Integration"),
- (69, "Notification Integration"),
- (70, "Delete and Discard Future Events"),
- (71, "Link a Repository"),
- (72, "Ownership Rules"),
- (73, "Ignore Issue"),
- (80, "SSO"),
- (81, "Data Scrubbers"),
- (90, "Create Release Using API"),
- (91, "Create Deploy Using API"),
- (92, "Metric Alert Rules"),
- ]
- ),
- )
- ]
diff --git a/src/sentry/migrations/0075_metric_alerts_fix_releases.py b/src/sentry/migrations/0075_metric_alerts_fix_releases.py
deleted file mode 100644
index 186752c8fc484c..00000000000000
--- a/src/sentry/migrations/0075_metric_alerts_fix_releases.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-08 20:43
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def migrate_alert_query_model(apps, schema_editor):
- SnubaQuery = apps.get_model("sentry", "SnubaQuery")
- for snuba_query in RangeQuerySetWrapperWithProgressBar(
- SnubaQuery.objects.filter(aggregate="count_unique(user)")
- ):
- snuba_query.aggregate = "count_unique(tags[sentry:user])"
- snuba_query.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0074_add_metric_alert_feature")]
-
- operations = [
- migrations.RunPython(
- migrate_alert_query_model,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_snubaquery"]},
- )
- ]
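Because this fix writes a single constant value to a single column, it could also be one bulk UPDATE; the row-by-row loop above trades raw speed for shorter locks and progress reporting. The bulk form, for comparison:

    def migrate_alert_query_model_bulk(apps, schema_editor):
        SnubaQuery = apps.get_model("sentry", "SnubaQuery")
        # One UPDATE statement: fastest, but it locks every matched row
        # until commit -- fine for small tables, risky on hot ones.
        SnubaQuery.objects.filter(aggregate="count_unique(user)").update(
            aggregate="count_unique(tags[sentry:user])"
        )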
diff --git a/src/sentry/migrations/0076_alert_rules_disable_constraints.py b/src/sentry/migrations/0076_alert_rules_disable_constraints.py
deleted file mode 100644
index b505ae830ea391..00000000000000
--- a/src/sentry/migrations/0076_alert_rules_disable_constraints.py
+++ /dev/null
@@ -1,86 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-13 02:04
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0075_metric_alerts_fix_releases"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="alertruleenvironment",
- name="alert_rule",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.AlertRule",
- ),
- ),
- migrations.AlterField(
- model_name="alertruleenvironment",
- name="environment",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Environment",
- ),
- ),
- migrations.AlterField(
- model_name="alertrulequerysubscription",
- name="alert_rule",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.AlertRule",
- ),
- ),
- migrations.AlterField(
- model_name="alertrulequerysubscription",
- name="query_subscription",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.QuerySubscription",
- unique=True,
- ),
- ),
- migrations.AlterField(
- model_name="querysubscriptionenvironment",
- name="environment",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Environment",
- ),
- ),
- migrations.AlterField(
- model_name="querysubscriptionenvironment",
- name="query_subscription",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.QuerySubscription",
- ),
- ),
- ]
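Dropping db_constraint on these foreign keys removes the REFERENCES clause at the database level while keeping the ORM-level join, the usual prelude to deleting the join tables (0077/0080) without tripping FK violations. The field-level difference, sketched:

    from django.db import models

    # With db_constraint=False the column and the ORM join survive, but
    # Postgres no longer enforces referential integrity -- cascades and
    # validity checks happen in Python only.
    alert_rule = models.ForeignKey(
        "sentry.AlertRule",
        db_constraint=False,
        on_delete=models.CASCADE,
    )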
diff --git a/src/sentry/migrations/0077_alert_query_col_drop_state.py b/src/sentry/migrations/0077_alert_query_col_drop_state.py
deleted file mode 100644
index 02990d8c9cd8a1..00000000000000
--- a/src/sentry/migrations/0077_alert_query_col_drop_state.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-13 22:23
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0076_alert_rules_disable_constraints")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterUniqueTogether(name="alertruleenvironment", unique_together=set()),
- migrations.RemoveField(model_name="alertruleenvironment", name="alert_rule"),
- migrations.RemoveField(model_name="alertruleenvironment", name="environment"),
- migrations.RemoveField(model_name="alertrulequerysubscription", name="alert_rule"),
- migrations.RemoveField(
- model_name="alertrulequerysubscription", name="query_subscription"
- ),
- migrations.AlterUniqueTogether(
- name="querysubscriptionenvironment", unique_together=set()
- ),
- migrations.RemoveField(
- model_name="querysubscriptionenvironment", name="environment"
- ),
- migrations.RemoveField(
- model_name="querysubscriptionenvironment", name="query_subscription"
- ),
- migrations.RemoveField(model_name="alertrule", name="aggregation"),
- migrations.RemoveField(model_name="alertrule", name="dataset"),
- migrations.RemoveField(model_name="alertrule", name="environment"),
- migrations.RemoveField(model_name="alertrule", name="query"),
- migrations.RemoveField(model_name="alertrule", name="query_subscriptions"),
- migrations.RemoveField(model_name="alertrule", name="resolution"),
- migrations.RemoveField(model_name="alertrule", name="time_window"),
- migrations.RemoveField(model_name="querysubscription", name="aggregation"),
- migrations.RemoveField(model_name="querysubscription", name="dataset"),
- migrations.RemoveField(model_name="querysubscription", name="environments"),
- migrations.RemoveField(model_name="querysubscription", name="query"),
- migrations.RemoveField(model_name="querysubscription", name="resolution"),
- migrations.RemoveField(model_name="querysubscription", name="time_window"),
- migrations.DeleteModel(name="AlertRuleEnvironment"),
- migrations.DeleteModel(name="AlertRuleQuerySubscription"),
- migrations.DeleteModel(name="QuerySubscriptionEnvironment"),
- ]
- )
- ]
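Taken together, 0072-0080 are a textbook expand/backfill/contract sequence: 0072 adds the nullable snuba_query columns alongside the old ones, 0073 backfills them, and this migration is the first half of the contract step. Django's model state forgets the old fields immediately (so the ORM stops selecting them), while the physical DROPs wait for 0080, once no deployed code can still read the columns. The core of the pattern:

    from django.db import migrations

    class Migration(migrations.Migration):
        operations = [
            migrations.SeparateDatabaseAndState(
                state_operations=[
                    # The ORM forgets the column now...
                    migrations.RemoveField(model_name="alertrule", name="query"),
                ],
                # ...but with no database_operations the Postgres column
                # lingers until a later migration drops it for real.
            ),
        ]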
diff --git a/src/sentry/migrations/0078_incident_field_updates.py b/src/sentry/migrations/0078_incident_field_updates.py
deleted file mode 100644
index eb411e1687b740..00000000000000
--- a/src/sentry/migrations/0078_incident_field_updates.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-15 20:50
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-import sentry.db.models.fields.foreignkey
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def delete_incidents_with_no_alert_rule(apps, schema_editor):
-    # These are only test incidents that we don't care about; it should be fine to
-    # remove them so that we can require an AlertRule to always be associated with
-    # Incidents going forward.
- Incident = apps.get_model("sentry", "Incident")
- for incident in RangeQuerySetWrapperWithProgressBar(
- Incident.objects.filter(alert_rule__isnull=True)
- ):
- incident.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0077_alert_query_col_drop_state")]
-
- operations = [
- migrations.RunPython(
- delete_incidents_with_no_alert_rule,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_incident"]},
- ),
- migrations.RemoveField(model_name="incident", name="groups"),
- migrations.AlterField(
- model_name="incident",
- name="aggregation",
- field=models.PositiveSmallIntegerField(default=0, null=True),
- ),
- migrations.AlterField(
- model_name="incident",
- name="alert_rule",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.PROTECT, to="sentry.AlertRule"
- ),
- ),
- migrations.AlterField(
- model_name="incident", name="query", field=models.TextField(null=True)
- ),
- migrations.AlterField(
- model_name="incidentgroup",
- name="incident",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Incident",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0079_incidents_remove_query_field_state.py b/src/sentry/migrations/0079_incidents_remove_query_field_state.py
deleted file mode 100644
index 1f61ed6ae77f81..00000000000000
--- a/src/sentry/migrations/0079_incidents_remove_query_field_state.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-20 23:40
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0078_incident_field_updates")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterUniqueTogether(name="incidentgroup", unique_together=set()),
- migrations.RemoveField(model_name="incidentgroup", name="group"),
- migrations.RemoveField(model_name="incidentgroup", name="incident"),
- migrations.RemoveField(model_name="incident", name="aggregation"),
- migrations.RemoveField(model_name="incident", name="query"),
- migrations.DeleteModel(name="IncidentGroup"),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0080_alert_rules_drop_unused_tables_cols.py b/src/sentry/migrations/0080_alert_rules_drop_unused_tables_cols.py
deleted file mode 100644
index a304613e4704ac..00000000000000
--- a/src/sentry/migrations/0080_alert_rules_drop_unused_tables_cols.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-22 18:10
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0079_incidents_remove_query_field_state")]
-
- operations = [
- migrations.RunSQL(
- """
- DROP TABLE "sentry_alertruleenvironment";
- DROP TABLE "sentry_alertrulequerysubscription";
- DROP TABLE "sentry_querysubscriptionenvironment";
- DROP TABLE "sentry_incidentgroup";
-
- ALTER TABLE "sentry_alertrule"
- DROP COLUMN "aggregation",
- DROP COLUMN "dataset",
- DROP COLUMN "query",
- DROP COLUMN "resolution",
- DROP COLUMN "time_window";
-
- ALTER TABLE "sentry_querysubscription"
- DROP COLUMN "aggregation",
- DROP COLUMN "dataset",
- DROP COLUMN "query",
- DROP COLUMN "resolution";
-
- ALTER TABLE "sentry_incident"
- DROP COLUMN "aggregation",
- DROP COLUMN "query";
- """,
- reverse_sql="""
- CREATE TABLE "sentry_alertruleenvironment" (id bigint);
- CREATE TABLE "sentry_alertrulequerysubscription" (id bigint);
- CREATE TABLE "sentry_querysubscriptionenvironment" (id bigint);
- CREATE TABLE "sentry_incidentgroup" (id bigint);
-
- ALTER TABLE "sentry_alertrule"
- ADD COLUMN "aggregation" int,
- ADD COLUMN "dataset" int,
- ADD COLUMN "query" text,
- ADD COLUMN "resolution" text,
- ADD COLUMN "time_window" int;
-
- ALTER TABLE "sentry_querysubscription"
- ADD COLUMN "aggregation" int,
- ADD COLUMN "dataset" int,
- ADD COLUMN "query" text,
- ADD COLUMN "resolution" text;
-
- ALTER TABLE "sentry_incident"
- ADD COLUMN "aggregation" int,
- ADD COLUMN "query" text;
-
- """,
- hints={
- "tables": [
- "sentry_incident",
- "sentry_alertrule",
- "sentry_querysubscription",
- "sentry_alertruleenvironment",
- "sentry_alertrulequerysubscription",
- "sentry_querysubscriptionenvironment",
- "sentry_incidentgroup",
- ]
- },
- )
- ]
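Note that the reverse_sql here is only a stub: it recreates the four tables as bare (id bigint) shells and re-adds the columns with approximated types (dataset comes back as int although it was text), just enough to let the migration graph roll backwards without erroring. When a faithful reverse is not worth writing, an explicit no-op is an honest alternative; a sketch with a hypothetical table:

    from django.db import migrations

    operation = migrations.RunSQL(
        "DROP TABLE sentry_example_tmp;",  # hypothetical table name
        # Declares the operation reversible but does nothing on the way
        # back -- clearer than a shell recreate that cannot restore data.
        reverse_sql=migrations.RunSQL.noop,
    )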
diff --git a/src/sentry/migrations/0081_add_integraiton_upgrade_audit_log.py b/src/sentry/migrations/0081_add_integraiton_upgrade_audit_log.py
deleted file mode 100644
index c734d0944046b6..00000000000000
--- a/src/sentry/migrations/0081_add_integraiton_upgrade_audit_log.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-29 15:18
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0080_alert_rules_drop_unused_tables_cols")]
-
- operations = [
- migrations.AlterField(
- model_name="auditlogentry",
- name="event",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "member.invite"),
- (2, "member.add"),
- (3, "member.accept-invite"),
- (5, "member.remove"),
- (4, "member.edit"),
- (6, "member.join-team"),
- (7, "member.leave-team"),
- (8, "member.pending"),
- (20, "team.create"),
- (21, "team.edit"),
- (22, "team.remove"),
- (30, "project.create"),
- (31, "project.edit"),
- (32, "project.remove"),
- (33, "project.set-public"),
- (34, "project.set-private"),
- (35, "project.request-transfer"),
- (36, "project.accept-transfer"),
- (37, "project.enable"),
- (38, "project.disable"),
- (10, "org.create"),
- (11, "org.edit"),
- (12, "org.remove"),
- (13, "org.restore"),
- (40, "tagkey.remove"),
- (50, "projectkey.create"),
- (51, "projectkey.edit"),
- (52, "projectkey.remove"),
- (53, "projectkey.enable"),
- (53, "projectkey.disable"),
- (60, "sso.enable"),
- (61, "sso.disable"),
- (62, "sso.edit"),
- (63, "sso-identity.link"),
- (70, "api-key.create"),
- (71, "api-key.edit"),
- (72, "api-key.remove"),
- (80, "rule.create"),
- (81, "rule.edit"),
- (82, "rule.remove"),
- (100, "servicehook.create"),
- (101, "servicehook.edit"),
- (102, "servicehook.remove"),
- (103, "servicehook.enable"),
- (104, "servicehook.disable"),
- (109, "integration.upgrade"),
- (110, "integration.add"),
- (111, "integration.edit"),
- (112, "integration.remove"),
- (113, "sentry-app.add"),
- (115, "sentry-app.remove"),
- (116, "sentry-app.install"),
- (117, "sentry-app.uninstall"),
- (130, "internal-integration.create"),
- (135, "internal-integration.add-token"),
- (136, "internal-integration.remove-token"),
- (90, "ondemand.edit"),
- (91, "trial.started"),
- (92, "plan.changed"),
- (93, "plan.cancelled"),
- (140, "invite-request.create"),
- (141, "invite-request.remove"),
- ]
- ),
- )
- ]
diff --git a/src/sentry/migrations/0082_alert_rules_threshold_float.py b/src/sentry/migrations/0082_alert_rules_threshold_float.py
deleted file mode 100644
index 050a0e80d27dca..00000000000000
--- a/src/sentry/migrations/0082_alert_rules_threshold_float.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Generated by Django 1.11.29 on 2020-05-29 20:19
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0081_add_integraiton_upgrade_audit_log"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="alertruletrigger",
- name="alert_threshold",
- field=models.FloatField(),
- ),
- migrations.AlterField(
- model_name="alertruletrigger",
- name="resolve_threshold",
- field=models.FloatField(null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0083_add_max_length_webhook_url.py b/src/sentry/migrations/0083_add_max_length_webhook_url.py
deleted file mode 100644
index dc5dad7f6c0d1e..00000000000000
--- a/src/sentry/migrations/0083_add_max_length_webhook_url.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-01 20:07
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0082_alert_rules_threshold_float"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="sentryapp",
- name="webhook_url",
- field=models.URLField(max_length=512, null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0084_exported_data_blobs.py b/src/sentry/migrations/0084_exported_data_blobs.py
deleted file mode 100644
index 52b9d0e85674ea..00000000000000
--- a/src/sentry/migrations/0084_exported_data_blobs.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-03 01:15
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0083_add_max_length_webhook_url"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="ExportedDataBlob",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("offset", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "blob",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.FileBlob",
- ),
- ),
- (
- "data_export",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.ExportedData"
- ),
- ),
- ],
- options={
- "db_table": "sentry_exporteddatablob",
- },
- ),
- migrations.AlterUniqueTogether(
- name="exporteddatablob",
- unique_together={("data_export", "blob", "offset")},
- ),
- ]
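
The ExportedDataBlob model removed above stored a large data export as a sequence of file blobs keyed by offset, with (data_export, blob, offset) unique. A minimal sketch of how such chunked storage would be read back in order; the models are the ones from this migration, the reverse accessor uses Django's default related name, and open_blob stands in for whatever actually yields a blob's bytes (not shown in this migration):

    def iter_export_chunks(data_export, open_blob):
        # Fetch this export's chunks in offset order and stream their bytes.
        chunks = (
            data_export.exporteddatablob_set.select_related("blob").order_by("offset")
        )
        for chunk in chunks:
            with open_blob(chunk.blob) as fp:
                yield fp.read()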
diff --git a/src/sentry/migrations/0085_fix_error_rate_snuba_query.py b/src/sentry/migrations/0085_fix_error_rate_snuba_query.py
deleted file mode 100644
index f39006a77c7c50..00000000000000
--- a/src/sentry/migrations/0085_fix_error_rate_snuba_query.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-10 22:11
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def fix_error_rate_snuba_queries(apps, schema_editor):
- SnubaQuery = apps.get_model("sentry", "SnubaQuery")
- for snuba_query in RangeQuerySetWrapperWithProgressBar(
- SnubaQuery.objects.filter(aggregate="error_rate()")
- ):
- snuba_query.aggregate = "failure_rate()"
- snuba_query.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0084_exported_data_blobs")]
-
- operations = [
- migrations.RunPython(
- fix_error_rate_snuba_queries,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_snubaquery"]},
- )
- ]
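
Data migrations like this one iterate with RangeQuerySetWrapperWithProgressBar rather than a bare queryset so rows are fetched in bounded primary-key ranges instead of one unbounded scan. A simplified sketch of that iteration pattern, assuming only a Django queryset with an integer id (an illustration, not Sentry's actual wrapper):

    def iter_by_id_range(queryset, step=1000):
        # Walk the queryset in id order, `step` rows per query, so no single
        # query materializes the whole table or relies on slow OFFSET paging.
        last_id = 0
        while True:
            batch = list(queryset.filter(id__gt=last_id).order_by("id")[:step])
            if not batch:
                return
            yield from batch
            last_id = batch[-1].id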
diff --git a/src/sentry/migrations/0086_sentry_app_installation_for_provider.py b/src/sentry/migrations/0086_sentry_app_installation_for_provider.py
deleted file mode 100644
index b978ff2130aaee..00000000000000
--- a/src/sentry/migrations/0086_sentry_app_installation_for_provider.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-17 21:46
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0085_fix_error_rate_snuba_query"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="SentryAppInstallationForProvider",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- ("provider", models.CharField(max_length=64)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- (
- "sentry_app_installation",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.SentryAppInstallation",
- ),
- ),
- ],
- options={
- "db_table": "sentry_sentryappinstallationforprovider",
- },
- ),
- migrations.AlterUniqueTogether(
- name="sentryappinstallationforprovider",
- unique_together={("provider", "organization")},
- ),
- ]
diff --git a/src/sentry/migrations/0087_fix_time_series_data_type.py b/src/sentry/migrations/0087_fix_time_series_data_type.py
deleted file mode 100644
index ace71597274f18..00000000000000
--- a/src/sentry/migrations/0087_fix_time_series_data_type.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-23 00:22
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0086_sentry_app_installation_for_provider")]
-
- operations = [
- migrations.RunSQL(
- "ALTER TABLE sentry_timeseriessnapshot ALTER COLUMN values SET DATA TYPE float[] USING values::float[]",
- hints={"tables": ["sentry_timeseriessnapshot"]},
- )
- ]
diff --git a/src/sentry/migrations/0088_rule_level_resolve_threshold_type.py b/src/sentry/migrations/0088_rule_level_resolve_threshold_type.py
deleted file mode 100644
index 1fa962570ae7a1..00000000000000
--- a/src/sentry/migrations/0088_rule_level_resolve_threshold_type.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-24 00:53
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0087_fix_time_series_data_type"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="alertrule",
- name="resolve_threshold",
- field=models.FloatField(null=True),
- ),
- migrations.AddField(
- model_name="alertrule",
- name="threshold_type",
- field=models.SmallIntegerField(null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0089_rule_level_fields_backfill.py b/src/sentry/migrations/0089_rule_level_fields_backfill.py
deleted file mode 100644
index 0cb6f1ec1944d4..00000000000000
--- a/src/sentry/migrations/0089_rule_level_fields_backfill.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Generated by Django 1.11.29 on 2020-06-25 20:24
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_rule_level_fields(apps, schema_editor):
- AlertRule = apps.get_model("sentry", "AlertRule")
- for alert_rule in RangeQuerySetWrapperWithProgressBar(AlertRule.objects_with_snapshots.all()):
- triggers = list(alert_rule.alertruletrigger_set.all())
- # Determine the resolve_threshold and threshold_type from the rule's triggers
- if triggers:
- # Threshold types are the same for all triggers on a rule, so just grab one
- threshold_type = triggers[0].threshold_type
- resolve_thresholds = [
- t.resolve_threshold for t in triggers if t.resolve_threshold is not None
- ]
- if resolve_thresholds:
- # Either grab the min or max resolve threshold depending on whether
- # we're an above or below threshold rule.
- func = min if threshold_type == 0 else max
- resolve_threshold = func(resolve_thresholds)
- else:
- resolve_threshold = None
-
- alert_rule.resolve_threshold = resolve_threshold
- alert_rule.threshold_type = threshold_type
- else:
- # Just a failsafe in case we have any bad rules without triggers.
- alert_rule.threshold_type = 0
- alert_rule.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0088_rule_level_resolve_threshold_type")]
-
- operations = [
- migrations.RunPython(
- backfill_rule_level_fields,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_alertrule"]},
- )
- ]
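
The backfill collapses trigger-level resolve thresholds into one rule-level value: taking threshold_type == 0 as an above-threshold rule, resolution requires dropping below every trigger's resolve threshold, so the minimum is chosen; below-threshold rules take the maximum. A toy check of that reduction with made-up threshold values:

    def rule_resolve_threshold(threshold_type, resolve_thresholds):
        # Mirrors the backfill: min for above-threshold rules, max for below.
        if not resolve_thresholds:
            return None
        func = min if threshold_type == 0 else max
        return func(resolve_thresholds)

    assert rule_resolve_threshold(0, [100.0, 250.0]) == 100.0  # above: lowest wins
    assert rule_resolve_threshold(1, [10.0, 25.0]) == 25.0     # below: highest wins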
diff --git a/src/sentry/migrations/0090_fix_auditlog_pickled_data_take_2.py b/src/sentry/migrations/0090_fix_auditlog_pickled_data_take_2.py
deleted file mode 100644
index fbc2c485713386..00000000000000
--- a/src/sentry/migrations/0090_fix_auditlog_pickled_data_take_2.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Generated by Django 1.11.29 on 2020-07-15 21:50
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def cleanup_audit_log_data(apps, schema_editor):
- """
- Fix `AuditLogEntry` rows that have pickled `Team` models in their `data` field.
-
-    We originally fixed this in [0], but we missed some types. This is
-    basically the same migration, but without the audit log entry type guard.
-
- [0]: https://github.com/getsentry/sentry/pull/17545
- """
- AuditLogEntry = apps.get_model("sentry", "AuditLogEntry")
- for audit_log in RangeQuerySetWrapperWithProgressBar(AuditLogEntry.objects.all()):
- teams = audit_log.data.get("teams")
- if teams and hasattr(teams[0], "id"):
- # We have a team in here rather than just the expected data
- audit_log.data["teams"] = [team.id for team in teams]
- audit_log.data["teams_slugs"] = [team.slug for team in teams]
- audit_log.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0089_rule_level_fields_backfill")]
-
- operations = [
- migrations.RunPython(
- code=cleanup_audit_log_data,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_auditlogentry"]},
- )
- ]
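
The cleanup relies on duck-typing to spot the bad rows: a healthy data["teams"] entry is a list of plain ids, while a pickled Team instance exposes an id attribute. A self-contained reproduction of that check, using a stand-in class in place of a real pickled model:

    class FakeTeam:
        # Stand-in for a pickled Team instance (illustration only).
        def __init__(self, id, slug):
            self.id, self.slug = id, slug

    data = {"teams": [FakeTeam(1, "backend"), FakeTeam(2, "frontend")]}
    teams = data.get("teams")
    if teams and hasattr(teams[0], "id"):
        data["teams"] = [team.id for team in teams]
        data["teams_slugs"] = [team.slug for team in teams]

    assert data == {"teams": [1, 2], "teams_slugs": ["backend", "frontend"]}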
diff --git a/src/sentry/migrations/0091_alertruleactivity.py b/src/sentry/migrations/0091_alertruleactivity.py
deleted file mode 100644
index d907ce6da6e812..00000000000000
--- a/src/sentry/migrations/0091_alertruleactivity.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Generated by Django 1.11.29 on 2020-07-20 12:11
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0090_fix_auditlog_pickled_data_take_2"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="AlertRuleActivity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("type", models.IntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.AlertRule"
- ),
- ),
- (
- "previous_alert_rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- related_name="previous_alert_rule",
- to="sentry.AlertRule",
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- ],
- options={
- "db_table": "sentry_alertruleactivity",
- },
- ),
- ]
diff --git a/src/sentry/migrations/0092_remove_trigger_threshold_type_nullable.py b/src/sentry/migrations/0092_remove_trigger_threshold_type_nullable.py
deleted file mode 100644
index fa45dbc093a261..00000000000000
--- a/src/sentry/migrations/0092_remove_trigger_threshold_type_nullable.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-07-24 01:25
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0091_alertruleactivity"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="alertruletrigger",
- name="threshold_type",
- field=models.SmallIntegerField(null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0093_make_identity_user_id_textfield.py b/src/sentry/migrations/0093_make_identity_user_id_textfield.py
deleted file mode 100644
index 8ce00bb2ea0b3f..00000000000000
--- a/src/sentry/migrations/0093_make_identity_user_id_textfield.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-07-29 00:15
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0092_remove_trigger_threshold_type_nullable"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="identity",
- name="external_id",
- field=models.TextField(),
- ),
- ]
diff --git a/src/sentry/migrations/0094_cleanup_unreferenced_event_files.py b/src/sentry/migrations/0094_cleanup_unreferenced_event_files.py
deleted file mode 100644
index 66ace321fc1ba8..00000000000000
--- a/src/sentry/migrations/0094_cleanup_unreferenced_event_files.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Generated by Django 1.11.28 on 2020-07-28 16:24
-
-from django.db import migrations
-from django.db.models import Min
-
-from sentry.utils.query import RangeQuerySetWrapper
-
-
-def cleanup_event_attachment_files(apps, schema_editor):
- """
-    Previously, cleanup task code did a SQL bulk delete on EventAttachment,
-    leaving orphaned File and FileBlob objects. These orphaned files now need to
- be purged as they are still consuming space.
- """
- EventAttachment = apps.get_model("sentry", "EventAttachment")
- File = apps.get_model("sentry", "File")
-
- # Find the oldest live attachment as we only want to purge old files.
-    # If there are no files, skip everything.
- oldest_attachment = EventAttachment.objects.all().aggregate(Min("date_added"))
- if not oldest_attachment or oldest_attachment["date_added__min"] is None:
- return
-
- # File types used in event attachments.
- attachment_types = [
- "event.applecrashreport",
- "event.attachment",
- "event.payload",
- "event.minidump",
- "unreal.context",
- "unreal.logs",
- ]
- file_query = File.objects.filter(timestamp__lt=oldest_attachment["date_added__min"]).filter(
- type__in=attachment_types
- )
-
- for f in RangeQuerySetWrapper(file_query):
- # Double check that the file is not referenced.
- if not EventAttachment.objects.filter(file=f).exists():
- f.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0093_make_identity_user_id_textfield"),
- ]
-
- operations = [
- migrations.RunPython(
- code=cleanup_event_attachment_files,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_eventattachment"]},
- )
- ]
diff --git a/src/sentry/migrations/0095_ruleactivity.py b/src/sentry/migrations/0095_ruleactivity.py
deleted file mode 100644
index 6dc4896b16ef06..00000000000000
--- a/src/sentry/migrations/0095_ruleactivity.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Generated by Django 1.11.29 on 2020-07-31 13:00
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0094_cleanup_unreferenced_event_files"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="RuleActivity",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("type", models.IntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "rule",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Rule"
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- ],
- options={
- "db_table": "sentry_ruleactivity",
- },
- ),
- ]
diff --git a/src/sentry/migrations/0096_sentry_app_component_skip_load_on_open.py b/src/sentry/migrations/0096_sentry_app_component_skip_load_on_open.py
deleted file mode 100644
index 7b3d8666ae4cac..00000000000000
--- a/src/sentry/migrations/0096_sentry_app_component_skip_load_on_open.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Generated by Django 1.11.29 on 2020-07-28 22:38
-
-from django.db import migrations
-
-
-# update the field in place (mutates it)
-def convert_field(field):
-    # even if async is false, we had a bug where we'd treat it the same as true,
-    # so to maintain legacy behavior we replicate that same check when setting skip_load_on_open
- if "async" in field:
- field["skip_load_on_open"] = True
- del field["async"]
-
-
-# updates the schema in place (mutates it)
-def update_element_schema(schema):
- # update all the fields in the schema
- link = schema.get("link", {})
- create = schema.get("create", {})
-
- for field in link.get("required_fields", []):
- convert_field(field)
-
- for field in link.get("optional_fields", []):
- convert_field(field)
-
- for field in create.get("required_fields", []):
- convert_field(field)
-
- for field in create.get("optional_fields", []):
- convert_field(field)
-
-
-def update_ui_components(apps, schema_editor):
- SentryAppComponent = apps.get_model("sentry", "SentryAppComponent")
- for component in SentryAppComponent.objects.filter(type="issue-link").select_related(
- "sentry_app"
- ):
- # need to update the denormalized data
- update_element_schema(component.schema)
- for element in component.sentry_app.schema.get("elements", []):
- # only update issue link elements
- if element.get("type") == "issue-link":
- update_element_schema(element)
-
- # save the UI component and the sentry app
- component.save()
- component.sentry_app.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0095_ruleactivity"),
- ]
-
- operations = [
- migrations.RunPython(
- update_ui_components,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_appcomponent"]},
- )
- ]
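
Because the old code treated any present async key as truthy, the conversion sets skip_load_on_open whenever the key exists at all, even for async: False, and then deletes it. A quick demonstration against a made-up issue-link schema, assuming the update_element_schema function deleted above:

    schema = {
        "link": {"required_fields": [{"name": "issue", "async": False}]},
        "create": {"required_fields": [{"name": "title"}]},
    }
    update_element_schema(schema)

    # async is gone and skip_load_on_open is set even though async was False;
    # fields that never had the key are left untouched.
    assert schema["link"]["required_fields"][0] == {"name": "issue", "skip_load_on_open": True}
    assert schema["create"]["required_fields"][0] == {"name": "title"}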
diff --git a/src/sentry/migrations/0097_add_sentry_app_id_to_sentry_alertruletriggeraction.py b/src/sentry/migrations/0097_add_sentry_app_id_to_sentry_alertruletriggeraction.py
deleted file mode 100644
index 57157942190d97..00000000000000
--- a/src/sentry/migrations/0097_add_sentry_app_id_to_sentry_alertruletriggeraction.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Generated by Django 1.11.29 on 2020-08-14 20:18
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0096_sentry_app_component_skip_load_on_open"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="alertruletriggeraction",
- name="sentry_app",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.SentryApp"
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0098_add-performance-onboarding.py b/src/sentry/migrations/0098_add-performance-onboarding.py
deleted file mode 100644
index 7ace8b16001040..00000000000000
--- a/src/sentry/migrations/0098_add-performance-onboarding.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.29 on 2020-08-18 18:52
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0097_add_sentry_app_id_to_sentry_alertruletriggeraction")]
-
- operations = [
- migrations.AlterField(
- model_name="organizationonboardingtask",
- name="task",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "create_project"),
- (2, "send_first_event"),
- (3, "invite_member"),
- (4, "setup_second_platform"),
- (5, "setup_user_context"),
- (6, "setup_release_tracking"),
- (7, "setup_sourcemaps"),
- (8, "setup_user_reports"),
- (9, "setup_issue_tracker"),
- (10, "setup_alert_rules"),
- (11, "setup_transactions"),
- ]
- ),
- )
- ]
diff --git a/src/sentry/migrations/0099_fix_project_platforms.py b/src/sentry/migrations/0099_fix_project_platforms.py
deleted file mode 100644
index 3da2730d6b1894..00000000000000
--- a/src/sentry/migrations/0099_fix_project_platforms.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Generated by Django 1.11.28 on 2020-09-02 21:33
-
-import logging
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def fix_project_platform(apps, schema_editor):
- """
-    Find projects whose platform is either 'python-tracing' or 'node-tracing',
-    and change them to 'python' or 'node', respectively.
- """
- Project = apps.get_model("sentry", "Project")
-
- for project in RangeQuerySetWrapperWithProgressBar(Project.objects.all()):
- try:
- if project.platform == "node-tracing":
- project.platform = "node"
- project.save()
- continue
-
- if project.platform == "python-tracing":
- project.platform = "python"
- project.save()
- continue
- except Exception:
- logging.exception(f"Error changing platform for project {project.id}")
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0098_add-performance-onboarding")]
-
- operations = [
- migrations.RunPython(
- code=fix_project_platform,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_project"]},
- )
- ]
diff --git a/src/sentry/migrations/0100_file_type_on_event_attachment.py b/src/sentry/migrations/0100_file_type_on_event_attachment.py
deleted file mode 100644
index 68a02440be7e96..00000000000000
--- a/src/sentry/migrations/0100_file_type_on_event_attachment.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-09-15 08:00
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0099_fix_project_platforms"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="eventattachment",
- name="type",
- field=models.CharField(db_index=True, max_length=64, null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0101_backfill_file_type_on_event_attachment.py b/src/sentry/migrations/0101_backfill_file_type_on_event_attachment.py
deleted file mode 100644
index 62bdb79086caf0..00000000000000
--- a/src/sentry/migrations/0101_backfill_file_type_on_event_attachment.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Generated by Django 1.11.27 on 2020-01-23 19:07
-
-import logging
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapper
-
-logger = logging.getLogger(__name__)
-
-
-def backfill_file_type(apps, schema_editor):
- """
- Fill the new EventAttachment.type column with values from the related File.type.
- """
- EventAttachment = apps.get_model("sentry", "EventAttachment")
- File = apps.get_model("sentry", "File")
- all_event_attachments = EventAttachment.objects.all()
- for event_attachment in RangeQuerySetWrapper(all_event_attachments, step=1000):
- if event_attachment.type is None:
- file = File.objects.get(id=event_attachment.file_id)
- event_attachment.type = file.type
- event_attachment.save(update_fields=["type"])
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Adding indexes to large tables. These indexes should be created concurrently,
- # unfortunately we can't run migrations outside of a transaction until Django
- # 1.10. So until then these should be run manually.
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0100_file_type_on_event_attachment"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_file_type,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_eventattachment"]},
- ),
- ]
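
As written, the backfill issues one File.objects.get per attachment that still lacks a type. A batched variant, sketched under the same historical-model assumptions, would resolve file types per chunk with in_bulk instead:

    def backfill_chunk(attachments, File):
        # attachments: EventAttachment rows (type is None) from one batch.
        files = File.objects.in_bulk([a.file_id for a in attachments])
        for attachment in attachments:
            file = files.get(attachment.file_id)
            if file is not None:
                attachment.type = file.type
                attachment.save(update_fields=["type"])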
diff --git a/src/sentry/migrations/0102_collect_relay_analytics.py b/src/sentry/migrations/0102_collect_relay_analytics.py
deleted file mode 100644
index 0fe480ac4ab21e..00000000000000
--- a/src/sentry/migrations/0102_collect_relay_analytics.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Generated by Django 1.11.29 on 2020-09-16 08:42
-
-from django.db import migrations, models
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0101_backfill_file_type_on_event_attachment")]
-
- operations = [
- migrations.CreateModel(
- name="RelayUsage",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("relay_id", models.CharField(max_length=64)),
- ("version", models.CharField(default="0.0.1", max_length=32)),
- ("first_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ("last_seen", models.DateTimeField(default=django.utils.timezone.now)),
- ],
- options={"db_table": "sentry_relayusage"},
- ),
- migrations.AlterField(
- model_name="relay",
- name="first_seen",
- field=models.DateTimeField(default=None, null=True),
- ),
- migrations.AlterField(
- model_name="relay", name="is_internal", field=models.NullBooleanField(default=None)
- ),
- migrations.AlterField(
- model_name="relay",
- name="last_seen",
- field=models.DateTimeField(default=None, null=True),
- ),
- migrations.AlterUniqueTogether(
- name="relayusage", unique_together={("relay_id", "version")}
- ),
- ]
diff --git a/src/sentry/migrations/0103_project_has_alert_filters.py b/src/sentry/migrations/0103_project_has_alert_filters.py
deleted file mode 100644
index 18de07fc537125..00000000000000
--- a/src/sentry/migrations/0103_project_has_alert_filters.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Generated by Django 1.11.29 on 2020-09-16 02:03
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [("sentry", "0102_collect_relay_analytics")]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterField(
- model_name="project",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("has_releases", "This Project has sent release data"),
- (
- "has_issue_alerts_targeting",
- "This Project has issue alerts targeting",
- ),
- ("has_transactions", "This Project has sent transactions"),
- ("has_alert_filters", "This Project has filters"),
- ),
- default=10,
- null=True,
- ),
- )
- ]
- )
- ]
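
The flags default of 10 is a bitmask. Assuming django-bitfield assigns bits in declaration order (has_releases = 1, has_issue_alerts_targeting = 2, has_transactions = 4, has_alert_filters = 8), 10 = 2 + 8, so new projects start with issue alerts targeting and alert filters enabled but no releases or transactions recorded. The arithmetic, checked with plain integers:

    HAS_RELEASES = 1 << 0
    HAS_ISSUE_ALERTS_TARGETING = 1 << 1
    HAS_TRANSACTIONS = 1 << 2
    HAS_ALERT_FILTERS = 1 << 3

    default = 10
    assert default == HAS_ISSUE_ALERTS_TARGETING | HAS_ALERT_FILTERS
    assert not (default & (HAS_RELEASES | HAS_TRANSACTIONS))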
diff --git a/src/sentry/migrations/0104_collect_relay_public_key_usage.py b/src/sentry/migrations/0104_collect_relay_public_key_usage.py
deleted file mode 100644
index 9f296520b8956d..00000000000000
--- a/src/sentry/migrations/0104_collect_relay_public_key_usage.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-09-18 11:56
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0103_project_has_alert_filters"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="relayusage",
- name="public_key",
- field=models.CharField(db_index=True, max_length=200, null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0105_remove_nullability_of_event_attachment_type.py b/src/sentry/migrations/0105_remove_nullability_of_event_attachment_type.py
deleted file mode 100644
index e253e96b015560..00000000000000
--- a/src/sentry/migrations/0105_remove_nullability_of_event_attachment_type.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-09-19 09:24
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0104_collect_relay_public_key_usage"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="eventattachment",
- name="type",
- field=models.CharField(db_index=True, max_length=64),
- ),
- ]
diff --git a/src/sentry/migrations/0106_service_hook_project_id_nullable.py b/src/sentry/migrations/0106_service_hook_project_id_nullable.py
deleted file mode 100644
index 2304fd57104bd9..00000000000000
--- a/src/sentry/migrations/0106_service_hook_project_id_nullable.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Generated by Django 1.11.29 on 2020-09-21 19:33
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0105_remove_nullability_of_event_attachment_type"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="servicehook",
- name="project_id",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- db_index=True, null=True
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0107_remove_spaces_from_slugs.py b/src/sentry/migrations/0107_remove_spaces_from_slugs.py
deleted file mode 100644
index 6ff7b84cbad9c1..00000000000000
--- a/src/sentry/migrations/0107_remove_spaces_from_slugs.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from django.db import migrations
-
-
-def remove_trailing_spaces(apps, schema_editor):
- """
-    There are currently only two organizations with trailing spaces, so we're
-    updating them in Python. Using SQL would lock the table for too long.
- """
- Organization = apps.get_model("sentry", "Organization")
-
- for organization in Organization.objects.filter(slug__endswith=" "):
- organization.slug = organization.slug.strip()
- organization.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [("sentry", "0106_service_hook_project_id_nullable")]
-
- operations = [
- migrations.RunPython(
- remove_trailing_spaces,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_organization"]},
- )
- ]
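
The docstring above explains the design choice: with only two affected rows, a Python loop of short per-row saves beats one blanket UPDATE that would hold its lock on sentry_organization for the length of a full-table scan. For contrast, here is a sketch of the avoided single-statement approach; the statement and class below are illustrative, not code from this repository:

    from django.db import migrations

    class Migration(migrations.Migration):
        atomic = False

        dependencies = [("sentry", "0106_service_hook_project_id_nullable")]

        operations = [
            # Hypothetical rejected approach: one blanket UPDATE, which scans
            # and rewrites the table inside a single long-lived statement.
            migrations.RunSQL(
                "UPDATE sentry_organization"
                " SET slug = trim(trailing ' ' from slug)"
                " WHERE slug LIKE '% ';",
                reverse_sql=migrations.RunSQL.noop,
                hints={"tables": ["sentry_organization"]},
            )
        ]
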
diff --git a/src/sentry/migrations/0108_update_fileblob_action.py b/src/sentry/migrations/0108_update_fileblob_action.py
deleted file mode 100644
index 36d63345964474..00000000000000
--- a/src/sentry/migrations/0108_update_fileblob_action.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Generated by Django 1.11.27 on 2020-09-28 14:29
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0107_remove_spaces_from_slugs"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterField(
- model_name="fileblobindex",
- name="blob",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.PROTECT, to="sentry.FileBlob"
- ),
- ),
- ]
- )
- ]
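
The SeparateDatabaseAndState wrapper above is a recurring idiom in these migrations: when only state_operations are given, Django updates its recorded model history but emits no SQL. That fits here because on_delete=PROTECT is enforced in Python at runtime, not by a database constraint. A minimal standalone sketch of the idiom, reusing the operation from 0108:

    from django.db import migrations
    from django.db.models.deletion import PROTECT
    from sentry.db.models.fields.foreignkey import FlexibleForeignKey

    # State-only change: adjust Django's model history, run no SQL.
    operation = migrations.SeparateDatabaseAndState(
        state_operations=[
            migrations.AlterField(
                model_name="fileblobindex",
                name="blob",
                field=FlexibleForeignKey(on_delete=PROTECT, to="sentry.FileBlob"),
            ),
        ],
        database_operations=[],
    )
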
diff --git a/src/sentry/migrations/0109_sentry_app_creator.py b/src/sentry/migrations/0109_sentry_app_creator.py
deleted file mode 100644
index f5296c93d2535d..00000000000000
--- a/src/sentry/migrations/0109_sentry_app_creator.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-06 15:42
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0108_update_fileblob_action"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="sentryapp",
- name="creator_label",
- field=models.TextField(null=True),
- ),
- migrations.AddField(
- model_name="sentryapp",
- name="creator_user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0110_sentry_app_creator_backill.py b/src/sentry/migrations/0110_sentry_app_creator_backill.py
deleted file mode 100644
index ddc3c01326c9b6..00000000000000
--- a/src/sentry/migrations/0110_sentry_app_creator_backill.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-06 17:57
-
-from django.db import migrations
-
-
-def backfill_one(sentry_app, AuditLogEntry):
- queryset = AuditLogEntry.objects.filter(
- organization_id=sentry_app.owner_id, actor_id__isnull=False, event=113
- ) # sentry app add
-
- for audit_log_entry in queryset:
- name = audit_log_entry.data.get("sentry_app")
-        # match the audit log entry to the sentry app by name
- if name and name == sentry_app.name:
- user = audit_log_entry.actor
- sentry_app.creator_user = user
- sentry_app.creator_label = user.email or user.username
- sentry_app.save()
- return
-
-
-def backfill_sentry_app_creator(apps, schema_editor):
- """
-    Backfills the creator fields of SentryApp from
-    the audit log table.
- """
- SentryApp = apps.get_model("sentry", "SentryApp")
- AuditLogEntry = apps.get_model("sentry", "AuditLogEntry")
-
- queryset = SentryApp.objects.filter(date_deleted__isnull=True, creator_user_id__isnull=True)
-
- for sentry_app in queryset:
- backfill_one(sentry_app, AuditLogEntry)
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0109_sentry_app_creator"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_sentry_app_creator,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_auditlogentry"]},
- ),
- ]
diff --git a/src/sentry/migrations/0111_snuba_query_event_type.py b/src/sentry/migrations/0111_snuba_query_event_type.py
deleted file mode 100644
index b1c758b87389a3..00000000000000
--- a/src/sentry/migrations/0111_snuba_query_event_type.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-08 23:35
-
-from django.db import migrations, models
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0110_sentry_app_creator_backill"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="SnubaQueryEventType",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("type", models.SmallIntegerField()),
- (
- "snuba_query",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.SnubaQuery"
- ),
- ),
- ],
- options={
- "db_table": "sentry_snubaqueryeventtype",
- },
- ),
- migrations.AlterUniqueTogether(
- name="snubaqueryeventtype",
- unique_together={("snuba_query", "type")},
- ),
- ]
diff --git a/src/sentry/migrations/0112_groupinboxmodel.py b/src/sentry/migrations/0112_groupinboxmodel.py
deleted file mode 100644
index d2a653a4b6ab28..00000000000000
--- a/src/sentry/migrations/0112_groupinboxmodel.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-14 21:05
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.db.models.fields.jsonfield
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0111_snuba_query_event_type"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="GroupInbox",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("reason", models.PositiveSmallIntegerField(default=0)),
- ("reason_details", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Group",
- unique=True,
- ),
- ),
- ],
- options={
- "db_table": "sentry_groupinbox",
- },
- ),
- ]
diff --git a/src/sentry/migrations/0113_add_repositoryprojectpathconfig.py b/src/sentry/migrations/0113_add_repositoryprojectpathconfig.py
deleted file mode 100644
index 7db2fafa8761e0..00000000000000
--- a/src/sentry/migrations/0113_add_repositoryprojectpathconfig.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-15 17:38
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0112_groupinboxmodel"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="RepositoryProjectPathConfig",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- ("stack_root", models.TextField()),
- ("source_root", models.TextField()),
- ("default_branch", models.TextField(null=True)),
- (
- "organization_integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.OrganizationIntegration",
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- (
- "repository",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Repository"
- ),
- ),
- ],
- options={
- "db_table": "sentry_repositoryprojectpathconfig",
- },
- ),
- migrations.AlterUniqueTogether(
- name="repositoryprojectpathconfig",
- unique_together={("project", "stack_root")},
- ),
- ]
diff --git a/src/sentry/migrations/0114_add_unhandled_savedsearch.py b/src/sentry/migrations/0114_add_unhandled_savedsearch.py
deleted file mode 100644
index 75d9d41e0bcc50..00000000000000
--- a/src/sentry/migrations/0114_add_unhandled_savedsearch.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-14 14:21
-
-from django.db import migrations
-
-
-def add_unhandled_search(apps, schema_editor):
- SavedSearch = apps.get_model("sentry", "SavedSearch")
- search = SavedSearch.objects.create(
- name="Unhandled Errors",
- query="is:unresolved error.unhandled:true",
- organization_id=None,
- is_default=False,
- is_global=True,
- # models.search_common.SearchType.ISSUE
- type=0,
- )
- search.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0113_add_repositoryprojectpathconfig"),
- ]
-
-    operations = [
-        migrations.RunPython(
-            code=add_unhandled_search,
-            reverse_code=migrations.RunPython.noop,
-            hints={"tables": ["sentry_savedsearch"]},
-        )
-    ]
diff --git a/src/sentry/migrations/0115_add_checksum_to_debug_file.py b/src/sentry/migrations/0115_add_checksum_to_debug_file.py
deleted file mode 100644
index 70996ba928c756..00000000000000
--- a/src/sentry/migrations/0115_add_checksum_to_debug_file.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-22 08:10
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0114_add_unhandled_savedsearch"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_projectdsymfile" ADD COLUMN "checksum" varchar(40) NULL;
- """,
- reverse_sql="""
- ALTER TABLE "sentry_projectdsymfile" DROP COLUMN "checksum";
- """,
- hints={"tables": ["sentry_projectdsymfile"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY "sentry_projectdsymfile_checksum_8fb028a8_idx" ON "sentry_projectdsymfile" ("checksum");
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY "sentry_projectdsymfile_checksum_8fb028a8_idx";
- """,
- hints={"tables": ["sentry_projectdsymfile"]},
- ),
- ],
- state_operations=[
- migrations.AddField(
- model_name="projectdebugfile",
- name="checksum",
- field=models.CharField(db_index=True, max_length=40, null=True),
- ),
- ],
- )
- ]
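
This migration pairs atomic = False with raw CREATE INDEX CONCURRENTLY because PostgreSQL refuses to build an index concurrently inside a transaction block; the column add and the index build are therefore issued as separate non-transactional statements. On Django 3.0+ the same thing can be written as an operation rather than raw SQL; roughly as follows (the index name is illustrative):

    from django.contrib.postgres.operations import AddIndexConcurrently
    from django.db import migrations, models

    class Migration(migrations.Migration):
        # CREATE INDEX CONCURRENTLY cannot run in a transaction block.
        atomic = False

        dependencies = [("sentry", "0114_add_unhandled_savedsearch")]

        operations = [
            AddIndexConcurrently(
                "projectdebugfile",
                models.Index(fields=["checksum"], name="projectdsymfile_checksum_idx"),
            ),
        ]

The repository was on Django 1.11 at the time, hence the RunSQL form above.
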
diff --git a/src/sentry/migrations/0116_backfill_debug_file_checksum.py b/src/sentry/migrations/0116_backfill_debug_file_checksum.py
deleted file mode 100644
index 806d010696ab54..00000000000000
--- a/src/sentry/migrations/0116_backfill_debug_file_checksum.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-22 08:13
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_debug_file_checksum(apps, schema_editor):
- """
- Fill the ProjectDebugFile.checksum from related File.checksum.
- """
- ProjectDebugFile = apps.get_model("sentry", "ProjectDebugFile")
- all_debug_files = ProjectDebugFile.objects.filter(checksum__isnull=True).select_related("file")
- for debug_file in RangeQuerySetWrapperWithProgressBar(queryset=all_debug_files, step=1000):
- if debug_file.file.checksum:
- ProjectDebugFile.objects.filter(id=debug_file.id).update(
- checksum=debug_file.file.checksum
- )
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0115_add_checksum_to_debug_file"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_debug_file_checksum,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_projectdsymfile"]},
- ),
- ]
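
One detail of the backfill above is worth spelling out: it re-filters by primary key and writes through QuerySet.update() rather than calling debug_file.save(). That issues a single UPDATE touching only the checksum column, so it skips model signals and cannot clobber columns a concurrent writer changed after the row was read. The idiom in isolation, with a hypothetical model:

    # Write only the backfilled column, keyed by primary key; update()
    # emits one UPDATE, runs no signals, and leaves other columns alone.
    MyModel.objects.filter(id=row.id).update(checksum=row.file.checksum)
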
diff --git a/src/sentry/migrations/0117_dummy-activityupdate.py b/src/sentry/migrations/0117_dummy-activityupdate.py
deleted file mode 100644
index 05c383ae7cdb45..00000000000000
--- a/src/sentry/migrations/0117_dummy-activityupdate.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Generated by Django 1.11.28 on 2020-10-27 18:48
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0116_backfill_debug_file_checksum"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="activity",
- name="type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "set_resolved"),
- (15, "set_resolved_by_age"),
- (13, "set_resolved_in_release"),
- (16, "set_resolved_in_commit"),
- (21, "set_resolved_in_pull_request"),
- (2, "set_unresolved"),
- (3, "set_ignored"),
- (4, "set_public"),
- (5, "set_private"),
- (6, "set_regression"),
- (7, "create_issue"),
- (8, "note"),
- (9, "first_seen"),
- (10, "release"),
- (11, "assigned"),
- (12, "unassigned"),
- (14, "merge"),
- (17, "deploy"),
- (18, "new_processing_issues"),
- (19, "unmerge_source"),
- (20, "unmerge_destination"),
- (22, "reprocess"),
- ]
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0118_backfill_snuba_query_event_types.py b/src/sentry/migrations/0118_backfill_snuba_query_event_types.py
deleted file mode 100644
index e4ce3eb7f5b790..00000000000000
--- a/src/sentry/migrations/0118_backfill_snuba_query_event_types.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-28 22:57
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapper
-
-
-def backfill_snuba_query_event_type(apps, schema_editor):
- """
- This backfills all SnubaQuery rows that don't have a `SnubaQueryEventType`.
- """
- SnubaQuery = apps.get_model("sentry", "SnubaQuery")
- SnubaQueryEventType = apps.get_model("sentry", "SnubaQueryEventType")
-
- for snuba_query in RangeQuerySetWrapper(SnubaQuery.objects.all()):
- if not SnubaQueryEventType.objects.filter(snuba_query=snuba_query).exists():
- # 0 is SnubaQueryEventType.EventTypes.ERROR,
- # 2 is SnubaQueryEventType.EventTypes.TRANSACTION.
- SnubaQueryEventType.objects.create(
- snuba_query=snuba_query, type=(0 if snuba_query.dataset == "events" else 2)
- )
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0117_dummy-activityupdate"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_snuba_query_event_type,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_snubaqueryeventtype"]},
- ),
- ]
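
RangeQuerySetWrapper, used above, exists because iterating a large table with plain OFFSET pagination gets slower with every page. It instead walks rows in primary-key order, so each batch is an indexed range scan. A simplified sketch of the idea, not the real implementation:

    # Page by primary key: each batch starts where the previous one ended.
    def iterate_by_pk_range(queryset, step=1000):
        last_pk = 0
        while True:
            batch = list(queryset.filter(pk__gt=last_pk).order_by("pk")[:step])
            if not batch:
                return
            yield from batch
            last_pk = batch[-1].pk
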
diff --git a/src/sentry/migrations/0119_fix_set_none.py b/src/sentry/migrations/0119_fix_set_none.py
deleted file mode 100644
index 33bc7fb0e597ec..00000000000000
--- a/src/sentry/migrations/0119_fix_set_none.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Generated by Django 1.11.29 on 2020-10-30 20:17
-
-from django.conf import settings
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0118_backfill_snuba_query_event_types"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterField(
- model_name="activity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AlterField(
- model_name="alertruleactivity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AlterField(
- model_name="auditlogentry",
- name="actor",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- blank=True,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- related_name="audit_actors",
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AlterField(
- model_name="auditlogentry",
- name="target_user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- blank=True,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- related_name="audit_targets",
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AlterField(
- model_name="organizationmember",
- name="inviter",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- blank=True,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- related_name="sentry_inviter_set",
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AlterField(
- model_name="organizationonboardingtask",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- migrations.AlterField(
- model_name="ruleactivity",
- name="user",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0120_commit_author_charfield.py b/src/sentry/migrations/0120_commit_author_charfield.py
deleted file mode 100644
index 3090ad630388a7..00000000000000
--- a/src/sentry/migrations/0120_commit_author_charfield.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-02 20:25
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0119_fix_set_none"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="commitauthor",
- name="email",
- field=models.CharField(max_length=75),
- ),
- ]
diff --git a/src/sentry/migrations/0121_obliterate_group_inbox.py b/src/sentry/migrations/0121_obliterate_group_inbox.py
deleted file mode 100644
index 724ad9366a50ee..00000000000000
--- a/src/sentry/migrations/0121_obliterate_group_inbox.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-02 20:25
-
-from django.db import migrations
-
-BATCH_SIZE = 500
-
-
-def obliterate_group_inbox(apps, schema_editor):
- import progressbar
-
- GroupInbox = apps.get_model("sentry.GroupInbox")
-
- total = GroupInbox.objects.all().count()
- widgets = [
- "GroupInbox: ",
- progressbar.Percentage(),
- " ",
- progressbar.Bar(),
- " ",
- progressbar.ETA(),
- ]
- bar = progressbar.ProgressBar(widgets=widgets, maxval=total)
- bar.start()
- progress = 0
- while True:
- deleted, _ = GroupInbox.objects.filter(
- id__in=GroupInbox.objects.all()[:BATCH_SIZE]
- ).delete()
- progress += deleted
- bar.update(min(progress, total))
- if not deleted:
- bar.finish()
- break
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0120_commit_author_charfield"),
- ]
-
- operations = [
- migrations.RunPython(
- obliterate_group_inbox,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_groupinbox"]},
- )
- ]
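
The deletion loop above works in fixed-size batches for two reasons: Django refuses .delete() on a sliced queryset ("Cannot use 'limit' or 'offset' with delete."), and a single unbounded DELETE would hold locks and bloat one huge transaction. The id__in subquery is the standard workaround; stripped to its essentials, with a hypothetical model:

    BATCH_SIZE = 500

    # Each pass issues DELETE ... WHERE id IN (SELECT ... LIMIT 500);
    # stop once a pass deletes nothing.
    while True:
        deleted, _ = MyModel.objects.filter(
            id__in=MyModel.objects.values_list("id", flat=True)[:BATCH_SIZE]
        ).delete()
        if not deleted:
            break
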
diff --git a/src/sentry/migrations/0122_add_release_status.py b/src/sentry/migrations/0122_add_release_status.py
deleted file mode 100644
index 5181be1b1fd4c7..00000000000000
--- a/src/sentry/migrations/0122_add_release_status.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.28 on 2020-11-04 13:39
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0121_obliterate_group_inbox"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_release" ADD COLUMN "status" integer NULL;
- """,
- reverse_sql="""
- ALTER TABLE "sentry_release" DROP COLUMN "status";
- """,
- hints={"tables": ["sentry_release"]},
- )
- ],
- state_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0123_groupinbox_addprojandorg.py b/src/sentry/migrations/0123_groupinbox_addprojandorg.py
deleted file mode 100644
index aad6aaa412a03c..00000000000000
--- a/src/sentry/migrations/0123_groupinbox_addprojandorg.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-05 16:10
-
-import django.db.models.deletion
-from django.db import migrations
-
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0122_add_release_status"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_groupinbox" ADD COLUMN "organization_id" bigint NULL;
- ALTER TABLE "sentry_groupinbox" ADD COLUMN "project_id" bigint NULL;
- """,
- reverse_sql="""
- ALTER TABLE "sentry_groupinbox" DROP COLUMN "organization_id";
- ALTER TABLE "sentry_groupinbox" DROP COLUMN "project_id";
- """,
- hints={"tables": ["sentry_groupinbox"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY "sentry_groupinbox_organization_id_7b67769a" ON "sentry_groupinbox" ("organization_id");
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY "sentry_groupinbox_organization_id_7b67769a";
- """,
- hints={"tables": ["sentry_groupinbox"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY "sentry_groupinbox_project_id_ef8f034d" ON "sentry_groupinbox" ("project_id");
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY "sentry_groupinbox_project_id_ef8f034d";
- """,
- hints={"tables": ["sentry_groupinbox"]},
- ),
- ],
- state_operations=[
- migrations.AddField(
- model_name="groupinbox",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Organization",
- ),
- ),
- migrations.AddField(
- model_name="groupinbox",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0124_add_release_status_model.py b/src/sentry/migrations/0124_add_release_status_model.py
deleted file mode 100644
index 87ce4532a4c311..00000000000000
--- a/src/sentry/migrations/0124_add_release_status_model.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Generated by Django 1.11.28 on 2020-11-06 09:54
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0123_groupinbox_addprojandorg"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AddField(
- model_name="release",
- name="status",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(0, "Open"), (1, "Archived")], default=0, null=True
- ),
- ),
- ],
- database_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0125_add_platformexternalissue_project_id.py b/src/sentry/migrations/0125_add_platformexternalissue_project_id.py
deleted file mode 100644
index a8d8abf811d41c..00000000000000
--- a/src/sentry/migrations/0125_add_platformexternalissue_project_id.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-09 19:24
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0124_add_release_status_model"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="platformexternalissue",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0126_make_platformexternalissue_group_id_flexfk.py b/src/sentry/migrations/0126_make_platformexternalissue_group_id_flexfk.py
deleted file mode 100644
index 9623256ce9e3b4..00000000000000
--- a/src/sentry/migrations/0126_make_platformexternalissue_group_id_flexfk.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-09 21:35
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0125_add_platformexternalissue_project_id"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AddField(
- model_name="platformexternalissue",
- name="group",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- db_index=False,
- null=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Group",
- ),
- ),
- migrations.RemoveField(
- model_name="platformexternalissue",
- name="group_id",
- ),
- migrations.AlterUniqueTogether(
- name="platformexternalissue",
- unique_together={("group", "service_type")},
- ),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0127_backfill_platformexternalissue_project_id.py b/src/sentry/migrations/0127_backfill_platformexternalissue_project_id.py
deleted file mode 100644
index 2cac078408fd16..00000000000000
--- a/src/sentry/migrations/0127_backfill_platformexternalissue_project_id.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-10 00:02
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_platformexternalissue_project_id(apps, schema_editor):
- """
- Fill the PlatformExternalIssue.project_id from related Group.project_id.
- """
- PlatformExternalIssue = apps.get_model("sentry", "PlatformExternalIssue")
- Group = apps.get_model("sentry", "Group")
- external_issues_with_group = PlatformExternalIssue.objects.filter(
- project_id__isnull=True
- ).select_related("group")
- for external_issue in RangeQuerySetWrapperWithProgressBar(
- queryset=external_issues_with_group, step=1000
- ):
- try:
- PlatformExternalIssue.objects.filter(id=external_issue.id).update(
- project_id=external_issue.group.project_id
- )
- except Group.DoesNotExist:
- pass
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0126_make_platformexternalissue_group_id_flexfk"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_platformexternalissue_project_id,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_platformexternalissue"]},
- ),
- ]
diff --git a/src/sentry/migrations/0128_change_dashboards.py b/src/sentry/migrations/0128_change_dashboards.py
deleted file mode 100644
index e4083c129ba6d2..00000000000000
--- a/src/sentry/migrations/0128_change_dashboards.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-13 20:33
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.array
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0127_backfill_platformexternalissue_project_id"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="DashboardWidget",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("title", models.CharField(max_length=255)),
- (
- "display_type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "line"),
- (1, "area"),
- (2, "stacked_area"),
- (3, "bar"),
- (4, "table"),
- ]
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "dashboard",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Dashboard"
- ),
- ),
- ],
- options={
- "db_table": "sentry_dashboardwidget",
- },
- ),
- migrations.CreateModel(
- name="DashboardWidgetQuery",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("name", models.CharField(max_length=255)),
- ("fields", sentry.db.models.fields.array.ArrayField(null=True)),
- ("conditions", models.TextField()),
- ("interval", models.CharField(max_length=10)),
- ("order", sentry.db.models.fields.bounded.BoundedPositiveIntegerField()),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "widget",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.DashboardWidget"
- ),
- ),
- ],
- options={
- "db_table": "sentry_dashboardwidgetquery",
- },
- ),
- migrations.AlterUniqueTogether(
- name="dashboardwidgetquery",
- unique_together={("widget", "order"), ("widget", "name")},
- ),
- migrations.AlterUniqueTogether(
- name="dashboardwidget",
- unique_together={("dashboard", "title"), ("dashboard", "order")},
- ),
- ]
diff --git a/src/sentry/migrations/0129_remove_dashboard_keys.py b/src/sentry/migrations/0129_remove_dashboard_keys.py
deleted file mode 100644
index ec78b6fe882184..00000000000000
--- a/src/sentry/migrations/0129_remove_dashboard_keys.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-17 18:12
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0128_change_dashboards"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="widget",
- name="dashboard",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- db_index=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Dashboard",
- ),
- ),
- migrations.AlterField(
- model_name="widget",
- name="display_type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[(0, "line"), (1, "area"), (2, "stacked_area"), (3, "bar"), (4, "table")]
- ),
- ),
- migrations.AlterField(
- model_name="widgetdatasource",
- name="widget",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- db_index=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Widget",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0130_remove_old_widget_models.py b/src/sentry/migrations/0130_remove_old_widget_models.py
deleted file mode 100644
index 05e834ba4e47a9..00000000000000
--- a/src/sentry/migrations/0130_remove_old_widget_models.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-18 16:36
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0129_remove_dashboard_keys"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.DeleteModel(
- name="Widget",
- ),
- migrations.DeleteModel(
- name="WidgetDataSource",
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0131_drop_widget_tables.py b/src/sentry/migrations/0131_drop_widget_tables.py
deleted file mode 100644
index 8920f255d2982a..00000000000000
--- a/src/sentry/migrations/0131_drop_widget_tables.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-18 16:43
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0130_remove_old_widget_models"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- DROP TABLE sentry_widget;
- """,
- hints={"tables": ["sentry_widget"]},
- ),
- migrations.RunSQL(
- """
- DROP TABLE sentry_widgetdatasource;
- """,
-                    hints={"tables": ["sentry_widgetdatasource"]},
- ),
- ],
- state_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0132_groupownermodel.py b/src/sentry/migrations/0132_groupownermodel.py
deleted file mode 100644
index 6b30bcd6f9d316..00000000000000
--- a/src/sentry/migrations/0132_groupownermodel.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-20 18:43
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.models.groupowner
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0131_drop_widget_tables"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="GroupOwner",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- (
- "type",
- models.PositiveSmallIntegerField(
- choices=[
- (sentry.models.groupowner.GroupOwnerType(0), "Suspect Commit"),
- (sentry.models.groupowner.GroupOwnerType(1), "Ownership Rule"),
- ]
- ),
- ),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "group",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Group",
- unique=True,
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Organization",
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- (
- "team",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.Team"
- ),
- ),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- ],
- options={
- "db_table": "sentry_groupowner",
- },
- ),
- ]
diff --git a/src/sentry/migrations/0133_dashboard_delete_object_status.py b/src/sentry/migrations/0133_dashboard_delete_object_status.py
deleted file mode 100644
index b24e2e194f10f2..00000000000000
--- a/src/sentry/migrations/0133_dashboard_delete_object_status.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-19 22:26
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0132_groupownermodel"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.RemoveField(
- model_name="dashboard",
- name="status",
- ),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0134_dashboard_drop_object_status_column.py b/src/sentry/migrations/0134_dashboard_drop_object_status_column.py
deleted file mode 100644
index dedca6f3de6d86..00000000000000
--- a/src/sentry/migrations/0134_dashboard_drop_object_status_column.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.28 on 2020-11-19 23:26
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0133_dashboard_delete_object_status"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_dashboard" DROP COLUMN "status";
- """,
- reverse_sql="""
- ALTER TABLE "sentry_dashboard" ADD COLUMN "status" int NOT NULL;
- """,
- hints={"tables": ["sentry_dashboard"]},
- )
- ],
- state_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0135_removinguniquegroupownerconstraint.py b/src/sentry/migrations/0135_removinguniquegroupownerconstraint.py
deleted file mode 100644
index 1275cb69f3cf07..00000000000000
--- a/src/sentry/migrations/0135_removinguniquegroupownerconstraint.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-23 21:47
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0134_dashboard_drop_object_status_column"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="groupowner",
- name="group",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to="sentry.Group"
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py b/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py
deleted file mode 100644
index 7c38c44bfacd67..00000000000000
--- a/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Generated by Django 1.11.28 on 2020-07-28 16:24
-
-import logging
-
-from django.db import migrations, transaction
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-conditions_to_filters = {
- "sentry.rules.conditions.tagged_event.TaggedEventCondition": "sentry.rules.filters.tagged_event.TaggedEventFilter",
- "sentry.rules.conditions.event_attribute.EventAttributeCondition": "sentry.rules.filters.event_attribute.EventAttributeFilter",
- "sentry.rules.conditions.level.LevelCondition": "sentry.rules.filters.level.LevelFilter",
-}
-every_event_condition = "sentry.rules.conditions.every_event.EveryEventCondition"
-filter_prefix = "sentry.rules.filters"
-
-
-def get_migration_func(rule):
- data = rule.data
-
- conditions = data.get("conditions") or []
- has_old_conditions = False
- has_migrated_conditions = False
- for condition in conditions:
- if condition["id"] in conditions_to_filters:
- has_migrated_conditions = True
- elif not condition["id"].startswith(filter_prefix):
- has_old_conditions = True
-
- if data.get("action_match") == "none":
- # If the rule contains any conditions that are not migrated, then we must run a more complex
- # migration on the 'none' rules because the 'none' action match does not exist anymore.
- if has_old_conditions:
- return modify_none_rule
- elif data.get("action_match") == "any":
- # If the rule contains some conditions that are migrated and some that aren't with an 'any'
- # match then migrating the rule will cause functionality to change. We will need to split
- # these rules into two rules to maintain the same functionality.
- if has_migrated_conditions and has_old_conditions:
- return split_alert_rule
-
- # all other cases can be handled with a simple migration
- return simple_migrate_alert_rule
-
-
-# Returns a filter version of the given condition, or the original condition
-def migrate_condition(condition):
- # attempt to change the condition id to the filter version, if the condition does not need
- # to be migrated, then just keep the original id
- condition["id"] = conditions_to_filters.get(condition["id"], condition["id"])
- return condition
-
-
-# Migrate the alert rule by moving certain conditions to become filters and applying the correct match
-def simple_migrate_alert_rule(rule, Rule):
- data = rule.data
- action_match = data.get("action_match")
- conditions = data.get("conditions") or []
-
- # if a migration is necessary
- if any([condition["id"] in conditions_to_filters for condition in conditions]):
- rule.data["conditions"] = [migrate_condition(cond) for cond in conditions]
- rule.data["filter_match"] = action_match
-
- if action_match == "none":
- rule.data["action_match"] = "all"
-
- rule.save()
-
-
-# In the case where the alert rule has an 'any' match with filters/conditions, we must split this rule into two
-def split_alert_rule(rule, Rule):
- data = rule.data
- conditions = data.get("conditions") or []
- actions = data.get("actions")
- frequency = data.get("frequency")
- original_name = rule.label
-
- # truncate original name if adding the number makes it exceed the 64 char limit
- if len(original_name + " (1)") >= 64:
- original_name = original_name[:60]
-
- # split the conditions into a filters and triggers array
- filters = [
- migrate_condition(condition)
- for condition in conditions
- if condition["id"] in conditions_to_filters or condition["id"].startswith(filter_prefix)
- ]
- triggers = [
- condition
- for condition in conditions
- if not (
- condition["id"] in conditions_to_filters or condition["id"].startswith(filter_prefix)
- )
- ]
-
- # the original rule will only have the triggers
- rule.data["conditions"] = triggers
- rule.label = original_name + " (1)"
- rule.save()
-
- # create a new rule with just the filters and same actions
- rule_args = {
- "data": {
- "filter_match": "any",
- "action_match": "any",
- "actions": actions,
- "conditions": filters,
- "frequency": frequency,
- },
- "label": original_name + " (2)",
- "environment_id": rule.environment_id,
- "project": rule.project,
- }
- Rule.objects.create(**rule_args)
-
-
-# In the case where the alert rule has a 'none' match with migrated conditions, we migrate all applicable conditions and then set the match to 'any'
-# this doesn't preserve the rule's exact behavior, but because a 'none' match no longer exists for conditions it is the best we can do.
-def modify_none_rule(rule, Rule):
- data = rule.data
- conditions = data.get("conditions") or []
-
- # remove the event occurs condition if it exists and migrate all conditions that should be filters
- migrated_conditions = [
- migrate_condition(cond) for cond in conditions if cond["id"] != every_event_condition
- ]
- rule.data["conditions"] = migrated_conditions
- rule.data["filter_match"] = "none"
-
- # set the match to be 'any'
- rule.data["action_match"] = "any"
- rule.save()
-
-
-def migrate_project_alert_rules(project, Rule):
- with transaction.atomic():
- rules = Rule.objects.filter(project=project, status=0)
- for rule in rules:
- migration_func = get_migration_func(rule)
- migration_func(rule, Rule)
- project.flags.has_alert_filters = True
- project.save()
-
-
-def migrate_all_orgs(apps, schema_editor):
- """
- Migrate an org's projects' rules over to conditions/filters
- and turn on issue alert filters for each.
- """
- Organization = apps.get_model("sentry", "Organization")
- Project = apps.get_model("sentry", "Project")
- Rule = apps.get_model("sentry", "Rule")
-
- for org in RangeQuerySetWrapperWithProgressBar(Organization.objects.filter(status=0)):
- # We migrate a project at a time, but we prefer to group by org so that for the
- # most part an org will see the changes all at once.
- for project in Project.objects.filter(organization=org, status=0):
- try:
- migrate_project_alert_rules(project, Rule)
- except Exception:
- # If a project fails we'll just log and continue. We shouldn't see any
- # failures, but if we do we can analyze them and re-run this migration,
- # since it is idempotent.
- logging.exception(f"Error migrating project {project.id}")
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0135_removinguniquegroupownerconstraint"),
- ]
-
- operations = [
- migrations.RunPython(
- code=migrate_all_orgs,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_rule"]},
- )
- ]
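
To make split_alert_rule concrete: a hypothetical 'any'-match rule that mixes one unmigrated
trigger with one condition that becomes a filter would be divided as follows (illustrative data
only, not taken from production):

    # Input: action_match="any", one trigger plus one condition that maps to a filter.
    original_conditions = [
        {"id": "sentry.rules.conditions.first_seen_event.FirstSeenEventCondition"},
        {"id": "sentry.rules.conditions.level.LevelCondition", "level": "40"},
    ]

    # After the split, rule "<name> (1)" keeps only the trigger:
    rule_one_conditions = [
        {"id": "sentry.rules.conditions.first_seen_event.FirstSeenEventCondition"},
    ]

    # ...and the new rule "<name> (2)" carries the migrated filter, with both
    # action_match and filter_match set to "any" plus the same actions/frequency:
    rule_two_data = {
        "action_match": "any",
        "filter_match": "any",
        "conditions": [{"id": "sentry.rules.filters.level.LevelFilter", "level": "40"}],
    }
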
diff --git a/src/sentry/migrations/0137_dashboard_widget_interval.py b/src/sentry/migrations/0137_dashboard_widget_interval.py
deleted file mode 100644
index 0ddfc4c8778e97..00000000000000
--- a/src/sentry/migrations/0137_dashboard_widget_interval.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-25 20:49
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0136_issue_alert_filter_all_orgs"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="dashboardwidget",
- name="interval",
- field=models.CharField(max_length=10, null=True),
- ),
- migrations.AlterField(
- model_name="dashboardwidgetquery",
- name="interval",
- field=models.CharField(max_length=10, null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0138_widget_query_remove_interval.py b/src/sentry/migrations/0138_widget_query_remove_interval.py
deleted file mode 100644
index c23ae35d6d3999..00000000000000
--- a/src/sentry/migrations/0138_widget_query_remove_interval.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-26 17:41
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0137_dashboard_widget_interval"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.RemoveField(
- model_name="dashboardwidgetquery",
- name="interval",
- ),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0139_remove_widgetquery_interval.py b/src/sentry/migrations/0139_remove_widgetquery_interval.py
deleted file mode 100644
index e2e0a09b5f885f..00000000000000
--- a/src/sentry/migrations/0139_remove_widgetquery_interval.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Generated by Django 1.11.29 on 2020-11-27 21:28
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0138_widget_query_remove_interval"),
- ]
-
- operations = [
- migrations.RunSQL(
- """ALTER TABLE sentry_dashboardwidgetquery DROP COLUMN interval""",
- hints={"tables": ["sentry_dashboardwidgetquery"]},
- )
- ]
diff --git a/src/sentry/migrations/0140_subscription_checker.py b/src/sentry/migrations/0140_subscription_checker.py
deleted file mode 100644
index b0e2966b296c99..00000000000000
--- a/src/sentry/migrations/0140_subscription_checker.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.11.29 on 2020-12-01 03:30
-
-from django.db import migrations, models
-import django.utils.timezone
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0139_remove_widgetquery_interval"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="querysubscription",
- name="date_updated",
- field=models.DateTimeField(default=django.utils.timezone.now, null=True),
- ),
- migrations.AlterField(
- model_name="querysubscription",
- name="status",
- field=models.SmallIntegerField(db_index=True, default=0),
- ),
- ]
diff --git a/src/sentry/migrations/0141_remove_widget_constraints.py b/src/sentry/migrations/0141_remove_widget_constraints.py
deleted file mode 100644
index 2028a1bc433cc3..00000000000000
--- a/src/sentry/migrations/0141_remove_widget_constraints.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Generated by Django 1.11.29 on 2020-12-03 14:58
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0140_subscription_checker"),
- ]
-
- operations = [
- migrations.AlterUniqueTogether(
- name="dashboardwidget",
- unique_together={("dashboard", "order")},
- ),
- migrations.AlterUniqueTogether(
- name="dashboardwidgetquery",
- unique_together={("widget", "order")},
- ),
- ]
diff --git a/src/sentry/migrations/0142_add_dashboard_tombstone.py b/src/sentry/migrations/0142_add_dashboard_tombstone.py
deleted file mode 100644
index 3465a2155adc25..00000000000000
--- a/src/sentry/migrations/0142_add_dashboard_tombstone.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Generated by Django 1.11.29 on 2020-12-04 22:12
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0141_remove_widget_constraints"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="DashboardTombstone",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("slug", models.CharField(max_length=255)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- ],
- options={
- "db_table": "sentry_dashboardtombstone",
- },
- ),
- migrations.AlterUniqueTogether(
- name="dashboardtombstone",
- unique_together={("organization", "slug")},
- ),
- ]
diff --git a/src/sentry/migrations/0143_add_alerts_integrationfeature.py b/src/sentry/migrations/0143_add_alerts_integrationfeature.py
deleted file mode 100644
index 8ebf10baaca631..00000000000000
--- a/src/sentry/migrations/0143_add_alerts_integrationfeature.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.29 on 2020-12-10 23:55
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0142_add_dashboard_tombstone"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="integrationfeature",
- name="feature",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "integrations-api"),
- (1, "integrations-issue-link"),
- (2, "integrations-stacktrace-link"),
- (3, "integrations-event-hooks"),
- (4, "integrations-project-management"),
- (5, "integrations-incident-management"),
- (6, "integrations-feature-flag"),
- (7, "integrations-alerts"),
- ],
- default=0,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0144_add_publish_request_inprogress_status.py b/src/sentry/migrations/0144_add_publish_request_inprogress_status.py
deleted file mode 100644
index 950a5ddec38f40..00000000000000
--- a/src/sentry/migrations/0144_add_publish_request_inprogress_status.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Generated by Django 1.11.29 on 2020-12-15 02:28
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0143_add_alerts_integrationfeature"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="sentryapp",
- name="status",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "unpublished"),
- (1, "published"),
- (2, "internal"),
- (3, "publish_request_inprogress"),
- ],
- db_index=True,
- default=0,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0145_rename_alert_rule_feature.py b/src/sentry/migrations/0145_rename_alert_rule_feature.py
deleted file mode 100644
index a7ec61d8bede1b..00000000000000
--- a/src/sentry/migrations/0145_rename_alert_rule_feature.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.29 on 2020-12-17 22:20
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0144_add_publish_request_inprogress_status"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="integrationfeature",
- name="feature",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "integrations-api"),
- (1, "integrations-issue-link"),
- (2, "integrations-stacktrace-link"),
- (3, "integrations-event-hooks"),
- (4, "integrations-project-management"),
- (5, "integrations-incident-management"),
- (6, "integrations-feature-flag"),
- (7, "integrations-alert-rule"),
- ],
- default=0,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0146_backfill_members_alert_write.py b/src/sentry/migrations/0146_backfill_members_alert_write.py
deleted file mode 100644
index fe22c8d4e03f88..00000000000000
--- a/src/sentry/migrations/0146_backfill_members_alert_write.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Generated by Django 1.11.28 on 2020-07-28 16:24
-
-import logging
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_existing_orgs(apps, schema_editor):
- """
- Backfill the OrganizationOption alerts_member_write to be False for existing orgs
- """
- Organization = apps.get_model("sentry", "Organization")
- OrganizationOption = apps.get_model("sentry", "OrganizationOption")
-
- for org in RangeQuerySetWrapperWithProgressBar(Organization.objects.all()):
- if org.status != 0:
- continue
- try:
- OrganizationOption.objects.create(
- organization=org, key="sentry:alerts_member_write", value=False
- )
- except Exception:
- logging.exception(f"Error backfilling organization {org.id}")
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0145_rename_alert_rule_feature"),
- ]
-
- operations = [
- migrations.RunPython(
- code=backfill_existing_orgs,
- reverse_code=migrations.RunPython.noop,
-            hints={"tables": ["sentry_organizationoption"]},
- )
- ]
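
The backfill leans on the broad except to skip organizations that already have the option, so a
re-run only logs integrity errors. A re-run-friendly variant, assuming the same historical models
(a sketch, not the shipped migration), could use get_or_create instead:

    from sentry.utils.query import RangeQuerySetWrapperWithProgressBar

    def backfill_existing_orgs(apps, schema_editor):
        Organization = apps.get_model("sentry", "Organization")
        OrganizationOption = apps.get_model("sentry", "OrganizationOption")

        for org in RangeQuerySetWrapperWithProgressBar(Organization.objects.filter(status=0)):
            # get_or_create makes the backfill idempotent: an existing row is
            # returned instead of raising an integrity error on re-runs.
            OrganizationOption.objects.get_or_create(
                organization=org, key="sentry:alerts_member_write", defaults={"value": False}
            )
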
diff --git a/src/sentry/migrations/0147_add_groupinbox_date_added_index.py b/src/sentry/migrations/0147_add_groupinbox_date_added_index.py
deleted file mode 100644
index 75022b5342ea3c..00000000000000
--- a/src/sentry/migrations/0147_add_groupinbox_date_added_index.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-05 22:14
-
-import django.utils.timezone
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0146_backfill_members_alert_write"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS sentry_groupinbox_date_added_f113c11b
- ON sentry_groupinbox (date_added);
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY IF EXISTS sentry_groupinbox_date_added_f113c11b;
- """,
- hints={"tables": ["sentry_groupinbox"]},
- ),
- ],
- state_operations=[
- migrations.AlterField(
- model_name="groupinbox",
- name="date_added",
- field=models.DateTimeField(db_index=True, default=django.utils.timezone.now),
- ),
- ],
- ),
- ]
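
0147 is the canonical shape for adding an index to a live table: atomic = False because
CREATE INDEX CONCURRENTLY cannot run inside a transaction, raw SQL on the database side, and a
mirrored AlterField on the state side. For reference, Django 3.0+ ships a Postgres operation that
expresses the same thing; a sketch with hypothetical dependency and index names:

    from django.contrib.postgres.operations import AddIndexConcurrently
    from django.db import migrations, models

    class Migration(migrations.Migration):
        # CONCURRENTLY cannot run inside a transaction, so the migration must be
        # non-atomic, exactly as with the raw-SQL version above.
        atomic = False

        dependencies = [("sentry", "0146_backfill_members_alert_write")]  # hypothetical anchor

        operations = [
            AddIndexConcurrently(
                model_name="groupinbox",
                index=models.Index(fields=["date_added"], name="groupinbox_date_added_idx"),
            ),
        ]
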
diff --git a/src/sentry/migrations/0148_group_id_bigint.py b/src/sentry/migrations/0148_group_id_bigint.py
deleted file mode 100644
index 5a934359298de6..00000000000000
--- a/src/sentry/migrations/0148_group_id_bigint.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-12 21:58
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0147_add_groupinbox_date_added_index"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="groupcommitresolution",
- name="group_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(),
- ),
- migrations.AlterField(
- model_name="grouprelease",
- name="group_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(),
- ),
- ]
diff --git a/src/sentry/migrations/0149_bigint.py b/src/sentry/migrations/0149_bigint.py
deleted file mode 100644
index 43dddbd4799427..00000000000000
--- a/src/sentry/migrations/0149_bigint.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-14 23:22
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0148_group_id_bigint"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="groupcommitresolution",
- name="commit_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
- ),
- migrations.AlterField(
- model_name="grouptombstone",
- name="previous_group_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(unique=True),
- ),
- migrations.AlterField(
- model_name="release",
- name="last_commit_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- ]
diff --git a/src/sentry/migrations/0150_remove_userreport_eventattachment_constraints.py b/src/sentry/migrations/0150_remove_userreport_eventattachment_constraints.py
deleted file mode 100644
index 30bc22793b5b56..00000000000000
--- a/src/sentry/migrations/0150_remove_userreport_eventattachment_constraints.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-20 19:23
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0149_bigint"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.AlterField(
- model_name="eventattachment",
- name="file",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.File",
- ),
- ),
- migrations.AlterField(
- model_name="userreport",
- name="environment",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Environment",
- ),
- ),
- migrations.AlterField(
- model_name="userreport",
- name="group",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Group",
- ),
- ),
- migrations.AlterField(
- model_name="userreport",
- name="project",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- ],
- state_operations=[
- migrations.AddField(
- model_name="eventattachment",
- name="file_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_index=True, default=1
- ),
- preserve_default=False,
- ),
- migrations.AddField(
- model_name="userreport",
- name="environment_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_index=True, null=True
- ),
- ),
- migrations.AddField(
- model_name="userreport",
- name="group_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_index=True, null=True
- ),
- ),
- migrations.AddField(
- model_name="userreport",
- name="project_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(
- db_index=True, default=1
- ),
- preserve_default=False,
- ),
- migrations.RemoveField(
- model_name="eventattachment",
- name="file",
- ),
- migrations.AlterUniqueTogether(
- name="eventattachment",
- unique_together={("project_id", "event_id", "file_id")},
- ),
- migrations.RemoveField(
- model_name="userreport",
- name="environment",
- ),
- migrations.RemoveField(
- model_name="userreport",
- name="group",
- ),
- migrations.RemoveField(
- model_name="userreport",
- name="project",
- ),
- migrations.AlterUniqueTogether(
- name="userreport",
- unique_together={("project_id", "event_id")},
- ),
- migrations.AlterIndexTogether(
- name="eventattachment",
- index_together={
- ("project_id", "date_added"),
- ("project_id", "date_added", "file_id"),
- },
- ),
- migrations.AlterIndexTogether(
- name="userreport",
- index_together={("project_id", "date_added"), ("project_id", "event_id")},
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0151_add_world_map_dashboard_widget_type.py b/src/sentry/migrations/0151_add_world_map_dashboard_widget_type.py
deleted file mode 100644
index 565e3158e904eb..00000000000000
--- a/src/sentry/migrations/0151_add_world_map_dashboard_widget_type.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-22 17:41
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = True
-
- dependencies = [
- ("sentry", "0150_remove_userreport_eventattachment_constraints"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="dashboardwidget",
- name="display_type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "line"),
- (1, "area"),
- (2, "stacked_area"),
- (3, "bar"),
- (4, "table"),
- (5, "world_map"),
- ]
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0152_remove_slack_workspace_orgintegrations.py b/src/sentry/migrations/0152_remove_slack_workspace_orgintegrations.py
deleted file mode 100644
index aad93af16cccff..00000000000000
--- a/src/sentry/migrations/0152_remove_slack_workspace_orgintegrations.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-25 18:29
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def remove_slack_workspace_apps(apps, schema_editor):
- """
- Remove OrganizationIntegrations that are linked to Slack Integrations that are
- still using workspace apps.
- We can determine which integrations are workspace apps by looking at the metadata.
-    New or migrated Slack integrations have a metadata field called `installation_type`.
- If a slack integration is still a workspace app, it will not have this field
- in the metadata.
- """
-
- Integration = apps.get_model("sentry", "Integration")
- OrganizationIntegration = apps.get_model("sentry", "OrganizationIntegration")
-
- for integration in RangeQuerySetWrapperWithProgressBar(
- Integration.objects.filter(provider="slack")
- ):
- if not integration.metadata.get("installation_type"):
- # classic bot apps use the `user_access_token` but
- # may be missing the installation_type. Don't delete these
- if "user_access_token" in integration.metadata:
- continue
- for org_integration in OrganizationIntegration.objects.filter(
- integration_id=integration.id
- ):
- org_integration.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- atomic = False
-
- dependencies = [
- ("sentry", "0151_add_world_map_dashboard_widget_type"),
- ]
-
- operations = [
- migrations.RunPython(
- code=remove_slack_workspace_apps,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_integration"]},
- )
- ]
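
The classification in remove_slack_workspace_apps reduces to two metadata checks; on hypothetical
payloads (field values are illustrative) it behaves like this:

    workspace_app = {"scopes": ["bot"]}                  # no installation_type and no
                                                         # user_access_token -> org integrations removed
    classic_bot = {"user_access_token": "xoxp-..."}      # legacy token present -> kept
    migrated_bot = {"installation_type": "born_as_bot"}  # new-style install -> kept
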
diff --git a/src/sentry/migrations/0153_add_big_number_dashboard_widget_type.py b/src/sentry/migrations/0153_add_big_number_dashboard_widget_type.py
deleted file mode 100644
index ab2983b2789129..00000000000000
--- a/src/sentry/migrations/0153_add_big_number_dashboard_widget_type.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-27 23:55
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0152_remove_slack_workspace_orgintegrations"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="dashboardwidget",
- name="display_type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (0, "line"),
- (1, "area"),
- (2, "stacked_area"),
- (3, "bar"),
- (4, "table"),
- (5, "world_map"),
- (6, "big_number"),
- ]
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0154_groupedmessage_inbox_sort.py b/src/sentry/migrations/0154_groupedmessage_inbox_sort.py
deleted file mode 100644
index f86621749b3ef9..00000000000000
--- a/src/sentry/migrations/0154_groupedmessage_inbox_sort.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-28 18:49
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0153_add_big_number_dashboard_widget_type"),
- ]
-
- operations = [
- migrations.AlterIndexTogether(
- name="groupinbox",
- index_together={("project", "date_added")},
- ),
- ]
diff --git a/src/sentry/migrations/0155_add_dashboard_query_orderby.py b/src/sentry/migrations/0155_add_dashboard_query_orderby.py
deleted file mode 100644
index dbf71fa4a7f6f4..00000000000000
--- a/src/sentry/migrations/0155_add_dashboard_query_orderby.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-29 16:11
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0154_groupedmessage_inbox_sort"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="dashboardwidgetquery",
- name="orderby",
- field=models.TextField(default=""),
- ),
- ]
diff --git a/src/sentry/migrations/0156_add_mark_reviewed_activity.py b/src/sentry/migrations/0156_add_mark_reviewed_activity.py
deleted file mode 100644
index 9be2186dcbdf91..00000000000000
--- a/src/sentry/migrations/0156_add_mark_reviewed_activity.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Generated by Django 1.11.29 on 2021-01-28 01:05
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0155_add_dashboard_query_orderby"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="activity",
- name="type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (1, "set_resolved"),
- (15, "set_resolved_by_age"),
- (13, "set_resolved_in_release"),
- (16, "set_resolved_in_commit"),
- (21, "set_resolved_in_pull_request"),
- (2, "set_unresolved"),
- (3, "set_ignored"),
- (4, "set_public"),
- (5, "set_private"),
- (6, "set_regression"),
- (7, "create_issue"),
- (8, "note"),
- (9, "first_seen"),
- (10, "release"),
- (11, "assigned"),
- (12, "unassigned"),
- (14, "merge"),
- (17, "deploy"),
- (18, "new_processing_issues"),
- (19, "unmerge_source"),
- (20, "unmerge_destination"),
- (22, "reprocess"),
- (23, "mark_reviewed"),
- ]
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0157_make_repositoryprojectpathconfig_organization_integration_nullable.py b/src/sentry/migrations/0157_make_repositoryprojectpathconfig_organization_integration_nullable.py
deleted file mode 100644
index 0397b40de96fa1..00000000000000
--- a/src/sentry/migrations/0157_make_repositoryprojectpathconfig_organization_integration_nullable.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-05 23:58
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0156_add_mark_reviewed_activity"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="repositoryprojectpathconfig",
- name="organization_integration",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.OrganizationIntegration",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0158_create_externalteam_table.py b/src/sentry/migrations/0158_create_externalteam_table.py
deleted file mode 100644
index 13e0a699243f7d..00000000000000
--- a/src/sentry/migrations/0158_create_externalteam_table.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-08 19:24
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.models.integration
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0157_make_repositoryprojectpathconfig_organization_integration_nullable"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="ExternalTeam",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "provider",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.models.integration.ExternalProviders(0), "github"),
- (sentry.models.integration.ExternalProviders(1), "gitlab"),
- ]
- ),
- ),
- ("external_id", models.TextField()),
- (
- "team",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Team"
- ),
- ),
- ],
- options={
- "db_table": "sentry_externalteam",
- },
- ),
- migrations.AlterUniqueTogether(
- name="externalteam",
- unique_together={("team", "provider", "external_id")},
- ),
- ]
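
The `provider` choices in this file are enum members frozen into the migration as `ExternalProviders(0)` and `ExternalProviders(1)`. A minimal standalone sketch of that pattern, with assumed member names (the real class lives in `sentry.models.integration`; only the values 0 and 1 appear in the diff):

from enum import IntEnum

class ExternalProviders(IntEnum):
    GITHUB = 0  # assumed names; the migration records only the values
    GITLAB = 1

# ExternalProviders(0) looks a member up by value, which is exactly the
# call Django serialized into the choices above.
print(ExternalProviders(0))        # ExternalProviders.GITHUB
print(ExternalProviders(0) == 0)   # True: IntEnum members compare as ints
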
diff --git a/src/sentry/migrations/0159_create_externaluser_table.py b/src/sentry/migrations/0159_create_externaluser_table.py
deleted file mode 100644
index fff186117a9f4e..00000000000000
--- a/src/sentry/migrations/0159_create_externaluser_table.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-08 19:18
-
-from django.conf import settings
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.models.integration
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0158_create_externalteam_table"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="ExternalUser",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "provider",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.models.integration.ExternalProviders(0), "github"),
- (sentry.models.integration.ExternalProviders(1), "gitlab"),
- ]
- ),
- ),
- ("external_id", models.TextField()),
- (
- "user",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
- ),
- ),
- ],
- options={
- "db_table": "sentry_externaluser",
- },
- ),
- migrations.AlterUniqueTogether(
- name="externaluser",
- unique_together={("user", "provider", "external_id")},
- ),
- ]
diff --git a/src/sentry/migrations/0160_create_projectcodeowners_table.py b/src/sentry/migrations/0160_create_projectcodeowners_table.py
deleted file mode 100644
index cc26a5f3ff406e..00000000000000
--- a/src/sentry/migrations/0160_create_projectcodeowners_table.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-10 19:04
-
-from django.db import migrations, models
-import django.db.models.deletion
-import django.utils.timezone
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.db.models.fields.jsonfield
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0159_create_externaluser_table"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="ProjectCodeOwners",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("raw", models.TextField(null=True)),
- ("schema", sentry.db.models.fields.jsonfield.JSONField(null=True)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now)),
- (
- "organization_integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.OrganizationIntegration",
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- (
- "repository_project_path_config",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.RepositoryProjectPathConfig",
- ),
- ),
- ],
- options={
- "db_table": "sentry_projectcodeowners",
- },
- ),
- ]
diff --git a/src/sentry/migrations/0161_add_saved_search_sort.py b/src/sentry/migrations/0161_add_saved_search_sort.py
deleted file mode 100644
index 77f8b65165de89..00000000000000
--- a/src/sentry/migrations/0161_add_saved_search_sort.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-12 22:34
-
-from django.db import migrations
-import sentry.db.models.fields.text
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0160_create_projectcodeowners_table"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.AddField(
- model_name="savedsearch",
- name="sort",
- field=sentry.db.models.fields.text.CharField(
- choices=[
- ("date", "Last Seen"),
- ("new", "First Seen"),
- ("priority", "Priority"),
- ("freq", "Events"),
- ("user", "Users"),
- ],
- null=True,
- max_length=16,
- ),
- ),
- ],
- state_operations=[
- migrations.AddField(
- model_name="savedsearch",
- name="sort",
- field=sentry.db.models.fields.text.CharField(
- choices=[
- ("date", "Last Seen"),
- ("new", "First Seen"),
- ("priority", "Priority"),
- ("freq", "Events"),
- ("user", "Users"),
- ],
- default="date",
- null=True,
- max_length=16,
- ),
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0162_backfill_saved_search_sort.py b/src/sentry/migrations/0162_backfill_saved_search_sort.py
deleted file mode 100644
index 491ade3dadd7b6..00000000000000
--- a/src/sentry/migrations/0162_backfill_saved_search_sort.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-13 00:22
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_saved_search_sort(apps, schema_editor):
- """
- Set all saved searches to current default sort "Last Seen"
- """
- SavedSearch = apps.get_model("sentry", "SavedSearch")
-
- for search in RangeQuerySetWrapperWithProgressBar(SavedSearch.objects.all()):
- if search.sort is None:
- search.sort = "date"
- search.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0161_add_saved_search_sort"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_saved_search_sort,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_savedsearch"]},
- ),
- ]
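
A standalone sketch of the idempotent-backfill shape `backfill_saved_search_sort` uses, with plain dicts standing in for SavedSearch rows: only rows still missing a value are written, so re-running the migration is a no-op.

def backfill_sort(rows, default="date"):
    # Skip rows that already carry a sort; a second run changes nothing.
    for row in rows:
        if row.get("sort") is None:
            row["sort"] = default
    return rows

assert backfill_sort([{"sort": None}, {"sort": "new"}]) == [
    {"sort": "date"},
    {"sort": "new"},
]
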
diff --git a/src/sentry/migrations/0163_add_organizationmember_and_external_name.py b/src/sentry/migrations/0163_add_organizationmember_and_external_name.py
deleted file mode 100644
index ec12c03d8ff381..00000000000000
--- a/src/sentry/migrations/0163_add_organizationmember_and_external_name.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-18 01:06
-
-from django.db import migrations, models
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0162_backfill_saved_search_sort"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="externalteam",
- name="external_name",
- field=models.TextField(),
- preserve_default=False,
- ),
- migrations.AddField(
- model_name="externaluser",
- name="external_name",
- field=models.TextField(),
- preserve_default=False,
- ),
- migrations.AddField(
- model_name="externaluser",
- name="organizationmember",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.OrganizationMember"
- ),
- preserve_default=False,
- ),
- migrations.AlterUniqueTogether(
- name="externalteam",
- unique_together={("team", "provider", "external_name")},
- ),
- migrations.RemoveField(
- model_name="externalteam",
- name="external_id",
- ),
- migrations.AlterUniqueTogether(
- name="externaluser",
- unique_together={("organizationmember", "provider", "external_name")},
- ),
- migrations.RemoveField(
- model_name="externaluser",
- name="external_id",
- ),
- migrations.RemoveField(
- model_name="externaluser",
- name="user",
- ),
- ]
diff --git a/src/sentry/migrations/0164_add_protect_on_delete_codeowners.py b/src/sentry/migrations/0164_add_protect_on_delete_codeowners.py
deleted file mode 100644
index fc94b56b30c923..00000000000000
--- a/src/sentry/migrations/0164_add_protect_on_delete_codeowners.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-19 23:38
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0163_add_organizationmember_and_external_name"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[
- migrations.AlterField(
- model_name="projectcodeowners",
- name="repository_project_path_config",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.PROTECT,
- to="sentry.RepositoryProjectPathConfig",
- ),
- ),
- ]
- )
- ]
diff --git a/src/sentry/migrations/0165_metric_alerts_fix_group_ids.py b/src/sentry/migrations/0165_metric_alerts_fix_group_ids.py
deleted file mode 100644
index 264e3da410e447..00000000000000
--- a/src/sentry/migrations/0165_metric_alerts_fix_group_ids.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-24 00:18
-import logging
-
-from django.db import migrations, transaction
-
-
-def fix_metric_alert_group_ids(apps, schema_editor):
- from sentry.snuba.tasks import update_subscription_in_snuba
-
- QuerySubscription = apps.get_model("sentry", "QuerySubscription")
-
- # This table has < 10k rows, so filtering on an unindexed column will be fine. This
-    # should return < 50 rows in SaaS, and even fewer on other deploys
- for subscription in QuerySubscription.objects.filter(snuba_query__query__contains="issue.id"):
- # We want to recreate the subscription in snuba, so that the conditions are built
- # properly for issue ids. To do this, we just fire off the update task.
- try:
- with transaction.atomic():
- subscription.status = 2 # QuerySubscription.Status.UPDATING.value
- subscription.save()
- update_subscription_in_snuba(subscription.id)
- except Exception:
- logging.exception("Failed to fix subscription")
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0164_add_protect_on_delete_codeowners"),
- ]
-
- operations = [
- migrations.RunPython(
- fix_metric_alert_group_ids,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_querysubscription"]},
- )
- ]
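
A reduced sketch of the per-row pattern in `fix_metric_alert_group_ids`: each subscription is flipped to UPDATING and synced inside its own `transaction.atomic()` block, and failures are logged rather than raised, so one bad row cannot abort the whole (non-atomic) migration. The function and argument names here are placeholders for the real model and Snuba task.

import logging

from django.db import transaction

def resync(subscriptions, update_in_snuba):
    for subscription in subscriptions:
        try:
            with transaction.atomic():
                subscription.status = 2  # QuerySubscription.Status.UPDATING.value
                subscription.save()
                update_in_snuba(subscription.id)
        except Exception:
            # Log and continue; the remaining subscriptions still get fixed.
            logging.exception("Failed to fix subscription")
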
diff --git a/src/sentry/migrations/0166_create_notificationsetting_table.py b/src/sentry/migrations/0166_create_notificationsetting_table.py
deleted file mode 100644
index 16f69bce57bd70..00000000000000
--- a/src/sentry/migrations/0166_create_notificationsetting_table.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-26 22:11
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-import sentry.models.integration
-import sentry.models.notificationsetting
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0165_metric_alerts_fix_group_ids"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="NotificationSetting",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- (
- "scope_type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.models.notificationsetting.NotificationScopeType(0), "user"),
- (
- sentry.models.notificationsetting.NotificationScopeType(10),
- "organization",
- ),
- (
- sentry.models.notificationsetting.NotificationScopeType(20),
- "project",
- ),
- ]
- ),
- ),
- ("scope_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "target_type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.models.notificationsetting.NotificationTargetType(0), "user"),
- (sentry.models.notificationsetting.NotificationTargetType(10), "team"),
- ]
- ),
- ),
- ("target_identifier", sentry.db.models.fields.bounded.BoundedBigIntegerField()),
- (
- "provider",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.models.integration.ExternalProviders(100), "email"),
- (sentry.models.integration.ExternalProviders(110), "slack"),
- ]
- ),
- ),
- (
- "type",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (
- sentry.models.notificationsetting.NotificationSettingTypes(0),
- "default",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingTypes(10),
- "deploy",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingTypes(20),
- "issue",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingTypes(30),
- "workflow",
- ),
- ]
- ),
- ),
- (
- "value",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (
- sentry.models.notificationsetting.NotificationSettingOptionValues(
- 0
- ),
- "default",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingOptionValues(
- 10
- ),
- "off",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingOptionValues(
- 20
- ),
- "on",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingOptionValues(
- 30
- ),
- "subscribe_only",
- ),
- (
- sentry.models.notificationsetting.NotificationSettingOptionValues(
- 40
- ),
- "committed_only",
- ),
- ]
- ),
- ),
- ],
- options={
- "db_table": "sentry_notificationsetting",
- },
- ),
- migrations.AlterUniqueTogether(
- name="notificationsetting",
- unique_together={
- (
- "scope_type",
- "scope_identifier",
- "target_type",
- "target_identifier",
- "provider",
- "type",
- )
- },
- ),
- migrations.AlterIndexTogether(
- name="notificationsetting",
- index_together={("target_type", "target_identifier")},
- ),
- ]
diff --git a/src/sentry/migrations/0167_rm_organization_integration_from_projectcodeowners.py b/src/sentry/migrations/0167_rm_organization_integration_from_projectcodeowners.py
deleted file mode 100644
index 2dc757e0dc9f7c..00000000000000
--- a/src/sentry/migrations/0167_rm_organization_integration_from_projectcodeowners.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-01 23:53
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0166_create_notificationsetting_table"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.RemoveField(
- model_name="projectcodeowners",
- name="organization_integration",
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0168_demo_orgs_users.py b/src/sentry/migrations/0168_demo_orgs_users.py
deleted file mode 100644
index 77a262f2eb36fe..00000000000000
--- a/src/sentry/migrations/0168_demo_orgs_users.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-02 00:45
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0167_rm_organization_integration_from_projectcodeowners"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="organization",
- name="flags",
- field=bitfield.models.BitField(
- (
- (
- "allow_joinleave",
- "Allow members to join and leave teams without requiring approval.",
- ),
- (
- "enhanced_privacy",
- "Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.",
- ),
- (
- "disable_shared_issues",
- "Disable sharing of limited details on issues to anonymous users.",
- ),
- (
- "early_adopter",
- "Enable early adopter status, gaining access to features prior to public release.",
- ),
- (
- "require_2fa",
- "Require and enforce two-factor authentication for all members.",
- ),
- (
- "disable_new_visibility_features",
- "Temporarily opt out of new visibility features and ui",
- ),
- ("demo_mode", "Mark an organization as a demo org."),
- ),
- default=1,
- ),
- ),
- migrations.AlterField(
- model_name="user",
- name="flags",
- field=bitfield.models.BitField(
- (
- (
- "newsletter_consent_prompt",
- "Do we need to ask this user for newsletter consent?",
- ),
- ("demo_mode", "Mark an user as a demo user."),
- ),
- default=0,
- null=True,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0169_delete_organization_integration_from_projectcodeowners.py b/src/sentry/migrations/0169_delete_organization_integration_from_projectcodeowners.py
deleted file mode 100644
index e7de69a06f3270..00000000000000
--- a/src/sentry/migrations/0169_delete_organization_integration_from_projectcodeowners.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-02 01:25
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0168_demo_orgs_users"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_projectcodeowners" DROP COLUMN "organization_integration_id";
- """,
- reverse_sql="""
- ALTER TABLE "sentry_projectcodeowners" ADD COLUMN "organization_integration_id" bigint NULL;
- """,
- hints={"tables": ["sentry_projectcodeowners"]},
- )
- ],
- state_operations=[],
- )
- ]
diff --git a/src/sentry/migrations/0170_actor_introduction.py b/src/sentry/migrations/0170_actor_introduction.py
deleted file mode 100644
index 1c5da07bf156e5..00000000000000
--- a/src/sentry/migrations/0170_actor_introduction.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-03 22:11
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0169_delete_organization_integration_from_projectcodeowners"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="Actor",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- (
- "type",
- models.PositiveSmallIntegerField(
- choices=[
- (0, "team"),
- (1, "user"),
- ]
- ),
- ),
- ],
- options={
- "db_table": "sentry_actor",
- },
- ),
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE sentry_team ADD COLUMN "actor_id" bigint NULL;
- ALTER TABLE auth_user ADD COLUMN "actor_id" bigint NULL;
- """,
- reverse_sql="""
- ALTER TABLE sentry_team DROP COLUMN "actor_id";
- ALTER TABLE auth_user DROP COLUMN "actor_id";
- """,
- hints={"tables": ["sentry_team", "auth_user"]},
- ),
- migrations.RunSQL(
- """
- CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS sentry_team_actor_idx ON sentry_team (actor_id);
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY IF EXISTS sentry_team_actor_idx;
- """,
- hints={"tables": ["sentry_team"]},
- ),
- migrations.RunSQL(
- """
- ALTER TABLE sentry_team ADD CONSTRAINT "sentry_team_actor_idx_fk_sentry_actor_id" FOREIGN KEY ("actor_id") REFERENCES "sentry_actor" ("id") DEFERRABLE INITIALLY DEFERRED;
- """,
- reverse_sql="""
- ALTER TABLE sentry_team DROP CONSTRAINT IF EXISTS sentry_team_actor_idx_fk_sentry_actor_id;
- """,
- hints={"tables": ["sentry_actor"]},
- ),
- migrations.RunSQL(
- """
- CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS auth_user_actor_idx ON auth_user (actor_id);
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY IF EXISTS auth_user_actor_idx;
- """,
- hints={"tables": ["auth_user"]},
- ),
- migrations.RunSQL(
- """
- ALTER TABLE auth_user ADD CONSTRAINT "auth_user_actor_idx_fk_sentry_actor_id" FOREIGN KEY ("actor_id") REFERENCES "sentry_actor" ("id") DEFERRABLE INITIALLY DEFERRED;
- """,
- reverse_sql="""
-                ALTER TABLE auth_user DROP CONSTRAINT IF EXISTS auth_user_actor_idx_fk_sentry_actor_id;
-                """,
-                hints={"tables": ["auth_user"]},
- ),
- ],
- state_operations=[
- migrations.AddField(
- model_name="team",
- name="actor",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.PROTECT,
- to="sentry.Actor",
- unique=True,
- ),
- ),
- migrations.AddField(
- model_name="user",
- name="actor",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.PROTECT,
- to="sentry.Actor",
- unique=True,
- ),
- ),
- ],
- ),
- ]
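
0170 sets `atomic = False` because `CREATE UNIQUE INDEX CONCURRENTLY` refuses to run inside a transaction block. Outside of Django's migration runner the same statement needs autocommit; an illustrative psycopg2 sketch (the connection string is a placeholder):

import psycopg2

conn = psycopg2.connect("dbname=sentry")  # placeholder DSN
conn.autocommit = True  # CONCURRENTLY cannot run in a transaction
with conn.cursor() as cur:
    cur.execute(
        "CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS "
        "sentry_team_actor_idx ON sentry_team (actor_id)"
    )
conn.close()
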
diff --git a/src/sentry/migrations/0171_backfill_actors.py b/src/sentry/migrations/0171_backfill_actors.py
deleted file mode 100644
index 240a5172a9aae1..00000000000000
--- a/src/sentry/migrations/0171_backfill_actors.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-05 18:22
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_null_actors(apps, schema_editor):
- User = apps.get_model("sentry", "User")
- Team = apps.get_model("sentry", "Team")
- Actor = apps.get_model("sentry", "Actor")
- for user in RangeQuerySetWrapperWithProgressBar(User.objects.all()):
- if user.actor_id is None:
- user.actor_id = Actor.objects.create(type=1).id
- user.save()
-
- for team in RangeQuerySetWrapperWithProgressBar(Team.objects.all()):
- if team.actor_id is None:
- team.actor_id = Actor.objects.create(type=0).id
- team.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0170_actor_introduction"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_null_actors,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_user", "sentry_team", "sentry_actor"]},
- )
- ]
diff --git a/src/sentry/migrations/0172_rule_owner_fields.py b/src/sentry/migrations/0172_rule_owner_fields.py
deleted file mode 100644
index 7da2a87f9d776c..00000000000000
--- a/src/sentry/migrations/0172_rule_owner_fields.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-10 16:18
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0171_backfill_actors"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="alertrule",
- name="owner",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.Actor"
- ),
- ),
- migrations.AddField(
- model_name="rule",
- name="owner",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True, on_delete=django.db.models.deletion.CASCADE, to="sentry.Actor"
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0173_remove_demo_flag.py b/src/sentry/migrations/0173_remove_demo_flag.py
deleted file mode 100644
index b1ca6117693d77..00000000000000
--- a/src/sentry/migrations/0173_remove_demo_flag.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-08 19:42
-
-import bitfield.models
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0172_rule_owner_fields"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="organization",
- name="flags",
- field=bitfield.models.BitField(
- (
- (
- "allow_joinleave",
- "Allow members to join and leave teams without requiring approval.",
- ),
- (
- "enhanced_privacy",
- "Enable enhanced privacy controls to limit personally identifiable information (PII) as well as source code in things like notifications.",
- ),
- (
- "disable_shared_issues",
- "Disable sharing of limited details on issues to anonymous users.",
- ),
- (
- "early_adopter",
- "Enable early adopter status, gaining access to features prior to public release.",
- ),
- (
- "require_2fa",
- "Require and enforce two-factor authentication for all members.",
- ),
- (
- "disable_new_visibility_features",
- "Temporarily opt out of new visibility features and ui",
- ),
- ),
- default=1,
- ),
- ),
- migrations.AlterField(
- model_name="user",
- name="flags",
- field=bitfield.models.BitField(
- (
- (
- "newsletter_consent_prompt",
- "Do we need to ask this user for newsletter consent?",
- ),
- ),
- default=0,
- null=True,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0174_my_issues_saved_search.py b/src/sentry/migrations/0174_my_issues_saved_search.py
deleted file mode 100644
index 707e114874f0a0..00000000000000
--- a/src/sentry/migrations/0174_my_issues_saved_search.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.29 on 2021-02-19 00:08
-
-from django.db import migrations
-
-
-def add_my_issues_search(apps, schema_editor):
- SavedSearch = apps.get_model("sentry", "SavedSearch")
- SavedSearch.objects.create(
- name="My Issues",
- query="is:unresolved assigned_or_suggested:me",
- organization_id=None,
- is_default=False,
- is_global=True,
- # models.search_common.SearchType.ISSUE
- type=0,
- sort="date",
- )
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0173_remove_demo_flag"),
- ]
-
- operations = [
- migrations.RunPython(
- add_my_issues_search,
- migrations.RunPython.noop,
- hints={"tables": ["sentry_savedsearch"]},
- ),
- ]
diff --git a/src/sentry/migrations/0175_make_targets_nullable.py b/src/sentry/migrations/0175_make_targets_nullable.py
deleted file mode 100644
index 38919ee9f28c1f..00000000000000
--- a/src/sentry/migrations/0175_make_targets_nullable.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-11 21:20
-
-from django.db import migrations
-import sentry.db.models.fields.bounded
-import sentry.models.notificationsetting
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0174_my_issues_saved_search"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="notificationsetting",
- name="target_identifier",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(null=True),
- ),
- migrations.AlterField(
- model_name="notificationsetting",
- name="target_type",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.models.notificationsetting.NotificationTargetType(0), "user"),
- (sentry.models.notificationsetting.NotificationTargetType(10), "team"),
- ],
- null=True,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0176_remove_targets.py b/src/sentry/migrations/0176_remove_targets.py
deleted file mode 100644
index 02589edef16814..00000000000000
--- a/src/sentry/migrations/0176_remove_targets.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-11 23:15
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0175_make_targets_nullable"),
- ]
-
- operations = [
- migrations.AlterUniqueTogether(
- name="notificationsetting",
- unique_together={("scope_type", "scope_identifier", "provider", "type")},
- ),
- migrations.AlterIndexTogether(
- name="notificationsetting",
- index_together=set(),
- ),
- migrations.SeparateDatabaseAndState(
- database_operations=[],
- state_operations=[
- migrations.RemoveField(model_name="notificationsetting", name="target_identifier"),
- migrations.RemoveField(model_name="notificationsetting", name="target_type"),
- ],
- ),
- ]
diff --git a/src/sentry/migrations/0177_drop_targets.py b/src/sentry/migrations/0177_drop_targets.py
deleted file mode 100644
index a8362c0febaf7d..00000000000000
--- a/src/sentry/migrations/0177_drop_targets.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-12 17:46
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0176_remove_targets"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_notificationsetting" DROP COLUMN "target_identifier";
- ALTER TABLE "sentry_notificationsetting" DROP COLUMN "target_type";
- """,
- reverse_sql="""
- ALTER TABLE "sentry_notificationsetting" ADD COLUMN "target_identifier" bigint NULL;
- ALTER TABLE "sentry_notificationsetting" ADD COLUMN "target_type" int NULL;
-
- """,
- hints={"tables": ["sentry_notificationsetting"]},
- )
- ],
- state_operations=[],
- )
- ]
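
Together, 0176 and 0177 repeat the deploy-safe column drop seen earlier in 0167/0169: first remove the fields from Django's state while the database columns remain (so still-running code from before the deploy keeps working), then drop the columns with raw SQL. A generic skeleton of the second step, with hypothetical table and column names:

from django.db import migrations

drop_column = migrations.RunSQL(
    'ALTER TABLE "my_table" DROP COLUMN "my_column";',
    # Restore a NULLable column so the migration can be rolled back.
    reverse_sql='ALTER TABLE "my_table" ADD COLUMN "my_column" bigint NULL;',
    hints={"tables": ["my_table"]},
)
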
diff --git a/src/sentry/migrations/0178_add_new_target_column.py b/src/sentry/migrations/0178_add_new_target_column.py
deleted file mode 100644
index d8038330edd2eb..00000000000000
--- a/src/sentry/migrations/0178_add_new_target_column.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-12 23:02
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0177_drop_targets"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="notificationsetting",
- name="target",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- default=1, on_delete=django.db.models.deletion.CASCADE, to="sentry.Actor"
- ),
- preserve_default=False,
- ),
- migrations.AlterUniqueTogether(
- name="notificationsetting",
- unique_together={("scope_type", "scope_identifier", "target", "provider", "type")},
- ),
- ]
diff --git a/src/sentry/migrations/0179_update_legacy_discover_saved_query_timestamps.py b/src/sentry/migrations/0179_update_legacy_discover_saved_query_timestamps.py
deleted file mode 100644
index 068aa8f57b7a3d..00000000000000
--- a/src/sentry/migrations/0179_update_legacy_discover_saved_query_timestamps.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-15 14:56
-from datetime import datetime
-
-import pytz
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-LEGACY_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
-DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
-
-
-def update_legacy_discover_saved_query_timestamps(apps, schema_editor):
- DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
-
- for saved_query in RangeQuerySetWrapperWithProgressBar(DiscoverSavedQuery.objects.all()):
- query = saved_query.query
- updated = False
- for key in ["start", "end"]:
- if key not in query:
- continue
-
- value = query[key]
- try:
- parsed = datetime.strptime(value, LEGACY_DATETIME_FORMAT).astimezone(pytz.utc)
- except ValueError:
- pass
- else:
- value = datetime.strftime(parsed, DATETIME_FORMAT)
- query[key] = value
- updated = True
-
- if updated:
- saved_query.query = query
- saved_query.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0178_add_new_target_column"),
- ]
-
- operations = [
- migrations.RunPython(
- update_legacy_discover_saved_query_timestamps,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_discoversavedquery"]},
- )
- ]
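
For reference, a standalone sketch of the timestamp rewrite performed by the deleted 0179 migration above. The two format strings are taken verbatim from the migration; the sample value is hypothetical:

    from datetime import datetime

    import pytz

    LEGACY_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
    DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"

    value = "2021-03-15T14:56:00+0100"  # hypothetical saved-query "start" bound
    # Parse the legacy offset-bearing form, normalize to UTC, re-serialize.
    parsed = datetime.strptime(value, LEGACY_DATETIME_FORMAT).astimezone(pytz.utc)
    print(parsed.strftime(DATETIME_FORMAT))  # -> 2021-03-15T13:56:00.000000Z
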
diff --git a/src/sentry/migrations/0180_add_saved_search_sorts.py b/src/sentry/migrations/0180_add_saved_search_sorts.py
deleted file mode 100644
index 4824a8a991011f..00000000000000
--- a/src/sentry/migrations/0180_add_saved_search_sorts.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-12 23:33
-
-from django.db import migrations
-import sentry.db.models.fields.text
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0179_update_legacy_discover_saved_query_timestamps"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="savedsearch",
- name="sort",
- field=sentry.db.models.fields.text.CharField(
- choices=[
- ("date", "Last Seen"),
- ("new", "First Seen"),
- ("priority", "Priority"),
- ("freq", "Events"),
- ("user", "Users"),
- ("trend", "Relative Change"),
- ("inbox", "Date Added"),
- ],
- default="date",
- max_length=16,
- null=True,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0181_copy_useroptions_to_notificationsettings.py b/src/sentry/migrations/0181_copy_useroptions_to_notificationsettings.py
deleted file mode 100644
index 259202741653fa..00000000000000
--- a/src/sentry/migrations/0181_copy_useroptions_to_notificationsettings.py
+++ /dev/null
@@ -1,165 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-22 17:36
-
-from enum import Enum
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-class UserOptionValue:
- # 'workflow:notifications'
- all_conversations = "0"
- participating_only = "1"
- no_conversations = "2"
- # 'deploy-emails'
- all_deploys = "2"
- committed_deploys_only = "3"
- no_deploys = "4"
-
-
-class ExternalProviders(Enum):
- GITHUB = 0
- GITLAB = 1
- EMAIL = 100
- SLACK = 110
-
-
-class NotificationScopeType(Enum):
- USER = 0
- ORGANIZATION = 10
- PROJECT = 20
-
-
-class NotificationSettingTypes(Enum):
- # top level config of on/off
- # for workflow also includes SUBSCRIBE_ONLY
- # for deploy also includes COMMITTED_ONLY
- DEFAULT = 0
- # send deploy notifications
- DEPLOY = 10
- # notifications for issues
- ISSUE_ALERTS = 20
- # notifications for changes in assignment, resolution, comments
- WORKFLOW = 30
-
-
-class NotificationSettingOptionValues(Enum):
- DEFAULT = 0 # Defer to a setting one level up.
- NEVER = 10
- ALWAYS = 20
- SUBSCRIBE_ONLY = 30 # workflow
- COMMITTED_ONLY = 40 # deploy
-
-
-def copy_useroption_to_notificationsetting(apps, schema_editor):
- UserOption = apps.get_model("sentry", "UserOption")
- User = apps.get_model("sentry", "User")
- NotificationSetting = apps.get_model("sentry", "NotificationSetting")
- for user_option in RangeQuerySetWrapperWithProgressBar(UserOption.objects.all()):
- if user_option.key == "workflow:notifications":
- # if you have fine-tuning for projects, project_id will have a value (rather than None)
- if user_option.project_id:
- scope_identifier = user_option.project_id
- scope_type = NotificationScopeType.PROJECT.value
- else:
- scope_identifier = user_option.user.id
- scope_type = NotificationScopeType.USER.value
- type = NotificationSettingTypes.WORKFLOW.value
- if user_option.value == UserOptionValue.all_conversations:
- value = NotificationSettingOptionValues.ALWAYS.value
- if user_option.value == UserOptionValue.participating_only:
- value = NotificationSettingOptionValues.SUBSCRIBE_ONLY.value
- if user_option.value == UserOptionValue.no_conversations:
- value = NotificationSettingOptionValues.NEVER.value
- elif user_option.key == "mail:alert": # fine tuned project alerts
- if user_option.project_id:
- scope_identifier = user_option.project_id
- scope_type = NotificationScopeType.PROJECT.value
- else:
- scope_identifier = user_option.user.id
- scope_type = NotificationScopeType.USER.value
- type = NotificationSettingTypes.ISSUE_ALERTS.value
- try:
- int_value = int(user_option.value)
- except (ValueError, TypeError):
- # if for some reason this isn't an int or a stringified int, it's garbage and we'll skip
- # because an empty value is meaningless
- continue
- if int_value == 0:
- value = NotificationSettingOptionValues.NEVER.value
- if int_value == 1:
- value = NotificationSettingOptionValues.ALWAYS.value
- elif user_option.key == "subscribe_by_default": # top level project alerts on/off
- scope_identifier = user_option.user.id
- scope_type = NotificationScopeType.USER.value
- type = NotificationSettingTypes.ISSUE_ALERTS.value
- try:
- int_value = int(user_option.value)
- except (ValueError, TypeError):
- continue
- if int_value == 1:
- value = NotificationSettingOptionValues.ALWAYS.value
- if int_value == 0:
- value = NotificationSettingOptionValues.NEVER.value
- elif user_option.key == "deploy-emails":
- # if you have fine-tuning for an org, organization_id will have a value (rather than None)
- if user_option.organization_id:
- scope_identifier = user_option.organization_id
- scope_type = NotificationScopeType.ORGANIZATION.value
- else:
- scope_identifier = user_option.user.id
- scope_type = NotificationScopeType.USER.value
- type = NotificationSettingTypes.DEPLOY.value
- # if you've not explicitly set anything OR set it to default, there is no db row
- # by default deploy notifications are set to committed_deploys_only,
- # but there will be an entry for the top level alert option
- # if you change the value to something else
- if user_option.value == UserOptionValue.all_deploys:
- value = NotificationSettingOptionValues.ALWAYS.value
- if user_option.value == UserOptionValue.no_deploys:
- value = NotificationSettingOptionValues.NEVER.value
- if user_option.value == UserOptionValue.committed_deploys_only:
- value = NotificationSettingOptionValues.COMMITTED_ONLY.value
- else:
- continue
- user = User.objects.select_related("actor").get(id=user_option.user_id)
- NotificationSetting.objects.update_or_create(
- scope_type=scope_type, # user, org, or project
- scope_identifier=scope_identifier, # user_id, organization_id, or project_id
- target=user.actor,
- provider=ExternalProviders.EMAIL.value, # 100
- type=type,
- defaults={"value": value}, # NotificationSettingOptionValues
- )
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
- dependencies = [
- ("sentry", "0180_add_saved_search_sorts"),
- ]
- operations = [
- migrations.RunPython(
- copy_useroption_to_notificationsetting,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_notificationsetting"]},
- )
- ]
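
As a quick illustration of the mapping the deleted 0181 migration applies to `workflow:notifications` rows — the integer constants are copied from the enums above; the input value is hypothetical:

    # UserOptionValue string -> NotificationSettingOptionValues int, per the branch above
    WORKFLOW_VALUE_MAP = {
        "0": 20,  # all_conversations  -> ALWAYS
        "1": 30,  # participating_only -> SUBSCRIBE_ONLY
        "2": 10,  # no_conversations   -> NEVER
    }

    user_option_value = "1"  # hypothetical stored option value
    print(WORKFLOW_VALUE_MAP[user_option_value])  # -> 30 (SUBSCRIBE_ONLY)
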
diff --git a/src/sentry/migrations/0182_update_user_misery_on_saved_queries.py b/src/sentry/migrations/0182_update_user_misery_on_saved_queries.py
deleted file mode 100644
index cf4a39e183c81b..00000000000000
--- a/src/sentry/migrations/0182_update_user_misery_on_saved_queries.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-26 14:43
-import re
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-USER_MISERY_REGEX = r"^user_misery\((\d+)\)"
-
-
-def update_user_misery_column_on_saved_queries(apps, schema_editor):
- DiscoverSavedQuery = apps.get_model("sentry", "DiscoverSavedQuery")
-
- for saved_query in RangeQuerySetWrapperWithProgressBar(DiscoverSavedQuery.objects.all()):
- query = saved_query.query
- fields = query.get("fields")
- if not fields:
- continue
-
- updated = False
- for i, field in enumerate(fields):
- match = re.match(USER_MISERY_REGEX, field)
- if match and "count_unique(user)" not in fields:
- fields[i] = f"count_miserable(user, {match.group(1)})"
- updated = True
-
- if updated:
- saved_query.query["fields"] = fields
- saved_query.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0181_copy_useroptions_to_notificationsettings"),
- ]
-
- operations = [
- migrations.RunPython(
- update_user_misery_column_on_saved_queries,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_discoversavedquery"]},
- )
- ]
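
A minimal, self-contained sketch of the field rewrite in the deleted 0182 migration above; the sample field list is hypothetical, the regex is verbatim:

    import re

    USER_MISERY_REGEX = r"^user_misery\((\d+)\)"

    fields = ["user_misery(300)", "transaction"]  # hypothetical saved-query fields
    for i, field in enumerate(fields):
        match = re.match(USER_MISERY_REGEX, field)
        # Only rewrite when the query doesn't already rely on count_unique(user).
        if match and "count_unique(user)" not in fields:
            fields[i] = f"count_miserable(user, {match.group(1)})"
    print(fields)  # -> ['count_miserable(user, 300)', 'transaction']
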
diff --git a/src/sentry/migrations/0183_make_codemapping_unique_on_projectcodeowners.py b/src/sentry/migrations/0183_make_codemapping_unique_on_projectcodeowners.py
deleted file mode 100644
index 517648bb70b9f8..00000000000000
--- a/src/sentry/migrations/0183_make_codemapping_unique_on_projectcodeowners.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-29 23:25
-
-from django.db import migrations
-import django.db.models.deletion
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0182_update_user_misery_on_saved_queries"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="projectcodeowners",
- name="repository_project_path_config",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.PROTECT,
- to="sentry.RepositoryProjectPathConfig",
- unique=True,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0184_copy_useroptions_to_notificationsettings_2.py b/src/sentry/migrations/0184_copy_useroptions_to_notificationsettings_2.py
deleted file mode 100644
index 7a7f7cd5b7b097..00000000000000
--- a/src/sentry/migrations/0184_copy_useroptions_to_notificationsettings_2.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Generated by Django 1.11.29 on 2021-03-31 18:24
-
-from enum import Enum
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-class ExternalProviders(Enum):
- GITHUB = 0
- GITLAB = 1
- EMAIL = 100
- SLACK = 110
-
-
-class NotificationScopeType(Enum):
- USER = 0
- ORGANIZATION = 10
- PROJECT = 20
-
-
-class NotificationSettingTypes(Enum):
- # top level config of on/off
- # for workflow also includes SUBSCRIBE_ONLY
- # for deploy also includes COMMITTED_ONLY
- DEFAULT = 0
- # send deploy notifications
- DEPLOY = 10
- # notifications for issues
- ISSUE_ALERTS = 20
- # notifications for changes in assignment, resolution, comments
- WORKFLOW = 30
-
-
-class NotificationSettingOptionValues(Enum):
- DEFAULT = 0 # Defer to a setting one level up.
- NEVER = 10
- ALWAYS = 20
- SUBSCRIBE_ONLY = 30 # workflow
- COMMITTED_ONLY = 40 # deploy
-
-
-def get_value(user_option):
- try:
- int_value = int(user_option.value)
- except (ValueError, TypeError):
- return None
-
- if int_value == 1:
- return NotificationSettingOptionValues.ALWAYS.value
- elif int_value == 0:
- return NotificationSettingOptionValues.NEVER.value
- return None
-
-
-def copy_useroption_to_notificationsetting(apps, schema_editor):
- UserOption = apps.get_model("sentry", "UserOption")
- User = apps.get_model("sentry", "User")
- NotificationSetting = apps.get_model("sentry", "NotificationSetting")
- for user_option in RangeQuerySetWrapperWithProgressBar(UserOption.objects.all()):
- if user_option.key == "subscribe_by_default": # top level issue alerts on/off
- value = get_value(user_option)
- if value is None:
- continue
-
- user = User.objects.get(id=user_option.user_id)
- NotificationSetting.objects.update_or_create(
- scope_type=NotificationScopeType.USER.value,
- scope_identifier=user_option.user.id,
- target_id=user.actor_id,
- provider=ExternalProviders.EMAIL.value, # 100
- type=NotificationSettingTypes.ISSUE_ALERTS.value,
- defaults={"value": value}, # NotificationSettingOptionValues
- )
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0183_make_codemapping_unique_on_projectcodeowners"),
- ]
-
- operations = [
- migrations.RunPython(
- copy_useroption_to_notificationsetting,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_notificationsetting"]},
- )
- ]
diff --git a/src/sentry/migrations/0185_rm_copied_useroptions.py b/src/sentry/migrations/0185_rm_copied_useroptions.py
deleted file mode 100644
index 043b2f2949dc6f..00000000000000
--- a/src/sentry/migrations/0185_rm_copied_useroptions.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Generated by Django 1.11.29 on 2021-04-07 21:30
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def delete_duplicate_useroption_rows(apps, schema_editor):
- """
- Delete the rows in UserOption that have already been copied over
- to the NotificationSetting table; while we're at it, also drop a few unused keys.
- """
- UserOption = apps.get_model("sentry", "UserOption")
- for user_option in RangeQuerySetWrapperWithProgressBar(UserOption.objects.all()):
- if user_option.key in (
- "workflow:notifications",
- "mail:alert",
- "deploy-emails",
- "subscribe_by_default",
- "seen_release_broadcast",
- "twilio:alert",
- "workflow_notification",
- ):
- user_option.delete()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
- dependencies = [
- ("sentry", "0184_copy_useroptions_to_notificationsettings_2"),
- ]
- operations = [
- migrations.RunPython(
- code=delete_duplicate_useroption_rows,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_useroption"]},
- )
- ]
diff --git a/src/sentry/migrations/0186_add_externalactor.py b/src/sentry/migrations/0186_add_externalactor.py
deleted file mode 100644
index 51d435cb4fb126..00000000000000
--- a/src/sentry/migrations/0186_add_externalactor.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Generated by Django 1.11.29 on 2021-04-14 19:02
-
-import django.db.models.deletion
-import django.utils.timezone
-from django.db import migrations, models
-
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-import sentry.types.integrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0185_rm_copied_useroptions"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="ExternalActor",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- (
- "provider",
- sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.types.integrations.ExternalProviders(100), "email"),
- (sentry.types.integrations.ExternalProviders(110), "slack"),
- (sentry.types.integrations.ExternalProviders(120), "msteams"),
- (sentry.types.integrations.ExternalProviders(130), "pagerduty"),
- (sentry.types.integrations.ExternalProviders(200), "github"),
- (sentry.types.integrations.ExternalProviders(210), "gitlab"),
- ]
- ),
- ),
- ("external_name", models.TextField()),
- ("external_id", models.TextField(null=True)),
- (
- "actor",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Actor"
- ),
- ),
- (
- "integration",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- null=True,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Integration",
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- ],
- options={
- "db_table": "sentry_externalactor",
- },
- ),
- migrations.AlterUniqueTogether(
- name="externalactor",
- unique_together={("organization", "provider", "external_name", "actor")},
- ),
- ]
diff --git a/src/sentry/migrations/0187_backfill_me_or_none.py b/src/sentry/migrations/0187_backfill_me_or_none.py
deleted file mode 100644
index 88404e915c61c7..00000000000000
--- a/src/sentry/migrations/0187_backfill_me_or_none.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Generated by Django 1.11.29 on 2021-04-26 20:15
-
-from django.db import migrations
-
-from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
-
-
-def backfill_me_or_none(apps, schema_editor):
- SavedSearch = apps.get_model("sentry", "SavedSearch")
- for saved_search in RangeQuerySetWrapperWithProgressBar(SavedSearch.objects.all()):
- if ":me_or_none" in saved_search.query:
- saved_search.query = saved_search.query.replace(":me_or_none", ":[me, none]")
- saved_search.save()
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0186_add_externalactor"),
- ]
-
- operations = [
- migrations.RunPython(
- backfill_me_or_none,
- reverse_code=migrations.RunPython.noop,
- hints={"tables": ["sentry_savedsearch"]},
- )
- ]
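
The deleted 0187 backfill reduces to a plain string replacement; for example, with a hypothetical saved-search query:

    query = "is:unresolved assigned:me_or_none"  # hypothetical saved-search query
    if ":me_or_none" in query:
        query = query.replace(":me_or_none", ":[me, none]")
    print(query)  # -> is:unresolved assigned:[me, none]
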
diff --git a/src/sentry/migrations/0188_remove_externalteam_externaluser_fk_constraints.py b/src/sentry/migrations/0188_remove_externalteam_externaluser_fk_constraints.py
deleted file mode 100644
index 2497f7e3fdd7b1..00000000000000
--- a/src/sentry/migrations/0188_remove_externalteam_externaluser_fk_constraints.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-03 18:25
-
-import django.db.models.deletion
-from django.db import migrations
-
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0187_backfill_me_or_none"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="externalteam",
- name="team",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to="sentry.Team"
- ),
- ),
- migrations.AlterField(
- model_name="externaluser",
- name="organizationmember",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.OrganizationMember",
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0189_remove_externaluser_externalteam_models.py b/src/sentry/migrations/0189_remove_externaluser_externalteam_models.py
deleted file mode 100644
index d94d7793363f9d..00000000000000
--- a/src/sentry/migrations/0189_remove_externaluser_externalteam_models.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-03 20:27
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0188_remove_externalteam_externaluser_fk_constraints"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- state_operations=[migrations.DeleteModel(name="ExternalUser")],
- database_operations=[],
- ),
- migrations.SeparateDatabaseAndState(
- state_operations=[migrations.DeleteModel(name="ExternalTeam")],
- database_operations=[],
- ),
- ]
diff --git a/src/sentry/migrations/0190_drop_external_user_table.py b/src/sentry/migrations/0190_drop_external_user_table.py
deleted file mode 100644
index b1f85e12cd0ae9..00000000000000
--- a/src/sentry/migrations/0190_drop_external_user_table.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-03 21:05
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0189_remove_externaluser_externalteam_models"),
- ]
-
- operations = [
- migrations.RunSQL(
- """
- DROP TABLE "sentry_externaluser";
- """,
- reverse_sql="CREATE TABLE sentry_externaluser (fake_col int)", # We just create a fake table here so that the DROP will work if we roll back the migration.
- hints={"tables": ["sentry_externaluser"]},
- ),
- migrations.RunSQL(
- """
- DROP TABLE "sentry_externalteam";
- """,
- reverse_sql="CREATE TABLE sentry_externalteam (fake_col int)", # We just create a fake table here so that the DROP will work if we roll back the migration.
- hints={"tables": ["sentry_externalteam"]},
- ),
- ]
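
To see why the deleted 0190 migration's reverse_sql creates a throwaway table: rolling back runs the CREATE, so re-applying the forward DROP has something to remove. A toy round-trip — sqlite3 is used here purely for illustration, the real migration targets Postgres:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE sentry_externaluser (fake_col int)")  # reverse_sql (rollback)
    conn.execute('DROP TABLE "sentry_externaluser"')                 # forward SQL applies cleanly again
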
diff --git a/src/sentry/migrations/0191_make_externalactor_integration_id_not_null.py b/src/sentry/migrations/0191_make_externalactor_integration_id_not_null.py
deleted file mode 100644
index 2bb419842cc432..00000000000000
--- a/src/sentry/migrations/0191_make_externalactor_integration_id_not_null.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-03 22:09
-import django.db.models.deletion
-from django.db import migrations
-
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
- dependencies = [
- ("sentry", "0190_drop_external_user_table"),
- ]
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_externalactor" ALTER COLUMN "integration_id" SET DEFAULT 1;
- UPDATE "sentry_externalactor" SET "integration_id" = 1 where "integration_id" is NULL;
- """,
- reverse_sql="""
- ALTER TABLE "sentry_externalactor" ALTER COLUMN "integration_id" DROP DEFAULT;
- """,
- hints={"tables": ["sentry_externalactor"]},
- ),
- migrations.RunSQL(
- """
- ALTER TABLE "sentry_externalactor" ALTER COLUMN "integration_id" SET NOT NULL;
- ALTER TABLE "sentry_externalactor" ALTER COLUMN "integration_id" DROP DEFAULT;
- """,
- reverse_sql="""
- ALTER TABLE "sentry_externalactor" ALTER COLUMN "integration_id" DROP NOT NULL;
- """,
- hints={"tables": ["sentry_externalactor"]},
- ),
- ],
- state_operations=[
- migrations.AlterField(
- model_name="externalactor",
- name="integration",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- default=1,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Integration",
- ),
- preserve_default=False,
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0192_remove_fileblobowner_org_fk.py b/src/sentry/migrations/0192_remove_fileblobowner_org_fk.py
deleted file mode 100644
index 01b4f2b6a1ad76..00000000000000
--- a/src/sentry/migrations/0192_remove_fileblobowner_org_fk.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-06 10:03
-
-from django.db import migrations
-
-import sentry.db.models.fields.bounded
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0191_make_externalactor_integration_id_not_null"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.AlterField(
- model_name="fileblobowner",
- name="organization",
- field=sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- to="sentry.Organization",
- db_constraint=False,
- ),
- ),
- ],
- state_operations=[
- migrations.AddField(
- model_name="fileblobowner",
- name="organization_id",
- field=sentry.db.models.fields.bounded.BoundedBigIntegerField(db_index=True),
- preserve_default=False,
- ),
- migrations.RemoveField(
- model_name="fileblobowner",
- name="organization",
- ),
- migrations.AlterUniqueTogether(
- name="fileblobowner",
- unique_together={("blob", "organization_id")},
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0193_grouprelease_indexes.py b/src/sentry/migrations/0193_grouprelease_indexes.py
deleted file mode 100644
index 5f41a9acc55a37..00000000000000
--- a/src/sentry/migrations/0193_grouprelease_indexes.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Generated by Django 1.11.29 on 2021-04-26 20:15
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0192_remove_fileblobowner_org_fk"),
- ]
-
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS sentry_grouprelease_group_id_first_seen_53fc35ds
- ON sentry_grouprelease USING btree (group_id, first_seen);
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY IF EXISTS sentry_grouprelease_group_id_first_seen_53fc35ds;
- """,
- hints={"tables": ["sentry_grouprelease"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS sentry_grouprelease_group_id_last_seen_g8v2sk7c
- ON sentry_grouprelease USING btree (group_id, last_seen DESC);
- """,
- reverse_sql="""
- DROP INDEX CONCURRENTLY IF EXISTS sentry_grouprelease_group_id_last_seen_g8v2sk7c;
- """,
- hints={"tables": ["sentry_grouprelease"]},
- ),
- ],
- state_operations=[
- migrations.AlterIndexTogether(
- name="grouprelease",
- index_together={("group_id", "last_seen"), ("group_id", "first_seen")},
- ),
- ],
- )
- ]
diff --git a/src/sentry/migrations/0194_add_custom_scm_provider.py b/src/sentry/migrations/0194_add_custom_scm_provider.py
deleted file mode 100644
index 53ac6cc0cb6e48..00000000000000
--- a/src/sentry/migrations/0194_add_custom_scm_provider.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-14 00:17
-
-from django.db import migrations
-
-import sentry.db.models.fields.bounded
-import sentry.types.integrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0193_grouprelease_indexes"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="externalactor",
- name="provider",
- field=sentry.db.models.fields.bounded.BoundedPositiveIntegerField(
- choices=[
- (sentry.types.integrations.ExternalProviders(100), "email"),
- (sentry.types.integrations.ExternalProviders(110), "slack"),
- (sentry.types.integrations.ExternalProviders(120), "msteams"),
- (sentry.types.integrations.ExternalProviders(130), "pagerduty"),
- (sentry.types.integrations.ExternalProviders(200), "github"),
- (sentry.types.integrations.ExternalProviders(210), "gitlab"),
- (sentry.types.integrations.ExternalProviders(700), "custom_scm"),
- ]
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0195_add_team_key_transactions.py b/src/sentry/migrations/0195_add_team_key_transactions.py
deleted file mode 100644
index f7001b10f09e77..00000000000000
--- a/src/sentry/migrations/0195_add_team_key_transactions.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-14 20:56
-
-import django.db.models.deletion
-from django.db import migrations, models
-
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0194_add_custom_scm_provider"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="TeamKeyTransaction",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("transaction", models.CharField(max_length=200)),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- (
- "team",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Team"
- ),
- ),
- ],
- options={
- "db_table": "sentry_performanceteamkeytransaction",
- },
- ),
- migrations.AlterUniqueTogether(
- name="teamkeytransaction",
- unique_together={("project", "team", "transaction")},
- ),
- ]
diff --git a/src/sentry/migrations/0196_add_restricted_member_limit.py b/src/sentry/migrations/0196_add_restricted_member_limit.py
deleted file mode 100644
index 52a159bb68fa7e..00000000000000
--- a/src/sentry/migrations/0196_add_restricted_member_limit.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-17 21:02
-
-from django.db import migrations
-
-import bitfield.models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0195_add_team_key_transactions"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="organizationmember",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("sso:linked", "sso:linked"),
- ("sso:invalid", "sso:invalid"),
- ("member-limit:restricted", "member-limit:restricted"),
- ),
- default=0,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0197_add_scim_enabled_boolean.py b/src/sentry/migrations/0197_add_scim_enabled_boolean.py
deleted file mode 100644
index c51e42b37fb855..00000000000000
--- a/src/sentry/migrations/0197_add_scim_enabled_boolean.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-18 17:22
-
-from django.db import migrations
-
-import bitfield.models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0196_add_restricted_member_limit"),
- ]
-
- operations = [
- migrations.AlterField(
- model_name="authprovider",
- name="flags",
- field=bitfield.models.BitField(
- (
- ("allow_unlinked", "Grant access to members who have not linked SSO accounts."),
- ("scim_enabled", "Enable SCIM for member and team provisioning and syncing"),
- ),
- default=0,
- ),
- ),
- ]
diff --git a/src/sentry/migrations/0198_add_project_transaction_threshold.py b/src/sentry/migrations/0198_add_project_transaction_threshold.py
deleted file mode 100644
index bf9340b9a41ace..00000000000000
--- a/src/sentry/migrations/0198_add_project_transaction_threshold.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-19 12:45
-
-import django.db.models.deletion
-import django.utils.timezone
-from django.conf import settings
-from django.db import migrations, models
-
-import sentry.db.models.fields.bounded
-import sentry.db.models.fields.foreignkey
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = False
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = True
-
- dependencies = [
- ("sentry", "0197_add_scim_enabled_boolean"),
- ]
-
- operations = [
- migrations.CreateModel(
- name="ProjectTransactionThreshold",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- ("threshold", models.IntegerField()),
- ("metric", models.PositiveSmallIntegerField(default=1)),
- (
- "edited_by",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- unique=True,
- ),
- ),
- ],
- options={
- "db_table": "sentry_projecttransactionthreshold",
- },
- ),
- migrations.CreateModel(
- name="ProjectTransactionThresholdOverride",
- fields=[
- (
- "id",
- sentry.db.models.fields.bounded.BoundedBigAutoField(
- primary_key=True, serialize=False
- ),
- ),
- ("date_updated", models.DateTimeField(default=django.utils.timezone.now)),
- ("date_added", models.DateTimeField(default=django.utils.timezone.now, null=True)),
- ("transaction", models.CharField(max_length=200)),
- ("threshold", models.IntegerField()),
- ("metric", models.PositiveSmallIntegerField(default=1)),
- (
- "edited_by",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- null=True,
- on_delete=django.db.models.deletion.SET_NULL,
- to=settings.AUTH_USER_MODEL,
- ),
- ),
- (
- "organization",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- on_delete=django.db.models.deletion.CASCADE, to="sentry.Organization"
- ),
- ),
- (
- "project",
- sentry.db.models.fields.foreignkey.FlexibleForeignKey(
- db_constraint=False,
- on_delete=django.db.models.deletion.CASCADE,
- to="sentry.Project",
- ),
- ),
- ],
- options={
- "db_table": "sentry_projecttransactionthresholdoverride",
- },
- ),
- migrations.AlterUniqueTogether(
- name="projecttransactionthresholdoverride",
- unique_together={("project", "transaction")},
- ),
- ]
diff --git a/src/sentry/migrations/0199_release_semver.py b/src/sentry/migrations/0199_release_semver.py
deleted file mode 100644
index f8869f09f79a8d..00000000000000
--- a/src/sentry/migrations/0199_release_semver.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-20 20:27
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
-
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
-
- dependencies = [
- ("sentry", "0198_add_project_transaction_threshold"),
- ]
-
- operations = [
- migrations.AddField(
- model_name="release",
- name="build_code",
- field=models.TextField(null=True),
- ),
- migrations.AddField(
- model_name="release",
- name="build_number",
- field=models.BigIntegerField(null=True),
- ),
- migrations.AddField(
- model_name="release",
- name="major",
- field=models.BigIntegerField(null=True),
- ),
- migrations.AddField(
- model_name="release",
- name="minor",
- field=models.BigIntegerField(null=True),
- ),
- migrations.AddField(
- model_name="release",
- name="patch",
- field=models.BigIntegerField(null=True),
- ),
- migrations.AddField(
- model_name="release",
- name="prerelease",
- field=models.TextField(null=True),
- ),
- migrations.AddField(
- model_name="release",
- name="revision",
- field=models.BigIntegerField(null=True),
- ),
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS "sentry_release_organization_id_major_mi_38715957_idx"
- ON "sentry_release" ("organization_id", "major" DESC, "minor" DESC, "patch" DESC, "revision" DESC);
- """,
- reverse_sql="DROP INDEX CONCURRENTLY IF EXISTS sentry_release_organization_id_major_mi_38715957_idx",
- hints={"tables": ["sentry_release"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS "sentry_release_organization_id_build_code_f93815e5_idx" ON "sentry_release" ("organization_id", "build_code");
- """,
- reverse_sql="DROP INDEX CONCURRENTLY IF EXISTS sentry_release_organization_id_build_code_f93815e5_idx",
- hints={"tables": ["sentry_release"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS "sentry_release_organization_id_build_number_e1646551_idx" ON "sentry_release" ("organization_id", "build_number");
- """,
- reverse_sql="DROP INDEX CONCURRENTLY IF EXISTS sentry_release_organization_id_build_number_e1646551_idx",
- hints={"tables": ["sentry_release"]},
- ),
- ],
- state_operations=[
- migrations.AlterIndexTogether(
- name="release",
- index_together={
- ("organization", "build_code"),
- ("organization", "major", "minor", "patch", "revision"),
- ("organization", "build_number"),
- },
- ),
- ],
- ),
- ]
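
The deleted migration above is the concrete case the boilerplate comments describe: `CREATE INDEX CONCURRENTLY` refuses to run inside a transaction block, so the migration sets `atomic = False` and uses `SeparateDatabaseAndState` to execute raw SQL while keeping Django's model state in sync. A minimal sketch of the same pattern follows; the app label, table, and index names are hypothetical placeholders, not Sentry's.

from django.db import migrations


class Migration(migrations.Migration):
    # CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
    # so the migration must opt out of Django's implicit transaction.
    atomic = False

    dependencies = [("myapp", "0001_initial")]

    operations = [
        migrations.SeparateDatabaseAndState(
            # The raw SQL actually executed against the database.
            database_operations=[
                migrations.RunSQL(
                    'CREATE INDEX CONCURRENTLY IF NOT EXISTS "myapp_thing_name_idx" '
                    'ON "myapp_thing" ("name");',
                    reverse_sql='DROP INDEX CONCURRENTLY IF EXISTS "myapp_thing_name_idx";',
                ),
            ],
            # Only Django's in-memory model state is updated here.
            state_operations=[
                migrations.AlterIndexTogether(name="thing", index_together={("name",)}),
            ],
        ),
    ]
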
diff --git a/src/sentry/migrations/0200_release_indices.py b/src/sentry/migrations/0200_release_indices.py
deleted file mode 100644
index 25e31b28dec01a..00000000000000
--- a/src/sentry/migrations/0200_release_indices.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Generated by Django 1.11.29 on 2021-05-27 17:16
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
- # This flag is used to mark that a migration shouldn't be automatically run in
- # production. We set this to True for operations that we think are risky and want
- # someone from ops to run manually and monitor.
- # General advice is that if in doubt, mark your migration as `is_dangerous`.
- # Some things you should always mark as dangerous:
- # - Large data migrations. Typically we want these to be run manually by ops so that
- # they can be monitored. Since data migrations will now hold a transaction open
- # this is even more important.
- # - Adding columns to highly active tables, even ones that are NULL.
- is_dangerous = True
- # This flag is used to decide whether to run this migration in a transaction or not.
- # By default we prefer to run in a transaction, but for migrations where you want
- # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
- # want to create an index concurrently when adding one to an existing table.
- # You'll also usually want to set this to `False` if you're writing a data
- # migration, since we don't want the entire migration to run in one long-running
- # transaction.
- atomic = False
- dependencies = [
- ("sentry", "0199_release_semver"),
- ]
- operations = [
- migrations.SeparateDatabaseAndState(
- database_operations=[
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS "sentry_release_organization_id_status_3c637259_idx" ON "sentry_release" ("organization_id", "status");
- """,
- reverse_sql="DROP INDEX CONCURRENTLY IF EXISTS sentry_release_organization_id_status_3c637259_idx",
- hints={"tables": ["sentry_release"]},
- ),
- migrations.RunSQL(
- """
- CREATE INDEX CONCURRENTLY IF NOT EXISTS "sentry_release_organization_id_date_added_8ebd273a_idx" ON "sentry_release" ("organization_id", "date_added");
- """,
- reverse_sql="DROP INDEX CONCURRENTLY IF EXISTS sentry_release_organization_id_date_added_8ebd273a_idx",
- hints={"tables": ["sentry_release"]},
- ),
- ],
- state_operations=[
- migrations.AlterIndexTogether(
- name="release",
- index_together={
- ("organization", "build_code"),
- ("organization", "major", "minor", "patch", "revision"),
- ("organization", "build_number"),
- ("organization", "status"),
- ("organization", "date_added"),
- },
- ),
- ],
- ),
- ]
diff --git a/src/sentry/migrations/0201_semver_package.py b/src/sentry/migrations/0201_semver_package.py
index e2d8c59c164abb..c3e4fd5545a952 100644
--- a/src/sentry/migrations/0201_semver_package.py
+++ b/src/sentry/migrations/0201_semver_package.py
@@ -25,7 +25,7 @@ class Migration(migrations.Migration):
atomic = False
dependencies = [
- ("sentry", "0200_release_indices"),
+ ("sentry", "0001_squashed_0200_release_indices"),
]
operations = [
diff --git a/src/sentry/runner/commands/upgrade.py b/src/sentry/runner/commands/upgrade.py
index 03b1029ca6b869..dddf112b3ee787 100644
--- a/src/sentry/runner/commands/upgrade.py
+++ b/src/sentry/runner/commands/upgrade.py
@@ -1,12 +1,41 @@
import click
from django.conf import settings
+from django.db import connections
+from django.db.utils import ProgrammingError
from sentry.runner.decorators import configuration
+def _check_history():
+ connection = connections["default"]
+ cursor = connection.cursor()
+ try:
+ # If this query fails because there are no tables, we're good to go.
+ cursor.execute("SELECT COUNT(*) FROM django_migrations")
+ if cursor.fetchone()[0] == 0:
+ return
+ except ProgrammingError as e:
+ # Having no migrations table is ok, as we're likely operating on a new install.
+ if 'relation "django_migrations" does not exist' in str(e):
+ return
+ click.echo(f"Checking migration state failed with: {e}")
+ raise click.ClickException("Could not determine migration state. Aborting")
+
+ # If we haven't run all the migrations up to the latest squash, abort.
+ # As we squash more history, this should be updated.
+ cursor.execute("SELECT 1 FROM django_migrations WHERE name = '0200_release_indices'")
+ if not cursor.fetchone():
+ raise click.ClickException(
+ "It looks like you've skipped a hard stop in our upgrade process. "
+ "Please follow the upgrade process here: https://develop.sentry.dev/self-hosted/#hard-stops"
+ )
+
+
def _upgrade(interactive, traceback, verbosity, repair, with_nodestore):
from django.core.management import call_command as dj_call_command
+ _check_history()
+
for db_conn in settings.DATABASES.keys():
# Always run migrations for the default connection.
# Also run migrations on connections that have migrations explicitly enabled.
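
The guard in `_check_history` reduces to two cases: an empty or missing `django_migrations` table means a fresh install, which is fine, while an existing history that lacks the pre-squash marker migration means the operator skipped a hard stop. A distilled sketch of that decision, assuming a plain DB-API cursor (`check_squash_marker` is an illustrative name, not Sentry API; the missing-table error handling is omitted for brevity):

import click


def check_squash_marker(cursor, marker="0200_release_indices"):
    # No history at all: a new install, so squashed migrations can apply cleanly.
    cursor.execute("SELECT COUNT(*) FROM django_migrations")
    if cursor.fetchone()[0] == 0:
        return
    # History exists but predates the squash: a hard stop was skipped.
    cursor.execute("SELECT 1 FROM django_migrations WHERE name = %s", [marker])
    if cursor.fetchone() is None:
        raise click.ClickException(
            "Migration history predates the squash; upgrade through the hard stop first."
        )
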
diff --git a/src/sentry_plugins/jira_ac/migrations/0001_initial.py b/src/sentry_plugins/jira_ac/migrations/0001_initial.py
index 036c26abbf424e..582aaccf1b7789 100644
--- a/src/sentry_plugins/jira_ac/migrations/0001_initial.py
+++ b/src/sentry_plugins/jira_ac/migrations/0001_initial.py
@@ -19,7 +19,7 @@ class Migration(migrations.Migration):
# - Adding columns to highly active tables, even ones that are NULL.
is_dangerous = False
- dependencies = [("sentry", "0001_initial")]
+ dependencies = [("sentry", "0001_squashed_0200_release_indices")]
operations = [
migrations.CreateModel(
|
1270308b57fd55295fbba0623673c3153d43a60f
|
2024-06-26 14:08:42
|
Daniel Szoke
|
ref: Remove Hub from project configs endpoints (#73357)
| false
|
Remove Hub from project configs endpoints (#73357)
|
ref
|
diff --git a/src/sentry/api/endpoints/relay/project_configs.py b/src/sentry/api/endpoints/relay/project_configs.py
index 07cdf8f3a6642d..fda297e0228bb5 100644
--- a/src/sentry/api/endpoints/relay/project_configs.py
+++ b/src/sentry/api/endpoints/relay/project_configs.py
@@ -4,7 +4,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry_sdk import Hub, set_tag, start_span
+from sentry_sdk import set_tag, start_span
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
@@ -207,7 +207,7 @@ def _post_by_key(self, request: Request) -> MutableMapping[str, ProjectConfig]:
# Prevent organization from being fetched again in quotas.
project.set_cached_field_value("organization", organization)
- with Hub.current.start_span(op="get_config"):
+ with start_span(op="get_config"):
with metrics.timer("relay_project_configs.get_config.duration"):
project_config = config.get_project_config(
project,
|
e27f063e1415802d0ff114a424ef2679e5263f3b
|
2022-04-12 11:49:44
|
Priscila Oliveira
|
ref(searchbar): Convert to FC (#33494)
| false
|
Convert to FC (#33494)
|
ref
|
diff --git a/static/app/components/events/searchBar.tsx b/static/app/components/events/searchBar.tsx
index ac55769b615436..9b1f7542009c42 100644
--- a/static/app/components/events/searchBar.tsx
+++ b/static/app/components/events/searchBar.tsx
@@ -1,13 +1,11 @@
-import * as React from 'react';
+import {useEffect} from 'react';
import {ClassNames} from '@emotion/react';
import assign from 'lodash/assign';
import flatten from 'lodash/flatten';
-import isEqual from 'lodash/isEqual';
import memoize from 'lodash/memoize';
import omit from 'lodash/omit';
import {fetchTagValues} from 'sentry/actionCreators/tags';
-import {Client} from 'sentry/api';
import SmartSearchBar from 'sentry/components/smartSearchBar';
import {NEGATION_OPERATOR, SEARCH_WILDCARD} from 'sentry/constants';
import {Organization, SavedSearchType, TagCollection} from 'sentry/types';
@@ -22,7 +20,7 @@ import {
TRACING_FIELDS,
} from 'sentry/utils/discover/fields';
import Measurements from 'sentry/utils/measurements/measurements';
-import withApi from 'sentry/utils/withApi';
+import useApi from 'sentry/utils/useApi';
import withTags from 'sentry/utils/withTags';
const SEARCH_SPECIAL_CHARS_REGEXP = new RegExp(
@@ -31,35 +29,37 @@ const SEARCH_SPECIAL_CHARS_REGEXP = new RegExp(
);
export type SearchBarProps = Omit<React.ComponentProps<typeof SmartSearchBar>, 'tags'> & {
- api: Client;
organization: Organization;
tags: TagCollection;
fields?: Readonly<Field[]>;
includeSessionTagsValues?: boolean;
+ maxSearchItems?: React.ComponentProps<typeof SmartSearchBar>['maxSearchItems'];
omitTags?: string[];
projectIds?: number[] | Readonly<number[]>;
};
-class SearchBar extends React.PureComponent<SearchBarProps> {
- componentDidMount() {
+function SearchBar(props: SearchBarProps) {
+ const {
+ maxSearchItems,
+ organization,
+ tags,
+ omitTags,
+ fields,
+ projectIds,
+ includeSessionTagsValues,
+ } = props;
+
+ const api = useApi();
+
+ useEffect(() => {
// Clear memoized data on mount to make tests more consistent.
- this.getEventFieldValues.cache.clear?.();
- }
-
- componentDidUpdate(prevProps) {
- if (!isEqual(this.props.projectIds, prevProps.projectIds)) {
- // Clear memoized data when projects change.
- this.getEventFieldValues.cache.clear?.();
- }
- }
-
- /**
- * Returns array of tag values that substring match `query`; invokes `callback`
- * with data when ready
- */
- getEventFieldValues = memoize(
+ getEventFieldValues.cache.clear?.();
+ }, [projectIds]);
+
+ // Returns array of tag values that substring match `query`; invokes `callback`
+ // with data when ready
+ const getEventFieldValues = memoize(
(tag, query, endpointParams): Promise<string[]> => {
- const {api, organization, projectIds, includeSessionTagsValues} = this.props;
const projectIdStrings = (projectIds as Readonly<number>[])?.map(String);
if (isAggregateField(tag.key) || isMeasurement(tag.key)) {
@@ -92,18 +92,11 @@ class SearchBar extends React.PureComponent<SearchBarProps> {
({key}, query) => `${key}-${query}`
);
- /**
- * Prepare query string (e.g. strip special characters like negation operator)
- */
- prepareQuery = query => query.replace(SEARCH_SPECIAL_CHARS_REGEXP, '');
-
- getTagList(
+ const getTagList = (
measurements: Parameters<
React.ComponentProps<typeof Measurements>['children']
>[0]['measurements']
- ) {
- const {fields, organization, tags, omitTags} = this.props;
-
+ ) => {
const functionTags = fields
? Object.fromEntries(
fields
@@ -128,36 +121,35 @@ class SearchBar extends React.PureComponent<SearchBarProps> {
};
return omit(combined, omitTags ?? []);
- }
-
- render() {
- return (
- <Measurements>
- {({measurements}) => {
- const tags = this.getTagList(measurements);
- return (
- <ClassNames>
- {({css}) => (
- <SmartSearchBar
- hasRecentSearches
- savedSearchType={SavedSearchType.EVENT}
- onGetTagValues={this.getEventFieldValues}
- supportedTags={tags}
- prepareQuery={this.prepareQuery}
- excludeEnvironment
- dropdownClassName={css`
- max-height: 300px;
- overflow-y: auto;
- `}
- {...this.props}
- />
- )}
- </ClassNames>
- );
- }}
- </Measurements>
- );
- }
+ };
+
+ return (
+ <Measurements>
+ {({measurements}) => (
+ <ClassNames>
+ {({css}) => (
+ <SmartSearchBar
+ hasRecentSearches
+ savedSearchType={SavedSearchType.EVENT}
+ onGetTagValues={getEventFieldValues}
+ supportedTags={getTagList(measurements)}
+ prepareQuery={query => {
+ // Prepare query string (e.g. strip special characters like negation operator)
+ return query.replace(SEARCH_SPECIAL_CHARS_REGEXP, '');
+ }}
+ maxSearchItems={maxSearchItems}
+ excludeEnvironment
+ dropdownClassName={css`
+ max-height: 300px;
+ overflow-y: auto;
+ `}
+ {...props}
+ />
+ )}
+ </ClassNames>
+ )}
+ </Measurements>
+ );
}
-export default withApi(withTags(SearchBar));
+export default withTags(SearchBar);
|
a5939c3b8b5fd3d5affdc202297a96a37965f89f
|
2022-01-17 21:51:45
|
Tony Xiao
|
fix(suspect-spans): All columns must be explicitly specified (#31135)
| false
|
All columns must be explicitly specified (#31135)
|
fix
|
diff --git a/src/sentry/api/endpoints/organization_events_spans_performance.py b/src/sentry/api/endpoints/organization_events_spans_performance.py
index 72039c342d37aa..e575ad94a87886 100644
--- a/src/sentry/api/endpoints/organization_events_spans_performance.py
+++ b/src/sentry/api/endpoints/organization_events_spans_performance.py
@@ -449,31 +449,16 @@ def query_suspect_span_groups(
] + [
"array_join(spans_op)",
"array_join(spans_group)",
- "count()",
- "count_unique(id)",
# want a single event id to fetch from nodestore for the span description
"any(id)",
]
equations: List[str] = [
strip_equation(column)
- for column in suspect_span_columns.suspect_op_group_columns
+ for column in suspect_span_columns.suspect_op_group_columns + fields
if is_equation(column)
]
- # TODO: This adds all the possible fields to the query by default. However,
- # due to the way shards aggregate the rows, this can be slow. As an
- # optimization, allow the fields to be user specified to only get the
- # necessary aggregations.
- #
- # As part of the transition, continue to add all possible fields when its
- # not specified, but this should be removed in the future.
- if not fields:
- for column in SPAN_PERFORMANCE_COLUMNS.values():
- for col in column.suspect_op_group_sort:
- if not col.startswith("equation["):
- selected_columns.append(col)
-
builder = QueryBuilder(
dataset=Dataset.Discover,
params=params,
diff --git a/tests/snuba/api/endpoints/test_organization_events_spans_performance.py b/tests/snuba/api/endpoints/test_organization_events_spans_performance.py
index e2318c471ec0a2..3a67fd9465a4e0 100644
--- a/tests/snuba/api/endpoints/test_organization_events_spans_performance.py
+++ b/tests/snuba/api/endpoints/test_organization_events_spans_performance.py
@@ -491,7 +491,18 @@ def test_sort_default(self):
with self.feature(self.FEATURES):
response = self.client.get(
self.url,
- data={"project": self.project.id},
+ data={
+ "project": self.project.id,
+ "field": [
+ "percentileArray(spans_exclusive_time, 0.50)",
+ "percentileArray(spans_exclusive_time, 0.75)",
+ "percentileArray(spans_exclusive_time, 0.95)",
+ "percentileArray(spans_exclusive_time, 0.99)",
+ "count()",
+ "count_unique(id)",
+ "sumArray(spans_exclusive_time)",
+ ],
+ },
format="json",
)
|
78bd700a93ce86351e90baf1fec197300c894b7b
|
2024-09-03 20:28:18
|
Tony Xiao
|
chore(profiling): Clean up continuous profiling flags (#76741)
| false
|
Clean up continuous profiling flags (#76741)
|
chore
|
diff --git a/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx b/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx
index a17d7474e08a29..1e047215670e38 100644
--- a/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx
+++ b/static/app/views/issueDetails/groupEventDetails/groupEventDetailsContent.tsx
@@ -474,7 +474,6 @@ function ProfilingDurationRegressionIssueDetailsContent({
event,
project,
}: Required<EventDetailsContentProps>) {
- const organization = useOrganization();
return (
<RegressionEventContainer>
<TransactionsDeltaProvider event={event} project={project}>
@@ -485,11 +484,9 @@ function ProfilingDurationRegressionIssueDetailsContent({
<ErrorBoundary mini>
<EventFunctionBreakpointChart event={event} />
</ErrorBoundary>
- {!organization.features.includes('continuous-profiling-compat') && (
- <ErrorBoundary mini>
- <EventAffectedTransactions event={event} group={group} project={project} />
- </ErrorBoundary>
- )}
+ <ErrorBoundary mini>
+ <EventAffectedTransactions event={event} group={group} project={project} />
+ </ErrorBoundary>
<ErrorBoundary mini>
<InterimSection
type={SectionKey.REGRESSION_FLAMEGRAPH}
diff --git a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx
index 8f4ae3dd0610d0..9ab1549af07006 100644
--- a/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx
+++ b/static/app/views/performance/newTraceDetails/traceDrawer/details/styles.tsx
@@ -472,16 +472,17 @@ function NodeActions(props: {
(typeof eventSize === 'number' ? ` (${formatBytesBase10(eventSize, 0)})` : ''),
};
- const continuousProfileLink: MenuItemProps | null = profileLink
- ? {
- key: 'continuous-profile',
- onAction: () => {
- traceAnalytics.trackViewContinuousProfile(props.organization);
- browserHistory.push(profileLink!);
- },
- label: t('Continuous Profile'),
- }
- : null;
+ const continuousProfileLink: MenuItemProps | null =
+ organization.features.includes('continuous-profiling-ui') && profileLink
+ ? {
+ key: 'continuous-profile',
+ onAction: () => {
+ traceAnalytics.trackViewContinuousProfile(props.organization);
+ browserHistory.push(profileLink!);
+ },
+ label: t('Continuous Profile'),
+ }
+ : null;
if (isTransactionNode(props.node)) {
return [showInView, jsonDetails, continuousProfileLink].filter(TypeSafeBoolean);
@@ -503,7 +504,7 @@ function NodeActions(props: {
}
return [showInView];
- }, [props, profileLink]);
+ }, [props, profileLink, organization.features]);
return (
<ActionsContainer>
diff --git a/static/app/views/profiling/content.tsx b/static/app/views/profiling/content.tsx
index 175b4bfb8756c0..d7ace2e833d7bd 100644
--- a/static/app/views/profiling/content.tsx
+++ b/static/app/views/profiling/content.tsx
@@ -424,17 +424,13 @@ function ProfilingTransactionsContent(props: ProfilingTabContentProps) {
const cursor = decodeScalar(location.query.cursor);
const query = decodeScalar(location.query.query, '');
- const continuousProfilingCompat = organization.features.includes(
- 'continuous-profiling-compat'
- );
-
const transactions = useProfileEvents<FieldType>({
cursor,
fields,
query,
sort,
referrer: 'api.profiling.landing-table',
- continuousProfilingCompat,
+ continuousProfilingCompat: true,
});
const transactionsError =
@@ -489,14 +485,14 @@ function ProfilingTransactionsContent(props: ProfilingTabContentProps) {
<ProfilingOnboardingCTA />
) : (
<Fragment>
- {organization.features.includes('continuous-profiling-compat') ? (
+ {organization.features.includes('continuous-profiling-ui') ? (
<Fragment>
<ProfilesChartWidget
chartHeight={150}
referrer="api.profiling.landing-chart"
userQuery={query}
selection={selection}
- continuousProfilingCompat={continuousProfilingCompat}
+ continuousProfilingCompat
/>
<SlowestFunctionsTable userQuery={query} />
</Fragment>
@@ -507,7 +503,7 @@ function ProfilingTransactionsContent(props: ProfilingTabContentProps) {
referrer="api.profiling.landing-chart"
userQuery={query}
selection={selection}
- continuousProfilingCompat={continuousProfilingCompat}
+ continuousProfilingCompat
/>
<WidgetsContainer>
<LandingWidgetSelector
|
4cfcced9696e0484ded3d23544365f0ce8a9abca
|
2023-01-12 15:22:58
|
Andrii Soldatenko
|
test(metric-extraction): Skip `test_all_transaction_metrics_emitted` test and add new metric `c:transactions@count_per_root_project` [TET-627] (#43167)
| false
|
Skip `test_all_transaction_metrics_emitted` test and add new metric `c:transactions@count_per_root_project` [TET-627] (#43167)
|
test
|
diff --git a/src/sentry/sentry_metrics/indexer/strings.py b/src/sentry/sentry_metrics/indexer/strings.py
index 7553573d0408ba..ca956dde813c8e 100644
--- a/src/sentry/sentry_metrics/indexer/strings.py
+++ b/src/sentry/sentry_metrics/indexer/strings.py
@@ -71,6 +71,7 @@
"d:transactions/breakdowns.span_ops.ops.browser@millisecond": PREFIX + 122,
"d:transactions/breakdowns.span_ops.ops.resource@millisecond": PREFIX + 123,
"d:transactions/breakdowns.span_ops.ops.ui@millisecond": PREFIX + 124,
+ "c:transactions/count_per_root_project@none": PREFIX + 125,
}
# 200 - 299
diff --git a/tests/relay_integration/test_metrics_extraction.py b/tests/relay_integration/test_metrics_extraction.py
index 5a869a1f1004fa..0f45e1f05e3c62 100644
--- a/tests/relay_integration/test_metrics_extraction.py
+++ b/tests/relay_integration/test_metrics_extraction.py
@@ -1,6 +1,7 @@
import uuid
import confluent_kafka as kafka
+import pytest
from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS
from sentry.tasks.relay import compute_projectkey_config
@@ -11,6 +12,9 @@
class MetricsExtractionTest(RelayStoreHelper, TransactionTestCase):
+ @pytest.mark.skip(
+ "TET-627: We need to release new metric first in relay and than adjust the test"
+ )
def test_all_transaction_metrics_emitted(self):
with Feature(
{
|
cb4dad174b16a288f3f0822743a00c32ccb6fdb4
|
2023-03-27 20:21:07
|
Ash Anand
|
perf(sentry): Add experimental UI element tag for interaction transactions (#46340)
| false
|
Add experimental UI element tag for interaction transactions (#46340)
|
perf
|
diff --git a/static/app/bootstrap/initializeSdk.tsx b/static/app/bootstrap/initializeSdk.tsx
index a86864d7c1635f..d36b84e4a7c696 100644
--- a/static/app/bootstrap/initializeSdk.tsx
+++ b/static/app/bootstrap/initializeSdk.tsx
@@ -7,7 +7,7 @@ import {_browserPerformanceTimeOriginMode} from '@sentry/utils';
import {SENTRY_RELEASE_VERSION, SPA_DSN} from 'sentry/constants';
import {Config} from 'sentry/types';
-import {addExtraMeasurements} from 'sentry/utils/performanceForSentry';
+import {addExtraMeasurements, addUIElementTag} from 'sentry/utils/performanceForSentry';
import {normalizeUrl} from 'sentry/utils/withDomainRequired';
const SPA_MODE_ALLOW_URLS = [
@@ -98,6 +98,7 @@ export function initializeSdk(config: Config, {routes}: {routes?: Function} = {}
},
beforeSendTransaction(event) {
addExtraMeasurements(event);
+ addUIElementTag(event);
event.spans = event.spans?.filter(span => {
// Filter analytic timeout spans.
diff --git a/static/app/utils/performanceForSentry.tsx b/static/app/utils/performanceForSentry.tsx
index 50ac0a3ba36391..082795f2039b32 100644
--- a/static/app/utils/performanceForSentry.tsx
+++ b/static/app/utils/performanceForSentry.tsx
@@ -425,3 +425,24 @@ export const setGroupedEntityTag = (
groups = [...groups, +Infinity];
setTag(`${tagName}.grouped`, `<=${groups.find(g => n <= g)}`);
};
+
+/**
+ * A temporary util function for interaction transactions that attaches a tag to the transaction, indicating the element
+ * that was interacted with. This allows querying for transactions by a specific element. This is a high-cardinality tag,
+ * but it is only temporary for an experiment.
+ */
+export const addUIElementTag = (transaction: TransactionEvent) => {
+ if (!transaction || transaction.contexts?.trace?.op !== 'ui.action.click') {
+ return;
+ }
+
+ if (!transaction.tags) {
+ return;
+ }
+
+ const interactionSpan = transaction.spans?.find(
+ span => span.op === 'ui.interaction.click'
+ );
+
+ transaction.tags.interactionElement = interactionSpan?.description;
+};
|
6aa7a49edb0f967ff5400ad7661a8ee3a47736af
|
2019-02-22 05:19:15
|
ted kaemming
|
ref(eventstream): Remove "relay" alias for "post-process-forwarder" (#12164)
| false
|
Remove "relay" alias for "post-process-forwarder" (#12164)
|
ref
|
diff --git a/src/sentry/runner/commands/run.py b/src/sentry/runner/commands/run.py
index 088f526eec0725..11bf65c3806244 100644
--- a/src/sentry/runner/commands/run.py
+++ b/src/sentry/runner/commands/run.py
@@ -253,45 +253,32 @@ def cron(**options):
).run()
-def _make_forwarder_command():
- # XXX: Calling ``run.command`` mutates the option specifications for some
- # reason that I don't care to identify (they only get picked up for the
- # first registered command), so we have to create two distinct instances of
- # this function for the temporary "relay" alias to work correctly. After the
- # alias is removed, this hack can be removed and the task function can be
- # defined at module level like everything else is normally.
-
- @click.option('--consumer-group', default='snuba-post-processor',
- help='Consumer group used to track event offsets that have been enqueued for post-processing.')
- @click.option('--commit-log-topic', default='snuba-commit-log',
- help='Topic that the Snuba writer is publishing its committed offsets to.')
- @click.option('--synchronize-commit-group', default='snuba-consumers',
- help='Consumer group that the Snuba writer is committing its offset as.')
- @click.option('--commit-batch-size', default=1000, type=int,
- help='How many messages to process (may or may not result in an enqueued task) before committing offsets.')
- @click.option('--initial-offset-reset', default='latest', type=click.Choice(['earliest', 'latest']),
- help='Position in the commit log topic to begin reading from when no prior offset has been recorded.')
- @log_options()
- @configuration
- def post_process_forwarder(**options):
- from sentry import eventstream
- from sentry.eventstream.base import ForwarderNotRequired
- try:
- eventstream.run_post_process_forwarder(
- consumer_group=options['consumer_group'],
- commit_log_topic=options['commit_log_topic'],
- synchronize_commit_group=options['synchronize_commit_group'],
- commit_batch_size=options['commit_batch_size'],
- initial_offset_reset=options['initial_offset_reset'],
- )
- except ForwarderNotRequired:
- sys.stdout.write(
- 'The configured event stream backend does not need a forwarder '
- 'process to enqueue post-process tasks. Exiting...\n')
- return
-
- return post_process_forwarder
-
-
-run.command('relay')(_make_forwarder_command()) # temporary alias for compatibility
-run.command('post-process-forwarder')(_make_forwarder_command())
[email protected]('post-process-forwarder')
[email protected]('--consumer-group', default='snuba-post-processor',
+ help='Consumer group used to track event offsets that have been enqueued for post-processing.')
[email protected]('--commit-log-topic', default='snuba-commit-log',
+ help='Topic that the Snuba writer is publishing its committed offsets to.')
[email protected]('--synchronize-commit-group', default='snuba-consumers',
+ help='Consumer group that the Snuba writer is committing its offset as.')
[email protected]('--commit-batch-size', default=1000, type=int,
+ help='How many messages to process (may or may not result in an enqueued task) before committing offsets.')
[email protected]('--initial-offset-reset', default='latest', type=click.Choice(['earliest', 'latest']),
+ help='Position in the commit log topic to begin reading from when no prior offset has been recorded.')
+@log_options()
+@configuration
+def post_process_forwarder(**options):
+ from sentry import eventstream
+ from sentry.eventstream.base import ForwarderNotRequired
+ try:
+ eventstream.run_post_process_forwarder(
+ consumer_group=options['consumer_group'],
+ commit_log_topic=options['commit_log_topic'],
+ synchronize_commit_group=options['synchronize_commit_group'],
+ commit_batch_size=options['commit_batch_size'],
+ initial_offset_reset=options['initial_offset_reset'],
+ )
+ except ForwarderNotRequired:
+ sys.stdout.write(
+ 'The configured event stream backend does not need a forwarder '
+ 'process to enqueue post-process tasks. Exiting...\n')
+ return
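
The factory hack only existed so the same function could be registered twice; with the alias gone, `@run.command('post-process-forwarder')` can decorate the function directly at module level. A toy sketch of that Click pattern (the group and option names here are illustrative):

import click


@click.group()
def run():
    """Stand-in for sentry's `run` command group."""


@run.command("post-process-forwarder")
@click.option("--consumer-group", default="snuba-post-processor")
def post_process_forwarder(consumer_group):
    # Click passes each declared option as a keyword argument.
    click.echo(f"consuming as {consumer_group}")


if __name__ == "__main__":
    # e.g. `python app.py post-process-forwarder --consumer-group mygroup`
    run()
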
|
68572fef62ed7726347a3d1603dfc5b2e3823209
|
2023-11-04 02:41:53
|
Vu Luong
|
fix(pageFilters): Check ability to select multiple projects in mapping function (#59232)
| false
|
Check ability to select multiple projects in mapping function (#59232)
|
fix
|
diff --git a/static/app/components/organizations/projectPageFilter/index.tsx b/static/app/components/organizations/projectPageFilter/index.tsx
index 681aca0c5b2676..eaf2b0654fd755 100644
--- a/static/app/components/organizations/projectPageFilter/index.tsx
+++ b/static/app/components/organizations/projectPageFilter/index.tsx
@@ -158,12 +158,14 @@ export function ProjectPageFilter({
// "My Projects"
if (!val.length) {
- return memberProjects.map(p => parseInt(p.id, 10));
+ return allowMultiple
+ ? memberProjects.map(p => parseInt(p.id, 10))
+ : [parseInt(memberProjects[0]?.id, 10)];
}
- return val;
+ return allowMultiple ? val : [val[0]];
},
- [memberProjects]
+ [memberProjects, allowMultiple]
);
const value = useMemo<number[]>(
diff --git a/static/app/views/organizationStats/index.spec.tsx b/static/app/views/organizationStats/index.spec.tsx
index fa53cf028cb4e0..79a9ca4c24b045 100644
--- a/static/app/views/organizationStats/index.spec.tsx
+++ b/static/app/views/organizationStats/index.spec.tsx
@@ -234,6 +234,7 @@ describe('OrganizationStats', function () {
render(<OrganizationStats {...defaultProps} organization={newOrg.organization} />, {
context: newOrg.routerContext,
+ organization: newOrg.organization,
});
expect(screen.queryByText('My Projects')).not.toBeInTheDocument();
@@ -249,8 +250,10 @@ describe('OrganizationStats', function () {
// TODO(Leander): Remove the following check once the project-stats flag is GA
'project-stats',
];
+ OrganizationStore.onUpdate(newOrg.organization, {replace: true});
render(<OrganizationStats {...defaultProps} organization={newOrg.organization} />, {
context: newOrg.routerContext,
+ organization: newOrg.organization,
});
expect(screen.getByText('All Projects')).toBeInTheDocument();
@@ -288,7 +291,10 @@ describe('OrganizationStats', function () {
organization={newOrg.organization}
selection={newSelection}
/>,
- {context: newOrg.routerContext}
+ {
+ context: newOrg.routerContext,
+ organization: newOrg.organization,
+ }
);
act(() => PageFiltersStore.updateProjects(selectedProjects, []));
@@ -330,7 +336,10 @@ describe('OrganizationStats', function () {
organization={newOrg.organization}
selection={newSelection}
/>,
- {context: newOrg.routerContext}
+ {
+ context: newOrg.routerContext,
+ organization: newOrg.organization,
+ }
);
act(() => PageFiltersStore.updateProjects(selectedProject, []));
@@ -363,6 +372,7 @@ describe('OrganizationStats', function () {
];
render(<OrganizationStats {...defaultProps} organization={newOrg.organization} />, {
context: newOrg.routerContext,
+ organization: newOrg.organization,
});
await userEvent.click(screen.getByTestId('proj-1'));
expect(screen.queryByText('My Projects')).not.toBeInTheDocument();
@@ -387,7 +397,10 @@ describe('OrganizationStats', function () {
organization={newOrg.organization}
selection={newSelection}
/>,
- {context: newOrg.routerContext}
+ {
+ context: newOrg.routerContext,
+ organization: newOrg.organization,
+ }
);
act(() => PageFiltersStore.updateProjects(selectedProject, []));
expect(screen.queryByText('My Projects')).not.toBeInTheDocument();
|
9fcf3ca3677f40a133269d791205a7c02daf0907
|
2021-01-22 01:43:14
|
k-fish
|
ref(metrics): Add capture envelope metric (#23222)
| false
|
Add capture envelope metric (#23222)
|
ref
|
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index 95af91e090e6b4..5222d2d097d009 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -183,6 +183,9 @@ def configure_sdk():
class MultiplexingTransport(sentry_sdk.transport.Transport):
def capture_envelope(self, envelope):
+ # Temporarily capture envelope counts to compare to ingested
+ # transactions.
+ metrics.incr("internal.captured.events.envelopes")
# Assume only transactions get sent via envelopes
if options.get("transaction-events.force-disable-internal-project"):
return
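
Counting in `capture_envelope` works because every envelope the SDK sends passes through the configured transport exactly once, before any downstream filtering. A hedged sketch of the same idea with a local counter; the `Transport` subclass shape follows `sentry_sdk`, but `CountingTransport` itself is illustrative:

import sentry_sdk.transport


class CountingTransport(sentry_sdk.transport.Transport):
    """Illustrative transport that counts every envelope it is handed."""

    def __init__(self, options=None):
        super().__init__(options)
        self.envelopes_seen = 0

    def capture_envelope(self, envelope):
        # Increment before any drop/forward decision so the count reflects
        # everything the SDK attempted to send.
        self.envelopes_seen += 1
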
|
fbf245e76c0dbada594a45de84b727f9278df419
|
2021-02-10 07:43:52
|
josh
|
ref(pyupgrade): f-strings complete (#23758)
| false
|
f-strings complete (#23758)
|
ref
|
diff --git a/examples/oauth2_consumer_webserver/app.py b/examples/oauth2_consumer_webserver/app.py
index ba50c150f9a343..2eaa7c504cced6 100644
--- a/examples/oauth2_consumer_webserver/app.py
+++ b/examples/oauth2_consumer_webserver/app.py
@@ -20,13 +20,13 @@
sentry = oauth.remote_app(
"sentry",
base_url=BASE_URL,
- authorize_url="{}/oauth/authorize/".format(BASE_URL),
+ authorize_url=f"{BASE_URL}/oauth/authorize/",
request_token_url=None,
request_token_params={
"scope": "project:releases event:read org:read org:write",
"response_type": "code",
},
- access_token_url="{}/oauth/token/".format(BASE_URL),
+ access_token_url=f"{BASE_URL}/oauth/token/",
access_token_method="POST",
access_token_params={"grant_type": "authorization_code"},
consumer_key=CLIENT_ID,
@@ -44,8 +44,8 @@ def index():
from urllib2 import Request, urlopen, URLError
- headers = {"Authorization": "Bearer {}".format(access_token)}
- req = Request("{}/api/0/organizations/".format(BASE_URL), None, headers)
+ headers = {"Authorization": f"Bearer {access_token}"}
+ req = Request(f"{BASE_URL}/api/0/organizations/", None, headers)
try:
res = urlopen(req)
except URLError as e:
diff --git a/setup.py b/setup.py
index 43cdc4cbfc22fb..da86fc06222b6e 100755
--- a/setup.py
+++ b/setup.py
@@ -80,7 +80,7 @@ def run(self):
def get_requirements(env):
- with open("requirements-{}.txt".format(env)) as fp:
+ with open(f"requirements-{env}.txt") as fp:
return [x.strip() for x in fp.read().split("\n") if not x.startswith("#")]
@@ -109,12 +109,8 @@ def get_requirements(env):
cmdclass=cmdclass,
license="BSL-1.1",
include_package_data=True,
- package_data={
- "sentry": ["static/sentry/{}/**".format(d) for d in ("dist", "js", "images", "vendor")]
- },
- exclude_package_data={
- "sentry": ["static/sentry/{}/**".format(d) for d in ("app", "fonts", "less")]
- },
+ package_data={"sentry": [f"static/sentry/{d}/**" for d in ("dist", "js", "images", "vendor")]},
+ exclude_package_data={"sentry": [f"static/sentry/{d}/**" for d in ("app", "fonts", "less")]},
entry_points={
"console_scripts": ["sentry = sentry.runner:main"],
"sentry.apps": [
diff --git a/src/bitfield/types.py b/src/bitfield/types.py
index 6e405f7a420587..151a7dfd229431 100644
--- a/src/bitfield/types.py
+++ b/src/bitfield/types.py
@@ -135,9 +135,9 @@ def __cmp__(self, other):
return cmp(self._value, other)
def __repr__(self):
- return "<%s: %s>" % (
+ return "<{}: {}>".format(
self.__class__.__name__,
- ", ".join("%s=%s" % (k, self.get_bit(n).is_set) for n, k in enumerate(self._keys)),
+ ", ".join("{}={}".format(k, self.get_bit(n).is_set) for n, k in enumerate(self._keys)),
)
def __str__(self):
diff --git a/src/sentry/__init__.py b/src/sentry/__init__.py
index eec00ebc178710..7d61e556dc8c79 100644
--- a/src/sentry/__init__.py
+++ b/src/sentry/__init__.py
@@ -37,7 +37,7 @@ def get_revision():
def get_version():
if __build__:
- return "%s.%s" % (__version__, __build__)
+ return f"{__version__}.{__build__}"
return __version__
diff --git a/src/sentry/bgtasks/api.py b/src/sentry/bgtasks/api.py
index 9f01b3d288ea27..6e14403f31ec6a 100644
--- a/src/sentry/bgtasks/api.py
+++ b/src/sentry/bgtasks/api.py
@@ -27,7 +27,7 @@ def __init__(self, callback, roles=None, interval=60):
@property
def name(self):
- return "%s:%s" % (self.callback.__module__, self.callback.__name__)
+ return f"{self.callback.__module__}:{self.callback.__name__}"
def run(self):
if self.running:
diff --git a/src/sentry/buffer/base.py b/src/sentry/buffer/base.py
index 031597ec494591..d9048ebfc3ae58 100644
--- a/src/sentry/buffer/base.py
+++ b/src/sentry/buffer/base.py
@@ -10,7 +10,7 @@
class BufferMount(type):
def __new__(cls, name, bases, attrs):
new_cls = type.__new__(cls, name, bases, attrs)
- new_cls.logger = logging.getLogger("sentry.buffer.%s" % (new_cls.__name__.lower(),))
+ new_cls.logger = logging.getLogger(f"sentry.buffer.{new_cls.__name__.lower()}")
return new_cls
diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py
index f13c327a19fcd7..72e888ce43f5ca 100644
--- a/src/sentry/buffer/redis.py
+++ b/src/sentry/buffer/redis.py
@@ -74,10 +74,10 @@ def _make_key(self, model, filters):
"""
Returns a Redis-compatible key for the model given filters.
"""
- return "b:k:%s:%s" % (
+ return "b:k:{}:{}".format(
model._meta,
md5_text(
- "&".join("%s=%s" % (k, self._coerce_val(v)) for k, v in sorted(filters.items()))
+ "&".join("{}={}".format(k, self._coerce_val(v)) for k, v in sorted(filters.items()))
).hexdigest(),
)
@@ -103,7 +103,7 @@ def _make_pending_key_from_key(self, key):
return self._make_pending_key(crc32(key) % self.pending_partitions)
def _make_lock_key(self, key):
- return "l:%s" % (key,)
+ return f"l:{key}"
def _dump_values(self, values):
result = {}
@@ -142,7 +142,7 @@ def _load_value(self, payload):
elif type_ == "f":
return float(value)
else:
- raise TypeError("invalid type: {}".format(type_))
+ raise TypeError(f"invalid type: {type_}")
def incr(self, model, columns, filters, extra=None, signal_only=None):
"""
@@ -164,7 +164,7 @@ def incr(self, model, columns, filters, extra=None, signal_only=None):
conn = self.cluster.get_local_client_for_key(key)
pipe = conn.pipeline()
- pipe.hsetnx(key, "m", "%s.%s" % (model.__module__, model.__name__))
+ pipe.hsetnx(key, "m", f"{model.__module__}.{model.__name__}")
# TODO(dcramer): once this goes live in production, we can kill the pickle path
# (this is to ensure a zero downtime deploy where we can transition event processing)
pipe.hsetnx(key, "f", pickle.dumps(filters))
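
`_make_key` is deterministic because the filters are sorted before hashing, so two dicts with the same items always map to the same Redis key regardless of insertion order. A worked sketch of the shape (the model label is hypothetical):

import hashlib


def make_buffer_key(model_meta, filters):
    # Sorting the filters keeps dict ordering from changing the digest.
    serialized = "&".join(f"{k}={v}" for k, v in sorted(filters.items()))
    digest = hashlib.md5(serialized.encode("utf-8")).hexdigest()
    return f"b:k:{model_meta}:{digest}"


# Both orderings produce the identical key.
assert make_buffer_key("sentry.group", {"a": 1, "b": 2}) == make_buffer_key(
    "sentry.group", {"b": 2, "a": 1}
)
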
diff --git a/src/sentry/cache/redis.py b/src/sentry/cache/redis.py
index 33bd3e0811d2a5..88cacd9a77adac 100644
--- a/src/sentry/cache/redis.py
+++ b/src/sentry/cache/redis.py
@@ -20,7 +20,7 @@ def set(self, key, value, timeout, version=None, raw=False):
key = self.make_key(key, version=version)
v = json.dumps(value) if not raw else value
if len(v) > self.max_size:
- raise ValueTooLarge("Cache key too large: %r %r" % (key, len(v)))
+ raise ValueTooLarge("Cache key too large: {!r} {!r}".format(key, len(v)))
if timeout:
self.client.setex(key, int(timeout), v)
else:
diff --git a/src/sentry/conf/locale.py b/src/sentry/conf/locale.py
index c4dc14a7731c41..8aaffea64c793e 100644
--- a/src/sentry/conf/locale.py
+++ b/src/sentry/conf/locale.py
@@ -8,7 +8,7 @@
def dirname_to_local(dir_name):
if "_" in dir_name:
pre, post = dir_name.split("_", 1)
- dir_name = "{}-{}".format(pre, post.lower())
+ dir_name = f"{pre}-{post.lower()}"
return dir_name
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index cb53f019aee2e2..5394d2b6a3c2a7 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -610,7 +610,7 @@ def SOCIAL_AUTH_DEFAULT_USERNAME():
def create_partitioned_queues(name):
exchange = Exchange(name, type="direct")
for num in range(1):
- CELERY_QUEUES.append(Queue("{}-{}".format(name, num), exchange=exchange))
+ CELERY_QUEUES.append(Queue(f"{name}-{num}", exchange=exchange))
create_partitioned_queues("counters")
diff --git a/src/sentry/constants.py b/src/sentry/constants.py
index a71d2ddfeeaa74..c83f80ed0f8b39 100644
--- a/src/sentry/constants.py
+++ b/src/sentry/constants.py
@@ -24,7 +24,7 @@ def get_all_languages():
continue
if "_" in path:
pre, post = path.split("_", 1)
- path = "{}-{}".format(pre, post.lower())
+ path = f"{pre}-{post.lower()}"
results.append(path)
return results
@@ -384,7 +384,7 @@ def get_integration_id_for_event(platform, sdk_name, integrations):
return PLATFORM_INTEGRATION_TO_INTEGRATION_ID[platform][integration]
# try <platform>-<integration>, for example "java-log4j"
- integration_id = "%s-%s" % (platform, integration)
+ integration_id = f"{platform}-{integration}"
if integration_id in INTEGRATION_ID_TO_PLATFORM_DATA:
return integration_id
diff --git a/src/sentry/culprit.py b/src/sentry/culprit.py
index ca48216fd6d82f..0bc567ea3a95af 100644
--- a/src/sentry/culprit.py
+++ b/src/sentry/culprit.py
@@ -67,5 +67,5 @@ def get_frame_culprit(frame, platform):
elif platform in ("javascript", "node"):
# function and fileloc might be unicode here, so let it coerce
# to a unicode string if needed.
- return "%s(%s)" % (frame.get("function") or "?", fileloc)
- return "%s in %s" % (fileloc, frame.get("function") or "?")
+ return "{}({})".format(frame.get("function") or "?", fileloc)
+ return "{} in {}".format(fileloc, frame.get("function") or "?")
diff --git a/src/sentry/data_export/endpoints/data_export.py b/src/sentry/data_export/endpoints/data_export.py
index 62b39edb9005eb..ad8856f88dfb43 100644
--- a/src/sentry/data_export/endpoints/data_export.py
+++ b/src/sentry/data_export/endpoints/data_export.py
@@ -46,9 +46,7 @@ def validate(self, data):
fields = [fields]
if len(fields) > MAX_FIELDS:
- detail = "You can export up to {} fields at a time. Please delete some and try again.".format(
- MAX_FIELDS
- )
+ detail = f"You can export up to {MAX_FIELDS} fields at a time. Please delete some and try again."
raise serializers.ValidationError(detail)
query_info["field"] = fields
diff --git a/src/sentry/data_export/endpoints/data_export_details.py b/src/sentry/data_export/endpoints/data_export_details.py
index 9bd42729abcd70..cf3b5e38b83611 100644
--- a/src/sentry/data_export/endpoints/data_export_details.py
+++ b/src/sentry/data_export/endpoints/data_export_details.py
@@ -49,5 +49,5 @@ def download(self, data_export):
iter(lambda: raw_file.read(4096), b""), content_type="text/csv"
)
response["Content-Length"] = file.size
- response["Content-Disposition"] = 'attachment; filename="{}"'.format(file.name)
+ response["Content-Disposition"] = f'attachment; filename="{file.name}"'
return response
diff --git a/src/sentry/data_export/models.py b/src/sentry/data_export/models.py
index 61cffec715d8b0..a8f76e784bc77e 100644
--- a/src/sentry/data_export/models.py
+++ b/src/sentry/data_export/models.py
@@ -59,7 +59,7 @@ def file_name(self):
date = self.date_added.strftime("%Y-%B-%d")
export_type = ExportQueryType.as_str(self.query_type)
# Example: Discover_2020-July-21_27.csv
- return "{}_{}_{}.csv".format(export_type, date, self.id)
+ return f"{export_type}_{date}_{self.id}.csv"
@staticmethod
def format_date(date):
diff --git a/src/sentry/data_export/processors/issues_by_tag.py b/src/sentry/data_export/processors/issues_by_tag.py
index acd403ef7e64e8..a55820851d2ca0 100644
--- a/src/sentry/data_export/processors/issues_by_tag.py
+++ b/src/sentry/data_export/processors/issues_by_tag.py
@@ -59,7 +59,7 @@ def get_header_fields(key):
@staticmethod
def get_lookup_key(key):
- return str("sentry:{}".format(key)) if tagstore.is_reserved_key(key) else key
+ return str(f"sentry:{key}") if tagstore.is_reserved_key(key) else key
@staticmethod
def get_eventuser_callback(project_id):
diff --git a/src/sentry/datascrubbing.py b/src/sentry/datascrubbing.py
index 9ec6c2414637df..217fb08bb8943a 100644
--- a/src/sentry/datascrubbing.py
+++ b/src/sentry/datascrubbing.py
@@ -113,7 +113,7 @@ def _merge_pii_configs(prefixes_and_configs):
rules = partial_config.get("rules") or {}
for rule_name, rule in rules.items():
- prefixed_rule_name = "{}{}".format(prefix, rule_name)
+ prefixed_rule_name = f"{prefix}{rule_name}"
merged_config.setdefault("rules", {})[
prefixed_rule_name
] = _prefix_rule_references_in_rule(rules, rule, prefix)
@@ -125,7 +125,7 @@ def _merge_pii_configs(prefixes_and_configs):
for application in applications:
if application in rules:
- prefixed_rule_name = "{}{}".format(prefix, application)
+ prefixed_rule_name = f"{prefix}{application}"
merged_applications.append(prefixed_rule_name)
else:
merged_applications.append(application)
@@ -152,7 +152,7 @@ def _prefix_rule_references_in_rule(custom_rules, rule_def, prefix):
if rule_def.get("type") == "multiple" and rule_def.get("rules"):
rule_def = copy.deepcopy(rule_def)
rule_def["rules"] = list(
- "{}{}".format(prefix, x) if x in custom_rules else x for x in rule_def["rules"]
+ f"{prefix}{x}" if x in custom_rules else x for x in rule_def["rules"]
)
elif (
rule_def.get("type") == "multiple"
diff --git a/src/sentry/deletions/base.py b/src/sentry/deletions/base.py
index 5f510322de79bb..36b34c350cf652 100644
--- a/src/sentry/deletions/base.py
+++ b/src/sentry/deletions/base.py
@@ -13,7 +13,7 @@ def __init__(self, params, task):
self.params = params
def __repr__(self):
- return "<%s: task=%s params=%s>" % (type(self), self.task, self.params)
+ return "<{}: task={} params={}>".format(type(self), self.task, self.params)
class ModelRelation(BaseRelation):
@@ -41,7 +41,7 @@ def __init__(
self.chunk_size = chunk_size if chunk_size is not None else self.DEFAULT_CHUNK_SIZE
def __repr__(self):
- return "<%s: skip_models=%s transaction_id=%s actor_id=%s>" % (
+ return "<{}: skip_models={} transaction_id={} actor_id={}>".format(
type(self),
self.skip_models,
self.transaction_id,
@@ -145,7 +145,7 @@ def __init__(self, manager, model, query, query_limit=None, order_by=None, **kwa
self.order_by = order_by
def __repr__(self):
- return "<%s: model=%s query=%s order_by=%s transaction_id=%s actor_id=%s>" % (
+ return "<{}: model={} query={} order_by={} transaction_id={} actor_id={}>".format(
type(self),
self.model,
self.query,
@@ -181,13 +181,7 @@ def chunk(self, num_shards=None, shard_id=None):
if num_shards:
assert num_shards > 1
assert shard_id < num_shards
- queryset = queryset.extra(
- where=[
- "id %% {num_shards} = {shard_id}".format(
- num_shards=num_shards, shard_id=shard_id
- )
- ]
- )
+ queryset = queryset.extra(where=[f"id %% {num_shards} = {shard_id}"])
queryset = list(queryset[:query_limit])
if not queryset:
diff --git a/src/sentry/digests/__init__.py b/src/sentry/digests/__init__.py
index 764c299b65805d..316dd231d3c239 100644
--- a/src/sentry/digests/__init__.py
+++ b/src/sentry/digests/__init__.py
@@ -26,4 +26,4 @@ def datetime(self):
def get_option_key(plugin, option):
assert option in OPTIONS
- return "digests:{}:{}".format(plugin, option)
+ return f"digests:{plugin}:{option}"
diff --git a/src/sentry/digests/backends/redis.py b/src/sentry/digests/backends/redis.py
index 90432b349bf345..f2404d0ada5a74 100644
--- a/src/sentry/digests/backends/redis.py
+++ b/src/sentry/digests/backends/redis.py
@@ -89,10 +89,10 @@ def validate(self):
check_cluster_versions(self.cluster, Version((2, 8, 9)), label="Digests")
def _get_connection(self, key):
- return self.cluster.get_local_client_for_key("{}:t:{}".format(self.namespace, key))
+ return self.cluster.get_local_client_for_key(f"{self.namespace}:t:{key}")
def _get_timeline_lock(self, key, duration):
- lock_key = "{}:t:{}".format(self.namespace, key)
+ lock_key = f"{self.namespace}:t:{key}"
return self.locks.get(lock_key, duration=duration, routing_key=lock_key)
def add(self, key, record, increment_delay=None, maximum_delay=None, timestamp=None):
diff --git a/src/sentry/discover/endpoints/serializers.py b/src/sentry/discover/endpoints/serializers.py
index 45d8f7e336d4bb..e8d1c2ee9f4421 100644
--- a/src/sentry/discover/endpoints/serializers.py
+++ b/src/sentry/discover/endpoints/serializers.py
@@ -93,9 +93,7 @@ def validate_aggregations(self, value):
if not requested_functions.issubset(valid_functions):
invalid_functions = ", ".join(requested_functions - valid_functions)
- raise serializers.ValidationError(
- "Invalid aggregate function - {}".format(invalid_functions)
- )
+ raise serializers.ValidationError(f"Invalid aggregate function - {invalid_functions}")
return value
@@ -125,7 +123,7 @@ def get_condition(self, condition):
value = condition[2]
if isinstance(value, str):
- value = "'{}'".format(value)
+ value = f"'{value}'"
bool_value = 1 if condition[1] == "=" else 0
@@ -215,7 +213,7 @@ def validate(self, data):
try:
get_filter(query["query"], self.context["params"])
except InvalidSearchQuery as err:
- raise serializers.ValidationError("Cannot save invalid query: {}".format(err))
+ raise serializers.ValidationError(f"Cannot save invalid query: {err}")
return {
"name": data["name"],
@@ -246,6 +244,6 @@ def validate(self, data):
# Limit the number of key transactions
if KeyTransaction.objects.filter(**base_filter).count() >= MAX_KEY_TRANSACTIONS:
raise serializers.ValidationError(
- "At most {} Key Transactions can be added".format(MAX_KEY_TRANSACTIONS)
+ f"At most {MAX_KEY_TRANSACTIONS} Key Transactions can be added"
)
return data
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 3c42b533922b39..eabf2aa61e3eee 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -443,7 +443,7 @@ def save(self, project_id, raw=False, assume_normalized=False, start_time=None,
_materialize_event_metrics(jobs)
for attachment in attachments:
- key = "bytes.stored.%s" % (attachment.type,)
+ key = f"bytes.stored.{attachment.type}"
old_bytes = job["event_metrics"].get(key) or 0
job["event_metrics"][key] = old_bytes + attachment.size
@@ -849,7 +849,7 @@ def _get_event_user_impl(project, data, metrics_tags):
if not euser.hash:
return
- cache_key = "euserid:1:{}:{}".format(project.id, euser.hash)
+ cache_key = f"euserid:1:{project.id}:{euser.hash}"
euser_id = cache.get(cache_key)
if euser_id is None:
metrics_tags["cache_hit"] = "false"
@@ -1398,7 +1398,7 @@ def _materialize_event_metrics(jobs):
for metric_name in ("flag.processing.error", "flag.processing.fatal"):
if event_metrics.get(metric_name):
- metrics.incr("event_manager.save.event_metrics.%s" % (metric_name,))
+ metrics.incr(f"event_manager.save.event_metrics.{metric_name}")
job["event_metrics"] = event_metrics
diff --git a/src/sentry/eventstore/models.py b/src/sentry/eventstore/models.py
index aca04f80bbcb20..0d974567664e31 100644
--- a/src/sentry/eventstore/models.py
+++ b/src/sentry/eventstore/models.py
@@ -262,7 +262,7 @@ def generate_node_id(cls, project_id, event_id):
be saved under this key in nodestore so it can be retrieved using the
same generated id when we only have project_id and event_id.
"""
- return md5("{}:{}".format(project_id, event_id).encode("utf-8")).hexdigest()
+ return md5(f"{project_id}:{event_id}".encode("utf-8")).hexdigest()
# TODO We need a better way to cache these properties. functools
# doesn't quite do the trick as there is a reference bug with unsaved
@@ -469,11 +469,11 @@ def search_message(self):
for value in event_metadata.values():
value_u = force_text(value, errors="replace")
if value_u not in message:
- message = "{} {}".format(message, value_u)
+ message = f"{message} {value_u}"
if culprit and culprit not in message:
culprit_u = force_text(culprit, errors="replace")
- message = "{} {}".format(message, culprit_u)
+ message = f"{message} {culprit_u}"
return trim(message.strip(), settings.SENTRY_MAX_MESSAGE_LENGTH)
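
Because `generate_node_id` derives the key purely from `project_id` and `event_id`, the nodestore key can be recomputed later without ever being stored. A quick worked check of the formula:

import hashlib


def generate_node_id(project_id, event_id):
    # Same formula as above: md5 of "project_id:event_id".
    return hashlib.md5(f"{project_id}:{event_id}".encode("utf-8")).hexdigest()


# Deterministic: the same pair always yields the same node id.
assert generate_node_id(42, "a" * 32) == generate_node_id(42, "a" * 32)
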
diff --git a/src/sentry/eventstore/snuba/backend.py b/src/sentry/eventstore/snuba/backend.py
index fc16387bde0bd6..ba1ddb7aa46acd 100644
--- a/src/sentry/eventstore/snuba/backend.py
+++ b/src/sentry/eventstore/snuba/backend.py
@@ -14,7 +14,7 @@
PROJECT_ID = Columns.PROJECT_ID.value.alias
TIMESTAMP = Columns.TIMESTAMP.value.alias
-DESC_ORDERING = ["-{}".format(TIMESTAMP), "-{}".format(EVENT_ID)]
+DESC_ORDERING = [f"-{TIMESTAMP}", f"-{EVENT_ID}"]
ASC_ORDERING = [TIMESTAMP, EVENT_ID]
DEFAULT_LIMIT = 100
DEFAULT_OFFSET = 0
diff --git a/src/sentry/eventstream/kafka/backend.py b/src/sentry/eventstream/kafka/backend.py
index 545042bd394295..43d5c2af846e6e 100644
--- a/src/sentry/eventstream/kafka/backend.py
+++ b/src/sentry/eventstream/kafka/backend.py
@@ -98,7 +98,9 @@ def commit(partitions):
errors = [i for i in results if i.error is not None]
if errors:
raise Exception(
- "Failed to commit %s/%s partitions: %r" % (len(errors), len(partitions), errors)
+ "Failed to commit {}/{} partitions: {!r}".format(
+ len(errors), len(partitions), errors
+ )
)
return results
@@ -111,7 +113,7 @@ def on_assign(consumer, partitions):
updated_offset = None
elif i.offset < 0:
raise Exception(
- "Received unexpected negative offset during partition assignment: %r" % (i,)
+ f"Received unexpected negative offset during partition assignment: {i!r}"
)
else:
updated_offset = i.offset
diff --git a/src/sentry/eventstream/kafka/consumer.py b/src/sentry/eventstream/kafka/consumer.py
index e9b0c780767c1e..d8248aad19d71a 100644
--- a/src/sentry/eventstream/kafka/consumer.py
+++ b/src/sentry/eventstream/kafka/consumer.py
@@ -208,7 +208,7 @@ def __start_commit_log_consumer(self, timeout=None):
functools.partial(
run_commit_log_consumer,
cluster_name=self.cluster_name,
- consumer_group="{}:sync:{}".format(self.consumer_group, uuid.uuid1().hex),
+ consumer_group=f"{self.consumer_group}:sync:{uuid.uuid1().hex}",
commit_log_topic=self.commit_log_topic,
synchronize_commit_group=self.synchronize_commit_group,
partition_state_manager=self.__partition_state_manager,
@@ -261,7 +261,7 @@ def __on_partition_state_change(
elif current_state is SynchronizedPartitionState.LOCAL_BEHIND:
self.__consumer.resume([TopicPartition(topic, partition, current_offsets.local)])
else:
- raise NotImplementedError("Unexpected partition state: %s" % (current_state,))
+ raise NotImplementedError(f"Unexpected partition state: {current_state}")
def subscribe(self, topics, on_assign=None, on_revoke=None):
"""
diff --git a/src/sentry/eventstream/kafka/protocol.py b/src/sentry/eventstream/kafka/protocol.py
index 7f49f30e429ec6..1353192647bb5f 100644
--- a/src/sentry/eventstream/kafka/protocol.py
+++ b/src/sentry/eventstream/kafka/protocol.py
@@ -53,7 +53,7 @@ def handle_message(operation, *data):
logger.debug("Skipping unsupported operation: %s", operation)
return None
else:
- raise UnexpectedOperation("Received unexpected operation type: {!r}".format(operation))
+ raise UnexpectedOperation(f"Received unexpected operation type: {operation!r}")
return handle_message
@@ -109,7 +109,7 @@ def get_task_kwargs_for_message(value):
handler = version_handlers[int(version)]
except (ValueError, KeyError):
raise InvalidVersion(
- "Received event payload with unexpected version identifier: {}".format(version)
+ f"Received event payload with unexpected version identifier: {version}"
)
return handler(*payload[1:])
diff --git a/src/sentry/eventstream/kafka/state.py b/src/sentry/eventstream/kafka/state.py
index 40435d0d278fb9..f5ba30f67df3b8 100644
--- a/src/sentry/eventstream/kafka/state.py
+++ b/src/sentry/eventstream/kafka/state.py
@@ -116,9 +116,7 @@ def set_local_offset(self, topic, partition, local_offset):
and updated_state not in self.transitions[previous_state]
):
raise InvalidStateTransition(
- "Unexpected state transition for {}/{} from {} to {}".format(
- topic, partition, previous_state, updated_state
- )
+ f"Unexpected state transition for {topic}/{partition} from {previous_state} to {updated_state}"
)
self.partitions[(topic, partition)] = (updated_state, updated_offsets)
if previous_state is not updated_state:
@@ -163,9 +161,7 @@ def set_remote_offset(self, topic, partition, remote_offset):
and updated_state not in self.transitions[previous_state]
):
raise InvalidStateTransition(
- "Unexpected state transition for {}/{} from {} to {}".format(
- topic, partition, previous_state, updated_state
- )
+ f"Unexpected state transition for {topic}/{partition} from {previous_state} to {updated_state}"
)
self.partitions[(topic, partition)] = (updated_state, updated_offsets)
if previous_state is not updated_state:
diff --git a/src/sentry/eventstream/snuba.py b/src/sentry/eventstream/snuba.py
index 7e5f7f2e16e8a1..a76f2982415f13 100644
--- a/src/sentry/eventstream/snuba.py
+++ b/src/sentry/eventstream/snuba.py
@@ -293,9 +293,9 @@ def _send(
for dataset in datasets:
resp = snuba._snuba_pool.urlopen(
"POST",
- "/tests/{}/eventstream".format(dataset),
+ f"/tests/{dataset}/eventstream",
body=json.dumps(data),
- headers={"X-Sentry-{}".format(k): v for k, v in headers.items()},
+ headers={f"X-Sentry-{k}": v for k, v in headers.items()},
)
if resp.status != 200:
raise snuba.SnubaError("HTTP %s response from Snuba!" % resp.status)
diff --git a/src/sentry/filestore/gcs.py b/src/sentry/filestore/gcs.py
index 72ba84a952af06..1ca63ff3724daa 100644
--- a/src/sentry/filestore/gcs.py
+++ b/src/sentry/filestore/gcs.py
@@ -139,11 +139,9 @@ def __init__(self, download_url, *args, **kwargs):
def _get_download_url(self, *args, **kwargs):
# media_link is for public objects; we completely ignore it.
- download_url = "{download_url}/download/storage/v1{path}?alt=media".format(
- download_url=self.download_url, path=self.path
- )
+ download_url = f"{self.download_url}/download/storage/v1{self.path}?alt=media"
if self.generation is not None:
- download_url += "&generation={:d}".format(self.generation)
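+            # The ":d" format spec carries over verbatim inside the f-string
+            # and still requires an integer generation value.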
+ download_url += f"&generation={self.generation:d}"
return download_url
@@ -324,7 +322,7 @@ def _get_blob(self, name):
blob = self.bucket.get_blob(name)
if blob is None:
- raise NotFound("File does not exist: {}".format(name))
+ raise NotFound(f"File does not exist: {name}")
return blob
diff --git a/src/sentry/filestore/s3.py b/src/sentry/filestore/s3.py
index ec839df36df555..0d789891ffcff4 100644
--- a/src/sentry/filestore/s3.py
+++ b/src/sentry/filestore/s3.py
@@ -633,7 +633,7 @@ def url(self, name, parameters=None, expire=None):
# TODO: Handle force_http=not self.secure_urls like in s3boto
name = self._normalize_name(self._clean_name(name))
if self.custom_domain:
- return "%s//%s/%s" % (self.url_protocol, self.custom_domain, filepath_to_uri(name))
+ return "{}//{}/{}".format(self.url_protocol, self.custom_domain, filepath_to_uri(name))
if expire is None:
expire = self.querystring_expire
diff --git a/src/sentry/grouping/api.py b/src/sentry/grouping/api.py
index cbdb8549f63aff..8ec0cbbe62acaa 100644
--- a/src/sentry/grouping/api.py
+++ b/src/sentry/grouping/api.py
@@ -57,7 +57,7 @@ def _get_project_enhancements_config(project):
from sentry.utils.hashlib import md5_text
cache_key = (
- "grouping-enhancements:" + md5_text("%s|%s" % (enhancements_base, enhancements)).hexdigest()
+ "grouping-enhancements:" + md5_text(f"{enhancements_base}|{enhancements}").hexdigest()
)
rv = cache.get(cache_key)
if rv is not None:
@@ -160,8 +160,8 @@ def _get_calculated_grouping_variants_for_event(event, config):
if component.contributes:
winning_strategy = strategy.name
variants_hint = "/".join(sorted(k for k, v in rv.items() if v.contributes))
- precedence_hint = "%s take%s precedence" % (
- "%s of %s" % (strategy.name, variants_hint)
+ precedence_hint = "{} take{} precedence".format(
+ f"{strategy.name} of {variants_hint}"
if variant != "default"
else strategy.name,
"" if strategy.name.endswith("s") else "s",
diff --git a/src/sentry/grouping/component.py b/src/sentry/grouping/component.py
index 776b70811f1935..cdbc893f4aa31f 100644
--- a/src/sentry/grouping/component.py
+++ b/src/sentry/grouping/component.py
@@ -168,9 +168,4 @@ def as_dict(self):
return rv
def __repr__(self):
- return "GroupingComponent(%r, hint=%r, contributes=%r, values=%r)" % (
- self.id,
- self.hint,
- self.contributes,
- self.values,
- )
+ return f"GroupingComponent({self.id!r}, hint={self.hint!r}, contributes={self.contributes!r}, values={self.values!r})"
diff --git a/src/sentry/grouping/enhancer.py b/src/sentry/grouping/enhancer.py
index fa4581e3f7f3b2..ea558e9aa658cd 100644
--- a/src/sentry/grouping/enhancer.py
+++ b/src/sentry/grouping/enhancer.py
@@ -121,7 +121,7 @@ def __init__(self, key, pattern, negated=False):
@property
def description(self):
- return "%s:%s" % (
+ return "{}:{}".format(
self.key,
self.pattern.split() != [self.pattern] and '"%s"' % self.pattern or self.pattern,
)
@@ -224,7 +224,7 @@ def __init__(self, key, flag, range):
self.range = range
def __str__(self):
- return "%s%s%s" % (
+ return "{}{}{}".format(
{"up": "^", "down": "v"}.get(self.range, ""),
self.flag and "+" or "-",
self.key,
@@ -263,7 +263,7 @@ def apply_modifications_to_frame(self, frames, idx):
def update_frame_components_contributions(self, components, frames, idx, rule=None):
rule_hint = "stack trace rule"
if rule:
- rule_hint = "%s (%s)" % (rule_hint, rule.matcher_description)
+ rule_hint = f"{rule_hint} ({rule.matcher_description})"
sliced_components = self._slice_to_range(components, idx)
sliced_frames = self._slice_to_range(frames, idx)
@@ -271,13 +271,13 @@ def update_frame_components_contributions(self, components, frames, idx, rule=No
if self.key == "group" and self.flag != component.contributes:
component.update(
contributes=self.flag,
- hint="%s by %s" % (self.flag and "un-ignored" or "ignored", rule_hint),
+ hint="{} by {}".format(self.flag and "un-ignored" or "ignored", rule_hint),
)
# The in app flag was set by `apply_modifications_to_frame`
# but we want to add a hint if there is none yet.
elif self.key == "app" and self._in_app_changed(frame, component):
component.update(
- hint="marked %s by %s" % (self.flag and "in-app" or "out of app", rule_hint)
+ hint="marked {} by {}".format(self.flag and "in-app" or "out of app", rule_hint)
)
@@ -289,7 +289,7 @@ def __init__(self, var, value):
self.value = value
def __str__(self):
- return "%s=%s" % (self.var, self.value)
+ return f"{self.var}={self.value}"
def _to_config_structure(self):
return [self.var, self.value]
@@ -320,7 +320,7 @@ def add_to_hint(self, hint, var):
description = self.describe_var_rule(var)
if description is None:
return hint
- return "%s by stack trace rule (%s)" % (hint, description)
+ return f"{hint} by stack trace rule ({description})"
class Enhancements:
@@ -464,7 +464,7 @@ def from_config_string(self, s, bases=None, id=None):
if len(context) == 33:
context = context[:-1] + "..."
raise InvalidEnhancerConfig(
- 'Invalid syntax near "%s" (line %s, column %s)' % (context, e.line(), e.column())
+ f'Invalid syntax near "{context}" (line {e.line()}, column {e.column()})'
)
return EnhancmentsVisitor(bases, id).visit(tree)
@@ -478,7 +478,7 @@ def __init__(self, matchers, actions):
def matcher_description(self):
rv = " ".join(x.description for x in self.matchers)
for action in self.actions:
- rv = "%s %s" % (rv, action)
+ rv = f"{rv} {action}"
return rv
def as_dict(self):
diff --git a/src/sentry/grouping/fingerprinting.py b/src/sentry/grouping/fingerprinting.py
index 128a05b41da7d9..9c5585e10a2749 100644
--- a/src/sentry/grouping/fingerprinting.py
+++ b/src/sentry/grouping/fingerprinting.py
@@ -212,7 +212,7 @@ def from_config_string(self, s):
if len(context) == 33:
context = context[:-1] + "..."
raise InvalidFingerprintingConfig(
- 'Invalid syntax near "%s" (line %s, column %s)' % (context, e.line(), e.column())
+ f'Invalid syntax near "{context}" (line {e.line()}, column {e.column()})'
)
return FingerprintingVisitor().visit(tree)
@@ -338,7 +338,7 @@ def _from_config_structure(cls, obj):
@property
def text(self):
- return '%s%s:"%s"' % (
+ return '{}{}:"{}"'.format(
self.negated and "!" or "",
self.key,
self.pattern,
@@ -394,7 +394,7 @@ def text(self):
% (
" ".join(x.text for x in self.matchers),
"".join(x for x in self.fingerprint),
- " ".join('%s="%s"' % (k, v) for (k, v) in sorted(self.attributes.items())),
+ " ".join(f'{k}="{v}"' for (k, v) in sorted(self.attributes.items())),
)
).rstrip()
diff --git a/src/sentry/grouping/strategies/__init__.py b/src/sentry/grouping/strategies/__init__.py
index ac5219fd5b7e22..22352d2e6c8552 100644
--- a/src/sentry/grouping/strategies/__init__.py
+++ b/src/sentry/grouping/strategies/__init__.py
@@ -10,7 +10,7 @@ def _import_all():
"configurations",
]
for module in strategy_modules:
- __import__("%s.%s" % (__name__, module))
+ __import__(f"{__name__}.{module}")
_import_all()
diff --git a/src/sentry/grouping/strategies/base.py b/src/sentry/grouping/strategies/base.py
index dd7b06830270e2..7f01fb8f83ef7f 100644
--- a/src/sentry/grouping/strategies/base.py
+++ b/src/sentry/grouping/strategies/base.py
@@ -68,7 +68,7 @@ def __init__(self, id, name, interfaces, variants, score, func):
self.variant_processor_func = None
def __repr__(self):
- return "<%s id=%r variants=%r>" % (self.__class__.__name__, self.id, self.variants)
+ return f"<{self.__class__.__name__} id={self.id!r} variants={self.variants!r}>"
def _invoke(self, func, *args, **kwargs):
# We forcefully override strategy here. This lets a strategy
@@ -180,7 +180,7 @@ def __init__(self, enhancements=None, **extra):
self.enhancements = enhancements
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.id)
+ return f"<{self.__class__.__name__} {self.id!r}>"
def iter_strategies(self):
"""Iterates over all strategies by highest score to lowest."""
@@ -247,7 +247,7 @@ class NewStrategyConfiguration(StrategyConfiguration):
for strategy_id in strategies or {}:
strategy = lookup_strategy(strategy_id)
if strategy.score is None:
- raise RuntimeError("Unscored strategy %s added to %s" % (strategy_id, id))
+ raise RuntimeError(f"Unscored strategy {strategy_id} added to {id}")
for old_id in by_class.get(strategy.strategy_class) or ():
NewStrategyConfiguration.strategies.pop(old_id, None)
NewStrategyConfiguration.strategies[strategy_id] = strategy
diff --git a/src/sentry/grouping/strategies/similarity_encoders.py b/src/sentry/grouping/strategies/similarity_encoders.py
index c3188e80aaf5ca..46d9c9f7cbf567 100644
--- a/src/sentry/grouping/strategies/similarity_encoders.py
+++ b/src/sentry/grouping/strategies/similarity_encoders.py
@@ -2,7 +2,7 @@
def text_shingle_encoder(n):
- label = "character-{}-shingle".format(n)
+ label = f"character-{n}-shingle"
def inner(id, value):
yield (id, label), text_shingle(n, value)
diff --git a/src/sentry/grouping/variants.py b/src/sentry/grouping/variants.py
index daf830acc68a48..e8052cf1e058bf 100644
--- a/src/sentry/grouping/variants.py
+++ b/src/sentry/grouping/variants.py
@@ -27,7 +27,7 @@ def encode_for_similarity(self):
raise NotImplementedError()
def __repr__(self):
- return "<%s %r (%s)>" % (self.__class__.__name__, self.get_hash(), self.type)
+ return f"<{self.__class__.__name__} {self.get_hash()!r} ({self.type})>"
class ChecksumVariant(BaseVariant):
diff --git a/src/sentry/http.py b/src/sentry/http.py
index fe6b1d6e57ba17..0a7d3b8739d38f 100644
--- a/src/sentry/http.py
+++ b/src/sentry/http.py
@@ -148,7 +148,7 @@ def fetch_file(
# lock down domains that are problematic
if domain_lock_enabled:
domain = urlparse(url).netloc
- domain_key = "source:blacklist:v2:%s" % (md5_text(domain).hexdigest(),)
+ domain_key = "source:blacklist:v2:{}".format(md5_text(domain).hexdigest())
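+        # Kept as .format() rather than an f-string; this change does not
+        # promote every %-format site uniformly.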
domain_result = cache.get(domain_key)
if domain_result:
domain_result["url"] = url
diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py
index 0a635667361fa7..66971ee7679141 100644
--- a/src/sentry/incidents/action_handlers.py
+++ b/src/sentry/incidents/action_handlers.py
@@ -82,7 +82,7 @@ def build_message(self, context, status, user_id):
),
template="sentry/emails/incidents/trigger.txt",
html_template="sentry/emails/incidents/trigger.html",
- type="incident.alert_rule_{}".format(display.lower()),
+ type=f"incident.alert_rule_{display.lower()}",
context=context,
headers={"X-SMTPAPI": json.dumps({"category": "metric_alert_email"})},
)
diff --git a/src/sentry/incidents/endpoints/serializers.py b/src/sentry/incidents/endpoints/serializers.py
index 06d28a4302854a..1c4f8bb5878f1a 100644
--- a/src/sentry/incidents/endpoints/serializers.py
+++ b/src/sentry/incidents/endpoints/serializers.py
@@ -334,7 +334,7 @@ def validate_query(self, query):
for query_term in query_terms:
if query_term in unsupported_queries:
raise serializers.ValidationError(
- "Unsupported Query: We do not currently support the {} query".format(query_term)
+ f"Unsupported Query: We do not currently support the {query_term} query"
)
return query
@@ -502,9 +502,7 @@ def _validate_critical_warning_triggers(self, threshold_type, critical, warning)
if alert_op(critical["alert_threshold"], warning["alert_threshold"]):
raise serializers.ValidationError(
- "Critical trigger must have an alert threshold {} warning trigger".format(
- threshold_type
- )
+ f"Critical trigger must have an alert threshold {threshold_type} warning trigger"
)
def create(self, validated_data):
diff --git a/src/sentry/incidents/models.py b/src/sentry/incidents/models.py
index 9d414b7842a51e..7cf4978c0f31e0 100644
--- a/src/sentry/incidents/models.py
+++ b/src/sentry/incidents/models.py
@@ -588,7 +588,7 @@ def build_handler(self, incident, project):
if type in self._type_registrations:
return self._type_registrations[type].handler(self, incident, project)
else:
- metrics.incr("alert_rule_trigger.unhandled_type.{}".format(self.type))
+ metrics.incr(f"alert_rule_trigger.unhandled_type.{self.type}")
def fire(self, incident, project, metric_value):
handler = self.build_handler(incident, project)
diff --git a/src/sentry/incidents/tasks.py b/src/sentry/incidents/tasks.py
index 35cb94efb12a4f..8aad23203d535e 100644
--- a/src/sentry/incidents/tasks.py
+++ b/src/sentry/incidents/tasks.py
@@ -66,7 +66,7 @@ def send_subscriber_notifications(activity_id):
def generate_incident_activity_email(activity, user):
incident = activity.incident
return MessageBuilder(
- subject="Activity on Alert {} (#{})".format(incident.title, incident.identifier),
+ subject=f"Activity on Alert {incident.title} (#{incident.identifier})",
template="sentry/emails/incidents/activity.txt",
html_template="sentry/emails/incidents/activity.html",
type="incident.activity",
@@ -78,13 +78,13 @@ def build_activity_context(activity, user):
if activity.type == IncidentActivityType.COMMENT.value:
action = "left a comment"
else:
- action = "changed status from %s to %s" % (
+ action = "changed status from {} to {}".format(
INCIDENT_STATUS[IncidentStatus(int(activity.previous_value))],
INCIDENT_STATUS[IncidentStatus(int(activity.value))],
)
incident = activity.incident
- action = "%s on alert %s (#%s)" % (action, incident.title, incident.identifier)
+ action = f"{action} on alert {incident.title} (#{incident.identifier})"
return {
"user_name": activity.user.name if activity.user else "Sentry",
diff --git a/src/sentry/ingest/inbound_filters.py b/src/sentry/ingest/inbound_filters.py
index 2d5a1146063b23..1f5534e89ca6c4 100644
--- a/src/sentry/ingest/inbound_filters.py
+++ b/src/sentry/ingest/inbound_filters.py
@@ -83,7 +83,7 @@ def set_filter_state(filter_id, project, state):
option_val = set(state["subfilters"])
ProjectOption.objects.set_value(
- project=project, key="filters:{}".format(filter_id), value=option_val
+ project=project, key=f"filters:{filter_id}", value=option_val
)
return option_val == "1" if option_val in ("0", "1") else option_val
@@ -95,7 +95,7 @@ def set_filter_state(filter_id, project, state):
ProjectOption.objects.set_value(
project=project,
- key="filters:{}".format(filter_id),
+ key=f"filters:{filter_id}",
value="1" if state.get("active", False) else "0",
)
@@ -120,7 +120,7 @@ def get_filter_state(filter_id, project):
if flt is None:
raise FilterNotRegistered(filter_id)
- filter_state = ProjectOption.objects.get_value(project=project, key="filters:{}".format(flt.id))
+ filter_state = ProjectOption.objects.get_value(project=project, key=f"filters:{flt.id}")
if filter_state is None:
raise ValueError(
diff --git a/src/sentry/ingest/ingest_consumer.py b/src/sentry/ingest/ingest_consumer.py
index 9a28e01e28b707..61533135d2b89e 100644
--- a/src/sentry/ingest/ingest_consumer.py
+++ b/src/sentry/ingest/ingest_consumer.py
@@ -61,7 +61,7 @@ def _flush_batch(self, batch):
elif message_type == "user_report":
other_messages.append((process_userreport, message))
else:
- raise ValueError("Unknown message type: {}".format(message_type))
+ raise ValueError(f"Unknown message type: {message_type}")
metrics.incr(
"ingest_consumer.flush.messages_seen", tags={"message_type": message_type}
)
@@ -122,7 +122,7 @@ def _do_process_event(message, projects):
# This code has been ripped from the old python store endpoint. We're
# keeping it around because it does provide some protection against
# reprocessing good events if a single consumer is in a restart loop.
- deduplication_key = "ev:{}:{}".format(project_id, event_id)
+ deduplication_key = f"ev:{project_id}:{event_id}"
if cache.get(deduplication_key) is not None:
logger.warning(
"pre-process-forwarder detected a duplicated event" " with id:%s for project:%s.",
diff --git a/src/sentry/interfaces/base.py b/src/sentry/interfaces/base.py
index 73fa2d07ca5a59..12c73b9bc470db 100644
--- a/src/sentry/interfaces/base.py
+++ b/src/sentry/interfaces/base.py
@@ -21,12 +21,12 @@ def get_interface(name):
name = get_canonical_name(name)
import_path = settings.SENTRY_INTERFACES[name]
except KeyError:
- raise ValueError("Invalid interface name: %s" % (name,))
+ raise ValueError(f"Invalid interface name: {name}")
try:
interface = import_string(import_path)
except Exception:
- raise ValueError("Unable to load interface: %s" % (name,))
+ raise ValueError(f"Unable to load interface: {name}")
return interface
@@ -149,7 +149,7 @@ def to_email_html(self, event, **kwargs):
body = self.to_string(event)
if not body:
return ""
- return "<pre>%s</pre>" % (escape(body),)
+ return "<pre>{}</pre>".format(escape(body))
# deprecated stuff. These were deprecated in late 2018, once
# determined they are unused we can kill them.
diff --git a/src/sentry/interfaces/contexts.py b/src/sentry/interfaces/contexts.py
index 9b2f98f362fed6..fdb60456a807aa 100644
--- a/src/sentry/interfaces/contexts.py
+++ b/src/sentry/interfaces/contexts.py
@@ -75,7 +75,7 @@ def iter_tags(self):
if not field:
yield (self.alias, value)
else:
- yield ("%s.%s" % (self.alias, field), value)
+ yield (f"{self.alias}.{field}", value)
# TODO(dcramer): contexts need to document/describe expected (optional) fields
diff --git a/src/sentry/interfaces/exception.py b/src/sentry/interfaces/exception.py
index 2611b1cc3089b9..bc751162eb9456 100644
--- a/src/sentry/interfaces/exception.py
+++ b/src/sentry/interfaces/exception.py
@@ -433,7 +433,7 @@ def to_string(self, event, is_public=False, **kwargs):
if not exc:
continue
- output.append("{}: {}\n".format(exc.type, exc.value))
+ output.append(f"{exc.type}: {exc.value}\n")
if exc.stacktrace:
output.append(
exc.stacktrace.get_stacktrace(
diff --git a/src/sentry/interfaces/stacktrace.py b/src/sentry/interfaces/stacktrace.py
index 7f93bc17be4b25..36e47d7fe5d3f9 100644
--- a/src/sentry/interfaces/stacktrace.py
+++ b/src/sentry/interfaces/stacktrace.py
@@ -44,9 +44,9 @@ def to_hex_addr(addr):
addr = int(addr[2:], 16)
rv = "0x%x" % int(addr)
else:
- raise ValueError("Unsupported address format %r" % (addr,))
+ raise ValueError(f"Unsupported address format {addr!r}")
if len(rv) > 24:
- raise ValueError("Address too long %r" % (rv,))
+ raise ValueError(f"Address too long {rv!r}")
return rv
diff --git a/src/sentry/interfaces/template.py b/src/sentry/interfaces/template.py
index d5d0fd92ac8416..4eb8e9b0ddb532 100644
--- a/src/sentry/interfaces/template.py
+++ b/src/sentry/interfaces/template.py
@@ -57,7 +57,7 @@ def to_string(self, event, is_public=False, **kwargs):
return "\n".join(result)
def get_traceback(self, event, context):
- result = [event.message, "", 'File "%s", line %s' % (self.filename, self.lineno), ""]
+ result = [event.message, "", f'File "{self.filename}", line {self.lineno}', ""]
result.extend([n[1].strip("\n") if n[1] else "" for n in context])
return "\n".join(result)
diff --git a/src/sentry/lang/java/plugin.py b/src/sentry/lang/java/plugin.py
index f829b135e5eee7..d5acd227c185bb 100644
--- a/src/sentry/lang/java/plugin.py
+++ b/src/sentry/lang/java/plugin.py
@@ -72,7 +72,7 @@ def process_exception(self, exception):
if not ty or not mod:
return False
- key = "%s.%s" % (mod, ty)
+ key = f"{mod}.{ty}"
for view in self.mapping_views:
mapped = view.remap_class(key)
diff --git a/src/sentry/lang/javascript/processor.py b/src/sentry/lang/javascript/processor.py
index 806ef709674e4f..5d4682c3682356 100644
--- a/src/sentry/lang/javascript/processor.py
+++ b/src/sentry/lang/javascript/processor.py
@@ -212,7 +212,7 @@ def discover_sourcemap(result):
def get_release_file_cache_key(release_id, releasefile_ident):
- return "releasefile:v1:%s:%s" % (release_id, releasefile_ident)
+ return f"releasefile:v1:{release_id}:{releasefile_ident}"
def get_release_file_cache_key_meta(release_id, releasefile_ident):
@@ -360,7 +360,7 @@ def fetch_file(url, project=None, release=None, dist=None, allow_scraping=True):
# otherwise, try the web-scraping cache and then the web itself
- cache_key = "source:cache:v4:%s" % (md5_text(url).hexdigest(),)
+ cache_key = "source:cache:v4:{}".format(md5_text(url).hexdigest())
if result is None:
if not allow_scraping or not url.startswith(("http:", "https:")):
diff --git a/src/sentry/lang/native/applecrashreport.py b/src/sentry/lang/native/applecrashreport.py
index f69b161f0bb914..93b8b83ac6e5b0 100644
--- a/src/sentry/lang/native/applecrashreport.py
+++ b/src/sentry/lang/native/applecrashreport.py
@@ -30,7 +30,7 @@ def __str__(self):
return "\n\n".join(rv) + "\n\nEOF"
def _get_meta_header(self):
- return "OS Version: %s %s (%s)\nReport Version: %s" % (
+ return "OS Version: {} {} ({})\nReport Version: {}".format(
get_path(self.context, "os", "name"),
get_path(self.context, "os", "version"),
get_path(self.context, "os", "build"),
@@ -53,7 +53,9 @@ def _get_exception_info(self):
if name or signal:
rv.append(
- "Exception Type: %s%s" % (name or "Unknown", signal and (" (%s)" % signal) or "")
+ "Exception Type: {}{}".format(
+ name or "Unknown", signal and (" (%s)" % signal) or ""
+ )
)
exc_name = get_path(mechanism_meta, "signal", "code_name")
@@ -109,7 +111,7 @@ def get_thread_apple_string(self, thread_info):
thread_name_string = " name: %s" % (thread_name) if thread_name else ""
thread_crashed = thread_info.get("crashed") or is_exception
thread_crashed_thread = " Crashed:" if thread_crashed else ""
- thread_string = "Thread %s%s%s\n" % (thread_id, thread_name_string, thread_crashed_thread)
+ thread_string = f"Thread {thread_id}{thread_name_string}{thread_crashed_thread}\n"
return thread_string + "\n".join(rv)
def _convert_frame_to_apple_string(self, frame, next=None, number=0):
@@ -130,16 +132,16 @@ def _convert_frame_to_apple_string(self, frame, next=None, number=0):
if self.symbolicated:
file = ""
if frame.get("filename") and frame.get("lineno"):
- file = " (%s:%s)" % (
+ file = " ({}:{})".format(
posixpath.basename(frame.get("filename") or NATIVE_UNKNOWN_STRING),
frame["lineno"],
)
- symbol = "%s%s" % (frame.get("function") or NATIVE_UNKNOWN_STRING, file)
+ symbol = "{}{}".format(frame.get("function") or NATIVE_UNKNOWN_STRING, file)
if next and parse_addr(frame.get("instruction_addr")) == parse_addr(
next.get("instruction_addr")
):
symbol = "[inlined] " + symbol
- return "%s%s%s%s%s" % (
+ return "{}{}{}{}{}".format(
str(number).ljust(4, " "),
image_name(frame.get("package") or NATIVE_UNKNOWN_STRING).ljust(32, " "),
hex(instruction_addr).ljust(20, " "),
@@ -167,7 +169,7 @@ def get_binary_images_apple_string(self):
def _convert_debug_meta_to_binary_image_row(self, debug_image):
slide_value = parse_addr(debug_image.get("image_vmaddr", 0))
image_addr = parse_addr(debug_image["image_addr"]) + slide_value
- return "%s - %s %s %s <%s> %s" % (
+ return "{} - {} {} {} <{}> {}".format(
hex(image_addr),
hex(image_addr + debug_image["image_size"] - 1),
image_name(debug_image.get("code_file") or NATIVE_UNKNOWN_STRING),
diff --git a/src/sentry/lang/native/processing.py b/src/sentry/lang/native/processing.py
index eb3a9841c0358f..91cef556123e7f 100644
--- a/src/sentry/lang/native/processing.py
+++ b/src/sentry/lang/native/processing.py
@@ -195,11 +195,11 @@ def _merge_full_response(data, response):
# Extract the crash reason and infos
data_exception = get_path(data, "exception", "values", 0)
if response.get("assertion"):
- data_exception["value"] = "Assertion Error: %s" % (response["assertion"],)
+ data_exception["value"] = "Assertion Error: {}".format(response["assertion"])
elif response.get("crash_details"):
data_exception["value"] = response["crash_details"]
elif response.get("crash_reason"):
- data_exception["value"] = "Fatal Error: %s" % (response["crash_reason"],)
+ data_exception["value"] = "Fatal Error: {}".format(response["crash_reason"])
else:
# We're merging a full response, so there was no initial payload
# submitted. Assuming that this still contains the placeholder, remove
diff --git a/src/sentry/lang/native/symbolicator.py b/src/sentry/lang/native/symbolicator.py
index ebb25647a7aff2..14f78519990964 100644
--- a/src/sentry/lang/native/symbolicator.py
+++ b/src/sentry/lang/native/symbolicator.py
@@ -99,7 +99,7 @@
def _task_id_cache_key_for_event(project_id, event_id):
- return "symbolicator:{1}:{0}".format(project_id, event_id)
+ return f"symbolicator:{event_id}:{project_id}"
class Symbolicator:
@@ -286,7 +286,7 @@ def get_internal_source(project):
).replace("127.0.0.1", "host.docker.internal")
assert internal_url_prefix
- sentry_source_url = "%s%s" % (
+ sentry_source_url = "{}{}".format(
internal_url_prefix.rstrip("/"),
reverse(
"sentry-api-0-dsym-files",
@@ -321,7 +321,9 @@ def normalize_user_source(source):
username = source.pop("username", None)
password = source.pop("password", None)
if username or password:
- auth = base64.b64encode(("%s:%s" % (username or "", password or "")).encode("utf-8"))
+ auth = base64.b64encode(
+ ("{}:{}".format(username or "", password or "")).encode("utf-8")
+ )
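+            # Stays .format(); f"{username or ''}:{password or ''}" would be
+            # equivalent here.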
source["headers"] = {
"authorization": "Basic %s" % auth.decode("ascii"),
}
@@ -351,7 +353,7 @@ def parse_sources(config):
if is_internal_source_id(source["id"]):
raise InvalidSourcesError('Source ids must not start with "sentry:"')
if source["id"] in ids:
- raise InvalidSourcesError("Duplicate source id: %s" % (source["id"],))
+ raise InvalidSourcesError("Duplicate source id: {}".format(source["id"]))
ids.add(source["id"])
return sources
@@ -569,7 +571,7 @@ def upload_applecrashreport(self, report):
)
def query_task(self, task_id):
- task_url = "requests/%s" % (task_id,)
+ task_url = f"requests/{task_id}"
params = {
"timeout": 0, # Only wait when creating, but not when querying tasks
diff --git a/src/sentry/lint/engine.py b/src/sentry/lint/engine.py
index 3542b653b25951..5391c5be9b364d 100644
--- a/src/sentry/lint/engine.py
+++ b/src/sentry/lint/engine.py
@@ -188,9 +188,7 @@ def is_prettier_valid(project_root, prettier_path):
prettier_version = subprocess.check_output([prettier_path, "--version"]).decode("utf8").rstrip()
if prettier_version != package_version:
sys.stderr.write(
- "[sentry.lint] Prettier is out of date: {} (expected {}). Please run `yarn install`.\n".format(
- prettier_version, package_version
- )
+ f"[sentry.lint] Prettier is out of date: {prettier_version} (expected {package_version}). Please run `yarn install`.\n"
)
return False
diff --git a/src/sentry/logging/handlers.py b/src/sentry/logging/handlers.py
index e3b44137b59bb5..7a79b96cae3a24 100644
--- a/src/sentry/logging/handlers.py
+++ b/src/sentry/logging/handlers.py
@@ -54,14 +54,14 @@ class HumanRenderer:
def __call__(self, logger, name, event_dict):
level = event_dict.pop("level")
real_level = level.upper() if isinstance(level, str) else logging.getLevelName(level)
- base = "%s [%s] %s: %s" % (
+ base = "{} [{}] {}: {}".format(
now().strftime("%H:%M:%S"),
real_level,
event_dict.pop("name", "root"),
event_dict.pop("event", ""),
)
join = " ".join(k + "=" + repr(v) for k, v in event_dict.items())
- return "%s%s" % (base, (" (%s)" % join if join else ""))
+ return "{}{}".format(base, (" (%s)" % join if join else ""))
class StructLogHandler(logging.StreamHandler):
diff --git a/src/sentry/mail/activity/base.py b/src/sentry/mail/activity/base.py
index 795e4d60e6a79f..0425b7de829f7e 100644
--- a/src/sentry/mail/activity/base.py
+++ b/src/sentry/mail/activity/base.py
@@ -61,7 +61,7 @@ def get_html_template(self):
return "sentry/emails/activity/generic.html"
def get_project_link(self):
- return absolute_uri("/{}/{}/".format(self.organization.slug, self.project.slug))
+ return absolute_uri(f"/{self.organization.slug}/{self.project.slug}/")
def get_group_link(self):
referrer = self.__class__.__name__
@@ -94,15 +94,15 @@ def get_group_context(self):
}
def get_email_type(self):
- return "notify.activity.{}".format(self.activity.get_type_display())
+ return f"notify.activity.{self.activity.get_type_display()}"
def get_subject(self):
group = self.group
- return "%s - %s" % (group.qualified_short_id, group.title)
+ return f"{group.qualified_short_id} - {group.title}"
def get_subject_with_prefix(self):
- return "{}{}".format(self._get_subject_prefix(), self.get_subject()).encode("utf-8")
+ return f"{self._get_subject_prefix()}{self.get_subject()}".encode("utf-8")
def get_context(self):
description = self.get_description()
diff --git a/src/sentry/mail/activity/new_processing_issues.py b/src/sentry/mail/activity/new_processing_issues.py
index 45fb8716d5dc71..7732625c573383 100644
--- a/src/sentry/mail/activity/new_processing_issues.py
+++ b/src/sentry/mail/activity/new_processing_issues.py
@@ -14,7 +14,7 @@ def summarize_issues(issues):
if "image_path" in issue["data"]:
extra_info = issue["data"]["image_path"].rsplit("/", 1)[-1]
if "image_arch" in issue["data"]:
- extra_info = "%s (%s)" % (extra_info, issue["data"]["image_arch"])
+ extra_info = "{} ({})".format(extra_info, issue["data"]["image_arch"])
rv.append({"message": EventError(msg_d).message, "extra_info": extra_info})
return rv
@@ -37,14 +37,12 @@ def get_context(self):
"issues": self.issues,
"reprocessing_active": self.activity.data["reprocessing_active"],
"info_url": absolute_uri(
- "/settings/{}/projects/{}/processing-issues/".format(
- self.organization.slug, self.project.slug
- )
+ f"/settings/{self.organization.slug}/projects/{self.project.slug}/processing-issues/"
),
}
def get_subject(self):
- return "Processing Issues on {}".format(self.project.slug)
+ return f"Processing Issues on {self.project.slug}"
def get_template(self):
return "sentry/emails/activity/new_processing_issues.txt"
diff --git a/src/sentry/mail/activity/release.py b/src/sentry/mail/activity/release.py
index 4f777568f404c3..7bb60352dfb22a 100644
--- a/src/sentry/mail/activity/release.py
+++ b/src/sentry/mail/activity/release.py
@@ -195,9 +195,7 @@ def get_context(self):
"release": self.release,
"deploy": self.deploy,
"environment": self.environment,
- "setup_repo_link": absolute_uri(
- "/organizations/{}/repos/".format(self.organization.slug)
- ),
+ "setup_repo_link": absolute_uri(f"/organizations/{self.organization.slug}/repos/"),
}
def get_user_context(self, user):
@@ -214,9 +212,7 @@ def get_user_context(self, user):
release_links = [
absolute_uri(
- "/organizations/{}/releases/{}/?project={}".format(
- self.organization.slug, self.release.version, p.id
- )
+ f"/organizations/{self.organization.slug}/releases/{self.release.version}/?project={p.id}"
)
for p in projects
]
@@ -228,7 +224,7 @@ def get_user_context(self, user):
}
def get_subject(self):
- return "Deployed version {} to {}".format(self.release.version, self.environment)
+ return f"Deployed version {self.release.version} to {self.environment}"
def get_template(self):
return "sentry/emails/activity/release.txt"
diff --git a/src/sentry/mail/adapter.py b/src/sentry/mail/adapter.py
index 39a05e052a35f1..fc3051b3bb197d 100644
--- a/src/sentry/mail/adapter.py
+++ b/src/sentry/mail/adapter.py
@@ -123,7 +123,7 @@ def _build_message(
subject = force_text(subject)
msg = MessageBuilder(
- subject="%s%s" % (subject_prefix, subject),
+ subject=f"{subject_prefix}{subject}",
template=template,
html_template=html_template,
body=body,
@@ -257,7 +257,7 @@ def get_send_to_member(self, project, target_identifier):
return {user.id}
def get_send_to_all_in_project(self, project):
- cache_key = "mail:send_to:{}".format(project.pk)
+ cache_key = f"mail:send_to:{project.pk}"
send_to_list = cache.get(cache_key)
if send_to_list is None:
send_to_list = [s for s in self.get_sendable_users(project) if s]
@@ -302,7 +302,7 @@ def notify(self, notification, target_type, target_identifier=None, **kwargs):
rules = []
for rule in notification.rules:
- rule_link = "/organizations/%s/alerts/rules/%s/%s/" % (org.slug, project.slug, rule.id)
+ rule_link = f"/organizations/{org.slug}/alerts/rules/{project.slug}/{rule.id}/"
rules.append((rule.label, rule_link))
@@ -467,9 +467,7 @@ def notify_about_activity(self, activity):
email_cls = emails.get(activity.type)
if not email_cls:
- logger.debug(
- "No email associated with activity type `{}`".format(activity.get_type_display())
- )
+ logger.debug(f"No email associated with activity type `{activity.get_type_display()}`")
return
email = email_cls(activity)
@@ -489,7 +487,7 @@ def handle_user_report(self, payload, project, **kwargs):
context = {
"project": project,
- "project_link": absolute_uri("/{}/{}/".format(project.organization.slug, project.slug)),
+ "project_link": absolute_uri(f"/{project.organization.slug}/{project.slug}/"),
"issue_link": absolute_uri(
"/{}/{}/issues/{}/".format(
project.organization.slug, project.slug, payload["report"]["issue"]["id"]
diff --git a/src/sentry/management/commands/collectstatic.py b/src/sentry/management/commands/collectstatic.py
index 781b9445701524..87a66d0f1720f9 100644
--- a/src/sentry/management/commands/collectstatic.py
+++ b/src/sentry/management/commands/collectstatic.py
@@ -26,8 +26,8 @@ def checksum(file_):
def get_bundle_version(files):
hasher = md5()
for (short, _), sum in zip(files, map(checksum, files)):
- echo("%s %s" % (sum, short))
- hasher.update("{} {}\n".format(sum, short).encode("utf-8"))
+ echo(f"{sum} {short}")
+ hasher.update(f"{sum} {short}\n".encode("utf-8"))
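+        # Byte-for-byte the same "<checksum> <path>\n" input to the digest, so
+        # bundle versions are unchanged. ("sum" shadowing the builtin is
+        # pre-existing.)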
return hasher.hexdigest()
diff --git a/src/sentry/management/commands/create_sample_event.py b/src/sentry/management/commands/create_sample_event.py
index c3d08b0ad06591..dbabaa7fc52738 100644
--- a/src/sentry/management/commands/create_sample_event.py
+++ b/src/sentry/management/commands/create_sample_event.py
@@ -31,6 +31,6 @@ def handle(self, **options):
platform = options["platform"]
event = create_sample_event(project, platform)
if not event:
- raise CommandError("Unable to create an event for platform %r" % (platform,))
+ raise CommandError(f"Unable to create an event for platform {platform!r}")
- self.stdout.write("Event created: %s" % (event.group.get_absolute_url(),))
+ self.stdout.write(f"Event created: {event.group.get_absolute_url()}")
diff --git a/src/sentry/management/commands/generate_reset_password_link.py b/src/sentry/management/commands/generate_reset_password_link.py
index 2950b58f00c2a5..cada96dde662ef 100644
--- a/src/sentry/management/commands/generate_reset_password_link.py
+++ b/src/sentry/management/commands/generate_reset_password_link.py
@@ -32,4 +32,4 @@ def handle(self, username, **options):
password_hash.date_added = timezone.now()
password_hash.set_hash()
password_hash.save()
- echo("{} ({}) - {}".format(user.username, user.email, password_hash.get_absolute_url()))
+ echo(f"{user.username} ({user.email}) - {password_hash.get_absolute_url()}")
diff --git a/src/sentry/management/commands/makemigrations.py b/src/sentry/management/commands/makemigrations.py
index 52268db8657c42..e61e6db1bbca76 100644
--- a/src/sentry/management/commands/makemigrations.py
+++ b/src/sentry/management/commands/makemigrations.py
@@ -45,8 +45,7 @@ def handle(self, *app_labels, **options):
)
result = "\n".join(
- "{}: {}".format(app_label, name)
- for app_label, name in sorted(latest_migration_by_app.items())
+ f"{app_label}: {name}" for app_label, name in sorted(latest_migration_by_app.items())
)
with open(
diff --git a/src/sentry/management/commands/merge_users.py b/src/sentry/management/commands/merge_users.py
index f1454fa94068b9..92b7b403d15f6e 100644
--- a/src/sentry/management/commands/merge_users.py
+++ b/src/sentry/management/commands/merge_users.py
@@ -105,9 +105,7 @@ def handle(self, *usernames, **options):
for user in user_list[1:]:
user.merge_to(primary_user)
- sys.stdout.write(
- "{} was merged into {}\n".format(user.username, primary_user.username)
- )
+ sys.stdout.write(f"{user.username} was merged into {primary_user.username}\n")
if options["delete"]:
for user in user_list[1:]:
diff --git a/src/sentry/management/commands/serve_normalize.py b/src/sentry/management/commands/serve_normalize.py
index 46aac4bd220f85..090f2a281a763b 100644
--- a/src/sentry/management/commands/serve_normalize.py
+++ b/src/sentry/management/commands/serve_normalize.py
@@ -113,7 +113,7 @@ def collect_metrics(self):
metrics.update(usage_dict)
if self.is_linux:
- with open("/proc/{}/status".format(self.pid)) as procfh:
+ with open(f"/proc/{self.pid}/status") as procfh:
metrics["proc"] = procfh.read()
return metrics
@@ -206,12 +206,12 @@ def handle(self, **options):
server_type = "forking"
else:
server_type = "single-threaded"
- self.stdout.write("Server type: %s\n" % (server_type,))
+ self.stdout.write(f"Server type: {server_type}\n")
if socket_file:
self.socket_file = os.path.abspath(socket_file)
self._check_socket_path(socket_file)
- self.stdout.write("Binding to unix socket: %s\n" % (socket_file,))
+ self.stdout.write(f"Binding to unix socket: {socket_file}\n")
if threading:
server = SocketServer.ThreadingUnixStreamServer(socket_file, EventNormalizeHandler)
server.daemon_threads = True
@@ -222,7 +222,7 @@ def handle(self, **options):
elif network_socket:
host, port = network_socket.split(":")
port = int(port)
- self.stdout.write("Binding to network socket: %s:%s\n" % (host, port))
+ self.stdout.write(f"Binding to network socket: {host}:{port}\n")
if threading:
server = SocketServer.ThreadingTCPServer((host, port), EventNormalizeHandler)
server.daemon_threads = True
diff --git a/src/sentry/migrations/0024_auto_20191230_2052.py b/src/sentry/migrations/0024_auto_20191230_2052.py
index 9461c568a5109c..bd5416684c0a44 100644
--- a/src/sentry/migrations/0024_auto_20191230_2052.py
+++ b/src/sentry/migrations/0024_auto_20191230_2052.py
@@ -68,14 +68,14 @@ def _attach_related(_events):
print("Nothing to do, skipping migration.\n") # noqa: B314
return
- print("Events to process: {}\n".format(count)) # noqa: B314
+ print(f"Events to process: {count}\n") # noqa: B314
processed = 0
for e in RangeQuerySetWrapper(events, step=100, callbacks=(_attach_related,)):
event_data = e.data.data
if e.project is None or e.group is None or len(event_data) == 0:
print( # noqa: B314
- "Skipped {} as group, project or node data information is invalid.\n".format(e)
+ f"Skipped {e} as group, project or node data information is invalid.\n"
)
continue
@@ -113,9 +113,7 @@ def _attach_related(_events):
processed += 1
except Exception as error:
print( # noqa: B314
- "An error occured while trying to migrate the following event: {}\n.----\n{}".format(
- event, error
- )
+ f"An error occured while trying to migrate the following event: {event}\n.----\n{error}"
)
if processed == 0:
@@ -123,9 +121,7 @@ def _attach_related(_events):
"Cannot migrate any event. If this is okay, re-run migrations with SENTRY_SKIP_EVENTS_BACKFILL_FOR_10 environment variable set to skip this step."
)
- print( # noqa: B314
- "Event migration done. Migrated {} of {} events.\n".format(processed, count)
- )
+ print(f"Event migration done. Migrated {processed} of {count} events.\n") # noqa: B314
class Migration(migrations.Migration):
diff --git a/src/sentry/migrations/0029_discover_query_upgrade.py b/src/sentry/migrations/0029_discover_query_upgrade.py
index e557f4d8cfed4a..cc625f5239baba 100644
--- a/src/sentry/migrations/0029_discover_query_upgrade.py
+++ b/src/sentry/migrations/0029_discover_query_upgrade.py
@@ -39,16 +39,16 @@ def convert_field(fieldname, unique, reverse):
if fieldname == "count":
fieldname = "count()"
elif unique:
- fieldname = "count_unique({})".format(fieldname)
+ fieldname = f"count_unique({fieldname})"
- fieldname = "-{}".format(fieldname) if reverse else fieldname
+ fieldname = f"-{fieldname}" if reverse else fieldname
return fieldname
def prepare_value(value):
value = value.replace("%", "*")
if " " in value and not value.startswith('"'):
- value = '"{}"'.format(value)
+ value = f'"{value}"'
return value
@@ -112,9 +112,9 @@ def convert(
if column == "environment" and operator == "=":
updated_query["environment"].append(value.strip('"'))
elif operator == "IS NOT NULL":
- updated_query["query"].append("has:{}".format(column))
+ updated_query["query"].append(f"has:{column}")
elif operator == "IS NULL":
- updated_query["query"].append("!has:{}".format(column))
+ updated_query["query"].append(f"!has:{column}")
elif column in OPERATOR_KEYS:
updated_query["query"].append(
"{}:{}{}".format(column, operator if operator != "=" else "", value)
diff --git a/src/sentry/migrations/0056_remove_old_functions.py b/src/sentry/migrations/0056_remove_old_functions.py
index a0ebb6d4ff652e..3e53bd730bd837 100644
--- a/src/sentry/migrations/0056_remove_old_functions.py
+++ b/src/sentry/migrations/0056_remove_old_functions.py
@@ -24,7 +24,7 @@
def get_function_alias_with_columns(function_name, columns):
columns = "_".join(columns).replace(".", "_")
- return "{}_{}".format(function_name, columns).rstrip("_")
+ return f"{function_name}_{columns}".rstrip("_")
def get_function_alias(field):
diff --git a/src/sentry/migrations/0067_migrate_rules_alert_targeting.py b/src/sentry/migrations/0067_migrate_rules_alert_targeting.py
index b7ac6b77e548cc..8d714a5194cd45 100644
--- a/src/sentry/migrations/0067_migrate_rules_alert_targeting.py
+++ b/src/sentry/migrations/0067_migrate_rules_alert_targeting.py
@@ -105,7 +105,7 @@ def migrate_to_issue_alert_targeting(apps, schema_editor):
# If a project fails we'll just log and continue. We shouldn't see any
# failures, but if we do we can analyze them and re-run this migration,
# since it is idempotent.
- logging.exception("Error migrating project {}".format(project.id))
+ logging.exception(f"Error migrating project {project.id}")
class Migration(migrations.Migration):
diff --git a/src/sentry/migrations/0099_fix_project_platforms.py b/src/sentry/migrations/0099_fix_project_platforms.py
index 280e32fd68f2c4..a09f383a8e68f6 100644
--- a/src/sentry/migrations/0099_fix_project_platforms.py
+++ b/src/sentry/migrations/0099_fix_project_platforms.py
@@ -26,7 +26,7 @@ def fix_project_platform(apps, schema_editor):
project.save()
continue
except Exception:
- logging.exception("Error changing platform for project {}".format(project.id))
+ logging.exception(f"Error changing platform for project {project.id}")
class Migration(migrations.Migration):
diff --git a/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py b/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py
index 7b6f5b1b1c0ce0..425bd7684319c8 100644
--- a/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py
+++ b/src/sentry/migrations/0136_issue_alert_filter_all_orgs.py
@@ -162,7 +162,7 @@ def migrate_all_orgs(apps, schema_editor):
# If a project fails we'll just log and continue. We shouldn't see any
# failures, but if we do we can analyze them and re-run this migration,
# since it is idempotent.
- logging.exception("Error migrating project {}".format(project.id))
+ logging.exception(f"Error migrating project {project.id}")
class Migration(migrations.Migration):
diff --git a/src/sentry/migrations/0146_backfill_members_alert_write.py b/src/sentry/migrations/0146_backfill_members_alert_write.py
index 75e2e56379f9ad..1cebc27e7e0a07 100644
--- a/src/sentry/migrations/0146_backfill_members_alert_write.py
+++ b/src/sentry/migrations/0146_backfill_members_alert_write.py
@@ -22,7 +22,7 @@ def backfill_existing_orgs(apps, schema_editor):
organization=org, key="sentry:alerts_member_write", value=False
)
except Exception:
- logging.exception("Error backfilling organization {}".format(org.id))
+ logging.exception(f"Error backfilling organization {org.id}")
class Migration(migrations.Migration):
diff --git a/src/sentry/monkey/__init__.py b/src/sentry/monkey/__init__.py
index 48eba46188335b..6be2aff194c7d1 100644
--- a/src/sentry/monkey/__init__.py
+++ b/src/sentry/monkey/__init__.py
@@ -27,7 +27,7 @@ def patch_httprequest_repr():
# logged. This was yanked out of Django master anyhow.
# https://code.djangoproject.com/ticket/12098
def safe_httprequest_repr(self):
- return "<%s: %s %r>" % (self.__class__.__name__, self.method, self.get_full_path())
+ return f"<{self.__class__.__name__}: {self.method} {self.get_full_path()!r}>"
HttpRequest.__repr__ = safe_httprequest_repr
diff --git a/src/sentry/net/http.py b/src/sentry/net/http.py
index db343e45391791..c5334a5899230b 100644
--- a/src/sentry/net/http.py
+++ b/src/sentry/net/http.py
@@ -70,7 +70,8 @@ def _new_conn(self):
except SocketTimeout:
raise ConnectTimeoutError(
- self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)
+ self,
+ f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
)
except SocketError as e:
@@ -139,7 +140,7 @@ def send(self, *args, **kwargs):
return HTTPAdapter.send(self, *args, **kwargs)
-USER_AGENT = "sentry/{version} (https://sentry.io)".format(version=SENTRY_VERSION)
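+# Still a module-level constant evaluated once at import; only the spelling
+# of the interpolation changes.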
+USER_AGENT = f"sentry/{SENTRY_VERSION} (https://sentry.io)"
class Session(_Session):
@@ -191,7 +192,7 @@ class UnixHTTPConnectionPool(HTTPConnectionPool):
ConnectionCls = UnixHTTPConnection
def __str__(self):
- return "%s(host=%r)" % (type(self).__name__, self.host)
+ return "{}(host={!r})".format(type(self).__name__, self.host)
def connection_from_url(endpoint, **kw):
diff --git a/src/sentry/net/socket.py b/src/sentry/net/socket.py
index bb5125ed5d0414..39788bd244be69 100644
--- a/src/sentry/net/socket.py
+++ b/src/sentry/net/socket.py
@@ -126,7 +126,7 @@ def safe_create_connection(
# suspicious.
if host == ip:
raise RestrictedIPAddress("(%s) matches the URL blacklist" % ip)
- raise RestrictedIPAddress("(%s/%s) matches the URL blacklist" % (host, ip))
+ raise RestrictedIPAddress(f"({host}/{ip}) matches the URL blacklist")
sock = None
try:
diff --git a/src/sentry/nodestore/bigtable/backend.py b/src/sentry/nodestore/bigtable/backend.py
index c669eb06e764da..511f9649439655 100644
--- a/src/sentry/nodestore/bigtable/backend.py
+++ b/src/sentry/nodestore/bigtable/backend.py
@@ -28,7 +28,7 @@ def _compress_data(data, compression):
elif compression is False:
pass
else:
- raise ValueError("invalid argument for compression: {!r}".format(compression))
+ raise ValueError(f"invalid argument for compression: {compression!r}")
return data, flags
diff --git a/src/sentry/options/manager.py b/src/sentry/options/manager.py
index e6dd558de3eea3..c5c4b956b695ee 100644
--- a/src/sentry/options/manager.py
+++ b/src/sentry/options/manager.py
@@ -83,7 +83,7 @@ def set(self, key, value, coerce=True):
if coerce:
value = opt.type(value)
elif not opt.type.test(value):
- raise TypeError("got %r, expected %r" % (_type(value), opt.type))
+ raise TypeError("got {!r}, expected {!r}".format(_type(value), opt.type))
return self.store.set(opt, value)
@@ -227,7 +227,7 @@ def default():
# Make sure the type is correct at registration time
if default_value is not None and not type.test(default_value):
- raise TypeError("got %r, expected %r" % (_type(default), type))
+ raise TypeError("got {!r}, expected {!r}".format(_type(default), type))
# If we don't have a default, but we have a type, pull the default
# value from the type
@@ -264,7 +264,7 @@ def validate_option(self, key, value):
opt = self.lookup_key(key)
assert not (opt.flags & FLAG_STOREONLY), "%r is not allowed to be loaded from config" % key
if not opt.type.test(value):
- raise TypeError("%r: got %r, expected %r" % (key, _type(value), opt.type))
+ raise TypeError("{!r}: got {!r}, expected {!r}".format(key, _type(value), opt.type))
def all(self):
"""
diff --git a/src/sentry/pipeline/__init__.py b/src/sentry/pipeline/__init__.py
index 022ec2702bde34..d6f21b0f70a301 100644
--- a/src/sentry/pipeline/__init__.py
+++ b/src/sentry/pipeline/__init__.py
@@ -280,4 +280,4 @@ def fetch_state(self, key=None):
return data if key is None else data.get(key)
def get_logger(self):
- return logging.getLogger("sentry.integration.%s" % (self.provider.key,))
+ return logging.getLogger(f"sentry.integration.{self.provider.key}")
diff --git a/src/sentry/quotas/base.py b/src/sentry/quotas/base.py
index 5e32de295c7831..2a3beb925aee0a 100644
--- a/src/sentry/quotas/base.py
+++ b/src/sentry/quotas/base.py
@@ -308,7 +308,7 @@ def get_key_quota(self, key):
# XXX(epurkhiser): Avoid excessive feature manager checks (which can be
# expensive depending on feature handlers) for project rate limits.
# This happens on /store.
- cache_key = "project:{}:features:rate-limits".format(key.project.id)
+ cache_key = f"project:{key.project.id}:features:rate-limits"
has_rate_limits = cache.get(cache_key)
if has_rate_limits is None:
@@ -336,7 +336,7 @@ def get_project_quota(self, project):
org_quota, window = self.get_organization_quota(org)
if max_quota_share != 100 and org_quota:
- quota = self._translate_quota("{}%".format(max_quota_share), org_quota)
+ quota = self._translate_quota(f"{max_quota_share}%", org_quota)
else:
quota = None
diff --git a/src/sentry/quotas/redis.py b/src/sentry/quotas/redis.py
index 349e12096d0b27..e1c4901faf0b5e 100644
--- a/src/sentry/quotas/redis.py
+++ b/src/sentry/quotas/redis.py
@@ -49,10 +49,10 @@ def __get_redis_key(self, quota, timestamp, shift, organization_id):
if self.is_redis_cluster:
scope_id = quota.scope_id or "" if quota.scope != QuotaScope.ORGANIZATION else ""
# new style redis cluster format which always has the organization id in
- local_key = "%s{%s}%s" % (quota.id, organization_id, scope_id)
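+            # Doubled braces emit literal "{" / "}", so the key is still
+            # "<quota_id>{<org_id>}<scope_id>"; Redis Cluster hash-tags on
+            # {<org_id>} to keep an org's quota keys on one slot.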
+ local_key = f"{quota.id}{{{organization_id}}}{scope_id}"
else:
# legacy key format
- local_key = "%s:%s" % (quota.id, quota.scope_id or organization_id)
+ local_key = "{}:{}".format(quota.id, quota.scope_id or organization_id)
interval = quota.window
return "{}:{}:{}".format(self.namespace, local_key, int((timestamp - shift) // interval))
@@ -146,7 +146,7 @@ def get_value_for_result(result, refund_result):
return [get_value_for_result(*r) for r in results]
def get_refunded_quota_key(self, key):
- return "r:{}".format(key)
+ return f"r:{key}"
def refund(self, project, key=None, timestamp=None, category=None, quantity=None):
if timestamp is None:
diff --git a/src/sentry/ratelimits/redis.py b/src/sentry/ratelimits/redis.py
index da607e494fd72b..ceebc4268da153 100644
--- a/src/sentry/ratelimits/redis.py
+++ b/src/sentry/ratelimits/redis.py
@@ -27,9 +27,9 @@ def is_limited(self, key, limit, project=None, window=None):
bucket = int(time() / window)
if project:
- key = "rl:%s:%s:%s" % (key_hex, project.id, bucket)
+ key = f"rl:{key_hex}:{project.id}:{bucket}"
else:
- key = "rl:%s:%s" % (key_hex, bucket)
+ key = f"rl:{key_hex}:{bucket}"
with self.cluster.map() as client:
result = client.incr(key)
diff --git a/src/sentry/relay/config.py b/src/sentry/relay/config.py
index 2f9909d9cf18f7..6c7252264be809 100644
--- a/src/sentry/relay/config.py
+++ b/src/sentry/relay/config.py
@@ -56,11 +56,11 @@ def get_filter_settings(project):
filter_settings[filter_id] = settings
if features.has("projects:custom-inbound-filters", project):
- invalid_releases = project.get_option("sentry:{}".format(FilterTypes.RELEASES))
+ invalid_releases = project.get_option(f"sentry:{FilterTypes.RELEASES}")
if invalid_releases:
filter_settings["releases"] = {"releases": invalid_releases}
- error_messages = project.get_option("sentry:{}".format(FilterTypes.ERROR_MESSAGES))
+ error_messages = project.get_option(f"sentry:{FilterTypes.ERROR_MESSAGES}")
if error_messages:
filter_settings["errorMessages"] = {"patterns": error_messages}
@@ -254,10 +254,10 @@ def __str__(self):
try:
return utils.json.dumps(self.to_dict(), sort_keys=True)
except Exception as e:
- return "Content Error:{}".format(e)
+ return f"Content Error:{e}"
def __repr__(self):
- return "({}){}".format(self.__class__.__name__, self)
+ return f"({self.__class__.__name__}){self}"
class ProjectConfig(_ConfigBase):
@@ -282,7 +282,7 @@ def _load_filter_settings(flt, project):
default options for the filter will be returned
"""
filter_id = flt.id
- filter_key = "filters:{}".format(filter_id)
+ filter_key = f"filters:{filter_id}"
setting = project.get_option(filter_key)
return _filter_option_to_config_setting(flt, setting)
diff --git a/src/sentry/relay/projectconfig_cache/redis.py b/src/sentry/relay/projectconfig_cache/redis.py
index 106edc21733981..8eddf25dfe91c1 100644
--- a/src/sentry/relay/projectconfig_cache/redis.py
+++ b/src/sentry/relay/projectconfig_cache/redis.py
@@ -17,7 +17,7 @@ def validate(self):
validate_dynamic_cluster(self.is_redis_cluster, self.cluster)
def __get_redis_key(self, project_id):
- return "relayconfig:%s" % (project_id,)
+ return f"relayconfig:{project_id}"
def __get_redis_client(self, routing_key):
if self.is_redis_cluster:
diff --git a/src/sentry/relay/projectconfig_debounce_cache/redis.py b/src/sentry/relay/projectconfig_debounce_cache/redis.py
index 74064615528997..661b417b72ccfa 100644
--- a/src/sentry/relay/projectconfig_debounce_cache/redis.py
+++ b/src/sentry/relay/projectconfig_debounce_cache/redis.py
@@ -7,9 +7,9 @@
def _get_redis_key(project_id, organization_id):
if organization_id:
- return "relayconfig-debounce:o:%s" % (organization_id,)
+ return f"relayconfig-debounce:o:{organization_id}"
elif project_id:
- return "relayconfig-debounce:p:%s" % (project_id,)
+ return f"relayconfig-debounce:p:{project_id}"
else:
raise ValueError()
diff --git a/src/sentry/reprocessing.py b/src/sentry/reprocessing.py
index 726374e942fe25..49108c7d18d569 100644
--- a/src/sentry/reprocessing.py
+++ b/src/sentry/reprocessing.py
@@ -66,7 +66,7 @@ def report_processing_issue(event_data, scope, object=None, type=None, data=None
logger.error("processing_issue.bad_report", extra={"platform": event_data.get("platform")})
return
- uid = "%s:%s" % (scope, object)
+ uid = f"{scope}:{object}"
event_data.setdefault("processing_issues", {})[uid] = {
"scope": scope,
"object": object,
diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py
index ea890d7e35cabf..3ca965a4196a56 100644
--- a/src/sentry/reprocessing2.py
+++ b/src/sentry/reprocessing2.py
@@ -104,7 +104,7 @@
def _generate_unprocessed_event_node_id(project_id, event_id):
- return hashlib.md5("{}:{}:unprocessed".format(project_id, event_id).encode("utf-8")).hexdigest()
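+    # Byte-identical output to the old .format() call, so previously stored
+    # unprocessed-event node IDs remain addressable.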
+ return hashlib.md5(f"{project_id}:{event_id}:unprocessed".encode("utf-8")).hexdigest()
def save_unprocessed_event(project, event_id):
@@ -253,11 +253,11 @@ def _get_sync_redis_client():
def _get_sync_counter_key(group_id):
- return "re2:count:{}".format(group_id)
+ return f"re2:count:{group_id}"
def _get_info_reprocessed_key(group_id):
- return "re2:info:{}".format(group_id)
+ return f"re2:info:{group_id}"
def mark_event_reprocessed(data):
diff --git a/src/sentry/roles/manager.py b/src/sentry/roles/manager.py
index 331ff5a76a26e3..cb2c2ad8c28988 100644
--- a/src/sentry/roles/manager.py
+++ b/src/sentry/roles/manager.py
@@ -16,7 +16,7 @@ def __str__(self):
return str(self.name)
def __repr__(self):
- return "<Role: {}>".format(self.id)
+ return f"<Role: {self.id}>"
def has_scope(self, scope):
return scope in self.scopes
diff --git a/src/sentry/rules/actions/base.py b/src/sentry/rules/actions/base.py
index 214dfd68de151d..a7274ac65b1061 100644
--- a/src/sentry/rules/actions/base.py
+++ b/src/sentry/rules/actions/base.py
@@ -155,9 +155,7 @@ def build_description(event, rule_id, installation, generate_footer):
Format the description of the ticket/work item
"""
project = event.group.project
- rule_url = "/organizations/{}/alerts/rules/{}/{}/".format(
- project.organization.slug, project.slug, rule_id
- )
+ rule_url = f"/organizations/{project.organization.slug}/alerts/rules/{project.slug}/{rule_id}/"
return installation.get_group_description(event.group, event) + generate_footer(rule_url)
@@ -193,7 +191,7 @@ def create_issue(event, futures):
if has_linked_issue(event, integration):
logger.info(
- "{}.rule_trigger.link_already_exists".format(integration.provider),
+ f"{integration.provider}.rule_trigger.link_already_exists",
extra={
"rule_id": rule_id,
"project_id": event.group.project.id,
@@ -260,14 +258,14 @@ def translate_integration(self, integration):
@property
def prompt(self):
- return "Create {}".format(self.ticket_type)
+ return f"Create {self.ticket_type}"
def generate_footer(self, rule_url):
raise NotImplementedError
def after(self, event, state):
integration_id = self.get_integration_id()
- key = "{}:{}".format(self.provider, integration_id)
+ key = f"{self.provider}:{integration_id}"
return self.future(
create_issue,
key=key,
diff --git a/src/sentry/rules/base.py b/src/sentry/rules/base.py
index 465f6dc5d6f75d..c2a2f455476762 100644
--- a/src/sentry/rules/base.py
+++ b/src/sentry/rules/base.py
@@ -41,7 +41,7 @@
class RuleDescriptor(type):
def __new__(cls, *args, **kwargs):
new_cls = super().__new__(cls, *args, **kwargs)
- new_cls.id = "%s.%s" % (new_cls.__module__, new_cls.__name__)
+ new_cls.id = f"{new_cls.__module__}.{new_cls.__name__}"
return new_cls
diff --git a/src/sentry/rules/conditions/level.py b/src/sentry/rules/conditions/level.py
index 050329c48decd5..b3dc16642f8b8c 100644
--- a/src/sentry/rules/conditions/level.py
+++ b/src/sentry/rules/conditions/level.py
@@ -6,7 +6,7 @@
from sentry.rules.conditions.base import EventCondition
LEVEL_CHOICES = OrderedDict(
- [("{}".format(k), v) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True)]
+ [(f"{k}", v) for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[0], reverse=True)]
)
diff --git a/src/sentry/rules/filters/assigned_to.py b/src/sentry/rules/filters/assigned_to.py
index 73bda2e5b1741a..3b6e0e8f40ba50 100644
--- a/src/sentry/rules/filters/assigned_to.py
+++ b/src/sentry/rules/filters/assigned_to.py
@@ -35,7 +35,7 @@ class AssignedToFilter(EventFilter):
form_fields = {"targetType": {"type": "assignee", "choices": CHOICES}}
def get_assignees(self, group):
- cache_key = "group:{}:assignees".format(group.id)
+ cache_key = f"group:{group.id}:assignees"
assignee_list = cache.get(cache_key)
if assignee_list is None:
assignee_list = list(group.assignee_set.all())
diff --git a/src/sentry/rules/filters/latest_release.py b/src/sentry/rules/filters/latest_release.py
index d9fc51332c1855..cd8c8e3c596b31 100644
--- a/src/sentry/rules/filters/latest_release.py
+++ b/src/sentry/rules/filters/latest_release.py
@@ -8,7 +8,7 @@
def get_project_release_cache_key(project_id):
- return "project:{}:latest_release".format(project_id)
+ return f"project:{project_id}:latest_release"
# clear the cache given a Release object
diff --git a/src/sentry/search/snuba/backend.py b/src/sentry/search/snuba/backend.py
index 16c0ab28e0b9f1..8b2ca4fb1e0091 100644
--- a/src/sentry/search/snuba/backend.py
+++ b/src/sentry/search/snuba/backend.py
@@ -230,7 +230,7 @@ def apply(self, queryset, search_filter):
q = self.callback(value)
if search_filter.operator not in ("=", "!="):
raise InvalidSearchQuery(
- "Operator {} not valid for search {}".format(search_filter.operator, search_filter)
+ f"Operator {search_filter.operator} not valid for search {search_filter}"
)
queryset_method = queryset.filter if search_filter.operator == "=" else queryset.exclude
queryset = queryset_method(q)
@@ -252,14 +252,14 @@ def __init__(self, field, extra=None):
def _get_operator(self, search_filter):
django_operator = self.OPERATOR_TO_DJANGO.get(search_filter.operator, "")
if django_operator:
- django_operator = "__{}".format(django_operator)
+ django_operator = f"__{django_operator}"
return django_operator
def apply(self, queryset, search_filter):
django_operator = self._get_operator(search_filter)
qs_method = queryset.exclude if search_filter.operator == "!=" else queryset.filter
- q_dict = {"{}{}".format(self.field, django_operator): search_filter.value.raw_value}
+ q_dict = {f"{self.field}{django_operator}": search_filter.value.raw_value}
if self.extra:
q_dict.update(self.extra)
@@ -330,7 +330,7 @@ def query(
# ensure sort strategy is supported by executor
if not query_executor.has_sort_strategy(sort_by):
- raise InvalidSearchQuery("Sort key '{}' not supported.".format(sort_by))
+ raise InvalidSearchQuery(f"Sort key '{sort_by}' not supported.")
return query_executor.query(
projects=projects,
diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py
index ed3450fc7ad635..657759e14d0d34 100644
--- a/src/sentry/search/snuba/executors.py
+++ b/src/sentry/search/snuba/executors.py
@@ -167,9 +167,7 @@ def snuba_search(
selected_columns = []
if get_sample:
query_hash = md5(json.dumps(conditions).encode("utf-8")).hexdigest()[:8]
- selected_columns.append(
- ("cityHash64", ("'{}'".format(query_hash), "group_id"), "sample")
- )
+ selected_columns.append(("cityHash64", (f"'{query_hash}'", "group_id"), "sample"))
sort_field = "sample"
orderby = [sort_field]
referrer = "search_sample"
@@ -177,7 +175,7 @@ def snuba_search(
# Get the top matching groups by score, i.e. the actual search results
# in the order that we want them.
orderby = [
- "-{}".format(sort_field),
+ f"-{sort_field}",
"group_id",
] # ensure stable sort within the same score
referrer = "search"
@@ -226,10 +224,10 @@ def trend_aggregation(start, end):
middle = start + timedelta(seconds=(end - start).total_seconds() * 0.5)
middle = datetime.strftime(middle, DateArg.date_format)
- agg_range_1 = "countIf(greater(toDateTime('{}'), timestamp))".format(middle)
- agg_range_2 = "countIf(lessOrEquals(toDateTime('{}'), timestamp))".format(middle)
+ agg_range_1 = f"countIf(greater(toDateTime('{middle}'), timestamp))"
+ agg_range_2 = f"countIf(lessOrEquals(toDateTime('{middle}'), timestamp))"
return [
- "if(greater({}, 0), divide({}, {}), 0)".format(agg_range_1, agg_range_2, agg_range_1),
+ f"if(greater({agg_range_1}, 0), divide({agg_range_2}, {agg_range_1}), 0)",
"",
]
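
For readers unfamiliar with this helper: the two f-strings build ClickHouse `countIf` expressions for the two halves of the time range, and the returned expression is their guarded ratio. A sketch of the expansion (the `middle` value here is hypothetical; the real one comes from `DateArg.date_format`):

middle = "2023-01-01T12:00:00"  # hypothetical midpoint string
agg_range_1 = f"countIf(greater(toDateTime('{middle}'), timestamp))"       # events before the midpoint
agg_range_2 = f"countIf(lessOrEquals(toDateTime('{middle}'), timestamp))"  # events at or after it
trend = f"if(greater({agg_range_1}, 0), divide({agg_range_2}, {agg_range_1}), 0)"
# i.e. second-half count / first-half count, with a divide-by-zero guard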
diff --git a/src/sentry/search/utils.py b/src/sentry/search/utils.py
index 3b16bd8ca75e92..48a755f9f5d5be 100644
--- a/src/sentry/search/utils.py
+++ b/src/sentry/search/utils.py
@@ -23,9 +23,9 @@ def get_user_tag(projects, key, value):
project_id__in=[p.id for p in projects], **{lookup: value}
)[0]
except (KeyError, IndexError):
- return "{}:{}".format(key, value)
+ return f"{key}:{value}"
except DataError:
- raise InvalidQuery("malformed '{}:' query '{}'.".format(key, value))
+ raise InvalidQuery(f"malformed '{key}:' query '{value}'.")
return euser.tag_value
@@ -41,7 +41,7 @@ def parse_duration(value, interval):
try:
value = float(value)
except ValueError:
- raise InvalidQuery("{} is not a valid duration value".format(value))
+ raise InvalidQuery(f"{value} is not a valid duration value")
if interval == "ms":
delta = timedelta(milliseconds=value)
@@ -57,9 +57,7 @@ def parse_duration(value, interval):
delta = timedelta(days=value * 7)
else:
raise InvalidQuery(
- "{} is not a valid duration type, must be ms, s, min, m, hr, h, day, d, wk or w".format(
- interval
- )
+ f"{interval} is not a valid duration type, must be ms, s, min, m, hr, h, day, d, wk or w"
)
return delta.total_seconds() * 1000.0
@@ -69,7 +67,7 @@ def parse_percentage(value):
try:
value = float(value)
except ValueError:
- raise InvalidQuery("{} is not a valid percentage value".format(value))
+ raise InvalidQuery(f"{value} is not a valid percentage value")
return value / 100
@@ -78,10 +76,10 @@ def parse_datetime_range(value):
try:
flag, count, interval = value[0], int(value[1:-1]), value[-1]
except (ValueError, TypeError, IndexError):
- raise InvalidQuery("{} is not a valid datetime query".format(value))
+ raise InvalidQuery(f"{value} is not a valid datetime query")
if flag not in ("+", "-"):
- raise InvalidQuery("{} is not a valid datetime query".format(value))
+ raise InvalidQuery(f"{value} is not a valid datetime query")
if interval == "h":
delta = timedelta(hours=count)
@@ -92,7 +90,7 @@ def parse_datetime_range(value):
elif interval == "m":
delta = timedelta(minutes=count)
else:
- raise InvalidQuery("{} is not a valid datetime query".format(value))
+ raise InvalidQuery(f"{value} is not a valid datetime query")
if flag == "-":
return ((timezone.now() - delta, True), None)
@@ -127,7 +125,7 @@ def parse_datetime_string(value):
except ValueError:
pass
- raise InvalidQuery("{} is not a valid ISO8601 date query".format(value))
+ raise InvalidQuery(f"{value} is not a valid ISO8601 date query")
def parse_datetime_comparison(value):
@@ -140,7 +138,7 @@ def parse_datetime_comparison(value):
if value[:1] == "<":
return (None, (parse_datetime_string(value[1:]), False))
- raise InvalidQuery("{} is not a valid datetime query".format(value))
+ raise InvalidQuery(f"{value} is not a valid datetime query")
def parse_datetime_value(value):
@@ -176,7 +174,7 @@ def parse_datetime_value(value):
pass
if result is None:
- raise InvalidQuery("{} is not a valid datetime query".format(value))
+ raise InvalidQuery(f"{value} is not a valid datetime query")
return ((result - timedelta(minutes=5), True), (result + timedelta(minutes=6), False))
@@ -195,12 +193,10 @@ def get_date_params(value, from_field, to_field):
result = {}
if date_from is not None:
date_from_value, date_from_inclusive = date_from
- result.update(
- {from_field: date_from_value, "{}_inclusive".format(from_field): date_from_inclusive}
- )
+ result.update({from_field: date_from_value, f"{from_field}_inclusive": date_from_inclusive})
if date_to is not None:
date_to_value, date_to_inclusive = date_to
- result.update({to_field: date_to_value, "{}_inclusive".format(to_field): date_to_inclusive})
+ result.update({to_field: date_to_value, f"{to_field}_inclusive": date_to_inclusive})
return result
@@ -274,29 +270,29 @@ def parse_release(value, projects, environments, organization_id=None):
(
">=",
lambda field, value: {
- "{}_lower".format(field): value,
- "{}_lower_inclusive".format(field): True,
+ f"{field}_lower": value,
+ f"{field}_lower_inclusive": True,
},
),
(
"<=",
lambda field, value: {
- "{}_upper".format(field): value,
- "{}_upper_inclusive".format(field): True,
+ f"{field}_upper": value,
+ f"{field}_upper_inclusive": True,
},
),
(
">",
lambda field, value: {
- "{}_lower".format(field): value,
- "{}_lower_inclusive".format(field): False,
+ f"{field}_lower": value,
+ f"{field}_lower_inclusive": False,
},
),
(
"<",
lambda field, value: {
- "{}_upper".format(field): value,
- "{}_upper_inclusive".format(field): False,
+ f"{field}_upper": value,
+ f"{field}_upper_inclusive": False,
},
),
]
@@ -310,7 +306,7 @@ def get_numeric_field_value(field, raw_value, type=int):
else:
return {field: type(raw_value)}
except ValueError:
- msg = '"{}" could not be converted to a number.'.format(raw_value)
+ msg = f'"{raw_value}" could not be converted to a number.'
raise InvalidQuery(msg)
@@ -447,7 +443,7 @@ def parse_query(projects, query, user, environments):
try:
results["status"] = STATUS_QUERY_CHOICES[value]
except KeyError:
- raise InvalidQuery("'is:' had unknown status code '{}'.".format(value))
+ raise InvalidQuery(f"'is:' had unknown status code '{value}'.")
elif key == "assigned":
results["assigned_to"] = parse_actor_or_none_value(projects, value, user)
elif key == "assigned_or_suggested":
@@ -504,4 +500,4 @@ def convert_user_tag_to_query(key, value):
if key == "user" and ":" in value:
sub_key, value = value.split(":", 1)
if KEYWORD_MAP.get_key(sub_key, None):
- return 'user.%s:"%s"' % (sub_key, value.replace('"', '\\"'))
+ return 'user.{}:"{}"'.format(sub_key, value.replace('"', '\\"'))
diff --git a/src/sentry/security/emails.py b/src/sentry/security/emails.py
index de9016d93cd5c3..581f5fad5e0d01 100644
--- a/src/sentry/security/emails.py
+++ b/src/sentry/security/emails.py
@@ -26,7 +26,7 @@ def generate_security_email(account, type, actor, ip_address, context=None, curr
template = "sentry/emails/api-token-generated.txt"
html_template = "sentry/emails/api-token-generated.html"
else:
- raise ValueError("unknown type: {}".format(type))
+ raise ValueError(f"unknown type: {type}")
new_context = {
"account": account,
diff --git a/src/sentry/security/utils.py b/src/sentry/security/utils.py
index a98271bf702d3f..038768484018a3 100644
--- a/src/sentry/security/utils.py
+++ b/src/sentry/security/utils.py
@@ -19,7 +19,7 @@ def capture_security_activity(
if type == "mfa-removed" or type == "mfa-added":
logger_context["authenticator_id"] = context["authenticator"].id
- logger.info("user.{}".format(type), extra=logger_context)
+ logger.info(f"user.{type}", extra=logger_context)
if send_email:
msg = generate_security_email(
diff --git a/src/sentry/services/http.py b/src/sentry/services/http.py
index e4fc97e725a8ff..7a346fd6c026a0 100644
--- a/src/sentry/services/http.py
+++ b/src/sentry/services/http.py
@@ -18,7 +18,7 @@ def convert_options_to_env(options):
elif isinstance(v, int):
value = str(v)
else:
- raise TypeError("Unknown option type: %r (%s)" % (k, type(v)))
+ raise TypeError("Unknown option type: {!r} ({})".format(k, type(v)))
yield key, value
@@ -69,7 +69,7 @@ def __init__(
'%(addr) - %(user) [%(ltime)] "%(method) %(uri) %(proto)" %(status) %(size) "%(referer)" "%(uagent)"',
)
- options.setdefault("%s-socket" % options["protocol"], "%s:%s" % (host, port))
+ options.setdefault("%s-socket" % options["protocol"], f"{host}:{port}")
# We only need to set uid/gid when stepping down from root, but if
# we are trying to run as root, then ignore it entirely.
@@ -151,7 +151,7 @@ def prepare_environment(self, env=None):
virtualenv_path = os.path.dirname(os.path.abspath(sys.argv[0]))
current_path = env.get("PATH", "")
if virtualenv_path not in current_path:
- env["PATH"] = "%s:%s" % (virtualenv_path, current_path)
+ env["PATH"] = f"{virtualenv_path}:{current_path}"
def run(self):
self.prepare_environment()
diff --git a/src/sentry/shared_integrations/client.py b/src/sentry/shared_integrations/client.py
index 73d2908aa76431..5d9e403e20541b 100644
--- a/src/sentry/shared_integrations/client.py
+++ b/src/sentry/shared_integrations/client.py
@@ -25,7 +25,7 @@ def __init__(self, headers=None, status_code=None):
self.status_code = status_code
def __repr__(self):
- return "<%s: code=%s, content_type=%s>" % (
+ return "<{}: code={}, content_type={}>".format(
type(self).__name__,
self.status_code,
self.headers.get("Content-Type", "") if self.headers else "",
@@ -53,9 +53,7 @@ def from_response(self, response, allow_text=False):
raise ValueError("Not a valid response type: {}".format(response.text[:128]))
elif response.status_code < 200 or response.status_code >= 300:
raise ValueError(
- "Received unexpected plaintext response for code {}".format(
- response.status_code
- )
+ f"Received unexpected plaintext response for code {response.status_code}"
)
return TextApiResponse(response.text, response.headers, response.status_code)
@@ -173,13 +171,13 @@ def __init__(self, verify_ssl=True, logging_context=None):
self.logging_context = logging_context
def get_cache_prefix(self):
- return "%s.%s.client:" % (self.integration_type, self.name)
+ return f"{self.integration_type}.{self.name}.client:"
def build_url(self, path):
if path.startswith("/"):
if not self.base_url:
- raise ValueError("Invalid URL: {}".format(path))
- return "{}{}".format(self.base_url, path)
+ raise ValueError(f"Invalid URL: {path}")
+ return f"{self.base_url}{path}"
return path
def _request(
@@ -225,8 +223,8 @@ def _request(
trace_id = None
with sentry_sdk.start_transaction(
- op="{}.http".format(self.integration_type),
- name="{}.http_response.{}".format(self.integration_type, self.name),
+ op=f"{self.integration_type}.http",
+ name=f"{self.integration_type}.http_response.{self.name}",
parent_span_id=parent_span_id,
trace_id=trace_id,
sampled=True,
@@ -357,8 +355,8 @@ def request(self, *args, **kwargs):
trace_id = None
with sentry_sdk.start_transaction(
- op="{}.http".format(self.integration_type),
- name="{}.http_response.{}".format(self.integration_type, self.name),
+ op=f"{self.integration_type}.http",
+ name=f"{self.integration_type}.http_response.{self.name}",
parent_span_id=parent_span_id,
trace_id=trace_id,
sampled=True,
diff --git a/src/sentry/shared_integrations/exceptions.py b/src/sentry/shared_integrations/exceptions.py
index dd40cf5a6c3607..1e54068cbb1c15 100644
--- a/src/sentry/shared_integrations/exceptions.py
+++ b/src/sentry/shared_integrations/exceptions.py
@@ -51,7 +51,7 @@ def from_exception(cls, exception):
@classmethod
def from_request(cls, request):
host = urlparse(request.url).netloc
- return cls("Unable to reach host: {}".format(host))
+ return cls(f"Unable to reach host: {host}")
class ApiTimeoutError(ApiError):
@@ -66,7 +66,7 @@ def from_exception(cls, exception):
@classmethod
def from_request(cls, request):
host = urlparse(request.url).netloc
- return cls("Timed out attempting to reach host: {}".format(host))
+ return cls(f"Timed out attempting to reach host: {host}")
class ApiUnauthorized(ApiError):
@@ -101,5 +101,5 @@ class ClientError(RequestException):
"""4xx Error Occurred"""
def __init__(self, status_code, url, response=None):
- http_error_msg = "%s Client Error: for url: %s" % (status_code, url)
+ http_error_msg = f"{status_code} Client Error: for url: {url}"
super().__init__(http_error_msg, response=response)
diff --git a/src/sentry/similarity/__init__.py b/src/sentry/similarity/__init__.py
index 56d0f46763b6f3..d07127c9da1dff 100644
--- a/src/sentry/similarity/__init__.py
+++ b/src/sentry/similarity/__init__.py
@@ -67,7 +67,7 @@ def _make_index_backend(cluster, namespace="sim:1"):
cluster = redis.redis_clusters.get(cluster_id)
except KeyError:
index = DummyIndexBackend()
- logger.info("No redis cluster provided for similarity, using {!r}.".format(index))
+ logger.info(f"No redis cluster provided for similarity, using {index!r}.")
return index
return MetricsWrapper(
diff --git a/src/sentry/similarity/features.py b/src/sentry/similarity/features.py
index ed904115d06b26..d43422bb1de72c 100644
--- a/src/sentry/similarity/features.py
+++ b/src/sentry/similarity/features.py
@@ -71,10 +71,10 @@ def __init__(
assert set(self.aliases) == set(self.features)
def __get_scope(self, project):
- return "{}".format(project.id)
+ return f"{project.id}"
def __get_key(self, group):
- return "{}".format(group.id)
+ return f"{group.id}"
def extract(self, event):
results = {}
diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py
index d674be6474cd22..201a1c382c8d7b 100644
--- a/src/sentry/snuba/discover.py
+++ b/src/sentry/snuba/discover.py
@@ -683,7 +683,7 @@ def get_facets(query, params, limit=10, referrer=None):
) as span:
span.set_data("tag_count", len(individual_tags))
for tag_name in individual_tags:
- tag = "tags[{}]".format(tag_name)
+ tag = f"tags[{tag_name}]"
tag_values = raw_query(
aggregations=[["count", None, "count"]],
conditions=snuba_filter.conditions,
@@ -787,9 +787,7 @@ def histogram_query(
for f in fields:
measurement = get_measurement_name(f)
if measurement is None:
- raise InvalidSearchQuery(
- "multihistogram expected all measurements, received: {}".format(f)
- )
+ raise InvalidSearchQuery(f"multihistogram expected all measurements, received: {f}")
measurements.append(measurement)
conditions.append([key_alias, "IN", measurements])
@@ -833,12 +831,7 @@ def get_histogram_column(fields, key_column, histogram_params):
"""
field = fields[0] if key_column is None else "measurements_value"
- return "histogram({}, {:d}, {:d}, {:d})".format(
- field,
- histogram_params.bucket_size,
- histogram_params.start_offset,
- histogram_params.multiplier,
- )
+ return f"histogram({field}, {histogram_params.bucket_size:d}, {histogram_params.start_offset:d}, {histogram_params.multiplier:d})"
def find_histogram_params(num_buckets, min_value, max_value, multiplier):
@@ -907,12 +900,12 @@ def find_histogram_min_max(fields, min_value, max_value, user_query, params, dat
quartiles = []
for field in fields:
if min_value is None:
- min_columns.append("min({})".format(field))
+ min_columns.append(f"min({field})")
if max_value is None:
- max_columns.append("max({})".format(field))
+ max_columns.append(f"max({field})")
if data_filter == "exclude_outliers":
- quartiles.append("percentile({}, 0.25)".format(field))
- quartiles.append("percentile({}, 0.75)".format(field))
+ quartiles.append(f"percentile({field}, 0.25)")
+ quartiles.append(f"percentile({field}, 0.75)")
results = query(
selected_columns=min_columns + max_columns + quartiles,
@@ -944,8 +937,8 @@ def find_histogram_min_max(fields, min_value, max_value, user_query, params, dat
fences = []
if data_filter == "exclude_outliers":
for field in fields:
- q1_alias = get_function_alias("percentile({}, 0.25)".format(field))
- q3_alias = get_function_alias("percentile({}, 0.75)".format(field))
+ q1_alias = get_function_alias(f"percentile({field}, 0.25)")
+ q3_alias = get_function_alias(f"percentile({field}, 0.75)")
first_quartile = row[q1_alias]
third_quartile = row[q3_alias]
diff --git a/src/sentry/snuba/sessions_v2.py b/src/sentry/snuba/sessions_v2.py
index 71d44688f8d8e0..88f140913ce9a5 100644
--- a/src/sentry/snuba/sessions_v2.py
+++ b/src/sentry/snuba/sessions_v2.py
@@ -220,13 +220,13 @@ def __init__(self, query, project_ids=None):
self.fields = {}
for key in raw_fields:
if key not in COLUMN_MAP:
- raise InvalidField('Invalid field: "{}"'.format(key))
+ raise InvalidField(f'Invalid field: "{key}"')
self.fields[key] = COLUMN_MAP[key]
self.groupby = []
for key in raw_groupby:
if key not in GROUPBY_MAP:
- raise InvalidField('Invalid groupBy: "{}"'.format(key))
+ raise InvalidField(f'Invalid groupBy: "{key}"')
self.groupby.append(GROUPBY_MAP[key])
start, end, rollup = get_date_range_rollup_from_params(query, "1h", round_range=True)
diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py
index 0b70e91aedb832..7686f8760e8ef3 100644
--- a/src/sentry/snuba/tasks.py
+++ b/src/sentry/snuba/tasks.py
@@ -46,14 +46,14 @@ def apply_dataset_query_conditions(dataset, query, event_types, discover=False):
return query
if event_types:
event_type_conditions = " OR ".join(
- ["event.type:{}".format(event_type.name.lower()) for event_type in event_types]
+ [f"event.type:{event_type.name.lower()}" for event_type in event_types]
)
elif dataset in DATASET_CONDITIONS:
event_type_conditions = DATASET_CONDITIONS[dataset]
else:
return query
- return "({}) AND ({})".format(event_type_conditions, query)
+ return f"({event_type_conditions}) AND ({query})"
@instrumented_task(
@@ -188,7 +188,7 @@ def _create_in_snuba(subscription):
)
response = _snuba_pool.urlopen(
"POST",
- "/%s/subscriptions" % (snuba_query.dataset,),
+ f"/{snuba_query.dataset}/subscriptions",
body=json.dumps(
{
"project_id": subscription.project_id,
@@ -206,9 +206,7 @@ def _create_in_snuba(subscription):
def _delete_from_snuba(dataset, subscription_id):
- response = _snuba_pool.urlopen(
- "DELETE", "/%s/subscriptions/%s" % (dataset.value, subscription_id)
- )
+ response = _snuba_pool.urlopen("DELETE", f"/{dataset.value}/subscriptions/{subscription_id}")
if response.status != 202:
raise SnubaError("HTTP %s response from Snuba!" % response.status)
diff --git a/src/sentry/stacktraces/processing.py b/src/sentry/stacktraces/processing.py
index 409159800b07a9..90b28f892f69d0 100644
--- a/src/sentry/stacktraces/processing.py
+++ b/src/sentry/stacktraces/processing.py
@@ -35,7 +35,7 @@ def __init__(self, frame, idx, processor, stacktrace_info, processable_frames):
self.processable_frames = processable_frames
def __repr__(self):
- return "<ProcessableFrame %r #%r at %r>" % (
+ return "<ProcessableFrame {!r} #{!r} at {!r}>".format(
self.frame.get("function") or "unknown",
self.idx,
self.frame.get("instruction_addr"),
diff --git a/src/sentry/status_checks/celery_app_version.py b/src/sentry/status_checks/celery_app_version.py
index 2b7a36f6314b3e..1d95b65c5f9f80 100644
--- a/src/sentry/status_checks/celery_app_version.py
+++ b/src/sentry/status_checks/celery_app_version.py
@@ -18,8 +18,6 @@ def check(self):
return []
return [
Problem(
- "Celery workers are referencing a different version of Sentry ({version1} vs {version2})".format(
- version1=sentry.VERSION, version2=version
- )
+ f"Celery workers are referencing a different version of Sentry ({sentry.VERSION} vs {version})"
)
]
diff --git a/src/sentry/tagstore/base.py b/src/sentry/tagstore/base.py
index 16520206a19867..848740caeb4de0 100644
--- a/src/sentry/tagstore/base.py
+++ b/src/sentry/tagstore/base.py
@@ -92,7 +92,7 @@ def is_reserved_key(self, key):
def prefix_reserved_key(self, key):
# XXX(dcramer): kill sentry prefix for internal reserved tags
if self.is_reserved_key(key):
- return "sentry:{}".format(key)
+ return f"sentry:{key}"
else:
return key
diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py
index f51d31fe9b6209..774fb8fc3522dd 100644
--- a/src/sentry/tagstore/snuba/backend.py
+++ b/src/sentry/tagstore/snuba/backend.py
@@ -57,7 +57,7 @@ def get_project_list(project_id):
class SnubaTagStorage(TagStorage):
def __get_tag_key(self, project_id, group_id, environment_id, key):
- tag = "tags[{}]".format(key)
+ tag = f"tags[{key}]"
filters = {"project_id": get_project_list(project_id)}
if environment_id:
filters["environment"] = [environment_id]
@@ -85,7 +85,7 @@ def __get_tag_key_and_top_values(
self, project_id, group_id, environment_id, key, limit=3, raise_on_empty=True, **kwargs
):
- tag = "tags[{}]".format(key)
+ tag = f"tags[{key}]"
filters = {"project_id": get_project_list(project_id)}
if environment_id:
filters["environment"] = [environment_id]
@@ -216,7 +216,7 @@ def __get_tag_keys_for_projects(
result = None
if should_cache:
- filtering_strings = ["{}={}".format(key, value) for key, value in filters.items()]
+ filtering_strings = [f"{key}={value}" for key, value in filters.items()]
cache_key = "tagstore.__get_tag_keys:{}".format(
md5_text(*filtering_strings).hexdigest()
)
@@ -231,7 +231,7 @@ def __get_tag_keys_for_projects(
duration = (end - start).total_seconds()
# Because rounding is used to create this cache suffix, we update the query end so results match
end = snuba.quantize_time(end, key_hash)
- cache_key += ":{}@{}".format(duration, end.isoformat())
+ cache_key += f":{duration}@{end.isoformat()}"
result = cache.get(cache_key, None)
if result is not None:
metrics.incr("testing.tagstore.cache_tag_key.hit")
@@ -275,7 +275,7 @@ def __get_tag_keys_for_projects(
return results
def __get_tag_value(self, project_id, group_id, environment_id, key, value):
- tag = "tags[{}]".format(key)
+ tag = f"tags[{key}]"
filters = {"project_id": get_project_list(project_id)}
if environment_id:
filters["environment"] = [environment_id]
@@ -375,7 +375,7 @@ def get_group_tag_values(self, project_id, group_id, environment_id, key):
return set(key.top_values)
def get_group_list_tag_value(self, project_ids, group_id_list, environment_ids, key, value):
- tag = "tags[{}]".format(key)
+ tag = f"tags[{key}]"
filters = {"project_id": project_ids, "group_id": group_id_list}
if environment_ids:
filters["environment"] = environment_ids
@@ -426,7 +426,7 @@ def get_group_seen_values_for_environments(
return {issue: fix_tag_value_data(data) for issue, data in result.items()}
def get_group_tag_value_count(self, project_id, group_id, environment_id, key):
- tag = "tags[{}]".format(key)
+ tag = f"tags[{key}]"
filters = {"project_id": get_project_list(project_id), "group_id": [group_id]}
if environment_id:
filters["environment"] = [environment_id]
@@ -551,7 +551,7 @@ def get_release_tags(self, project_ids, environment_id, versions):
# this method is already dealing with version strings rather than
# release ids which would need to be translated by the snuba util.
tag = "sentry:release"
- col = "tags[{}]".format(tag)
+ col = f"tags[{tag}]"
conditions = [[col, "IN", versions], DEFAULT_TYPE_CONDITION]
aggregations = [
["count()", "", "times_seen"],
@@ -768,10 +768,10 @@ def get_tag_value_paginator_for_projects(
snuba_name = FIELD_ALIASES[USER_DISPLAY_ALIAS].get_field()
snuba.resolve_complex_column(snuba_name, resolver)
elif snuba_name in BLACKLISTED_COLUMNS:
- snuba_name = "tags[%s]" % (key,)
+ snuba_name = f"tags[{key}]"
if query:
- conditions.append([snuba_name, "LIKE", "%{}%".format(query)])
+ conditions.append([snuba_name, "LIKE", f"%{query}%"])
else:
conditions.append([snuba_name, "!=", ""])
@@ -904,7 +904,7 @@ def get_group_event_filter(self, project_id, group_id, environment_ids, tags, st
conditions = []
for tag_name, tag_val in tags.items():
operator = "IN" if isinstance(tag_val, list) else "="
- conditions.append(["tags[{}]".format(tag_name), operator, tag_val])
+ conditions.append([f"tags[{tag_name}]", operator, tag_val])
result = snuba.raw_query(
start=start,
diff --git a/src/sentry/tagstore/types.py b/src/sentry/tagstore/types.py
index f0644d560784f6..394ed2a651555d 100644
--- a/src/sentry/tagstore/types.py
+++ b/src/sentry/tagstore/types.py
@@ -11,9 +11,9 @@ class TagType:
_sort_key = None
def __repr__(self):
- return "<%s: %s>" % (
+ return "<{}: {}>".format(
type(self).__name__,
- ", ".join("%s=%r" % (name, getattr(self, name)) for name in self.__slots__),
+ ", ".join("{}={!r}".format(name, getattr(self, name)) for name in self.__slots__),
)
def __hash__(self):
diff --git a/src/sentry/tasks/files.py b/src/sentry/tasks/files.py
index 94e841e1c0f890..c1989c27da8cf5 100644
--- a/src/sentry/tasks/files.py
+++ b/src/sentry/tasks/files.py
@@ -14,7 +14,7 @@ def delete_file(path, checksum, **kwargs):
from sentry.app import locks
from sentry.utils.retries import TimedRetryPolicy
- lock = locks.get("fileblob:upload:{}".format(checksum), duration=60 * 10)
+ lock = locks.get(f"fileblob:upload:{checksum}", duration=60 * 10)
with TimedRetryPolicy(60)(lock.acquire):
if not FileBlob.objects.filter(checksum=checksum).exists():
get_storage().delete(path)
diff --git a/src/sentry/tasks/integrations.py b/src/sentry/tasks/integrations.py
index d6ba4d8c89f287..9603fa01c2f7aa 100644
--- a/src/sentry/tasks/integrations.py
+++ b/src/sentry/tasks/integrations.py
@@ -268,7 +268,7 @@ def migrate_repo(repo_id, integration_id, organization_id):
)
repo.integration_id = integration_id
- repo.provider = "integrations:%s" % (integration.provider,)
+ repo.provider = f"integrations:{integration.provider}"
# check against disabled specifically -- don't want to accidentally un-delete repos
original_status = repo.status
if repo.status == ObjectStatus.DISABLED:
diff --git a/src/sentry/tasks/members.py b/src/sentry/tasks/members.py
index 2e975eed84ccd8..3e61c59abe583a 100644
--- a/src/sentry/tasks/members.py
+++ b/src/sentry/tasks/members.py
@@ -51,7 +51,7 @@ def send_invite_request_notification_email(member_id):
)
msg = MessageBuilder(
- subject="Access request to %s" % (om.organization.name,),
+ subject=f"Access request to {om.organization.name}",
type="organization.invite-request",
context=context,
**email_args,
diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py
index cef3f8042be0cd..ed8b6ac5182316 100644
--- a/src/sentry/tasks/post_process.py
+++ b/src/sentry/tasks/post_process.py
@@ -17,7 +17,7 @@
def _get_service_hooks(project_id):
from sentry.models import ServiceHook
- cache_key = "servicehooks:1:{}".format(project_id)
+ cache_key = f"servicehooks:1:{project_id}"
result = cache.get(cache_key)
if result is None:
@@ -30,7 +30,7 @@ def _get_service_hooks(project_id):
def _should_send_error_created_hooks(project):
from sentry.models import ServiceHook, Organization
- cache_key = "servicehooks-error-created:1:{}".format(project.id)
+ cache_key = f"servicehooks-error-created:1:{project.id}"
result = cache.get(cache_key)
if result is None:
@@ -64,7 +64,7 @@ def _capture_stats(event, is_new):
metrics.incr("events.unique", tags=tags, skip_internal=False)
metrics.incr("events.processed", tags=tags, skip_internal=False)
- metrics.incr("events.processed.{platform}".format(platform=platform), skip_internal=False)
+ metrics.incr(f"events.processed.{platform}", skip_internal=False)
metrics.timing("events.size.data", event.size, tags=tags)
# This is an experiment to understand whether we have, in production,
@@ -266,11 +266,11 @@ def post_process_group(
try:
lock = locks.get(
- "w-o:{}-d-l".format(event.group_id),
+ f"w-o:{event.group_id}-d-l",
duration=10,
)
with lock.acquire():
- has_commit_key = "w-o:{}-h-c".format(event.project.organization_id)
+ has_commit_key = f"w-o:{event.project.organization_id}-h-c"
org_has_commit = cache.get(has_commit_key)
if org_has_commit is None:
org_has_commit = Commit.objects.filter(
@@ -279,7 +279,7 @@ def post_process_group(
cache.set(has_commit_key, org_has_commit, 3600)
if org_has_commit:
- group_cache_key = "w-o-i:g-{}".format(event.group_id)
+ group_cache_key = f"w-o-i:g-{event.group_id}"
if cache.get(group_cache_key):
metrics.incr(
"sentry.tasks.process_suspect_commits.debounce",
diff --git a/src/sentry/tasks/release_registry.py b/src/sentry/tasks/release_registry.py
index a79ac97dc0de89..ce8110fa2f1af6 100644
--- a/src/sentry/tasks/release_registry.py
+++ b/src/sentry/tasks/release_registry.py
@@ -26,7 +26,7 @@ def _fetch_registry_url(relative_url):
base_url = settings.SENTRY_RELEASE_REGISTRY_BASEURL.rstrip("/")
relative_url = relative_url.lstrip("/")
- full_url = "%s/%s" % (base_url, relative_url)
+ full_url = f"{base_url}/{relative_url}"
with metrics.timer(
"release_registry.fetch.duration", tags={"url": relative_url}, sample_rate=1.0
diff --git a/src/sentry/tasks/reprocessing2.py b/src/sentry/tasks/reprocessing2.py
index 2f4e39c839f2cc..f02924334f7b6e 100644
--- a/src/sentry/tasks/reprocessing2.py
+++ b/src/sentry/tasks/reprocessing2.py
@@ -125,7 +125,7 @@ def handle_remaining_events(project_id, new_group_id, event_ids, remaining_event
elif remaining_events == "keep":
eventstream.replace_group_unsafe(project_id, event_ids, new_group_id=new_group_id)
else:
- raise ValueError("Invalid value for remaining_events: {}".format(remaining_events))
+ raise ValueError(f"Invalid value for remaining_events: {remaining_events}")
@instrumented_task(
diff --git a/src/sentry/tasks/sentry_apps.py b/src/sentry/tasks/sentry_apps.py
index 83578a2b506db8..4cf36455351d54 100644
--- a/src/sentry/tasks/sentry_apps.py
+++ b/src/sentry/tasks/sentry_apps.py
@@ -78,7 +78,7 @@ def _webhook_event_data(event, group_id, project_id):
# The URL has a regex OR in it ("|") which means `reverse` cannot generate
# a valid URL (it can't know which option to pick). We have to manually
# create this URL for that reason.
- event_context["issue_url"] = absolute_uri("/api/0/issues/{}/".format(group_id))
+ event_context["issue_url"] = absolute_uri(f"/api/0/issues/{group_id}/")
return event_context
@@ -168,7 +168,7 @@ def _process_resource_change(action, sender, instance_id, retryer=None, *args, *
# we hit the max number of retries.
return retryer.retry(exc=e)
- event = "{}.{}".format(name, action)
+ event = f"{name}.{action}"
if event not in VALID_EVENTS:
return
@@ -254,7 +254,7 @@ def workflow_notification(installation_id, issue_id, type, user_id, *args, **kwa
data = kwargs.get("data", {})
data.update({"issue": serialize(issue)})
- send_webhooks(installation=install, event="issue.{}".format(type), data=data, actor=user)
+ send_webhooks(installation=install, event=f"issue.{type}", data=data, actor=user)
def notify_sentry_app(event, futures):
@@ -339,7 +339,7 @@ def send_and_save_webhook_request(sentry_app, app_platform_event, url=None):
buffer = SentryAppWebhookRequestsBuffer(sentry_app)
org_id = app_platform_event.install.organization_id
- event = "{}.{}".format(app_platform_event.resource, app_platform_event.action)
+ event = f"{app_platform_event.resource}.{app_platform_event.action}"
slug = sentry_app.slug_for_metrics
url = url or sentry_app.webhook_url
diff --git a/src/sentry/tasks/servicehooks.py b/src/sentry/tasks/servicehooks.py
index 80d1b85d402235..a4b01faa899826 100644
--- a/src/sentry/tasks/servicehooks.py
+++ b/src/sentry/tasks/servicehooks.py
@@ -15,7 +15,7 @@ def get_payload_v0(event):
group_context["url"] = group.get_absolute_url()
event_context = serialize(event)
- event_context["url"] = "{}events/{}/".format(group.get_absolute_url(), event.event_id)
+ event_context["url"] = f"{group.get_absolute_url()}events/{event.event_id}/"
data = {
"project": {"slug": project.slug, "name": project.name},
"group": group_context,
diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py
index 9830bdcc5d25a2..65b3cc9140f0c2 100644
--- a/src/sentry/tasks/store.py
+++ b/src/sentry/tasks/store.py
@@ -384,7 +384,7 @@ def retry_process_event(process_task_name, task_kwargs, **kwargs):
process_task = tasks.get(process_task_name)
if not process_task:
- raise ValueError("Invalid argument for process_task_name: %s" % (process_task_name,))
+ raise ValueError(f"Invalid argument for process_task_name: {process_task_name}")
process_task.delay(**task_kwargs)
diff --git a/src/sentry/templatetags/sentry_assets.py b/src/sentry/templatetags/sentry_assets.py
index 7dc28a2536f6c1..b8a8f456750797 100644
--- a/src/sentry/templatetags/sentry_assets.py
+++ b/src/sentry/templatetags/sentry_assets.py
@@ -58,10 +58,10 @@ def locale_js_include(context):
nonce = ""
if hasattr(request, "csp_nonce"):
- nonce = ' nonce="{}"'.format(request.csp_nonce)
+ nonce = f' nonce="{request.csp_nonce}"'
href = get_asset_url("sentry", "dist/locale/" + lang_code + ".js")
- return mark_safe('<script src="{}"{}{}></script>'.format(href, crossorigin(), nonce))
+ return mark_safe(f'<script src="{href}"{crossorigin()}{nonce}></script>')
@register.tag
@@ -83,7 +83,7 @@ def script(parser, token):
return ScriptNode(nodelist, **kwargs)
except ValueError as err:
- raise template.TemplateSyntaxError("`script` tag failed to compile. : {}".format(err))
+ raise template.TemplateSyntaxError(f"`script` tag failed to compile. : {err}")
class ScriptNode(template.Node):
@@ -108,18 +108,18 @@ def render(self, context):
if "src" not in self.attrs:
content = self.nodelist.render(context).strip()
content = self._unwrap_content(content)
- return "<script{}>{}</script>".format(attrs, content)
+ return f"<script{attrs}>{content}</script>"
def _render_attrs(self, context):
output = []
for k, v in self.attrs.items():
value = self._get_value(v, context)
if value in (True, "True"):
- output.append(" {}".format(k))
+ output.append(f" {k}")
elif value in (None, False, "False"):
continue
else:
- output.append(' {}="{}"'.format(k, value))
+ output.append(f' {k}="{value}"')
output = sorted(output)
return "".join(output)
diff --git a/src/sentry/templatetags/sentry_helpers.py b/src/sentry/templatetags/sentry_helpers.py
index 3fe53052bd3027..1c78a63292eb6f 100644
--- a/src/sentry/templatetags/sentry_helpers.py
+++ b/src/sentry/templatetags/sentry_helpers.py
@@ -158,7 +158,7 @@ def small_count(v, precision=1):
if o:
if len(str(o)) > 2 or not p:
return "%d%s" % (o, y)
- return ("%.{}f%s".format(precision)) % (v / float(x), y)
+ return (f"%.{precision}f%s") % (v / float(x), y)
return v
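
Two conversions above deserve a closer look. In `small_count`, the f-string builds a `%`-format template at runtime, so formatting happens in two stages; and in `get_histogram_column`, the `{...:d}` specs carry over into the f-string unchanged. A runnable sketch with illustrative values:

precision = 1
template = f"%.{precision}f%s"                    # stage 1: build the template -> "%.1f%s"
assert template % (1234 / 1000.0, "k") == "1.2k"  # stage 2: fill it in

bucket_size = 10
assert f"histogram(x, {bucket_size:d})" == "histogram(x, 10)"  # :d works inside f-strings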
diff --git a/src/sentry/tsdb/redis.py b/src/sentry/tsdb/redis.py
index fd30325e129ff3..f280b3b16a98ef 100644
--- a/src/sentry/tsdb/redis.py
+++ b/src/sentry/tsdb/redis.py
@@ -139,7 +139,7 @@ def get_cluster_groups(self, environment_ids):
def add_environment_parameter(self, key, environment_id):
if environment_id is not None:
- return "{}?e={}".format(key, environment_id)
+ return f"{key}?e={environment_id}"
else:
return key
@@ -473,7 +473,7 @@ def get_distinct_counts_union(
temporary_id = uuid.uuid1().hex
def make_temporary_key(key):
- return "{}{}:{}".format(self.prefix, temporary_id, key)
+ return f"{self.prefix}{temporary_id}:{key}"
def expand_key(key):
"""
@@ -499,7 +499,7 @@ def get_partition_aggregate(value):
results from merging all HyperLogLogs at the provided keys.
"""
(host, keys) = value
- destination = make_temporary_key("p:{}".format(host))
+ destination = make_temporary_key(f"p:{host}")
client = cluster.get_local_client(host)
with client.pipeline(transaction=False) as pipeline:
pipeline.execute_command(
@@ -514,7 +514,7 @@ def merge_aggregates(values):
Calculate the cardinality of the provided HyperLogLog values.
"""
destination = make_temporary_key("a") # all values will be merged into this key
- aggregates = {make_temporary_key("a:{}".format(host)): value for host, value in values}
+ aggregates = {make_temporary_key(f"a:{host}"): value for host, value in values}
# Choose a random host to execute the reduction on. (We use a host
# here that we've already accessed as part of this process -- this
@@ -558,7 +558,7 @@ def merge_distinct_counts(
temporary_id = uuid.uuid1().hex
def make_temporary_key(key):
- return "{}{}:{}".format(self.prefix, temporary_id, key)
+ return f"{self.prefix}{temporary_id}:{key}"
data = {}
for rollup, series in rollups.items():
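
The docstrings above describe merging HyperLogLogs and counting their cardinality; under the hood this rests on Redis's PFMERGE/PFCOUNT primitives, which the code drives through per-host pipelines. A minimal standalone sketch of the primitive (assumes a local Redis; key names are hypothetical):

import redis

r = redis.Redis()
r.pfadd("hll:a", "u1", "u2")
r.pfadd("hll:b", "u2", "u3")
r.pfmerge("tmp:union", "hll:a", "hll:b")  # merge partials into a temporary key
print(r.pfcount("tmp:union"))             # ~3 distinct members (HLL is approximate)
r.delete("tmp:union")                     # temporary keys are cleaned up afterwards, as above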
diff --git a/src/sentry/tsdb/snuba.py b/src/sentry/tsdb/snuba.py
index 474f95b2678d99..82ae8120696eb0 100644
--- a/src/sentry/tsdb/snuba.py
+++ b/src/sentry/tsdb/snuba.py
@@ -240,7 +240,7 @@ def get_data(
model_query_settings = self.model_query_settings.get(model)
if model_query_settings is None:
- raise Exception("Unsupported TSDBModel: {}".format(model.name))
+ raise Exception(f"Unsupported TSDBModel: {model.name}")
model_group = model_query_settings.groupby
model_aggregate = model_query_settings.aggregate
@@ -296,7 +296,7 @@ def get_data(
rollup=rollup,
limit=limit,
orderby=orderby,
- referrer="tsdb-modelid:{}".format(model.value),
+ referrer=f"tsdb-modelid:{model.value}",
is_grouprelease=(model == TSDBModel.frequent_releases_by_group),
)
else:
@@ -356,7 +356,7 @@ def get_range(
else:
model_query_settings = self.model_query_settings.get(model)
- assert model_query_settings is not None, "Unsupported TSDBModel: {}".format(model.name)
+ assert model_query_settings is not None, f"Unsupported TSDBModel: {model.name}"
if model_query_settings.dataset == snuba.Dataset.Outcomes:
aggregate_function = "sum"
@@ -429,7 +429,7 @@ def get_distinct_counts_union(
def get_most_frequent(
self, model, keys, start, end=None, rollup=None, limit=10, environment_id=None
):
- aggregation = "topK({})".format(limit)
+ aggregation = f"topK({limit})"
result = self.get_data(
model,
keys,
@@ -452,7 +452,7 @@ def get_most_frequent(
def get_most_frequent_series(
self, model, keys, start, end=None, rollup=None, limit=10, environment_id=None
):
- aggregation = "topK({})".format(limit)
+ aggregation = f"topK({limit})"
result = self.get_data(
model,
keys,
diff --git a/src/sentry/web/client_config.py b/src/sentry/web/client_config.py
index 77f04dd3939d68..103dc6b23cdc4b 100644
--- a/src/sentry/web/client_config.py
+++ b/src/sentry/web/client_config.py
@@ -81,7 +81,7 @@ def _get_public_dsn():
return settings.SENTRY_FRONTEND_DSN
project_id = settings.SENTRY_FRONTEND_PROJECT or settings.SENTRY_PROJECT
- cache_key = "dsn:%s" % (project_id,)
+ cache_key = f"dsn:{project_id}"
result = cache.get(cache_key)
if result is None:
diff --git a/src/sentry/web/forms/accounts.py b/src/sentry/web/forms/accounts.py
index fd022d7b2c9e5b..b64ce3dd7e190c 100644
--- a/src/sentry/web/forms/accounts.py
+++ b/src/sentry/web/forms/accounts.py
@@ -20,7 +20,7 @@ def _get_timezone_choices():
for tz in pytz.common_timezones:
now = datetime.now(pytz.timezone(tz))
offset = now.strftime("%z")
- results.append((int(offset), tz, "(UTC%s) %s" % (offset, tz)))
+ results.append((int(offset), tz, f"(UTC{offset}) {tz}"))
results.sort()
for i in range(len(results)):
@@ -95,7 +95,7 @@ def _is_ip_rate_limited(self):
return False
ip_address = self.request.META["REMOTE_ADDR"]
- return ratelimiter.is_limited("auth:ip:{}".format(ip_address), limit)
+ return ratelimiter.is_limited(f"auth:ip:{ip_address}", limit)
def _is_user_rate_limited(self):
limit = options.get("auth.user-rate-limit")
@@ -106,7 +106,7 @@ def _is_user_rate_limited(self):
if not username:
return False
- return ratelimiter.is_limited("auth:username:{}".format(username), limit)
+ return ratelimiter.is_limited(f"auth:username:{username}", limit)
def clean(self):
username = self.cleaned_data.get("username")
diff --git a/src/sentry/web/frontend/accounts.py b/src/sentry/web/frontend/accounts.py
index 8210501607c40f..ae90c7b7508d9d 100644
--- a/src/sentry/web/frontend/accounts.py
+++ b/src/sentry/web/frontend/accounts.py
@@ -24,7 +24,7 @@
def get_template(mode, name):
- return "sentry/account/{}/{}.html".format(mode, name)
+ return f"sentry/account/{mode}/{name}.html"
@login_required
@@ -146,7 +146,7 @@ def start_confirm_email(request):
from sentry.app import ratelimiter
if ratelimiter.is_limited(
- "auth:confirm-email:{}".format(request.user.id),
+ f"auth:confirm-email:{request.user.id}",
limit=10,
window=60, # 10 per minute should be enough for anyone
):
diff --git a/src/sentry/web/frontend/debug/debug_new_processing_issues_email.py b/src/sentry/web/frontend/debug/debug_new_processing_issues_email.py
index 38d1f243bf8a36..f88b7307f2764f 100644
--- a/src/sentry/web/frontend/debug/debug_new_processing_issues_email.py
+++ b/src/sentry/web/frontend/debug/debug_new_processing_issues_email.py
@@ -48,7 +48,7 @@ def get(self, request):
),
"reprocessing_active": self.reprocessing_active,
"info_url": absolute_uri(
- "/settings/{}/projects/{}/processing-issues/".format(org.slug, project.slug)
+ f"/settings/{org.slug}/projects/{project.slug}/processing-issues/"
),
},
).render(request)
diff --git a/src/sentry/web/frontend/debug/debug_new_release_email.py b/src/sentry/web/frontend/debug/debug_new_release_email.py
index 469974e3c2857e..c5efc9c79b2f4f 100644
--- a/src/sentry/web/frontend/debug/debug_new_release_email.py
+++ b/src/sentry/web/frontend/debug/debug_new_release_email.py
@@ -41,9 +41,7 @@ def get(self, request):
)
release_links = [
- absolute_uri(
- "/organizations/{}/releases/{}/?project={}".format(org.slug, release.version, p.id)
- )
+ absolute_uri(f"/organizations/{org.slug}/releases/{release.version}/?project={p.id}")
for p in projects
]
@@ -115,6 +113,6 @@ def get(self, request):
"file_count": 5,
"environment": "production",
"deploy": deploy,
- "setup_repo_link": absolute_uri("/organizations/{}/repos/".format(org.slug)),
+ "setup_repo_link": absolute_uri(f"/organizations/{org.slug}/repos/"),
},
).render(request)
diff --git a/src/sentry/web/frontend/debug/debug_new_user_feedback_email.py b/src/sentry/web/frontend/debug/debug_new_user_feedback_email.py
index 0bdc0cebb529ba..2ba894c6ca4ecc 100644
--- a/src/sentry/web/frontend/debug/debug_new_user_feedback_email.py
+++ b/src/sentry/web/frontend/debug/debug_new_user_feedback_email.py
@@ -19,7 +19,7 @@ def get(self, request):
group = event.group
link = absolute_uri(
- "/{}/{}/issues/{}/feedback/".format(project.organization.slug, project.slug, group.id)
+ f"/{project.organization.slug}/{project.slug}/issues/{group.id}/feedback/"
)
return MailPreview(
diff --git a/src/sentry/web/frontend/debug/mail.py b/src/sentry/web/frontend/debug/mail.py
index b01d044d3b24a3..47277f7f62b8ce 100644
--- a/src/sentry/web/frontend/debug/mail.py
+++ b/src/sentry/web/frontend/debug/mail.py
@@ -308,8 +308,7 @@ def digest(request):
project = Project(id=1, slug="example", name="Example Project", organization=org)
rules = {
- i: Rule(id=i, project=project, label="Rule #%s" % (i,))
- for i in range(1, random.randint(2, 4))
+ i: Rule(id=i, project=project, label=f"Rule #{i}") for i in range(1, random.randint(2, 4))
}
state = {
diff --git a/src/sentry/web/frontend/generic.py b/src/sentry/web/frontend/generic.py
index 338d3655293655..50912758a520df 100644
--- a/src/sentry/web/frontend/generic.py
+++ b/src/sentry/web/frontend/generic.py
@@ -42,7 +42,7 @@ def static_media(request, **kwargs):
version = kwargs.get("version")
if module:
- path = "%s/%s" % (module, path)
+ path = f"{module}/{path}"
try:
document_root, path = resolve(path)
diff --git a/src/sentry/web/frontend/group_plugin_action.py b/src/sentry/web/frontend/group_plugin_action.py
index 8dea77b7dc1f89..05165054ea46fe 100644
--- a/src/sentry/web/frontend/group_plugin_action.py
+++ b/src/sentry/web/frontend/group_plugin_action.py
@@ -26,5 +26,5 @@ def handle(self, request, organization, project, group_id, slug):
redirect = request.META.get("HTTP_REFERER", "")
if not is_safe_url(redirect, host=request.get_host()):
- redirect = "/{}/{}/".format(organization.slug, group.project.slug)
+ redirect = f"/{organization.slug}/{group.project.slug}/"
return HttpResponseRedirect(redirect)
diff --git a/src/sentry/web/frontend/integration_extension_configuration.py b/src/sentry/web/frontend/integration_extension_configuration.py
index 4322176952a112..a7c08aef9d407c 100644
--- a/src/sentry/web/frontend/integration_extension_configuration.py
+++ b/src/sentry/web/frontend/integration_extension_configuration.py
@@ -20,11 +20,8 @@ def _dialog_success(self, _org_integration):
if "next" in self.request.GET:
param_string = "?%s" % urlencode({"next": self.request.GET["next"]})
- redirect_uri = "/settings/%s/integrations/%s/%s/%s" % (
- org_slug,
- provider,
- integration_id,
- param_string,
+ redirect_uri = (
+ f"/settings/{org_slug}/integrations/{provider}/{integration_id}/{param_string}"
)
return HttpResponseRedirect(redirect_uri)
@@ -98,9 +95,7 @@ def is_enabled_for_org(self, _org, _user):
def has_one_required_feature(self, org, user):
provider = integrations.get(self.provider)
- integration_features = [
- "organizations:integrations-{}".format(f.value) for f in provider.features
- ]
+ integration_features = [f"organizations:integrations-{f.value}" for f in provider.features]
for flag_name in integration_features:
try:
if features.has(flag_name, org, actor=user):
diff --git a/src/sentry/web/frontend/js_sdk_loader.py b/src/sentry/web/frontend/js_sdk_loader.py
index 504a6cda9f0b6b..794ec98695389f 100644
--- a/src/sentry/web/frontend/js_sdk_loader.py
+++ b/src/sentry/web/frontend/js_sdk_loader.py
@@ -73,10 +73,7 @@ def get(self, request, public_key, minified):
response["Access-Control-Allow-Origin"] = "*"
response["Cache-Control"] = CACHE_CONTROL
if sdk_version and key:
- response["Surrogate-Key"] = "project/%s sdk/%s sdk-loader" % (
- key.project_id,
- sdk_version,
- )
+ response["Surrogate-Key"] = f"project/{key.project_id} sdk/{sdk_version} sdk-loader"
ms = int((time.time() - start_time) * 1000)
metrics.timing("js-sdk-loader.duration", ms, instance=instance)
diff --git a/src/sentry/web/frontend/mailgun_inbound_webhook.py b/src/sentry/web/frontend/mailgun_inbound_webhook.py
index 5c429a456662b4..60691831e3b52c 100644
--- a/src/sentry/web/frontend/mailgun_inbound_webhook.py
+++ b/src/sentry/web/frontend/mailgun_inbound_webhook.py
@@ -22,7 +22,7 @@ def verify(self, api_key, token, timestamp, signature):
signature,
hmac.new(
key=api_key.encode("utf-8"),
- msg=("{}{}".format(timestamp, token)).encode("utf-8"),
+ msg=(f"{timestamp}{token}").encode("utf-8"),
digestmod=sha256,
).hexdigest(),
)
diff --git a/src/sentry/web/frontend/mixins/csv.py b/src/sentry/web/frontend/mixins/csv.py
index 02ed09120da0de..e57ca8381b01e6 100644
--- a/src/sentry/web/frontend/mixins/csv.py
+++ b/src/sentry/web/frontend/mixins/csv.py
@@ -30,5 +30,5 @@ def row_iter():
response = StreamingHttpResponse(
(writer.writerow(r) for r in row_iter()), content_type="text/csv"
)
- response["Content-Disposition"] = 'attachment; filename="{}.csv"'.format(filename)
+ response["Content-Disposition"] = f'attachment; filename="{filename}.csv"'
return response
diff --git a/src/sentry/web/frontend/oauth_authorize.py b/src/sentry/web/frontend/oauth_authorize.py
index d888cec44137ee..ee7946aec0ca39 100644
--- a/src/sentry/web/frontend/oauth_authorize.py
+++ b/src/sentry/web/frontend/oauth_authorize.py
@@ -57,18 +57,14 @@ def error(
if err_response:
return self.respond(
"sentry/oauth-error.html",
- {
- "error": mark_safe(
- "Missing or invalid <em>{}</em> parameter.".format(err_response)
- )
- },
+ {"error": mark_safe(f"Missing or invalid <em>{err_response}</em> parameter.")},
status=400,
)
return self.redirect_response(response_type, redirect_uri, {"error": name, "state": state})
def respond_login(self, request, context, application, **kwargs):
- context["banner"] = "Connect Sentry to {}".format(application.name)
+ context["banner"] = f"Connect Sentry to {application.name}"
return self.respond("sentry/login.html", context)
def get(self, request, **kwargs):
@@ -197,9 +193,7 @@ def get(self, request, **kwargs):
pending_scopes.remove(scope)
if pending_scopes:
- raise NotImplementedError(
- "{} scopes did not have descriptions".format(pending_scopes)
- )
+ raise NotImplementedError(f"{pending_scopes} scopes did not have descriptions")
context = {
"user": request.user,
diff --git a/src/sentry/web/frontend/organization_auth_settings.py b/src/sentry/web/frontend/organization_auth_settings.py
index 5cee852653eedc..f237b6011f83d2 100644
--- a/src/sentry/web/frontend/organization_auth_settings.py
+++ b/src/sentry/web/frontend/organization_auth_settings.py
@@ -77,7 +77,7 @@ def handle_existing_provider(self, request, organization, auth_provider):
messages.add_message(request, messages.SUCCESS, OK_PROVIDER_DISABLED)
- next_uri = "/settings/{}/auth/".format(organization.slug)
+ next_uri = f"/settings/{organization.slug}/auth/"
return self.redirect(next_uri)
elif op == "reinvite":
email_missing_links.delay(organization.id, request.user.id, provider.key)
@@ -108,7 +108,7 @@ def handle_existing_provider(self, request, organization, auth_provider):
changed_data = {}
for key, value in form.cleaned_data.items():
if form.initial.get(key) != value:
- changed_data[key] = "to {}".format(value)
+ changed_data[key] = f"to {value}"
self.create_audit_entry(
request,
@@ -178,7 +178,7 @@ def handle(self, request, organization):
if request.method == "POST":
provider_key = request.POST.get("provider")
if not manager.exists(provider_key):
- raise ValueError("Provider not found: {}".format(provider_key))
+ raise ValueError(f"Provider not found: {provider_key}")
helper = AuthHelper(
request=request,
diff --git a/src/sentry/web/frontend/release_webhook.py b/src/sentry/web/frontend/release_webhook.py
index 4031154453a0e7..cb437e1b8fbee6 100644
--- a/src/sentry/web/frontend/release_webhook.py
+++ b/src/sentry/web/frontend/release_webhook.py
@@ -23,7 +23,7 @@ def verify(self, plugin_id, project_id, token, signature):
signature,
hmac.new(
key=token.encode("utf-8"),
- msg=("{}-{}".format(plugin_id, project_id)).encode("utf-8"),
+ msg=(f"{plugin_id}-{project_id}").encode("utf-8"),
digestmod=sha256,
).hexdigest(),
)
@@ -33,7 +33,7 @@ def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
def _handle_builtin(self, request, project):
- endpoint = "/projects/{}/{}/releases/".format(project.organization.slug, project.slug)
+ endpoint = f"/projects/{project.organization.slug}/{project.slug}/releases/"
try:
data = json.loads(request.body)
diff --git a/src/sentry/web/frontend/restore_organization.py b/src/sentry/web/frontend/restore_organization.py
index 55e747c2bd493c..219e2a1cd73afd 100644
--- a/src/sentry/web/frontend/restore_organization.py
+++ b/src/sentry/web/frontend/restore_organization.py
@@ -63,7 +63,7 @@ def post(self, request, organization):
).update(status=OrganizationStatus.VISIBLE)
if updated:
client.put(
- "/organizations/{}/".format(organization.slug),
+ f"/organizations/{organization.slug}/",
data={"cancelDeletion": True},
request=request,
)
diff --git a/src/sentry/web/frontend/setup_wizard.py b/src/sentry/web/frontend/setup_wizard.py
index 3693054cc53e5b..71233c66cdd3d6 100644
--- a/src/sentry/web/frontend/setup_wizard.py
+++ b/src/sentry/web/frontend/setup_wizard.py
@@ -24,7 +24,7 @@ def get(self, request, wizard_hash):
Redirects to organization whenever cache has been deleted
"""
context = {"hash": wizard_hash}
- key = "%s%s" % (SETUP_WIZARD_CACHE_KEY, wizard_hash)
+ key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
wizard_data = default_cache.get(key)
if wizard_data is None:
@@ -76,7 +76,7 @@ def get(self, request, wizard_hash):
result = {"apiKeys": serialize(token), "projects": filled_projects}
- key = "%s%s" % (SETUP_WIZARD_CACHE_KEY, wizard_hash)
+ key = f"{SETUP_WIZARD_CACHE_KEY}{wizard_hash}"
default_cache.set(key, result, SETUP_WIZARD_CACHE_TIMEOUT)
return render_to_response("sentry/setup-wizard.html", context, request)
diff --git a/src/sentry/web/frontend/twofactor.py b/src/sentry/web/frontend/twofactor.py
index d11cd4573666ae..8bbe6256912855 100644
--- a/src/sentry/web/frontend/twofactor.py
+++ b/src/sentry/web/frontend/twofactor.py
@@ -115,7 +115,7 @@ def handle(self, request):
interface = self.negotiate_interface(request, interfaces)
if request.method == "POST" and ratelimiter.is_limited(
- "auth-2fa:user:{}".format(user.id), limit=5, window=60
+ f"auth-2fa:user:{user.id}", limit=5, window=60
):
# TODO: Maybe email the account owner or do something to notify someone
# This would probably be good for them to know.
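
The release_webhook hunk above converts the HMAC message template to an f-string without changing the verification scheme itself. For reference, a minimal sketch of that scheme in isolation: verify_release_signature is an illustrative name, and the constant-time comparison is an assumption, since the comparing call sits just above the visible hunk.

import hmac
from hashlib import sha256


def verify_release_signature(plugin_id, project_id, token, signature):
    """Recompute the webhook signature and compare it in constant time.

    Sketch of the scheme in release_webhook.py: the signed message is
    "<plugin_id>-<project_id>" and the per-project token is the HMAC key.
    """
    expected = hmac.new(
        key=token.encode("utf-8"),
        msg=f"{plugin_id}-{project_id}".encode("utf-8"),
        digestmod=sha256,
    ).hexdigest()
    # compare_digest avoids leaking the length of the matching prefix via timing.
    return hmac.compare_digest(expected, signature)
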
|
84af10963e18567f30866eace92a2c074983136a
|
2023-10-24 23:31:37
|
Matt Quinn
|
fix(statistical-detectors): use DURATION_LIGHT metric for timeseries query (#58705)
| false
|
use DURATION_LIGHT metric for timeseries query (#58705)
|
fix
|
diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py
index d2a4a8d3c69014..a05c8f9c7e7f1a 100644
--- a/src/sentry/tasks/statistical_detectors.py
+++ b/src/sentry/tasks/statistical_detectors.py
@@ -378,8 +378,11 @@ def query_transactions_timeseries(
project_ids = {p for p, _ in transaction_chunk}
project_objects = Project.objects.filter(id__in=project_ids)
org_ids = list({project.organization_id for project in project_objects})
+ # The only tag available on DURATION_LIGHT is `transaction`: as long as
+ # we don't filter on any other tags, DURATION_LIGHT's lower cardinality
+ # will be faster to query.
duration_metric_id = indexer.resolve(
- use_case_id, org_ids[0], str(TransactionMRI.DURATION.value)
+ use_case_id, org_ids[0], str(TransactionMRI.DURATION_LIGHT.value)
)
transaction_name_metric_id = indexer.resolve(
use_case_id,
@@ -863,6 +866,9 @@ def query_transactions(
# both the metric and tag that we are using are hardcoded values in sentry_metrics.indexer.strings
# so the org_id that we are using does not actually matter here, we only need to pass in an org_id
+ #
+ # Because we filter on more than just `transaction`, we have to use DURATION here instead of
+ # DURATION_LIGHT.
duration_metric_id = indexer.resolve(
use_case_id, org_ids[0], str(TransactionMRI.DURATION.value)
)
diff --git a/tests/sentry/tasks/test_statistical_detectors.py b/tests/sentry/tasks/test_statistical_detectors.py
index 0345ee9e2ddb69..c42adde093142e 100644
--- a/tests/sentry/tasks/test_statistical_detectors.py
+++ b/tests/sentry/tasks/test_statistical_detectors.py
@@ -580,7 +580,7 @@ def store_metric(project_id, transaction, minutes_ago, value):
self.org.id,
project_id,
"distribution",
- TransactionMRI.DURATION.value,
+ TransactionMRI.DURATION_LIGHT.value,
{"transaction": transaction},
int((self.now - timedelta(minutes=minutes_ago)).timestamp()),
value,
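
The comments in this commit reduce to a simple selection rule: DURATION_LIGHT carries only the transaction tag, so it is safe exactly when no other tag is filtered or grouped on. A minimal sketch of that rule follows; pick_duration_mri is a hypothetical helper and the MRI strings are illustrative rather than quoted from this diff.

DURATION = "d:transactions/duration@millisecond"
DURATION_LIGHT = "d:transactions/duration_light@millisecond"


def pick_duration_mri(tags_in_query):
    """Prefer the lower-cardinality metric when only `transaction` is used.

    Any filter or group-by on another tag forces the full DURATION metric,
    which is why query_transactions() above keeps using DURATION.
    """
    if set(tags_in_query) <= {"transaction"}:
        return DURATION_LIGHT
    return DURATION


assert pick_duration_mri(["transaction"]) == DURATION_LIGHT
assert pick_duration_mri(["transaction", "http.status_code"]) == DURATION
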
|
efc34b49fcbb246810856da04417940e4a68a177
|
2017-10-31 01:56:16
|
Matt Robenolt
|
fix(slack): str.format misuse
| false
|
str.format misuse
|
fix
|
diff --git a/src/sentry/integrations/slack/event_endpoint.py b/src/sentry/integrations/slack/event_endpoint.py
index 56811e55b0a65c..ee03d40908ae2c 100644
--- a/src/sentry/integrations/slack/event_endpoint.py
+++ b/src/sentry/integrations/slack/event_endpoint.py
@@ -41,7 +41,7 @@ def _parse_issue_id_from_url(self, link):
def _attachment_for(self, group):
return {
- 'fallback': '[{}] {}'.format(group.project.slug, group.title),
+ 'fallback': u'[{}] {}'.format(group.project.slug, group.title),
'title': group.title,
'title_link': self._add_notification_referrer_param(group.get_absolute_url()),
}
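
The added u prefix only matters under Python 2, which this code targeted at the time: str.format on a byte-string template coerces unicode arguments to ASCII, so any non-ASCII group title would raise before the Slack attachment was built. A sketch of the failure mode, with made-up values:

# -*- coding: utf-8 -*-
# Python 2 semantics; under Python 3 both templates behave identically.
project_slug = 'internal'            # slugs are ASCII-safe
group_title = u'Ошибка соединения'   # issue titles are unicode, often non-ASCII

# Byte-string template: the unicode argument is implicitly ASCII-encoded.
# '[{}] {}'.format(project_slug, group_title)
#   -> UnicodeEncodeError: 'ascii' codec can't encode characters ...

# Unicode template: the result stays unicode and nothing is coerced.
fallback = u'[{}] {}'.format(project_slug, group_title)
assert fallback == u'[internal] Ошибка соединения'
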
|
bd744109df0bfeb5579ac4f52c75601c4de2bf3c
|
2022-04-12 17:46:46
|
Priscila Oliveira
|
ref(new-widget-builder-experience): Add support for derived metrics (#33458)
| false
|
Add support for derived metrics (#33458)
|
ref
|
diff --git a/static/app/components/dashboards/widgetQueriesForm.tsx b/static/app/components/dashboards/widgetQueriesForm.tsx
index 0d4123518081bf..ca6c2c217adf5b 100644
--- a/static/app/components/dashboards/widgetQueriesForm.tsx
+++ b/static/app/components/dashboards/widgetQueriesForm.tsx
@@ -261,9 +261,11 @@ class WidgetQueriesForm extends React.Component<Props> {
onChange={fields => {
const {aggregates, columns} = getColumnsAndAggregatesAsStrings(fields);
const fieldStrings = fields.map(field => generateFieldAsString(field));
+
const aggregateAliasFieldStrings = isMetrics
? fieldStrings
: fieldStrings.map(field => getAggregateAlias(field));
+
queries.forEach((widgetQuery, queryIndex) => {
const newQuery = cloneDeep(widgetQuery);
newQuery.fields = fieldStrings;
diff --git a/static/app/components/modals/addDashboardWidgetModal.tsx b/static/app/components/modals/addDashboardWidgetModal.tsx
index 7d873bec57a166..c1095b9d5b46a5 100644
--- a/static/app/components/modals/addDashboardWidgetModal.tsx
+++ b/static/app/components/modals/addDashboardWidgetModal.tsx
@@ -54,7 +54,7 @@ import {
WidgetType,
} from 'sentry/views/dashboardsV2/types';
import {generateIssueWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/issueWidget/utils';
-import {generateMetricsWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/metricWidget/fields';
+import {generateReleaseWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/releaseWidget/fields';
import {
getMetricFields,
mapErrors,
@@ -613,7 +613,7 @@ class AddDashboardWidgetModal extends React.Component<Props, State> {
renderWidgetQueryForm(
querySelection: PageFilters,
- metricsWidgetFieldOptions: ReturnType<typeof generateMetricsWidgetFieldOptions>
+ releaseWidgetFieldOptions: ReturnType<typeof generateReleaseWidgetFieldOptions>
) {
const {organization, tags} = this.props;
const state = this.state;
@@ -671,7 +671,7 @@ class AddDashboardWidgetModal extends React.Component<Props, State> {
widgetType={state.widgetType}
queries={state.queries}
errors={errors?.queries}
- fieldOptions={metricsWidgetFieldOptions}
+ fieldOptions={releaseWidgetFieldOptions}
onChange={(queryIndex: number, widgetQuery: WidgetQuery) =>
this.handleQueryChange(widgetQuery, queryIndex)
}
@@ -885,7 +885,7 @@ class AddDashboardWidgetModal extends React.Component<Props, State> {
skipLoad={!organization.features.includes('dashboards-metrics')}
>
{({metas: metricsMeta, tags: metricsTags}) => {
- const metricsWidgetFieldOptions = generateMetricsWidgetFieldOptions(
+ const metricsWidgetFieldOptions = generateReleaseWidgetFieldOptions(
Object.values(metricsMeta),
Object.values(metricsTags).map(({key}) => key)
);
diff --git a/static/app/utils/discover/fields.tsx b/static/app/utils/discover/fields.tsx
index cfb17a79dc6210..1ff89a1cfcdf1b 100644
--- a/static/app/utils/discover/fields.tsx
+++ b/static/app/utils/discover/fields.tsx
@@ -906,6 +906,7 @@ export function isLegalEquationColumn(column: Column): boolean {
if (column.kind === 'function' && column.function[0] === 'any') {
return false;
}
+
const columnType = getColumnType(column);
return columnType === 'number' || columnType === 'integer' || columnType === 'duration';
}
@@ -955,7 +956,7 @@ export function explodeFieldString(field: string, alias?: string): Column {
}
if (isDerivedMetric(field)) {
- return {kind: 'calculatedField', field: stripDerivedMetricsPrefix(field)};
+ return {kind: 'calculatedField', field: stripDerivedMetricsPrefix(field), alias};
}
const results = parseFunction(field);
diff --git a/static/app/views/dashboardsV2/widgetBuilder/buildSteps/columnsStep/releaseColumnFields.tsx b/static/app/views/dashboardsV2/widgetBuilder/buildSteps/columnsStep/releaseColumnFields.tsx
index 1ad6536be24f17..bf6f9d5991bcad 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/buildSteps/columnsStep/releaseColumnFields.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/buildSteps/columnsStep/releaseColumnFields.tsx
@@ -1,15 +1,10 @@
import {t} from 'sentry/locale';
import {Organization} from 'sentry/types';
-import {
- aggregateFunctionOutputType,
- isLegalYAxisType,
- QueryFieldValue,
-} from 'sentry/utils/discover/fields';
+import {QueryFieldValue} from 'sentry/utils/discover/fields';
import {useMetricsContext} from 'sentry/utils/useMetricsContext';
import {DisplayType, WidgetType} from 'sentry/views/dashboardsV2/types';
-import {generateMetricsWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/metricWidget/fields';
-import {FieldValueOption} from 'sentry/views/eventsV2/table/queryField';
-import {FieldValueKind} from 'sentry/views/eventsV2/table/types';
+import {generateReleaseWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/releaseWidget/fields';
+import {filterPrimaryOptions} from 'sentry/views/dashboardsV2/widgetBuilder/utils';
import {ColumnFields} from './columnFields';
@@ -31,32 +26,6 @@ export function ReleaseColumnFields({
onYAxisOrColumnFieldChange,
}: Props) {
const {metas, tags} = useMetricsContext();
- // Any function/field choice for Big Number widgets is legal since the
- // data source is from an endpoint that is not timeseries-based.
- // The function/field choice for World Map widget will need to be numeric-like.
- // Column builder for Table widget is already handled above.
- const doNotValidateYAxis = displayType === DisplayType.BIG_NUMBER;
-
- function filterPrimaryOptions(option: FieldValueOption) {
- if (displayType === DisplayType.TABLE) {
- return [FieldValueKind.FUNCTION, FieldValueKind.TAG].includes(option.value.kind);
- }
-
- // Only validate function names for timeseries widgets and
- // world map widgets.
- if (!doNotValidateYAxis && option.value.kind === FieldValueKind.FUNCTION) {
- const primaryOutput = aggregateFunctionOutputType(
- option.value.meta.name,
- undefined
- );
- if (primaryOutput) {
- // If a function returns a specific type, then validate it.
- return isLegalYAxisType(primaryOutput);
- }
- }
-
- return option.value.kind === FieldValueKind.FUNCTION;
- }
return (
<ColumnFields
@@ -65,11 +34,17 @@ export function ReleaseColumnFields({
widgetType={widgetType}
fields={explodedFields}
errors={queryErrors?.[0] ? [queryErrors?.[0]] : undefined}
- fieldOptions={generateMetricsWidgetFieldOptions(
+ fieldOptions={generateReleaseWidgetFieldOptions(
Object.values(metas),
Object.values(tags).map(({key}) => key)
)}
- filterPrimaryOptions={filterPrimaryOptions}
+ filterPrimaryOptions={option =>
+ filterPrimaryOptions({
+ option,
+ widgetType,
+ displayType,
+ })
+ }
onChange={onYAxisOrColumnFieldChange}
noFieldsMessage={t('There are no metrics for this project.')}
/>
diff --git a/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/releaseYAxisSelector.tsx b/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/releaseYAxisSelector.tsx
index d348098b651213..4e892c98810760 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/releaseYAxisSelector.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/releaseYAxisSelector.tsx
@@ -2,7 +2,7 @@ import {t} from 'sentry/locale';
import {QueryFieldValue} from 'sentry/utils/discover/fields';
import {useMetricsContext} from 'sentry/utils/useMetricsContext';
import {DisplayType, WidgetType} from 'sentry/views/dashboardsV2/types';
-import {generateMetricsWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/metricWidget/fields';
+import {generateReleaseWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/releaseWidget/fields';
import {YAxisSelector} from './yAxisSelector';
@@ -30,7 +30,7 @@ export function ReleaseYAxisSelector({
aggregates={aggregates}
onChange={onChange}
errors={errors}
- fieldOptions={generateMetricsWidgetFieldOptions(
+ fieldOptions={generateReleaseWidgetFieldOptions(
Object.values(metas),
Object.values(tags).map(({key}) => key)
)}
diff --git a/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/yAxisSelector/index.tsx b/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/yAxisSelector/index.tsx
index 561db8fb301b84..6bd5c3b5df023d 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/yAxisSelector/index.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/buildSteps/yAxisStep/yAxisSelector/index.tsx
@@ -11,6 +11,10 @@ import {
} from 'sentry/utils/discover/fields';
import useOrganization from 'sentry/utils/useOrganization';
import {DisplayType, Widget, WidgetType} from 'sentry/views/dashboardsV2/types';
+import {
+ doNotValidateYAxis,
+ filterPrimaryOptions,
+} from 'sentry/views/dashboardsV2/widgetBuilder/utils';
import {FieldValueOption, QueryField} from 'sentry/views/eventsV2/table/queryField';
import {FieldValueKind} from 'sentry/views/eventsV2/table/types';
import {generateFieldOptions} from 'sentry/views/eventsV2/utils';
@@ -82,43 +86,11 @@ export function YAxisSelector({
onChange([value]);
}
- // Any function/field choice for Big Number widgets is legal since the
- // data source is from an endpoint that is not timeseries-based.
- // The function/field choice for World Map widget will need to be numeric-like.
- // Column builder for Table widget is already handled above.
- const doNotValidateYAxis = displayType === DisplayType.BIG_NUMBER;
-
- function filterPrimaryOptions(option: FieldValueOption) {
- if (widgetType === WidgetType.METRICS) {
- if (displayType === DisplayType.TABLE) {
- return [FieldValueKind.FUNCTION, FieldValueKind.TAG].includes(option.value.kind);
- }
- if (displayType === DisplayType.TOP_N) {
- return option.value.kind === FieldValueKind.TAG;
- }
- }
-
- // Only validate function names for timeseries widgets and
- // world map widgets.
- if (!doNotValidateYAxis && option.value.kind === FieldValueKind.FUNCTION) {
- const primaryOutput = aggregateFunctionOutputType(
- option.value.meta.name,
- undefined
- );
- if (primaryOutput) {
- // If a function returns a specific type, then validate it.
- return isLegalYAxisType(primaryOutput);
- }
- }
-
- return option.value.kind === FieldValueKind.FUNCTION;
- }
-
function filterAggregateParameters(fieldValue: QueryFieldValue) {
return (option: FieldValueOption) => {
// Only validate function parameters for timeseries widgets and
// world map widgets.
- if (doNotValidateYAxis) {
+ if (doNotValidateYAxis(displayType)) {
return true;
}
@@ -159,7 +131,13 @@ export function YAxisSelector({
fieldValue={fieldValue}
fieldOptions={generateFieldOptions({organization})}
onChange={handleTopNChangeField}
- filterPrimaryOptions={filterPrimaryOptions}
+ filterPrimaryOptions={option =>
+ filterPrimaryOptions({
+ option,
+ widgetType,
+ displayType,
+ })
+ }
filterAggregateParameters={filterAggregateParameters(fieldValue)}
/>
</QueryFieldWrapper>
@@ -183,7 +161,13 @@ export function YAxisSelector({
fieldValue={fieldValue}
fieldOptions={fieldOptions}
onChange={value => handleChangeQueryField(value, i)}
- filterPrimaryOptions={filterPrimaryOptions}
+ filterPrimaryOptions={option =>
+ filterPrimaryOptions({
+ option,
+ widgetType,
+ displayType,
+ })
+ }
filterAggregateParameters={filterAggregateParameters(fieldValue)}
otherColumns={aggregates}
noFieldsMessage={noFieldsMessage}
diff --git a/static/app/views/dashboardsV2/widgetBuilder/metricWidget/fields.tsx b/static/app/views/dashboardsV2/widgetBuilder/releaseWidget/fields.tsx
similarity index 98%
rename from static/app/views/dashboardsV2/widgetBuilder/metricWidget/fields.tsx
rename to static/app/views/dashboardsV2/widgetBuilder/releaseWidget/fields.tsx
index 9c55ccd62e77af..4b883f4dfcd5c2 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/metricWidget/fields.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/releaseWidget/fields.tsx
@@ -3,7 +3,7 @@ import {defined} from 'sentry/utils';
import {METRICS_OPERATIONS} from 'sentry/utils/metrics/fields';
import {FieldValue, FieldValueKind} from 'sentry/views/eventsV2/table/types';
-export function generateMetricsWidgetFieldOptions(
+export function generateReleaseWidgetFieldOptions(
fields: MetricsMeta[] = [],
tagKeys?: string[]
) {
diff --git a/static/app/views/dashboardsV2/widgetBuilder/releaseWidget/metricsSearchBar.tsx b/static/app/views/dashboardsV2/widgetBuilder/releaseWidget/metricsSearchBar.tsx
new file mode 100644
index 00000000000000..12a6aefb90de54
--- /dev/null
+++ b/static/app/views/dashboardsV2/widgetBuilder/releaseWidget/metricsSearchBar.tsx
@@ -0,0 +1,89 @@
+import {ClassNames} from '@emotion/react';
+import memoize from 'lodash/memoize';
+
+import SmartSearchBar from 'sentry/components/smartSearchBar';
+import {NEGATION_OPERATOR, SEARCH_WILDCARD} from 'sentry/constants';
+import {MetricsTagValue, Organization, Tag} from 'sentry/types';
+import useApi from 'sentry/utils/useApi';
+import {useMetricsContext} from 'sentry/utils/useMetricsContext';
+
+const SEARCH_SPECIAL_CHARS_REGEXP = new RegExp(
+ `^${NEGATION_OPERATOR}|\\${SEARCH_WILDCARD}`,
+ 'g'
+);
+
+type Props = Pick<
+ React.ComponentProps<typeof SmartSearchBar>,
+ 'onSearch' | 'onBlur' | 'query' | 'maxQueryLength' | 'searchSource'
+> & {
+ orgSlug: Organization['slug'];
+ projectIds: number[] | readonly number[];
+ className?: string;
+};
+
+function MetricsSearchBar({
+ orgSlug,
+ onSearch,
+ onBlur,
+ maxQueryLength,
+ searchSource,
+ projectIds,
+ className,
+ ...props
+}: Props) {
+ const api = useApi();
+ const {tags} = useMetricsContext();
+
+ /**
+ * Prepare query string (e.g. strip special characters like negation operator)
+ */
+ function prepareQuery(query: string) {
+ return query.replace(SEARCH_SPECIAL_CHARS_REGEXP, '');
+ }
+
+ function fetchTagValues(tagKey: string) {
+ return api.requestPromise(`/organizations/${orgSlug}/metrics/tags/${tagKey}/`, {
+ query: {project: projectIds},
+ });
+ }
+
+ function getTagValues(tag: Tag, _query: string): Promise<string[]> {
+ return fetchTagValues(tag.key).then(
+ tagValues => (tagValues as MetricsTagValue[]).map(({value}) => value),
+ () => {
+ throw new Error('Unable to fetch tag values');
+ }
+ );
+ }
+
+ const supportedTags = Object.values(tags).reduce((acc, {key}) => {
+ acc[key] = {key, name: key};
+ return acc;
+ }, {});
+
+ return (
+ <ClassNames>
+ {({css}) => (
+ <SmartSearchBar
+ onGetTagValues={memoize(getTagValues, ({key}, query) => `${key}-${query}`)}
+ supportedTags={supportedTags}
+ prepareQuery={prepareQuery}
+ excludeEnvironment
+ dropdownClassName={css`
+ max-height: 300px;
+ overflow-y: auto;
+ `}
+ onSearch={onSearch}
+ onBlur={onBlur}
+ maxQueryLength={maxQueryLength}
+ searchSource={searchSource}
+ className={className}
+ query={props.query}
+ hasRecentSearches
+ />
+ )}
+ </ClassNames>
+ );
+}
+
+export default MetricsSearchBar;
diff --git a/static/app/views/dashboardsV2/widgetBuilder/utils.tsx b/static/app/views/dashboardsV2/widgetBuilder/utils.tsx
index 6ceec9eb988da8..45dd24f2ab54e6 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/utils.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/utils.tsx
@@ -4,9 +4,11 @@ import {generateOrderOptions} from 'sentry/components/dashboards/widgetQueriesFo
import {t} from 'sentry/locale';
import {Organization, TagCollection} from 'sentry/types';
import {
+ aggregateFunctionOutputType,
aggregateOutputType,
getAggregateAlias,
isLegalYAxisType,
+ stripDerivedMetricsPrefix,
} from 'sentry/utils/discover/fields';
import {MeasurementCollection} from 'sentry/utils/measurements/measurements';
import {SPAN_OP_BREAKDOWN_FIELDS} from 'sentry/utils/performance/spanOperationBreakdowns/constants';
@@ -16,6 +18,8 @@ import {
WidgetQuery,
WidgetType,
} from 'sentry/views/dashboardsV2/types';
+import {FieldValueOption} from 'sentry/views/eventsV2/table/queryField';
+import {FieldValueKind} from 'sentry/views/eventsV2/table/types';
import {generateFieldOptions} from 'sentry/views/eventsV2/utils';
import {IssueSortOptions} from 'sentry/views/issueList/utils';
@@ -119,8 +123,13 @@ export function normalizeQueries({
query.fields = fields.filter(field => !columns.includes(field));
}
+ const queryOrderBy =
+ widgetType === WidgetType.METRICS
+ ? stripDerivedMetricsPrefix(queries[0].orderby)
+ : queries[0].orderby;
+
const orderBy =
- getAggregateAlias(queries[0].orderby) ||
+ getAggregateAlias(queryOrderBy) ||
(widgetType === WidgetType.ISSUE
? IssueSortOptions.DATE
: generateOrderOptions({
@@ -275,3 +284,48 @@ export function getMetricFields(queries: WidgetQuery[]) {
return acc;
}, [] as string[]);
}
+
+// Any function/field choice for Big Number widgets is legal since the
+// data source is from an endpoint that is not timeseries-based.
+// The function/field choice for World Map widget will need to be numeric-like.
+// Column builder for Table widget is already handled above.
+export function doNotValidateYAxis(displayType: DisplayType) {
+ return displayType === DisplayType.BIG_NUMBER;
+}
+
+export function filterPrimaryOptions({
+ option,
+ widgetType,
+ displayType,
+}: {
+ displayType: DisplayType;
+ option: FieldValueOption;
+ widgetType?: WidgetType;
+}) {
+ if (widgetType === WidgetType.METRICS) {
+ if (displayType === DisplayType.TABLE) {
+ return [
+ FieldValueKind.FUNCTION,
+ FieldValueKind.TAG,
+ FieldValueKind.NUMERIC_METRICS,
+ ].includes(option.value.kind);
+ }
+ if (displayType === DisplayType.TOP_N) {
+ return option.value.kind === FieldValueKind.TAG;
+ }
+ }
+
+ // Only validate function names for timeseries widgets and
+ // world map widgets.
+ if (!doNotValidateYAxis(displayType) && option.value.kind === FieldValueKind.FUNCTION) {
+ const primaryOutput = aggregateFunctionOutputType(option.value.meta.name, undefined);
+ if (primaryOutput) {
+ // If a function returns a specific type, then validate it.
+ return isLegalYAxisType(primaryOutput);
+ }
+ }
+
+ return [FieldValueKind.FUNCTION, FieldValueKind.NUMERIC_METRICS].includes(
+ option.value.kind
+ );
+}
diff --git a/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx b/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx
index 3c2d2d593e42e0..ab65b407430f2e 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx
@@ -35,6 +35,7 @@ import {
getColumnsAndAggregates,
getColumnsAndAggregatesAsStrings,
QueryFieldValue,
+ stripDerivedMetricsPrefix,
} from 'sentry/utils/discover/fields';
import handleXhrErrorResponse from 'sentry/utils/handleXhrErrorResponse';
import {SessionMetric} from 'sentry/utils/metrics/fields';
@@ -518,7 +519,7 @@ function WidgetBuilder({
const fieldStrings = newFields.map(generateFieldAsString);
const aggregateAliasFieldStrings =
state.dataSet === DataSet.RELEASE
- ? fieldStrings
+ ? fieldStrings.map(stripDerivedMetricsPrefix)
: fieldStrings.map(getAggregateAlias);
const columnsAndAggregates = isColumn
@@ -531,7 +532,9 @@ function WidgetBuilder({
const isDescending = query.orderby.startsWith('-');
const orderbyAggregateAliasField = query.orderby.replace('-', '');
const prevAggregateAliasFieldStrings = query.aggregates.map(aggregate =>
- state.dataSet === DataSet.RELEASE ? aggregate : getAggregateAlias(aggregate)
+ state.dataSet === DataSet.RELEASE
+ ? stripDerivedMetricsPrefix(aggregate)
+ : getAggregateAlias(aggregate)
);
const newQuery = cloneDeep(query);
diff --git a/static/app/views/eventsV2/table/columnEditCollection.tsx b/static/app/views/eventsV2/table/columnEditCollection.tsx
index ccc8753c7a2be9..49112e7041a40a 100644
--- a/static/app/views/eventsV2/table/columnEditCollection.tsx
+++ b/static/app/views/eventsV2/table/columnEditCollection.tsx
@@ -182,6 +182,7 @@ class ColumnEditCollection extends React.Component<Props, State> {
// Find the equations in the list of columns
for (let i = 0; i < newColumns.length; i++) {
const newColumn = newColumns[i];
+
if (newColumn.kind === 'equation') {
const result = parseArithmetic(newColumn.field);
let newEquation = '';
diff --git a/tests/js/spec/views/dashboardsV2/widgetBuilder/metricWidget/fields.spec.tsx b/tests/js/spec/views/dashboardsV2/widgetBuilder/metricWidget/fields.spec.tsx
index 8e7387ced53ec2..5055dae35029da 100644
--- a/tests/js/spec/views/dashboardsV2/widgetBuilder/metricWidget/fields.spec.tsx
+++ b/tests/js/spec/views/dashboardsV2/widgetBuilder/metricWidget/fields.spec.tsx
@@ -1,11 +1,11 @@
-import {generateMetricsWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/metricWidget/fields';
+import {generateReleaseWidgetFieldOptions} from 'sentry/views/dashboardsV2/widgetBuilder/releaseWidget/fields';
-describe('generateMetricsWidgetFieldOptions', function () {
+describe('generateReleaseWidgetFieldOptions', function () {
const fields = TestStubs.MetricsMeta();
const tagKeys = ['release', 'environment'];
it('generates correct field options', function () {
- expect(generateMetricsWidgetFieldOptions(fields, tagKeys)).toEqual({
+ expect(generateReleaseWidgetFieldOptions(fields, tagKeys)).toEqual({
'field:sentry.sessions.session': {
label: 'sentry.sessions.session',
value: {
@@ -303,6 +303,6 @@ describe('generateMetricsWidgetFieldOptions', function () {
});
it('ignores tags+aggregates if there are no fields', function () {
- expect(generateMetricsWidgetFieldOptions([], tagKeys)).toEqual({});
+ expect(generateReleaseWidgetFieldOptions([], tagKeys)).toEqual({});
});
});
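
The behavioral core this commit extracts is the shared filterPrimaryOptions predicate in widgetBuilder/utils.tsx. Restated as a compact sketch, in Python for consistency with the other examples here; the string values stand in for the DisplayType, WidgetType, and FieldValueKind enums, and LEGAL_Y_AXIS_TYPES is an assumed approximation of isLegalYAxisType.

LEGAL_Y_AXIS_TYPES = {"number", "integer", "duration"}  # assumed approximation


def do_not_validate_y_axis(display_type):
    # Big Number widgets are not timeseries-based, so any field choice is legal.
    return display_type == "big_number"


def filter_primary_options(option_kind, function_output_type, widget_type, display_type):
    """Sketch of the predicate now shared by the column and y-axis builders."""
    if widget_type == "metrics":
        if display_type == "table":
            return option_kind in {"function", "tag", "numeric_metrics"}
        if display_type == "top_n":
            return option_kind == "tag"

    # Only validate function output types for timeseries and world-map widgets.
    if not do_not_validate_y_axis(display_type) and option_kind == "function":
        if function_output_type is not None:
            return function_output_type in LEGAL_Y_AXIS_TYPES

    return option_kind in {"function", "numeric_metrics"}
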
|
8072ab614192169619c822f1f33fc3ae44bcf5f9
|
2022-09-28 01:13:43
|
Evan Purkhiser
|
ref(js): Improve feature badge spacing (#39352)
| false
|
Improve feature badge spacing (#39352)
|
ref
|
diff --git a/static/app/components/featureBadge.tsx b/static/app/components/featureBadge.tsx
index 2d390a81b832f4..79f6f6dd1c213f 100644
--- a/static/app/components/featureBadge.tsx
+++ b/static/app/components/featureBadge.tsx
@@ -8,7 +8,7 @@ import CircleIndicator from 'sentry/components/circleIndicator';
import Tag from 'sentry/components/tagDeprecated';
import Tooltip from 'sentry/components/tooltip';
import {t} from 'sentry/locale';
-import space from 'sentry/styles/space';
+import space, {ValidSize} from 'sentry/styles/space';
type BadgeProps = {
type: 'alpha' | 'beta' | 'new';
@@ -72,12 +72,10 @@ const StyledTag = styled(Tag)`
padding: 3px ${space(0.75)};
`;
-const FeatureBadge = styled(BaseFeatureBadge)`
+const FeatureBadge = styled(BaseFeatureBadge)<{space?: ValidSize}>`
display: inline-flex;
align-items: center;
- margin-left: ${space(0.75)};
- position: relative;
- top: -1px;
+ margin-left: ${p => space(p.space ?? 0.75)};
`;
export default FeatureBadge;
|
8483164692968a2b42a33e4750b9bae4046dc2eb
|
2019-09-16 23:39:40
|
Evan Purkhiser
|
ref(ts): Improve HoC typings (#14677)
| false
|
Improve HoC typings (#14677)
|
ref
|
diff --git a/package.json b/package.json
index a7938385ed7c3d..c84a4f5ae6e409 100644
--- a/package.json
+++ b/package.json
@@ -22,6 +22,7 @@
"@sentry/integrations": "5.6.0-beta.4",
"@types/classnames": "^2.2.0",
"@types/clipboard": "^2.0.1",
+ "@types/create-react-class": "^15.6.2",
"@types/echarts": "^4.1.10",
"@types/jest": "^24.0.17",
"@types/jquery": "^2.0.53",
diff --git a/src/sentry/static/sentry/app/components/acl/feature.tsx b/src/sentry/static/sentry/app/components/acl/feature.tsx
index f02e41929806f1..54b3d481ee47dc 100644
--- a/src/sentry/static/sentry/app/components/acl/feature.tsx
+++ b/src/sentry/static/sentry/app/components/acl/feature.tsx
@@ -1,7 +1,7 @@
import PropTypes from 'prop-types';
import React from 'react';
-import {Project, Organization} from 'app/types';
+import {Project, Organization, Config} from 'app/types';
import HookStore from 'app/stores/hookStore';
import SentryTypes from 'app/sentryTypes';
import withConfig from 'app/utils/withConfig';
@@ -10,9 +10,10 @@ import withProject from 'app/utils/withProject';
import ComingSoon from './comingSoon';
-type BaseFeatureProps = {
- organization?: Organization;
- project?: Project;
+type FeatureProps = {
+ organization: Organization;
+ project: Project;
+ config: Config;
features: string[];
requireAll?: boolean;
renderDisabled?: Function | boolean;
@@ -20,22 +21,17 @@ type BaseFeatureProps = {
children: React.ReactNode;
};
-type FeatureProps = BaseFeatureProps & {
- configFeatures?: string[];
-};
-
/**
* Component to handle feature flags.
*/
class Feature extends React.Component<FeatureProps> {
static propTypes = {
/**
- * The following properties will be set by the FeatureContainer component
- * that typically wraps this component.
+ * The following properties will be set by the HoCs
*/
organization: SentryTypes.Organization,
project: SentryTypes.Project,
- configFeatures: PropTypes.arrayOf(PropTypes.string),
+ config: SentryTypes.Config.isRequired,
/**
* List of required feature tags. Note we do not enforce uniqueness of tags anywhere.
@@ -44,7 +40,7 @@ class Feature extends React.Component<FeatureProps> {
*
* Use `organizations:` or `projects:` prefix strings to specify a feature with context.
*/
- features: PropTypes.arrayOf(PropTypes.string).isRequired,
+ features: PropTypes.arrayOf(PropTypes.string.isRequired).isRequired,
/**
* Should the component require all features or just one or more.
@@ -107,9 +103,10 @@ class Feature extends React.Component<FeatureProps> {
organization: string[];
project: string[];
} {
- const {organization, project, configFeatures} = this.props;
+ const {organization, project, config} = this.props;
+
return {
- configFeatures: configFeatures || [],
+ configFeatures: config.features ? Array.from(config.features) : [],
organization: (organization && organization.features) || [],
project: (project && project.features) || [],
};
@@ -193,22 +190,4 @@ class Feature extends React.Component<FeatureProps> {
}
}
-type FeatureContainerProps = BaseFeatureProps & {
- config: {[key: string]: string};
-};
-
-class FeatureContainer extends React.Component<FeatureContainerProps> {
- static propTypes = {
- config: SentryTypes.Config.isRequired,
- };
-
- render() {
- const features = this.props.config.features
- ? Array.from(this.props.config.features)
- : [];
-
- return <Feature configFeatures={features} {...this.props} />;
- }
-}
-
-export default withConfig(withOrganization(withProject(FeatureContainer)));
+export default withOrganization(withProject(withConfig(Feature)));
diff --git a/src/sentry/static/sentry/app/components/sidebar/discover2Item.tsx b/src/sentry/static/sentry/app/components/sidebar/discover2Item.tsx
index c38d0e329151b4..c1107fc85374b1 100644
--- a/src/sentry/static/sentry/app/components/sidebar/discover2Item.tsx
+++ b/src/sentry/static/sentry/app/components/sidebar/discover2Item.tsx
@@ -26,7 +26,7 @@ import withDiscoverSavedQueries from 'app/utils/withDiscoverSavedQueries';
import SidebarItem from './sidebarItem';
-type Props = React.ComponentProps<SidebarItem> & {
+type Props = {
api: Client;
organization: Organization;
savedQueries: SavedQuery[];
diff --git a/src/sentry/static/sentry/app/types/index.tsx b/src/sentry/static/sentry/app/types/index.tsx
index 19e3455a3bcdb9..0e86e87620ba0c 100644
--- a/src/sentry/static/sentry/app/types/index.tsx
+++ b/src/sentry/static/sentry/app/types/index.tsx
@@ -202,6 +202,7 @@ export type Plugin = {};
export type GlobalSelection = {
projects: number[];
environments: string[];
+ forceUrlSync?: boolean;
datetime: {
start: string;
end: string;
diff --git a/src/sentry/static/sentry/app/utils/withApi.tsx b/src/sentry/static/sentry/app/utils/withApi.tsx
index 4a38fa0c66ddba..a3dbc38ac5c70c 100644
--- a/src/sentry/static/sentry/app/utils/withApi.tsx
+++ b/src/sentry/static/sentry/app/utils/withApi.tsx
@@ -3,17 +3,25 @@ import React from 'react';
import {Client} from 'app/api';
import getDisplayName from 'app/utils/getDisplayName';
+type InjectedApiProps = {
+ api: Client;
+};
+
+type WrappedProps<P> = Omit<P, keyof InjectedApiProps> & Partial<InjectedApiProps>;
+
/**
- * HoC that provides "api" client when mounted, and clears API requests when component is unmounted
+ * HoC that provides "api" client when mounted, and clears API requests when
+ * component is unmounted
*/
-const withApi = <P extends object>(WrappedComponent: React.ComponentType<P>) => {
- return class extends React.Component<Omit<P, 'api'>> {
+const withApi = <P extends InjectedApiProps>(WrappedComponent: React.ComponentType<P>) =>
+ class extends React.Component<WrappedProps<P>> {
static displayName = `withApi(${getDisplayName(WrappedComponent)})`;
- constructor(props) {
+ constructor(props: WrappedProps<P>) {
super(props);
this.api = new Client();
}
+
componentWillUnmount() {
this.api.clear();
}
@@ -24,6 +32,5 @@ const withApi = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
return <WrappedComponent api={this.api} {...this.props as P} />;
}
};
-};
export default withApi;
diff --git a/src/sentry/static/sentry/app/utils/withConfig.tsx b/src/sentry/static/sentry/app/utils/withConfig.tsx
index 0b3ca3d4ab57a2..d791b89f5e51e0 100644
--- a/src/sentry/static/sentry/app/utils/withConfig.tsx
+++ b/src/sentry/static/sentry/app/utils/withConfig.tsx
@@ -1,31 +1,43 @@
import React from 'react';
import Reflux from 'reflux';
import createReactClass from 'create-react-class';
+
+import {Config} from 'app/types';
import getDisplayName from 'app/utils/getDisplayName';
import ConfigStore from 'app/stores/configStore';
+type InjectedConfigProps = {
+ config: Config;
+};
+
+type State = {
+ config: Config;
+};
+
/**
* Higher order component that passes the config object to the wrapped component
*/
-const withConfig = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
- createReactClass({
+const withConfig = <P extends InjectedConfigProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ createReactClass<
+ Omit<P, keyof InjectedConfigProps> & Partial<InjectedConfigProps>,
+ State
+ >({
displayName: `withConfig(${getDisplayName(WrappedComponent)})`,
mixins: [Reflux.listenTo(ConfigStore, 'onUpdate')],
+
getInitialState() {
- return {
- config: ConfigStore.getConfig(),
- };
+ return {config: ConfigStore.getConfig()};
},
onUpdate() {
- this.setState({
- config: ConfigStore.getConfig(),
- });
+ this.setState({config: ConfigStore.getConfig()});
},
render() {
return (
- <WrappedComponent config={this.state.config as object} {...this.props as P} />
+ <WrappedComponent config={this.state.config as Config} {...this.props as P} />
);
},
});
diff --git a/src/sentry/static/sentry/app/utils/withDiscoverSavedQueries.tsx b/src/sentry/static/sentry/app/utils/withDiscoverSavedQueries.tsx
index 11fe14a3a8cf7c..66ba4b6ba1f17e 100644
--- a/src/sentry/static/sentry/app/utils/withDiscoverSavedQueries.tsx
+++ b/src/sentry/static/sentry/app/utils/withDiscoverSavedQueries.tsx
@@ -6,7 +6,11 @@ import DiscoverSavedQueriesStore from 'app/stores/discoverSavedQueriesStore';
import getDisplayName from 'app/utils/getDisplayName';
import {SavedQuery} from 'app/views/discover/types';
-type Props = {
+type InjectedDiscoverSavedQueriesProps = {
+ savedQueries: SavedQuery[];
+};
+
+type State = {
savedQueries: SavedQuery[];
};
@@ -14,10 +18,14 @@ type Props = {
 * Higher order component that uses DiscoverSavedQueriesStore and provides the
* saved queries for the current organization
*/
-const withDiscoverSavedQueries = <P extends Props>(
+const withDiscoverSavedQueries = <P extends InjectedDiscoverSavedQueriesProps>(
WrappedComponent: React.ComponentType<P>
) =>
- createReactClass({
+ createReactClass<
+ Omit<P, keyof InjectedDiscoverSavedQueriesProps> &
+ Partial<InjectedDiscoverSavedQueriesProps>,
+ State
+ >({
displayName: `withDiscoverSavedQuery(${getDisplayName(WrappedComponent)})`,
mixins: [Reflux.listenTo(DiscoverSavedQueriesStore, 'onUpdate')],
@@ -45,10 +53,7 @@ const withDiscoverSavedQueries = <P extends Props>(
render() {
return (
- <WrappedComponent
- savedQueries={this.state.savedQueries as SavedQuery[]}
- {...this.props as P}
- />
+ <WrappedComponent savedQueries={this.state.savedQueries} {...this.props as P} />
);
},
});
diff --git a/src/sentry/static/sentry/app/utils/withGlobalSelection.tsx b/src/sentry/static/sentry/app/utils/withGlobalSelection.tsx
index ae350f77df1675..50ed7a97565363 100644
--- a/src/sentry/static/sentry/app/utils/withGlobalSelection.tsx
+++ b/src/sentry/static/sentry/app/utils/withGlobalSelection.tsx
@@ -6,16 +6,29 @@ import GlobalSelectionStore from 'app/stores/globalSelectionStore';
import getDisplayName from 'app/utils/getDisplayName';
import {GlobalSelection} from 'app/types';
+type InjectedGlobalSelectionProps = {
+ forceUrlSync?: boolean;
+ selection: GlobalSelection;
+};
+
+type State = {
+ selection: GlobalSelection;
+};
+
/**
* Higher order component that uses GlobalSelectionStore and provides the
* active project
*/
-const withGlobalSelection = <P extends object>(
+const withGlobalSelection = <P extends InjectedGlobalSelectionProps>(
WrappedComponent: React.ComponentType<P>
) =>
- createReactClass({
+ createReactClass<
+ Omit<P, keyof InjectedGlobalSelectionProps> & Partial<InjectedGlobalSelectionProps>,
+ State
+ >({
displayName: `withGlobalSelection(${getDisplayName(WrappedComponent)})`,
mixins: [Reflux.listenTo(GlobalSelectionStore, 'onUpdate')],
+
getInitialState() {
return {
selection: GlobalSelectionStore.get(),
@@ -34,9 +47,7 @@ const withGlobalSelection = <P extends object>(
const selection = GlobalSelectionStore.get();
if (this.state.selection !== selection) {
- this.setState({
- selection,
- });
+ this.setState({selection});
}
},
@@ -44,7 +55,7 @@ const withGlobalSelection = <P extends object>(
const {forceUrlSync, ...selection} = this.state.selection;
return (
<WrappedComponent
- forceUrlSync={forceUrlSync as boolean}
+ forceUrlSync={!!forceUrlSync}
selection={selection as GlobalSelection}
{...this.props as P}
/>
diff --git a/src/sentry/static/sentry/app/utils/withLatestContext.tsx b/src/sentry/static/sentry/app/utils/withLatestContext.tsx
index 9b26755deed164..884d788a88f528 100644
--- a/src/sentry/static/sentry/app/utils/withLatestContext.tsx
+++ b/src/sentry/static/sentry/app/utils/withLatestContext.tsx
@@ -10,13 +10,36 @@ import getDisplayName from 'app/utils/getDisplayName';
import withOrganizations from 'app/utils/withOrganizations';
import {Project, Organization} from 'app/types';
-const withLatestContext = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
+type InjectedLatestContextProps = {
+ organizations?: Organization[];
+ organization?: Organization;
+ project?: Project;
+ lastRoute?: string;
+};
+
+type WithPluginProps = {
+ organization?: Organization;
+ organizations: Organization[];
+};
+
+type State = {
+ latestContext: Omit<InjectedLatestContextProps, 'organizations'>;
+};
+
+const withLatestContext = <P extends InjectedLatestContextProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
withOrganizations(
- createReactClass({
+ createReactClass<
+ Omit<P, keyof InjectedLatestContextProps> &
+ Partial<InjectedLatestContextProps> &
+ WithPluginProps,
+ State
+ >({
displayName: `withLatestContext(${getDisplayName(WrappedComponent)})`,
propTypes: {
organization: SentryTypes.Organization,
- organizations: PropTypes.arrayOf(SentryTypes.Organization),
+ organizations: PropTypes.arrayOf(SentryTypes.Organization).isRequired,
},
mixins: [Reflux.connect(LatestContextStore, 'latestContext')],
diff --git a/src/sentry/static/sentry/app/utils/withOrganization.tsx b/src/sentry/static/sentry/app/utils/withOrganization.tsx
index a7fff50a77f345..9f7d3127bb1c6c 100644
--- a/src/sentry/static/sentry/app/utils/withOrganization.tsx
+++ b/src/sentry/static/sentry/app/utils/withOrganization.tsx
@@ -4,8 +4,16 @@ import SentryTypes from 'app/sentryTypes';
import getDisplayName from 'app/utils/getDisplayName';
import {Organization} from 'app/types';
-const withOrganization = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
- class extends React.Component<Omit<P, 'organization'>> {
+type InjectedOrganizationProps = {
+ organization: Organization;
+};
+
+const withOrganization = <P extends InjectedOrganizationProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ class extends React.Component<
+ Omit<P, keyof InjectedOrganizationProps> & Partial<InjectedOrganizationProps>
+ > {
static displayName = `withOrganization(${getDisplayName(WrappedComponent)})`;
static contextTypes = {
organization: SentryTypes.Organization,
diff --git a/src/sentry/static/sentry/app/utils/withOrganizations.tsx b/src/sentry/static/sentry/app/utils/withOrganizations.tsx
index e40d9df0401d6a..a7672d54aef113 100644
--- a/src/sentry/static/sentry/app/utils/withOrganizations.tsx
+++ b/src/sentry/static/sentry/app/utils/withOrganizations.tsx
@@ -6,8 +6,22 @@ import getDisplayName from 'app/utils/getDisplayName';
import OrganizationsStore from 'app/stores/organizationsStore';
import {Organization} from 'app/types';
-const withOrganizations = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
- createReactClass({
+type InjectedOrganizationsProps = {
+ organizationsLoading?: boolean;
+ organizations: Organization[];
+};
+
+type State = {
+ organizations: Organization[];
+};
+
+const withOrganizations = <P extends InjectedOrganizationsProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ createReactClass<
+ Omit<P, keyof InjectedOrganizationsProps> & Partial<InjectedOrganizationsProps>,
+ State
+ >({
displayName: `withOrganizations(${getDisplayName(WrappedComponent)})`,
mixins: [Reflux.connect(OrganizationsStore, 'organizations')],
diff --git a/src/sentry/static/sentry/app/utils/withPlugins.tsx b/src/sentry/static/sentry/app/utils/withPlugins.tsx
index 398f66c27a8c00..06e84a08f604b4 100644
--- a/src/sentry/static/sentry/app/utils/withPlugins.tsx
+++ b/src/sentry/static/sentry/app/utils/withPlugins.tsx
@@ -3,28 +3,37 @@ import Reflux from 'reflux';
import createReactClass from 'create-react-class';
import {defined} from 'app/utils';
+import {Organization, Project, Plugin} from 'app/types';
import {fetchPlugins} from 'app/actionCreators/plugins';
import getDisplayName from 'app/utils/getDisplayName';
import PluginsStore from 'app/stores/pluginsStore';
import SentryTypes from 'app/sentryTypes';
-
import withOrganization from 'app/utils/withOrganization';
import withProject from 'app/utils/withProject';
-import {Plugin} from 'app/types';
+
+type WithPluginProps = {
+ organization: Organization;
+ project: Project;
+};
+
+type InjectedPluginProps = {
+ plugins: Plugin[];
+};
/**
* Higher order component that fetches list of plugins and
* passes PluginsStore to component as `plugins`
*/
-
-const withPlugins = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
+const withPlugins = <P extends InjectedPluginProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
withOrganization(
withProject(
- createReactClass({
+ createReactClass<Omit<P, keyof InjectedPluginProps> & WithPluginProps, {}>({
displayName: `withPlugins(${getDisplayName(WrappedComponent)})`,
propTypes: {
- organization: SentryTypes.Organization,
- project: SentryTypes.Project,
+ organization: SentryTypes.Organization.isRequired,
+ project: SentryTypes.Project.isRequired,
},
mixins: [Reflux.connect(PluginsStore, 'store')],
diff --git a/src/sentry/static/sentry/app/utils/withProject.tsx b/src/sentry/static/sentry/app/utils/withProject.tsx
index 1348675bd6a97c..1f946507be0ceb 100644
--- a/src/sentry/static/sentry/app/utils/withProject.tsx
+++ b/src/sentry/static/sentry/app/utils/withProject.tsx
@@ -4,11 +4,19 @@ import SentryTypes from 'app/sentryTypes';
import getDisplayName from 'app/utils/getDisplayName';
import {Project} from 'app/types';
+type InjectedProjectProps = {
+ project: Project;
+};
+
/**
* Currently wraps component with project from context
*/
-const withProject = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
- class extends React.Component<Omit<P, 'project'>> {
+const withProject = <P extends InjectedProjectProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ class extends React.Component<
+ Omit<P, keyof InjectedProjectProps> & Partial<InjectedProjectProps>
+ > {
static displayName = `withProject(${getDisplayName(WrappedComponent)})`;
static contextTypes = {
project: SentryTypes.Project,
diff --git a/src/sentry/static/sentry/app/utils/withProjects.tsx b/src/sentry/static/sentry/app/utils/withProjects.tsx
index 9e200fa4e7e6ca..92478b4dbef784 100644
--- a/src/sentry/static/sentry/app/utils/withProjects.tsx
+++ b/src/sentry/static/sentry/app/utils/withProjects.tsx
@@ -7,15 +7,24 @@ import ProjectsStore from 'app/stores/projectsStore';
import SentryTypes from 'app/sentryTypes';
import {Project} from 'app/types';
-/**
- * Higher order component that uses ProjectsStore and provides a list of projects
- */
-type Props = {
+type InjectedProjectsProps = {
projects: Project[];
};
-const withProjects = <P extends Props>(WrappedComponent: React.ComponentType<P>) =>
- createReactClass({
+type State = {
+ projects: Project[];
+};
+
+/**
+ * Higher order component that uses ProjectsStore and provides a list of projects
+ */
+const withProjects = <P extends InjectedProjectsProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ createReactClass<
+ Omit<P, keyof InjectedProjectsProps> & Partial<InjectedProjectsProps>,
+ State
+ >({
displayName: `withProjects(${getDisplayName(WrappedComponent)})`,
propTypes: {
organization: SentryTypes.Organization,
@@ -24,13 +33,13 @@ const withProjects = <P extends Props>(WrappedComponent: React.ComponentType<P>)
mixins: [Reflux.listenTo(ProjectsStore, 'onProjectUpdate')],
getInitialState() {
return {
- projects: ProjectsStore.getAll(),
+ projects: ProjectsStore.getAll() as Project[],
};
},
onProjectUpdate() {
this.setState({
- projects: ProjectsStore.getAll(),
+ projects: ProjectsStore.getAll() as Project[],
});
},
render() {
diff --git a/src/sentry/static/sentry/app/utils/withSavedSearches.tsx b/src/sentry/static/sentry/app/utils/withSavedSearches.tsx
index aa0558b6b0c8b5..e176b24f3ee849 100644
--- a/src/sentry/static/sentry/app/utils/withSavedSearches.tsx
+++ b/src/sentry/static/sentry/app/utils/withSavedSearches.tsx
@@ -6,14 +6,30 @@ import SavedSearchesStore from 'app/stores/savedSearchesStore';
import getDisplayName from 'app/utils/getDisplayName';
import {SavedSearch} from 'app/types';
+type InjectedSavedSearchesProps = {
+ savedSearches: SavedSearch[];
+ savedSearchLoading: boolean;
+ savedSearch: SavedSearch | null;
+};
+
+type State = {
+  savedSearches: SavedSearch[];
+ isLoading: boolean;
+};
+
/**
  * Currently wraps component with saved searches from SavedSearchesStore
*/
-
-const withSavedSearches = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
- createReactClass({
+const withSavedSearches = <P extends InjectedSavedSearchesProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ createReactClass<
+ Omit<P, keyof InjectedSavedSearchesProps> & Partial<InjectedSavedSearchesProps>,
+ State
+ >({
displayName: `withSavedSearches(${getDisplayName(WrappedComponent)})`,
mixins: [Reflux.listenTo(SavedSearchesStore, 'onUpdate')],
+
getInitialState() {
return SavedSearchesStore.get();
},
@@ -55,5 +71,4 @@ const withSavedSearches = <P extends object>(WrappedComponent: React.ComponentTy
);
},
});
-
export default withSavedSearches;
diff --git a/src/sentry/static/sentry/app/utils/withSentryAppComponents.tsx b/src/sentry/static/sentry/app/utils/withSentryAppComponents.tsx
index 25caf93891b519..2794a132beb956 100644
--- a/src/sentry/static/sentry/app/utils/withSentryAppComponents.tsx
+++ b/src/sentry/static/sentry/app/utils/withSentryAppComponents.tsx
@@ -5,18 +5,29 @@ import createReactClass from 'create-react-class';
import getDisplayName from 'app/utils/getDisplayName';
import SentryAppComponentsStore from 'app/stores/sentryAppComponentsStore';
+// TODO(ts): Update when component type is defined
+type Component = {};
+
+type InjectedAppComponentsProps = {
+ components: Component[];
+};
+
+type State = {
+ components: Component[];
+};
+
type Options = {
componentType?: 'stacktrace-link';
};
-// TODO(ts): Update when component type is defined
-type Component = {};
-
-const withSentryAppComponents = <P extends object>(
+const withSentryAppComponents = <P extends InjectedAppComponentsProps>(
WrappedComponent: React.ComponentType<P>,
{componentType}: Options = {}
) =>
- createReactClass({
+ createReactClass<
+ Omit<P, keyof InjectedAppComponentsProps> & Partial<InjectedAppComponentsProps>,
+ State
+ >({
displayName: `withSentryAppComponents(${getDisplayName(WrappedComponent)})`,
mixins: [Reflux.connect(SentryAppComponentsStore, 'components')],
diff --git a/src/sentry/static/sentry/app/utils/withTeams.tsx b/src/sentry/static/sentry/app/utils/withTeams.tsx
index 91faf93c87bcb1..e160abd23ab67a 100644
--- a/src/sentry/static/sentry/app/utils/withTeams.tsx
+++ b/src/sentry/static/sentry/app/utils/withTeams.tsx
@@ -2,21 +2,28 @@ import React from 'react';
import Reflux from 'reflux';
import createReactClass from 'create-react-class';
+import {Team} from 'app/types';
import getDisplayName from 'app/utils/getDisplayName';
-import SentryTypes from 'app/sentryTypes';
import TeamStore from 'app/stores/teamStore';
-import {Team} from 'app/types';
+
+type InjectedTeamsProps = {
+ teams: Team[];
+};
+
+type State = {
+ teams: Team[];
+};
/**
* Higher order component that uses TeamStore and provides a list of teams
*/
-const withTeams = <P extends object>(WrappedComponent: React.ComponentType<P>) =>
- createReactClass({
+const withTeams = <P extends InjectedTeamsProps>(
+ WrappedComponent: React.ComponentType<P>
+) =>
+ createReactClass<Omit<P, keyof InjectedTeamsProps>, State>({
displayName: `withTeams(${getDisplayName(WrappedComponent)})`,
- propTypes: {
- organization: SentryTypes.Organization,
- },
mixins: [Reflux.listenTo(TeamStore, 'onTeamUpdate')],
+
getInitialState() {
return {
teams: TeamStore.getAll(),
@@ -32,5 +39,4 @@ const withTeams = <P extends object>(WrappedComponent: React.ComponentType<P>) =
return <WrappedComponent {...this.props as P} teams={this.state.teams as Team[]} />;
},
});
-
export default withTeams;
diff --git a/src/sentry/static/sentry/app/views/eventsV2/eventDetails.tsx b/src/sentry/static/sentry/app/views/eventsV2/eventDetails.tsx
index 9f44cf5db9531d..09e4e9a5a08fda 100644
--- a/src/sentry/static/sentry/app/views/eventsV2/eventDetails.tsx
+++ b/src/sentry/static/sentry/app/views/eventsV2/eventDetails.tsx
@@ -10,7 +10,6 @@ import SentryTypes from 'app/sentryTypes';
import AsyncComponent from 'app/components/asyncComponent';
import ModalDialog from 'app/components/modalDialog';
import NotFound from 'app/components/errors/notFound';
-import withApi from 'app/utils/withApi';
import theme from 'app/utils/theme';
import space from 'app/styles/space';
import {Organization, Event} from 'app/types';
@@ -127,4 +126,4 @@ class EventDetails extends AsyncComponent<Props, State & AsyncComponent['state']
}
}
-export default withApi(EventDetails);
+export default EventDetails;
diff --git a/src/sentry/static/sentry/app/views/eventsV2/modalLineGraph.tsx b/src/sentry/static/sentry/app/views/eventsV2/modalLineGraph.tsx
index 78c0febe531ce1..f394953722c0d0 100644
--- a/src/sentry/static/sentry/app/views/eventsV2/modalLineGraph.tsx
+++ b/src/sentry/static/sentry/app/views/eventsV2/modalLineGraph.tsx
@@ -20,7 +20,7 @@ import {Panel} from 'app/components/panels';
import withApi from 'app/utils/withApi';
import withGlobalSelection from 'app/utils/withGlobalSelection';
import theme from 'app/utils/theme';
-import {Event, Organization} from 'app/types';
+import {Event, Organization, GlobalSelection} from 'app/types';
import {MODAL_QUERY_KEYS, PIN_ICON} from './data';
import EventView from './eventView';
@@ -136,8 +136,7 @@ type ModalLineGraphProps = {
location: Location;
currentEvent: Event;
eventView: EventView;
- // TODO(ts): adjust
- selection: any;
+ selection: GlobalSelection;
};
/**
@@ -180,8 +179,9 @@ const ModalLineGraph = (props: ModalLineGraphProps) => {
period={selection.datetime.period}
project={selection.projects}
environment={selection.environments}
- start={selection.datetime.start}
- end={selection.datetime.end}
+ // TODO(ts): adjust. Expects date, got strings
+ start={selection.datetime.start as any}
+ end={selection.datetime.end as any}
interval={interval}
showLoading={true}
query={queryString}
diff --git a/src/sentry/static/sentry/app/views/eventsV2/relatedEvents.tsx b/src/sentry/static/sentry/app/views/eventsV2/relatedEvents.tsx
index 18b9e07f777f1f..ea6b870cd8f806 100644
--- a/src/sentry/static/sentry/app/views/eventsV2/relatedEvents.tsx
+++ b/src/sentry/static/sentry/app/views/eventsV2/relatedEvents.tsx
@@ -4,7 +4,7 @@ import PropTypes from 'prop-types';
import {omit} from 'lodash';
import {Location} from 'history';
-import {Organization, EventViewv1, Event, Project} from 'app/types';
+import {Organization, Event, Project} from 'app/types';
import {t} from 'app/locale';
import SentryTypes from 'app/sentryTypes';
import AsyncComponent from 'app/components/asyncComponent';
@@ -23,7 +23,6 @@ import {EventQuery} from './utils';
type Props = {
location: Location;
organization: Organization;
- view: EventViewv1;
event: Event;
projects: Array<Project>;
};
diff --git a/src/sentry/static/sentry/app/views/incidents/details/relatedIssues/index.tsx b/src/sentry/static/sentry/app/views/incidents/details/relatedIssues/index.tsx
index 36a17804db9426..3420394e10d4c8 100644
--- a/src/sentry/static/sentry/app/views/incidents/details/relatedIssues/index.tsx
+++ b/src/sentry/static/sentry/app/views/incidents/details/relatedIssues/index.tsx
@@ -3,6 +3,7 @@ import React from 'react';
import styled from 'react-emotion';
import {Client} from 'app/api';
+import {Organization} from 'app/types';
import {Panel, PanelBody, PanelItem} from 'app/components/panels';
import {t} from 'app/locale';
import EventOrGroupExtraDetails from 'app/components/eventOrGroupExtraDetails';
@@ -21,6 +22,7 @@ type Props = {
className?: string;
incident?: Incident;
params: Params;
+ organization: Organization;
};
class RelatedIssues extends React.Component<Props> {
diff --git a/yarn.lock b/yarn.lock
index cbcd5587181cca..529e2b7718cccf 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2061,6 +2061,13 @@
resolved "https://registry.yarnpkg.com/@types/clipboard/-/clipboard-2.0.1.tgz#75a74086c293d75b12bc93ff13bc7797fef05a40"
integrity sha512-gJJX9Jjdt3bIAePQRRjYWG20dIhAgEqonguyHxXuqALxsoDsDLimihqrSg8fXgVTJ4KZCzkfglKtwsh/8dLfbA==
+"@types/create-react-class@^15.6.2":
+ version "15.6.2"
+ resolved "https://registry.yarnpkg.com/@types/create-react-class/-/create-react-class-15.6.2.tgz#0e1b89153be31ded959359c2b827cceaa9d18cf6"
+ integrity sha512-jeDUr85ld9dTUmrb0VEX1P4dGDPZocWXjeW/+jFJpdCqpCcs0Hdrv3awZqjkEsRaB/IEDe+v0ARYgBqNoDORFQ==
+ dependencies:
+ "@types/react" "*"
+
"@types/echarts@^4.1.10":
version "4.1.10"
resolved "https://registry.yarnpkg.com/@types/echarts/-/echarts-4.1.10.tgz#ee71911eb8b1717c7c12c0bd81fc83db872f4d3b"
|
ad923a6ed85b7e168979fa4294a10816fe6097da
|
2018-08-29 00:08:48
|
Ayesha Omarali
|
feat(ui): Discover - Truncate Tooltip Titles (#9532)
| false
|
Discover - Truncate Tooltip Titles (#9532)
|
feat
|
diff --git a/src/sentry/static/sentry/app/views/organizationDiscover/result/utils.jsx b/src/sentry/static/sentry/app/views/organizationDiscover/result/utils.jsx
index c8d64034cbb350..261de499b11361 100644
--- a/src/sentry/static/sentry/app/views/organizationDiscover/result/utils.jsx
+++ b/src/sentry/static/sentry/app/views/organizationDiscover/result/utils.jsx
@@ -21,7 +21,7 @@ export function getChartData(data, query) {
data: data.map(res => {
return {
value: res[aggregation[2]],
- name: fields.map(field => `${field} ${res[field]}`).join(' '),
+ name: truncateLabel(fields.map(field => `${field} ${res[field]}`).join(' ')),
};
}),
};
@@ -73,7 +73,16 @@ export function getChartDataByDay(data, query) {
});
}
- result.push({seriesName: key, data: output[key].data});
+ result.push({seriesName: truncateLabel(key), data: output[key].data});
+ }
+
+ return result;
+}
+
+function truncateLabel(seriesName) {
+ let result = seriesName;
+ if (seriesName.length > 45) {
+ result = seriesName.substring(0, 45) + '…';
}
return result;
}
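
The truncation rule is small enough to restate outside the chart utilities; a Python mirror of truncateLabel and its 45-character cutoff, for illustration only:

def truncate_label(series_name, max_len=45):
    """Clip long chart labels and mark the cut with a single ellipsis."""
    if len(series_name) > max_len:
        return series_name[:max_len] + "…"
    return series_name


assert truncate_label("short") == "short"
assert len(truncate_label("x" * 100)) == 46  # 45 characters plus the ellipsis
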
|
0fb1e42472334fecf1c1333405e708802b11fa60
|
2022-04-26 02:40:45
|
Ryan Skonnord
|
ref(access): Simplify get_allowed_roles (#33851)
| false
|
Simplify get_allowed_roles (#33851)
|
ref
|
diff --git a/src/sentry/api/endpoints/organization_member/__init__.py b/src/sentry/api/endpoints/organization_member/__init__.py
index 2b9c3085e72526..f1a287a794572b 100644
--- a/src/sentry/api/endpoints/organization_member/__init__.py
+++ b/src/sentry/api/endpoints/organization_member/__init__.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import Iterable
+from typing import Collection
from django.db import transaction
from rest_framework.request import Request
@@ -27,32 +27,29 @@ def get_allowed_roles(
request: Request,
organization: Organization,
member: OrganizationMember | None = None,
-) -> tuple[bool, Iterable[Role]]:
- can_admin = request.access.has_scope("member:admin")
+) -> Collection[Role]:
+ if is_active_superuser(request):
+ return roles.get_all()
+ if not request.access.has_scope("member:admin"):
+ return ()
- allowed_roles = []
- if can_admin and not is_active_superuser(request):
- if member:
- acting_member = member
- else:
- try:
- acting_member = OrganizationMember.objects.get(
- user=request.user, organization=organization
- )
- except OrganizationMember.DoesNotExist:
- # This can happen if the request was authorized by an app integration
- # token whose proxy user does not have an OrganizationMember object.
- return can_admin, allowed_roles
+ if member:
+ acting_member = member
+ else:
+ try:
+ acting_member = OrganizationMember.objects.get(
+ user=request.user, organization=organization
+ )
+ except OrganizationMember.DoesNotExist:
+ # This can happen if the request was authorized by an app integration
+ # token whose proxy user does not have an OrganizationMember object.
+ return ()
- if member and roles.get(acting_member.role).priority < roles.get(member.role).priority:
- can_admin = False
- else:
- allowed_roles = acting_member.get_allowed_roles_to_invite()
- can_admin = bool(allowed_roles)
- elif is_active_superuser(request):
- allowed_roles = roles.get_all()
+ if member and roles.get(acting_member.role).priority < roles.get(member.role).priority:
+ # Disallow the acting member from demoting another member who outranks them
+ return ()
- return can_admin, allowed_roles
+ return acting_member.get_allowed_roles_to_invite()
from .details import OrganizationMemberDetailsEndpoint
diff --git a/src/sentry/api/endpoints/organization_member/details.py b/src/sentry/api/endpoints/organization_member/details.py
index 077ebdb2ea60d5..1dd0dd80e33385 100644
--- a/src/sentry/api/endpoints/organization_member/details.py
+++ b/src/sentry/api/endpoints/organization_member/details.py
@@ -156,7 +156,7 @@ def get(
Will return a pending invite as long as it's already approved.
"""
- _, allowed_roles = get_allowed_roles(request, organization, member)
+ allowed_roles = get_allowed_roles(request, organization, member)
context = self._serialize_member(member, request, allowed_roles)
@@ -250,7 +250,7 @@ def put(
assigned_role = result.get("role")
if assigned_role:
- _, allowed_roles = get_allowed_roles(request, organization)
+ allowed_roles = get_allowed_roles(request, organization)
allowed_role_ids = {r.id for r in allowed_roles}
# A user cannot promote others above themselves
diff --git a/src/sentry/api/endpoints/organization_member/index.py b/src/sentry/api/endpoints/organization_member/index.py
index a5252d1622c41a..0953b6e100b8bd 100644
--- a/src/sentry/api/endpoints/organization_member/index.py
+++ b/src/sentry/api/endpoints/organization_member/index.py
@@ -200,7 +200,7 @@ def post(self, request: Request, organization) -> Response:
{"organization": "Your organization is not allowed to invite members"}, status=403
)
- _, allowed_roles = get_allowed_roles(request, organization)
+ allowed_roles = get_allowed_roles(request, organization)
serializer = OrganizationMemberSerializer(
data=request.data,
diff --git a/src/sentry/api/endpoints/organization_member/requests/invite/details.py b/src/sentry/api/endpoints/organization_member/requests/invite/details.py
index 1c757375421df0..1dc36d1a1b7400 100644
--- a/src/sentry/api/endpoints/organization_member/requests/invite/details.py
+++ b/src/sentry/api/endpoints/organization_member/requests/invite/details.py
@@ -108,7 +108,7 @@ def put(
save_team_assignments(member, result["teams"])
if "approve" in request.data:
- _, allowed_roles = get_allowed_roles(request, organization)
+ allowed_roles = get_allowed_roles(request, organization)
serializer = ApproveInviteRequestSerializer(
data=request.data,
diff --git a/src/sentry/web/frontend/base.py b/src/sentry/web/frontend/base.py
index f2b45be37af8fe..892e8322c5b876 100644
--- a/src/sentry/web/frontend/base.py
+++ b/src/sentry/web/frontend/base.py
@@ -14,7 +14,6 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import roles
from sentry.api.serializers import serialize
from sentry.api.utils import is_member_disabled_from_limit
from sentry.auth import access
@@ -419,27 +418,6 @@ def convert_args(self, request: Request, organization_slug=None, *args, **kwargs
return (args, kwargs)
- def get_allowed_roles(self, request: Request, organization, member=None):
- can_admin = request.access.has_scope("member:admin")
-
- allowed_roles = []
- if can_admin and not is_active_superuser(request):
- acting_member = OrganizationMember.objects.get(
- user=request.user, organization=organization
- )
- if member and roles.get(acting_member.role).priority < roles.get(member.role).priority:
- can_admin = False
- else:
- allowed_roles = [
- r
- for r in roles.get_all()
- if r.priority <= roles.get(acting_member.role).priority
- ]
- can_admin = bool(allowed_roles)
- elif is_active_superuser(request):
- allowed_roles = roles.get_all()
- return (can_admin, allowed_roles)
-
class ProjectView(OrganizationView):
"""
|
400e107fa261cab2adf8467ba0c3c5b3429d0782
|
2025-02-11 01:23:59
|
Evan Purkhiser
|
ref(uptime): Remove environment from checks table (#84871)
| false
|
Remove environment from checks table (#84871)
|
ref
|
diff --git a/static/app/views/issueDetails/groupUptimeChecks.spec.tsx b/static/app/views/issueDetails/groupUptimeChecks.spec.tsx
index de54aa25c2f073..dd4651165fd81e 100644
--- a/static/app/views/issueDetails/groupUptimeChecks.spec.tsx
+++ b/static/app/views/issueDetails/groupUptimeChecks.spec.tsx
@@ -58,15 +58,7 @@ describe('GroupUptimeChecks', () => {
render(<GroupUptimeChecks />, {organization, router});
expect(await screen.findByText('All Uptime Checks')).toBeInTheDocument();
- for (const column of [
- 'Timestamp',
- 'Status',
- 'Duration',
- 'Environment',
- 'Trace',
- 'Region',
- 'ID',
- ]) {
+ for (const column of ['Timestamp', 'Status', 'Duration', 'Trace', 'Region', 'ID']) {
expect(screen.getByText(column)).toBeInTheDocument();
}
expect(screen.getByText('No matching uptime checks found')).toBeInTheDocument();
@@ -89,7 +81,6 @@ describe('GroupUptimeChecks', () => {
expect(screen.getByRole('time')).toHaveTextContent(/Jan 1, 2025/);
expect(screen.getByText(statusToText[uptimeCheck.checkStatus])).toBeInTheDocument();
expect(screen.getByText(`${uptimeCheck.durationMs}ms`)).toBeInTheDocument();
- expect(screen.getByText(uptimeCheck.environment)).toBeInTheDocument();
expect(
screen.getByRole('link', {name: getShortEventId(uptimeCheck.traceId)})
).toHaveAttribute('href', `/performance/trace/${uptimeCheck.traceId}/`);
diff --git a/static/app/views/issueDetails/groupUptimeChecks.tsx b/static/app/views/issueDetails/groupUptimeChecks.tsx
index 2a73cc5965a7a6..b157b49d9a983e 100644
--- a/static/app/views/issueDetails/groupUptimeChecks.tsx
+++ b/static/app/views/issueDetails/groupUptimeChecks.tsx
@@ -100,7 +100,6 @@ export default function GroupUptimeChecks() {
{key: 'timestamp', width: COL_WIDTH_UNDEFINED, name: t('Timestamp')},
{key: 'checkStatus', width: 115, name: t('Status')},
{key: 'durationMs', width: 110, name: t('Duration')},
- {key: 'environment', width: 115, name: t('Environment')},
{key: 'traceId', width: 100, name: t('Trace')},
{key: 'region', width: 100, name: t('Region')},
{key: 'uptimeCheckId', width: 100, name: t('ID')},
|
c4397274e4ea7bfffa591455612dd6d16c2acc2b
|
2022-09-15 19:46:39
|
Dan Fuller
|
feat(perf_issues): Make `GroupTagKeyValuesEndpoint` work with perf issues (#38865)
| false
|
Make `GroupTagKeyValuesEndpoint` work with perf issues (#38865)
|
feat
|
diff --git a/src/sentry/api/endpoints/group_tagkey_values.py b/src/sentry/api/endpoints/group_tagkey_values.py
index 056b79bc8c4672..6940e8a20d793f 100644
--- a/src/sentry/api/endpoints/group_tagkey_values.py
+++ b/src/sentry/api/endpoints/group_tagkey_values.py
@@ -29,10 +29,9 @@ def get(self, request: Request, group, key) -> Response:
environment_ids = [e.id for e in get_environments(request, group.project.organization)]
try:
- tagstore.get_tag_key(group.project_id, None, lookup_key)
- except tagstore.TagKeyNotFound:
+ tagstore.get_group_tag_key(group, None, lookup_key)
+ except tagstore.GroupTagKeyNotFound:
raise ResourceDoesNotExist
-
sort = request.GET.get("sort")
if sort == "date":
order_by = "-last_seen"
@@ -49,7 +48,7 @@ def get(self, request: Request, group, key) -> Response:
serializer_cls = None
paginator = tagstore.get_group_tag_value_paginator(
- group.project_id, group.id, environment_ids, lookup_key, order_by=order_by
+ group, environment_ids, lookup_key, order_by=order_by
)
return self.paginate(
diff --git a/src/sentry/data_export/processors/issues_by_tag.py b/src/sentry/data_export/processors/issues_by_tag.py
index 9eb5c1160c3d7c..35622067b5d2d8 100644
--- a/src/sentry/data_export/processors/issues_by_tag.py
+++ b/src/sentry/data_export/processors/issues_by_tag.py
@@ -95,8 +95,7 @@ def get_raw_data(self, limit=1000, offset=0):
Returns list of GroupTagValues
"""
return tagstore.get_group_tag_value_iter(
- project_id=self.group.project_id,
- group_id=self.group.id,
+ group=self.group,
environment_ids=[self.environment_id],
key=self.lookup_key,
callbacks=self.callbacks,
diff --git a/src/sentry/tagstore/base.py b/src/sentry/tagstore/base.py
index caaf90afec8d22..7374e4807d857e 100644
--- a/src/sentry/tagstore/base.py
+++ b/src/sentry/tagstore/base.py
@@ -228,19 +228,15 @@ def get_tag_value_paginator_for_projects(
"""
raise NotImplementedError
- def get_group_tag_value_iter(
- self, project_id, group_id, environment_ids, key, callbacks=(), offset=0
- ):
+ def get_group_tag_value_iter(self, group, environment_ids, key, callbacks=(), offset=0):
"""
- >>> get_group_tag_value_iter(1, 2, 3, 'environment')
+ >>> get_group_tag_value_iter(group, 2, 3, 'environment')
"""
raise NotImplementedError
- def get_group_tag_value_paginator(
- self, project_id, group_id, environment_ids, key, order_by="-id"
- ):
+ def get_group_tag_value_paginator(self, group, environment_ids, key, order_by="-id"):
"""
- >>> get_group_tag_value_paginator(1, 2, 3, 'environment')
+ >>> get_group_tag_value_paginator(group, 3, 'environment')
"""
raise NotImplementedError
diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py
index 7cbf9daa35e05d..2fb7da573e0eeb 100644
--- a/src/sentry/tagstore/snuba/backend.py
+++ b/src/sentry/tagstore/snuba/backend.py
@@ -1194,19 +1194,21 @@ def score_field_to_int(tv: TagValue) -> int:
)
def get_group_tag_value_iter(
- self, project_id, group_id, environment_ids, key, callbacks=(), limit=1000, offset=0
+ self, group, environment_ids, key, callbacks=(), limit=1000, offset=0
):
filters = {
- "project_id": get_project_list(project_id),
+ "project_id": get_project_list(group.project_id),
"tags_key": [key],
- "group_id": [group_id],
}
+ dataset, conditions, filters = self.apply_group_filters_conditions(group, [], filters)
+
if environment_ids:
filters["environment"] = environment_ids
results = snuba.query(
- dataset=Dataset.Events,
+ dataset=dataset,
groupby=["tags_value"],
filter_keys=filters,
+ conditions=conditions,
aggregations=[
["count()", "", "times_seen"],
["min", "timestamp", "first_seen"],
@@ -1219,7 +1221,7 @@ def get_group_tag_value_iter(
)
group_tag_values = [
- GroupTagValue(group_id=group_id, key=key, value=value, **fix_tag_value_data(data))
+ GroupTagValue(group_id=group.id, key=key, value=value, **fix_tag_value_data(data))
for value, data in results.items()
]
@@ -1228,9 +1230,7 @@ def get_group_tag_value_iter(
return group_tag_values
- def get_group_tag_value_paginator(
- self, project_id, group_id, environment_ids, key, order_by="-id"
- ):
+ def get_group_tag_value_paginator(self, group, environment_ids, key, order_by="-id"):
from sentry.api.paginator import SequencePaginator
if order_by in ("-last_seen", "-first_seen", "-times_seen"):
@@ -1241,7 +1241,7 @@ def get_group_tag_value_paginator(
else:
raise ValueError("Unsupported order_by: %s" % order_by)
- group_tag_values = self.get_group_tag_value_iter(project_id, group_id, environment_ids, key)
+ group_tag_values = self.get_group_tag_value_iter(group, environment_ids, key)
desc = order_by.startswith("-")
score_field = order_by.lstrip("-")
diff --git a/tests/sentry/api/endpoints/test_group_tagkey_details.py b/tests/sentry/api/endpoints/test_group_tagkey_details.py
index d79e074dbb8b66..924e531d7e391e 100644
--- a/tests/sentry/api/endpoints/test_group_tagkey_details.py
+++ b/tests/sentry/api/endpoints/test_group_tagkey_details.py
@@ -71,16 +71,6 @@ def hack_pull_out_data(jobs, projects):
project_id=self.project.id,
)
- for i in range(3):
- self.store_event(
- data={
- "tags": {"foo": "bar"},
- "fingerprint": ["group1"],
- "timestamp": iso_format(before_now(seconds=1)),
- },
- project_id=self.project.id,
- )
-
self.login_as(user=self.user)
url = f"/api/0/issues/{perf_group.id}/tags/foo/"
diff --git a/tests/sentry/api/endpoints/test_group_tagkey_values.py b/tests/sentry/api/endpoints/test_group_tagkey_values.py
index 49a579ccb2facb..b264a2021019a7 100644
--- a/tests/sentry/api/endpoints/test_group_tagkey_values.py
+++ b/tests/sentry/api/endpoints/test_group_tagkey_values.py
@@ -1,6 +1,10 @@
+from unittest import mock
+
+from sentry.event_manager import _pull_out_data
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
from sentry.testutils.silo import region_silo_test
+from sentry.types.issues import GroupType
@region_silo_test
@@ -27,6 +31,47 @@ def test_simple(self):
assert response.data[0]["value"] == "bar"
+ def test_simple_perf(self):
+ key, value = "foo", "bar"
+
+ transaction_event_data = {
+ "message": "hello",
+ "type": "transaction",
+ "culprit": "app/components/events/eventEntries in map",
+ "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
+ }
+
+ def hack_pull_out_data(jobs, projects):
+ _pull_out_data(jobs, projects)
+ for job in jobs:
+ job["event"].groups = [perf_group]
+ return jobs, projects
+
+ perf_group = self.create_group(type=GroupType.PERFORMANCE_SLOW_SPAN.value)
+
+ with mock.patch("sentry.event_manager._pull_out_data", hack_pull_out_data):
+ self.store_event(
+ data={
+ **transaction_event_data,
+ "event_id": "a" * 32,
+ "timestamp": iso_format(before_now(minutes=1)),
+ "start_timestamp": iso_format(before_now(minutes=1)),
+ "tags": {key: value},
+ },
+ project_id=self.project.id,
+ )
+
+ self.login_as(user=self.user)
+
+ url = f"/api/0/issues/{perf_group.id}/tags/{key}/values/"
+
+ response = self.client.get(url)
+
+ assert response.status_code == 200
+ assert len(response.data) == 1
+
+ assert response.data[0]["value"] == value
+
def test_user_tag(self):
project = self.create_project()
event = self.store_event(
diff --git a/tests/snuba/tagstore/test_tagstore_backend.py b/tests/snuba/tagstore/test_tagstore_backend.py
index bc377a3da25564..7a8092266436ed 100644
--- a/tests/snuba/tagstore/test_tagstore_backend.py
+++ b/tests/snuba/tagstore/test_tagstore_backend.py
@@ -30,7 +30,7 @@
from sentry.tagstore.snuba.backend import SnubaTagStorage
from sentry.tagstore.types import GroupTagValue, TagValue
from sentry.testutils import SnubaTestCase, TestCase
-from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.testutils.helpers.datetime import iso_format
from sentry.types.issues import GroupType
exception = {
@@ -162,8 +162,8 @@ def hack_pull_out_data(jobs, projects):
data={
**transaction_event_data,
"event_id": "a" * 32,
- "timestamp": iso_format(before_now(minutes=1)),
- "start_timestamp": iso_format(before_now(minutes=1)),
+ "timestamp": iso_format(self.now - timedelta(seconds=1)),
+ "start_timestamp": iso_format(self.now - timedelta(seconds=1)),
"tags": {"foo": "bar", "biz": "baz"},
"release": "releaseme",
},
@@ -173,8 +173,8 @@ def hack_pull_out_data(jobs, projects):
data={
**transaction_event_data,
"event_id": "b" * 32,
- "timestamp": iso_format(before_now(minutes=2)),
- "start_timestamp": iso_format(before_now(minutes=2)),
+ "timestamp": iso_format(self.now - timedelta(seconds=2)),
+ "start_timestamp": iso_format(self.now - timedelta(seconds=2)),
"tags": {"foo": "quux"},
"release": "releaseme",
},
@@ -765,9 +765,7 @@ def test_get_group_tag_value_iter(self):
from sentry.tagstore.types import GroupTagValue
assert list(
- self.ts.get_group_tag_value_iter(
- self.proj1.id, self.proj1group1.id, [self.proj1env1.id], "sentry:user"
- )
+ self.ts.get_group_tag_value_iter(self.proj1group1, [self.proj1env1.id], "sentry:user")
) == [
GroupTagValue(
group_id=self.proj1group1.id,
@@ -787,12 +785,36 @@ def test_get_group_tag_value_iter(self):
),
]
+ def test_get_group_tag_value_iter_perf(self):
+ from sentry.tagstore.types import GroupTagValue
+
+ group, env = self.perf_group_and_env
+
+ assert list(self.ts.get_group_tag_value_iter(group, [env.id], "foo")) == [
+ GroupTagValue(
+ group_id=group.id,
+ key="foo",
+ value="bar",
+ times_seen=1,
+ first_seen=self.now - timedelta(seconds=1),
+ last_seen=self.now - timedelta(seconds=1),
+ ),
+ GroupTagValue(
+ group_id=group.id,
+ key="foo",
+ value="quux",
+ times_seen=1,
+ first_seen=self.now - timedelta(seconds=2),
+ last_seen=self.now - timedelta(seconds=2),
+ ),
+ ]
+
def test_get_group_tag_value_paginator(self):
from sentry.tagstore.types import GroupTagValue
assert list(
self.ts.get_group_tag_value_paginator(
- self.proj1.id, self.proj1group1.id, [self.proj1env1.id], "sentry:user"
+ self.proj1group1, [self.proj1env1.id], "sentry:user"
).get_result(10)
) == [
GroupTagValue(
@@ -813,6 +835,32 @@ def test_get_group_tag_value_paginator(self):
),
]
+ def test_get_group_tag_value_paginator_perf(self):
+ from sentry.tagstore.types import GroupTagValue
+
+ group, env = self.perf_group_and_env
+
+ assert list(
+ self.ts.get_group_tag_value_paginator(group, [env.id], "foo").get_result(10)
+ ) == [
+ GroupTagValue(
+ group_id=group.id,
+ key="foo",
+ value="bar",
+ times_seen=1,
+ first_seen=self.now - timedelta(seconds=1),
+ last_seen=self.now - timedelta(seconds=1),
+ ),
+ GroupTagValue(
+ group_id=group.id,
+ key="foo",
+ value="quux",
+ times_seen=1,
+ first_seen=self.now - timedelta(seconds=2),
+ last_seen=self.now - timedelta(seconds=2),
+ ),
+ ]
+
def test_get_group_tag_value_paginator_times_seen(self):
from sentry.tagstore.types import GroupTagValue
@@ -838,8 +886,7 @@ def test_get_group_tag_value_paginator_times_seen(self):
assert list(
self.ts.get_group_tag_value_paginator(
- self.proj1.id,
- self.proj1group1.id,
+ self.proj1group1,
[self.proj1env1.id],
"sentry:user",
order_by="-times_seen",
@@ -863,6 +910,59 @@ def test_get_group_tag_value_paginator_times_seen(self):
),
]
+ def test_get_group_tag_value_paginator_times_seen_perf(self):
+ from sentry.tagstore.types import GroupTagValue
+
+ group, env = self.perf_group_and_env
+
+ def hack_pull_out_data(jobs, projects):
+ _pull_out_data(jobs, projects)
+ for job in jobs:
+ job["event"].groups = [group]
+ return jobs, projects
+
+ with mock.patch("sentry.event_manager._pull_out_data", hack_pull_out_data):
+ self.store_event(
+ data={
+ "message": "hello",
+ "type": "transaction",
+ "culprit": "app/components/events/eventEntries in map",
+ "contexts": {"trace": {"trace_id": "b" * 32, "span_id": "c" * 16, "op": ""}},
+ "environment": env.name,
+ "event_id": "a" * 32,
+ "timestamp": iso_format(self.now - timedelta(seconds=1)),
+ "start_timestamp": iso_format(self.now - timedelta(seconds=1)),
+ "tags": {"foo": "bar"},
+ },
+ project_id=self.project.id,
+ )
+
+ assert list(
+ self.ts.get_group_tag_value_paginator(
+ group,
+ [env.id],
+ "foo",
+ order_by="-times_seen",
+ ).get_result(10)
+ ) == [
+ GroupTagValue(
+ group_id=group.id,
+ key="foo",
+ value="bar",
+ times_seen=2,
+ first_seen=self.now - timedelta(seconds=1),
+ last_seen=self.now - timedelta(seconds=1),
+ ),
+ GroupTagValue(
+ group_id=group.id,
+ key="foo",
+ value="quux",
+ times_seen=1,
+ first_seen=self.now - timedelta(seconds=2),
+ last_seen=self.now - timedelta(seconds=2),
+ ),
+ ]
+
def test_get_group_seen_values_for_environments(self):
assert self.ts.get_group_seen_values_for_environments(
[self.proj1.id], [self.proj1group1.id], [self.proj1env1.id]
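Usage sketch under an assumed tag key "browser" (everything else is the new signatures from the diff): callers now hand over the Group itself, letting the backend derive project_id and, for performance issues, the dataset and extra conditions via apply_group_filters_conditions.

# Old signatures took raw ids and always queried the Events dataset:
#   tagstore.get_group_tag_value_iter(project_id, group_id, environment_ids, key)
# New signatures take the Group, which carries enough to pick the dataset:
values = tagstore.get_group_tag_value_iter(group, environment_ids, "browser")
paginator = tagstore.get_group_tag_value_paginator(
    group, environment_ids, "browser", order_by="-last_seen"
)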
|
9aef9da7819be74c4ea7a741b4d76d31f44cd431
|
2024-01-26 03:15:15
|
Dan Fuller
|
fix(group-attributes): Stop manually sending `post_save` signals for `Group` after `Queryset.update` (#63556)
| false
|
Stop manually sending `post_save` signals for `Group` after `Queryset.update` (#63556)
|
fix
|
diff --git a/src/sentry/api/helpers/group_index/update.py b/src/sentry/api/helpers/group_index/update.py
index 6c0014f6c575d1..66c614a431706a 100644
--- a/src/sentry/api/helpers/group_index/update.py
+++ b/src/sentry/api/helpers/group_index/update.py
@@ -15,7 +15,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import analytics, features
+from sentry import analytics, features, options
from sentry.api.serializers import serialize
from sentry.api.serializers.models.actor import ActorSerializer
from sentry.db.models.query import create_or_update
@@ -492,7 +492,7 @@ def update_groups(
group.status = GroupStatus.RESOLVED
group.substatus = None
group.resolved_at = now
- if affected:
+ if affected and not options.get("groups.enable-post-update-signal"):
post_save.send(
sender=Group,
instance=group,
diff --git a/src/sentry/event_manager.py b/src/sentry/event_manager.py
index 6720e6d9cad8fa..bb4aa7848d8f8e 100644
--- a/src/sentry/event_manager.py
+++ b/src/sentry/event_manager.py
@@ -1673,12 +1673,13 @@ def _handle_regression(group: Group, event: Event, release: Optional[Release]) -
transition_type="automatic",
sender="handle_regression",
)
- post_save.send(
- sender=Group,
- instance=group,
- created=False,
- update_fields=["last_seen", "active_at", "status", "substatus"],
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send(
+ sender=Group,
+ instance=group,
+ created=False,
+ update_fields=["last_seen", "active_at", "status", "substatus"],
+ )
follows_semver = False
resolved_in_activity = None
diff --git a/src/sentry/issues/escalating.py b/src/sentry/issues/escalating.py
index 19810cfc76deb2..3daf5716bea75b 100644
--- a/src/sentry/issues/escalating.py
+++ b/src/sentry/issues/escalating.py
@@ -26,7 +26,7 @@
)
from snuba_sdk.expressions import Granularity
-from sentry import features
+from sentry import features, options
from sentry.eventstore.models import GroupEvent
from sentry.issues.escalating_group_forecast import EscalatingGroupForecast
from sentry.issues.escalating_issues_alg import GroupCount
@@ -500,12 +500,13 @@ def manage_issue_states(
if updated:
group.status = GroupStatus.UNRESOLVED
group.substatus = GroupSubStatus.ESCALATING
- post_save.send(
- sender=Group,
- instance=group,
- created=False,
- update_fields=["status", "substatus"],
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send(
+ sender=Group,
+ instance=group,
+ created=False,
+ update_fields=["status", "substatus"],
+ )
add_group_to_inbox(group, GroupInboxReason.ESCALATING, snooze_details)
record_group_history(group, GroupHistoryStatus.ESCALATING)
@@ -543,12 +544,13 @@ def manage_issue_states(
if updated:
group.status = GroupStatus.UNRESOLVED
group.substatus = GroupSubStatus.ONGOING
- post_save.send(
- sender=Group,
- instance=group,
- created=False,
- update_fields=["status", "substatus"],
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send(
+ sender=Group,
+ instance=group,
+ created=False,
+ update_fields=["status", "substatus"],
+ )
add_group_to_inbox(group, GroupInboxReason.ONGOING, snooze_details)
record_group_history(group, GroupHistoryStatus.ONGOING)
@@ -563,12 +565,13 @@ def manage_issue_states(
if updated:
group.status = GroupStatus.UNRESOLVED
group.substatus = GroupSubStatus.ONGOING
- post_save.send(
- sender=Group,
- instance=group,
- created=False,
- update_fields=["status", "substatus"],
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send(
+ sender=Group,
+ instance=group,
+ created=False,
+ update_fields=["status", "substatus"],
+ )
add_group_to_inbox(group, GroupInboxReason.UNIGNORED, snooze_details)
record_group_history(group, GroupHistoryStatus.UNIGNORED)
Activity.objects.create_group_activity(
diff --git a/src/sentry/issues/ongoing.py b/src/sentry/issues/ongoing.py
index 5a075979cb95ad..8dcca91527b0ff 100644
--- a/src/sentry/issues/ongoing.py
+++ b/src/sentry/issues/ongoing.py
@@ -3,6 +3,7 @@
import sentry_sdk
from django.db.models.signals import post_save
+from sentry import options
from sentry.models.group import Group, GroupStatus
from sentry.models.groupinbox import bulk_remove_groups_from_inbox
from sentry.types.activity import ActivityType
@@ -41,10 +42,11 @@ def bulk_transition_group_to_ongoing(
bulk_remove_groups_from_inbox(groups_to_transistion)
with sentry_sdk.start_span(description="post_save_send_robust"):
- for group in groups_to_transistion:
- post_save.send_robust(
- sender=Group,
- instance=group,
- created=False,
- update_fields=["status", "substatus"],
- )
+ if not options.get("groups.enable-post-update-signal"):
+ for group in groups_to_transistion:
+ post_save.send_robust(
+ sender=Group,
+ instance=group,
+ created=False,
+ update_fields=["status", "substatus"],
+ )
diff --git a/src/sentry/issues/status_change.py b/src/sentry/issues/status_change.py
index 848a9f06c09a8c..d3c50668d6a920 100644
--- a/src/sentry/issues/status_change.py
+++ b/src/sentry/issues/status_change.py
@@ -5,6 +5,7 @@
from django.db.models.signals import post_save
+from sentry import options
from sentry.models.activity import Activity
from sentry.models.group import Group, GroupStatus
from sentry.models.grouphistory import record_group_history_from_activity_type
@@ -119,11 +120,12 @@ def handle_status_update(
kwargs={"project_id": group.project_id, "group_id": group.id}
)
- post_save.send(
- sender=Group,
- instance=group,
- created=False,
- update_fields=["status", "substatus"],
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send(
+ sender=Group,
+ instance=group,
+ created=False,
+ update_fields=["status", "substatus"],
+ )
return ActivityInfo(activity_type, activity_data)
diff --git a/src/sentry/models/activity.py b/src/sentry/models/activity.py
index af44974c31d776..aa9e0545adc7a4 100644
--- a/src/sentry/models/activity.py
+++ b/src/sentry/models/activity.py
@@ -9,7 +9,7 @@
from django.db.models.signals import post_save
from django.utils import timezone
-from sentry import features
+from sentry import features, options
from sentry.backup.scopes import RelocationScope
from sentry.db.models import (
BaseManager,
@@ -142,9 +142,10 @@ def save(self, *args, **kwargs):
from sentry.models.group import Group
self.group.update(num_comments=F("num_comments") + 1)
- post_save.send_robust(
- sender=Group, instance=self.group, created=True, update_fields=["num_comments"]
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send_robust(
+ sender=Group, instance=self.group, created=True, update_fields=["num_comments"]
+ )
def delete(self, *args, **kwargs):
super().delete(*args, **kwargs)
@@ -154,9 +155,10 @@ def delete(self, *args, **kwargs):
from sentry.models.group import Group
self.group.update(num_comments=F("num_comments") - 1)
- post_save.send_robust(
- sender=Group, instance=self.group, created=True, update_fields=["num_comments"]
- )
+ if not options.get("groups.enable-post-update-signal"):
+ post_save.send_robust(
+ sender=Group, instance=self.group, created=True, update_fields=["num_comments"]
+ )
def send_notification(self):
activity.send_activity_notifications.delay(self.id)
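The commit repeats the same option gate at every call site; a hypothetical helper (not in the diff) shows the pattern once. The option key and imports are the real ones from the diff; created=True in the Activity paths is preserved via a parameter.

from django.db.models.signals import post_save

from sentry import options
from sentry.models.group import Group

def send_group_post_save(group, update_fields, *, created=False, robust=False):
    # Hypothetical consolidation, illustration only. When the option is on,
    # a signal fired from Group's Queryset.update is assumed to cover this.
    if options.get("groups.enable-post-update-signal"):
        return
    send = post_save.send_robust if robust else post_save.send
    send(sender=Group, instance=group, created=created, update_fields=update_fields)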
|
51063cb2d20223a99b9a9ee890785d0cf5c7c020
|
2023-01-19 03:11:57
|
Kev
|
feat(perf-issues): Change threshold unc. compressed (#43389)
| false
|
Change threshold unc. compressed (#43389)
|
feat
|
diff --git a/static/app/components/events/interfaces/performance/utils.tsx b/static/app/components/events/interfaces/performance/utils.tsx
index 8cc06b8ce540de..8674dbb77f79d9 100644
--- a/static/app/components/events/interfaces/performance/utils.tsx
+++ b/static/app/components/events/interfaces/performance/utils.tsx
@@ -33,7 +33,7 @@ const RESOURCES_DESCRIPTIONS: Record<IssueType, string> = {
'Slow DB Queries are SELECT query spans that take longer than 1s. A quick method to understand why this may be the case is running an EXPLAIN command on the query itself. To learn more about how to fix slow DB queries, check out these resources:'
),
[IssueType.PERFORMANCE_UNCOMPRESSED_ASSET]: t(
- 'Uncompressed assets are asset spans that take over 50ms and are larger than 512kB which can usually be made faster with compression. Check that your server or CDN serving your assets is accepting the content encoding header from the browser and is returning them compressed.'
+ 'Uncompressed assets are asset spans that take over 200ms and are larger than 512kB which can usually be made faster with compression. Check that your server or CDN serving your assets is accepting the content encoding header from the browser and is returning them compressed.'
),
[IssueType.ERROR]: '',
};
|
463762b48edd46d420a56f5cb0b8b7daba8035a6
|
2023-07-21 18:52:57
|
Evan Purkhiser
|
ref(ts): Convert sentry-test/index to tsx (#53314)
| false
|
Convert sentry-test/index to tsx (#53314)
|
ref
|
diff --git a/tests/js/sentry-test/index.jsx b/tests/js/sentry-test/index.tsx
similarity index 100%
rename from tests/js/sentry-test/index.jsx
rename to tests/js/sentry-test/index.tsx
|
8441c3eb84c3501339eb757cdcd1588cfcf677a7
|
2018-12-05 22:17:08
|
Brett Hoerner
|
fix(snuba): Run post_process RuleProcessor code with Snuba's consiste… (#10909)
| false
|
Run post_process RuleProcessor code with Snuba's consiste… (#10909)
|
fix
|
diff --git a/src/sentry/tasks/post_process.py b/src/sentry/tasks/post_process.py
index 3fdf2fbd346b89..8083f080e87963 100644
--- a/src/sentry/tasks/post_process.py
+++ b/src/sentry/tasks/post_process.py
@@ -14,6 +14,7 @@
from django.conf import settings
from sentry import features
+from sentry.utils import snuba
from sentry.utils.cache import cache
from sentry.plugins import plugins
from sentry.signals import event_processed
@@ -108,13 +109,14 @@ def post_process_group(event, is_new, is_regression, is_sample, is_new_group_env
# we process snoozes before rules as it might create a regression
has_reappeared = process_snoozes(event.group)
- rp = RuleProcessor(event, is_new, is_regression, is_new_group_environment, has_reappeared)
- has_alert = False
- # TODO(dcramer): ideally this would fanout, but serializing giant
- # objects back and forth isn't super efficient
- for callback, futures in rp.apply():
- has_alert = True
- safe_execute(callback, event, futures)
+ with snuba.options_override({'consistent': True}):
+ rp = RuleProcessor(event, is_new, is_regression, is_new_group_environment, has_reappeared)
+ has_alert = False
+ # TODO(dcramer): ideally this would fanout, but serializing giant
+ # objects back and forth isn't super efficient
+ for callback, futures in rp.apply():
+ has_alert = True
+ safe_execute(callback, event, futures)
if features.has(
'projects:servicehooks',
diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py
index ba92e23f6de2c0..8ffdd7dd56891c 100644
--- a/src/sentry/utils/snuba.py
+++ b/src/sentry/utils/snuba.py
@@ -24,6 +24,11 @@
MAX_ISSUES = 500
MAX_HASHES = 5000
+# Global Snuba request option override dictionary. Only intended
+# to be used with the `options_override` contextmanager below.
+# NOT THREAD SAFE!
+OVERRIDE_OPTIONS = {}
+
SENTRY_SNUBA_MAP = {
# general
'id': 'event_id',
@@ -157,6 +162,34 @@ def timer(name, prefix='snuba.client'):
metrics.timing(u'{}.{}'.format(prefix, name), time.time() - t)
+@contextmanager
+def options_override(overrides):
+ """\
+ NOT THREAD SAFE!
+
+ Adds to OVERRIDE_OPTIONS, restoring previous values and removing
+ keys that didn't previously exist on exit, so that calls to this
+ can be nested.
+ """
+ previous = {}
+ delete = []
+
+ for k, v in overrides.items():
+ try:
+ previous[k] = OVERRIDE_OPTIONS[k]
+ except KeyError:
+ delete.append(k)
+ OVERRIDE_OPTIONS[k] = v
+
+ try:
+ yield
+ finally:
+ for k, v in previous.items():
+ OVERRIDE_OPTIONS[k] = v
+ for k in delete:
+ OVERRIDE_OPTIONS.pop(k)
+
+
def connection_from_url(url, **kw):
if url[:1] == '/':
from sentry.net.http import UnixHTTPConnectionPool
@@ -363,6 +396,8 @@ def raw_query(start, end, groupby=None, conditions=None, filter_keys=None,
'turbo': turbo
}) if v is not None}
+ request.update(OVERRIDE_OPTIONS)
+
headers = {}
if referrer:
headers['referer'] = referrer
diff --git a/tests/snuba/test_util.py b/tests/snuba/test_util.py
index 0fd8edce9df701..71ef73a380194d 100644
--- a/tests/snuba/test_util.py
+++ b/tests/snuba/test_util.py
@@ -62,3 +62,12 @@ def test_shrink_timeframe(self):
['count()', '', 'count'],
],
)
+
+ def test_override_options(self):
+ assert snuba.OVERRIDE_OPTIONS == {}
+ with snuba.options_override({'foo': 1}):
+ assert snuba.OVERRIDE_OPTIONS == {'foo': 1}
+ with snuba.options_override({'foo': 2}):
+ assert snuba.OVERRIDE_OPTIONS == {'foo': 2}
+ assert snuba.OVERRIDE_OPTIONS == {'foo': 1}
+ assert snuba.OVERRIDE_OPTIONS == {}
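A standalone sketch of the same pattern, runnable outside Sentry, showing why the try/finally matters: previous values are restored even when the body raises (generic names, not Sentry's module-level dict).

from contextlib import contextmanager

OVERRIDES = {}  # module-level; not thread safe, like the original

@contextmanager
def override(opts):
    previous = {k: OVERRIDES[k] for k in opts if k in OVERRIDES}
    added = [k for k in opts if k not in OVERRIDES]
    OVERRIDES.update(opts)
    try:
        yield
    finally:  # runs on normal exit and on exceptions alike
        OVERRIDES.update(previous)
        for k in added:
            OVERRIDES.pop(k)

try:
    with override({"consistent": True}):
        raise RuntimeError("query failed")
except RuntimeError:
    pass
assert OVERRIDES == {}  # state restored despite the exception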
|
bdfccf0c09cb911bdfd16eeea1d766afb28435a2
|
2020-09-05 05:17:09
|
Taylan Gocmen
|
fix(workflow): Use empty as default when query string is undefined (#20516)
| false
|
Use empty as default when query string is undefined (#20516)
|
fix
|
diff --git a/src/sentry/static/sentry/app/utils/stream.tsx b/src/sentry/static/sentry/app/utils/stream.tsx
index 03962d4aa9f7c4..cd01d75c415e15 100644
--- a/src/sentry/static/sentry/app/utils/stream.tsx
+++ b/src/sentry/static/sentry/app/utils/stream.tsx
@@ -17,7 +17,7 @@ type QueryObj = {
[key: string]: string;
};
-export function queryToObj(queryStr: string): QueryObj {
+export function queryToObj(queryStr = ''): QueryObj {
const text: string[] = [];
const queryItems = queryStr.match(/\S+:"[^"]*"?|\S+/g);
diff --git a/tests/js/spec/utils/stream.spec.jsx b/tests/js/spec/utils/stream.spec.jsx
index 46a690e05c4b20..8dcbfe42045dd8 100644
--- a/tests/js/spec/utils/stream.spec.jsx
+++ b/tests/js/spec/utils/stream.spec.jsx
@@ -32,6 +32,12 @@ describe('utils/stream', function() {
is: 'unresolved',
});
});
+
+ it('should use empty string as __text and not fail if query is undefined', function() {
+ expect(queryToObj()).toEqual({
+ __text: '',
+ });
+ });
});
describe('objToQuery()', function() {
|
5972c50a311ad5248923331ab4fee3de1e36bc08
|
2020-03-04 13:20:28
|
Priscila Oliveira
|
ref(ui): converted sidebarMenuItem to ts (#17431)
| false
|
converted sidebarMenuItem to ts (#17431)
|
ref
|
diff --git a/src/sentry/static/sentry/app/components/sidebar/help.jsx b/src/sentry/static/sentry/app/components/sidebar/help.jsx
index 393fab290f6460..c9dedacd1adff2 100644
--- a/src/sentry/static/sentry/app/components/sidebar/help.jsx
+++ b/src/sentry/static/sentry/app/components/sidebar/help.jsx
@@ -51,10 +51,10 @@ class SidebarHelp extends React.Component {
<SidebarMenuItem onClick={this.handleSearchClick}>
{t('Search Docs and FAQs')}
</SidebarMenuItem>
- <SidebarMenuItem href="https://forum.sentry.io/" target="_blank">
+ <SidebarMenuItem href="https://forum.sentry.io/" openInNewTab>
{t('Community Discussions')}
</SidebarMenuItem>
- <SidebarMenuItem href="https://status.sentry.io/" target="_blank">
+ <SidebarMenuItem href="https://status.sentry.io/" openInNewTab>
{t('Service Status')}
</SidebarMenuItem>
</HelpMenu>
diff --git a/src/sentry/static/sentry/app/components/sidebar/sidebarDropdown/index.jsx b/src/sentry/static/sentry/app/components/sidebar/sidebarDropdown/index.jsx
index 882394bb03e4b0..e750dec34c298c 100644
--- a/src/sentry/static/sentry/app/components/sidebar/sidebarDropdown/index.jsx
+++ b/src/sentry/static/sentry/app/components/sidebar/sidebarDropdown/index.jsx
@@ -13,7 +13,7 @@ import InlineSvg from 'app/components/inlineSvg';
import Link from 'app/components/links/link';
import SentryTypes from 'app/sentryTypes';
import SidebarDropdownMenu from 'app/components/sidebar/sidebarDropdownMenu.styled';
-import SidebarMenuItem, {getMenuItemStyles} from 'app/components/sidebar/sidebarMenuItem';
+import SidebarMenuItem, {menuItemStyles} from 'app/components/sidebar/sidebarMenuItem';
import SidebarOrgSummary from 'app/components/sidebar/sidebarOrgSummary';
import TextOverflow from 'app/components/textOverflow';
import withApi from 'app/utils/withApi';
@@ -176,7 +176,7 @@ const SentryLink = styled(Link)`
`;
const UserSummary = styled(Link)`
- ${getMenuItemStyles}
+ ${menuItemStyles}
padding: 10px 15px;
`;
diff --git a/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItem.jsx b/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItem.jsx
deleted file mode 100644
index 373aad4a0cc7d2..00000000000000
--- a/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItem.jsx
+++ /dev/null
@@ -1,92 +0,0 @@
-import PropTypes from 'prop-types';
-import React from 'react';
-import styled from '@emotion/styled';
-import {css} from '@emotion/core';
-
-import Link from 'app/components/links/link';
-import ExternalLink from 'app/components/links/externalLink';
-
-import {OrgSummary} from './sidebarOrgSummary';
-
-class SidebarMenuItem extends React.Component {
- static propTypes = {
- /**
- * Use this prop if button is a react-router link
- */
- to: PropTypes.string,
- /**
- * Use this prop if button should use a normal (non-react-router) link
- */
- href: PropTypes.string,
- /**
- * Is an external link? (Will open in new tab; Only applicable if `href` is used)
- */
- external: PropTypes.bool,
- };
- render() {
- const {children, to, href, ...props} = this.props;
- const hasMenu = !to && !href;
-
- return (
- <MenuItemLink to={to} href={href} {...props}>
- <MenuItemLabel hasMenu={hasMenu}>{children}</MenuItemLabel>
- </MenuItemLink>
- );
- }
-}
-export default SidebarMenuItem;
-
-const MenuItemLabel = styled('span')`
- flex: 1;
- ${p =>
- p.hasMenu
- ? css`
- margin: 0 -15px;
- padding: 0 15px;
- `
- : css`
- overflow: hidden;
- `};
-`;
-
-const getMenuItemStyles = p => css`
- color: ${p.theme.gray5};
- cursor: pointer;
- display: flex;
- font-size: 14px;
- line-height: 32px;
- padding: 0 ${p.theme.sidebar.menuSpacing};
- position: relative;
- transition: 0.1s all linear;
- ${(!!p.to || !!p.href) && 'overflow: hidden'};
-
- &:hover,
- &:active,
- &.focus-visible {
- background: ${p.theme.offWhite};
- color: ${p.theme.gray5};
- outline: none;
- }
-
- ${OrgSummary} {
- padding-left: 0;
- padding-right: 0;
- }
-`;
-
-export {getMenuItemStyles};
-
-const MenuItemLink = styled(({to, href, external, ...props}) => {
- if (to) {
- return <Link to={to} href={href} {...props} />;
- }
-
- if (href) {
- const Component = external ? ExternalLink : Link;
- return <Component href={href} {...props} />;
- }
-
- return <div tabIndex="0" {...props} />;
-})`
- ${getMenuItemStyles}
-`;
diff --git a/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItem.tsx b/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItem.tsx
new file mode 100644
index 00000000000000..dcd9a427c67a8f
--- /dev/null
+++ b/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItem.tsx
@@ -0,0 +1,66 @@
+import React from 'react';
+import styled from '@emotion/styled';
+import {css} from '@emotion/core';
+
+import {Theme} from 'app/utils/theme';
+
+import SidebarMenuItemLink, {SidebarMenuItemLinkProps} from './sidebarMenuItemLink';
+import {OrgSummary} from './sidebarOrgSummary';
+
+type Props = {
+ children: React.ReactNode;
+} & SidebarMenuItemLinkProps;
+
+const SidebarMenuItem = ({to, href, children, ...props}: Props) => {
+ const hasMenu = !to && !href;
+ return (
+ <StyledSidebarMenuItemLink to={to} href={href} {...props}>
+ <MenuItemLabel hasMenu={hasMenu}>{children}</MenuItemLabel>
+ </StyledSidebarMenuItemLink>
+ );
+};
+
+const menuItemStyles = (p: SidebarMenuItemLinkProps & {theme: Theme}) => css`
+ color: ${p.theme.gray5};
+ cursor: pointer;
+ display: flex;
+ font-size: ${p.theme.fontSizeMedium};
+ line-height: 32px;
+ padding: 0 ${p.theme.sidebar.menuSpacing};
+ position: relative;
+ transition: 0.1s all linear;
+ ${(!!p.to || !!p.href) && 'overflow: hidden'};
+
+ &:hover,
+ &:active,
+ &.focus-visible {
+ background: ${p.theme.offWhite};
+ color: ${p.theme.gray5};
+ outline: none;
+ }
+
+ ${OrgSummary} {
+ padding-left: 0;
+ padding-right: 0;
+ }
+`;
+
+const MenuItemLabel = styled('span')<{hasMenu?: boolean}>`
+ flex: 1;
+ ${p =>
+ p.hasMenu
+ ? css`
+ margin: 0 -15px;
+ padding: 0 15px;
+ `
+ : css`
+ overflow: hidden;
+ `};
+`;
+
+const StyledSidebarMenuItemLink = styled(SidebarMenuItemLink)`
+ ${menuItemStyles}
+`;
+
+export {menuItemStyles};
+export default SidebarMenuItem;
diff --git a/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItemLink.tsx b/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItemLink.tsx
new file mode 100644
index 00000000000000..96d105297a8f73
--- /dev/null
+++ b/src/sentry/static/sentry/app/components/sidebar/sidebarMenuItemLink.tsx
@@ -0,0 +1,58 @@
+import React from 'react';
+
+import Link from 'app/components/links/link';
+import ExternalLink from 'app/components/links/externalLink';
+
+export type SidebarMenuItemLinkProps = {
+ /**
+ * Use this prop if button is a react-router link
+ */
+ to?: string;
+ /**
+ * Use this prop if button should use a normal (non-react-router) link
+ */
+ href?: string;
+ /**
+ * Is an external link? (Will open in new tab; Only applicable if `href` is used)
+ */
+ external?: boolean;
+ /**
+ * specifies whether to open the linked document in a new tab
+ */
+ openInNewTab?: boolean;
+ /**
+ * It is raised when the user clicks on the element - optional
+ */
+ onClick?: () => void;
+ /**
+ * Inline styles
+ */
+ style?: React.CSSProperties;
+};
+
+const SidebarMenuItemLink = ({
+ to,
+ href,
+ external,
+ openInNewTab,
+ ...props
+}: SidebarMenuItemLinkProps) => {
+ const target = openInNewTab ? '_blank' : '_self';
+
+ if (to) {
+ return <Link {...props} to={to} href={href} target={target} />;
+ }
+
+ if (href) {
+ return external ? (
+ // target is not passed here, as ExternalLink by default opens the link in a new tab
+ <ExternalLink {...props} href={href} />
+ ) : (
+ <Link href={href} target={target} {...props} />
+ );
+ }
+
+ return <div tabIndex={0} {...props} />;
+};
+
+export default SidebarMenuItemLink;
diff --git a/src/sentry/static/sentry/app/utils/theme.tsx b/src/sentry/static/sentry/app/utils/theme.tsx
index c0f7a638d9acf0..52abba61a6942d 100644
--- a/src/sentry/static/sentry/app/utils/theme.tsx
+++ b/src/sentry/static/sentry/app/utils/theme.tsx
@@ -321,4 +321,6 @@ const theme = {
space: [0, 8, 16, 20, 30],
};
+export type Theme = typeof theme;
+
export default theme;
diff --git a/tests/js/spec/components/sidebar/__snapshots__/index.spec.jsx.snap b/tests/js/spec/components/sidebar/__snapshots__/index.spec.jsx.snap
index 4bd139e31318cc..bef74565f3baad 100644
--- a/tests/js/spec/components/sidebar/__snapshots__/index.spec.jsx.snap
+++ b/tests/js/spec/components/sidebar/__snapshots__/index.spec.jsx.snap
@@ -28,7 +28,7 @@ exports[`Sidebar SidebarDropdown can open "Switch Organization" sub-menu 1`] = `
}
to="/organizations/new/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
data-test-id="sidebar-create-org"
style={
Object {
@@ -37,8 +37,8 @@ exports[`Sidebar SidebarDropdown can open "Switch Organization" sub-menu 1`] = `
}
to="/organizations/new/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebar-create-org"
style={
Object {
@@ -48,17 +48,18 @@ exports[`Sidebar SidebarDropdown can open "Switch Organization" sub-menu 1`] = `
to="/organizations/new/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebar-create-org"
style={
Object {
"alignItems": "center",
}
}
+ target="_self"
to="/organizations/new/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebar-create-org"
onlyActiveOnIndex={false}
style={
@@ -66,10 +67,11 @@ exports[`Sidebar SidebarDropdown can open "Switch Organization" sub-menu 1`] = `
"alignItems": "center",
}
}
+ target="_self"
to="/organizations/new/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebar-create-org"
onClick={[Function]}
style={
@@ -77,12 +79,13 @@ exports[`Sidebar SidebarDropdown can open "Switch Organization" sub-menu 1`] = `
"alignItems": "center",
}
}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
<MenuItemLabelWithIcon>
<span
@@ -120,8 +123,8 @@ exports[`Sidebar SidebarDropdown can open "Switch Organization" sub-menu 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
</div>
</SwitchOrganizationMenu>
@@ -306,33 +309,36 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
<SidebarMenuItem
to="/settings/org-slug/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
to="/settings/org-slug/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
to="/settings/org-slug/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
+ target="_self"
to="/settings/org-slug/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onlyActiveOnIndex={false}
style={Object {}}
+ target="_self"
to="/settings/org-slug/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
style={Object {}}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
Organization settings
</span>
@@ -340,39 +346,42 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
to="/settings/org-slug/teams/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
to="/settings/org-slug/teams/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
to="/settings/org-slug/teams/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
+ target="_self"
to="/settings/org-slug/teams/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onlyActiveOnIndex={false}
style={Object {}}
+ target="_self"
to="/settings/org-slug/teams/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
style={Object {}}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
Teams
</span>
@@ -380,8 +389,8 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<Hook
name="sidebar:organization-dropdown-menu"
@@ -417,19 +426,19 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
<Hook(sidebar:organization-dropdown-menu) />
</Hook>
<SidebarMenuItem>
- <MenuItemLink>
- <Component
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ <StyledSidebarMenuItemLink>
+ <SidebarMenuItemLink
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
>
<div
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
- tabIndex="0"
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
+ tabIndex={0}
>
<MenuItemLabel
hasMenu={true}
>
<span
- className="css-uvye8q-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-uvye8q-MenuItemLabel-MenuItemLabel eev51g20"
>
<withOrganizations(SwitchOrganization)
canCreateOrganization={false}
@@ -487,8 +496,8 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
</span>
</MenuItemLabel>
</div>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<Divider>
<div
@@ -755,33 +764,36 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
<SidebarMenuItem
to="/settings/account/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
to="/settings/account/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
to="/settings/account/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
+ target="_self"
to="/settings/account/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onlyActiveOnIndex={false}
style={Object {}}
+ target="_self"
to="/settings/account/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
style={Object {}}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
User settings
</span>
@@ -789,39 +801,42 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
to="/settings/account/api/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
to="/settings/account/api/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
to="/settings/account/api/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
+ target="_self"
to="/settings/account/api/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onlyActiveOnIndex={false}
style={Object {}}
+ target="_self"
to="/settings/account/api/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
style={Object {}}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
API keys
</span>
@@ -829,40 +844,40 @@ exports[`Sidebar SidebarDropdown can open Sidebar org/name dropdown menu 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
data-test-id="sidebarSignout"
onClick={[Function]}
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
data-test-id="sidebarSignout"
onClick={[Function]}
>
- <Component
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebarSignout"
onClick={[Function]}
>
<div
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebarSignout"
onClick={[Function]}
- tabIndex="0"
+ tabIndex={0}
>
<MenuItemLabel
hasMenu={true}
>
<span
- className="css-uvye8q-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-uvye8q-MenuItemLabel-MenuItemLabel eev51g20"
>
Sign out
</span>
</MenuItemLabel>
</div>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
</div>
</div>
@@ -917,51 +932,51 @@ exports[`Sidebar SidebarHelp can toggle help menu 1`] = `
<SidebarMenuItem
onClick={[Function]}
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
onClick={[Function]}
>
- <Component
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
>
<div
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
- tabIndex="0"
+ tabIndex={0}
>
<MenuItemLabel
hasMenu={true}
>
<span
- className="css-uvye8q-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-uvye8q-MenuItemLabel-MenuItemLabel eev51g20"
>
Search Docs and FAQs
</span>
</MenuItemLabel>
</div>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
href="https://forum.sentry.io/"
- target="_blank"
+ openInNewTab={true}
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
href="https://forum.sentry.io/"
- target="_blank"
+ openInNewTab={true}
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
href="https://forum.sentry.io/"
- target="_blank"
+ openInNewTab={true}
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
href="https://forum.sentry.io/"
target="_blank"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
href="https://forum.sentry.io/"
target="_blank"
>
@@ -969,36 +984,36 @@ exports[`Sidebar SidebarHelp can toggle help menu 1`] = `
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
Community Discussions
</span>
</MenuItemLabel>
</a>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
href="https://status.sentry.io/"
- target="_blank"
+ openInNewTab={true}
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
href="https://status.sentry.io/"
- target="_blank"
+ openInNewTab={true}
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
href="https://status.sentry.io/"
- target="_blank"
+ openInNewTab={true}
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
href="https://status.sentry.io/"
target="_blank"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
href="https://status.sentry.io/"
target="_blank"
>
@@ -1006,15 +1021,15 @@ exports[`Sidebar SidebarHelp can toggle help menu 1`] = `
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
Service Status
</span>
</MenuItemLabel>
</a>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
</div>
</HelpMenu>
@@ -5872,33 +5887,36 @@ exports[`Sidebar renders without org and router 1`] = `
<SidebarMenuItem
to="/settings/account/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
to="/settings/account/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
to="/settings/account/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
+ target="_self"
to="/settings/account/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onlyActiveOnIndex={false}
style={Object {}}
+ target="_self"
to="/settings/account/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
style={Object {}}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
User settings
</span>
@@ -5906,39 +5924,42 @@ exports[`Sidebar renders without org and router 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
to="/settings/account/api/"
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
to="/settings/account/api/"
>
- <Component
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
to="/settings/account/api/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
+ target="_self"
to="/settings/account/api/"
>
<Link
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onlyActiveOnIndex={false}
style={Object {}}
+ target="_self"
to="/settings/account/api/"
>
<a
- className="css-3v825n-MenuItemLink e1ru2gxu1"
+ className="css-1eunyni-StyledSidebarMenuItemLink eev51g21"
onClick={[Function]}
style={Object {}}
+ target="_self"
>
<MenuItemLabel
hasMenu={false}
>
<span
- className="css-1ug1il4-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-1ug1il4-MenuItemLabel-MenuItemLabel eev51g20"
>
API keys
</span>
@@ -5946,40 +5967,40 @@ exports[`Sidebar renders without org and router 1`] = `
</a>
</Link>
</Link>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
<SidebarMenuItem
data-test-id="sidebarSignout"
onClick={[Function]}
>
- <MenuItemLink
+ <StyledSidebarMenuItemLink
data-test-id="sidebarSignout"
onClick={[Function]}
>
- <Component
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ <SidebarMenuItemLink
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebarSignout"
onClick={[Function]}
>
<div
- className="css-1a5th4k-MenuItemLink e1ru2gxu1"
+ className="css-1sehl9l-StyledSidebarMenuItemLink eev51g21"
data-test-id="sidebarSignout"
onClick={[Function]}
- tabIndex="0"
+ tabIndex={0}
>
<MenuItemLabel
hasMenu={true}
>
<span
- className="css-uvye8q-MenuItemLabel-MenuItemLabel e1ru2gxu0"
+ className="css-uvye8q-MenuItemLabel-MenuItemLabel eev51g20"
>
Sign out
</span>
</MenuItemLabel>
</div>
- </Component>
- </MenuItemLink>
+ </SidebarMenuItemLink>
+ </StyledSidebarMenuItemLink>
</SidebarMenuItem>
</div>
</div>
|
ef3001b7f43a47a4ffd6bbac91643e15c208d247
|
2021-04-15 00:30:14
|
Evan Purkhiser
|
ref(js): Bump size-limit-preset-small-lib (#25256)
| false
|
Bump size-limit-preset-small-lib (#25256)
|
ref
|
diff --git a/package.json b/package.json
index 2c8676d6402c67..36b3df0287997c 100644
--- a/package.json
+++ b/package.json
@@ -140,7 +140,7 @@
"@babel/plugin-transform-react-jsx-source": "^7.12.13",
"@pmmmwh/react-refresh-webpack-plugin": "^0.4.2",
"@sentry/node": "6.3.0-beta.6",
- "@size-limit/preset-small-lib": "^4.5.6",
+ "@size-limit/preset-small-lib": "^4.10.2",
"@storybook/addon-a11y": "^6.2.8",
"@storybook/addon-actions": "^6.2.8",
"@storybook/addon-essentials": "^6.2.8",
diff --git a/yarn.lock b/yarn.lock
index b2e798cb00a2c0..5075c87da96077 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1935,6 +1935,11 @@
schema-utils "^2.6.5"
source-map "^0.7.3"
+"@polka/url@^1.0.0-next.9":
+ version "1.0.0-next.12"
+ resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.12.tgz#431ec342a7195622f86688bbda82e3166ce8cb28"
+ integrity sha512-6RglhutqrGFMO1MNUXp95RBuYIuc8wTnMAV5MUhLmjTOy78ncwOw7RgeQ/HeymkKXRhZd0s2DNrM1rL7unk3MQ==
+
"@popperjs/core@^2.5.4", "@popperjs/core@^2.6.0":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.9.1.tgz#7f554e7368c9ab679a11f4a042ca17149d70cf12"
@@ -2103,37 +2108,37 @@
dependencies:
"@sinonjs/commons" "^1.7.0"
-"@size-limit/[email protected]":
- version "4.5.6"
- resolved "https://registry.yarnpkg.com/@size-limit/file/-/file-4.5.6.tgz#6d4c620df37f7cbd58b361302ff2cd4e5f269dd1"
- integrity sha512-6USNLtFRwNXrrvWDTJ5vhF7cDTuDm9YD+8prDEmid504ShYO7ZYptJEhTKsilgsXYtBNQ4lpVWQsKUYu/1JBzw==
+"@size-limit/[email protected]":
+ version "4.10.2"
+ resolved "https://registry.yarnpkg.com/@size-limit/file/-/file-4.10.2.tgz#0a91b83ae310d267bd0a9d6d865e06b0f3fef6ce"
+ integrity sha512-IrmEzZitNMTyGcbvIN5bMN6u8A5x8M1YVjfJnEiO3mukMtszGK2yOqVYltyyvB0Qm0Wvqcm4qXAxxRASXtDwVg==
dependencies:
- semver "7.3.2"
+ semver "7.3.5"
-"@size-limit/preset-small-lib@^4.5.6":
- version "4.5.6"
- resolved "https://registry.yarnpkg.com/@size-limit/preset-small-lib/-/preset-small-lib-4.5.6.tgz#7ccc4cca795ecadf1ab49f466724587b02e1b29a"
- integrity sha512-J5lAL7g+06LuLMy80pB8VQPVLZsMNeWMPOjtlcfX61Xe1nyE6AzQkXcYYyt3mAwR7PWF8GHIiQFFi4MHQ58QyQ==
+"@size-limit/preset-small-lib@^4.10.2":
+ version "4.10.2"
+ resolved "https://registry.yarnpkg.com/@size-limit/preset-small-lib/-/preset-small-lib-4.10.2.tgz#9522f38fb091f88fcb7178735903b31ac9bf4f17"
+ integrity sha512-TjnxyhwLbazXXMUPYqfta+l0lFKhdhg3GJ92rdxioiO1syS8dMbrvi8VBb9b7CJkfjnAf3gI4kmQxALwfhbCiA==
dependencies:
- "@size-limit/file" "4.5.6"
- "@size-limit/webpack" "4.5.6"
+ "@size-limit/file" "4.10.2"
+ "@size-limit/webpack" "4.10.2"
-"@size-limit/[email protected]":
- version "4.5.6"
- resolved "https://registry.yarnpkg.com/@size-limit/webpack/-/webpack-4.5.6.tgz#ed496700919a41ad70e1901cf9c711d08cfccd3e"
- integrity sha512-6cphzivIpOcCrZy2he+e6eYNAyB6g2jTWDjTuj9nned77tyBdkbOCzTG8gRRdo2/TDVdHHRdkPaPeJx4ZROJ8g==
+"@size-limit/[email protected]":
+ version "4.10.2"
+ resolved "https://registry.yarnpkg.com/@size-limit/webpack/-/webpack-4.10.2.tgz#e9bf3ca30eaa371d40687e03e5be4a1c82c6e561"
+ integrity sha512-ZWGQk4RO8XGOQmYVWiOj5tTsltb7O4f2FEr5iULURbaOuziMItDk6fR1Bs8mXFawrb4s1lKSIGzBxi4uf+TjTQ==
dependencies:
- css-loader "^4.2.1"
+ css-loader "^5.2.0"
escape-string-regexp "^4.0.0"
- file-loader "^6.0.0"
+ file-loader "^6.2.0"
mkdirp "^1.0.4"
- nanoid "^3.1.10"
- optimize-css-assets-webpack-plugin "^5.0.3"
+ nanoid "^3.1.22"
+ optimize-css-assets-webpack-plugin "^5.0.4"
pnp-webpack-plugin "^1.6.4"
rimraf "^3.0.2"
- style-loader "^1.2.1"
+ style-loader "^2.0.0"
webpack "^4.44.1"
- webpack-bundle-analyzer "^3.8.0"
+ webpack-bundle-analyzer "^4.4.0"
"@storybook/addon-a11y@^6.2.8":
version "6.2.8"
@@ -3843,6 +3848,11 @@ acorn-walk@^7.1.1, acorn-walk@^7.2.0:
resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
+acorn-walk@^8.0.0:
+ version "8.0.2"
+ resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.0.2.tgz#d4632bfc63fd93d0f15fd05ea0e984ffd3f5a8c3"
+ integrity sha512-+bpA9MJsHdZ4bgfDcpk0ozQyhhVct7rzOmO0s1IIr0AGGgKBljss8n2zp11rRP2wid5VGeh04CgeKzgat5/25A==
+
acorn@^5.5.3:
version "5.7.4"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e"
@@ -4615,16 +4625,6 @@ better-opn@^2.1.1:
dependencies:
open "^7.0.3"
-bfj@^6.1.1:
- version "6.1.2"
- resolved "https://registry.yarnpkg.com/bfj/-/bfj-6.1.2.tgz#325c861a822bcb358a41c78a33b8e6e2086dde7f"
- integrity sha512-BmBJa4Lip6BPRINSZ0BPEIfB1wUY/9rwbwvIHQA1KjX9om29B6id0wnWXq7m3bn5JrUVjeOTnVuhPT1FiHwPGw==
- dependencies:
- bluebird "^3.5.5"
- check-types "^8.0.3"
- hoopy "^0.1.4"
- tryer "^1.0.1"
-
big.js@^3.1.3:
version "3.2.0"
resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e"
@@ -4652,7 +4652,7 @@ bindings@^1.5.0:
dependencies:
file-uri-to-path "1.0.0"
-bluebird@^3.3.5, bluebird@^3.5.5:
+bluebird@^3.3.5:
version "3.7.2"
resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f"
integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==
@@ -5100,11 +5100,6 @@ chardet@^0.7.0:
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==
-check-types@^8.0.3:
- version "8.0.3"
- resolved "https://registry.yarnpkg.com/check-types/-/check-types-8.0.3.tgz#3356cca19c889544f2d7a95ed49ce508a0ecf552"
- integrity sha512-YpeKZngUmG65rLudJ4taU7VLkOCTMhNl/u4ctNC56LQS/zJTyNH0Lrtwm1tfTsbLlwvlfsA2d1c8vCf/Kh2KwQ==
-
cheerio@^1.0.0-rc.2:
version "1.0.0-rc.3"
resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.3.tgz#094636d425b2e9c0f4eb91a46c05630c9a1a8bf6"
@@ -5417,7 +5412,7 @@ comma-separated-tokens@^1.0.0:
resolved "https://registry.yarnpkg.com/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz#632b80b6117867a158f1080ad498b2fbe7e3f5ea"
integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==
-commander@^2.18.0, commander@^2.19.0, commander@^2.20.0:
+commander@^2.19.0, commander@^2.20.0:
version "2.20.3"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
@@ -5427,7 +5422,7 @@ commander@^4.0.0:
resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068"
integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==
-commander@^6.2.1:
+commander@^6.2.0, commander@^6.2.1:
version "6.2.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c"
integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==
@@ -5604,7 +5599,7 @@ core-js@^2.6.5:
resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.11.tgz#38831469f9922bded8ee21c9dc46985e0399308c"
integrity sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg==
-core-js@^3.0.4, core-js@^3.1.2, core-js@^3.6.5, core-js@^3.8.2, core-js@^3.10.1:
+core-js@^3.0.4, core-js@^3.1.2, core-js@^3.10.1, core-js@^3.6.5, core-js@^3.8.2:
version "3.10.1"
resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.10.1.tgz#e683963978b6806dcc6c0a4a8bd4ab0bdaf3f21a"
integrity sha512-pwCxEXnj27XG47mu7SXAwhLP3L5CrlvCB91ANUkIz40P27kUcvNfSdvyZJ9CLHiVoKSp+TTChMQMSKQEH/IQxA==
@@ -5791,7 +5786,7 @@ css-declaration-sorter@^4.0.1:
postcss "^7.0.1"
timsort "^0.3.0"
-css-loader@^3.6.0, css-loader@^4.2.1, css-loader@^5.0.0, css-loader@^5.0.1:
+css-loader@^3.6.0, css-loader@^5.0.0, css-loader@^5.0.1, css-loader@^5.2.0:
version "5.1.3"
resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-5.1.3.tgz#87f6fc96816b20debe3cf682f85c7e56a963d0d1"
integrity sha512-CoPZvyh8sLiGARK3gqczpfdedbM74klGWurF2CsNZ2lhNaXdLIUks+3Mfax3WBeRuHoglU+m7KG/+7gY6G4aag==
@@ -6480,6 +6475,11 @@ duplexer@^0.1.1:
resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.1.tgz#ace6ff808c1ce66b57d1ebf97977acb02334cfc1"
integrity sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=
+duplexer@^0.1.2:
+ version "0.1.2"
+ resolved "https://registry.yarnpkg.com/duplexer/-/duplexer-0.1.2.tgz#3abe43aef3835f8ae077d136ddce0f276b0400e6"
+ integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==
+
ecc-jsbn@~0.1.1:
version "0.1.2"
resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9"
@@ -6508,11 +6508,6 @@ [email protected]:
resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
-ejs@^2.6.1:
- version "2.7.4"
- resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba"
- integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==
-
electron-to-chromium@^1.3.564, electron-to-chromium@^1.3.649:
version "1.3.693"
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.693.tgz#5089c506a925c31f93fcb173a003a22e341115dd"
@@ -7270,7 +7265,7 @@ expect@^26.6.2:
jest-message-util "^26.6.2"
jest-regex-util "^26.0.0"
-express@^4.16.3, express@^4.17.1:
+express@^4.17.1:
version "4.17.1"
resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134"
integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==
@@ -7476,7 +7471,7 @@ file-entry-cache@^5.0.1:
dependencies:
flat-cache "^2.0.1"
-file-loader@^6.0.0, file-loader@^6.2.0:
+file-loader@^6.2.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/file-loader/-/file-loader-6.2.0.tgz#baef7cf8e1840df325e4390b4484879480eebe4d"
integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==
@@ -7503,11 +7498,6 @@ [email protected]:
resolved "https://registry.yarnpkg.com/filesize/-/filesize-6.1.0.tgz#e81bdaa780e2451d714d71c0d7a4f3238d37ad00"
integrity sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg==
-filesize@^3.6.1:
- version "3.6.1"
- resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.6.1.tgz#090bb3ee01b6f801a8a8be99d31710b3422bb317"
- integrity sha512-7KjR1vv6qnicaPMi1iiTcI85CyYwRO/PSFCu6SvqL8jN2Wjt/NIYQTFtFs7fSDCYOstUkEWIQGFUg5YZQfjlcg==
-
fill-range@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"
@@ -8106,7 +8096,7 @@ gud@^1.0.0:
resolved "https://registry.yarnpkg.com/gud/-/gud-1.0.0.tgz#a489581b17e6a70beca9abe3ae57de7a499852c0"
integrity sha512-zGEOVKFM5sVPPrYs7J5/hYEw2Pof8KCyOwyhG8sAF26mCAeUFAcYPu1mwB7hhpIP29zOIBaDqwuHdLp0jvZXjw==
[email protected], gzip-size@^5.0.0:
[email protected]:
version "5.1.1"
resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-5.1.1.tgz#cb9bee692f87c0612b232840a873904e4c135274"
integrity sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==
@@ -8114,6 +8104,13 @@ [email protected], gzip-size@^5.0.0:
duplexer "^0.1.1"
pify "^4.0.1"
+gzip-size@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/gzip-size/-/gzip-size-6.0.0.tgz#065367fd50c239c0671cbcbad5be3e2eeb10e462"
+ integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==
+ dependencies:
+ duplexer "^0.1.2"
+
handle-thing@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/handle-thing/-/handle-thing-2.0.0.tgz#0e039695ff50c93fc288557d696f3c1dc6776754"
@@ -8347,11 +8344,6 @@ hoist-non-react-statics@^3.0.0, hoist-non-react-statics@^3.3.0, hoist-non-react-
dependencies:
react-is "^16.7.0"
-hoopy@^0.1.4:
- version "0.1.4"
- resolved "https://registry.yarnpkg.com/hoopy/-/hoopy-0.1.4.tgz#609207d661100033a9a9402ad3dea677381c1b1d"
- integrity sha512-HRcs+2mr52W0K+x8RzcLzuPPmVIKMSv97RGHy0Ea9y/mpcaK+xTrjICA04KAHi4GRzxliNqNJEFYWHghy3rSfQ==
-
hosted-git-info@^2.1.4:
version "2.8.5"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.5.tgz#759cfcf2c4d156ade59b0b2dfabddc42a6b9c70c"
@@ -10746,7 +10738,7 @@ [email protected], mime@^1.4.1:
resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
-mime@^2.4.4:
+mime@^2.3.1, mime@^2.4.4:
version "2.5.2"
resolved "https://registry.yarnpkg.com/mime/-/mime-2.5.2.tgz#6e3dc6cc2b9510643830e5f19d5cb753da5eeabe"
integrity sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==
@@ -10963,11 +10955,16 @@ nan@^2.12.1:
resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c"
integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==
-nanoid@^3.1.10, nanoid@^3.1.20:
+nanoid@^3.1.20:
version "3.1.20"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.20.tgz#badc263c6b1dcf14b71efaa85f6ab4c1d6cfc788"
integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==
+nanoid@^3.1.22:
+ version "3.1.22"
+ resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.22.tgz#b35f8fb7d151990a8aebd5aa5015c03cf726f844"
+ integrity sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ==
+
nanomatch@^1.2.9:
version "1.2.13"
resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
@@ -11399,10 +11396,10 @@ open@^7.0.2, open@^7.0.3:
is-docker "^2.0.0"
is-wsl "^2.1.1"
-opener@^1.5.1:
- version "1.5.1"
- resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.1.tgz#6d2f0e77f1a0af0032aca716c2c1fbb8e7e8abed"
- integrity sha512-goYSy5c2UXE4Ra1xixabeVh1guIX/ZV/YokJksb6q2lubWu6UbvPQ20p542/sFIll1nl8JnCyK9oBaOcCWXwvA==
+opener@^1.5.2:
+ version "1.5.2"
+ resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598"
+ integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==
opn@^5.5.0:
version "5.5.0"
@@ -11411,10 +11408,10 @@ opn@^5.5.0:
dependencies:
is-wsl "^1.1.0"
-optimize-css-assets-webpack-plugin@^5.0.3:
- version "5.0.3"
- resolved "https://registry.yarnpkg.com/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-5.0.3.tgz#e2f1d4d94ad8c0af8967ebd7cf138dcb1ef14572"
- integrity sha512-q9fbvCRS6EYtUKKSwI87qm2IxlyJK5b4dygW1rKUBT6mMDhdG5e5bZT63v6tnJR9F9FB/H5a0HTmtw+laUBxKA==
+optimize-css-assets-webpack-plugin@^5.0.4:
+ version "5.0.4"
+ resolved "https://registry.yarnpkg.com/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-5.0.4.tgz#85883c6528aaa02e30bbad9908c92926bb52dc90"
+ integrity sha512-wqd6FdI2a5/FdoiCNNkEvLeA//lHHfG24Ln2Xm2qqdIk4aOlsR18jwpyOihqQ8849W3qu2DX8fOYxpvTMj+93A==
dependencies:
cssnano "^4.1.10"
last-call-webpack-plugin "^3.0.0"
@@ -13831,23 +13828,18 @@ [email protected]:
resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
[email protected]:
- version "7.3.2"
- resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.2.tgz#604962b052b81ed0786aae84389ffba70ffd3938"
- integrity sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==
-
-semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0:
- version "6.3.0"
- resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
- integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
-
-semver@^7.3.2, semver@^7.3.4:
[email protected], semver@^7.3.2, semver@^7.3.4:
version "7.3.5"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7"
integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==
dependencies:
lru-cache "^6.0.0"
+semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.3.0:
+ version "6.3.0"
+ resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
+ integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
+
[email protected]:
version "0.17.1"
resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8"
@@ -14018,6 +14010,15 @@ simple-swizzle@^0.2.2:
dependencies:
is-arrayish "^0.3.1"
+sirv@^1.0.7:
+ version "1.0.11"
+ resolved "https://registry.yarnpkg.com/sirv/-/sirv-1.0.11.tgz#81c19a29202048507d6ec0d8ba8910fda52eb5a4"
+ integrity sha512-SR36i3/LSWja7AJNRBz4fF/Xjpn7lQFI30tZ434dIy+bitLYSP+ZEenHg36i23V2SGEz+kqjksg0uOGZ5LPiqg==
+ dependencies:
+ "@polka/url" "^1.0.0-next.9"
+ mime "^2.3.1"
+ totalist "^1.0.0"
+
sisteransi@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed"
@@ -14543,7 +14544,7 @@ strip-json-comments@^2.0.1:
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
-style-loader@^1.2.1, style-loader@^1.3.0, style-loader@^2.0.0:
+style-loader@^1.3.0, style-loader@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-2.0.0.tgz#9669602fd4690740eaaec137799a03addbbc393c"
integrity sha512-Z0gYUJmzZ6ZdRUqpg1r8GsaFKypE+3xAzuFeMuoHgjc9KZv3wMyCRjQIWEbhoFSq7+7yoHXySDJyyWQaPajeiQ==
@@ -14993,6 +14994,11 @@ [email protected]:
resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553"
integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==
+totalist@^1.0.0:
+ version "1.1.0"
+ resolved "https://registry.yarnpkg.com/totalist/-/totalist-1.1.0.tgz#a4d65a3e546517701e3e5c37a47a70ac97fe56df"
+ integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g==
+
tough-cookie@^2.3.3, tough-cookie@^2.3.4, tough-cookie@~2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
@@ -15044,11 +15050,6 @@ trough@^1.0.0:
resolved "https://registry.yarnpkg.com/trough/-/trough-1.0.4.tgz#3b52b1f13924f460c3fbfd0df69b587dbcbc762e"
integrity sha512-tdzBRDGWcI1OpPVmChbdSKhvSVurznZ8X36AYURAcl+0o2ldlCY2XPzyXNNxwJwwyIU+rIglTCG4kxtNKBQH7Q==
-tryer@^1.0.1:
- version "1.0.1"
- resolved "https://registry.yarnpkg.com/tryer/-/tryer-1.0.1.tgz#f2c85406800b9b0f74c9f7465b81eaad241252f8"
- integrity sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==
-
ts-dedent@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ts-dedent/-/ts-dedent-2.0.0.tgz#47c5eb23d9096f3237cc413bc82d387d36dbe690"
@@ -15695,24 +15696,20 @@ webidl-conversions@^6.1.0:
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-6.1.0.tgz#9111b4d7ea80acd40f5270d666621afa78b69514"
integrity sha512-qBIvFLGiBpLjfwmYAaHPXsn+ho5xZnGvyGvsarywGNc8VyQJUMHJ8OBKGGrPER0okBeMDaan4mNBlgBROxuI8w==
-webpack-bundle-analyzer@^3.8.0:
- version "3.8.0"
- resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.8.0.tgz#ce6b3f908daf069fd1f7266f692cbb3bded9ba16"
- integrity sha512-PODQhAYVEourCcOuU+NiYI7WdR8QyELZGgPvB1y2tjbUpbmcQOt5Q7jEK+ttd5se0KSBKD9SXHCEozS++Wllmw==
+webpack-bundle-analyzer@^4.4.0:
+ version "4.4.1"
+ resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.4.1.tgz#c71fb2eaffc10a4754d7303b224adb2342069da1"
+ integrity sha512-j5m7WgytCkiVBoOGavzNokBOqxe6Mma13X1asfVYtKWM3wxBiRRu1u1iG0Iol5+qp9WgyhkMmBAcvjEfJ2bdDw==
dependencies:
- acorn "^7.1.1"
- acorn-walk "^7.1.1"
- bfj "^6.1.1"
- chalk "^2.4.1"
- commander "^2.18.0"
- ejs "^2.6.1"
- express "^4.16.3"
- filesize "^3.6.1"
- gzip-size "^5.0.0"
- lodash "^4.17.15"
- mkdirp "^0.5.1"
- opener "^1.5.1"
- ws "^6.0.0"
+ acorn "^8.0.4"
+ acorn-walk "^8.0.0"
+ chalk "^4.1.0"
+ commander "^6.2.0"
+ gzip-size "^6.0.0"
+ lodash "^4.17.20"
+ opener "^1.5.2"
+ sirv "^1.0.7"
+ ws "^7.3.1"
[email protected]:
version "4.5.0"
@@ -16069,7 +16066,7 @@ ws@^5.2.0:
dependencies:
async-limiter "~1.0.0"
-ws@^6.0.0, ws@^6.2.1:
+ws@^6.2.1:
version "6.2.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb"
integrity sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==
@@ -16081,6 +16078,11 @@ ws@^7.2.3:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.0.tgz#a5dd76a24197940d4a8bb9e0e152bb4503764da7"
integrity sha512-kyFwXuV/5ymf+IXhS6f0+eAFvydbaBW3zjpT6hUdAh/hbVjTIB5EHBGi0bPoCLSK2wcuz3BrEkB9LrYv1Nm4NQ==
+ws@^7.3.1:
+ version "7.4.4"
+ resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.4.tgz#383bc9742cb202292c9077ceab6f6047b17f2d59"
+ integrity sha512-Qm8k8ojNQIMx7S+Zp8u/uHOx7Qazv3Yv4q68MiWWWOJhiwG5W3x7iqmRtJo8xxrciZUY4vRxUTJCKuRnF28ZZw==
+
xml-name-validator@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a"
|
3b36576c2f1d59b463176c7a191eae8d643702b4
|
2021-08-24 14:43:59
|
Stephen Cefali
|
feat(analytics): analytics factory methods (#28004)
| false
|
analytics factory methods (#28004)
|
feat
|
diff --git a/static/app/utils/advancedAnalytics.tsx b/static/app/utils/advancedAnalytics.tsx
index 8d7ea54e649468..acba1ec97a25d7 100644
--- a/static/app/utils/advancedAnalytics.tsx
+++ b/static/app/utils/advancedAnalytics.tsx
@@ -1,84 +1,24 @@
-import {LightWeightOrganization} from 'app/types';
-import {Hooks} from 'app/types/hooks';
-import {trackAnalyticsEventV2} from 'app/utils/analytics';
+import makeAnalyticsFunction from 'app/utils/analytics/makeAnalyticsFunction';
import {growthEventMap, GrowthEventParameters} from 'app/utils/growthAnalyticsEvents';
-import {uniqueId} from 'app/utils/guid';
-import {
- integrationEventMap,
- IntegrationEventParameters,
-} from 'app/utils/integrationEvents';
import {issueEventMap, IssueEventParameters} from 'app/utils/issueEvents';
import {
performanceEventMap,
PerformanceEventParameters,
} from 'app/utils/performanceEvents';
-const ANALYTICS_SESSION = 'ANALYTICS_SESSION';
-
-export const startAnalyticsSession = () => {
- const sessionId = uniqueId();
- window.sessionStorage.setItem(ANALYTICS_SESSION, sessionId);
- return sessionId;
-};
-
-export const clearAnalyticsSession = () => {
- window.sessionStorage.removeItem(ANALYTICS_SESSION);
-};
-
-export const getAnalyticsSessionId = () =>
- window.sessionStorage.getItem(ANALYTICS_SESSION);
-
-const hasAnalyticsDebug = () => window.localStorage.getItem('DEBUG_ANALYTICS') === '1';
-
-export type EventParameters = IntegrationEventParameters &
- GrowthEventParameters &
+export type EventParameters = GrowthEventParameters &
IssueEventParameters &
PerformanceEventParameters;
const allEventMap = {
- ...integrationEventMap,
...growthEventMap,
...issueEventMap,
...performanceEventMap,
};
-type AnalyticsKey = keyof EventParameters;
-
-type OptionalOrg = {organization: LightWeightOrganization | null};
-
/**
- * Tracks an event for analytics.
- * Must be tied to an organization.
- * Uses the current session ID or generates a new one if startSession == true.
- * An analytics session corresponds to a single action funnel such as installation.
- * Tracking by session allows us to track individual funnel attempts for a single user.
+ * Generic typed analytics function for growth, issue, and performance events.
+ * Analytics functions can be split into smaller sets of events, as we do for trackIntegrationEvent
*/
-export function trackAdvancedAnalyticsEvent<T extends AnalyticsKey>(
- eventKey: T,
- analyticsParams: EventParameters[T] & OptionalOrg,
- options?: Parameters<Hooks['analytics:track-event-v2']>[1]
-) {
- const eventName = allEventMap[eventKey];
-
- // need to destructure the org here to make TS happy
- const {organization, ...rest} = analyticsParams;
- const params = {
- eventKey,
- eventName,
- organization,
- ...rest,
- };
-
- // could put this into a debug method or for the main trackAnalyticsEvent event
- if (hasAnalyticsDebug()) {
- // eslint-disable-next-line no-console
- console.log('trackAdvancedAnalytics', params);
- }
-
- // only apply options if required to make mock assertions easier
- if (options) {
- trackAnalyticsEventV2(params, options);
- } else {
- trackAnalyticsEventV2(params);
- }
-}
+export const trackAdvancedAnalyticsEvent =
+ makeAnalyticsFunction<EventParameters>(allEventMap);
diff --git a/static/app/utils/analytics/makeAnalyticsFunction.tsx b/static/app/utils/analytics/makeAnalyticsFunction.tsx
new file mode 100644
index 00000000000000..c1fc6063ab284a
--- /dev/null
+++ b/static/app/utils/analytics/makeAnalyticsFunction.tsx
@@ -0,0 +1,56 @@
+import {LightWeightOrganization} from 'app/types';
+import {Hooks} from 'app/types/hooks';
+import {trackAnalyticsEventV2} from 'app/utils/analytics';
+
+const hasAnalyticsDebug = () => window.localStorage.getItem('DEBUG_ANALYTICS') === '1';
+
+type OptionalOrg = {organization: LightWeightOrganization | null};
+type Options = Parameters<Hooks['analytics:track-event-v2']>[1];
+
+/**
+ * Generates functions used to track an event for analytics.
+ * Each function can only handle the event types specified by the
+ * generic for EventParameters and the events in eventKeyToNameMap.
+ * Can specify default options with the defaultOptions argument as well.
+ * Can make organization required with the second generic.
+ */
+export default function makeAnalyticsFunction<
+ EventParameters extends Record<string, Record<string, any>>,
+ OrgRequirement extends OptionalOrg = OptionalOrg
+>(
+ eventKeyToNameMap: Record<keyof EventParameters, string | null>,
+ defaultOptions?: Options
+) {
+ /**
+ * Function used for analytics of specific types determined by the factory function
+ * Uses the current session ID or generates a new one if startSession == true.
+ * An analytics session corresponds to a single action funnel such as installation.
+ * Tracking by session allows us to track individual funnel attempts for a single user.
+ */
+ return <EventKey extends keyof EventParameters & string>(
+ eventKey: EventKey,
+ analyticsParams: EventParameters[EventKey] & OrgRequirement,
+ options?: Options
+ ) => {
+ const eventName = eventKeyToNameMap[eventKey];
+
+ const params = {
+ eventKey,
+ eventName,
+ ...analyticsParams,
+ };
+
+ if (hasAnalyticsDebug()) {
+ // eslint-disable-next-line no-console
+ console.log('analyticsEvent', params);
+ }
+
+ // only apply options if required to make mock assertions easier
+ if (options || defaultOptions) {
+ options = {...defaultOptions, ...options};
+ trackAnalyticsEventV2(params, options);
+ } else {
+ trackAnalyticsEventV2(params);
+ }
+ };
+}
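The new makeAnalyticsFunction is a closure factory: it pre-binds an event map and default options, and its option-merging branch mirrors `{...defaultOptions, ...options}`. A minimal Python sketch of the same pattern, with `track_event_v2` as a hypothetical stand-in for the underlying hook:

```python
# Sketch of a typed-tracker factory; track_event_v2 is a hypothetical sink.
def track_event_v2(params, options=None):
    print("analyticsEvent", params, options or {})

def make_analytics_function(event_key_to_name, default_options=None):
    """Return a tracker restricted to the keys in event_key_to_name."""
    def track(event_key, analytics_params, options=None):
        if event_key not in event_key_to_name:
            raise KeyError(f"unknown analytics event: {event_key}")
        params = {
            "eventKey": event_key,
            "eventName": event_key_to_name[event_key],
            **analytics_params,
        }
        # Mirrors options = {...defaultOptions, ...options} in the diff above.
        merged = {**(default_options or {}), **(options or {})}
        track_event_v2(params, merged or None)
    return track

# Usage: a tracker that only accepts integration events, with a bound default.
track_integration_event = make_analytics_function(
    {"integrations.install": "Integrations: Install"},
    default_options={"mapValuesFn": "map_integration_params"},
)
track_integration_event("integrations.install", {"organization": "org-slug"})
```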
diff --git a/static/app/utils/integrationUtil.tsx b/static/app/utils/integrationUtil.tsx
index b63963307b395d..116e6c05c217f9 100644
--- a/static/app/utils/integrationUtil.tsx
+++ b/static/app/utils/integrationUtil.tsx
@@ -22,8 +22,11 @@ import {
SentryAppInstallation,
} from 'app/types';
import {Hooks} from 'app/types/hooks';
-import {EventParameters, trackAdvancedAnalyticsEvent} from 'app/utils/advancedAnalytics';
-import {IntegrationAnalyticsKey} from 'app/utils/integrationEvents';
+import makeAnalyticsFunction from 'app/utils/analytics/makeAnalyticsFunction';
+import {
+ integrationEventMap,
+ IntegrationEventParameters,
+} from 'app/utils/integrationEvents';
const mapIntegrationParams = analyticsParams => {
// Reload expects integration_status even though it's not relevant for non-sentry apps
@@ -35,17 +38,12 @@ const mapIntegrationParams = analyticsParams => {
return fullParams;
};
-// wrapper around trackAdvancedAnalyticsEvent which has some extra
-// data massaging above
-export function trackIntegrationEvent<T extends IntegrationAnalyticsKey>(
- eventKey: T,
- analyticsParams: EventParameters[T] & {organization: LightWeightOrganization}, // integration events should always be tied to an org
- options?: Parameters<typeof trackAdvancedAnalyticsEvent>[2]
-) {
- options = options || {};
- options.mapValuesFn = mapIntegrationParams;
- return trackAdvancedAnalyticsEvent(eventKey, analyticsParams, options);
-}
+export const trackIntegrationEvent = makeAnalyticsFunction<
+ IntegrationEventParameters,
+ {organization: LightWeightOrganization} // org is required
+>(integrationEventMap, {
+ mapValuesFn: mapIntegrationParams,
+});
/**
* In sentry.io the features list supports rendering plan details. If the hook
|
e71a5fb24a56a89b8ffff78caeb15871e5bcf60d
|
2023-06-10 06:01:56
|
David Wang
|
feat(crons): Add timeline component into listing page (#50699)
| false
|
Add timeline component into listing page (#50699)
|
feat
|
diff --git a/static/app/views/monitors/components/checkInTimeline.tsx b/static/app/views/monitors/components/checkInTimeline.tsx
index 0d85c222ac3697..f09aea235f95aa 100644
--- a/static/app/views/monitors/components/checkInTimeline.tsx
+++ b/static/app/views/monitors/components/checkInTimeline.tsx
@@ -1,17 +1,42 @@
import {Theme} from '@emotion/react';
import styled from '@emotion/styled';
+import DateTime from 'sentry/components/dateTime';
import {Resizeable} from 'sentry/components/replays/resizeable';
+import {Tooltip} from 'sentry/components/tooltip';
import {space} from 'sentry/styles/space';
-import {CheckIn, CheckInStatus} from 'sentry/views/monitors/types';
+import {
+ MonitorBucketData,
+ MonitorBucketEnvMapping,
+} from 'sentry/views/monitors/components/overviewTimeline/types';
+import {CheckInStatus} from 'sentry/views/monitors/types';
interface Props {
- checkins: CheckIn[];
+ bucketedData: MonitorBucketData;
end: Date;
start: Date;
width?: number;
}
+function getAggregateStatus(envData: MonitorBucketEnvMapping) {
+ // Orders the statuses in terms of precedence for showing to the user
+ const statusOrdering = [
+ CheckInStatus.OK,
+ CheckInStatus.MISSED,
+ CheckInStatus.TIMEOUT,
+ CheckInStatus.ERROR,
+ ];
+
+ return Object.values(envData).reduce((currentStatus, value) => {
+ for (const [index, status] of statusOrdering.entries()) {
+ if (value[status] > 0 && index > statusOrdering.indexOf(currentStatus)) {
+ currentStatus = status;
+ }
+ }
+ return currentStatus;
+ }, CheckInStatus.OK);
+}
+
function getColorFromStatus(status: CheckInStatus, theme: Theme) {
const statusToColor: Record<CheckInStatus, string> = {
[CheckInStatus.ERROR]: theme.red200,
@@ -23,13 +48,17 @@ function getColorFromStatus(status: CheckInStatus, theme: Theme) {
return statusToColor[status];
}
-function getCheckInPosition(checkDate: string, timelineStart: Date, msPerPixel: number) {
- const elapsedSinceStart = new Date(checkDate).getTime() - timelineStart.getTime();
+function getBucketedCheckInsPosition(
+ timestamp: number,
+ timelineStart: Date,
+ msPerPixel: number
+) {
+ const elapsedSinceStart = new Date(timestamp).getTime() - timelineStart.getTime();
return elapsedSinceStart / msPerPixel;
}
export function CheckInTimeline(props: Props) {
- const {checkins, start, end} = props;
+ const {bucketedData, start, end} = props;
function renderTimelineWithWidth(width: number) {
const timeWindow = end.getTime() - start.getTime();
@@ -37,13 +66,24 @@ export function CheckInTimeline(props: Props) {
return (
<TimelineContainer>
- {checkins.map(({id, dateCreated, status}) => {
- const left = getCheckInPosition(dateCreated, start, msPerPixel);
+ {bucketedData.map(([timestamp, envData]) => {
+ const timestampMs = timestamp * 1000;
+ if (Object.keys(envData).length === 0) {
+ return null;
+ }
+
+ const left = getBucketedCheckInsPosition(timestampMs, start, msPerPixel);
if (left < 0) {
return null;
}
- return <JobTick key={id} left={left} status={status} />;
+ return (
+ <JobTickContainer left={left} key={timestamp}>
+ <Tooltip title={<DateTime date={timestampMs} />}>
+ <JobTick status={getAggregateStatus(envData)} />
+ </Tooltip>
+ </JobTickContainer>
+ );
})}
</TimelineContainer>
);
@@ -62,12 +102,14 @@ const TimelineContainer = styled('div')`
margin: ${space(4)} 0;
`;
-const JobTick = styled('div')<{left: number; status: CheckInStatus}>`
+const JobTickContainer = styled('div')<{left: number}>`
position: absolute;
- width: 4px;
- height: 14px;
- border-radius: 6px;
left: ${p => p.left}px;
+`;
+const JobTick = styled('div')<{status: CheckInStatus}>`
background: ${p => getColorFromStatus(p.status, p.theme)};
+ width: 4px;
+ height: 14px;
+ border-radius: 6px;
`;
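getAggregateStatus reduces every environment's bucket down to the single highest-precedence status (OK < MISSED < TIMEOUT < ERROR). A small Python sketch of that reduction, assuming each environment maps status names to counts as in MonitorBucketEnvMapping:

```python
# Highest-precedence check-in status across environments (low -> high).
STATUS_ORDER = ["ok", "missed", "timeout", "error"]

def aggregate_status(env_data):
    current = "ok"
    for counts in env_data.values():
        for status in STATUS_ORDER:
            # Upgrade whenever a bucket holds a higher-precedence status.
            if counts.get(status, 0) > 0 and STATUS_ORDER.index(status) > STATUS_ORDER.index(current):
                current = status
    return current

# prod contributes MISSED, dev contributes TIMEOUT; TIMEOUT wins.
assert aggregate_status({"prod": {"ok": 5, "missed": 1}, "dev": {"timeout": 2}}) == "timeout"
```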
diff --git a/static/app/views/monitors/components/overviewTimeline/index.tsx b/static/app/views/monitors/components/overviewTimeline/index.tsx
index 00d10104931ab3..2fd12b2269ff04 100644
--- a/static/app/views/monitors/components/overviewTimeline/index.tsx
+++ b/static/app/views/monitors/components/overviewTimeline/index.tsx
@@ -4,12 +4,16 @@ import styled from '@emotion/styled';
import {Button} from 'sentry/components/button';
import Link from 'sentry/components/links/link';
import Panel from 'sentry/components/panels/panel';
+import Placeholder from 'sentry/components/placeholder';
import {SegmentedControl} from 'sentry/components/segmentedControl';
import {IconSort} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
+import {useApiQuery} from 'sentry/utils/queryClient';
+import {useDimensions} from 'sentry/utils/useDimensions';
import useOrganization from 'sentry/utils/useOrganization';
import useRouter from 'sentry/utils/useRouter';
+import {CheckInTimeline} from 'sentry/views/monitors/components/checkInTimeline';
import {
GridLineOverlay,
GridLineTimeLabels,
@@ -18,6 +22,9 @@ import {
import {Monitor} from '../../types';
import {scheduleAsText} from '../../utils';
+import {MonitorBucketData} from './types';
+import {getStartFromTimeWindow, timeWindowData} from './utils';
+
interface Props {
monitorList: Monitor[];
monitorListPageLinks?: string | null;
@@ -25,9 +32,12 @@ interface Props {
export function OverviewTimeline({monitorList}: Props) {
const {replace, location} = useRouter();
+ const organization = useOrganization();
const resolution = location.query?.resolution ?? '24h';
const nowRef = useRef<Date>(new Date());
+ const start = getStartFromTimeWindow(nowRef.current, resolution);
+ const {elementRef, width: timelineWidth} = useDimensions<HTMLDivElement>();
const handleResolutionChange = useCallback(
(value: string) => {
@@ -36,6 +46,29 @@ export function OverviewTimeline({monitorList}: Props) {
[location, replace]
);
+ const rollup = Math.floor(
+ (timeWindowData[resolution].elapsedMinutes * 60) / timelineWidth
+ );
+ const monitorStatsQueryKey = `/organizations/${organization.slug}/monitors-stats/`;
+ const {data: monitorStats, isLoading} = useApiQuery<Record<string, MonitorBucketData>>(
+ [
+ monitorStatsQueryKey,
+ {
+ query: {
+ until: Math.floor(nowRef.current.getTime() / 1000),
+ since: Math.floor(start.getTime() / 1000),
+ monitor: monitorList.map(m => m.slug),
+ resolution: `${rollup}s`,
+ ...location.query,
+ },
+ },
+ ],
+ {
+ staleTime: 0,
+ enabled: timelineWidth > 0,
+ }
+ );
+
return (
<MonitorListPanel>
<ListFilters>
@@ -52,13 +85,31 @@ export function OverviewTimeline({monitorList}: Props) {
<SegmentedControl.Item key="30d">{t('Month')}</SegmentedControl.Item>
</SegmentedControl>
</ListFilters>
- <GridLineTimeLabels timeWindow={resolution} end={nowRef.current} />
- <GridLineOverlay timeWindow={resolution} end={nowRef.current} />
+ <TimelineWidthTracker ref={elementRef} />
+ <GridLineTimeLabels
+ timeWindow={resolution}
+ end={nowRef.current}
+ width={timelineWidth}
+ />
+ <GridLineOverlay
+ timeWindow={resolution}
+ end={nowRef.current}
+ width={timelineWidth}
+ />
{monitorList.map(monitor => (
<Fragment key={monitor.id}>
<MonitorDetails monitor={monitor} />
- <TimelineContainer />
+ {isLoading || !monitorStats ? (
+ <Placeholder />
+ ) : (
+ <CheckInTimeline
+ bucketedData={monitorStats[monitor.slug]}
+ end={nowRef.current}
+ start={start}
+ width={timelineWidth}
+ />
+ )}
</Fragment>
))}
</MonitorListPanel>
@@ -84,8 +135,6 @@ const MonitorListPanel = styled(Panel)`
grid-template-columns: 350px 1fr;
`;
-const TimelineContainer = styled('div')``;
-
const DetailsContainer = styled(Link)`
color: ${p => p.theme.textColor};
padding: ${space(2)};
@@ -113,3 +162,10 @@ const ListFilters = styled('div')`
padding: ${space(1.5)} ${space(2)};
border-bottom: 1px solid ${p => p.theme.border};
`;
+
+const TimelineWidthTracker = styled('div')`
+ position: absolute;
+ width: 100%;
+ grid-row: 1;
+ grid-column: 2;
+`;
diff --git a/static/app/views/monitors/components/overviewTimeline/timelineScrubber.tsx b/static/app/views/monitors/components/overviewTimeline/timelineScrubber.tsx
index ed3d0191701188..a8560d5942c345 100644
--- a/static/app/views/monitors/components/overviewTimeline/timelineScrubber.tsx
+++ b/static/app/views/monitors/components/overviewTimeline/timelineScrubber.tsx
@@ -3,7 +3,6 @@ import moment from 'moment';
import DateTime from 'sentry/components/dateTime';
import {space} from 'sentry/styles/space';
-import {useDimensions} from 'sentry/utils/useDimensions';
import {TimeWindow} from 'sentry/views/monitors/components/overviewTimeline/types';
import {
getStartFromTimeWindow,
@@ -13,6 +12,7 @@ import {
interface Props {
end: Date;
timeWindow: TimeWindow;
+ width: number;
}
function clampTimeBasedOnResolution(date: moment.Moment, resolution: string) {
@@ -48,10 +48,9 @@ function getTimeMarkers(end: Date, timeWindow: TimeWindow, width: number): TimeM
return times;
}
-export function GridLineTimeLabels({end, timeWindow}: Props) {
- const {elementRef, width} = useDimensions<HTMLDivElement>();
+export function GridLineTimeLabels({end, timeWindow, width}: Props) {
return (
- <LabelsContainer ref={elementRef}>
+ <LabelsContainer>
{getTimeMarkers(end, timeWindow, width).map(({date, position}) => (
<TimeLabelContainer key={date.getTime()} left={position}>
<TimeLabel date={date} {...timeWindowData[timeWindow].dateTimeProps} />
@@ -61,10 +60,9 @@ export function GridLineTimeLabels({end, timeWindow}: Props) {
);
}
-export function GridLineOverlay({end, timeWindow}: Props) {
- const {elementRef, width} = useDimensions<HTMLDivElement>();
+export function GridLineOverlay({end, timeWindow, width}: Props) {
return (
- <Overlay ref={elementRef}>
+ <Overlay>
<GridLineContainer>
{getTimeMarkers(end, timeWindow, width).map(({date, position}) => (
<Gridline key={date.getTime()} left={position} />
diff --git a/static/app/views/monitors/components/overviewTimeline/types.tsx b/static/app/views/monitors/components/overviewTimeline/types.tsx
index de0faa37b5f17d..dcbf09e47cc8da 100644
--- a/static/app/views/monitors/components/overviewTimeline/types.tsx
+++ b/static/app/views/monitors/components/overviewTimeline/types.tsx
@@ -1,3 +1,5 @@
+import {CheckInStatus} from 'sentry/views/monitors/types';
+
export type TimeWindow = '1h' | '24h' | '7d' | '30d';
export interface TimeWindowOptions {
@@ -16,3 +18,7 @@ export interface TimeWindowOptions {
}
export type TimeWindowData = Record<TimeWindow, TimeWindowOptions>;
+
+export type MonitorBucketData = [timestamp: number, envData: MonitorBucketEnvMapping][];
+
+export type MonitorBucketEnvMapping = Record<string, Record<CheckInStatus, number>>;
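The width tracker drives two calculations in this commit: the stats query's rollup is the window length in seconds divided by the timeline width in pixels (roughly one bucket per pixel), and each tick's left offset is elapsed milliseconds divided by milliseconds-per-pixel. A quick Python check of that arithmetic for the 24h window (the 600px width is an arbitrary example):

```python
# One bucket per `rollup` seconds; ticks positioned at elapsed_ms / ms_per_px.
def rollup_seconds(elapsed_minutes, timeline_width_px):
    return (elapsed_minutes * 60) // timeline_width_px

def tick_left_px(bucket_ts_ms, start_ms, ms_per_px):
    return (bucket_ts_ms - start_ms) / ms_per_px

width_px = 600
window_min = 24 * 60                             # the "24h" resolution
rollup = rollup_seconds(window_min, width_px)    # 144s of data per bucket
ms_per_px = (window_min * 60 * 1000) / width_px  # 144000 ms of data per pixel

# The second bucket lands exactly one pixel to the right of the first.
assert rollup == 144
assert tick_left_px(rollup * 1000, 0, ms_per_px) == 1.0
```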
|
e1d9de2c3c601dd93825d96713861949a962578f
|
2024-09-06 22:28:00
|
Michelle Zhang
|
fix(replay): fix read more link in proj settings (#77045)
| false
|
fix read more link in proj settings (#77045)
|
fix
|
diff --git a/static/app/views/settings/project/projectReplays.tsx b/static/app/views/settings/project/projectReplays.tsx
index effc76b259aec0..3b12afb0518522 100644
--- a/static/app/views/settings/project/projectReplays.tsx
+++ b/static/app/views/settings/project/projectReplays.tsx
@@ -69,7 +69,7 @@ function ProjectReplaySettings({organization, project, params: {projectId}}: Pro
action={
<LinkButton
external
- href="https://docs.sentry.io/product/session-replay/replay-page-and-filters/"
+ href="https://docs.sentry.io/product/issues/issue-details/replay-issues/"
>
{t('Read the Docs')}
</LinkButton>
|
653230a5c47ee91cf643d86bc2b58e55e1dd666a
|
2019-11-20 22:38:37
|
David Cramer
|
fix(discover): Format numbers as floats
| false
|
Format numbers as floats
|
fix
|
diff --git a/src/sentry/static/sentry/app/components/stackedBarChart.jsx b/src/sentry/static/sentry/app/components/stackedBarChart.jsx
index fd9cb2f4f6f1fe..99b7a96e3a777c 100644
--- a/src/sentry/static/sentry/app/components/stackedBarChart.jsx
+++ b/src/sentry/static/sentry/app/components/stackedBarChart.jsx
@@ -8,6 +8,7 @@ import Tooltip from 'app/components/tooltip';
import Count from 'app/components/count';
import ConfigStore from 'app/stores/configStore';
import theme from 'app/utils/theme';
+import floatFormat from 'app/utils/floatFormat';
class StackedBarChart extends React.Component {
static propTypes = {
@@ -100,7 +101,7 @@ class StackedBarChart extends React.Component {
}
}
- shouldComponentUpdate(nextProps, nextState) {
+ shouldComponentUpdate(nextProps, _nextState) {
return !isEqual(this.props, nextProps);
}
@@ -113,7 +114,7 @@ class StackedBarChart extends React.Component {
pointsToSeries = points => {
const series = [];
- points.forEach((p, pIdx) => {
+ points.forEach((p, _pIdx) => {
p.y.forEach((y, yIdx) => {
if (!series[yIdx]) {
series[yIdx] = {data: []};
@@ -143,11 +144,6 @@ class StackedBarChart extends React.Component {
return options.clock24Hours;
}
- floatFormat(number, places) {
- const multi = Math.pow(10, places);
- return parseInt(number * multi, 10) / multi;
- }
-
timeLabelAsHour(point) {
const timeMoment = moment(point.x * 1000);
const nextMoment = timeMoment.clone().add(59, 'minute');
@@ -251,7 +247,7 @@ class StackedBarChart extends React.Component {
);
}
- renderTooltip = (point, pointIdx) => {
+ renderTooltip = (point, _pointIdx) => {
const timeLabel = this.getTimeLabel(point);
const totalY = point.y.reduce((a, b) => a + b);
return (
@@ -280,14 +276,14 @@ class StackedBarChart extends React.Component {
);
};
- getMinHeight(index, pointLength) {
+ getMinHeight(index, _pointLength) {
const {minHeights} = this.props;
return minHeights && (minHeights[index] || minHeights[index] === 0)
? this.props.minHeights[index]
: 1;
}
- renderChartColumn(point, maxval, pointWidth, index, totalPoints) {
+ renderChartColumn(point, maxval, pointWidth, index, _totalPoints) {
const totalY = point.y.reduce((a, b) => a + b);
const totalPct = totalY / maxval;
// we leave a little extra space for bars with min-heights.
@@ -296,7 +292,7 @@ class StackedBarChart extends React.Component {
let prevPct = 0;
const pts = point.y.map((y, i) => {
const pct = Math.max(
- totalY && this.floatFormat((y / totalY) * totalPct * maxPercentage, 2),
+ totalY && floatFormat((y / totalY) * totalPct * maxPercentage, 2),
this.getMinHeight(i, point.y.length)
);
@@ -345,10 +341,7 @@ class StackedBarChart extends React.Component {
const totalPoints = Math.max(...series.map(s => s.data.length));
// we expand the graph just a hair beyond 100% prevent a subtle white line on the edge
const nudge = 0.1;
- const pointWidth = this.floatFormat(
- (100.0 + this.props.gap + nudge) / totalPoints,
- 2
- );
+ const pointWidth = floatFormat((100.0 + this.props.gap + nudge) / totalPoints, 2);
const maxval = this.maxPointValue();
const markers = this.props.markers.slice();
diff --git a/src/sentry/static/sentry/app/utils/floatFormat.tsx b/src/sentry/static/sentry/app/utils/floatFormat.tsx
new file mode 100644
index 00000000000000..f9acdcee428bf1
--- /dev/null
+++ b/src/sentry/static/sentry/app/utils/floatFormat.tsx
@@ -0,0 +1,4 @@
+export default function floatFormat(number: number, places: number) {
+ const multi = Math.pow(10, places);
+ return parseInt((number * multi).toString(), 10) / multi;
+}
diff --git a/src/sentry/static/sentry/app/utils/formatAbbreviatedNumber.jsx b/src/sentry/static/sentry/app/utils/formatAbbreviatedNumber.jsx
index ea12a07ed79bdb..92a3f40d26b6f0 100644
--- a/src/sentry/static/sentry/app/utils/formatAbbreviatedNumber.jsx
+++ b/src/sentry/static/sentry/app/utils/formatAbbreviatedNumber.jsx
@@ -1,9 +1,6 @@
-const numberFormats = [[1000000000, 'b'], [1000000, 'm'], [1000, 'k']];
+import floatFormat from './floatFormat';
-function floatFormat(number, places) {
- const multi = Math.pow(10, places);
- return parseInt(number * multi, 10) / multi;
-}
+const numberFormats = [[1000000000, 'b'], [1000000, 'm'], [1000, 'k']];
export default function formatNumber(number) {
let b, x, y, o, p;
diff --git a/src/sentry/static/sentry/app/views/eventsV2/data.tsx b/src/sentry/static/sentry/app/views/eventsV2/data.tsx
index 2562e3647b1f35..925b911d098105 100644
--- a/src/sentry/static/sentry/app/views/eventsV2/data.tsx
+++ b/src/sentry/static/sentry/app/views/eventsV2/data.tsx
@@ -13,6 +13,7 @@ import overflowEllipsis from 'app/styles/overflowEllipsis';
import pinIcon from 'app/../images/location-pin.png';
import {Organization, NewQuery} from 'app/types';
import Duration from 'app/components/duration';
+import floatFormat from 'app/utils/floatFormat';
import {QueryLink} from './styles';
import {generateEventDetailsRoute, generateEventSlug} from './eventDetails/utils';
@@ -195,7 +196,7 @@ export const FIELD_FORMATTERS: FieldFormatters = {
sortField: true,
renderFunc: (field, data) => (
<NumberContainer>
- {typeof data[field] === 'number' ? <Count value={data[field]} /> : emptyValue}
+ {typeof data[field] === 'number' ? floatFormat(data[field], 5) : emptyValue}
</NumberContainer>
),
},
diff --git a/src/sentry/static/sentry/app/views/eventsV2/eventQueryParams.tsx b/src/sentry/static/sentry/app/views/eventsV2/eventQueryParams.tsx
index e4b3f179c31f23..cc7e534995d009 100644
--- a/src/sentry/static/sentry/app/views/eventsV2/eventQueryParams.tsx
+++ b/src/sentry/static/sentry/app/views/eventsV2/eventQueryParams.tsx
@@ -1,6 +1,7 @@
export type ColumnValueType =
| '*' // Matches to everything
| 'string'
+ | 'integer'
| 'number'
| 'duration'
| 'timestamp'
diff --git a/src/sentry/static/sentry/app/views/eventsV2/table/tableView.tsx b/src/sentry/static/sentry/app/views/eventsV2/table/tableView.tsx
index 8150b4416deeca..343890de3aee4d 100644
--- a/src/sentry/static/sentry/app/views/eventsV2/table/tableView.tsx
+++ b/src/sentry/static/sentry/app/views/eventsV2/table/tableView.tsx
@@ -234,7 +234,7 @@ class TableView extends React.Component<TableViewProps> {
const field = column.eventViewField;
// establish alignment based on the type
- const alignedTypes: ColumnValueType[] = ['number', 'duration'];
+ const alignedTypes: ColumnValueType[] = ['number', 'duration', 'integer'];
let align: 'right' | 'left' = alignedTypes.includes(column.type) ? 'right' : 'left';
if (column.type === 'never' || column.type === '*') {
diff --git a/tests/js/spec/utils/floatFormat.spec.jsx b/tests/js/spec/utils/floatFormat.spec.jsx
new file mode 100644
index 00000000000000..6b7b45ab56d1e5
--- /dev/null
+++ b/tests/js/spec/utils/floatFormat.spec.jsx
@@ -0,0 +1,12 @@
+import floatFormat from 'app/utils/floatFormat';
+
+describe('floatFormat()', function() {
+ it('should format decimals', function() {
+ expect(floatFormat(0, 0)).toBe(0);
+ expect(floatFormat(10.513434, 1)).toBe(10.5);
+ expect(floatFormat(10.513494, 3)).toBe(10.513);
+ });
+ it('should not round', function() {
+ expect(floatFormat(10.513494, 4)).toBe(10.5134);
+ });
+});
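The extracted floatFormat truncates rather than rounds: scale by 10^places, drop the fraction with parseInt, scale back down, which is why the new test expects 10.5134 from 10.513494. The same behavior in Python, with math.trunc standing in for the parseInt trick:

```python
import math

def float_format(number, places):
    """Truncate (do not round) `number` to `places` decimal places."""
    multi = 10 ** places
    return math.trunc(number * multi) / multi

assert float_format(10.513434, 1) == 10.5
assert float_format(10.513494, 3) == 10.513
assert float_format(10.513494, 4) == 10.5134  # truncated, not rounded up to 10.5135
```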
|
bbfa0e281e9658c6d433807da4576490e2c19c02
|
2023-10-06 21:20:39
|
Matt Quinn
|
fix(statistical-detectors): fix txn breakpoint metric (#57671)
| false
|
fix txn breakpoint metric (#57671)
|
fix
|
diff --git a/src/sentry/tasks/statistical_detectors.py b/src/sentry/tasks/statistical_detectors.py
index 15269a92e03adc..90c172196365ba 100644
--- a/src/sentry/tasks/statistical_detectors.py
+++ b/src/sentry/tasks/statistical_detectors.py
@@ -185,7 +185,7 @@ def detect_transaction_change_points(
metrics.incr(
"statistical_detectors.breakpoint.transactions",
- amount=len(breakpoints),
+ amount=breakpoint_count,
sample_rate=1.0,
)
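The one-line fix swaps len(breakpoints) for a running breakpoint_count, presumably because the breakpoints are produced lazily and consumed before the metric is emitted; len() is not even defined for a generator. A toy illustration of why counting during iteration is the safe pattern (the detector below is invented for the example):

```python
# Counting while consuming a lazy stream; len() on a generator raises TypeError.
def detect_breakpoints(series):
    for name, values in series.items():
        if max(values) - min(values) > 10:  # toy change-point rule
            yield name

breakpoints = detect_breakpoints({"a": [1, 20], "b": [3, 4]})

breakpoint_count = 0
for _ in breakpoints:      # consume the stream exactly once...
    breakpoint_count += 1  # ...and count as we go

assert breakpoint_count == 1
assert list(breakpoints) == []  # already exhausted; counting afterwards reports 0
```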
|
1f200a64b6dc0cf16d0cdcc5d6aa7b405a49b44e
|
2020-03-18 00:59:47
|
Jan Michael Auer
|
feat(sessions): Issue a reject-all quota without feature flag (#17728)
| false
|
Issue a reject-all quota without feature flag (#17728)
|
feat
|
diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py
index e10cc192dde01d..ce7beb0e154500 100644
--- a/src/sentry/features/__init__.py
+++ b/src/sentry/features/__init__.py
@@ -90,6 +90,7 @@
default_manager.add("organizations:grouping-info", OrganizationFeature) # NOQA
default_manager.add("organizations:tweak-grouping-config", OrganizationFeature) # NOQA
default_manager.add("organizations:set-grouping-config", OrganizationFeature) # NOQA
+default_manager.add("organizations:set-grouping-config", OrganizationFeature) # NOQA
# Project scoped features
default_manager.add("projects:custom-inbound-filters", ProjectFeature) # NOQA
diff --git a/src/sentry/quotas/redis.py b/src/sentry/quotas/redis.py
index ac786b2220072b..99703034fd0db1 100644
--- a/src/sentry/quotas/redis.py
+++ b/src/sentry/quotas/redis.py
@@ -5,6 +5,7 @@
from time import time
+from sentry import features
from sentry.constants import DataCategory
from sentry.quotas.base import NotRateLimited, Quota, QuotaConfig, QuotaScope, RateLimited
from sentry.utils.redis import (
@@ -66,6 +67,16 @@ def get_quotas(self, project, key=None, keys=None):
results = []
+ if not features.has("organizations:releases-v2", project.organization):
+ results.append(
+ QuotaConfig(
+ limit=0,
+ scope=QuotaScope.ORGANIZATION,
+ categories=[DataCategory.SESSION],
+ reason_code="sessions_unavailable",
+ )
+ )
+
pquota = self.get_project_quota(project)
if pquota[0] is not None:
results.append(
diff --git a/tests/sentry/quotas/redis/tests.py b/tests/sentry/quotas/redis/tests.py
index 9e5e6eb2bce987..410221a4a20df4 100644
--- a/tests/sentry/quotas/redis/tests.py
+++ b/tests/sentry/quotas/redis/tests.py
@@ -8,6 +8,7 @@
from exam import fixture, patcher
+from sentry.constants import DataCategory
from sentry.quotas.base import QuotaConfig, QuotaScope
from sentry.quotas.redis import is_rate_limited, RedisQuota
from sentry.testutils import TestCase
@@ -87,16 +88,31 @@ def get_organization_quota(self):
def test_uses_defined_quotas(self):
self.get_project_quota.return_value = (200, 60)
self.get_organization_quota.return_value = (300, 60)
- quotas = self.quota.get_quotas(self.project)
+ with self.feature("organizations:releases-v2"):
+ quotas = self.quota.get_quotas(self.project)
+
assert quotas[0].id == u"p"
+ assert quotas[0].scope == QuotaScope.PROJECT
assert quotas[0].scope_id == six.text_type(self.project.id)
assert quotas[0].limit == 200
assert quotas[0].window == 60
assert quotas[1].id == u"o"
+ assert quotas[1].scope == QuotaScope.ORGANIZATION
assert quotas[1].scope_id == six.text_type(self.organization.id)
assert quotas[1].limit == 300
assert quotas[1].window == 60
+ def test_sessions_quota(self):
+ self.get_project_quota.return_value = (200, 60)
+ self.get_organization_quota.return_value = (300, 60)
+ with self.feature({"organizations:releases-v2": False}):
+ quotas = self.quota.get_quotas(self.project)
+
+ assert quotas[0].id is None # should not be counted
+ assert quotas[0].categories == set([DataCategory.SESSION])
+ assert quotas[0].scope == QuotaScope.ORGANIZATION
+ assert quotas[0].limit == 0
+
@mock.patch("sentry.quotas.redis.is_rate_limited")
@mock.patch.object(RedisQuota, "get_quotas", return_value=[])
def test_bails_immediately_without_any_quota(self, get_quotas, is_rate_limited):
@@ -175,16 +191,16 @@ def test_get_usage(self):
self.quota.is_rate_limited(self.project, timestamp=timestamp)
quotas = self.quota.get_quotas(self.project)
+ all_quotas = quotas + [
+ QuotaConfig(id="unlimited", limit=None, window=60, reason_code="unlimited"),
+ QuotaConfig(id="dummy", limit=10, window=60, reason_code="dummy"),
+ ]
- assert self.quota.get_usage(
- self.project.organization_id,
- quotas
- + [
- QuotaConfig(id="unlimited", limit=None, window=60, reason_code="unlimited"),
- QuotaConfig(id="dummy", limit=10, window=60, reason_code="dummy"),
- ],
- timestamp=timestamp,
- ) == [n for _ in quotas] + [0, 0]
+ usage = self.quota.get_usage(self.project.organization_id, all_quotas, timestamp=timestamp)
+
+ # Only quotas with an ID are counted in Redis (via this ID). Assume the
+ # count for these quotas and None for the others.
+ assert usage == [n if q.id else None for q in quotas] + [0, 0]
@mock.patch.object(RedisQuota, "get_quotas")
def test_refund(self, mock_get_quotas):
@@ -234,14 +250,14 @@ def test_get_usage_uses_refund(self):
self.quota.refund(self.project, timestamp=timestamp)
quotas = self.quota.get_quotas(self.project)
+ all_quotas = quotas + [
+ QuotaConfig(id="unlimited", limit=None, window=60, reason_code="unlimited"),
+ QuotaConfig(id="dummy", limit=10, window=60, reason_code="dummy"),
+ ]
+
+ usage = self.quota.get_usage(self.project.organization_id, all_quotas, timestamp=timestamp)
- assert self.quota.get_usage(
- self.project.organization_id,
- quotas
- + [
- QuotaConfig(id="unlimited", limit=None, window=60, reason_code="unlimited"),
- QuotaConfig(id="dummy", limit=10, window=60, reason_code="dummy"),
- ],
- timestamp=timestamp,
- # the - 1 is because we refunded once
- ) == [n - 1 for _ in quotas] + [0, 0]
+ # Only quotas with an ID are counted in Redis (via this ID). Assume the
+ # count for these quotas and None for the others.
+ # The ``- 1`` is because we refunded once.
+ assert usage == [n - 1 if q.id else None for q in quotas] + [0, 0]
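With the flag off, the injected config is a zero-limit, organization-scoped quota over the session category, so all session data is rejected up front with the given reason code; the updated tests also note that quotas without an id are never counted in Redis. A sketch of the gate using minimal stand-in types (the dataclass below is illustrative, not the real QuotaConfig):

```python
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class QuotaConfig:  # illustrative stand-in for sentry.quotas.base.QuotaConfig
    limit: Optional[int] = None
    scope: str = "organization"
    categories: List[str] = field(default_factory=list)
    reason_code: Optional[str] = None
    id: Optional[str] = None  # no id => not tracked as usage in Redis

def session_quotas(releases_v2_enabled):
    """Prepend a reject-all session quota when the feature flag is off."""
    quotas = []
    if not releases_v2_enabled:
        quotas.append(
            QuotaConfig(
                limit=0,  # limit=0 rejects everything in these categories
                categories=["session"],
                reason_code="sessions_unavailable",
            )
        )
    return quotas

q = session_quotas(releases_v2_enabled=False)[0]
assert q.limit == 0 and q.id is None
```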
|
9d55755b45a128406c7aff87209d1e0056d27742
|
2022-04-19 15:18:01
|
Floris Bruynooghe
|
fix(appconnect): Fix permanently pending builds (#33578)
| false
|
Fix permanently pending builds (#33578)
|
fix
|
diff --git a/src/sentry/lang/native/appconnect.py b/src/sentry/lang/native/appconnect.py
index f8a34f3dee0889..e589e5edaf6f21 100644
--- a/src/sentry/lang/native/appconnect.py
+++ b/src/sentry/lang/native/appconnect.py
@@ -260,7 +260,9 @@ def from_config(cls, config: AppStoreConnectConfig) -> "AppConnectClient":
def list_builds(self) -> List[BuildInfo]:
"""Returns the available AppStore builds."""
- return appstore_connect.get_build_info(self._session, self._api_credentials, self._app_id)
+ return appstore_connect.get_build_info(
+ self._session, self._api_credentials, self._app_id, include_expired=True
+ )
def download_dsyms(self, build: BuildInfo, path: pathlib.Path) -> None:
"""Downloads the dSYMs from the build into the filename given by `path`.
|
389ef1c79f1415cf91e223c5f5f92d31c50d0592
|
2022-03-24 17:55:28
|
Ahmed Etefy
|
feat(metrics): Adds `session.healthy` to derived metrics (#32901)
| false
|
Adds `session.healthy` to derived metrics (#32901)
|
feat
|
diff --git a/src/sentry/snuba/metrics/fields/base.py b/src/sentry/snuba/metrics/fields/base.py
index 6b3bd94afba3a0..749733117e9035 100644
--- a/src/sentry/snuba/metrics/fields/base.py
+++ b/src/sentry/snuba/metrics/fields/base.py
@@ -48,6 +48,7 @@
MetricEntity,
MetricType,
NotSupportedOverCompositeEntityException,
+ combine_dictionary_of_list_values,
)
from sentry.utils.snuba import raw_snql_query
@@ -413,10 +414,13 @@ def __recursively_generate_singular_entity_constituents(cls, projects, derived_m
entities_and_metric_names.setdefault(entity, []).append(
constituent_metric_obj.metric_name
)
- entities_and_metric_names.update(
+ # This is necessary because we don't want to overwrite entity lists but rather append
+ # to them
+ entities_and_metric_names = combine_dictionary_of_list_values(
+ entities_and_metric_names,
cls.__recursively_generate_singular_entity_constituents(
projects, constituent_metric_obj
- )
+ ),
)
return entities_and_metric_names
@@ -494,6 +498,7 @@ class DerivedMetricKey(Enum):
SESSION_ERRORED_PREAGGREGATED = "session.errored_preaggregated"
SESSION_ERRORED_SET = "session.errored_set"
SESSION_ERRORED = "session.errored"
+ SESSION_HEALTHY = "session.healthy"
SESSION_CRASH_FREE_RATE = "session.crash_free_rate"
SESSION_CRASH_FREE_USER_RATE = "session.crash_free_user_rate"
@@ -577,6 +582,15 @@ class DerivedMetricKey(Enum):
unit="sessions",
post_query_func=lambda *args: sum([*args]),
),
+ CompositeEntityDerivedMetric(
+ metric_name=DerivedMetricKey.SESSION_HEALTHY.value,
+ metrics=[
+ DerivedMetricKey.SESSION_ALL.value,
+ DerivedMetricKey.SESSION_ERRORED.value,
+ ],
+ unit="sessions",
+ post_query_func=lambda init, errored: max(0, init - errored),
+ ),
]
}
diff --git a/src/sentry/snuba/metrics/query_builder.py b/src/sentry/snuba/metrics/query_builder.py
index 0b55f4c28214ff..22ffb6b630cd8d 100644
--- a/src/sentry/snuba/metrics/query_builder.py
+++ b/src/sentry/snuba/metrics/query_builder.py
@@ -588,6 +588,10 @@ def translate_results(self):
totals[metric_name] = metric_obj.run_post_query_function(totals)
# Series
for idx in range(0, len(self._intervals)):
+ series.setdefault(
+ metric_name,
+ [metric_obj.generate_default_null_values()] * len(self._intervals),
+ )
series[metric_name][idx] = metric_obj.run_post_query_function(series, idx)
# Remove the extra fields added due to the constituent metrics that were added
diff --git a/src/sentry/snuba/metrics/utils.py b/src/sentry/snuba/metrics/utils.py
index a87cbb682ed6a1..4e0add325f1d96 100644
--- a/src/sentry/snuba/metrics/utils.py
+++ b/src/sentry/snuba/metrics/utils.py
@@ -144,6 +144,23 @@ class MetricMetaWithTagKeys(MetricMeta):
UNIT_TO_TYPE = {"sessions": "count", "percentage": "percentage", "users": "count"}
+def combine_dictionary_of_list_values(main_dict, other_dict):
+ """
+ Function that combines dictionaries of lists. For instance, let's say we have
+ Dict A -> {"a": [1,2], "b": [3]} and Dict B -> {"a": [6], "c": [4]}
+ Calling this function would result in {"a": [1, 2, 6], "b": [3], "c": [4]}
+ """
+ if not isinstance(main_dict, dict) or not isinstance(other_dict, dict):
+ raise TypeError()
+ for key, value in other_dict.items():
+ main_dict.setdefault(key, [])
+ if not isinstance(value, list) or not isinstance(main_dict[key], list):
+ raise TypeError()
+ main_dict[key] += value
+ main_dict[key] = list(set(main_dict[key]))
+ return main_dict
+
+
class MetricDoesNotExistException(Exception):
...
diff --git a/tests/sentry/api/endpoints/test_organization_metric_data.py b/tests/sentry/api/endpoints/test_organization_metric_data.py
index 8a5e74de2086d5..97573392c74d12 100644
--- a/tests/sentry/api/endpoints/test_organization_metric_data.py
+++ b/tests/sentry/api/endpoints/test_organization_metric_data.py
@@ -1487,3 +1487,66 @@ def test_crash_free_user_rate_orderby_crash_free_rate(self):
assert group["by"]["release"] == "[email protected]"
assert group["totals"]["session.crash_free_rate"] == 0.25
assert group["totals"]["session.crash_free_user_rate"] == 1.0
+
+ @freeze_time((timezone.now() - timedelta(days=2)).replace(hour=3, minute=26))
+ def test_healthy_sessions(self):
+ user_ts = time.time()
+ org_id = self.organization.id
+ self._send_buckets(
+ [
+ {
+ "org_id": org_id,
+ "project_id": self.project.id,
+ "metric_id": self.session_metric,
+ "timestamp": (user_ts // 60) * 60,
+ "tags": {
+ self.session_status_tag: indexer.record(org_id, "errored_preaggr"),
+ self.release_tag: indexer.record(org_id, "foo"),
+ },
+ "type": "c",
+ "value": 4,
+ "retention_days": 90,
+ },
+ {
+ "org_id": org_id,
+ "project_id": self.project.id,
+ "metric_id": self.session_metric,
+ "timestamp": user_ts,
+ "tags": {
+ self.session_status_tag: indexer.record(org_id, "init"),
+ self.release_tag: indexer.record(org_id, "foo"),
+ },
+ "type": "c",
+ "value": 10,
+ "retention_days": 90,
+ },
+ ],
+ entity="metrics_counters",
+ )
+ self._send_buckets(
+ [
+ {
+ "org_id": org_id,
+ "project_id": self.project.id,
+ "metric_id": self.session_error_metric,
+ "timestamp": user_ts,
+ "tags": {tag: value},
+ "type": "s",
+ "value": numbers,
+ "retention_days": 90,
+ }
+ for tag, value, numbers in (
+ (self.release_tag, indexer.record(org_id, "foo"), list(range(3))),
+ )
+ ],
+ entity="metrics_sets",
+ )
+ response = self.get_success_response(
+ self.organization.slug,
+ field=["session.healthy", "session.errored", "session.all"],
+ statsPeriod="6m",
+ interval="6m",
+ )
+ group = response.data["groups"][0]
+ assert group["totals"]["session.healthy"] == 3
+ assert group["series"]["session.healthy"] == [3]
diff --git a/tests/sentry/snuba/metrics/test_fields.py b/tests/sentry/snuba/metrics/test_fields.py
index ba9f3983f6ae63..b7c28323d40aae 100644
--- a/tests/sentry/snuba/metrics/test_fields.py
+++ b/tests/sentry/snuba/metrics/test_fields.py
@@ -275,6 +275,12 @@ def test_get_entity_and_validate_dependency_tree_of_single_entity_constituents(s
"metrics_counters": ["session.errored_preaggregated"],
"metrics_sets": ["session.errored_set"],
}
+ component_entities = DERIVED_METRICS["session.healthy"].get_entity(projects=[1])
+ assert sorted(component_entities["metrics_counters"]) == [
+ "session.all",
+ "session.errored_preaggregated",
+ ]
+ assert sorted(component_entities["metrics_sets"]) == ["session.errored_set"]
def test_generate_metric_ids(self):
with pytest.raises(NotSupportedOverCompositeEntityException):
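The merge helper added in utils.py is easy to verify in isolation; the snippet below copies it verbatim from the diff and runs the docstring's own example. One caveat the docstring glosses over: the set()-based de-duplication does not preserve list order, so comparisons should sort first.

def combine_dictionary_of_list_values(main_dict, other_dict):
    # Merge other_dict into main_dict, concatenating list values and
    # de-duplicating via set() (order is not preserved).
    if not isinstance(main_dict, dict) or not isinstance(other_dict, dict):
        raise TypeError()
    for key, value in other_dict.items():
        main_dict.setdefault(key, [])
        if not isinstance(value, list) or not isinstance(main_dict[key], list):
            raise TypeError()
        main_dict[key] += value
        main_dict[key] = list(set(main_dict[key]))
    return main_dict

merged = combine_dictionary_of_list_values({"a": [1, 2], "b": [3]}, {"a": [6], "c": [4]})
assert sorted(merged["a"]) == [1, 2, 6]
assert merged["b"] == [3] and merged["c"] == [4]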
|
6a54d2a3febdeab4132f620350dad4dbf8b2aa4e
|
2020-07-17 21:38:49
|
Abhijeet Prasad
|
fix: Make sure that gatsby traces don't have missing instrumentation (#19930)
| false
|
Make sure that gatsby traces don't have missing instrumentation (#19930)
|
fix
|
diff --git a/src/sentry/static/sentry/app/components/events/interfaces/spans/utils.tsx b/src/sentry/static/sentry/app/components/events/interfaces/spans/utils.tsx
index 2d591a0473469b..56113f458f92bf 100644
--- a/src/sentry/static/sentry/app/components/events/interfaces/spans/utils.tsx
+++ b/src/sentry/static/sentry/app/components/events/interfaces/spans/utils.tsx
@@ -589,9 +589,11 @@ export function isEventFromBrowserJavaScriptSDK(event: SentryTransactionEvent):
return false;
}
// based on https://github.com/getsentry/sentry-javascript/blob/master/packages/browser/src/version.ts
- return ['sentry.javascript.browser', 'sentry.javascript.react'].includes(
- sdkName.toLowerCase()
- );
+ return [
+ 'sentry.javascript.browser',
+ 'sentry.javascript.react',
+ 'sentry.javascript.gatsby',
+ ].includes(sdkName.toLowerCase());
}
// Durationless ops from: https://github.com/getsentry/sentry-javascript/blob/0defcdcc2dfe719343efc359d58c3f90743da2cd/packages/apm/src/integrations/tracing.ts#L629-L688
|
6658f2706ea83162ef9f16f478b464a206571830
|
2018-09-06 03:45:47
|
David Cramer
|
fix: Count apps missing migrations as ghosts
| false
|
Count apps missing migrations as ghosts
|
fix
|
diff --git a/src/south/migration/__init__.py b/src/south/migration/__init__.py
index ba159ba481aed7..60896bade51ea0 100644
--- a/src/south/migration/__init__.py
+++ b/src/south/migration/__init__.py
@@ -85,7 +85,7 @@ def check_migration_histories(histories, delete_ghosts=False, ignore_ghosts=Fals
try:
m = h.get_migration()
m.migration()
- except exceptions.UnknownMigration:
+ except (exceptions.UnknownMigration, exceptions.NoMigrations):
ghosts.append(h)
except ImproperlyConfigured:
pass # Ignore missing applications
|
e28618c202c53f4dbcf18366315d45c99e2be342
|
2024-08-05 16:32:02
|
Reinaldy Rafli
|
feat(relay): add span-normalization.allowed-hosts into relay config (#74195)
| false
|
add span-normalization.allowed-hosts into relay config (#74195)
|
feat
|
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 5a121ce5c9e070..c1b9081d688063 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -475,6 +475,13 @@
flags=FLAG_AUTOMATOR_MODIFIABLE,
)
+# Allow the Relay to skip normalization of spans for certain hosts.
+register(
+ "relay.span-normalization.allowed_hosts",
+ default=[],
+ flags=FLAG_ALLOW_EMPTY | FLAG_AUTOMATOR_MODIFIABLE,
+)
+
# Analytics
register("analytics.backend", default="noop", flags=FLAG_NOSTORE)
register("analytics.options", default={}, flags=FLAG_NOSTORE)
diff --git a/src/sentry/relay/globalconfig.py b/src/sentry/relay/globalconfig.py
index a5a1d5a60ecba6..61c0b7b2400b8e 100644
--- a/src/sentry/relay/globalconfig.py
+++ b/src/sentry/relay/globalconfig.py
@@ -26,6 +26,7 @@
"relay.span-extraction.sample-rate",
"relay.compute-metrics-summaries.sample-rate",
"sentry-metrics.extrapolation.duplication-limit",
+ "relay.span-normalization.allowed_hosts",
"sentry-metrics.extrapolation.propagate-rates",
]
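Conceptually, the global-config change just adds one key to an allow-list that gets projected through the options store. A hedged sketch of that projection — build_global_options and the injected get_option lookup are illustrative names, not Sentry's actual API:

RELAY_OPTIONS = [
    "relay.span-extraction.sample-rate",
    "relay.span-normalization.allowed_hosts",
]

def build_global_options(get_option) -> dict:
    # Resolve each allow-listed option through the supplied lookup; the
    # registered default (an empty list here) comes back unchanged.
    return {key: get_option(key) for key in RELAY_OPTIONS}

opts = build_global_options({
    "relay.span-extraction.sample-rate": 0.5,
    "relay.span-normalization.allowed_hosts": [],
}.get)
assert opts["relay.span-normalization.allowed_hosts"] == []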
|
c445396258b67b4910c63755ea4652e03e7eecec
|
2024-12-14 03:08:04
|
Snigdha Sharma
|
feat(metric-issues): Configure workflow notifications by group type (#81609)
| false
|
Configure workflow notifications by group type (#81609)
|
feat
|
diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py
index 39035bf1bd072e..9661494f0e368f 100644
--- a/src/sentry/issues/grouptype.py
+++ b/src/sentry/issues/grouptype.py
@@ -174,6 +174,9 @@ class GroupType:
notification_config: NotificationConfig = NotificationConfig()
detector_handler: type[DetectorHandler] | None = None
detector_validator: type[BaseGroupTypeDetectorValidator] | None = None
+ # Controls whether status change (e.g. resolved, regressed) workflow notifications are enabled.
+ # Defaults to true to maintain the default workflow notification behavior as it exists for error group types.
+ enable_status_change_workflow_notifications: bool = True
detector_config_schema: ClassVar[dict[str, Any]] = {}
def __init_subclass__(cls: type[GroupType], **kwargs: Any) -> None:
@@ -636,6 +639,7 @@ class MetricIssuePOC(GroupType):
default_priority = PriorityLevel.HIGH
enable_auto_resolve = False
enable_escalation_detection = False
+ enable_status_change_workflow_notifications = False
def should_create_group(
diff --git a/src/sentry/models/activity.py b/src/sentry/models/activity.py
index 827d244880cf3c..ae5e30d012c2d6 100644
--- a/src/sentry/models/activity.py
+++ b/src/sentry/models/activity.py
@@ -23,8 +23,9 @@
)
from sentry.db.models.fields.hybrid_cloud_foreign_key import HybridCloudForeignKey
from sentry.db.models.manager.base import BaseManager
+from sentry.issues.grouptype import get_group_type_by_type_id
from sentry.tasks import activity
-from sentry.types.activity import CHOICES, ActivityType
+from sentry.types.activity import CHOICES, STATUS_CHANGE_ACTIVITY_TYPES, ActivityType
from sentry.types.group import PriorityLevel
if TYPE_CHECKING:
@@ -191,6 +192,18 @@ def delete(self, *args, **kwargs):
)
def send_notification(self):
+ if self.group:
+ group_type = get_group_type_by_type_id(self.group.type)
+ has_status_change_notifications = group_type.enable_status_change_workflow_notifications
+ is_status_change = self.type in {
+ activity.value for activity in STATUS_CHANGE_ACTIVITY_TYPES
+ }
+
+ # Skip sending the activity notification if the group type does not
+ # support status change workflow notifications
+ if is_status_change and not has_status_change_notifications:
+ return
+
activity.send_activity_notifications.delay(self.id)
diff --git a/src/sentry/testutils/factories.py b/src/sentry/testutils/factories.py
index 96f402a651f2cc..58fedd24b9308f 100644
--- a/src/sentry/testutils/factories.py
+++ b/src/sentry/testutils/factories.py
@@ -1003,6 +1003,9 @@ def store_event(
@staticmethod
@assume_test_silo_mode(SiloMode.REGION)
def create_group(project, **kwargs):
+ from sentry.models.group import GroupStatus
+ from sentry.types.group import GroupSubStatus
+
kwargs.setdefault("message", "Hello world")
kwargs.setdefault("data", {})
if "type" not in kwargs["data"]:
@@ -1012,6 +1015,10 @@ def create_group(project, **kwargs):
if "metadata" in kwargs:
metadata = kwargs.pop("metadata")
kwargs["data"].setdefault("metadata", {}).update(metadata)
+ if "status" not in kwargs:
+ kwargs["status"] = GroupStatus.UNRESOLVED
+ kwargs["substatus"] = GroupSubStatus.NEW
+
return Group.objects.create(project=project, **kwargs)
@staticmethod
diff --git a/src/sentry/types/activity.py b/src/sentry/types/activity.py
index e850e61e854b47..1dd4a3c2373ad7 100644
--- a/src/sentry/types/activity.py
+++ b/src/sentry/types/activity.py
@@ -67,3 +67,16 @@ class ActivityType(Enum):
ActivityType.DELETED_ATTACHMENT, # 27
]
)
+
+
+STATUS_CHANGE_ACTIVITY_TYPES = (
+ ActivityType.SET_RESOLVED,
+ ActivityType.SET_UNRESOLVED,
+ ActivityType.SET_IGNORED,
+ ActivityType.SET_REGRESSION,
+ ActivityType.SET_RESOLVED_IN_RELEASE,
+ ActivityType.SET_RESOLVED_BY_AGE,
+ ActivityType.SET_RESOLVED_IN_COMMIT,
+ ActivityType.SET_RESOLVED_IN_PULL_REQUEST,
+ ActivityType.SET_ESCALATING,
+)
diff --git a/tests/sentry/issues/test_status_change.py b/tests/sentry/issues/test_status_change.py
index 7445202729eb68..dec3b58abf86d6 100644
--- a/tests/sentry/issues/test_status_change.py
+++ b/tests/sentry/issues/test_status_change.py
@@ -106,7 +106,7 @@ def test_unresolve_ignored_issue(self, issue_unignored: Any) -> None:
self.projects,
self.project_lookup,
acting_user=self.user,
- is_bulk=True,
+ is_bulk=False,
status_details={},
new_status=GroupStatus.UNRESOLVED,
new_substatus=GroupSubStatus.ONGOING,
@@ -131,7 +131,7 @@ def test_unresolve_resolved_issue(self, issue_unresolved: Any) -> None:
acting_user=self.user,
new_status=GroupStatus.UNRESOLVED,
new_substatus=GroupSubStatus.ONGOING,
- is_bulk=True,
+ is_bulk=False,
status_details={},
sender=self,
)
@@ -154,7 +154,7 @@ def test_ignore_new_issue(self, issue_ignored: Any) -> None:
acting_user=self.user,
new_status=GroupStatus.IGNORED,
new_substatus=None,
- is_bulk=True,
+ is_bulk=False,
status_details={"ignoreDuration": 30},
sender=self,
)
@@ -177,7 +177,7 @@ def test_ignore_until_escalating(self, issue_ignored: Any) -> None:
acting_user=self.user,
new_status=GroupStatus.IGNORED,
new_substatus=None,
- is_bulk=True,
+ is_bulk=False,
status_details={"ignoreUntilEscalating": True},
sender=self,
)
diff --git a/tests/sentry/models/test_activity.py b/tests/sentry/models/test_activity.py
index 990238edd698ac..61527f1fb618d6 100644
--- a/tests/sentry/models/test_activity.py
+++ b/tests/sentry/models/test_activity.py
@@ -1,6 +1,8 @@
import logging
+from unittest.mock import MagicMock, patch
from sentry.event_manager import EventManager
+from sentry.issues.grouptype import MetricIssuePOC
from sentry.models.activity import Activity
from sentry.testutils.cases import TestCase
from sentry.types.activity import ActivityType
@@ -319,3 +321,26 @@ def test_get_activities_for_group_flip_flop(self):
for pair in chunked(act_for_group[:-1], 2):
assert pair[0].type == ActivityType.SET_IGNORED.value
assert pair[1].type == ActivityType.SET_UNRESOLVED.value
+
+ @patch("sentry.tasks.activity.send_activity_notifications.delay")
+ def test_skips_status_change_notifications_if_disabled(
+ self, mock_send_activity_notifications: MagicMock
+ ):
+ project = self.create_project(name="test_activities_group")
+ group = self.create_group(project)
+
+ # Create an activity that would normally trigger a notification
+ activity = Activity.objects.create_group_activity(
+ group=group, type=ActivityType.SET_UNRESOLVED, data=None, send_notification=True
+ )
+
+ mock_send_activity_notifications.assert_called_once_with(activity.id)
+ mock_send_activity_notifications.reset_mock()
+
+ group.type = MetricIssuePOC.type_id
+ group.save()
+ _ = Activity.objects.create_group_activity(
+ group=group, type=ActivityType.SET_RESOLVED, data=None, send_notification=True
+ )
+
+ mock_send_activity_notifications.assert_not_called()
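The gating added to Activity.send_notification reduces to a small predicate: suppress only when the activity is a status change and the group type has opted out. A self-contained sketch — the enum values and the boolean flag are simplified stand-ins for the real models:

from enum import Enum

class ActivityType(Enum):
    SET_RESOLVED = 1
    SET_UNRESOLVED = 2
    NOTE = 3  # not a status change

STATUS_CHANGE_ACTIVITY_TYPES = (ActivityType.SET_RESOLVED, ActivityType.SET_UNRESOLVED)

def should_send(activity_type_value: int, status_change_notifications_enabled: bool) -> bool:
    # Suppress only status-change activities on group types that opted out;
    # everything else still notifies.
    is_status_change = activity_type_value in {a.value for a in STATUS_CHANGE_ACTIVITY_TYPES}
    return not (is_status_change and not status_change_notifications_enabled)

assert should_send(ActivityType.SET_RESOLVED.value, True)
assert not should_send(ActivityType.SET_RESOLVED.value, False)
assert should_send(ActivityType.NOTE.value, False)  # non-status activity still sends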
|
72d72aa5eb22dd0553adf883ee6d5da9e02ea659
|
2024-06-18 22:48:49
|
Ryan Albrecht
|
feat(replay): Cross link from inbound-filters to replay settings pages (#72920)
| false
|
Cross link from inbound-filters to replay settings pages (#72920)
|
feat
|
diff --git a/static/app/data/forms/replay.tsx b/static/app/data/forms/replay.tsx
deleted file mode 100644
index d19dd770a51778..00000000000000
--- a/static/app/data/forms/replay.tsx
+++ /dev/null
@@ -1,35 +0,0 @@
-import type {JsonFormObject} from 'sentry/components/forms/types';
-import {t} from 'sentry/locale';
-
-export const route = '/settings/:orgId/projects/:projectId/replays/';
-
-const formGroups: JsonFormObject[] = [
- {
- title: 'Settings',
- fields: [
- {
- name: 'sentry:replay_rage_click_issues',
- type: 'boolean',
-
- // additional data/props that is related to rendering of form field rather than data
- label: t('Create Rage Click Issues'),
- help: t('Toggles whether or not to create Session Replay Rage Click Issues'),
- getData: data => ({options: data}),
- },
- {
- name: 'sentry:replay_hydration_error_issues',
- type: 'boolean',
-
- // additional data/props that is related to rendering of form field rather than data
- label: t('Create Hydration Error Issues'),
- help: t('Toggles whether or not to create Session Replay Hydration Error Issues'),
- getData: data => ({options: data}),
- visible({features}) {
- return features.has('session-replay-hydration-error-issue-creation');
- },
- },
- ],
- },
-];
-
-export default formGroups;
diff --git a/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx b/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx
index 93ebbbc6399a40..15f566fb0e3ed3 100644
--- a/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx
+++ b/static/app/views/settings/project/projectFilters/projectFiltersSettings.tsx
@@ -21,6 +21,7 @@ import Form from 'sentry/components/forms/form';
import FormField from 'sentry/components/forms/formField';
import JsonForm from 'sentry/components/forms/jsonForm';
import ExternalLink from 'sentry/components/links/externalLink';
+import Link from 'sentry/components/links/link';
import LoadingError from 'sentry/components/loadingError';
import LoadingIndicator from 'sentry/components/loadingIndicator';
import Panel from 'sentry/components/panels/panel';
@@ -537,9 +538,22 @@ export function ProjectFiltersSettings({project, params, features}: Props) {
type: 'boolean',
name: 'filters:react-hydration-errors',
label: t('Filter out hydration errors'),
- help: t(
- 'React falls back to do a full re-render on a page and these errors are often not actionable.'
- ),
+ help: organization.features.includes(
+ 'session-replay-hydration-error-issue-creation'
+ )
+ ? tct(
+ 'React falls back to doing a full re-render on a page. [replaySettings: Hydration Errors created from captured replays] are excluded from this setting.',
+ {
+ replaySettings: (
+ <Link
+ to={`/settings/projects/${project.slug}/replays/#sentry-replay_hydration_error_issues_help`}
+ />
+ ),
+ }
+ )
+ : t(
+ 'React falls back to doing a full re-render on a page and these errors are often not actionable.'
+ ),
disabled: !hasAccess,
}}
/>
diff --git a/static/app/views/settings/project/projectReplays.tsx b/static/app/views/settings/project/projectReplays.tsx
index b4294b1142e3f6..322cf685197cf9 100644
--- a/static/app/views/settings/project/projectReplays.tsx
+++ b/static/app/views/settings/project/projectReplays.tsx
@@ -4,9 +4,10 @@ import Access from 'sentry/components/acl/access';
import {Button} from 'sentry/components/button';
import Form from 'sentry/components/forms/form';
import JsonForm from 'sentry/components/forms/jsonForm';
+import type {JsonFormObject} from 'sentry/components/forms/types';
+import Link from 'sentry/components/links/link';
import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle';
-import formGroups from 'sentry/data/forms/replay';
-import {t} from 'sentry/locale';
+import {t, tct} from 'sentry/locale';
import type {Organization} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
import SettingsPageHeader from 'sentry/views/settings/components/settingsPageHeader';
@@ -21,6 +22,46 @@ type Props = RouteComponentProps<RouteParams, {}> & {
};
function ProjectReplaySettings({organization, project, params: {projectId}}: Props) {
+ const formGroups: JsonFormObject[] = [
+ {
+ title: 'Settings',
+ fields: [
+ {
+ name: 'sentry:replay_rage_click_issues',
+ type: 'boolean',
+
+ // additional data/props that is related to rendering of form field rather than data
+ label: t('Create Rage Click Issues'),
+ help: t('Toggles whether or not to create Session Replay Rage Click Issues'),
+ getData: data => ({options: data}),
+ },
+ {
+ name: 'sentry:replay_hydration_error_issues',
+ type: 'boolean',
+
+ // additional data/props that is related to rendering of form field rather than data
+ label: t('Create Hydration Error Issues'),
+ help() {
+ return tct(
+ 'Toggles whether or not to create Session Replay Hydration Error Issues during replay ingest. Using [inboundFilters: inbound filters] to filter out hydration errors does not affect this setting.',
+ {
+ inboundFilters: (
+ <Link
+ to={`/settings/projects/${project.slug}/filters/data-filters/#filters-react-hydration-errors_help`}
+ />
+ ),
+ }
+ );
+ },
+ getData: data => ({options: data}),
+ visible({features}) {
+ return features.has('session-replay-hydration-error-issue-creation');
+ },
+ },
+ ],
+ },
+ ];
+
return (
<SentryDocumentTitle title={t('Replays')} projectSlug={project.slug}>
<SettingsPageHeader
@@ -44,8 +85,8 @@ function ProjectReplaySettings({organization, project, params: {projectId}}: Pro
<Access access={['project:write']} project={project}>
{({hasAccess}) => (
<JsonForm
- features={new Set(organization.features)}
disabled={!hasAccess}
+ features={new Set(organization.features)}
forms={formGroups}
/>
)}
|
6368154d0e24c8edc828be3eceab328404fa577c
|
2025-01-31 11:35:22
|
Markus Hintersteiner
|
chore(insights): Remove feature flag for mobile screen rendering (#84298)
| false
|
Remove feature flag for mobile screen rendering (#84298)
|
chore
|
diff --git a/static/app/components/nav/index.spec.tsx b/static/app/components/nav/index.spec.tsx
index 656d4fa38a51f2..b3f4191f948203 100644
--- a/static/app/components/nav/index.spec.tsx
+++ b/static/app/components/nav/index.spec.tsx
@@ -27,7 +27,6 @@ const ALL_AVAILABLE_FEATURES = [
'ourlogs-enabled',
'performance-view',
'performance-trace-explorer',
- 'starfish-mobile-ui-module',
'profiling',
];
diff --git a/static/app/components/sidebar/index.spec.tsx b/static/app/components/sidebar/index.spec.tsx
index a2e8ba3a560fc5..a9ca42ba6f7237 100644
--- a/static/app/components/sidebar/index.spec.tsx
+++ b/static/app/components/sidebar/index.spec.tsx
@@ -35,7 +35,6 @@ const ALL_AVAILABLE_FEATURES = [
'session-replay-ui',
'performance-view',
'performance-trace-explorer',
- 'starfish-mobile-ui-module',
'profiling',
];
diff --git a/static/app/views/insights/mobile/common/components/tables/samplesTables.tsx b/static/app/views/insights/mobile/common/components/tables/samplesTables.tsx
index d4613f84e56cc9..0aede383cb5226 100644
--- a/static/app/views/insights/mobile/common/components/tables/samplesTables.tsx
+++ b/static/app/views/insights/mobile/common/components/tables/samplesTables.tsx
@@ -32,7 +32,7 @@ export interface SpanOperationTableProps {
}
interface SamplesTablesProps {
- EventSamples: React.ComponentType<EventSamplesProps>;
+ EventSamples: React.ComponentType<EventSamplesProps> | undefined;
SpanOperationTable: React.ComponentType<SpanOperationTableProps>;
transactionName: string;
}
@@ -50,22 +50,26 @@ export function SamplesTables({
return (
<EventSplitContainer>
<ErrorBoundary mini>
- <EventSamples
- cursorName={MobileCursors.RELEASE_1_EVENT_SAMPLE_TABLE}
- sortKey={MobileSortKeys.RELEASE_1_EVENT_SAMPLE_TABLE}
- release={primaryRelease}
- transaction={transactionName}
- footerAlignedPagination
- />
+ {EventSamples && (
+ <EventSamples
+ cursorName={MobileCursors.RELEASE_1_EVENT_SAMPLE_TABLE}
+ sortKey={MobileSortKeys.RELEASE_1_EVENT_SAMPLE_TABLE}
+ release={primaryRelease}
+ transaction={transactionName}
+ footerAlignedPagination
+ />
+ )}
</ErrorBoundary>
<ErrorBoundary mini>
- <EventSamples
- cursorName={MobileCursors.RELEASE_2_EVENT_SAMPLE_TABLE}
- sortKey={MobileSortKeys.RELEASE_2_EVENT_SAMPLE_TABLE}
- release={secondaryRelease}
- transaction={transactionName}
- footerAlignedPagination
- />
+ {EventSamples && (
+ <EventSamples
+ cursorName={MobileCursors.RELEASE_2_EVENT_SAMPLE_TABLE}
+ sortKey={MobileSortKeys.RELEASE_2_EVENT_SAMPLE_TABLE}
+ release={secondaryRelease}
+ transaction={transactionName}
+ footerAlignedPagination
+ />
+ )}
</ErrorBoundary>
</EventSplitContainer>
);
@@ -103,18 +107,20 @@ export function SamplesTables({
<DeviceClassSelector size="md" clearSpansTableCursor />
<SubregionSelector />
</FiltersContainer>
- <SegmentedControl
- onChange={value => setSampleType(value)}
- defaultValue={SPANS}
- label={t('Sample Type Selection')}
- >
- <SegmentedControl.Item key={SPANS} aria-label={t('By Spans')}>
- {t('By Spans')}
- </SegmentedControl.Item>
- <SegmentedControl.Item key={EVENT} aria-label={t('By Event')}>
- {t('By Event')}
- </SegmentedControl.Item>
- </SegmentedControl>
+ {EventSamples && (
+ <SegmentedControl
+ onChange={value => setSampleType(value)}
+ defaultValue={SPANS}
+ label={t('Sample Type Selection')}
+ >
+ <SegmentedControl.Item key={SPANS} aria-label={t('By Spans')}>
+ {t('By Spans')}
+ </SegmentedControl.Item>
+ <SegmentedControl.Item key={EVENT} aria-label={t('By Event')}>
+ {t('By Event')}
+ </SegmentedControl.Item>
+ </SegmentedControl>
+ )}
</Controls>
{content}
</div>
diff --git a/static/app/views/insights/mobile/screens/views/screenDetailsPage.spec.tsx b/static/app/views/insights/mobile/screens/views/screenDetailsPage.spec.tsx
index 3cac2e202a190f..f364a961fc408c 100644
--- a/static/app/views/insights/mobile/screens/views/screenDetailsPage.spec.tsx
+++ b/static/app/views/insights/mobile/screens/views/screenDetailsPage.spec.tsx
@@ -13,11 +13,7 @@ jest.mock('sentry/utils/useLocation');
describe('ScreenDetailsPage', function () {
const organization = OrganizationFixture({
- features: [
- 'insights-addon-modules',
- 'insights-mobile-screens-module',
- 'starfish-mobile-ui-module',
- ],
+ features: ['insights-addon-modules', 'insights-mobile-screens-module'],
});
const project = ProjectFixture();
diff --git a/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx b/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx
index e029d30d2d8485..5207f66dd67a41 100644
--- a/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx
+++ b/static/app/views/insights/mobile/screens/views/screenDetailsPage.tsx
@@ -61,7 +61,6 @@ export function ScreenDetailsPage() {
{
key: 'screen_rendering',
label: t('Screen Rendering'),
- feature: 'starfish-mobile-ui-module',
alpha: true,
content: () => {
return <UiPage key={'screen_rendering'} />;
diff --git a/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx b/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx
index e4b50c5c93d0cc..c72c820db2d0f8 100644
--- a/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx
+++ b/static/app/views/insights/mobile/ui/views/screenSummaryPage.tsx
@@ -104,8 +104,8 @@ export function ScreenSummaryContent() {
<SamplesTables
transactionName={transactionName}
SpanOperationTable={SpanOperationTable}
- // TODO(nar): Add event samples component specific to ui module
- EventSamples={_props => <div />}
+ // for now, let's only show the span ops table
+ EventSamples={undefined}
/>
</SamplesContainer>
</Fragment>
|
2c17e0e6d57c268571346804269a3131207514e4
|
2024-03-21 02:10:13
|
Katie Byers
|
fix(grouping): Improve handling of matching grouping configs (#67270)
| false
|
Improve handling of matching grouping configs (#67270)
|
fix
|
diff --git a/src/sentry/grouping/ingest.py b/src/sentry/grouping/ingest.py
index 70c622ffa8144a..8c41b9941980ca 100644
--- a/src/sentry/grouping/ingest.py
+++ b/src/sentry/grouping/ingest.py
@@ -60,6 +60,21 @@ def _project_should_update_grouping(project: Project) -> bool:
return bool(project.get_option("sentry:grouping_auto_update")) and should_update_org
+def _config_update_happened_recently(project: Project, tolerance: int) -> bool:
+ """
+ Determine whether an auto-update happened within the last `tolerance` seconds.
+
+ We can use this test to compensate for the delay between config getting updated and Relay
+ picking up the change.
+ """
+ project_transition_expiry = project.get_option("sentry:secondary_grouping_expiry") or 0
+ last_config_update = project_transition_expiry - settings.SENTRY_GROUPING_UPDATE_MIGRATION_PHASE
+ now = int(time.time())
+ time_since_update = now - last_config_update
+
+ return time_since_update < tolerance
+
+
def _auto_update_grouping(project: Project) -> None:
current_config = project.get_option("sentry:grouping_config")
new_config = DEFAULT_GROUPING_CONFIG
@@ -267,15 +282,21 @@ def run_primary_grouping(
# See https://github.com/getsentry/sentry/pull/65116.
config_from_relay = grouping_config["id"]
config_from_project = project.get_option("sentry:grouping_config")
+
if config_from_relay != config_from_project:
- logger.info(
- "Event grouping config different from project grouping config",
- extra={
- "project": project.id,
- "relay_config": config_from_relay,
- "project_config": config_from_project,
- },
- )
+ # The relay value might not match the value stored on the project if the project was
+ # recently updated and relay's still using its cached value. Based on logs, this delay
+ # seems to be about 3 seconds, but let's be generous and give it a minute to account for
+ # clock skew, network latency, etc.
+ if not _config_update_happened_recently(project, 60):
+ logger.info(
+ "Event grouping config different from project grouping config",
+ extra={
+ "project": project.id,
+ "relay_config": config_from_relay,
+ "project_config": config_from_project,
+ },
+ )
with (
sentry_sdk.start_span(
@@ -456,32 +477,37 @@ def record_hash_calculation_metrics(
"primary_config": primary_config["id"],
"secondary_config": secondary_config["id"],
}
- current_values = primary_hashes.hashes
- secondary_values = secondary_hashes.hashes
- hashes_match = current_values == secondary_values
- if hashes_match:
- tags["result"] = "no change"
- else:
- shared_hashes = set(current_values) & set(secondary_values)
- if len(shared_hashes) > 0:
- tags["result"] = "partial change"
+ # If the configs are the same, *of course* the values are going to match, so no point in
+ # recording a metric
+ #
+ # TODO: If we fix the issue outlined in https://github.com/getsentry/sentry/pull/65116, we
+ # can ditch both this check and the logging below
+ if tags["primary_config"] != tags["secondary_config"]:
+ current_values = primary_hashes.hashes
+ secondary_values = secondary_hashes.hashes
+ hashes_match = current_values == secondary_values
+
+ if hashes_match:
+ tags["result"] = "no change"
else:
- tags["result"] = "full change"
-
- metrics.incr("grouping.hash_comparison", tags=tags)
-
- # TODO: This is temporary, just until we can figure out how we're recording a hash
- # comparison metric showing projects calculating both primary and secondary hashes using the
- # same config
- if primary_config["id"] == secondary_config["id"]:
- logger.info(
- "Equal primary and secondary configs",
- extra={
- "project": project.id,
- "primary_config": primary_config["id"],
- },
- )
+ shared_hashes = set(current_values) & set(secondary_values)
+ if len(shared_hashes) > 0:
+ tags["result"] = "partial change"
+ else:
+ tags["result"] = "full change"
+
+ metrics.incr("grouping.hash_comparison", tags=tags)
+
+ else:
+ if not _config_update_happened_recently(project, 60):
+ logger.info(
+ "Equal primary and secondary configs",
+ extra={
+ "project": project.id,
+ "primary_config": primary_config["id"],
+ },
+ )
# TODO: Once the legacy `_save_aggregate` goes away, this logic can be pulled into
|
1cec9c9b9bcf88171eb99d4c692640a6d1eae865
|
2023-07-25 16:02:25
|
Roman Zavarnitsyn
|
ref(getting-started-docs): Migrate dart/flutter wizards to the main sentry repo (#53504)
| false
|
Migrate dart/flutter wizards to the main sentry repo (#53504)
|
ref
|
diff --git a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx
index f0c788d5916000..cc172896f695d0 100644
--- a/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx
+++ b/static/app/components/onboarding/gettingStartedDoc/sdkDocumentation.tsx
@@ -85,6 +85,8 @@ export const migratedDocs = [
'apple',
'apple-ios',
'apple-macos',
+ 'flutter',
+ 'dart',
];
type SdkDocumentationProps = {
@@ -134,6 +136,10 @@ export function SdkDocumentation({
? `unreal/unreal`
: platform?.id === 'capacitor'
? `capacitor/capacitor`
+ : platform?.id === 'flutter'
+ ? `flutter/flutter`
+ : platform?.id === 'dart'
+ ? `dart/dart`
: platform?.id.replace(`${platform.language}-`, `${platform.language}/`)
: `${platform?.language}/${platform?.id}`;
diff --git a/static/app/gettingStartedDocs/dart/dart.spec.tsx b/static/app/gettingStartedDocs/dart/dart.spec.tsx
new file mode 100644
index 00000000000000..ede519c9589d42
--- /dev/null
+++ b/static/app/gettingStartedDocs/dart/dart.spec.tsx
@@ -0,0 +1,20 @@
+import {render, screen} from 'sentry-test/reactTestingLibrary';
+
+import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step';
+
+import {GettingStartedWithDart, steps} from './dart';
+
+describe('GettingStartedWithDart', function () {
+ it('renders doc correctly', function () {
+ const {container} = render(<GettingStartedWithDart dsn="test-dsn" />);
+
+ // Steps
+ for (const step of steps()) {
+ expect(
+ screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]})
+ ).toBeInTheDocument();
+ }
+
+ expect(container).toSnapshot();
+ });
+});
diff --git a/static/app/gettingStartedDocs/dart/dart.tsx b/static/app/gettingStartedDocs/dart/dart.tsx
new file mode 100644
index 00000000000000..b4b85ee768e5b1
--- /dev/null
+++ b/static/app/gettingStartedDocs/dart/dart.tsx
@@ -0,0 +1,169 @@
+import ExternalLink from 'sentry/components/links/externalLink';
+import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout';
+import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation';
+import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
+import {t, tct} from 'sentry/locale';
+
+// Configuration Start
+export const steps = ({
+ dsn,
+}: {
+ dsn?: string;
+} = {}): LayoutProps['steps'] => [
+ {
+ type: StepType.INSTALL,
+ description: (
+ <p>
+ {tct(
+ 'Sentry captures data by using an SDK within your application’s runtime. Add the following to your [pubspec: pubspec.yaml]',
+ {
+ pubspec: <code />,
+ }
+ )}
+ </p>
+ ),
+ configurations: [
+ {
+ language: 'yml',
+ code: `
+dependencies:
+ sentry: ^7.8.0
+ `,
+ },
+ ],
+ },
+ {
+ type: StepType.CONFIGURE,
+ description: (
+ <p>
+ {tct('Import [sentry: sentry] and initialize it', {
+ sentry: <code />,
+ })}
+ </p>
+ ),
+ configurations: [
+ {
+ language: 'dart',
+ code: `
+import 'package:sentry/sentry.dart';
+
+Future<void> main() async {
+ await Sentry.init((options) {
+ options.dsn = '${dsn}';
+ // Set tracesSampleRate to 1.0 to capture 100% of transactions for performance monitoring.
+ // We recommend adjusting this value in production.
+ options.tracesSampleRate = 1.0;
+ });
+
+ // or define SENTRY_DSN via Dart environment variable (--dart-define)
+}
+ `,
+ additionalInfo: (
+ <p>
+ {tct(
+ 'You can configure the [sentryDsn: SENTRY_DSN], [sentryRelease: SENTRY_RELEASE], [sentryDist: SENTRY_DIST], and [sentryEnv: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [dartDefine: --dart-define] flag to the compiler, as noted in the code sample.',
+ {
+ sentryDsn: <code />,
+ sentryRelease: <code />,
+ sentryDist: <code />,
+ sentryEnv: <code />,
+ dartDefine: <code />,
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+ {
+ type: StepType.VERIFY,
+ description: t(
+ 'Create an intentional error, so you can test that everything is working:'
+ ),
+ configurations: [
+ {
+ language: 'dart',
+ code: `
+import 'package:sentry/sentry.dart';
+
+try {
+ aMethodThatMightFail();
+} catch (exception, stackTrace) {
+ await Sentry.captureException(
+ exception,
+ stackTrace: stackTrace,
+ );
+}
+ `,
+ additionalInfo: (
+ <p>
+ {tct(
+ "If you're new to Sentry, use the email alert to access your account and complete a product tour.[break] If you're an existing user and have disabled alerts, you won't receive this email.",
+ {
+ break: <br />,
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+ {
+ title: t('Performance'),
+ description: t(
+ "You'll be able to monitor the performance of your app using the SDK. For example:"
+ ),
+ configurations: [
+ {
+ language: 'dart',
+ code: `
+import 'package:sentry/sentry.dart';
+
+final transaction = Sentry.startTransaction('processOrderBatch()', 'task');
+
+try {
+ await processOrderBatch(transaction);
+} catch (exception) {
+ transaction.throwable = exception;
+ transaction.status = SpanStatus.internalError();
+} finally {
+ await transaction.finish();
+}
+
+Future<void> processOrderBatch(ISentrySpan span) async {
+ // span operation: task, span description: operation
+ final innerSpan = span.startChild('task', description: 'operation');
+
+ try {
+ // omitted code
+ } catch (exception) {
+ innerSpan.throwable = exception;
+ innerSpan.status = SpanStatus.notFound();
+ } finally {
+ await innerSpan.finish();
+ }
+}
+ `,
+ additionalInfo: (
+ <p>
+ {tct(
+ 'To learn more about the API and automatic instrumentations, check out the [perfDocs: performance documentation].',
+ {
+ perfDocs: (
+ <ExternalLink href="https://docs.sentry.io/platforms/dart/performance/instrumentation/" />
+ ),
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+];
+// Configuration End
+
+export function GettingStartedWithDart({dsn, ...props}: ModuleProps) {
+ return <Layout steps={steps({dsn})} {...props} />;
+}
+
+export default GettingStartedWithDart;
diff --git a/static/app/gettingStartedDocs/flutter/flutter.spec.tsx b/static/app/gettingStartedDocs/flutter/flutter.spec.tsx
new file mode 100644
index 00000000000000..f39a2b5df2baf1
--- /dev/null
+++ b/static/app/gettingStartedDocs/flutter/flutter.spec.tsx
@@ -0,0 +1,20 @@
+import {render, screen} from 'sentry-test/reactTestingLibrary';
+
+import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step';
+
+import {GettingStartedWithFlutter, steps} from './flutter';
+
+describe('GettingStartedWithFlutter', function () {
+ it('renders doc correctly', function () {
+ const {container} = render(<GettingStartedWithFlutter dsn="test-dsn" />);
+
+ // Steps
+ for (const step of steps()) {
+ expect(
+ screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]})
+ ).toBeInTheDocument();
+ }
+
+ expect(container).toSnapshot();
+ });
+});
diff --git a/static/app/gettingStartedDocs/flutter/flutter.tsx b/static/app/gettingStartedDocs/flutter/flutter.tsx
new file mode 100644
index 00000000000000..484c5ab83452e7
--- /dev/null
+++ b/static/app/gettingStartedDocs/flutter/flutter.tsx
@@ -0,0 +1,246 @@
+import ExternalLink from 'sentry/components/links/externalLink';
+import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout';
+import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation';
+import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
+import {t, tct} from 'sentry/locale';
+
+// Configuration Start
+export const steps = ({
+ dsn,
+}: {
+ dsn?: string;
+} = {}): LayoutProps['steps'] => [
+ {
+ type: StepType.INSTALL,
+ description: (
+ <p>
+ {tct(
+ 'Sentry captures data by using an SDK within your application’s runtime. Add the following to your [pubspec: pubspec.yaml]',
+ {
+ pubspec: <code />,
+ }
+ )}
+ </p>
+ ),
+ configurations: [
+ {
+ language: 'yml',
+ code: `
+dependencies:
+ sentry_flutter: ^7.8.0
+ `,
+ },
+ ],
+ },
+ {
+ type: StepType.CONFIGURE,
+ description: (
+ <p>
+ {tct('Import [sentryFlutter: sentry_flutter] and initialize it', {
+ sentryFlutter: <code />,
+ })}
+ </p>
+ ),
+ configurations: [
+ {
+ language: 'dart',
+ code: `
+import 'package:flutter/widgets.dart';
+import 'package:sentry_flutter/sentry_flutter.dart';
+
+Future<void> main() async {
+ await SentryFlutter.init(
+ (options) {
+ options.dsn = '${dsn}';
+ // Set tracesSampleRate to 1.0 to capture 100% of transactions for performance monitoring.
+ // We recommend adjusting this value in production.
+ options.tracesSampleRate = 1.0;
+ },
+ appRunner: () => runApp(MyApp()),
+ );
+
+ // or define SENTRY_DSN via Dart environment variable (--dart-define)
+}
+ `,
+ additionalInfo: (
+ <p>
+ {tct(
+ 'You can configure the [sentryDsn: SENTRY_DSN], [sentryRelease: SENTRY_RELEASE], [sentryDist: SENTRY_DIST], and [sentryEnv: SENTRY_ENVIRONMENT] via the Dart environment variables passing the [dartDefine: --dart-define] flag to the compiler, as noted in the code sample.',
+ {
+ sentryDsn: <code />,
+ sentryRelease: <code />,
+ sentryDist: <code />,
+ sentryEnv: <code />,
+ dartDefine: <code />,
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+ {
+ type: StepType.VERIFY,
+ description: t(
+ 'Create an intentional error, so you can test that everything is working:'
+ ),
+ configurations: [
+ {
+ language: 'dart',
+ code: `
+import 'package:sentry/sentry.dart';
+
+try {
+ aMethodThatMightFail();
+} catch (exception, stackTrace) {
+ await Sentry.captureException(
+ exception,
+ stackTrace: stackTrace,
+ );
+}
+ `,
+ additionalInfo: (
+ <p>
+ {tct(
+ "If you're new to Sentry, use the email alert to access your account and complete a product tour.[break] If you're an existing user and have disabled alerts, you won't receive this email.",
+ {
+ break: <br />,
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+ {
+ title: t('Performance'),
+ description: t(
+ "You'll be able to monitor the performance of your app using the SDK. For example:"
+ ),
+ configurations: [
+ {
+ language: 'dart',
+ code: `
+import 'package:sentry/sentry.dart';
+
+final transaction = Sentry.startTransaction('processOrderBatch()', 'task');
+
+try {
+ await processOrderBatch(transaction);
+} catch (exception) {
+ transaction.throwable = exception;
+ transaction.status = SpanStatus.internalError();
+} finally {
+ await transaction.finish();
+}
+
+Future<void> processOrderBatch(ISentrySpan span) async {
+ // span operation: task, span description: operation
+ final innerSpan = span.startChild('task', description: 'operation');
+
+ try {
+ // omitted code
+ } catch (exception) {
+ innerSpan.throwable = exception;
+ innerSpan.status = SpanStatus.notFound();
+ } finally {
+ await innerSpan.finish();
+ }
+}
+ `,
+ additionalInfo: (
+ <p>
+ {tct(
+ 'To learn more about the API and automatic instrumentations, check out the [perfDocs: performance documentation].',
+ {
+ perfDocs: (
+ <ExternalLink href="https://docs.sentry.io/platforms/flutter/performance/instrumentation/" />
+ ),
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+ {
+ title: t('Debug Symbols'),
+ configurations: [
+ {
+ description: t(
+ 'We offer a range of methods to provide Sentry with debug symbols so that you can see symbolicated stack traces and triage issues faster.'
+ ),
+ },
+ {
+ description: (
+ <p>
+ {tct(
+ "Complete stack traces will be shown for your Dart error by default, but if you use [splitDebugInfo: split-debug-info] and [obfuscate: obfuscate], you'll need to [uploadDebugSymbols: upload the debug information files] generated by the [flutter: flutter] build.",
+ {
+ splitDebugInfo: <code />,
+ obfuscate: <code />,
+ uploadDebugSymbols: (
+ <ExternalLink href="https://docs.sentry.io/platforms/flutter/upload-debug/" />
+ ),
+ flutter: <code />,
+ }
+ )}
+ </p>
+ ),
+ },
+ {
+ description: (
+ <p>
+ {tct(
+ "You'll also need to [uploadDebug: upload the debug information files] generated by the [flutter: flutter] build for iOS, macOS, and Android NDK native crashes.",
+ {
+ uploadDebug: (
+ <ExternalLink href="https://docs.sentry.io/platforms/flutter/upload-debug/" />
+ ),
+ flutter: <code />,
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+ {
+ title: t('Source Context'),
+ configurations: [
+ {
+ description: (
+ <p>
+ {tct(
+ "If Sentry has access to your application's source code, it can show snippets of code [sourceContext: source context] around the location of stack frames, which helps to quickly pinpoint problematic code.",
+ {
+ sourceContext: <i />,
+ }
+ )}
+ </p>
+ ),
+ },
+ {
+ description: (
+ <p>
+ {tct(
+ "To enable source context, you'll need to upload debug symbols to Sentry by following the [sourceContext: Uploading Source Code Context for Flutter Android, iOS, and macOS] guide.",
+ {
+ sourceContext: (
+ <ExternalLink href="https://docs.sentry.io/platforms/flutter/upload-debug/#uploading-source-code-context-for-flutter-android-ios-and-macos" />
+ ),
+ }
+ )}
+ </p>
+ ),
+ },
+ ],
+ },
+];
+// Configuration End
+
+export function GettingStartedWithFlutter({dsn, ...props}: ModuleProps) {
+ return <Layout steps={steps({dsn})} {...props} />;
+}
+
+export default GettingStartedWithFlutter;
|
6d53a19d918afe076eee8d0eccb926a28753abc2
|
2024-05-07 23:25:20
|
Gabe Villalobos
|
fix(hc): Adds fix for evaluation context builder error handler, adds feature option (#69001)
| false
|
Adds fix for evaluation context builder error handler, adds feature option (#69001)
|
fix
|
diff --git a/src/flagpole/evaluation_context.py b/src/flagpole/evaluation_context.py
index 6dba72adadc418..3d68889e8807f3 100644
--- a/src/flagpole/evaluation_context.py
+++ b/src/flagpole/evaluation_context.py
@@ -76,7 +76,9 @@ def add_context_transformer(
self.context_transformers.append(context_transformer)
return self
- def add_exception_handler(self, exception_handler: Callable[[Exception], None]):
+ def add_exception_handler(
+ self, exception_handler: Callable[[Exception], None]
+ ) -> ContextBuilder:
"""
Add a custom exception handler to the context builder if you need custom handling
if any of the transformer functions raise an exception. This is useful for swallowing
@@ -88,6 +90,7 @@ def add_exception_handler(self, exception_handler: Callable[[Exception], None]):
raise Exception("Exception handler is already defined")
self.exception_handler = exception_handler
+ return self
def build(self, data: dict[str, Any] | None = None) -> EvaluationContext:
builder_data: dict[str, Any] = data or dict()
diff --git a/src/flagpole/sentry_flagpole_context.py b/src/flagpole/sentry_flagpole_context.py
index 40a66eef75cd32..de3c6971e8b6f0 100644
--- a/src/flagpole/sentry_flagpole_context.py
+++ b/src/flagpole/sentry_flagpole_context.py
@@ -78,10 +78,10 @@ def user_context_transformer(data: dict[str, Any]) -> EvaluationContextDict:
return context_data
-def get_sentry_flagpole_context_builder():
+def get_sentry_flagpole_context_builder() -> ContextBuilder:
"""
Creates and returns a new sentry flagpole context builder with Organization,
- User, Team, and Project transformers appended to it.
+ User, and Project transformers appended to it.
:return:
"""
return (
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 2cf3fe0f18b793..b5c1d05dec9e09 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -1790,6 +1790,10 @@
register("hybrid_cloud.allow_cross_db_tombstones", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE)
register("hybrid_cloud.disable_tombstone_cleanup", default=False, flags=FLAG_AUTOMATOR_MODIFIABLE)
+# Flagpole Rollout
+register("features", default={}, flags=FLAG_AUTOMATOR_MODIFIABLE)
+register("hybrid_cloud.flagpole_rollout_phase", default=0, flags=FLAG_AUTOMATOR_MODIFIABLE)
+
# Retry controls
register("hybridcloud.regionsiloclient.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE)
register("hybridcloud.rpc.retries", default=5, flags=FLAG_AUTOMATOR_MODIFIABLE)
|
ff8dfa9ae76f034dc48abb40403179913e546cdf
|
2024-01-27 00:05:51
|
Jonas
|
feat(profiling): instrument smart search bar with profiling (#63906)
| false
|
instrument smart search bar with profiling (#63906)
|
feat
|
diff --git a/static/app/components/smartSearchBar/index.tsx b/static/app/components/smartSearchBar/index.tsx
index 1f64e75d10ca73..fcae0cbbee1253 100644
--- a/static/app/components/smartSearchBar/index.tsx
+++ b/static/app/components/smartSearchBar/index.tsx
@@ -785,6 +785,23 @@ class SmartSearchBar extends Component<DefaultProps & Props, State> {
};
onQueryFocus = () => {
+ const txn = Sentry.startTransaction({
+ name: 'smart_search_bar.open',
+ op: 'ui.render',
+ });
+
+ if (typeof window.requestIdleCallback === 'function') {
+ txn.setTag('finish_strategy', 'idle_callback');
+ window.requestIdleCallback(() => {
+ txn.finish();
+ });
+ } else {
+ txn.setTag('finish_strategy', 'timeout');
+ setTimeout(() => {
+ txn.finish();
+ }, 1_000);
+ }
+
this.open();
this.setState({inputHasFocus: true});
};
|
fdb21a463ff0b003668fbd74598d8cdff326207a
|
2024-10-15 23:07:21
|
Evan Purkhiser
|
feat(feedback): Add 'Copy Markdown Link' (#79109)
| false
|
Add 'Copy Markdown Link' (#79109)
|
feat
|
diff --git a/static/app/components/feedback/feedbackItem/feedbackShortId.tsx b/static/app/components/feedback/feedbackItem/feedbackShortId.tsx
index c13274c767765a..48cebcffe89f4f 100644
--- a/static/app/components/feedback/feedbackItem/feedbackShortId.tsx
+++ b/static/app/components/feedback/feedbackItem/feedbackShortId.tsx
@@ -64,6 +64,11 @@ export default function FeedbackShortId({className, feedbackItem, style}: Props)
text: feedbackItem.shortId,
});
+ const {onClick: handleCopyMarkdown} = useCopyToClipboard({
+ text: `[${feedbackItem.shortId}](${feedbackUrl})`,
+ successMessage: t('Copied Markdown Feedback Link to clipboard'),
+ });
+
return (
<Flex
gap={space(1)}
@@ -104,6 +109,11 @@ export default function FeedbackShortId({className, feedbackItem, style}: Props)
label: t('Copy Short-ID'),
onAction: handleCopyShortId,
},
+ {
+ key: 'copy-markdown-link',
+ label: t('Copy Markdown Link'),
+ onAction: handleCopyMarkdown,
+ },
]}
/>
</Flex>
|
522a95af03a44d045f1f4f89aa7e1dfe7f75e064
|
2020-07-16 23:10:58
|
Priscila Oliveira
|
ref(pii): Remove AsyncComponent (#19913)
| false
|
Remove AsyncComponent (#19913)
|
ref
|
diff --git a/src/sentry/static/sentry/app/views/settings/projectSecurityAndPrivacy/index.tsx b/src/sentry/static/sentry/app/views/settings/projectSecurityAndPrivacy/index.tsx
index 6d26464fc4353b..5ff420399aac1f 100644
--- a/src/sentry/static/sentry/app/views/settings/projectSecurityAndPrivacy/index.tsx
+++ b/src/sentry/static/sentry/app/views/settings/projectSecurityAndPrivacy/index.tsx
@@ -8,7 +8,6 @@ import SettingsPageHeader from 'app/views/settings/components/settingsPageHeader
import JsonForm from 'app/views/settings/components/forms/jsonForm';
import Form from 'app/views/settings/components/forms/form';
import {fields} from 'app/data/forms/projectGeneralSettings';
-import AsyncView from 'app/views/asyncView';
import ProjectActions from 'app/actions/projectActions';
import {Organization, Project} from 'app/types';
import withProject from 'app/utils/withProject';
@@ -24,13 +23,13 @@ export type ProjectSecurityAndPrivacyProps = RouteComponentProps<
project: Project;
};
-class ProjectSecurityAndPrivacy extends AsyncView<ProjectSecurityAndPrivacyProps> {
+class ProjectSecurityAndPrivacy extends React.Component<ProjectSecurityAndPrivacyProps> {
handleUpdateProject = (data: Project) => {
// This will update our project global state
ProjectActions.updateSuccess(data);
};
- renderBody() {
+ render() {
const {organization, project} = this.props;
const initialData = project;
const projectSlug = project.slug;
|
f5dbecb63c78a5ad08e78750adb657f682f5734e
|
2023-05-13 00:45:00
|
John
|
feat(generic-metrics): Replace `bulk_record`/`record` with `_uca_bulk_record`/`_uca_record` (#49019)
| false
|
Replace `bulk_record`/`record` with `_uca_bulk_record`/`_uca_record` (#49019)
|
feat
|
diff --git a/src/sentry/sentry_metrics/consumers/indexer/batch.py b/src/sentry/sentry_metrics/consumers/indexer/batch.py
index d2ef1bceedfa06..6b0c20e56173cc 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/batch.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/batch.py
@@ -257,8 +257,8 @@ def extract_strings(self) -> Mapping[UseCaseID, Mapping[OrgId, Set[str]]]:
@metrics.wraps("process_messages.reconstruct_messages")
def reconstruct_messages(
self,
- mapping: Mapping[OrgId, Mapping[str, Optional[int]]],
- bulk_record_meta: Mapping[OrgId, Mapping[str, Metadata]],
+ mapping: Mapping[UseCaseID, Mapping[OrgId, Mapping[str, Optional[int]]]],
+ bulk_record_meta: Mapping[UseCaseID, Mapping[OrgId, Mapping[str, Metadata]]],
) -> IndexerOutputMessageBatch:
new_messages: IndexerOutputMessageBatch = []
@@ -282,6 +282,7 @@ def reconstruct_messages(
metric_name = old_payload_value["name"]
org_id = old_payload_value["org_id"]
+ use_case_id = old_payload_value["use_case_id"]
sentry_sdk.set_tag("sentry_metrics.organization_id", org_id)
tags = old_payload_value.get("tags", {})
used_tags.add(metric_name)
@@ -293,9 +294,9 @@ def reconstruct_messages(
try:
for k, v in tags.items():
used_tags.update({k, v})
- new_k = mapping[org_id][k]
+ new_k = mapping[use_case_id][org_id][k]
if new_k is None:
- metadata = bulk_record_meta[org_id].get(k)
+ metadata = bulk_record_meta[use_case_id][org_id].get(k)
if (
metadata
and metadata.fetch_type_ext
@@ -308,9 +309,9 @@ def reconstruct_messages(
value_to_write: Union[int, str] = v
if self.__should_index_tag_values:
- new_v = mapping[org_id][v]
+ new_v = mapping[use_case_id][org_id][v]
if new_v is None:
- metadata = bulk_record_meta[org_id].get(v)
+ metadata = bulk_record_meta[use_case_id][org_id].get(v)
if (
metadata
and metadata.fetch_type_ext
@@ -344,15 +345,15 @@ def reconstruct_messages(
"string_type": "tags",
"num_global_quotas": exceeded_global_quotas,
"num_org_quotas": exceeded_org_quotas,
- "org_batch_size": len(mapping[org_id]),
+ "org_batch_size": len(mapping[use_case_id][org_id]),
},
)
continue
fetch_types_encountered = set()
for tag in used_tags:
- if tag in bulk_record_meta[org_id]:
- metadata = bulk_record_meta[org_id][tag]
+ if tag in bulk_record_meta[use_case_id][org_id]:
+ metadata = bulk_record_meta[use_case_id][org_id][tag]
fetch_types_encountered.add(metadata.fetch_type)
output_message_meta[metadata.fetch_type.value][str(metadata.id)] = tag
@@ -360,9 +361,9 @@ def reconstruct_messages(
"".join(sorted(t.value for t in fetch_types_encountered)), "utf-8"
)
- numeric_metric_id = mapping[org_id][metric_name]
+ numeric_metric_id = mapping[use_case_id][org_id][metric_name]
if numeric_metric_id is None:
- metadata = bulk_record_meta[org_id].get(metric_name)
+ metadata = bulk_record_meta[use_case_id][org_id].get(metric_name)
metrics.incr(
"sentry_metrics.indexer.process_messages.dropped_message",
tags={
@@ -380,7 +381,7 @@ def reconstruct_messages(
and metadata.fetch_type_ext
and metadata.fetch_type_ext.is_global
),
- "org_batch_size": len(mapping[org_id]),
+ "org_batch_size": len(mapping[use_case_id][org_id]),
},
)
continue
diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py
index e03fb6a9a778c2..5f0c821fb45107 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/processing.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py
@@ -107,14 +107,11 @@ def _process_messages_impl(
batch.filter_messages(cardinality_limiter_state.keys_to_remove)
extracted_strings = batch.extract_strings()
- org_strings = next(iter(extracted_strings.values())) if extracted_strings else {}
- sdk.set_measurement("org_strings.len", len(org_strings))
+ sdk.set_measurement("org_strings.len", len(extracted_strings))
with metrics.timer("metrics_consumer.bulk_record"), sentry_sdk.start_span(op="bulk_record"):
- record_result = self._indexer.bulk_record(
- use_case_id=self._config.use_case_id, org_strings=org_strings
- )
+ record_result = self._indexer.bulk_record(extracted_strings)
mapping = record_result.get_mapped_results()
bulk_record_meta = record_result.get_fetch_metadata()
diff --git a/src/sentry/sentry_metrics/indexer/base.py b/src/sentry/sentry_metrics/indexer/base.py
index 7bd598982bf6ff..fc9f84a69f1bfc 100644
--- a/src/sentry/sentry_metrics/indexer/base.py
+++ b/src/sentry/sentry_metrics/indexer/base.py
@@ -413,51 +413,6 @@ class StringIndexer(Service):
)
def bulk_record(
- self, use_case_id: UseCaseKey, org_strings: Mapping[int, Set[str]]
- ) -> KeyResults:
- """
-        Takes in a mapping of org_ids to sets of strings.
-
-        Ultimately returns a mapping of those org_ids to a
-        string -> id mapping, for each string in the set.
-
-        There are five steps to getting the ids for strings:
- 0. ids from static strings (StaticStringIndexer)
- 1. ids from cache (CachingIndexer)
- 2. ids from existing db records (postgres/spanner)
- 3. ids that have been rate limited (postgres/spanner)
- 4. ids from newly created db records (postgres/spanner)
-
- Each step will start off with a KeyCollection and KeyResults:
- keys = KeyCollection(mapping)
- key_results = KeyResults()
-
- Then the work to get the ids (either from cache, db, etc)
- .... # work to add results to KeyResults()
-
- Those results will be added to `mapped_results` which can
- be retrieved
- key_results.get_mapped_results()
-
- Remaining unmapped keys get turned into a new
- KeyCollection for the next step:
- new_keys = key_results.get_unmapped_keys(mapping)
-
- When the last step is reached or a step resolves all the remaining
- unmapped keys the key_results objects are merged and returned:
- e.g. return cache_key_results.merge(db_read_key_results)
- """
- raise NotImplementedError()
-
- def record(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
- """Store a string and return the integer ID generated for it
-
- With every call to this method, the lifetime of the entry will be
- prolonged.
- """
- raise NotImplementedError()
-
- def _uca_bulk_record(
self, strings: Mapping[UseCaseID, Mapping[OrgId, Set[str]]]
) -> UseCaseKeyResults:
"""
@@ -487,7 +442,7 @@ def _uca_bulk_record(
"""
raise NotImplementedError()
- def _uca_record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
+ def record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
"""Store a string and return the integer ID generated for it
With every call to this method, the lifetime of the entry will be
prolonged.
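
To make the layering contract concrete, here is a hedged sketch of the shape each layer in this commit follows; layer_lookup and get_unmapped_use_case_keys are assumed names (the latter by analogy with the old get_unmapped_keys), while the comprehension, .size check, and merge call mirror the real layers below:

def layered_bulk_record(fallback, strings):
    keys = UseCaseKeyCollection(strings)
    hit_results = layer_lookup(keys)  # assumed helper: resolve what this layer can (cache, static table, ...)
    missing = hit_results.get_unmapped_use_case_keys(keys)  # assumed name
    if missing.size == 0:
        return hit_results
    rest = fallback.bulk_record(
        {use_case_id: key_collection.mapping for use_case_id, key_collection in missing.mapping.items()}
    )
    return hit_results.merge(rest)
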
diff --git a/src/sentry/sentry_metrics/indexer/cache.py b/src/sentry/sentry_metrics/indexer/cache.py
index ac94a87f44d77e..0644ec0bad09bd 100644
--- a/src/sentry/sentry_metrics/indexer/cache.py
+++ b/src/sentry/sentry_metrics/indexer/cache.py
@@ -8,7 +8,6 @@
from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.indexer.base import (
FetchType,
- KeyResults,
OrgId,
StringIndexer,
UseCaseKeyCollection,
@@ -103,17 +102,6 @@ def __init__(self, cache: StringIndexerCache, indexer: StringIndexer) -> None:
self.indexer = indexer
def bulk_record(
- self, use_case_id: UseCaseKey, org_strings: Mapping[int, Set[str]]
- ) -> KeyResults:
- res = self._uca_bulk_record({REVERSE_METRIC_PATH_MAPPING[use_case_id]: org_strings})
- return res.results[REVERSE_METRIC_PATH_MAPPING[use_case_id]]
-
- def record(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
- """Store a string and return the integer ID generated for it"""
- result = self.bulk_record(use_case_id=use_case_id, org_strings={org_id: {string}})
- return result[org_id][string]
-
- def _uca_bulk_record(
self, strings: Mapping[UseCaseID, Mapping[OrgId, Set[str]]]
) -> UseCaseKeyResults:
cache_keys = UseCaseKeyCollection(strings)
@@ -152,7 +140,7 @@ def _uca_bulk_record(
if db_record_keys.size == 0:
return cache_key_results
- db_record_key_results = self.indexer._uca_bulk_record(
+ db_record_key_results = self.indexer.bulk_record(
{
use_case_id: key_collection.mapping
for use_case_id, key_collection in db_record_keys.mapping.items()
@@ -163,8 +151,8 @@ def _uca_bulk_record(
return cache_key_results.merge(db_record_key_results)
- def _uca_record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
- result = self._uca_bulk_record(strings={use_case_id: {org_id: {string}}})
+ def record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
+ result = self.bulk_record(strings={use_case_id: {org_id: {string}}})
return result[use_case_id][org_id][string]
def resolve(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
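
The updated tests exchange keys of the form use_case:org:string with this cache, which suggests the following key scheme (inferred from those tests, not from this hunk):

from sentry.sentry_metrics.use_case_id_registry import UseCaseID

def cache_key(use_case_id: UseCaseID, org_id: int, string: str) -> str:
    # e.g. (UseCaseID.SESSIONS, 8, "beep") -> "sessions:8:beep"
    return f"{use_case_id.value}:{org_id}:{string}"
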
diff --git a/src/sentry/sentry_metrics/indexer/mock.py b/src/sentry/sentry_metrics/indexer/mock.py
index a3a0902caffd5f..05205fb5e52db7 100644
--- a/src/sentry/sentry_metrics/indexer/mock.py
+++ b/src/sentry/sentry_metrics/indexer/mock.py
@@ -5,7 +5,6 @@
from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.indexer.base import (
FetchType,
- KeyResults,
OrgId,
StringIndexer,
UseCaseKeyCollection,
@@ -28,16 +27,6 @@ def __init__(self) -> None:
self._reverse: Dict[int, str] = {}
def bulk_record(
- self, use_case_id: UseCaseKey, org_strings: Mapping[int, Set[str]]
- ) -> KeyResults:
- res = self._uca_bulk_record({REVERSE_METRIC_PATH_MAPPING[use_case_id]: org_strings})
- return res.results[REVERSE_METRIC_PATH_MAPPING[use_case_id]]
-
- def record(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
- res = self._uca_bulk_record({REVERSE_METRIC_PATH_MAPPING[use_case_id]: {org_id: {string}}})
- return res.results[REVERSE_METRIC_PATH_MAPPING[use_case_id]][org_id][string]
-
- def _uca_bulk_record(
self, strings: Mapping[UseCaseID, Mapping[OrgId, Set[str]]]
) -> UseCaseKeyResults:
db_read_keys = UseCaseKeyCollection(strings)
@@ -71,7 +60,7 @@ def _uca_bulk_record(
return db_read_key_results.merge(db_write_key_results)
- def _uca_record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
+ def record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
return self._record(use_case_id, org_id, string)
def resolve(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
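
One subtlety worth noting: after this commit, record takes the new UseCaseID while resolve still takes the legacy UseCaseKey, so round-trips mix both enums. A runnable sketch against the mock:

from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.indexer.mock import MockIndexer
from sentry.sentry_metrics.use_case_id_registry import UseCaseID

indexer = MockIndexer()
sid = indexer.record(UseCaseID.SESSIONS, org_id=1, string="hello")
# resolve has not been migrated yet and is still keyed by UseCaseKey
assert indexer.resolve(UseCaseKey.RELEASE_HEALTH, 1, "hello") == sid
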
diff --git a/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py b/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py
index 1a0ac78346c70f..66067dda3cf049 100644
--- a/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py
+++ b/src/sentry/sentry_metrics/indexer/postgres/postgres_v2.py
@@ -12,7 +12,6 @@
from sentry.sentry_metrics.configuration import IndexerStorage, UseCaseKey, get_ingest_config
from sentry.sentry_metrics.indexer.base import (
FetchType,
- KeyResults,
OrgId,
StringIndexer,
UseCaseKeyCollection,
@@ -23,11 +22,7 @@
from sentry.sentry_metrics.indexer.limiters.writes import writes_limiter_factory
from sentry.sentry_metrics.indexer.postgres.models import TABLE_MAPPING, BaseIndexer, IndexerTable
from sentry.sentry_metrics.indexer.strings import StaticStringIndexer
-from sentry.sentry_metrics.use_case_id_registry import (
- METRIC_PATH_MAPPING,
- REVERSE_METRIC_PATH_MAPPING,
- UseCaseID,
-)
+from sentry.sentry_metrics.use_case_id_registry import METRIC_PATH_MAPPING, UseCaseID
from sentry.utils import metrics
__all__ = ["PostgresIndexer"]
@@ -106,17 +101,6 @@ def _bulk_create_with_retry(
raise last_seen_exception
def bulk_record(
- self, use_case_id: UseCaseKey, org_strings: Mapping[int, Set[str]]
- ) -> KeyResults:
- res = self._uca_bulk_record({REVERSE_METRIC_PATH_MAPPING[use_case_id]: org_strings})
- return res.results[REVERSE_METRIC_PATH_MAPPING[use_case_id]]
-
- def record(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
- """Store a string and return the integer ID generated for it"""
- result = self.bulk_record(use_case_id=use_case_id, org_strings={org_id: {string}})
- return result[org_id][string]
-
- def _uca_bulk_record(
self, strings: Mapping[UseCaseID, Mapping[OrgId, Set[str]]]
) -> UseCaseKeyResults:
db_read_keys = UseCaseKeyCollection(strings)
@@ -244,8 +228,8 @@ def _uca_bulk_record(
return db_read_key_results.merge(db_write_key_results).merge(rate_limited_key_results)
- def _uca_record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
- result = self._uca_bulk_record(strings={use_case_id: {org_id: {string}}})
+ def record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
+ result = self.bulk_record(strings={use_case_id: {org_id: {string}}})
return result[use_case_id][org_id][string]
def resolve(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
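
For orientation, the two registries imported here point in opposite directions; the pairings below are the ones this commit relies on throughout (a sketch, assuming plain dict lookups):

from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.use_case_id_registry import (
    METRIC_PATH_MAPPING,
    REVERSE_METRIC_PATH_MAPPING,
    UseCaseID,
)

# UseCaseID -> UseCaseKey: what the storage backends still speak
assert METRIC_PATH_MAPPING[UseCaseID.SESSIONS] is UseCaseKey.RELEASE_HEALTH
assert METRIC_PATH_MAPPING[UseCaseID.TRANSACTIONS] is UseCaseKey.PERFORMANCE
# UseCaseKey -> UseCaseID: for callers still holding the legacy enum
assert REVERSE_METRIC_PATH_MAPPING[UseCaseKey.RELEASE_HEALTH] is UseCaseID.SESSIONS
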
diff --git a/src/sentry/sentry_metrics/indexer/strings.py b/src/sentry/sentry_metrics/indexer/strings.py
index 20f25a12ad3348..913e94eaf402a7 100644
--- a/src/sentry/sentry_metrics/indexer/strings.py
+++ b/src/sentry/sentry_metrics/indexer/strings.py
@@ -3,14 +3,13 @@
from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.indexer.base import (
FetchType,
- KeyResults,
OrgId,
StringIndexer,
UseCaseKeyCollection,
UseCaseKeyResult,
UseCaseKeyResults,
)
-from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING, UseCaseID
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
# !!! DO NOT CHANGE THESE VALUES !!!
#
@@ -168,15 +167,6 @@ def __init__(self, indexer: StringIndexer) -> None:
self.indexer = indexer
def bulk_record(
- self, use_case_id: UseCaseKey, org_strings: Mapping[int, Set[str]]
- ) -> KeyResults:
- res = self._uca_bulk_record({REVERSE_METRIC_PATH_MAPPING[use_case_id]: org_strings})
- return res.results[REVERSE_METRIC_PATH_MAPPING[use_case_id]]
-
- def record(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
- return self._uca_record(REVERSE_METRIC_PATH_MAPPING[use_case_id], org_id, string)
-
- def _uca_bulk_record(
self, strings: Mapping[UseCaseID, Mapping[OrgId, Set[str]]]
) -> UseCaseKeyResults:
static_keys = UseCaseKeyCollection(strings)
@@ -193,7 +183,7 @@ def _uca_bulk_record(
if org_strings_left.size == 0:
return static_key_results
- indexer_results = self.indexer._uca_bulk_record(
+ indexer_results = self.indexer.bulk_record(
{
use_case_id: key_collection.mapping
for use_case_id, key_collection in org_strings_left.mapping.items()
@@ -202,10 +192,10 @@ def _uca_bulk_record(
return static_key_results.merge(indexer_results)
- def _uca_record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
+ def record(self, use_case_id: UseCaseID, org_id: int, string: str) -> Optional[int]:
if string in SHARED_STRINGS:
return SHARED_STRINGS[string]
- return self.indexer._uca_record(use_case_id=use_case_id, org_id=org_id, string=string)
+ return self.indexer.record(use_case_id=use_case_id, org_id=org_id, string=string)
def resolve(self, use_case_id: UseCaseKey, org_id: int, string: str) -> Optional[int]:
if string in SHARED_STRINGS:
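
A short usage sketch of the static short-circuit, mirroring the updated tests: shared strings resolve to their hardcoded ids without minting per-org ids, and everything else falls through to the wrapped indexer:

from sentry.sentry_metrics.indexer.mock import MockIndexer
from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS, StaticStringIndexer
from sentry.sentry_metrics.use_case_id_registry import UseCaseID

static_indexer = StaticStringIndexer(MockIndexer())
# "release" is in SHARED_STRINGS, so its hardcoded id comes back for any org
assert static_indexer.record(UseCaseID.SESSIONS, org_id=2, string="release") == SHARED_STRINGS["release"]
# "1.0.0" is not shared and gets a first-seen id from the wrapped indexer
assert static_indexer.record(UseCaseID.SESSIONS, org_id=2, string="1.0.0") is not None
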
diff --git a/src/sentry/testutils/cases.py b/src/sentry/testutils/cases.py
index dd4a2102ed9d74..6cd1b56226cc8e 100644
--- a/src/sentry/testutils/cases.py
+++ b/src/sentry/testutils/cases.py
@@ -2,6 +2,8 @@
import responses
+from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING, UseCaseID
+
__all__ = (
"TestCase",
"TransactionTestCase",
@@ -1258,21 +1260,33 @@ def store_metric(
def metric_id(key: str):
assert isinstance(key, str)
- res = indexer.record(use_case_id=use_case_id, org_id=org_id, string=key)
+ res = indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
+ org_id=org_id,
+ string=key,
+ )
assert res is not None, key
mapping_meta[str(res)] = key
return res
def tag_key(name):
assert isinstance(name, str)
- res = indexer.record(use_case_id=use_case_id, org_id=org_id, string=name)
+ res = indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
+ org_id=org_id,
+ string=name,
+ )
assert res is not None, name
mapping_meta[str(res)] = name
return res
def tag_value(name):
assert isinstance(name, str)
- res = indexer.record(use_case_id=use_case_id, org_id=org_id, string=name)
+ res = indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
+ org_id=org_id,
+ string=name,
+ )
assert res is not None, name
mapping_meta[str(res)] = name
return res
@@ -1565,7 +1579,7 @@ def _index_metric_strings(self):
*list(METRICS_MAP.values()),
]
org_strings = {self.organization.id: set(strings)}
- indexer.bulk_record(use_case_id=UseCaseKey.PERFORMANCE, org_strings=org_strings)
+ indexer.bulk_record({UseCaseID.TRANSACTIONS: org_strings})
def store_transaction_metric(
self,
@@ -2247,7 +2261,7 @@ def build_and_store_session(
class OrganizationMetricMetaIntegrationTestCase(MetricsAPIBaseTestCase):
def __indexer_record(self, org_id: int, value: str) -> int:
- return indexer.record(use_case_id=UseCaseKey.RELEASE_HEALTH, org_id=org_id, string=value)
+ return indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=org_id, string=value)
def setUp(self):
super().setUp()
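
The three nested helpers in store_metric now repeat the same translation; a condensed sketch of that bridge (_record is a hypothetical name for the shared shape):

def _record(use_case_id: UseCaseKey, org_id: int, string: str) -> int:
    # store_metric callers still pass the legacy UseCaseKey; translate at the boundary
    res = indexer.record(
        use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
        org_id=org_id,
        string=string,
    )
    assert res is not None, string
    return res
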
diff --git a/tests/sentry/api/endpoints/test_organization_metric_data.py b/tests/sentry/api/endpoints/test_organization_metric_data.py
index e9a7dbd80b683c..09fd45b9416851 100644
--- a/tests/sentry/api/endpoints/test_organization_metric_data.py
+++ b/tests/sentry/api/endpoints/test_organization_metric_data.py
@@ -8,7 +8,7 @@
from freezegun import freeze_time
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.metrics.naming_layer.mri import ParsedMRI, SessionMRI, TransactionMRI
from sentry.snuba.metrics.naming_layer.public import (
SessionMetricKey,
@@ -23,12 +23,12 @@
from tests.sentry.api.endpoints.test_organization_metrics import MOCKED_DERIVED_METRICS
-def indexer_record(use_case_id: UseCaseKey, org_id: int, string: str) -> int:
- return indexer.record(use_case_id=use_case_id, org_id=org_id, string=string)
+def indexer_record(use_case_id: UseCaseID, org_id: int, string: str) -> int:
+ return indexer.record(use_case_id, org_id, string)
-perf_indexer_record = partial(indexer_record, UseCaseKey.PERFORMANCE)
-rh_indexer_record = partial(indexer_record, UseCaseKey.RELEASE_HEALTH)
+perf_indexer_record = partial(indexer_record, UseCaseID.TRANSACTIONS)
+rh_indexer_record = partial(indexer_record, UseCaseID.SESSIONS)
pytestmark = [pytest.mark.sentry_metrics]
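
The partial() aliases pin the use case once, so each helper then takes only the remaining arguments:

rh_indexer_record(org_id=1, string="session.status")
# equivalent to indexer_record(UseCaseID.SESSIONS, 1, "session.status")
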
diff --git a/tests/sentry/api/endpoints/test_organization_metric_details.py b/tests/sentry/api/endpoints/test_organization_metric_details.py
index c8358c33d44939..6f78b36a487b34 100644
--- a/tests/sentry/api/endpoints/test_organization_metric_details.py
+++ b/tests/sentry/api/endpoints/test_organization_metric_details.py
@@ -5,7 +5,7 @@
import pytest
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.sentry_metrics.utils import resolve_weak
from sentry.snuba.metrics import SingularEntityDerivedMetric
from sentry.snuba.metrics.fields.snql import complement, division_float
@@ -37,7 +37,7 @@
def _indexer_record(org_id: int, string: str) -> int:
- return indexer.record(use_case_id=UseCaseKey.RELEASE_HEALTH, org_id=org_id, string=string)
+ return indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=org_id, string=string)
@region_silo_test(stable=True)
@@ -242,7 +242,7 @@ def test_same_entity_multiple_metric_ids(self, mocked_derived_metrics):
"""
mocked_derived_metrics.return_value = MOCKED_DERIVED_METRICS_2
org_id = self.project.organization.id
- use_key_id = UseCaseKey.RELEASE_HEALTH
+ use_key_id = UseCaseID.SESSIONS
metric_id = _indexer_record(org_id, "metric_foo_doe")
self.store_session(
diff --git a/tests/sentry/api/endpoints/test_organization_metric_tag_details.py b/tests/sentry/api/endpoints/test_organization_metric_tag_details.py
index f25703a0611ad4..5a16b3b429fbaa 100644
--- a/tests/sentry/api/endpoints/test_organization_metric_tag_details.py
+++ b/tests/sentry/api/endpoints/test_organization_metric_tag_details.py
@@ -6,7 +6,7 @@
from freezegun import freeze_time
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.metrics.naming_layer import get_mri
from sentry.snuba.metrics.naming_layer.public import SessionMetricKey
from sentry.testutils.cases import OrganizationMetricMetaIntegrationTestCase
@@ -20,7 +20,7 @@
def _indexer_record(org_id: int, string: str) -> int:
- return indexer.record(use_case_id=UseCaseKey.RELEASE_HEALTH, org_id=org_id, string=string)
+ return indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=org_id, string=string)
@region_silo_test(stable=True)
diff --git a/tests/sentry/api/endpoints/test_organization_metric_tags.py b/tests/sentry/api/endpoints/test_organization_metric_tags.py
index 36cad6daa2460f..6a570046c60d5c 100644
--- a/tests/sentry/api/endpoints/test_organization_metric_tags.py
+++ b/tests/sentry/api/endpoints/test_organization_metric_tags.py
@@ -4,7 +4,7 @@
import pytest
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.metrics.naming_layer import get_mri
from sentry.snuba.metrics.naming_layer.mri import SessionMRI
from sentry.snuba.metrics.naming_layer.public import SessionMetricKey
@@ -84,7 +84,7 @@ def test_metric_tags_metric_does_not_exist_in_naming_layer(self):
def test_metric_tags_metric_does_not_have_data(self):
indexer.record(
- use_case_id=UseCaseKey.RELEASE_HEALTH,
+ use_case_id=UseCaseID.SESSIONS,
org_id=self.organization.id,
string=SessionMRI.SESSION.value,
)
diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py
index c7e447a57bcb4c..fe4fabd7e7f7b8 100644
--- a/tests/sentry/api/endpoints/test_organization_metrics.py
+++ b/tests/sentry/api/endpoints/test_organization_metrics.py
@@ -9,7 +9,7 @@
from sentry.models import ApiToken
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.metrics import TransactionStatusTagValue, TransactionTagsKey
from sentry.snuba.metrics.fields import (
DERIVED_METRICS,
@@ -47,12 +47,12 @@ def mocked_mri_resolver(metric_names, mri_func):
return lambda x: x if x in metric_names else mri_func(x)
-def indexer_record(use_case_id: UseCaseKey, org_id: int, string: str) -> int:
+def indexer_record(use_case_id: UseCaseID, org_id: int, string: str) -> int:
return indexer.record(use_case_id=use_case_id, org_id=org_id, string=string)
-perf_indexer_record = partial(indexer_record, UseCaseKey.PERFORMANCE)
-rh_indexer_record = partial(indexer_record, UseCaseKey.RELEASE_HEALTH)
+perf_indexer_record = partial(indexer_record, UseCaseID.TRANSACTIONS)
+rh_indexer_record = partial(indexer_record, UseCaseID.SESSIONS)
@region_silo_test(stable=True)
diff --git a/tests/sentry/incidents/action_handlers/test_email.py b/tests/sentry/incidents/action_handlers/test_email.py
index dcd37c1aba73d2..9192dd17afb73b 100644
--- a/tests/sentry/incidents/action_handlers/test_email.py
+++ b/tests/sentry/incidents/action_handlers/test_email.py
@@ -25,7 +25,7 @@
from sentry.models import NotificationSetting, UserEmail, UserOption
from sentry.notifications.types import NotificationSettingOptionValues, NotificationSettingTypes
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.dataset import Dataset
from sentry.snuba.models import SnubaQuery
from sentry.testutils import TestCase
@@ -391,7 +391,7 @@ def test_metric_chart(self, mock_generate_chart, mock_fetch_metric_alert_events_
@patch("sentry.incidents.charts.generate_chart", return_value="chart-url")
def test_metric_chart_mep(self, mock_generate_chart, mock_fetch_metric_alert_events_timeseries):
indexer.record(
- use_case_id=UseCaseKey.PERFORMANCE, org_id=self.organization.id, string="level"
+ use_case_id=UseCaseID.TRANSACTIONS, org_id=self.organization.id, string="level"
)
trigger_status = TriggerStatus.ACTIVE
alert_rule = self.create_alert_rule(
diff --git a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py
index edbbd6a72ce63a..ad77bce7be4f53 100644
--- a/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py
+++ b/tests/sentry/incidents/endpoints/test_project_alert_rule_index.py
@@ -13,7 +13,7 @@
from sentry.incidents.models import AlertRule, AlertRuleTrigger, AlertRuleTriggerAction
from sentry.models import AuditLogEntry, Integration
from sentry.sentry_metrics import indexer
-from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.snuba.dataset import Dataset
from sentry.snuba.metrics.naming_layer.mri import SessionMRI
from sentry.testutils import APITestCase
@@ -851,6 +851,4 @@ def setUp(self):
"init",
"crashed",
]:
- indexer.record(
- use_case_id=UseCaseKey.RELEASE_HEALTH, org_id=self.organization.id, string=tag
- )
+ indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=self.organization.id, string=tag)
diff --git a/tests/sentry/search/events/builder/test_metrics.py b/tests/sentry/search/events/builder/test_metrics.py
index 30805d6e86c646..baa18f64773dee 100644
--- a/tests/sentry/search/events/builder/test_metrics.py
+++ b/tests/sentry/search/events/builder/test_metrics.py
@@ -17,6 +17,7 @@
from sentry.search.events.types import HistogramParams
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.sentry_metrics.utils import resolve_tag_value
from sentry.testutils.cases import MetricsEnhancedPerformanceTestCase
from sentry.utils.snuba import Dataset
@@ -103,7 +104,7 @@ def setUp(self):
self.expected_tag_value_type = "String"
indexer.record(
- use_case_id=UseCaseKey.PERFORMANCE, org_id=self.organization.id, string="transaction"
+ use_case_id=UseCaseID.TRANSACTIONS, org_id=self.organization.id, string="transaction"
)
def setup_orderby_data(self):
diff --git a/tests/sentry/sentry_metrics/test_all_indexers.py b/tests/sentry/sentry_metrics/test_all_indexers.py
index ebce7cf216131c..2fbf0e096afcb3 100644
--- a/tests/sentry/sentry_metrics/test_all_indexers.py
+++ b/tests/sentry/sentry_metrics/test_all_indexers.py
@@ -16,7 +16,7 @@
from sentry.sentry_metrics.indexer.mock import RawSimpleIndexer
from sentry.sentry_metrics.indexer.postgres.postgres_v2 import PGStringIndexerV2
from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS, StaticStringIndexer
-from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.testutils.helpers.options import override_options
BACKENDS = [
@@ -24,7 +24,17 @@
pytest.param(PGStringIndexerV2, marks=pytest.mark.django_db),
]
-USE_CASE_KEYS = [UseCaseKey.PERFORMANCE, UseCaseKey.RELEASE_HEALTH]
+USE_CASE_IDS = [UseCaseID.SESSIONS, UseCaseID.TRANSACTIONS]
+
+
[email protected](params=BACKENDS)
+def indexer_cls(request):
+ return request.param
+
+
[email protected]
+def indexer(indexer_cls):
+ return indexer_cls()
@pytest.fixture
@@ -39,26 +49,23 @@ def indexer_cache():
indexer_cache.cache.clear()
[email protected](params=BACKENDS)
-def indexer_cls(request):
- return request.param
-
-
[email protected](params=USE_CASE_KEYS)
[email protected](params=USE_CASE_IDS)
def use_case_id(request):
return request.param
@pytest.fixture
-def writes_limiter_option_name(use_case_id):
- if use_case_id is UseCaseKey.RELEASE_HEALTH:
- return "sentry-metrics.writes-limiter.limits.releasehealth"
- return "sentry-metrics.writes-limiter.limits.performance"
+def use_case_key(use_case_id):
+ if use_case_id is UseCaseID.SESSIONS:
+ return UseCaseKey.RELEASE_HEALTH
+ return UseCaseKey.PERFORMANCE
@pytest.fixture
-def indexer(indexer_cls):
- return indexer_cls()
+def writes_limiter_option_name(use_case_key):
+ if use_case_key is UseCaseKey.RELEASE_HEALTH:
+ return "sentry-metrics.writes-limiter.limits.releasehealth"
+ return "sentry-metrics.writes-limiter.limits.performance"
def assert_fetch_type_for_tag_string_set(
@@ -67,35 +74,67 @@ def assert_fetch_type_for_tag_string_set(
assert all([meta[string].fetch_type == fetch_type for string in str_set])
-def test_static_and_non_static_strings(indexer, use_case_id):
+def test_static_and_non_static_strings_release_health(indexer, use_case_id, use_case_key):
static_indexer = StaticStringIndexer(indexer)
- org_strings = {
- 2: {"release", "1.0.0"},
- 3: {"production", "environment", "release", "2.0.0"},
+ strings = {
+ use_case_id: {
+ 2: {"release", "1.0.0"},
+ 3: {"production", "environment", "release", "2.0.0"},
+ }
}
- results = static_indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings)
+ results = static_indexer.bulk_record(strings=strings)
- v1 = indexer.resolve(use_case_id, 2, "1.0.0")
- v2 = indexer.resolve(use_case_id, 3, "2.0.0")
+ v1 = indexer.resolve(use_case_key, 2, "1.0.0")
+ v2 = indexer.resolve(use_case_key, 3, "2.0.0")
- assert results[2]["release"] == SHARED_STRINGS["release"]
- assert results[3]["production"] == SHARED_STRINGS["production"]
- assert results[3]["environment"] == SHARED_STRINGS["environment"]
- assert results[3]["release"] == SHARED_STRINGS["release"]
+ assert results[use_case_id][2]["release"] == SHARED_STRINGS["release"]
+ assert results[use_case_id][3]["production"] == SHARED_STRINGS["production"]
+ assert results[use_case_id][3]["environment"] == SHARED_STRINGS["environment"]
+ assert results[use_case_id][3]["release"] == SHARED_STRINGS["release"]
- assert results[2]["1.0.0"] == v1
- assert results[3]["2.0.0"] == v2
+ assert results[use_case_id][2]["1.0.0"] == v1
+ assert results[use_case_id][3]["2.0.0"] == v2
meta = results.get_fetch_metadata()
- assert_fetch_type_for_tag_string_set(meta[2], FetchType.HARDCODED, {"release"})
+ assert_fetch_type_for_tag_string_set(meta[use_case_id][2], FetchType.HARDCODED, {"release"})
assert_fetch_type_for_tag_string_set(
- meta[3], FetchType.HARDCODED, {"release", "production", "environment"}
+ meta[use_case_id][3], FetchType.HARDCODED, {"release", "production", "environment"}
)
- assert_fetch_type_for_tag_string_set(meta[2], FetchType.FIRST_SEEN, {"1.0.0"})
- assert_fetch_type_for_tag_string_set(meta[3], FetchType.FIRST_SEEN, {"2.0.0"})
+ assert_fetch_type_for_tag_string_set(meta[use_case_id][2], FetchType.FIRST_SEEN, {"1.0.0"})
+ assert_fetch_type_for_tag_string_set(meta[use_case_id][3], FetchType.FIRST_SEEN, {"2.0.0"})
-def test_indexer(indexer, indexer_cache, use_case_id):
+def test_static_and_non_static_strings_generic_metrics(indexer, use_case_id, use_case_key):
+ static_indexer = StaticStringIndexer(indexer)
+ strings = {
+ use_case_id: {
+ 2: {"release", "1.0.0"},
+ 3: {"production", "environment", "release", "2.0.0"},
+ }
+ }
+ results = static_indexer.bulk_record(strings=strings)
+
+ v1 = indexer.resolve(use_case_key, 2, "1.0.0")
+ v2 = indexer.resolve(use_case_key, 3, "2.0.0")
+
+ assert results[use_case_id][2]["release"] == SHARED_STRINGS["release"]
+ assert results[use_case_id][3]["production"] == SHARED_STRINGS["production"]
+ assert results[use_case_id][3]["environment"] == SHARED_STRINGS["environment"]
+ assert results[use_case_id][3]["release"] == SHARED_STRINGS["release"]
+
+ assert results[use_case_id][2]["1.0.0"] == v1
+ assert results[use_case_id][3]["2.0.0"] == v2
+
+ meta = results.get_fetch_metadata()
+ assert_fetch_type_for_tag_string_set(meta[use_case_id][2], FetchType.HARDCODED, {"release"})
+ assert_fetch_type_for_tag_string_set(
+ meta[use_case_id][3], FetchType.HARDCODED, {"release", "production", "environment"}
+ )
+ assert_fetch_type_for_tag_string_set(meta[use_case_id][2], FetchType.FIRST_SEEN, {"1.0.0"})
+ assert_fetch_type_for_tag_string_set(meta[use_case_id][3], FetchType.FIRST_SEEN, {"2.0.0"})
+
+
+def test_indexer(indexer, indexer_cache, use_case_id, use_case_key):
org1_id = 1
org2_id = 2
strings = {"hello", "hey", "hi"}
@@ -103,58 +142,49 @@ def test_indexer(indexer, indexer_cache, use_case_id):
raw_indexer = indexer
indexer = CachingIndexer(indexer_cache, indexer)
- org_strings = {org1_id: strings, org2_id: {"sup"}}
+ use_case_strings = {use_case_id: {org1_id: strings, org2_id: {"sup"}}}
# create a record with diff org_id but same string that we test against
indexer.record(use_case_id, 999, "hey")
assert list(
indexer_cache.get_many(
- [
- f"{REVERSE_METRIC_PATH_MAPPING[use_case_id].value}:{org1_id}:{string}"
- for string in strings
- ],
+            [f"{use_case_id.value}:{org1_id}:{string}" for string in strings],
).values()
) == [None, None, None]
- results = indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings).results
+ results = indexer.bulk_record(use_case_strings).results
org1_string_ids = {
- raw_indexer.resolve(use_case_id, org1_id, "hello"),
- raw_indexer.resolve(use_case_id, org1_id, "hey"),
- raw_indexer.resolve(use_case_id, org1_id, "hi"),
+ raw_indexer.resolve(use_case_key, org1_id, "hello"),
+ raw_indexer.resolve(use_case_key, org1_id, "hey"),
+ raw_indexer.resolve(use_case_key, org1_id, "hi"),
}
assert None not in org1_string_ids
assert len(org1_string_ids) == 3 # no overlapping ids
- org2_string_id = raw_indexer.resolve(use_case_id, org2_id, "sup")
+ org2_string_id = raw_indexer.resolve(use_case_key, org2_id, "sup")
assert org2_string_id not in org1_string_ids
# verify org1 results and cache values
- for value in results[org1_id].values():
- assert value in org1_string_ids
+ for id_value in results[use_case_id].results[org1_id].values():
+ assert id_value in org1_string_ids
for cache_value in indexer_cache.get_many(
- [
- f"{REVERSE_METRIC_PATH_MAPPING[use_case_id].value}:{org1_id}:{string}"
- for string in strings
- ],
+ [f"{use_case_id.value}:{org1_id}:{string}" for string in strings]
).values():
assert cache_value in org1_string_ids
# verify org2 results and cache values
- assert results[org2_id]["sup"] == org2_string_id
- assert (
- indexer_cache.get(f"{REVERSE_METRIC_PATH_MAPPING[use_case_id].value}:{org2_id}:sup")
- == org2_string_id
- )
+ assert results[use_case_id][org2_id]["sup"] == org2_string_id
+ assert indexer_cache.get(f"{use_case_id.value}:{org2_id}:sup") == org2_string_id
# we should have no results for org_id 999
- assert not results.get(999)
+ assert not results[use_case_id].results.get(999)
-def test_resolve_and_reverse_resolve(indexer, indexer_cache, use_case_id):
+def test_resolve_and_reverse_resolve(indexer, indexer_cache, use_case_id, use_case_key):
"""
Test `resolve` and `reverse_resolve` methods
"""
@@ -165,23 +195,25 @@ def test_resolve_and_reverse_resolve(indexer, indexer_cache, use_case_id):
indexer = CachingIndexer(indexer_cache, indexer)
org_strings = {org1_id: strings}
- indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings)
+ indexer.bulk_record({use_case_id: org_strings})
# test resolve and reverse_resolve
- id = indexer.resolve(use_case_id=use_case_id, org_id=org1_id, string="hello")
+ id = indexer.resolve(use_case_id=use_case_key, org_id=org1_id, string="hello")
assert id is not None
- assert indexer.reverse_resolve(use_case_id=use_case_id, org_id=org1_id, id=id) == "hello"
+ assert indexer.reverse_resolve(use_case_id=use_case_key, org_id=org1_id, id=id) == "hello"
# test record on a string that already exists
indexer.record(use_case_id=use_case_id, org_id=org1_id, string="hello")
- assert indexer.resolve(use_case_id=use_case_id, org_id=org1_id, string="hello") == id
+ assert indexer.resolve(use_case_id=use_case_key, org_id=org1_id, string="hello") == id
# test invalid values
- assert indexer.resolve(use_case_id=use_case_id, org_id=org1_id, string="beep") is None
- assert indexer.reverse_resolve(use_case_id=use_case_id, org_id=org1_id, id=1234) is None
+ assert indexer.resolve(use_case_id=use_case_key, org_id=org1_id, string="beep") is None
+ assert indexer.reverse_resolve(use_case_id=use_case_key, org_id=org1_id, id=1234) is None
-def test_already_created_plus_written_results(indexer, indexer_cache, use_case_id) -> None:
+def test_already_created_plus_written_results(
+ indexer, indexer_cache, use_case_id, use_case_key
+) -> None:
"""
Test that we correctly combine db read results with db write results
for the same organization.
@@ -191,77 +223,81 @@ def test_already_created_plus_written_results(indexer, indexer_cache, use_case_i
raw_indexer = indexer
indexer = CachingIndexer(indexer_cache, indexer)
- v0 = raw_indexer.record(use_case_id, org_id, "v1.2.0")
- v1 = raw_indexer.record(use_case_id, org_id, "v1.2.1")
- v2 = raw_indexer.record(use_case_id, org_id, "v1.2.2")
+ v0 = raw_indexer.record(use_case_id, org_id, "v1.2.0:xyz")
+ v1 = raw_indexer.record(use_case_id, org_id, "v1.2.1:xyz")
+ v2 = raw_indexer.record(use_case_id, org_id, "v1.2.2:xyz")
- expected_mapping = {"v1.2.0": v0, "v1.2.1": v1, "v1.2.2": v2}
+ expected_mapping = {"v1.2.0:xyz": v0, "v1.2.1:xyz": v1, "v1.2.2:xyz": v2}
results = indexer.bulk_record(
- use_case_id=use_case_id, org_strings={org_id: {"v1.2.0", "v1.2.1", "v1.2.2"}}
+ {use_case_id: {org_id: {"v1.2.0:xyz", "v1.2.1:xyz", "v1.2.2:xyz"}}}
)
- assert len(results[org_id]) == len(expected_mapping) == 3
+ assert len(results[use_case_id][org_id]) == len(expected_mapping) == 3
- for string, id in results[org_id].items():
+ for string, id in results[use_case_id][org_id].items():
assert expected_mapping[string] == id
results = indexer.bulk_record(
- use_case_id=use_case_id,
- org_strings={org_id: {"v1.2.0", "v1.2.1", "v1.2.2", "v1.2.3"}},
+ {use_case_id: {org_id: {"v1.2.0:xyz", "v1.2.1:xyz", "v1.2.2:xyz", "v1.2.3:xyz"}}},
)
- v3 = raw_indexer.resolve(use_case_id, org_id, "v1.2.3")
- expected_mapping["v1.2.3"] = v3
+ v3 = raw_indexer.resolve(use_case_key, org_id, "v1.2.3:xyz")
+ expected_mapping["v1.2.3:xyz"] = v3
- assert len(results[org_id]) == len(expected_mapping) == 4
+ assert len(results[use_case_id][org_id]) == len(expected_mapping) == 4
- for string, id in results[org_id].items():
+ for string, id in results[use_case_id][org_id].items():
assert expected_mapping[string] == id
fetch_meta = results.get_fetch_metadata()
assert_fetch_type_for_tag_string_set(
- fetch_meta[org_id], FetchType.CACHE_HIT, {"v1.2.0", "v1.2.1", "v1.2.2"}
+ fetch_meta[use_case_id][org_id],
+ FetchType.CACHE_HIT,
+ {"v1.2.0:xyz", "v1.2.1:xyz", "v1.2.2:xyz"},
+ )
+ assert_fetch_type_for_tag_string_set(
+ fetch_meta[use_case_id][org_id], FetchType.FIRST_SEEN, {"v1.2.3:xyz"}
)
- assert_fetch_type_for_tag_string_set(fetch_meta[org_id], FetchType.FIRST_SEEN, {"v1.2.3"})
-def test_already_cached_plus_read_results(indexer, indexer_cache, use_case_id) -> None:
+def test_already_cached_plus_read_results(
+ indexer, indexer_cache, use_case_id, use_case_key
+) -> None:
"""
Test that we correctly combine cached results with read results
for the same organization.
"""
org_id = 8
- cached = {
- f"{REVERSE_METRIC_PATH_MAPPING[use_case_id].value}:{org_id}:beep": 10,
- f"{REVERSE_METRIC_PATH_MAPPING[use_case_id].value}:{org_id}:boop": 11,
- }
+ cached = {f"{use_case_id.value}:{org_id}:beep": 10, f"{use_case_id.value}:{org_id}:boop": 11}
indexer_cache.set_many(cached)
raw_indexer = indexer
indexer = CachingIndexer(indexer_cache, indexer)
- results = indexer.bulk_record(use_case_id=use_case_id, org_strings={org_id: {"beep", "boop"}})
- assert len(results[org_id]) == 2
- assert results[org_id]["beep"] == 10
- assert results[org_id]["boop"] == 11
+ results = indexer.bulk_record({use_case_id: {org_id: {"beep", "boop"}}})
+ assert len(results[use_case_id][org_id]) == 2
+ assert results[use_case_id][org_id]["beep"] == 10
+ assert results[use_case_id][org_id]["boop"] == 11
# confirm we did not write to the db if results were already cached
- assert not raw_indexer.resolve(use_case_id, org_id, "beep")
- assert not raw_indexer.resolve(use_case_id, org_id, "boop")
+ assert not raw_indexer.resolve(use_case_key, org_id, "beep")
+ assert not raw_indexer.resolve(use_case_key, org_id, "boop")
bam = raw_indexer.record(use_case_id, org_id, "bam")
assert bam is not None
- results = indexer.bulk_record(
- use_case_id=use_case_id, org_strings={org_id: {"beep", "boop", "bam"}}
- )
- assert len(results[org_id]) == 3
- assert results[org_id]["beep"] == 10
- assert results[org_id]["boop"] == 11
- assert results[org_id]["bam"] == bam
+ results = indexer.bulk_record({use_case_id: {org_id: {"beep", "boop", "bam"}}})
+ assert len(results[use_case_id][org_id]) == 3
+ assert results[use_case_id][org_id]["beep"] == 10
+ assert results[use_case_id][org_id]["boop"] == 11
+ assert results[use_case_id][org_id]["bam"] == bam
fetch_meta = results.get_fetch_metadata()
- assert_fetch_type_for_tag_string_set(fetch_meta[org_id], FetchType.CACHE_HIT, {"beep", "boop"})
- assert_fetch_type_for_tag_string_set(fetch_meta[org_id], FetchType.DB_READ, {"bam"})
+ assert_fetch_type_for_tag_string_set(
+ fetch_meta[use_case_id][org_id], FetchType.CACHE_HIT, {"beep", "boop"}
+ )
+ assert_fetch_type_for_tag_string_set(
+ fetch_meta[use_case_id][org_id], FetchType.DB_READ, {"bam"}
+ )
def test_rate_limited(indexer, use_case_id, writes_limiter_option_name):
@@ -284,17 +320,17 @@ def test_rate_limited(indexer, use_case_id, writes_limiter_option_name):
],
}
):
- results = indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings)
+ results = indexer.bulk_record({use_case_id: org_strings})
- assert len(results[1]) == 3
- assert len(results[2]) == 2
- assert len(results[3]) == 1
- assert results[3]["g"] is not None
+ assert len(results[use_case_id][1]) == 3
+ assert len(results[use_case_id][2]) == 2
+ assert len(results[use_case_id][3]) == 1
+ assert results[use_case_id][3]["g"] is not None
rate_limited_strings = set()
for org_id in 1, 2, 3:
- for k, v in results[org_id].items():
+ for k, v in results[use_case_id][org_id].items():
if v is None:
rate_limited_strings.add((org_id, k))
@@ -302,7 +338,7 @@ def test_rate_limited(indexer, use_case_id, writes_limiter_option_name):
assert (3, "g") not in rate_limited_strings
for org_id, string in rate_limited_strings:
- assert results.get_fetch_metadata()[org_id][string] == Metadata(
+ assert results.get_fetch_metadata()[use_case_id][org_id][string] == Metadata(
id=None,
fetch_type=FetchType.RATE_LIMITED,
fetch_type_ext=FetchTypeExt(is_global=False),
@@ -318,11 +354,11 @@ def test_rate_limited(indexer, use_case_id, writes_limiter_option_name):
],
}
):
- results = indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings)
+ results = indexer.bulk_record({use_case_id: org_strings})
- assert results[1] == {"x": None, "y": None, "z": None}
+ assert results[use_case_id][1] == {"x": None, "y": None, "z": None}
for letter in "xyz":
- assert results.get_fetch_metadata()[1][letter] == Metadata(
+ assert results.get_fetch_metadata()[use_case_id][1][letter] == Metadata(
id=None,
fetch_type=FetchType.RATE_LIMITED,
fetch_type_ext=FetchTypeExt(is_global=False),
@@ -338,10 +374,10 @@ def test_rate_limited(indexer, use_case_id, writes_limiter_option_name):
],
}
):
- results = indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings)
+ results = indexer.bulk_record({use_case_id: org_strings})
rate_limited_strings2 = set()
- for k, v in results[1].items():
+ for k, v in results[use_case_id][1].items():
if v is None:
rate_limited_strings2.add(k)
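
One invariant these tests pin down is worth restating: strings satisfied from the cache never reach the backing store. A compact restatement, using the same key format as the tests above:

cached = {f"{use_case_id.value}:{org_id}:beep": 10}
indexer_cache.set_many(cached)
results = CachingIndexer(indexer_cache, raw_indexer).bulk_record({use_case_id: {org_id: {"beep"}}})
assert results[use_case_id][org_id]["beep"] == 10
# the raw indexer was never consulted, so nothing was written through
assert raw_indexer.resolve(use_case_key, org_id, "beep") is None
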
diff --git a/tests/sentry/sentry_metrics/test_batch.py b/tests/sentry/sentry_metrics/test_batch.py
index 32c12b00c2167d..d77118e9b775b0 100644
--- a/tests/sentry/sentry_metrics/test_batch.py
+++ b/tests/sentry/sentry_metrics/test_batch.py
@@ -558,30 +558,34 @@ def test_all_resolved(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "d:sessions/duration@second": 2,
- "environment": 3,
- "errored": 4,
- "healthy": 5,
- "init": 6,
- "production": 7,
- "s:sessions/error@none": 8,
- "session.status": 9,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "d:sessions/duration@second": 2,
+ "environment": 3,
+ "errored": 4,
+ "healthy": 5,
+ "init": 6,
+ "production": 7,
+ "s:sessions/error@none": 8,
+ "session.status": 9,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
- "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
- "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
- }
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
+ "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
+ "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ }
+ },
},
)
@@ -701,29 +705,33 @@ def test_all_resolved_with_routing_information(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "d:sessions/duration@second": 2,
- "environment": 3,
- "errored": 4,
- "healthy": 5,
- "init": 6,
- "production": 7,
- "s:sessions/error@none": 8,
- "session.status": 9,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "d:sessions/duration@second": 2,
+ "environment": 3,
+ "errored": 4,
+ "healthy": 5,
+ "init": 6,
+ "production": 7,
+ "s:sessions/error@none": 8,
+ "session.status": 9,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
- "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
- "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
+ "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
+ "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
@@ -857,29 +865,33 @@ def test_all_resolved_retention_days_honored(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "d:sessions/duration@second": 2,
- "environment": 3,
- "errored": 4,
- "healthy": 5,
- "init": 6,
- "production": 7,
- "s:sessions/error@none": 8,
- "session.status": 9,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "d:sessions/duration@second": 2,
+ "environment": 3,
+ "errored": 4,
+ "healthy": 5,
+ "init": 6,
+ "production": 7,
+ "s:sessions/error@none": 8,
+ "session.status": 9,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
- "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
- "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
+ "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
+ "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
@@ -1004,21 +1016,25 @@ def test_batch_resolve_with_values_not_indexed(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "d:sessions/duration@second": 2,
- "environment": 3,
- "s:sessions/error@none": 4,
- "session.status": 5,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "d:sessions/duration@second": 2,
+ "environment": 3,
+ "s:sessions/error@none": 4,
+ "session.status": 5,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
@@ -1136,35 +1152,39 @@ def test_metric_id_rate_limited(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": None,
- "d:sessions/duration@second": None,
- "environment": 3,
- "errored": 4,
- "healthy": 5,
- "init": 6,
- "production": 7,
- "s:sessions/error@none": 8,
- "session.status": 9,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": None,
+ "d:sessions/duration@second": None,
+ "environment": 3,
+ "errored": 4,
+ "healthy": 5,
+ "init": 6,
+ "production": 7,
+ "s:sessions/error@none": 8,
+ "session.status": 9,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(
- id=None,
- fetch_type=FetchType.RATE_LIMITED,
- fetch_type_ext=FetchTypeExt(is_global=False),
- ),
- "d:sessions/duration@second": Metadata(
- id=None, fetch_type=FetchType.RATE_LIMITED, fetch_type_ext=None
- ),
- "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
- "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
- "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=None, fetch_type=FetchType.DB_READ),
- "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(
+ id=None,
+ fetch_type=FetchType.RATE_LIMITED,
+ fetch_type_ext=FetchTypeExt(is_global=False),
+ ),
+ "d:sessions/duration@second": Metadata(
+ id=None, fetch_type=FetchType.RATE_LIMITED, fetch_type_ext=None
+ ),
+ "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
+ "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
+ "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=None, fetch_type=FetchType.DB_READ),
+ "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
@@ -1239,33 +1259,37 @@ def test_tag_key_rate_limited(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "d:sessions/duration@second": 2,
- "environment": None,
- "errored": 4,
- "healthy": 5,
- "init": 6,
- "production": 7,
- "s:sessions/error@none": 8,
- "session.status": 9,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "d:sessions/duration@second": 2,
+ "environment": None,
+ "errored": 4,
+ "healthy": 5,
+ "init": 6,
+ "production": 7,
+ "s:sessions/error@none": 8,
+ "session.status": 9,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(
- id=None,
- fetch_type=FetchType.RATE_LIMITED,
- fetch_type_ext=FetchTypeExt(is_global=False),
- ),
- "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
- "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
- "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(
+ id=None,
+ fetch_type=FetchType.RATE_LIMITED,
+ fetch_type_ext=FetchTypeExt(is_global=False),
+ ),
+ "errored": Metadata(id=4, fetch_type=FetchType.DB_READ),
+ "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
+ "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
@@ -1320,33 +1344,37 @@ def test_tag_value_rate_limited(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "d:sessions/duration@second": 2,
- "environment": 3,
- "errored": None,
- "healthy": 5,
- "init": 6,
- "production": 7,
- "s:sessions/error@none": 8,
- "session.status": 9,
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "d:sessions/duration@second": 2,
+ "environment": 3,
+ "errored": None,
+ "healthy": 5,
+ "init": 6,
+ "production": 7,
+ "s:sessions/error@none": 8,
+ "session.status": 9,
+ }
}
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "errored": Metadata(
- id=None,
- fetch_type=FetchType.RATE_LIMITED,
- fetch_type_ext=FetchTypeExt(is_global=False),
- ),
- "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
- "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "d:sessions/duration@second": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "errored": Metadata(
+ id=None,
+ fetch_type=FetchType.RATE_LIMITED,
+ fetch_type_ext=FetchTypeExt(is_global=False),
+ ),
+ "healthy": Metadata(id=5, fetch_type=FetchType.HARDCODED),
+ "init": Metadata(id=6, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=7, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=8, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=9, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
@@ -1455,40 +1483,44 @@ def test_one_org_limited(caplog, settings):
caplog.set_level(logging.ERROR)
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "c:sessions/session@none": 1,
- "environment": None,
- "init": 3,
- "production": 4,
- "session.status": 5,
- },
- 2: {
- "d:sessions/duration@second": 1,
- "environment": 2,
- "healthy": 3,
- "production": 4,
- "session.status": 5,
- },
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": 1,
+ "environment": None,
+ "init": 3,
+ "production": 4,
+ "session.status": 5,
+ },
+ 2: {
+ "d:sessions/duration@second": 1,
+ "environment": 2,
+ "healthy": 3,
+ "production": 4,
+ "session.status": 5,
+ },
+ }
},
{
- 1: {
- "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(
- id=None,
- fetch_type=FetchType.RATE_LIMITED,
- fetch_type_ext=FetchTypeExt(is_global=False),
- ),
- "init": Metadata(id=3, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
- },
- 2: {
- "d:sessions/duration@second": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "environment": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "healthy": Metadata(id=3, fetch_type=FetchType.HARDCODED),
- "production": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
- },
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(
+ id=None,
+ fetch_type=FetchType.RATE_LIMITED,
+ fetch_type_ext=FetchTypeExt(is_global=False),
+ ),
+ "init": Metadata(id=3, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
+ },
+ 2: {
+ "d:sessions/duration@second": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "environment": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "healthy": Metadata(id=3, fetch_type=FetchType.HARDCODED),
+ "production": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
+ },
+ }
},
)
@@ -1580,21 +1612,25 @@ def test_cardinality_limiter(caplog, settings):
snuba_payloads = batch.reconstruct_messages(
{
- 1: {
- "environment": 1,
- "errored": 2,
- "production": 3,
- "s:sessions/error@none": 4,
- "session.status": 5,
- },
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "environment": 1,
+ "errored": 2,
+ "production": 3,
+ "s:sessions/error@none": 4,
+ "session.status": 5,
+ },
+ }
},
{
- 1: {
- "environment": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
- "errored": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
- "production": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
- "s:sessions/error@none": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
- "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
+ MockUseCaseID.SESSIONS: {
+ 1: {
+ "environment": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
+ "errored": Metadata(id=2, fetch_type=FetchType.CACHE_HIT),
+ "production": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
+ "s:sessions/error@none": Metadata(id=4, fetch_type=FetchType.CACHE_HIT),
+ "session.status": Metadata(id=5, fetch_type=FetchType.CACHE_HIT),
+ }
}
},
)
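
Both arguments to reconstruct_messages now share the same double keying; a trimmed sketch of the shape every case above passes:

mapping = {
    MockUseCaseID.SESSIONS: {  # use case -> org -> string -> id (None when rate limited)
        1: {"c:sessions/session@none": 1, "environment": 3},
    },
}
bulk_record_meta = {
    MockUseCaseID.SESSIONS: {  # use case -> org -> string -> Metadata
        1: {
            "c:sessions/session@none": Metadata(id=1, fetch_type=FetchType.CACHE_HIT),
            "environment": Metadata(id=3, fetch_type=FetchType.CACHE_HIT),
        },
    },
}
snuba_payloads = batch.reconstruct_messages(mapping, bulk_record_meta)
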
diff --git a/tests/sentry/sentry_metrics/test_strings.py b/tests/sentry/sentry_metrics/test_strings.py
index 0648579783a284..4b75dbc6ce25a5 100644
--- a/tests/sentry/sentry_metrics/test_strings.py
+++ b/tests/sentry/sentry_metrics/test_strings.py
@@ -1,19 +1,19 @@
-from sentry.sentry_metrics.configuration import UseCaseKey
from sentry.sentry_metrics.indexer.mock import MockIndexer
from sentry.sentry_metrics.indexer.strings import SHARED_STRINGS, StaticStringIndexer
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
-use_case_id = UseCaseKey("release-health")
+use_case_id = UseCaseID.SESSIONS
def test_static_strings_only() -> None:
indexer = StaticStringIndexer(MockIndexer())
org_strings = {2: {"release"}, 3: {"production", "environment", "release"}}
- results = indexer.bulk_record(use_case_id=use_case_id, org_strings=org_strings)
+ results = indexer.bulk_record({use_case_id: org_strings})
- assert results[2]["release"] == SHARED_STRINGS["release"]
- assert results[3]["production"] == SHARED_STRINGS["production"]
- assert results[3]["environment"] == SHARED_STRINGS["environment"]
- assert results[3]["release"] == SHARED_STRINGS["release"]
+ assert results[use_case_id][2]["release"] == SHARED_STRINGS["release"]
+ assert results[use_case_id][3]["production"] == SHARED_STRINGS["production"]
+ assert results[use_case_id][3]["environment"] == SHARED_STRINGS["environment"]
+ assert results[use_case_id][3]["release"] == SHARED_STRINGS["release"]
def test_resolve_shared_org_existing_entry() -> None:
diff --git a/tests/sentry/snuba/metrics/fields/test_base.py b/tests/sentry/snuba/metrics/fields/test_base.py
index cb866a1bd24510..89aae3a957d4f8 100644
--- a/tests/sentry/snuba/metrics/fields/test_base.py
+++ b/tests/sentry/snuba/metrics/fields/test_base.py
@@ -8,6 +8,7 @@
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING
from sentry.sentry_metrics.utils import resolve_tag_value, resolve_weak
from sentry.snuba.dataset import EntityKey
from sentry.snuba.metrics import (
@@ -45,7 +46,9 @@
def indexer_record(use_case_id: UseCaseKey, org_id: int, string: str) -> int:
- return indexer.record(use_case_id=use_case_id, org_id=org_id, string=string)
+ return indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id], org_id=org_id, string=string
+ )
perf_indexer_record = partial(indexer_record, UseCaseKey.PERFORMANCE)
diff --git a/tests/sentry/snuba/metrics/test_query_builder.py b/tests/sentry/snuba/metrics/test_query_builder.py
index 447212dd6332ea..afeec374b9f7c6 100644
--- a/tests/sentry/snuba/metrics/test_query_builder.py
+++ b/tests/sentry/snuba/metrics/test_query_builder.py
@@ -27,6 +27,7 @@
from sentry.api.utils import InvalidParams
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING, UseCaseID
from sentry.sentry_metrics.utils import (
resolve,
resolve_tag_key,
@@ -251,7 +252,7 @@ def test_parse_query(query_string, expected):
use_case_id = UseCaseKey.RELEASE_HEALTH
for s in ("[email protected]", "/bar/:orgId/"):
# will be values 10000, 10001 respectively
- indexer.record(use_case_id=use_case_id, org_id=org_id, string=s)
+ indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=org_id, string=s)
parsed = resolve_tags(
use_case_id,
org_id,
@@ -1437,7 +1438,11 @@ def test_resolve_tags_with_unary_tuple(self):
transactions = ["/foo", "/bar"]
for transaction in ["transaction"] + transactions:
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string=transaction)
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string=transaction,
+ )
resolved_query = resolve_tags(
self.use_case_id,
@@ -1493,8 +1498,16 @@ def test_resolve_tags_with_binary_tuple(self):
tags = [("/foo", "ios"), ("/bar", "android")]
for transaction, platform in [("transaction", "platform")] + tags:
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string=transaction)
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string=platform)
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string=transaction,
+ )
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string=platform,
+ )
resolved_query = resolve_tags(
self.use_case_id,
@@ -1558,7 +1571,11 @@ def test_resolve_tags_with_binary_tuple(self):
def test_resolve_tags_with_has(self):
tag_key = "transaction"
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string=tag_key)
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string=tag_key,
+ )
resolved_query = resolve_tags(
self.use_case_id,
@@ -1593,7 +1610,11 @@ def test_resolve_tags_with_has(self):
)
def test_resolve_tags_with_match_and_filterable_tag(self):
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string="environment")
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string="environment",
+ )
resolved_query = resolve_tags(
self.use_case_id,
@@ -1628,7 +1649,11 @@ def test_resolve_tags_with_match_and_filterable_tag(self):
)
def test_resolve_tags_with_match_and_deep_filterable_tag(self):
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string="environment")
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string="environment",
+ )
resolved_query = resolve_tags(
self.use_case_id,
@@ -1668,7 +1693,11 @@ def test_resolve_tags_with_match_and_deep_filterable_tag(self):
)
def test_resolve_tags_with_match_and_non_filterable_tag(self):
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string="http_status_code")
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string="http_status_code",
+ )
with pytest.raises(
InvalidParams,
@@ -1694,7 +1723,11 @@ def test_resolve_tags_with_match_and_non_filterable_tag(self):
)
def test_resolve_tags_with_match_and_deep_non_filterable_tag(self):
- indexer.record(use_case_id=self.use_case_id, org_id=self.org_id, string="http_status_code")
+ indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[self.use_case_id],
+ org_id=self.org_id,
+ string="http_status_code",
+ )
with pytest.raises(
InvalidParams,
diff --git a/tests/sentry/snuba/metrics/test_snql.py b/tests/sentry/snuba/metrics/test_snql.py
index a515f4b3716a07..84a18c4cd34e21 100644
--- a/tests/sentry/snuba/metrics/test_snql.py
+++ b/tests/sentry/snuba/metrics/test_snql.py
@@ -11,6 +11,7 @@
)
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.sentry_metrics.utils import resolve_tag_key, resolve_tag_value, resolve_weak
from sentry.snuba.metrics import TransactionMRI
from sentry.snuba.metrics.fields.snql import (
@@ -55,36 +56,38 @@ def setUp(self):
TransactionMRI.MEASUREMENTS_LCP.value,
TransactionMRI.DURATION.value,
]:
- self.metric_ids += [indexer.record(UseCaseKey.PERFORMANCE, self.org_id, metric_name)]
+ self.metric_ids += [indexer.record(UseCaseID.TRANSACTIONS, self.org_id, metric_name)]
indexer.bulk_record(
- use_case_id=UseCaseKey.RELEASE_HEALTH,
- org_strings={
- self.org_id: [
- "abnormal",
- "crashed",
- "errored_preaggr",
- "errored",
- "exited",
- "init",
- "session.status",
- ]
- },
+ {
+ UseCaseID.SESSIONS: {
+ self.org_id: [
+ "abnormal",
+ "crashed",
+ "errored_preaggr",
+ "errored",
+ "exited",
+ "init",
+ "session.status",
+ ]
+ }
+ }
)
indexer.bulk_record(
- use_case_id=UseCaseKey.PERFORMANCE,
- org_strings={
- self.org_id: [
- TransactionSatisfactionTagValue.FRUSTRATED.value,
- TransactionSatisfactionTagValue.SATISFIED.value,
- TransactionSatisfactionTagValue.TOLERATED.value,
- TransactionStatusTagValue.CANCELLED.value,
- TransactionStatusTagValue.OK.value,
- TransactionStatusTagValue.UNKNOWN.value,
- TransactionTagsKey.TRANSACTION_SATISFACTION.value,
- TransactionTagsKey.TRANSACTION_STATUS.value,
- ]
- },
+ {
+ UseCaseID.TRANSACTIONS: {
+ self.org_id: [
+ TransactionSatisfactionTagValue.FRUSTRATED.value,
+ TransactionSatisfactionTagValue.SATISFIED.value,
+ TransactionSatisfactionTagValue.TOLERATED.value,
+ TransactionStatusTagValue.CANCELLED.value,
+ TransactionStatusTagValue.OK.value,
+ TransactionStatusTagValue.UNKNOWN.value,
+ TransactionTagsKey.TRANSACTION_SATISFACTION.value,
+ TransactionTagsKey.TRANSACTION_STATUS.value,
+ ]
+ }
+ }
)
def test_counter_sum_aggregation_on_session_status(self):
diff --git a/tests/sentry/snuba/test_entity_subscriptions.py b/tests/sentry/snuba/test_entity_subscriptions.py
index a2325e776ea161..0483a0e9b5ac21 100644
--- a/tests/sentry/snuba/test_entity_subscriptions.py
+++ b/tests/sentry/snuba/test_entity_subscriptions.py
@@ -12,6 +12,7 @@
from sentry.search.events.constants import METRICS_MAP
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import UseCaseID
from sentry.sentry_metrics.utils import resolve, resolve_tag_key, resolve_tag_value
from sentry.snuba.dataset import Dataset, EntityKey
from sentry.snuba.entity_subscription import (
@@ -43,9 +44,7 @@ def setUp(self) -> None:
"init",
"crashed",
]:
- indexer.record(
- use_case_id=UseCaseKey.RELEASE_HEALTH, org_id=self.organization.id, string=tag
- )
+ indexer.record(use_case_id=UseCaseID.SESSIONS, org_id=self.organization.id, string=tag)
def test_get_entity_subscriptions_for_sessions_dataset_non_supported_aggregate(self) -> None:
aggregate = "count(sessions)"
diff --git a/tests/sentry/snuba/test_tasks.py b/tests/sentry/snuba/test_tasks.py
index b8237cdf165c51..f46679ab585e44 100644
--- a/tests/sentry/snuba/test_tasks.py
+++ b/tests/sentry/snuba/test_tasks.py
@@ -13,6 +13,7 @@
from sentry.search.events.constants import METRICS_MAP
from sentry.sentry_metrics import indexer
from sentry.sentry_metrics.configuration import UseCaseKey
+from sentry.sentry_metrics.use_case_id_registry import REVERSE_METRIC_PATH_MAPPING
from sentry.sentry_metrics.utils import resolve, resolve_tag_key, resolve_tag_value
from sentry.snuba.dataset import Dataset
from sentry.snuba.entity_subscription import (
@@ -39,7 +40,11 @@
def indexer_record(use_case_id: UseCaseKey, org_id: int, string: str) -> int:
- return indexer.record(use_case_id=use_case_id, org_id=org_id, string=string)
+ return indexer.record(
+ use_case_id=REVERSE_METRIC_PATH_MAPPING[use_case_id],
+ org_id=org_id,
+ string=string,
+ )
perf_indexer_record = partial(indexer_record, UseCaseKey.PERFORMANCE)
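The test edits above all follow one migration: call sites that held the legacy UseCaseKey enum now reach the indexer through the new UseCaseID registry, translating via REVERSE_METRIC_PATH_MAPPING where the old key is still in scope. A minimal runnable sketch of that translation-layer pattern (the enum values and mapping pairs below mirror what the diffs show, but the definitions are illustrative, not Sentry's actual module):

    from enum import Enum

    class UseCaseKey(Enum):  # legacy enum keyed by metrics path
        RELEASE_HEALTH = "release-health"
        PERFORMANCE = "performance"

    class UseCaseID(Enum):  # new registry keyed by product use case
        SESSIONS = "sessions"
        TRANSACTIONS = "transactions"

    # Bridge so call sites still holding a legacy key can reach the new API.
    REVERSE_METRIC_PATH_MAPPING = {
        UseCaseKey.RELEASE_HEALTH: UseCaseID.SESSIONS,
        UseCaseKey.PERFORMANCE: UseCaseID.TRANSACTIONS,
    }

    def record(use_case_id: UseCaseID, org_id: int, string: str) -> None:
        print(f"indexing {string!r} for org {org_id} under {use_case_id.value}")

    def indexer_record(use_case_id: UseCaseKey, org_id: int, string: str) -> None:
        # Shim matching the tests' indexer_record: translate, then call through.
        record(REVERSE_METRIC_PATH_MAPPING[use_case_id], org_id, string)

    indexer_record(UseCaseKey.RELEASE_HEALTH, org_id=1, string="environment")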
|
f5967e9483862d9b4c8997339e06270c51a3ce13
|
2020-04-23 00:52:26
|
Alberto Leal
|
fix(issues): Ignore modal for custom num of occurrences opens wrong modal (#18412)
| false
|
Ignore modal for custom num of occurrences opens wrong modal (#18412)
|
fix
|
diff --git a/src/sentry/static/sentry/app/components/actions/ignore.tsx b/src/sentry/static/sentry/app/components/actions/ignore.tsx
index de4410625eb5bd..757657991b3b03 100644
--- a/src/sentry/static/sentry/app/components/actions/ignore.tsx
+++ b/src/sentry/static/sentry/app/components/actions/ignore.tsx
@@ -229,7 +229,7 @@ export default class IgnoreActions extends React.Component<Props, State> {
))}
<MenuItem divider />
<MenuItem noAnchor>
- <a onClick={() => this.setState({modal: ModalStates.USERS})}>
+ <a onClick={() => this.setState({modal: ModalStates.COUNT})}>
{t('Custom')}
</a>
</MenuItem>
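The one-line fix points the "Custom" occurrences entry at the COUNT modal state instead of USERS. A tiny sketch of the state-driven dialog selection it relies on (Python for illustration; the component is React, and the enum member meanings are inferred from their names):

    from enum import Enum, auto

    class ModalStates(Enum):
        COUNT = auto()  # ignore until the issue occurs N more times
        USERS = auto()  # ignore until N distinct users are affected

    def on_custom_clicked(set_state) -> None:
        # The bug: this handler passed ModalStates.USERS, so the wrong
        # dialog rendered; the custom-occurrences entry must select COUNT.
        set_state(ModalStates.COUNT)

    opened = []
    on_custom_clicked(opened.append)
    assert opened == [ModalStates.COUNT]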
|
5b7d9d4345cb57e26a431d57b2d8b0611fb17ba3
|
2023-05-31 01:09:09
|
Dominik Buszowiecki
|
ref(starfish): update folder structure and fix date filters in span summary (#49982)
| false
|
update folder structure and fix date filters in span summary (#49982)
|
ref
|
diff --git a/static/app/views/starfish/queries/useSpanMetrics.tsx b/static/app/views/starfish/queries/useSpanMetrics.tsx
index 731a3183657335..d97ad9955f03f0 100644
--- a/static/app/views/starfish/queries/useSpanMetrics.tsx
+++ b/static/app/views/starfish/queries/useSpanMetrics.tsx
@@ -45,7 +45,7 @@ export const useSpanMetrics = (
: '';
const {isLoading, error, data} = useQuery<Metrics[]>({
- queryKey: ['span-metrics', span?.group_id],
+ queryKey: ['span-metrics', span?.group_id, dateFilters],
queryFn: () =>
fetch(`${HOST}/?query=${query}&referrer=${referrer}`).then(res => res.json()),
retry: false,
diff --git a/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx b/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx
index ea5cff3880c831..12f2d8ffe6612b 100644
--- a/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx
+++ b/static/app/views/starfish/queries/useSpanTransactionMetrics.tsx
@@ -46,6 +46,7 @@ export const useSpanTransactionMetrics = (
'span-transactions-metrics',
span?.group_id,
transactions?.join(',') || '',
+ dateFilters,
],
queryFn: () =>
fetch(`${HOST}/?query=${query}&referrer=${referrer}`).then(res => res.json()),
diff --git a/static/app/views/starfish/views/spanSummaryPage/queries.tsx b/static/app/views/starfish/views/spanSummaryPage/queries.tsx
index eb710fb449c043..42384ea12cdd83 100644
--- a/static/app/views/starfish/views/spanSummaryPage/queries.tsx
+++ b/static/app/views/starfish/views/spanSummaryPage/queries.tsx
@@ -153,7 +153,7 @@ export const useQuerySpansInTransaction = (options: {
`;
return useQuery({
- queryKey: ['spansInTransaction', groupId],
+ queryKey: ['spansInTransaction', groupId, dateFilters],
queryFn: () => fetch(`${HOST}/?query=${query}&format=sql`).then(res => res.json()),
retry: false,
initialData: [],
diff --git a/static/app/views/starfish/views/spanSummaryPage/durationChart/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx
similarity index 99%
rename from static/app/views/starfish/views/spanSummaryPage/durationChart/index.tsx
rename to static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx
index b284fafdfaa738..d46316f3b071ed 100644
--- a/static/app/views/starfish/views/spanSummaryPage/durationChart/index.tsx
+++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/durationChart/index.tsx
@@ -72,7 +72,6 @@ function DurationChart({groupId, transactionName, spanDescription}: Props) {
chartColors={theme.charts.getColorPalette(4).slice(3, 6)}
stacked
isLineChart
- disableXAxis
definedAxisTicks={4}
/>
);
diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx
similarity index 95%
rename from static/app/views/starfish/views/spanSummaryPage/sampleList.tsx
rename to static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx
index 5a60fc318aa632..3144a44644c21e 100644
--- a/static/app/views/starfish/views/spanSummaryPage/sampleList.tsx
+++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx
@@ -3,8 +3,8 @@ import omit from 'lodash/omit';
import {t} from 'sentry/locale';
import useRouter from 'sentry/utils/useRouter';
import DetailPanel from 'sentry/views/starfish/components/detailPanel';
-import DurationChart from 'sentry/views/starfish/views/spanSummaryPage/durationChart';
-import SampleInfo from 'sentry/views/starfish/views/spanSummaryPage/sampleInfo';
+import DurationChart from 'sentry/views/starfish/views/spanSummaryPage/sampleList/durationChart';
+import SampleInfo from 'sentry/views/starfish/views/spanSummaryPage/sampleList/sampleInfo';
import SampleTable from 'sentry/views/starfish/views/spanSummaryPage/sampleList/sampleTable';
type Props = {
diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleInfo/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx
similarity index 100%
rename from static/app/views/starfish/views/spanSummaryPage/sampleInfo/index.tsx
rename to static/app/views/starfish/views/spanSummaryPage/sampleList/sampleInfo/index.tsx
diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/index.tsx
similarity index 100%
rename from static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable.tsx
rename to static/app/views/starfish/views/spanSummaryPage/sampleList/sampleTable/index.tsx
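The queryKey edits are the substance of the date-filter fix: a cached result is reused until its key changes, so a key that omits the date filters keeps serving the previous range. A minimal sketch of that caching rule, in Python for illustration (the real code relies on React Query's queryKey):

    cache = {}

    def fetch_span_metrics(group_id: str, date_filters: str) -> dict:
        # Every input that affects the response belongs in the cache key.
        # The buggy key was ("span-metrics", group_id): changing the date
        # range then kept returning the previously cached response.
        key = ("span-metrics", group_id, date_filters)
        if key not in cache:
            cache[key] = {"group": group_id, "range": date_filters}  # stand-in for the HTTP fetch
        return cache[key]

    assert fetch_span_metrics("g1", "7d") != fetch_span_metrics("g1", "30d")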
|
1911a68a37dc4800fc78d1167e7cccaa7e06c0e4
|
2023-09-27 12:32:18
|
Matej Minar
|
feat(ddm): Links to release and transaction + summary and focus polishes (#57002)
| false
|
Links to release and transaction + summary and focus polishes (#57002)
|
feat
|
diff --git a/static/app/views/ddm/metricsExplorer.tsx b/static/app/views/ddm/metricsExplorer.tsx
index 2049e0ee700afd..9b16e72b08bcc4 100644
--- a/static/app/views/ddm/metricsExplorer.tsx
+++ b/static/app/views/ddm/metricsExplorer.tsx
@@ -1,6 +1,8 @@
import {Fragment, useCallback, useEffect, useMemo, useState} from 'react';
import {Theme} from '@emotion/react';
import styled from '@emotion/styled';
+import colorFn from 'color';
+import type {LineSeriesOption} from 'echarts';
import moment from 'moment';
import Alert from 'sentry/components/alert';
@@ -291,26 +293,18 @@ function MetricsExplorerDisplayOuter(props?: DisplayProps) {
function MetricsExplorerDisplay({displayType, ...metricsDataProps}: DisplayProps) {
const router = useRouter();
const {data, isLoading, isError} = useMetricsData(metricsDataProps);
- const hiddenSeries = decodeList(router.location.query.hiddenSeries);
+ const focusedSeries = router.location.query.focusedSeries;
+ const [hoveredLegend, setHoveredLegend] = useState('');
const toggleSeriesVisibility = (seriesName: string) => {
- if (hiddenSeries.includes(seriesName)) {
- router.push({
- ...router.location,
- query: {
- ...router.location.query,
- hiddenSeries: hiddenSeries.filter(s => s !== seriesName),
- },
- });
- } else {
- router.push({
- ...router.location,
- query: {
- ...router.location.query,
- hiddenSeries: [...hiddenSeries, seriesName],
- },
- });
- }
+ setHoveredLegend('');
+ router.push({
+ ...router.location,
+ query: {
+ ...router.location.query,
+ focusedSeries: focusedSeries === seriesName ? undefined : seriesName,
+ },
+ });
};
if (!data) {
@@ -330,6 +324,8 @@ function MetricsExplorerDisplay({displayType, ...metricsDataProps}: DisplayProps
return {
values: Object.values(g.series)[0],
name: getSeriesName(g, data.groups.length === 1, metricsDataProps.groupBy),
+ transaction: g.by.transaction,
+ release: g.by.release,
};
});
@@ -338,12 +334,19 @@ function MetricsExplorerDisplay({displayType, ...metricsDataProps}: DisplayProps
const chartSeries = series.map((item, i) => ({
seriesName: item.name,
unit,
- color: colors[i],
- hidden: hiddenSeries.includes(item.name),
+ color: colorFn(colors[i])
+ .alpha(hoveredLegend && hoveredLegend !== item.name ? 0.1 : 1)
+ .string(),
+ hidden: focusedSeries && focusedSeries !== item.name,
data: item.values.map((value, index) => ({
name: sorted.intervals[index],
value,
})),
+ transaction: item.transaction as string | undefined,
+ release: item.release as string | undefined,
+ emphasis: {
+ focus: 'series',
+ } as LineSeriesOption['emphasis'],
}));
return (
@@ -360,6 +363,7 @@ function MetricsExplorerDisplay({displayType, ...metricsDataProps}: DisplayProps
series={chartSeries}
operation={metricsDataProps.op}
onClick={toggleSeriesVisibility}
+ setHoveredLegend={focusedSeries ? undefined : setHoveredLegend}
/>
</DisplayWrapper>
);
@@ -435,6 +439,8 @@ export type Series = {
seriesName: string;
unit: string;
hidden?: boolean;
+ release?: string;
+ transaction?: string;
};
type ChartProps = {
diff --git a/static/app/views/ddm/summaryTable.tsx b/static/app/views/ddm/summaryTable.tsx
index fa6c8a4642bffc..f325e5c5c0171b 100644
--- a/static/app/views/ddm/summaryTable.tsx
+++ b/static/app/views/ddm/summaryTable.tsx
@@ -1,47 +1,127 @@
import {Fragment} from 'react';
import styled from '@emotion/styled';
+import colorFn from 'color';
+import {LinkButton} from 'sentry/components/button';
+import ButtonBar from 'sentry/components/buttonBar';
+import {Tooltip} from 'sentry/components/tooltip';
+import {IconLightning, IconReleases} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
+import {getUtcDateString} from 'sentry/utils/dates';
import {formatMetricsUsingUnitAndOp, getNameFromMRI} from 'sentry/utils/metrics';
+import useOrganization from 'sentry/utils/useOrganization';
+import usePageFilters from 'sentry/utils/usePageFilters';
+import useRouter from 'sentry/utils/useRouter';
import {Series} from 'sentry/views/ddm/metricsExplorer';
+import {transactionSummaryRouteWithQuery} from 'sentry/views/performance/transactionSummary/utils';
export function SummaryTable({
series,
operation,
onClick,
+ setHoveredLegend,
}: {
onClick: (seriesName: string) => void;
series: Series[];
+ setHoveredLegend: React.Dispatch<React.SetStateAction<string>> | undefined;
operation?: string;
}) {
+ const {selection} = usePageFilters();
+ const router = useRouter();
+ const {slug} = useOrganization();
+ const hasActions = series.some(s => s.release || s.transaction);
+ const {start, end, statsPeriod, project, environment} = router.location.query;
+
return (
- <SummaryTableWrapper>
+ <SummaryTableWrapper hasActions={hasActions}>
<HeaderCell />
<HeaderCell>{t('Name')}</HeaderCell>
- <HeaderCell>{t('Avg')}</HeaderCell>
- <HeaderCell>{t('Min')}</HeaderCell>
- <HeaderCell>{t('Max')}</HeaderCell>
- <HeaderCell>{t('Sum')}</HeaderCell>
+ <HeaderCell right>{t('Avg')}</HeaderCell>
+ <HeaderCell right>{t('Min')}</HeaderCell>
+ <HeaderCell right>{t('Max')}</HeaderCell>
+ <HeaderCell right>{t('Sum')}</HeaderCell>
+ {hasActions && <HeaderCell right>{t('Actions')}</HeaderCell>}
{series
.sort((a, b) => a.seriesName.localeCompare(b.seriesName))
- .map(({seriesName, color, hidden, unit, data}) => {
+ .map(({seriesName, color, hidden, unit, data, transaction, release}) => {
const {avg, min, max, sum} = getValues(data);
return (
<Fragment key={seriesName}>
- <FlexCell onClick={() => onClick(seriesName)} hidden={hidden}>
- <ColorDot color={color} />
- </FlexCell>
- <Cell onClick={() => onClick(seriesName)}>
- {getNameFromMRI(seriesName)}
- </Cell>
- {/* TODO(ddm): Add a tooltip with the full value, don't add on click in case users want to copy the value */}
- <Cell>{formatMetricsUsingUnitAndOp(avg, unit, operation)}</Cell>
- <Cell>{formatMetricsUsingUnitAndOp(min, unit, operation)}</Cell>
- <Cell>{formatMetricsUsingUnitAndOp(max, unit, operation)}</Cell>
- <Cell>{formatMetricsUsingUnitAndOp(sum, unit, operation)}</Cell>
+ <CellWrapper
+ onClick={() => onClick(seriesName)}
+ onMouseEnter={() => setHoveredLegend?.(seriesName)}
+ onMouseLeave={() => setHoveredLegend?.('')}
+ >
+ <Cell>
+ <ColorDot color={color} hiddenn={!!hidden} />
+ </Cell>
+ <Cell>{getNameFromMRI(seriesName)}</Cell>
+ {/* TODO(ddm): Add a tooltip with the full value, don't add on click in case users want to copy the value */}
+ <Cell right>{formatMetricsUsingUnitAndOp(avg, unit, operation)}</Cell>
+ <Cell right>{formatMetricsUsingUnitAndOp(min, unit, operation)}</Cell>
+ <Cell right>{formatMetricsUsingUnitAndOp(max, unit, operation)}</Cell>
+ <Cell right>{formatMetricsUsingUnitAndOp(sum, unit, operation)}</Cell>
+ </CellWrapper>
+ {hasActions && (
+ <Cell right>
+ <ButtonBar gap={0.5}>
+ {transaction && (
+ <div>
+ <Tooltip title={t('Open Transaction Summary')}>
+ <LinkButton
+ to={transactionSummaryRouteWithQuery({
+ orgSlug: slug,
+ transaction,
+ projectID: selection.projects.map(p => String(p)),
+ query: {
+ query: '',
+ environment: selection.environments,
+ start: selection.datetime.start
+ ? getUtcDateString(selection.datetime.start)
+ : undefined,
+ end: selection.datetime.end
+ ? getUtcDateString(selection.datetime.end)
+ : undefined,
+ statsPeriod: selection.datetime.period,
+ },
+ })}
+ size="xs"
+ >
+ <IconLightning size="xs" />
+ </LinkButton>
+ </Tooltip>
+ </div>
+ )}
+
+ {release && (
+ <div>
+ <Tooltip title={t('Open Release Details')}>
+ <LinkButton
+ to={{
+ pathname: `/organizations/${slug}/releases/${encodeURIComponent(
+ release
+ )}/`,
+ query: {
+ start,
+ end,
+ pageStatsPeriod: statsPeriod,
+ project,
+ environment,
+ },
+ }}
+ size="xs"
+ >
+ <IconReleases size="xs" />
+ </LinkButton>
+ </Tooltip>
+ </div>
+ )}
+ </ButtonBar>
+ </Cell>
+ )}
</Fragment>
);
})}
@@ -74,13 +154,14 @@ function getValues(seriesData: Series['data']) {
// TODO(ddm): PanelTable component proved to be a bit too opinionated for this use case,
// so we're using a custom styled component instead. Figure out what we want to do here
-const SummaryTableWrapper = styled(`div`)`
+const SummaryTableWrapper = styled(`div`)<{hasActions: boolean}>`
display: grid;
- grid-template-columns: 0.5fr 8fr 1fr 1fr 1fr 1fr;
+ grid-template-columns: ${p =>
+ p.hasActions ? '24px 8fr 1fr 1fr 1fr 1fr 1fr' : '24px 8fr 1fr 1fr 1fr 1fr'};
`;
// TODO(ddm): This is a copy of PanelTableHeader, try to figure out how to reuse it
-const HeaderCell = styled('div')`
+const HeaderCell = styled('div')<{right?: boolean}>`
color: ${p => p.theme.subText};
font-size: ${p => p.theme.fontSizeSmall};
font-weight: 600;
@@ -90,28 +171,33 @@ const HeaderCell = styled('div')`
display: flex;
flex-direction: column;
justify-content: center;
-
- padding: ${space(0.5)};
-`;
-
-const Cell = styled('div')`
- padding: ${space(0.25)};
-
- :hover {
- cursor: ${p => (p.onClick ? 'pointer' : 'default')};
- }
+ text-align: ${p => (p.right ? 'right' : 'left')};
+ padding: ${space(0.5)} ${space(1)};
`;
-const FlexCell = styled(Cell)`
+const Cell = styled('div')<{right?: boolean}>`
display: flex;
- justify-content: center;
+ padding: ${space(0.25)} ${space(1)};
align-items: center;
- opacity: ${p => (p.hidden ? 0.5 : 1)};
+ justify-content: ${p => (p.right ? 'flex-end' : 'flex-start')};
`;
-const ColorDot = styled(`div`)`
- background-color: ${p => p.color};
+const ColorDot = styled(`div`)<{color: string; hiddenn: boolean}>`
+ background-color: ${p =>
+ colorFn(p.color)
+ .alpha(p.hiddenn ? 0.3 : 1)
+ .string()};
border-radius: 50%;
width: ${space(1)};
height: ${space(1)};
`;
+
+const CellWrapper = styled('div')`
+ display: contents;
+ &:hover {
+ cursor: pointer;
+ ${Cell} {
+ background-color: ${p => p.theme.bodyBackground};
+ }
+ }
+`;
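The explorer swaps the hiddenSeries list for a single focusedSeries URL parameter: clicking a legend row focuses that series, clicking it again clears the focus, and other series are dimmed via alpha rather than removed. The toggle itself reduces to the following (illustrative sketch):

    from typing import Optional

    def toggle_focus(focused: Optional[str], clicked: str) -> Optional[str]:
        # Click the focused series -> clear; click any other -> move focus.
        return None if focused == clicked else clicked

    state = None
    state = toggle_focus(state, "p75(duration)")  # focus gained
    state = toggle_focus(state, "p75(duration)")  # focus cleared
    assert state is None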
|
65ca3ed3a71fdf22e2574abb9e55b9c05f314ad0
|
2023-04-27 05:00:11
|
Evan Purkhiser
|
style(js): Better comments for queryClient functions (#48036)
| false
|
Better comments for queryClient functions (#48036)
|
style
|
diff --git a/static/app/utils/queryClient.tsx b/static/app/utils/queryClient.tsx
index 4d07454c1e4dc0..73dc3b6da0642a 100644
--- a/static/app/utils/queryClient.tsx
+++ b/static/app/utils/queryClient.tsx
@@ -118,6 +118,11 @@ function useApiQuery<TResponseData, TError = RequestError>(
};
}
+/**
+ * Wraps React Query's queryClient.getQueryData to return only the cached API
+ * response data. This does not include the ApiResult type. For that you can
+ * manually call queryClient.getQueryData.
+ */
export function getApiQueryData<TResponseData>(
queryClient: reactQuery.QueryClient,
queryKey: ApiQueryKey
@@ -125,6 +130,10 @@ export function getApiQueryData<TResponseData>(
return queryClient.getQueryData<ApiResult<TResponseData>>(queryKey)?.[0];
}
+/**
+ * Wraps React Query's queryClient.setQueryData to allow setting of API
+ * response data without needing to provide a request object.
+ */
function setApiQueryData<TResponseData>(
queryClient: reactQuery.QueryClient,
queryKey: ApiQueryKey,
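The new comments describe thin wrappers over React Query's cache: getApiQueryData indexes into the cached ApiResult so callers receive only the response body. A sketch of that unwrapping, assuming ApiResult is a tuple whose first element is the data (only that first-element convention is visible in the diff):

    from typing import Optional, Tuple

    ApiResult = Tuple[dict, str, object]  # (data, ...) -- only slot 0 is assumed here

    _query_cache = {}

    def get_api_query_data(query_key: tuple) -> Optional[dict]:
        # Return just the cached response data; callers needing the full
        # ApiResult read the cache entry directly instead.
        entry = _query_cache.get(query_key)
        return entry[0] if entry is not None else None

    _query_cache[("/projects/",)] = ({"id": 1}, "200 OK", object())
    assert get_api_query_data(("/projects/",)) == {"id": 1}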
|
20d815fb08b26ee41bbac785bdccddfed2da7ef1
|
2022-01-04 22:17:32
|
Stephen Cefali
|
fix(aws-lambda): aws lambda cant finish installation (#30826)
| false
|
aws lambda cant finish installation (#30826)
|
fix
|
diff --git a/src/sentry/integrations/aws_lambda/integration.py b/src/sentry/integrations/aws_lambda/integration.py
index 5ed66fe51cf711..8e60f4cd305d47 100644
--- a/src/sentry/integrations/aws_lambda/integration.py
+++ b/src/sentry/integrations/aws_lambda/integration.py
@@ -18,7 +18,6 @@
from sentry.integrations.serverless import ServerlessMixin
from sentry.models import OrganizationIntegration, Project, ProjectStatus
from sentry.pipeline import PipelineView
-from sentry.utils import json
from sentry.utils.compat import map
from sentry.utils.sdk import capture_exception
@@ -316,9 +315,11 @@ def render_response(error=None):
class AwsLambdaListFunctionsPipelineView(PipelineView):
def dispatch(self, request: Request, pipeline) -> Response:
if request.method == "POST":
- # accept form data or json data
- # form data is needed for tests
- data = request.POST or json.loads(request.body)
+ raw_data = request.POST
+ data = {}
+ for key, val in raw_data.items():
+ # form posts have string values for booleans and this form only sends booleans
+ data[key] = val == "true"
pipeline.bind_state("enabled_lambdas", data)
return pipeline.next_step()
diff --git a/static/app/views/integrationPipeline/awsLambdaFunctionSelect.tsx b/static/app/views/integrationPipeline/awsLambdaFunctionSelect.tsx
index 99becf6c259477..d50fbcab60137c 100644
--- a/static/app/views/integrationPipeline/awsLambdaFunctionSelect.tsx
+++ b/static/app/views/integrationPipeline/awsLambdaFunctionSelect.tsx
@@ -39,12 +39,11 @@ export default class AwsLambdaFunctionSelect extends Component<Props, State> {
super(props);
makeObservable(this, {allStatesToggled: computed});
}
-
state: State = {
submitting: false,
};
- model = new FormModel({apiOptions: {baseUrl: window.location.origin}});
+ model = new FormModel();
get initialData() {
const {lambdaFunctions} = this.props;
@@ -72,8 +71,12 @@ export default class AwsLambdaFunctionSelect extends Component<Props, State> {
return Object.values(this.model.getData()).every(val => val);
}
+ get formFields() {
+ const data = this.model.getTransformedData();
+ return Object.entries(data).map(([name, value]) => ({name, value}));
+ }
+
handleSubmit = () => {
- this.model.saveForm();
this.setState({submitting: true});
};
@@ -155,10 +158,10 @@ export default class AwsLambdaFunctionSelect extends Component<Props, State> {
{t('Decide which functions you would like to enable for Sentry monitoring')}
<StyledForm
initialData={this.initialData}
- skipPreventDefault
model={this.model}
apiEndpoint="/extensions/aws_lambda/setup/"
hideFooter
+ preventFormResetOnUnmount
>
<JsonForm renderHeader={() => FormHeader} forms={[formFields]} />
</StyledForm>
@@ -178,9 +181,16 @@ export default class AwsLambdaFunctionSelect extends Component<Props, State> {
<Observer>
{() => (
<FooterWithButtons
+ formProps={{
+ action: '/extensions/aws_lambda/setup/',
+ method: 'post',
+ onSubmit: this.handleSubmit,
+ }}
+ formFields={this.formFields}
buttonText={t('Finish Setup')}
- onClick={this.handleSubmit}
- disabled={this.model.isError || this.model.isSaving}
+ disabled={
+ this.model.isError || this.model.isSaving || this.state.submitting
+ }
/>
)}
</Observer>
diff --git a/static/app/views/integrationPipeline/components/footerWithButtons.tsx b/static/app/views/integrationPipeline/components/footerWithButtons.tsx
index 19bdf56f1b380b..22a91cf7afea05 100644
--- a/static/app/views/integrationPipeline/components/footerWithButtons.tsx
+++ b/static/app/views/integrationPipeline/components/footerWithButtons.tsx
@@ -4,13 +4,28 @@ import styled from '@emotion/styled';
import Button from 'sentry/components/actions/button';
import space from 'sentry/styles/space';
-type Props = {buttonText: string} & Partial<
- Pick<React.ComponentProps<typeof Button>, 'disabled' | 'onClick' | 'href'>
->;
+type Props = {
+ buttonText: string;
+ formProps?: Omit<React.HTMLProps<HTMLFormElement>, 'as'>;
+ formFields?: Array<{name: string; value: any}>;
+} & Partial<Pick<React.ComponentProps<typeof Button>, 'disabled' | 'onClick' | 'href'>>;
-export default function FooterWithButtons({buttonText, ...rest}: Props) {
+export default function FooterWithButtons({
+ buttonText,
+ formFields,
+ formProps,
+ ...rest
+}: Props) {
+ /**
+ * We use a form post here to replicate what we do with standard HTML views for the integration pipeline.
+ * Since this is a form post, we need to pass a hidden replica of the form inputs
+ * so we can submit this form instead of the one collecting the user inputs.
+ */
return (
- <Footer>
+ <Footer data-test-id="aws-lambda-footer-form" {...formProps}>
+ {formFields?.map(field => {
+ return <input type="hidden" key={field.name} {...field} />;
+ })}
<Button priority="primary" type="submit" size="xsmall" {...rest}>
{buttonText}
</Button>
diff --git a/static/app/views/settings/components/forms/form.tsx b/static/app/views/settings/components/forms/form.tsx
index 9330abf56b8734..4caf591a781dba 100644
--- a/static/app/views/settings/components/forms/form.tsx
+++ b/static/app/views/settings/components/forms/form.tsx
@@ -82,6 +82,10 @@ type Props = {
*/
onSubmit?: OnSubmitCallback;
onPreSubmit?: () => void;
+ /**
+ * Ensure the form model isn't reset when the form unmounts
+ */
+ preventFormResetOnUnmount?: boolean;
} & Pick<FormOptions, 'onSubmitSuccess' | 'onSubmitError' | 'onFieldChange'>;
export default class Form extends React.Component<Props> {
@@ -113,7 +117,7 @@ export default class Form extends React.Component<Props> {
}
componentWillUnmount() {
- this.model.reset();
+ !this.props.preventFormResetOnUnmount && this.model.reset();
}
model: FormModel = this.props.model || new FormModel();
diff --git a/static/app/views/settings/components/forms/jsonForm.tsx b/static/app/views/settings/components/forms/jsonForm.tsx
index 78f0d64fa33bd1..61b854ff74b8b6 100644
--- a/static/app/views/settings/components/forms/jsonForm.tsx
+++ b/static/app/views/settings/components/forms/jsonForm.tsx
@@ -44,7 +44,7 @@ class JsonForm extends React.Component<Props, State> {
}
UNSAFE_componentWillReceiveProps(nextProps: Props) {
- if (this.props.location.hash !== nextProps.location.hash) {
+ if (nextProps.location && this.props.location.hash !== nextProps.location.hash) {
const hash = nextProps.location.hash;
this.scrollToHash(hash);
this.setState({highlighted: hash});
diff --git a/tests/js/spec/views/integrationPipeline/awsLambdaFunctionSelect.spec.jsx b/tests/js/spec/views/integrationPipeline/awsLambdaFunctionSelect.spec.jsx
index 89d8c4e8548ede..44532a703ac2d9 100644
--- a/tests/js/spec/views/integrationPipeline/awsLambdaFunctionSelect.spec.jsx
+++ b/tests/js/spec/views/integrationPipeline/awsLambdaFunctionSelect.spec.jsx
@@ -1,36 +1,23 @@
-import {mountWithTheme} from 'sentry-test/enzyme';
+import {mountWithTheme, screen} from 'sentry-test/reactTestingLibrary';
-import {Client} from 'sentry/api';
import AwsLambdaFunctionSelect from 'sentry/views/integrationPipeline/awsLambdaFunctionSelect';
describe('AwsLambdaFunctionSelect', () => {
- let wrapper;
- let lambdaFunctions;
- let mockRequest;
+ let lambdaFunctions, container;
beforeEach(() => {
- mockRequest = Client.addMockResponse({
- url: '/extensions/aws_lambda/setup/',
- body: {},
- });
-
lambdaFunctions = [
{FunctionName: 'lambdaA', Runtime: 'nodejs12.x'},
{FunctionName: 'lambdaB', Runtime: 'nodejs10.x'},
{FunctionName: 'lambdaC', Runtime: 'nodejs10.x'},
];
- wrapper = mountWithTheme(
+ ({container} = mountWithTheme(
<AwsLambdaFunctionSelect lambdaFunctions={lambdaFunctions} />
- );
+ ));
});
it('choose lambdas', () => {
- wrapper.find('button[name="lambdaB"]').simulate('click');
- wrapper.find('StyledButton[aria-label="Finish Setup"]').simulate('click');
-
- expect(mockRequest).toHaveBeenCalledWith(
- '/extensions/aws_lambda/setup/',
- expect.objectContaining({
- data: {lambdaA: true, lambdaB: false, lambdaC: true},
- })
- );
+ expect(container).toSnapshot();
+ expect(screen.getByLabelText('lambdaB')).toBeInTheDocument();
+ expect(screen.getByLabelText('Finish Setup')).toBeInTheDocument();
+ // TODO: add assertion for form post
});
});
diff --git a/tests/sentry/integrations/aws_lambda/test_integration.py b/tests/sentry/integrations/aws_lambda/test_integration.py
index 33186bc240f3ea..86dbbcd535a062 100644
--- a/tests/sentry/integrations/aws_lambda/test_integration.py
+++ b/tests/sentry/integrations/aws_lambda/test_integration.py
@@ -180,7 +180,7 @@ def test_lambda_setup_layer_success(self, mock_gen_aws_client, mock_get_supporte
# string instead of boolean
resp = self.client.post(
self.setup_path,
- data={"lambdaB": True},
+ data={"lambdaB": "true", "lambdaA": "false"},
format="json",
HTTP_ACCEPT="application/json",
headers={"Content-Type": "application/json", "Accept": "application/json"},
@@ -244,7 +244,7 @@ def test_python_lambda_setup_layer_success(
resp = self.client.post(
self.setup_path,
- data={"lambdaA": True},
+ data={"lambdaA": "true"},
format="json",
HTTP_ACCEPT="application/json",
headers={"Content-Type": "application/json", "Accept": "application/json"},
@@ -320,7 +320,7 @@ class MockException(Exception):
resp = self.client.post(
self.setup_path,
- {"lambdaB": True},
+ {"lambdaB": "true"},
format="json",
HTTP_ACCEPT="application/json",
headers={"Content-Type": "application/json", "Accept": "application/json"},
@@ -378,7 +378,7 @@ class MockException(Exception):
resp = self.client.post(
self.setup_path,
- {"lambdaB": True},
+ {"lambdaB": "true"},
format="json",
HTTP_ACCEPT="application/json",
headers={"Content-Type": "application/json", "Accept": "application/json"},
@@ -437,7 +437,7 @@ class MockException(Exception):
resp = self.client.post(
self.setup_path,
- {"lambdaB": True},
+ {"lambdaB": "true"},
format="json",
HTTP_ACCEPT="application/json",
headers={"Content-Type": "application/json", "Accept": "application/json"},
@@ -501,7 +501,7 @@ class MockException(Exception):
resp = self.client.post(
self.setup_path,
- {"lambdaB": True},
+ {"lambdaB": "true"},
format="json",
HTTP_ACCEPT="application/json",
headers={"Content-Type": "application/json", "Accept": "application/json"},
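Two halves make the installation finish: the frontend mirrors the collected inputs as hidden form fields so a plain HTML form post can submit them, and the backend stops accepting JSON and instead coerces the string booleans that a form post produces. The coercion is the whole trick (runnable sketch of the dispatch change above):

    def coerce_form_booleans(raw_data: dict) -> dict:
        # Form posts serialize booleans as the strings "true"/"false", and
        # this form only ever sends booleans, so compare against "true".
        return {key: val == "true" for key, val in raw_data.items()}

    assert coerce_form_booleans({"lambdaA": "true", "lambdaB": "false"}) == {
        "lambdaA": True,
        "lambdaB": False,
    }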
|
f18a2cd23cae3a8de290ba0376f01e652ca0f8c3
|
2023-10-16 23:46:07
|
Abdkhan14
|
fix(performance-quick-trace-query): Using Sentry.captureException to inspect trace endpoint response. (#58167)
| false
|
Using Sentry.captureException to inspect trace endpoint response. (#58167)
|
fix
|
diff --git a/static/app/utils/performance/quickTrace/quickTraceQuery.tsx b/static/app/utils/performance/quickTrace/quickTraceQuery.tsx
index 2a731ab8a8b08e..30490dd05d68b3 100644
--- a/static/app/utils/performance/quickTrace/quickTraceQuery.tsx
+++ b/static/app/utils/performance/quickTrace/quickTraceQuery.tsx
@@ -1,4 +1,5 @@
import {Fragment} from 'react';
+import * as Sentry from '@sentry/react';
import {Event} from 'sentry/types/event';
import {DiscoverQueryProps} from 'sentry/utils/discover/genericDiscoverQuery';
@@ -73,6 +74,10 @@ export default function QuickTraceQuery({children, event, ...props}: QueryProps)
organization
);
+ const scope = new Sentry.Scope();
+ const traceErrorMsg = 'Trace endpoints returning non-array in response';
+ scope.setFingerprint([traceErrorMsg]);
+
if (
!traceFullResults.isLoading &&
traceFullResults.error === null &&
@@ -88,19 +93,31 @@ export default function QuickTraceQuery({children, event, ...props}: QueryProps)
});
}
- for (const subtrace of transactions ??
- (traceFullResults.traces as TraceFull[])) {
- try {
- const trace = flattenRelevantPaths(event, subtrace);
- return children({
- ...traceFullResults,
- trace,
- currentEvent: trace.find(e => isCurrentEvent(e, event)) ?? null,
- });
- } catch {
- // let this fall through and check the next subtrace
- // or use the trace lite results
+ const traceTransactions =
+ transactions ?? (traceFullResults.traces as TraceFull[]);
+
+ try {
+ for (const subtrace of traceTransactions) {
+ try {
+ const trace = flattenRelevantPaths(event, subtrace);
+ return children({
+ ...traceFullResults,
+ trace,
+ currentEvent: trace.find(e => isCurrentEvent(e, event)) ?? null,
+ });
+ } catch {
+ // let this fall through and check the next subtrace
+ // or use the trace lite results
+ }
}
+ } catch {
+ // capture exception and let this fall through to
+ // use the /events-trace-lite/ response below
+ scope.setExtras({
+ traceTransactions,
+ traceFullResults,
+ });
+ Sentry.captureException(new Error(traceErrorMsg), scope);
}
}
@@ -118,14 +135,25 @@ export default function QuickTraceQuery({children, event, ...props}: QueryProps)
? orphanErrorsLite[0]
: undefined;
const traceTransactions = transactionLite ?? (trace as TraceLite);
+
+ let traceTransaction: EventLite | undefined;
+ try {
+ traceTransaction = traceTransactions.find(e => isCurrentEvent(e, event));
+ } catch {
+ scope.setExtras({
+ traceTransaction,
+ orphanError,
+ traceTransactions,
+ trace,
+ });
+ Sentry.captureException(new Error(traceErrorMsg), scope);
+ }
+
return children({
...traceLiteResults,
- trace: traceTransactions,
+ trace: Array.isArray(traceTransactions) ? traceTransactions : [],
orphanErrors: orphanErrorsLite,
- currentEvent:
- orphanError ??
- traceTransactions.find(e => isCurrentEvent(e, event)) ??
- null,
+ currentEvent: orphanError ?? traceTransaction ?? null,
});
}
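The capture pattern here is worth noting: pin a fingerprint so every occurrence groups into one Sentry issue, attach the offending payload as extras, then capture a synthetic error. A rough Python-SDK equivalent, as a sketch (the commit itself uses the JavaScript SDK; the sentry_sdk calls below are the SDK's older push_scope API):

    import sentry_sdk

    TRACE_ERROR_MSG = "Trace endpoints returning non-array in response"

    def report_malformed_trace(trace_payload) -> None:
        with sentry_sdk.push_scope() as scope:
            # One fixed fingerprint => all reports group under one issue.
            scope.fingerprint = [TRACE_ERROR_MSG]
            scope.set_extra("payload_type", type(trace_payload).__name__)
            scope.set_extra("payload_sample", repr(trace_payload)[:200])
            sentry_sdk.capture_exception(Exception(TRACE_ERROR_MSG))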
|
37e0daf28a29de3addbff1a1e6fe5616ba893f0b
|
2019-09-10 04:37:42
|
Nola Chen
|
fix(app-platform): Scroll to error field in Sentry App form (#14623)
| false
|
Scroll to error field in Sentry App form (#14623)
|
fix
|
diff --git a/src/sentry/static/sentry/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx b/src/sentry/static/sentry/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx
index ba95531756ba75..c9121dc7836e8c 100644
--- a/src/sentry/static/sentry/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx
+++ b/src/sentry/static/sentry/app/views/settings/organizationDeveloperSettings/sentryApplicationDetails.tsx
@@ -2,6 +2,7 @@ import React from 'react';
import {browserHistory} from 'react-router';
import {Observer} from 'mobx-react';
import _ from 'lodash';
+import scrollToElement from 'scroll-to-element';
import {addSuccessMessage, addErrorMessage} from 'app/actionCreators/indicator';
import {Panel, PanelItem, PanelBody, PanelHeader} from 'app/components/panels';
@@ -121,6 +122,16 @@ export default class SentryApplicationDetails extends AsyncView<Props, State> {
errorMessage = _.get(err, 'responseJSON.detail', errorMessage);
}
addErrorMessage(t(errorMessage));
+
+ if (this.form.formErrors) {
+ const firstErrorFieldId = Object.keys(this.form.formErrors)[0];
+
+ if (firstErrorFieldId) {
+ scrollToElement(`#${firstErrorFieldId}`, {
+ align: 'middle',
+ });
+ }
+ }
};
get isInternal() {
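The handler walks formErrors in insertion order and scrolls the first failing field into view. The selection logic is trivial but easy to get wrong when the error map is empty; sketched generically (Python for illustration, the scroll itself is a DOM concern handled by scroll-to-element):

    def first_error_field(form_errors: dict):
        # formErrors maps field id -> message; None when nothing failed.
        return next(iter(form_errors), None)

    errors = {"webhookUrl": "Enter a valid URL.", "name": "This field is required."}
    field_id = first_error_field(errors)
    if field_id is not None:
        print(f"scroll to #{field_id}")  # stand-in for the scrollToElement call
    assert first_error_field({}) is None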
|
2bee745db5e4579465c0e2e0c6c0a5917592dd72
|
2022-09-16 00:04:15
|
Stephen Cefali
|
ref(performance-issues): move performance issue generation logic to testutils (#38899)
| false
|
move performance issue generation logic to testutils (#38899)
|
ref
|
diff --git a/src/sentry/testutils/perfomance_issues/__init__.py b/src/sentry/testutils/perfomance_issues/__init__.py
new file mode 100644
index 00000000000000..e69de29bb2d1d6
diff --git a/src/sentry/testutils/perfomance_issues/event_generators.py b/src/sentry/testutils/perfomance_issues/event_generators.py
new file mode 100644
index 00000000000000..1b6540d973f8ba
--- /dev/null
+++ b/src/sentry/testutils/perfomance_issues/event_generators.py
@@ -0,0 +1,46 @@
+import os
+
+from sentry.utils import json
+from tests.sentry.spans.grouping.test_strategy import SpanBuilder
+
+_fixture_path = os.path.join(os.path.dirname(__file__), "events")
+
+EVENTS = {}
+PROJECT_ID = 1
+
+for filename in os.listdir(_fixture_path):
+ if not filename.endswith(".json"):
+ continue
+
+ [event_name, _extension] = filename.split(".")
+
+ with open(os.path.join(_fixture_path, filename)) as f:
+ event = json.load(f)
+ event["project"] = PROJECT_ID
+ EVENTS[event_name] = event
+
+
+# Duration is in ms
+def modify_span_duration(obj, duration):
+ obj["start_timestamp"] = 0.0
+ obj["timestamp"] = duration / 1000.0
+ return obj
+
+
+# Start is in ms
+def modify_span_start(obj, start):
+ duration = obj["timestamp"] - obj["start_timestamp"]
+ obj["start_timestamp"] = start / 1000.0
+ obj["timestamp"] = obj["start_timestamp"] + duration
+ return obj
+
+
+def create_span(op, duration=100.0, desc="SELECT count() FROM table WHERE id = %s", hash=""):
+ return modify_span_duration(
+ SpanBuilder().with_op(op).with_description(desc).with_hash(hash).build(),
+ duration,
+ )
+
+
+def create_event(spans, event_id="a" * 16):
+ return {"event_id": event_id, "project": PROJECT_ID, "spans": spans}
diff --git a/tests/sentry/utils/performance_issues/events/fast-n-plus-one-in-django-new-view.json b/src/sentry/testutils/perfomance_issues/events/fast-n-plus-one-in-django-new-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/fast-n-plus-one-in-django-new-view.json
rename to src/sentry/testutils/perfomance_issues/events/fast-n-plus-one-in-django-new-view.json
diff --git a/tests/sentry/utils/performance_issues/events/n-plus-one-in-django-index-view-activerecord.json b/src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-index-view-activerecord.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/n-plus-one-in-django-index-view-activerecord.json
rename to src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-index-view-activerecord.json
diff --git a/tests/sentry/utils/performance_issues/events/n-plus-one-in-django-index-view.json b/src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-index-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/n-plus-one-in-django-index-view.json
rename to src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-index-view.json
diff --git a/tests/sentry/utils/performance_issues/events/n-plus-one-in-django-new-view-truncated-source.json b/src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-new-view-truncated-source.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/n-plus-one-in-django-new-view-truncated-source.json
rename to src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-new-view-truncated-source.json
diff --git a/tests/sentry/utils/performance_issues/events/n-plus-one-in-django-new-view.json b/src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-new-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/n-plus-one-in-django-new-view.json
rename to src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-new-view.json
diff --git a/tests/sentry/utils/performance_issues/events/n-plus-one-in-django-with-odd-db-sources.json b/src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-with-odd-db-sources.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/n-plus-one-in-django-with-odd-db-sources.json
rename to src/sentry/testutils/perfomance_issues/events/n-plus-one-in-django-with-odd-db-sources.json
diff --git a/tests/sentry/utils/performance_issues/events/n-plus-one-in-rails-index-view.json b/src/sentry/testutils/perfomance_issues/events/n-plus-one-in-rails-index-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/n-plus-one-in-rails-index-view.json
rename to src/sentry/testutils/perfomance_issues/events/n-plus-one-in-rails-index-view.json
diff --git a/tests/sentry/utils/performance_issues/events/no-issue-in-django-detail-view.json b/src/sentry/testutils/perfomance_issues/events/no-issue-in-django-detail-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/no-issue-in-django-detail-view.json
rename to src/sentry/testutils/perfomance_issues/events/no-issue-in-django-detail-view.json
diff --git a/tests/sentry/utils/performance_issues/events/query-waterfall-in-django-random-view.json b/src/sentry/testutils/perfomance_issues/events/query-waterfall-in-django-random-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/query-waterfall-in-django-random-view.json
rename to src/sentry/testutils/perfomance_issues/events/query-waterfall-in-django-random-view.json
diff --git a/tests/sentry/utils/performance_issues/events/solved-n-plus-one-in-django-index-view.json b/src/sentry/testutils/perfomance_issues/events/solved-n-plus-one-in-django-index-view.json
similarity index 100%
rename from tests/sentry/utils/performance_issues/events/solved-n-plus-one-in-django-index-view.json
rename to src/sentry/testutils/perfomance_issues/events/solved-n-plus-one-in-django-index-view.json
diff --git a/tests/sentry/utils/performance_issues/test_performance_detection.py b/tests/sentry/utils/performance_issues/test_performance_detection.py
index 3f8e497486923a..1d3c653f6feea6 100644
--- a/tests/sentry/utils/performance_issues/test_performance_detection.py
+++ b/tests/sentry/utils/performance_issues/test_performance_detection.py
@@ -1,4 +1,3 @@
-import os
import unittest
from unittest.mock import Mock, call, patch
@@ -6,9 +5,15 @@
from sentry.eventstore.models import Event
from sentry.testutils import TestCase
from sentry.testutils.helpers import override_options
+from sentry.testutils.perfomance_issues.event_generators import (
+ EVENTS,
+ PROJECT_ID,
+ create_event,
+ create_span,
+ modify_span_start,
+)
from sentry.testutils.silo import region_silo_test
from sentry.types.issues import GroupType
-from sentry.utils import json
from sentry.utils.performance_issues.performance_detection import (
DETECTOR_TYPE_TO_GROUP_TYPE,
DetectorType,
@@ -19,49 +24,6 @@
prepare_problem_for_grouping,
)
from sentry.utils.performance_issues.performance_span_issue import PerformanceSpanProblem
-from tests.sentry.spans.grouping.test_strategy import SpanBuilder
-
-_fixture_path = os.path.join(os.path.dirname(__file__), "events")
-
-EVENTS = {}
-PROJECT_ID = 1
-
-for filename in os.listdir(_fixture_path):
- if not filename.endswith(".json"):
- continue
-
- [event_name, _extension] = filename.split(".")
-
- with open(os.path.join(_fixture_path, filename)) as f:
- event = json.load(f)
- event["project"] = PROJECT_ID
- EVENTS[event_name] = event
-
-
-# Duration is in ms
-def modify_span_duration(obj, duration):
- obj["start_timestamp"] = 0.0
- obj["timestamp"] = duration / 1000.0
- return obj
-
-
-# Start is in ms
-def modify_span_start(obj, start):
- duration = obj["timestamp"] - obj["start_timestamp"]
- obj["start_timestamp"] = start / 1000.0
- obj["timestamp"] = obj["start_timestamp"] + duration
- return obj
-
-
-def create_span(op, duration=100.0, desc="SELECT count() FROM table WHERE id = %s", hash=""):
- return modify_span_duration(
- SpanBuilder().with_op(op).with_description(desc).with_hash(hash).build(),
- duration,
- )
-
-
-def create_event(spans, event_id="a" * 16):
- return {"event_id": event_id, "project": PROJECT_ID, "spans": spans}
def assert_n_plus_one_db_problem(perf_problems):
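With the generators in testutils, any test module can assemble a synthetic performance event without copying fixture plumbing. A usage sketch built only from the helpers defined above (assumes the relocated module is importable; the op and timings are arbitrary):

    # Three identical 100 ms db spans starting back-to-back -- the shape an
    # N+1 detector looks for.
    spans = [
        modify_span_start(create_span("db", duration=100.0), start=i * 100.0)
        for i in range(3)
    ]
    event = create_event(spans, event_id="b" * 16)
    assert event["project"] == PROJECT_ID
    assert event["spans"][1]["start_timestamp"] == 0.1  # 100 ms, stored in seconds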
|
7ab54f6616f24b3b843b7ae5d69312635218ec29
|
2021-09-14 23:59:57
|
Scott Cooper
|
feat(ui): Fix note component storybook (#28507)
| false
|
Fix note component storybook (#28507)
|
feat
|
diff --git a/docs-ui/stories/views/note.stories.js b/docs-ui/stories/views/note.stories.js
index 2d17d1972b2cf6..58844425a70309 100644
--- a/docs-ui/stories/views/note.stories.js
+++ b/docs-ui/stories/views/note.stories.js
@@ -1,35 +1,103 @@
+import {Component, useState} from 'react';
import {action} from '@storybook/addon-actions';
import Note from 'app/components/activity/note';
+import SentryTypes from 'app/sentryTypes';
import ConfigStore from 'app/stores/configStore';
+import MemberListStore from 'app/stores/memberListStore';
+import ProjectsStore from 'app/stores/projectsStore';
-// TODO(scttcper): Not working
const user = {
username: '[email protected]',
identities: [],
id: '1',
- name: '[email protected]',
+ name: 'billy',
dateJoined: '2019-03-09T06:52:42.836Z',
avatar: {avatarUuid: null, avatarType: 'letter_avatar'},
email: '[email protected]',
};
+const activity = {id: '123', data: {text: 'hello'}, dateCreated: new Date()};
+
ConfigStore.set('user', {...user, isSuperuser: true, options: {}});
+ProjectsStore.loadInitialData([
+ {
+ id: '2',
+ slug: 'project-slug',
+ name: 'Project Name',
+ hasAccess: true,
+ isMember: true,
+ isBookmarked: false,
+ teams: [
+ {
+ id: '1',
+ slug: 'team-slug',
+ name: 'Team Name',
+ isMember: true,
+ memberCount: 0,
+ },
+ ],
+ },
+]);
+MemberListStore.loadInitialData([
+ {
+ username: '[email protected]',
+ id: '2',
+ name: 'doug',
+ dateJoined: '2019-03-09T06:52:42.836Z',
+ avatar: {avatarUuid: null, avatarType: 'letter_avatar'},
+ email: '[email protected]',
+ },
+]);
+
+const organization = {
+ id: '1',
+ slug: 'org-slug',
+ access: ['project:releases'],
+};
+
+class OrganizationContext extends Component {
+ static childContextTypes = {
+ organization: SentryTypes.Organization,
+ };
+
+ getChildContext() {
+ return {organization};
+ }
+
+ render() {
+ return this.props.children;
+ }
+}
export default {
title: 'Views/Activity/Activity Note',
+ component: Note,
};
-export const Default = () => (
- <Note
- author={{name: 'Billy'}}
- item={{id: '123', data: {text: 'hello'}, user, dateCreated: new Date()}}
- group={{project: {slug: 'sentry'}}}
- onDelete={action('Deleted item')}
- sessionUser={{}}
- memberList={[]}
- />
-);
+export const Default = () => {
+ const [text, setText] = useState(activity.data.text);
+
+ return (
+ <OrganizationContext>
+ <Note
+ showTime
+ authorName={user.name}
+ user={user}
+ text={text}
+ modelId={activity.id}
+ dateCreated={activity.dateCreated}
+ projectSlugs={['project-slug']}
+ minHeight={200}
+ onUpdate={(...props) => {
+ action('Updated item', props);
+ setText(props[0].text);
+ }}
+ onDelete={action('Deleted item')}
+ />
+ </OrganizationContext>
+ );
+};
Default.storyName = 'Note';
Default.parameters = {
|
f9c57063871d8d1885ab9cb48ef99366611c89cd
|
2025-01-15 04:07:12
|
Richard Roggenkemper
|
chore(issue-details): Add tooltips to event navigation buttons (#83346)
| false
|
Add tooltips to event navigation buttons (#83346)
|
chore
|
diff --git a/static/app/views/issueDetails/streamline/issueDetailsEventNavigation.tsx b/static/app/views/issueDetails/streamline/issueDetailsEventNavigation.tsx
index 9bd052ae30678f..e9553b2b635895 100644
--- a/static/app/views/issueDetails/streamline/issueDetailsEventNavigation.tsx
+++ b/static/app/views/issueDetails/streamline/issueDetailsEventNavigation.tsx
@@ -95,7 +95,12 @@ export function IssueDetailsEventNavigation({
[EventNavOptions.RECOMMENDED]: isSmallScreen ? t('Rec.') : t('Recommended'),
[EventNavOptions.OLDEST]: t('First'),
[EventNavOptions.LATEST]: t('Last'),
- [EventNavOptions.CUSTOM]: t('Specific'),
+ };
+
+ const EventNavTooltips = {
+ [EventNavOptions.RECOMMENDED]: t('Recommended event matching filters'),
+ [EventNavOptions.OLDEST]: t('First event matching filters'),
+ [EventNavOptions.LATEST]: t('Last event matching filters'),
};
const onTabChange = (tabKey: typeof selectedOption) => {
@@ -178,7 +183,9 @@ export function IssueDetailsEventNavigation({
hidden={label === EventNavOptions.CUSTOM}
textValue={EventNavLabels[label]}
>
- {EventNavLabels[label]}
+ <Tooltip title={EventNavTooltips[label]} skipWrapper>
+ {EventNavLabels[label]}
+ </Tooltip>
</TabList.Item>
);
})}
|
7817b5cb62b777524a979b80cadae796569f733c
|
2018-01-25 02:23:58
|
ted kaemming
|
feat(rules): Add environment relationship to rules (#7010)
| false
|
Add environment relationship to rules (#7010)
|
feat
|
diff --git a/src/sentry/api/serializers/models/rule.py b/src/sentry/api/serializers/models/rule.py
index 1ef829831836e8..b9528842d62142 100644
--- a/src/sentry/api/serializers/models/rule.py
+++ b/src/sentry/api/serializers/models/rule.py
@@ -3,7 +3,7 @@
import six
from sentry.api.serializers import Serializer, register
-from sentry.models import Rule
+from sentry.models import Environment, Rule
def _generate_rule_label(project, rule, data):
@@ -19,7 +19,18 @@ def _generate_rule_label(project, rule, data):
@register(Rule)
class RuleSerializer(Serializer):
+ def get_attrs(self, item_list, user, *args, **kwargs):
+ environments = Environment.objects.in_bulk(
+ filter(None, [i.environment_id for i in item_list]),
+ )
+ return {
+ i: {
+ 'environment': environments.get(i.environment_id)
+ } for i in item_list
+ }
+
def serialize(self, obj, attrs, user):
+ environment = attrs['environment']
d = {
# XXX(dcramer): we currently serialize unsaved rule objects
# as part of the rule editor
@@ -43,5 +54,6 @@ def serialize(self, obj, attrs, user):
obj.label,
'dateCreated':
obj.date_added,
+ 'environment': environment.name if environment is not None else None,
}
return d
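The get_attrs hook above is the N+1 guard: it batches every referenced environment into a single in_bulk query before serialization, instead of one lookup per rule. The pattern in isolation, with a pure-Python stand-in for the ORM so the sketch runs anywhere (Environment rows and ids are invented for illustration):

    def in_bulk(fetch_rows, id_list):
        # Mimics Django's QuerySet.in_bulk: one batched fetch, returned as
        # an {id: object} mapping for O(1) access per serialized row.
        return {row_id: row for row_id, row in fetch_rows(id_list)}

    ROWS = {10: "production", 11: "staging"}  # stand-in Environment table

    def fetch_environments(ids):
        return [(i, ROWS[i]) for i in ids if i in ROWS]

    rule_env_ids = [10, None, 11, 10]  # environment_id is nullable
    environments = in_bulk(fetch_environments, [i for i in rule_env_ids if i])
    assert environments == {10: "production", 11: "staging"}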
diff --git a/src/sentry/api/serializers/rest_framework/rule.py b/src/sentry/api/serializers/rest_framework/rule.py
index c15452942b78a2..b71eab78a38d9e 100644
--- a/src/sentry/api/serializers/rest_framework/rule.py
+++ b/src/sentry/api/serializers/rest_framework/rule.py
@@ -4,6 +4,7 @@
from rest_framework import serializers
+from sentry.models import Environment
from sentry.rules import rules
from . import ListField
@@ -57,6 +58,7 @@ def from_native(self, data):
class RuleSerializer(serializers.Serializer):
name = serializers.CharField(max_length=64)
+ environment = serializers.CharField(max_length=64, required=False)
actionMatch = serializers.ChoiceField(
choices=(('all', 'all'), ('any', 'any'), ('none', 'none'), )
)
@@ -68,8 +70,25 @@ class RuleSerializer(serializers.Serializer):
)
frequency = serializers.IntegerField(min_value=5, max_value=60 * 24 * 30)
+ def validate_environment(self, attrs, source):
+ name = attrs.get(source)
+ if name is None:
+ return attrs
+
+ try:
+ attrs['environment'] = Environment.get_for_organization_id(
+ self.context['project'].organization_id,
+ name,
+ ).id
+ except Environment.DoesNotExist:
+ raise serializers.ValidationError(u'This environment has not been created.')
+
+ return attrs
+
def save(self, rule):
rule.project = self.context['project']
+ if self.data.get('environment'):
+ rule.environment_id = self.data['environment']
if self.data.get('name'):
rule.label = self.data['name']
if self.data.get('actionMatch'):
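`validate_environment` above converts a submitted environment name into the
stored id for the project's organization, rejecting names that were never
created. A rough sketch of that lookup-or-reject flow, with a plain dict and
exception standing in for `Environment.get_for_organization_id` and DRF's
`serializers.ValidationError` (both substitutes are assumptions, not the real
API):

class ValidationError(Exception):
    pass

# (organization_id, environment name) -> environment id; illustrative data.
KNOWN_ENVIRONMENTS = {(1, "production"): 10, (1, "staging"): 11}

def validate_environment(organization_id, attrs):
    name = attrs.get("environment")
    if name is None:
        return attrs  # optional field: nothing to resolve
    try:
        attrs["environment"] = KNOWN_ENVIRONMENTS[(organization_id, name)]
    except KeyError:
        raise ValidationError("This environment has not been created.")
    return attrs

print(validate_environment(1, {"environment": "staging"}))  # {'environment': 11}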
diff --git a/src/sentry/models/rule.py b/src/sentry/models/rule.py
index 937fb8926c3275..2c124a01b968c5 100644
--- a/src/sentry/models/rule.py
+++ b/src/sentry/models/rule.py
@@ -32,6 +32,7 @@ class Rule(Model):
DEFAULT_FREQUENCY = 30 # minutes
project = FlexibleForeignKey('sentry.Project')
+ environment_id = BoundedPositiveIntegerField(null=True)
label = models.CharField(max_length=64)
data = GzippedDictField()
status = BoundedPositiveIntegerField(
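Note that `environment_id` is stored as a nullable integer column rather than
a ForeignKey, so existing rows simply keep NULL. A tiny helper sketching the
presumed semantics (NULL leaves the rule unscoped, a concrete id restricts it
to one environment); this helper is illustrative and not part of the commit:

def rule_matches_environment(rule_environment_id, event_environment_id):
    # None (NULL) environment_id: the rule is not scoped to any environment.
    return (
        rule_environment_id is None
        or rule_environment_id == event_environment_id
    )

assert rule_matches_environment(None, 2)   # unscoped rule matches everything
assert rule_matches_environment(2, 2)
assert not rule_matches_environment(1, 2)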
diff --git a/src/sentry/south_migrations/0385_auto__add_field_rule_environment_id.py b/src/sentry/south_migrations/0385_auto__add_field_rule_environment_id.py
new file mode 100644
index 00000000000000..86ba4b1e74e41b
--- /dev/null
+++ b/src/sentry/south_migrations/0385_auto__add_field_rule_environment_id.py
@@ -0,0 +1,1058 @@
+# -*- coding: utf-8 -*-
+from south.utils import datetime_utils as datetime
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+
+class Migration(SchemaMigration):
+
+ # Flag to indicate if this migration is too risky
+ # to run online and needs to be coordinated for offline
+ is_dangerous = False
+
+ def forwards(self, orm):
+ # Adding field 'Rule.environment_id'
+ db.add_column('sentry_rule', 'environment_id',
+ self.gf('sentry.db.models.fields.bounded.BoundedPositiveIntegerField')(null=True),
+ keep_default=False)
+
+
+ def backwards(self, orm):
+ # Deleting field 'Rule.environment_id'
+ db.delete_column('sentry_rule', 'environment_id')
+
+
+ models = {
+ 'sentry.activity': {
+ 'Meta': {'object_name': 'Activity'},
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
+ },
+ 'sentry.apiapplication': {
+ 'Meta': {'object_name': 'ApiApplication'},
+ 'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'client_id': ('django.db.models.fields.CharField', [], {'default': "'2c582e8c6afd495482a41f9801b41a96e3a22006b2bf4984a960ddaa233dd51b'", 'unique': 'True', 'max_length': '64'}),
+ 'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'e0538a3839fc4f16b7e306cf5747ee21b11956105a2643d8b5648db34bbe4e22'"}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'default': "'Alive Bluebird'", 'max_length': '64', 'blank': 'True'}),
+ 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
+ 'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
+ 'redirect_uris': ('django.db.models.fields.TextField', [], {}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
+ 'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
+ },
+ 'sentry.apiauthorization': {
+ 'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'},
+ 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.apigrant': {
+ 'Meta': {'object_name': 'ApiGrant'},
+ 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}),
+ 'code': ('django.db.models.fields.CharField', [], {'default': "'fb9b2724e1aa47c290ab11c36a98f20f'", 'max_length': '64', 'db_index': 'True'}),
+ 'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 1, 24, 0, 0)', 'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.apikey': {
+ 'Meta': {'object_name': 'ApiKey'},
+ 'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
+ 'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
+ 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
+ },
+ 'sentry.apitoken': {
+ 'Meta': {'object_name': 'ApiToken'},
+ 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 2, 23, 0, 0)', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'refresh_token': ('django.db.models.fields.CharField', [], {'default': "'1a06d59dd5814c898dc052a4e723481d4848a2c07b3c41e59255dba0a79f7b72'", 'max_length': '64', 'unique': 'True', 'null': 'True'}),
+ 'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
+ 'token': ('django.db.models.fields.CharField', [], {'default': "'fd8c0cf785b744f6ab1ffeab9f09e3f04c33287e4eaa4361b82cea911e6fb0b9'", 'unique': 'True', 'max_length': '64'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.auditlogentry': {
+ 'Meta': {'object_name': 'AuditLogEntry'},
+ 'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
+ 'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
+ 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
+ },
+ 'sentry.authenticator': {
+ 'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'"},
+ 'config': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {}),
+ 'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
+ 'last_used_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.authidentity': {
+ 'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
+ 'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
+ 'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.authprovider': {
+ 'Meta': {'object_name': 'AuthProvider'},
+ 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
+ 'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
+ 'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
+ },
+ 'sentry.broadcast': {
+ 'Meta': {'object_name': 'Broadcast'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'date_expires': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 1, 31, 0, 0)', 'null': 'True', 'blank': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
+ 'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
+ 'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'upstream_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
+ },
+ 'sentry.broadcastseen': {
+ 'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
+ 'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
+ 'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.commit': {
+ 'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)"},
+ 'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
+ },
+ 'sentry.commitauthor': {
+ 'Meta': {'unique_together': "(('organization_id', 'email'), ('organization_id', 'external_id'))", 'object_name': 'CommitAuthor'},
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '164', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
+ },
+ 'sentry.commitfilechange': {
+ 'Meta': {'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange'},
+ 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
+ 'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'type': ('django.db.models.fields.CharField', [], {'max_length': '1'})
+ },
+ 'sentry.counter': {
+ 'Meta': {'object_name': 'Counter', 'db_table': "'sentry_projectcounter'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'unique': 'True'}),
+ 'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
+ },
+ 'sentry.deletedorganization': {
+ 'Meta': {'object_name': 'DeletedOrganization'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
+ 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
+ },
+ 'sentry.deletedproject': {
+ 'Meta': {'object_name': 'DeletedProject'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
+ 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
+ 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
+ },
+ 'sentry.deletedteam': {
+ 'Meta': {'object_name': 'DeletedTeam'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'actor_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
+ 'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'date_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'date_deleted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'organization_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'organization_slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
+ 'reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
+ 'slug': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
+ },
+ 'sentry.deploy': {
+ 'Meta': {'object_name': 'Deploy'},
+ 'date_finished': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
+ 'notified': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
+ 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
+ },
+ 'sentry.distribution': {
+ 'Meta': {'unique_together': "(('release', 'name'),)", 'object_name': 'Distribution'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
+ },
+ 'sentry.dsymapp': {
+ 'Meta': {'unique_together': "(('project', 'platform', 'app_id'),)", 'object_name': 'DSymApp'},
+ 'app_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'platform': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'sync_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'})
+ },
+ 'sentry.email': {
+ 'Meta': {'object_name': 'Email'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('sentry.db.models.fields.citext.CIEmailField', [], {'unique': 'True', 'max_length': '75'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
+ },
+ 'sentry.environment': {
+ 'Meta': {'unique_together': "(('project_id', 'name'), ('organization_id', 'name'))", 'object_name': 'Environment'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False'})
+ },
+ 'sentry.environmentproject': {
+ 'Meta': {'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject'},
+ 'environment': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Environment']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
+ },
+ 'sentry.event': {
+ 'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)"},
+ 'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'message': ('django.db.models.fields.TextField', [], {}),
+ 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
+ 'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
+ },
+ 'sentry.eventmapping': {
+ 'Meta': {'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
+ },
+ 'sentry.eventprocessingissue': {
+ 'Meta': {'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue'},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'processing_issue': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProcessingIssue']"}),
+ 'raw_event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.RawEvent']"})
+ },
+ 'sentry.eventtag': {
+ 'Meta': {'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('group_id', 'key_id', 'value_id'),)"},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
+ 'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
+ },
+ 'sentry.eventuser': {
+ 'Meta': {'unique_together': "(('project_id', 'ident'), ('project_id', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project_id', 'email'), ('project_id', 'username'), ('project_id', 'ip_address'))"},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
+ 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
+ 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
+ },
+ 'sentry.featureadoption': {
+ 'Meta': {'unique_together': "(('organization', 'feature_id'),)", 'object_name': 'FeatureAdoption'},
+ 'applicable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'feature_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
+ },
+ 'sentry.file': {
+ 'Meta': {'object_name': 'File'},
+ 'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']"}),
+ 'blobs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False'}),
+ 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
+ 'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
+ },
+ 'sentry.fileblob': {
+ 'Meta': {'object_name': 'FileBlob'},
+ 'checksum': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'})
+ },
+ 'sentry.fileblobindex': {
+ 'Meta': {'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex'},
+ 'blob': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.FileBlob']"}),
+ 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
+ },
+ 'sentry.group': {
+ 'Meta': {'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
+ 'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
+ 'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
+ 'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
+ 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
+ 'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.TextField', [], {}),
+ 'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
+ 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
+ 'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
+ 'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
+ 'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
+ 'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
+ 'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
+ 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
+ },
+ 'sentry.groupassignee': {
+ 'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
+ },
+ 'sentry.groupbookmark': {
+ 'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
+ },
+ 'sentry.groupcommitresolution': {
+ 'Meta': {'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution'},
+ 'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
+ },
+ 'sentry.groupemailthread': {
+ 'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
+ 'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
+ },
+ 'sentry.groupenvironment': {
+ 'Meta': {'unique_together': "[('group_id', 'environment_id')]", 'object_name': 'GroupEnvironment'},
+ 'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
+ },
+ 'sentry.grouphash': {
+ 'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
+ 'group_tombstone_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
+ 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
+ 'state': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
+ },
+ 'sentry.grouplink': {
+ 'Meta': {'unique_together': "(('group_id', 'linked_type', 'linked_id'),)", 'object_name': 'GroupLink'},
+ 'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'linked_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}),
+ 'linked_type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
+ 'relationship': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '2'})
+ },
+ 'sentry.groupmeta': {
+ 'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'value': ('django.db.models.fields.TextField', [], {})
+ },
+ 'sentry.groupredirect': {
+ 'Meta': {'object_name': 'GroupRedirect'},
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'unique': 'True'})
+ },
+ 'sentry.grouprelease': {
+ 'Meta': {'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease'},
+ 'environment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64'}),
+ 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
+ },
+ 'sentry.groupresolution': {
+ 'Meta': {'object_name': 'GroupResolution'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
+ },
+ 'sentry.grouprulestatus': {
+ 'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
+ 'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
+ },
+ 'sentry.groupseen': {
+ 'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
+ },
+ 'sentry.groupshare': {
+ 'Meta': {'object_name': 'GroupShare'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
+ 'uuid': ('django.db.models.fields.CharField', [], {'default': "'3fd54fbe15f24358811b9fb0ae176dc0'", 'unique': 'True', 'max_length': '32'})
+ },
+ 'sentry.groupsnooze': {
+ 'Meta': {'object_name': 'GroupSnooze'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'unique': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'state': ('jsonfield.fields.JSONField', [], {'null': 'True'}),
+ 'until': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'user_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'user_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
+ },
+ 'sentry.groupsubscription': {
+ 'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Group']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'subscription_set'", 'to': "orm['sentry.Project']"}),
+ 'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.grouptagkey': {
+ 'Meta': {'unique_together': "(('project_id', 'group_id', 'key'),)", 'object_name': 'GroupTagKey'},
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
+ 'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
+ },
+ 'sentry.grouptagvalue': {
+ 'Meta': {'unique_together': "(('group_id', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project_id', 'key', 'value', 'last_seen'),)"},
+ 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
+ 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
+ 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
+ },
+ 'sentry.grouptombstone': {
+ 'Meta': {'object_name': 'GroupTombstone'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'blank': 'True'}),
+ 'message': ('django.db.models.fields.TextField', [], {}),
+ 'previous_group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'unique': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
+ },
+ 'sentry.identity': {
+ 'Meta': {'unique_together': "(('idp', 'external_id'),)", 'object_name': 'Identity'},
+ 'data': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'date_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'idp': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.IdentityProvider']"}),
+ 'scopes': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.identityprovider': {
+ 'Meta': {'unique_together': "(('type', 'organization'),)", 'object_name': 'IdentityProvider'},
+ 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
+ },
+ 'sentry.integration': {
+ 'Meta': {'unique_together': "(('provider', 'external_id'),)", 'object_name': 'Integration'},
+ 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'metadata': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ 'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationIntegration']", 'to': "orm['sentry.Organization']"}),
+ 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'integrations'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectIntegration']", 'to': "orm['sentry.Project']"}),
+ 'provider': ('django.db.models.fields.CharField', [], {'max_length': '64'})
+ },
+ 'sentry.lostpasswordhash': {
+ 'Meta': {'object_name': 'LostPasswordHash'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
+ },
+ 'sentry.option': {
+ 'Meta': {'object_name': 'Option'},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
+ 'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
+ },
+ 'sentry.organization': {
+ 'Meta': {'object_name': 'Organization'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
+ 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
+ },
+ 'sentry.organizationaccessrequest': {
+ 'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
+ 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
+ },
+ 'sentry.organizationavatar': {
+ 'Meta': {'object_name': 'OrganizationAvatar'},
+ 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
+ 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']"})
+ },
+ 'sentry.organizationintegration': {
+ 'Meta': {'unique_together': "(('organization', 'integration'),)", 'object_name': 'OrganizationIntegration'},
+ 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'default_auth_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"})
+ },
+ 'sentry.organizationmember': {
+ 'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
+ 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
+ 'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
+ 'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
+ 'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
+ 'token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
+ 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
+ },
+ 'sentry.organizationmemberteam': {
+ 'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
+ 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
+ },
+ 'sentry.organizationonboardingtask': {
+ 'Meta': {'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask'},
+ 'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'date_completed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
+ },
+ 'sentry.organizationoption': {
+ 'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
+ },
+ 'sentry.processingissue': {
+ 'Meta': {'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue'},
+ 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'type': ('django.db.models.fields.CharField', [], {'max_length': '30'})
+ },
+ 'sentry.project': {
+ 'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0', 'null': 'True'}),
+ 'forced_color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
+ 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'}),
+ 'teams': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['sentry.ProjectTeam']", 'to': "orm['sentry.Team']"})
+ },
+ 'sentry.projectbookmark': {
+ 'Meta': {'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True', 'blank': 'True'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.projectdsymfile': {
+ 'Meta': {'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile'},
+ 'cpu_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
+ 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'object_name': ('django.db.models.fields.TextField', [], {}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
+ 'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
+ },
+ 'sentry.projectintegration': {
+ 'Meta': {'unique_together': "(('project', 'integration'),)", 'object_name': 'ProjectIntegration'},
+ 'config': ('sentry.db.models.fields.encrypted.EncryptedJsonField', [], {'default': '{}'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'integration': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Integration']"}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
+ },
+ 'sentry.projectkey': {
+ 'Meta': {'object_name': 'ProjectKey'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
+ 'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
+ 'rate_limit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'rate_limit_window': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
+ 'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
+ },
+ 'sentry.projectoption': {
+ 'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
+ },
+ 'sentry.projectplatform': {
+ 'Meta': {'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
+ },
+ 'sentry.projectsymcachefile': {
+ 'Meta': {'unique_together': "(('project', 'dsym_file'),)", 'object_name': 'ProjectSymCacheFile'},
+ 'cache_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
+ 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
+ 'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
+ 'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
+ },
+ 'sentry.projectteam': {
+ 'Meta': {'unique_together': "(('project', 'team'),)", 'object_name': 'ProjectTeam'},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
+ },
+ 'sentry.pullrequest': {
+ 'Meta': {'unique_together': "(('repository_id', 'key'),)", 'object_name': 'PullRequest', 'db_table': "'sentry_pull_request'", 'index_together': "(('repository_id', 'date_added'),)"},
+ 'author': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.CommitAuthor']", 'null': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'merge_commit_sha': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'message': ('django.db.models.fields.TextField', [], {'null': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'title': ('django.db.models.fields.TextField', [], {'null': 'True'})
+ },
+ 'sentry.rawevent': {
+ 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent'},
+ 'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
+ },
+ 'sentry.release': {
+ 'Meta': {'unique_together': "(('organization', 'version'),)", 'object_name': 'Release'},
+ 'authors': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'commit_count': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'last_deploy_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']"}),
+ 'ref': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
+ 'total_deploys': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '250'})
+ },
+ 'sentry.releasecommit': {
+ 'Meta': {'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit'},
+ 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
+ },
+ 'sentry.releaseenvironment': {
+ 'Meta': {'unique_together': "(('organization_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'"},
+ 'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'release_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'})
+ },
+ 'sentry.releasefile': {
+ 'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
+ 'dist': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Distribution']", 'null': 'True'}),
+ 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
+ 'name': ('django.db.models.fields.TextField', [], {}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
+ },
+ 'sentry.releaseheadcommit': {
+ 'Meta': {'unique_together': "(('repository_id', 'release'),)", 'object_name': 'ReleaseHeadCommit'},
+ 'commit': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Commit']"}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"}),
+ 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {})
+ },
+ 'sentry.releaseproject': {
+ 'Meta': {'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
+ },
+ 'sentry.repository': {
+ 'Meta': {'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository'},
+ 'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'external_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'integration_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
+ 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'provider': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
+ 'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
+ },
+ 'sentry.reprocessingreport': {
+ 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport'},
+ 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
+ },
+ 'sentry.rule': {
+ 'Meta': {'object_name': 'Rule'},
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'environment_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
+ },
+ 'sentry.savedsearch': {
+ 'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'query': ('django.db.models.fields.TextField', [], {})
+ },
+ 'sentry.savedsearchuserdefault': {
+ 'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
+ 'savedsearch': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.SavedSearch']"}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.scheduleddeletion': {
+ 'Meta': {'unique_together': "(('app_label', 'model_name', 'object_id'),)", 'object_name': 'ScheduledDeletion'},
+ 'aborted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'date_scheduled': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 2, 23, 0, 0)'}),
+ 'guid': ('django.db.models.fields.CharField', [], {'default': "'f9cf77c9ffe74784b20340a7a463fb93'", 'unique': 'True', 'max_length': '32'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'in_progress': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'model_name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'object_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {})
+ },
+ 'sentry.scheduledjob': {
+ 'Meta': {'object_name': 'ScheduledJob'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'date_scheduled': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'payload': ('jsonfield.fields.JSONField', [], {'default': '{}'})
+ },
+ 'sentry.servicehook': {
+ 'Meta': {'object_name': 'ServiceHook'},
+ 'actor_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'events': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
+ 'guid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'eb62662480d84704ad583dfe833aeca2d4294e88fe414e9ea844c83a58ec9ed6'"}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
+ 'url': ('django.db.models.fields.URLField', [], {'max_length': '512'}),
+ 'version': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
+ },
+ 'sentry.tagkey': {
+ 'Meta': {'unique_together': "(('project_id', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'db_index': 'True'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
+ },
+ 'sentry.tagvalue': {
+ 'Meta': {'unique_together': "(('project_id', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'", 'index_together': "(('project_id', 'key', 'last_seen'),)"},
+ 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
+ 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
+ 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
+ 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
+ 'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
+ },
+ 'sentry.team': {
+ 'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
+ 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
+ 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
+ },
+ 'sentry.user': {
+ 'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_password_expired': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_password_change': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_column': "'first_name'", 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'session_nonce': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
+ },
+ 'sentry.useravatar': {
+ 'Meta': {'object_name': 'UserAvatar'},
+ 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
+ 'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ident': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']"})
+ },
+ 'sentry.useremail': {
+ 'Meta': {'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail'},
+ 'date_hash_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'emails'", 'to': "orm['sentry.User']"}),
+ 'validation_hash': ('django.db.models.fields.CharField', [], {'default': "u'CCUqfELgCSCwOc499PKQCXpQeIhKSP1h'", 'max_length': '32'})
+ },
+ 'sentry.userip': {
+ 'Meta': {'unique_together': "(('user', 'ip_address'),)", 'object_name': 'UserIP'},
+ 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
+ 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.useroption': {
+ 'Meta': {'unique_together': "(('user', 'project', 'key'), ('user', 'organization', 'key'))", 'object_name': 'UserOption'},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
+ 'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'null': 'True'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
+ 'value': ('sentry.db.models.fields.encrypted.EncryptedPickledObjectField', [], {})
+ },
+ 'sentry.userpermission': {
+ 'Meta': {'unique_together': "(('user', 'permission'),)", 'object_name': 'UserPermission'},
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'permission': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
+ },
+ 'sentry.userreport': {
+ 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))"},
+ 'comments': ('django.db.models.fields.TextField', [], {}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
+ 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
+ 'event_user_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {'null': 'True'}),
+ 'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
+ },
+ 'sentry.versiondsymfile': {
+ 'Meta': {'unique_together': "(('dsym_file', 'version', 'build'),)", 'object_name': 'VersionDSymFile'},
+ 'build': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
+ 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'dsym_app': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.DSymApp']"}),
+ 'dsym_file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ProjectDSymFile']", 'null': 'True'}),
+ 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
+ 'version': ('django.db.models.fields.CharField', [], {'max_length': '32'})
+ }
+ }
+
+ complete_apps = ['sentry']
\ No newline at end of file
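
A quirk worth flagging in the frozen ORM above: South stores callable defaults it can name as dotted paths (hence 'datetime.datetime.now' throughout), but defaults it cannot name, typically lambdas, are evaluated once at freeze time. That is why sentry.scheduleddeletion carries the literal datetime.datetime(2018, 2, 23, 0, 0) and a fixed guid hex; frozen values like these drive schema reconstruction only, never runtime defaults. A rough sketch of the rule, using an illustrative freeze_default helper:

import datetime
import uuid

def freeze_default(default):
    # Illustrative stand-in for South's freezing rule: callables it can
    # name are stored as a dotted path and stay dynamic per row, while
    # lambdas are called once and their return value is baked in.
    if callable(default) and getattr(default, "__name__", "") == "<lambda>":
        return repr(default())  # captured at migration-generation time
    return default

print(freeze_default(datetime.datetime.now))      # stays a live callable
print(freeze_default(lambda: uuid.uuid4().hex))   # frozen to a literal
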
diff --git a/tests/sentry/api/endpoints/test_project_rule_details.py b/tests/sentry/api/endpoints/test_project_rule_details.py
index 8ebffb3013163d..1b7b6931a636e7 100644
--- a/tests/sentry/api/endpoints/test_project_rule_details.py
+++ b/tests/sentry/api/endpoints/test_project_rule_details.py
@@ -4,7 +4,7 @@
from django.core.urlresolvers import reverse
-from sentry.models import Rule, RuleStatus
+from sentry.models import Environment, Rule, RuleStatus
from sentry.testutils import APITestCase
@@ -30,6 +30,36 @@ def test_simple(self):
assert response.status_code == 200, response.content
assert response.data['id'] == six.text_type(rule.id)
+ assert response.data['environment'] is None
+
+ def test_with_environment(self):
+ self.login_as(user=self.user)
+
+ team = self.create_team()
+ project1 = self.create_project(teams=[team], name='foo')
+ self.create_project(teams=[team], name='bar')
+
+ rule = project1.rule_set.all()[0]
+ rule.update(
+ environment_id=Environment.get_or_create(
+ rule.project,
+ 'production',
+ ).id,
+ )
+
+ url = reverse(
+ 'sentry-api-0-project-rule-details',
+ kwargs={
+ 'organization_slug': project1.organization.slug,
+ 'project_slug': project1.slug,
+ 'rule_id': rule.id,
+ }
+ )
+ response = self.client.get(url, format='json')
+
+ assert response.status_code == 200, response.content
+ assert response.data['id'] == six.text_type(rule.id)
+ assert response.data['environment'] == 'production'
class UpdateProjectRuleTest(APITestCase):
@@ -75,6 +105,7 @@ def test_simple(self):
rule = Rule.objects.get(id=rule.id)
assert rule.label == 'hello world'
+ assert rule.environment_id is None
assert rule.data['action_match'] == 'any'
assert rule.data['actions'] == [
{
@@ -83,6 +114,48 @@ def test_simple(self):
]
assert rule.data['conditions'] == conditions
+ def test_with_environment(self):
+ self.login_as(user=self.user)
+
+ project = self.create_project()
+
+ Environment.get_or_create(
+ project,
+ 'production',
+ )
+
+ rule = Rule.objects.create(project=project, label='foo')
+
+ url = reverse(
+ 'sentry-api-0-project-rule-details',
+ kwargs={
+ 'organization_slug': project.organization.slug,
+ 'project_slug': project.slug,
+ 'rule_id': rule.id,
+ }
+ )
+ response = self.client.put(
+ url,
+ data={
+ 'name': 'hello world',
+ 'environment': 'production',
+ 'actionMatch': 'any',
+ 'actions': [],
+ 'conditions': []
+ },
+ format='json'
+ )
+
+ assert response.status_code == 200, response.content
+ assert response.data['id'] == six.text_type(rule.id)
+
+ rule = Rule.objects.get(id=rule.id)
+ assert rule.label == 'hello world'
+ assert rule.environment_id == Environment.get_or_create(
+ rule.project,
+ 'production',
+ ).id
+
def test_invalid_rule_node_type(self):
self.login_as(user=self.user)
diff --git a/tests/sentry/api/endpoints/test_project_rules.py b/tests/sentry/api/endpoints/test_project_rules.py
index 5db86b4048a977..9f16fdcfcaf0b4 100644
--- a/tests/sentry/api/endpoints/test_project_rules.py
+++ b/tests/sentry/api/endpoints/test_project_rules.py
@@ -2,7 +2,7 @@
from django.core.urlresolvers import reverse
-from sentry.models import Rule
+from sentry.models import Environment, Rule
from sentry.testutils import APITestCase
@@ -75,6 +75,57 @@ def test_simple(self):
assert rule.data['conditions'] == conditions
assert rule.data['frequency'] == 30
+ def test_with_environment(self):
+ self.login_as(user=self.user)
+
+ project = self.create_project()
+
+ Environment.get_or_create(
+ project,
+ 'production',
+ )
+
+ conditions = [
+ {
+ 'id': 'sentry.rules.conditions.first_seen_event.FirstSeenEventCondition',
+ 'key': 'foo',
+ 'match': 'eq',
+ 'value': 'bar',
+ }
+ ]
+
+ actions = [{'id': 'sentry.rules.actions.notify_event.NotifyEventAction'}]
+
+ url = reverse(
+ 'sentry-api-0-project-rules',
+ kwargs={
+ 'organization_slug': project.organization.slug,
+ 'project_slug': project.slug,
+ }
+ )
+ response = self.client.post(
+ url,
+ data={
+ 'name': 'hello world',
+ 'environment': 'production',
+ 'conditions': conditions,
+ 'actions': actions,
+ 'actionMatch': 'any',
+ 'frequency': 30,
+ },
+ format='json'
+ )
+
+ assert response.status_code == 200, response.content
+ assert response.data['id']
+
+ rule = Rule.objects.get(id=response.data['id'])
+ assert rule.label == 'hello world'
+ assert rule.environment_id == Environment.get_or_create(
+ rule.project,
+ 'production',
+ ).id
+
def test_missing_name(self):
self.login_as(user=self.user)
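
Both new tests depend on Environment.get_or_create being idempotent: resolving the same (project, 'production') pair twice must return the same row, or the rule.environment_id assertions above could never match. A minimal sketch of that contract, assuming a Project instance from Sentry's test factories:

from sentry.models import Environment

def assert_environment_idempotent(project):
    # Two lookups for the same (project, name) pair must yield the same
    # primary key; the PUT and POST tests rely on this when they compare
    # rule.environment_id against a fresh get_or_create call.
    first = Environment.get_or_create(project, 'production')
    second = Environment.get_or_create(project, 'production')
    assert first.id == second.id
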
|
a2044b56bccbe7ead62f8c01bad79ca35a07546b
|
2023-06-23 21:08:39
|
Richard Ortenberg
|
feat(crons): Set trace_id on consumer check-ins (#51498)
| false
|
Set trace_id on consumer check-ins (#51498)
|
feat
|
diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py
index 52877c5f77e6f1..710a235bca149e 100644
--- a/src/sentry/monitors/consumers/monitor_consumer.py
+++ b/src/sentry/monitors/consumers/monitor_consumer.py
@@ -298,6 +298,8 @@ def update_existing_check_in(
minutes=(monitor_config or {}).get("max_runtime") or TIMEOUT
)
+ trace_id = validated_params.get("contexts", {}).get("trace", {}).get("trace_id")
+
# If the UUID is unset (zero value) generate a new UUID
if check_in_id.int == 0:
guid = uuid.uuid4()
@@ -316,6 +318,7 @@ def update_existing_check_in(
"expected_time": expected_time,
"timeout_at": timeout_at,
"monitor_config": monitor_config,
+ "trace_id": trace_id,
},
project_id=project_id,
monitor=monitor,
diff --git a/src/sentry/monitors/validators.py b/src/sentry/monitors/validators.py
index 5abd9657d7194d..14757c345fde70 100644
--- a/src/sentry/monitors/validators.py
+++ b/src/sentry/monitors/validators.py
@@ -225,6 +225,14 @@ def create(self, validated_data):
return validated_data
+class TraceContextValidator(serializers.Serializer):
+ trace_id = serializers.CharField(max_length=32)
+
+
+class ContextsValidator(serializers.Serializer):
+ trace = TraceContextValidator(required=False)
+
+
class MonitorCheckInValidator(serializers.Serializer):
status = serializers.ChoiceField(
choices=(
@@ -241,7 +249,7 @@ class MonitorCheckInValidator(serializers.Serializer):
)
environment = serializers.CharField(required=False, allow_null=True)
monitor_config = ConfigValidator(required=False)
- trace_id = serializers.CharField(required=False, allow_null=True, max_length=32)
+ contexts = ContextsValidator(required=False, allow_null=True)
def validate(self, attrs):
attrs = super().validate(attrs)
diff --git a/tests/sentry/monitors/test_monitor_consumer.py b/tests/sentry/monitors/test_monitor_consumer.py
index a84170054ae212..8fc7a4c8a49672 100644
--- a/tests/sentry/monitors/test_monitor_consumer.py
+++ b/tests/sentry/monitors/test_monitor_consumer.py
@@ -53,6 +53,7 @@ def send_message(
) -> None:
now = datetime.now()
self.guid = uuid.uuid4().hex if not guid else guid
+ self.trace_id = uuid.uuid4().hex
payload = {
"monitor_slug": monitor_slug,
@@ -60,6 +61,7 @@ def send_message(
"duration": None,
"check_in_id": self.guid,
"environment": "production",
+ "contexts": {"trace": {"trace_id": self.trace_id}},
}
payload.update(overrides)
@@ -105,6 +107,7 @@ def test_payload(self) -> None:
checkin = MonitorCheckIn.objects.get(guid=self.guid)
# the expected time should not include the margin of 5 minutes
assert checkin.expected_time == expected_time - timedelta(minutes=5)
+ assert checkin.trace_id.hex == self.trace_id
def test_passing(self) -> None:
monitor = self._create_monitor(slug="my-monitor")
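
The consumer's chained .get() calls make the trace context fully optional: a payload missing contexts, trace, or trace_id degrades to None rather than raising. A standalone sketch of that extraction (the function name is illustrative):

import uuid
from typing import Optional

def extract_trace_id(validated_params: dict) -> Optional[str]:
    # Each .get() falls back to an empty dict, so absent keys at any
    # level yield None instead of a KeyError.
    return validated_params.get("contexts", {}).get("trace", {}).get("trace_id")

assert extract_trace_id({"contexts": {"trace": {"trace_id": uuid.uuid4().hex}}})
assert extract_trace_id({}) is None
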
|
ef45a320c69218820973e29040d7b804a6b701bb
|
2024-03-14 23:10:19
|
Tony Xiao
|
ref(metrics): Use dataclass instead of tuple for span key for clarity (#66978)
| false
|
Use dataclass instead of tuple for span key for clarity (#66978)
|
ref
|
diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py
index be5b873ac6b93f..cbe8fd2064a160 100644
--- a/src/sentry/sentry_metrics/querying/samples_list.py
+++ b/src/sentry/sentry_metrics/querying/samples_list.py
@@ -1,6 +1,7 @@
from abc import ABC, abstractmethod
from bisect import bisect
from collections.abc import Callable
+from dataclasses import dataclass
from datetime import datetime
from typing import Any, Literal, TypedDict, cast
@@ -25,6 +26,13 @@
from sentry.snuba.referrer import Referrer
+@dataclass(frozen=True)
+class SpanKey:
+ group: str
+ timestamp: str
+ span_id: str
+
+
class Summary(TypedDict):
min: float
max: float
@@ -90,7 +98,7 @@ def execute_unsorted(self, offset, limit):
def get_spans_by_key(
self,
- span_keys: list[tuple[str, str, str]],
+ span_keys: list[SpanKey],
additional_fields: list[str] | None = None,
):
if not span_keys:
@@ -118,13 +126,13 @@ def get_spans_by_key(
conditions = [
And(
[
- Condition(builder.column("span.group"), Op.EQ, group),
+ Condition(builder.column("span.group"), Op.EQ, key.group),
Condition(
- builder.column("timestamp"), Op.EQ, datetime.fromisoformat(timestamp)
+ builder.column("timestamp"), Op.EQ, datetime.fromisoformat(key.timestamp)
),
]
)
- for (group, timestamp, _) in span_keys
+ for key in span_keys
]
if len(conditions) == 1:
@@ -142,7 +150,7 @@ def get_spans_by_key(
span_id_condition = Condition(
builder.column("id"),
Op.IN,
- Function("tuple", [span_id for _, _, span_id in span_keys]),
+ Function("tuple", [key.span_id for key in span_keys]),
)
builder.add_conditions([order_by_condition, span_id_condition])
@@ -188,7 +196,7 @@ def supports_mri(cls, mri: str) -> bool:
def _get_spans(
self,
- span_keys: list[tuple[str, str, str]],
+ span_keys: list[SpanKey],
summaries: dict[str, Summary],
):
result = self.get_spans_by_key(
@@ -200,7 +208,7 @@ def _get_spans(
# if there is a sort, we want to preserve the result in the same
# order as the span keys which we can do by checking the span ids
if self.sort:
- order = {span_id: i for i, (_, _, span_id) in enumerate(span_keys)}
+ order = {key.span_id: i for i, key in enumerate(span_keys)}
result["data"].sort(key=lambda row: order[row["id"]])
# if `id` wasn't initially there, we should remove it
@@ -220,7 +228,7 @@ def get_sorted_span_keys(
self,
offset: int,
limit: int,
- ) -> tuple[list[tuple[str, str, str]], dict[str, Summary]]:
+ ) -> tuple[list[SpanKey], dict[str, Summary]]:
"""
When getting examples for a segment, it's actually much faster to read it
from the transactions dataset compared to the spans dataset as it's a much
@@ -264,10 +272,10 @@ def get_sorted_span_keys(
result = builder.process_results(query_results)
span_keys = [
- (
- "00", # all segments have a group of `00` currently
- row["timestamp"], # timestamp
- row["span_id"], # span_id
+ SpanKey(
+ group="00", # all segments have a group of `00` currently
+ timestamp=row["timestamp"],
+ span_id=row["span_id"],
)
for row in result["data"]
]
@@ -307,7 +315,7 @@ def get_unsorted_span_keys(
self,
offset: int,
limit: int,
- ) -> tuple[list[tuple[str, str, str]], dict[str, Summary]]:
+ ) -> tuple[list[SpanKey], dict[str, Summary]]:
"""
When getting examples for a segment, it's actually much faster to read it
from the transactions dataset compared to the spans dataset as it's a much
@@ -348,10 +356,10 @@ def get_unsorted_span_keys(
row["examples"] = pick_samples(row["examples"], metric_key=metric_key)
span_keys = [
- (
- "00", # all segments have a group of `00` currently
- example[0], # timestamp
- example[1], # span_id
+ SpanKey(
+ group="00", # all segments have a group of `00` currently
+ timestamp=example[0],
+ span_id=example[1],
)
for row in result["data"]
for example in row["examples"]
@@ -536,7 +544,7 @@ def execute_unsorted(self, offset, limit):
return result
- def get_unsorted_span_keys(self, offset: int, limit: int) -> list[tuple[str, str, str]]:
+ def get_unsorted_span_keys(self, offset: int, limit: int) -> list[SpanKey]:
column = self.mri_to_column(self.mri)
for dataset_segmentation_condition_fn in self.dataset_segmentation_conditions():
@@ -581,10 +589,10 @@ def get_unsorted_span_keys(self, offset: int, limit: int) -> list[tuple[str, str
row["examples"] = pick_samples(row["examples"], metric_key=metric_key)
return [
- (
- example[0], # group
- example[1], # timestamp
- example[2], # span_id
+ SpanKey(
+ group=example[0],
+ timestamp=example[1],
+ span_id=example[2],
)
for row in result["data"]
for example in row["examples"]
@@ -731,7 +739,7 @@ def supports_mri(cls, mri: str) -> bool:
def _get_spans(
self,
- span_keys: list[tuple[str, str, str]],
+ span_keys: list[SpanKey],
summaries: dict[str, Summary],
):
result = self.get_spans_by_key(span_keys, additional_fields=["id"])
@@ -739,7 +747,7 @@ def _get_spans(
# if there is a sort, we want to preserve the result in the same
# order as the span keys which we can do by checking the span ids
if self.sort:
- order = {span_id: i for i, (_, _, span_id) in enumerate(span_keys)}
+ order = {key.span_id: i for i, key in enumerate(span_keys)}
result["data"].sort(key=lambda row: order[row["id"]])
should_pop_id = "id" not in self.fields
@@ -758,7 +766,7 @@ def get_sorted_span_keys(
self,
offset: int,
limit: int,
- ) -> tuple[list[tuple[str, str, str]], dict[str, Summary]]:
+ ) -> tuple[list[SpanKey], dict[str, Summary]]:
assert self.sort
sort = self.convert_sort(self.sort, self.operation)
assert sort is not None
@@ -798,10 +806,10 @@ def get_sorted_span_keys(
result = builder.process_results(query_results)
span_keys = [
- (
- cast(str, row["span.group"]), # group
- cast(str, row["timestamp"]), # timestamp
- cast(str, row["id"]), # span_id
+ SpanKey(
+ group=row["span.group"],
+ timestamp=row["timestamp"],
+ span_id=row["id"],
)
for row in result["data"]
]
@@ -834,7 +842,7 @@ def get_unsorted_span_keys(
self,
offset: int,
limit: int,
- ) -> tuple[list[tuple[str, str, str]], dict[str, Summary]]:
+ ) -> tuple[list[SpanKey], dict[str, Summary]]:
builder = MetricsSummariesQueryBuilder(
Dataset.MetricsSummaries,
self.params,
@@ -867,10 +875,10 @@ def get_unsorted_span_keys(
row["examples"] = pick_samples(row["examples"], metric_key=metric_key)
span_keys = [
- (
- cast(str, example[0]), # group
- cast(str, example[1]), # timestamp
- cast(str, example[2]), # span_id
+ SpanKey(
+ group=example[0],
+ timestamp=example[1],
+ span_id=example[2],
)
for row in result["data"]
for example in row["examples"]
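
Beyond readability, frozen=True gives SpanKey value semantics (generated __eq__ and __hash__), and the named span_id field makes the reordering idiom in _get_spans self-documenting. A minimal sketch of that idiom with illustrative data:

from dataclasses import dataclass

@dataclass(frozen=True)
class SpanKey:
    group: str
    timestamp: str
    span_id: str

keys = [
    SpanKey(group="00", timestamp="2024-03-14T00:00:00", span_id="b"),
    SpanKey(group="00", timestamp="2024-03-14T00:00:01", span_id="a"),
]
# Mirror the `order = {key.span_id: i ...}` pattern: re-sort query rows
# back into the order of the span keys that produced them.
order = {key.span_id: i for i, key in enumerate(keys)}
rows = [{"id": "a"}, {"id": "b"}]
rows.sort(key=lambda row: order[row["id"]])
assert [row["id"] for row in rows] == ["b", "a"]
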
|
d31105c87a2ebd158eb955d5deddfbf3c4c0765a
|
2024-07-23 14:29:47
|
Simon Hellmayr
|
feat(admin): add endpoint to invalidate project config on demand (#74705)
| false
|
add endpoint to invalidate project config on demand (#74705)
|
feat
|
diff --git a/src/sentry/api/endpoints/admin_project_configs.py b/src/sentry/api/endpoints/admin_project_configs.py
index b3e9c28fbc26b2..a33b9a6d5a2596 100644
--- a/src/sentry/api/endpoints/admin_project_configs.py
+++ b/src/sentry/api/endpoints/admin_project_configs.py
@@ -8,6 +8,7 @@
from sentry.api.permissions import SuperuserOrStaffFeatureFlaggedPermission
from sentry.models.project import Project
from sentry.relay import projectconfig_cache
+from sentry.tasks.relay import schedule_invalidate_project_config
# NOTE: This endpoint should be in getsentry
@@ -16,6 +17,7 @@ class AdminRelayProjectConfigsEndpoint(Endpoint):
owner = ApiOwner.OWNERS_INGEST
publish_status = {
"GET": ApiPublishStatus.PRIVATE,
+ "POST": ApiPublishStatus.PRIVATE,
}
permission_classes = (SuperuserOrStaffFeatureFlaggedPermission,)
@@ -47,3 +49,18 @@ def get(self, request: Request) -> Response:
# TODO if we don't think we'll add anything to the endpoint
# we may as well return just the configs
return Response({"configs": configs}, status=200)
+
+ def post(self, request: Request) -> Response:
+ """Regenerate the project config"""
+ project_id = request.GET.get("projectId")
+
+ if project_id is not None:
+ try:
+ schedule_invalidate_project_config(
+ project_id=project_id, trigger="_admin_trigger_invalidate_project_config"
+ )
+
+ except Exception:
+ raise Http404
+
+ return Response(status=204)
diff --git a/tests/sentry/api/endpoints/test_admin_project_configs.py b/tests/sentry/api/endpoints/test_admin_project_configs.py
index 80e9aa183c0db6..52784bc95c82fd 100644
--- a/tests/sentry/api/endpoints/test_admin_project_configs.py
+++ b/tests/sentry/api/endpoints/test_admin_project_configs.py
@@ -10,6 +10,8 @@
@no_silo_test
class AdminRelayProjectConfigsEndpointTest(APITestCase):
+ endpoint = "sentry-api-0-internal-project-config"
+
def setUp(self):
super().setUp()
self.owner = self.create_user(
@@ -137,3 +139,12 @@ def test_inexistent_key(self):
expected = {"configs": {str(inexsitent_key): None}}
actual = response.json()
assert actual == expected
+
+ def test_invalidate_project_config(self):
+ response = self.get_response(method="post", project_id=self.project.id)
+ assert response.status_code == 401
+
+ self.login_as(self.superuser, superuser=True)
+
+ response = self.get_response(method="post", project_id=self.project.id)
+ assert response.status_code == 204
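
Two details of the new handler are easy to miss: it reads projectId from the query string even though the method is POST, and request.GET values arrive as strings, so schedule_invalidate_project_config receives a string id here. A rough client-side sketch; the URL path is an assumption inferred from the test's endpoint name, since the real route lives in getsentry's URL config:

import requests

# Hypothetical path derived from "sentry-api-0-internal-project-config";
# adjust to the actual route.
resp = requests.post(
    "https://sentry.example.com/api/0/internal/project-config/",
    params={"projectId": 42},  # query string, not a JSON body
    headers={"Authorization": "Bearer <superuser-token>"},
)
assert resp.status_code == 204  # invalidation scheduled, no content
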
|
94f943cca9bed4793244882efe1d141a1126bdc0
|
2021-11-25 00:19:40
|
Kev
|
ref(perf): Remove tag explorer feature flags (#29574)
| false
|
Remove tag explorer feature flags (#29574)
|
ref
|
diff --git a/static/app/views/performance/transactionSummary/header.tsx b/static/app/views/performance/transactionSummary/header.tsx
index 1725742c8fbfcc..f7f487bf8b37f4 100644
--- a/static/app/views/performance/transactionSummary/header.tsx
+++ b/static/app/views/performance/transactionSummary/header.tsx
@@ -258,16 +258,13 @@ class TransactionHeader extends React.Component<Props> {
{t('Overview')}
</ListLink>
{this.renderWebVitalsTab()}
- <Feature features={['organizations:performance-tag-page']}>
- <ListLink
- to={tagsTarget}
- isActive={() => currentTab === Tab.Tags}
- onClick={this.trackTabClick(Tab.Tags)}
- >
- {t('Tags')}
- <FeatureBadge type="new" noTooltip />
- </ListLink>
- </Feature>
+ <ListLink
+ to={tagsTarget}
+ isActive={() => currentTab === Tab.Tags}
+ onClick={this.trackTabClick(Tab.Tags)}
+ >
+ {t('Tags')}
+ </ListLink>
<Feature features={['organizations:performance-events-page']}>
<ListLink
to={eventsTarget}
diff --git a/static/app/views/performance/transactionSummary/transactionOverview/content.tsx b/static/app/views/performance/transactionSummary/transactionOverview/content.tsx
index 211d66a93846b2..3747b662843f3a 100644
--- a/static/app/views/performance/transactionSummary/transactionOverview/content.tsx
+++ b/static/app/views/performance/transactionSummary/transactionOverview/content.tsx
@@ -4,7 +4,6 @@ import styled from '@emotion/styled';
import {Location} from 'history';
import omit from 'lodash/omit';
-import Feature from 'app/components/acl/feature';
import TransactionsList, {DropdownOption} from 'app/components/discover/transactionsList';
import SearchBar from 'app/components/events/searchBar';
import GlobalSdkUpdateAlert from 'app/components/globalSdkUpdateAlert';
@@ -350,19 +349,14 @@ class SummaryContent extends React.Component<Props> {
})}
forceLoading={isLoading}
/>
- <Feature
- requireAll={false}
- features={['performance-tag-explorer', 'performance-tag-page']}
- >
- <TagExplorer
- eventView={eventView}
- organization={organization}
- location={location}
- projects={projects}
- transactionName={transactionName}
- currentFilter={spanOperationBreakdownFilter}
- />
- </Feature>
+ <TagExplorer
+ eventView={eventView}
+ organization={organization}
+ location={location}
+ projects={projects}
+ transactionName={transactionName}
+ currentFilter={spanOperationBreakdownFilter}
+ />
<RelatedIssues
organization={organization}
location={location}
diff --git a/static/app/views/performance/transactionSummary/transactionOverview/tagExplorer.tsx b/static/app/views/performance/transactionSummary/transactionOverview/tagExplorer.tsx
index 6d52da273bd52c..1f7b658eb65516 100644
--- a/static/app/views/performance/transactionSummary/transactionOverview/tagExplorer.tsx
+++ b/static/app/views/performance/transactionSummary/transactionOverview/tagExplorer.tsx
@@ -3,11 +3,9 @@ import {browserHistory} from 'react-router';
import styled from '@emotion/styled';
import {Location, LocationDescriptorObject} from 'history';
-import Feature from 'app/components/acl/feature';
import {GuideAnchor} from 'app/components/assistant/guideAnchor';
import Button from 'app/components/button';
import {SectionHeading} from 'app/components/charts/styles';
-import FeatureBadge from 'app/components/featureBadge';
import GridEditable, {
COL_WIDTH_UNDEFINED,
GridColumn,
@@ -352,18 +350,9 @@ class _TagExplorer extends React.Component<Props> {
query: {...location.query, tagKey: dataRow.tags_key},
});
return (
- <Feature features={['performance-tag-page']} organization={organization}>
- {({hasFeature}) => {
- if (hasFeature) {
- return (
- <Link to={target} onClick={() => this.onTagKeyClick()}>
- {dataRow.tags_key}
- </Link>
- );
- }
- return dataRow.tags_key;
- }}
- </Feature>
+ <Link to={target} onClick={() => this.onTagKeyClick()}>
+ {dataRow.tags_key}
+ </Link>
);
}
@@ -378,27 +367,7 @@ class _TagExplorer extends React.Component<Props> {
handleCellAction={this.handleCellAction(column, dataRow.tags_value, actionRow)}
allowActions={allowActions}
>
- <Feature features={['performance-tag-page']} organization={organization}>
- {({hasFeature}) => {
- if (hasFeature) {
- return <div className="truncate">{dataRow.tags_value}</div>;
- }
- return (
- <Link
- to=""
- onClick={() =>
- this.handleTagValueClick(
- location,
- dataRow.tags_key,
- dataRow.tags_value
- )
- }
- >
- <TagValue row={dataRow} />
- </Link>
- );
- }}
- </Feature>
+ <div className="truncate">{dataRow.tags_value}</div>
</CellAction>
);
}
@@ -560,18 +529,15 @@ function TagsHeader(props: HeaderProps) {
<Header>
<div>
<SectionHeading>{t('Suspect Tags')}</SectionHeading>
- <FeatureBadge type="new" />
</div>
- <Feature features={['performance-tag-page']} organization={organization}>
- <Button
- onClick={handleViewAllTagsClick}
- to={viewAllTarget}
- size="small"
- data-test-id="tags-explorer-open-tags"
- >
- {t('View All Tags')}
- </Button>
- </Feature>
+ <Button
+ onClick={handleViewAllTagsClick}
+ to={viewAllTarget}
+ size="small"
+ data-test-id="tags-explorer-open-tags"
+ >
+ {t('View All Tags')}
+ </Button>
<StyledPagination pageLinks={pageLinks} onCursor={handleCursor} size="small" />
</Header>
);
diff --git a/static/app/views/performance/transactionSummary/transactionTags/index.tsx b/static/app/views/performance/transactionSummary/transactionTags/index.tsx
index e5a89bda359f86..eda942a565464c 100644
--- a/static/app/views/performance/transactionSummary/transactionTags/index.tsx
+++ b/static/app/views/performance/transactionSummary/transactionTags/index.tsx
@@ -31,7 +31,6 @@ function TransactionTags(props: Props) {
getDocumentTitle={getDocumentTitle}
generateEventView={generateEventView}
childComponent={TagsPageContent}
- features={['performance-tag-page']}
/>
);
}
diff --git a/tests/acceptance/test_performance_summary.py b/tests/acceptance/test_performance_summary.py
index eec49479f6bb46..b6464a3d4b2941 100644
--- a/tests/acceptance/test_performance_summary.py
+++ b/tests/acceptance/test_performance_summary.py
@@ -10,11 +10,7 @@
from .page_objects.transaction_summary import TransactionSummaryPage
-FEATURES = {
- "organizations:performance-view": True,
- "organizations:performance-tag-explorer": False,
- "organizations:performance-tag-page": False,
-}
+FEATURES = {"organizations:performance-view": True}
def make_event(event_data):
@@ -65,11 +61,6 @@ def test_with_data(self, mock_now):
with self.feature(FEATURES):
self.browser.get(self.path)
self.page.wait_until_loaded()
- # This test is flakey in that we sometimes load this page before the event is processed
- # depend on pytest-retry to reload the page
- self.browser.wait_until_not(
- '[data-test-id="grid-editable"] [data-test-id="empty-state"]', timeout=2
- )
# We have to wait for this again because there are loaders inside of the table
self.page.wait_until_loaded()
self.browser.snapshot("performance summary - with data")
@@ -109,9 +100,7 @@ def test_tags_page(self, mock_now):
event = make_event(event_data)
self.store_event(data=event, project_id=self.project.id)
- features = dict(FEATURES)
- features["organizations:performance-tag-page"] = True
- with self.feature(features):
+ with self.feature(FEATURES):
self.browser.get(tags_path)
self.page.wait_until_loaded()
self.browser.snapshot("transaction summary tags page")
@@ -224,12 +213,5 @@ def test_transaction_threshold_modal(self, mock_now):
with self.feature(FEATURES):
self.browser.get(self.path)
self.page.wait_until_loaded()
- # This test is flakey in that we sometimes load this page before the event is processed
- # depend on pytest-retry to reload the page
- self.browser.wait_until_not(
- '[data-test-id="grid-editable"] [data-test-id="empty-state"]', timeout=2
- )
- # We have to wait for this again because there are loaders inside of the table
- self.page.wait_until_loaded()
self.browser.click('[data-test-id="set-transaction-threshold"]')
self.browser.snapshot("transaction threshold modal")
diff --git a/tests/js/spec/views/performance/transactionSummary.spec.jsx b/tests/js/spec/views/performance/transactionSummary.spec.jsx
index 21be1ac3171b91..74cc0665939b45 100644
--- a/tests/js/spec/views/performance/transactionSummary.spec.jsx
+++ b/tests/js/spec/views/performance/transactionSummary.spec.jsx
@@ -90,6 +90,10 @@ describe('Performance > TransactionSummary', function () {
url: '/prompts-activity/',
body: {},
});
+ MockApiClient.addMockResponse({
+ url: '/organizations/org-slug/events-facets-performance/',
+ body: {},
+ });
// Mock totals for the sidebar and other summary data
MockApiClient.addMockResponse({
@@ -491,7 +495,7 @@ describe('Performance > TransactionSummary', function () {
wrapper.update();
const pagination = wrapper.find('Pagination');
- expect(pagination).toHaveLength(1);
+ expect(pagination).toHaveLength(2);
    // Click the 'next' button
pagination.find('button[aria-label="Next"]').simulate('click');
diff --git a/tests/js/spec/views/performance/transactionSummary/content.spec.tsx b/tests/js/spec/views/performance/transactionSummary/content.spec.tsx
index 26b3961c756533..40bdf1860fe959 100644
--- a/tests/js/spec/views/performance/transactionSummary/content.spec.tsx
+++ b/tests/js/spec/views/performance/transactionSummary/content.spec.tsx
@@ -80,6 +80,10 @@ describe('Transaction Summary Content', function () {
url: '/organizations/org-slug/events-stats/',
body: [],
});
+ MockApiClient.addMockResponse({
+ url: '/organizations/org-slug/events-facets-performance/',
+ body: {},
+ });
MockApiClient.addMockResponse({
url: '/organizations/org-slug/events-has-measurements/',
body: {measurements: false},
diff --git a/tests/js/spec/views/performance/transactionSummary/tagExplorer.spec.jsx b/tests/js/spec/views/performance/transactionSummary/tagExplorer.spec.jsx
index f2d93224e0a48e..78cfafad595b17 100644
--- a/tests/js/spec/views/performance/transactionSummary/tagExplorer.spec.jsx
+++ b/tests/js/spec/views/performance/transactionSummary/tagExplorer.spec.jsx
@@ -173,7 +173,7 @@ describe('TagExplorer', function () {
{
project: '123',
},
- ['performance-tag-page']
+ []
);
const wrapper = mountWithTheme(
diff --git a/tests/js/spec/views/performance/transactionTags/index.spec.jsx b/tests/js/spec/views/performance/transactionTags/index.spec.jsx
index bc69b2dd739000..3697f2dca6f322 100644
--- a/tests/js/spec/views/performance/transactionTags/index.spec.jsx
+++ b/tests/js/spec/views/performance/transactionTags/index.spec.jsx
@@ -8,7 +8,7 @@ import ProjectsStore from 'sentry/stores/projectsStore';
import TransactionTags from 'sentry/views/performance/transactionSummary/transactionTags';
function initializeData({query} = {query: {}}) {
- const features = ['discover-basic', 'performance-view', 'performance-tag-page'];
+ const features = ['discover-basic', 'performance-view'];
const organization = TestStubs.Organization({
features,
projects: [TestStubs.Project()],
|
ccb3c648d0d065bfe84840d1956fb0a9f5ab3544
|
2024-05-28 01:14:17
|
George Gritsouk
|
feat(insights): New sidebar grouping (#71533)
| false
|
New sidebar grouping (#71533)
|
feat
|
diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx
index b638f9d3228406..1d9fac8316eb47 100644
--- a/static/app/components/sidebar/index.tsx
+++ b/static/app/components/sidebar/index.tsx
@@ -27,6 +27,7 @@ import {
IconProfiling,
IconProject,
IconReleases,
+ IconSearch,
IconSettings,
IconSiren,
IconStats,
@@ -190,6 +191,10 @@ function Sidebar() {
organization,
};
+ // New hierarchy organizes current links into two accordions: "Explore" and "Insights". This means setting up different sidebar groupings, and changing some link icons to small dots, since they now live under an accordion
+ const hasNewSidebarHierarchy =
+ hasOrganization && organization.features.includes('performance-insights');
+
const sidebarAnchor = isDemoWalkthrough() ? (
<GuideAnchor target="projects" disabled={!DemoWalkthroughStore.get('sidebar')}>
{t('Projects')}
@@ -228,7 +233,7 @@ function Sidebar() {
>
<SidebarItem
{...sidebarItemProps}
- icon={<IconTelescope />}
+ icon={hasNewSidebarHierarchy ? <SubitemDot collapsed /> : <IconTelescope />}
label={<GuideAnchor target="discover">{t('Discover')}</GuideAnchor>}
to={getDiscoverLandingUrl(organization)}
id="discover-v2"
@@ -378,7 +383,7 @@ function Sidebar() {
<Feature features="ai-analytics" organization={organization}>
<SidebarItem
{...sidebarItemProps}
- icon={<IconRobot />}
+ icon={hasNewSidebarHierarchy ? <SubitemDot collapsed /> : <IconRobot />}
label={MODULE_TITLES.ai}
isAlpha
variant="short"
@@ -395,11 +400,13 @@ function Sidebar() {
organization={organization}
>
{(() => {
- // If Database View or Web Vitals View is enabled, show a Performance accordion with a Database and/or Web Vitals sub-item
+ // If the client has the old sidebar hierarchy _and_ something to show inside the Performance dropdown, render an accordion.
if (
- organization.features.includes('spans-first-ui') ||
- organization.features.includes('performance-cache-view') ||
- organization.features.includes('performance-queues-view')
+ !hasNewSidebarHierarchy &&
+ (organization.features.includes('spans-first-ui') ||
+ organization.features.includes('performance-cache-view') ||
+ organization.features.includes('performance-queues-view') ||
+ organization.features.includes('performance-trace-explorer'))
) {
return (
<SidebarAccordion
@@ -502,7 +509,7 @@ function Sidebar() {
>
<SidebarItem
{...sidebarItemProps}
- icon={<IconPlay />}
+ icon={hasNewSidebarHierarchy ? <SubitemDot collapsed /> : <IconPlay />}
label={t('Replays')}
to={`/organizations/${organization.slug}/replays/`}
id="replays"
@@ -516,7 +523,7 @@ function Sidebar() {
const metrics = hasOrganization && canSeeMetricsPage(organization) && (
<SidebarItem
{...sidebarItemProps}
- icon={<IconGraph />}
+ icon={hasNewSidebarHierarchy ? <SubitemDot collapsed /> : <IconGraph />}
label={t('Metrics')}
to={metricsPath}
search={location.pathname === normalizeUrl(metricsPath) ? location.search : ''}
@@ -554,8 +561,8 @@ function Sidebar() {
<SidebarItem
{...sidebarItemProps}
index
- icon={<IconProfiling />}
- label={t('Profiling')}
+ icon={hasNewSidebarHierarchy ? <SubitemDot collapsed /> : <IconProfiling />}
+ label={hasNewSidebarHierarchy ? t('Profiles') : t('Profiling')}
to={`/organizations/${organization.slug}/profiling/`}
id="profiling"
/>
@@ -582,6 +589,43 @@ function Sidebar() {
/>
);
+ const insights = (
+ <SidebarAccordion
+ {...sidebarItemProps}
+ icon={<IconGraph />}
+ label={<GuideAnchor target="insights">{t('Insights')}</GuideAnchor>}
+ id="insights"
+ exact={!shouldAccordionFloat}
+ >
+ {requests}
+ {queries}
+ {resources}
+ {appStarts}
+ {screenLoads}
+ {webVitals}
+ {caches}
+ {queues}
+ {mobileUI}
+ {llmMonitoring}
+ </SidebarAccordion>
+ );
+
+ const explore = (
+ <SidebarAccordion
+ {...sidebarItemProps}
+ icon={<IconSearch />}
+ label={<GuideAnchor target="explore">{t('Explore')}</GuideAnchor>}
+ id="explore"
+ exact={!shouldAccordionFloat}
+ >
+ {traces}
+ {metrics}
+ {profiling}
+ {replays}
+ {discover2}
+ </SidebarAccordion>
+ );
+
return (
<SidebarWrapper aria-label={t('Primary Navigation')} collapsed={collapsed}>
<ExpandedContextProvider>
@@ -604,23 +648,45 @@ function Sidebar() {
{projects}
</SidebarSection>
- <SidebarSection>
- {performance}
- {profiling}
- {metrics}
- {replays}
- {llmMonitoring}
- {feedback}
- {monitors}
- {alerts}
- </SidebarSection>
-
- <SidebarSection>
- {discover2}
- {dashboards}
- {releases}
- {userFeedback}
- </SidebarSection>
+ {hasNewSidebarHierarchy && (
+ <Fragment>
+ <SidebarSection>
+ {explore}
+ {insights}
+ </SidebarSection>
+
+ <SidebarSection>
+ {performance}
+ {feedback}
+ {monitors}
+ {alerts}
+ {dashboards}
+ {releases}
+ </SidebarSection>
+ </Fragment>
+ )}
+
+ {!hasNewSidebarHierarchy && (
+ <Fragment>
+ <SidebarSection>
+ {performance}
+ {profiling}
+ {metrics}
+ {replays}
+ {llmMonitoring}
+ {feedback}
+ {monitors}
+ {alerts}
+ </SidebarSection>
+
+ <SidebarSection>
+ {discover2}
+ {dashboards}
+ {releases}
+ {userFeedback}
+ </SidebarSection>
+ </Fragment>
+ )}
<SidebarSection>
{stats}
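
Everything in this commit keys off a single flag read on the client: organization.features.includes('performance-insights'). That features array is computed server-side, so the backend half of such a rollout is a features.has check with the organizations: prefix. A hedged sketch — the helper below is illustrative, only the features.has call shape comes from this codebase:

from sentry import features

def organization_feature_flags(organization, user):
    # Flags returned here surface as the `features` array that
    # hasNewSidebarHierarchy reads in the sidebar component.
    enabled = []
    if features.has("organizations:performance-insights", organization, actor=user):
        enabled.append("performance-insights")
    return enabled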
|
63cb2b2cac340f5fb4885c4a950ff7a4e41a76ef
|
2024-05-20 21:03:02
|
Yagiz Nizipli
|
perf: replace json parser with orjson (#71155)
| false
|
replace json parser with orjson (#71155)
|
perf
|
diff --git a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
index 07bf87f9bf646b..9051f72ffd1bd3 100644
--- a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
+++ b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
@@ -1,5 +1,6 @@
from typing import Literal, TypedDict
+import orjson
import sentry_sdk
from django.db.models import QuerySet
from django.utils.encoding import force_bytes, force_str
@@ -33,7 +34,6 @@
ReleaseFile,
)
from sentry.sdk_updates import get_sdk_index
-from sentry.utils import json
from sentry.utils.javascript import find_sourcemap
from sentry.utils.safe import get_path
from sentry.utils.urls import non_standard_url_join
@@ -416,7 +416,7 @@ def _find_source_file_in_artifact_indexes(self):
self._get_dist_matched_artifact_index_release_file()
)
if dist_matched_artifact_index_release_file is not None:
- raw_data = json.load(dist_matched_artifact_index_release_file.file.getfile())
+ raw_data = orjson.loads(dist_matched_artifact_index_release_file.file.getfile().read())
files = raw_data.get("files")
for potential_source_file_name in self.matching_source_file_names:
matching_file = files.get(potential_source_file_name)
@@ -453,7 +453,7 @@ def _find_source_file_in_artifact_indexes(self):
return
for artifact_index_file in self._get_artifact_index_release_files():
- raw_data = json.load(artifact_index_file.file.getfile())
+ raw_data = orjson.loads(artifact_index_file.file.getfile().read())
files = raw_data.get("files")
for potential_source_file_name in self.matching_source_file_names:
if files.get(potential_source_file_name) is not None:
@@ -534,14 +534,14 @@ def _find_source_map_in_artifact_indexes(self, matching_source_map_name: str):
self._get_dist_matched_artifact_index_release_file()
)
if dist_matched_artifact_index_release_file is not None:
- raw_data = json.load(dist_matched_artifact_index_release_file.file.getfile())
+ raw_data = orjson.loads(dist_matched_artifact_index_release_file.file.getfile().read())
files = raw_data.get("files")
if files.get(matching_source_map_name) is not None:
self.source_map_lookup_result = "found"
return
for artifact_index_file in self._get_artifact_index_release_files():
- raw_data = json.load(artifact_index_file.file.getfile())
+ raw_data = orjson.loads(artifact_index_file.file.getfile().read())
files = raw_data.get("files")
if files.get(matching_source_map_name) is not None:
self.source_map_lookup_result = "wrong-dist"
diff --git a/src/sentry/backup/crypto.py b/src/sentry/backup/crypto.py
index f0ff04153533a6..5ea2a64eac2e93 100644
--- a/src/sentry/backup/crypto.py
+++ b/src/sentry/backup/crypto.py
@@ -14,7 +14,6 @@
from google.cloud.kms import KeyManagementServiceClient as KeyManagementServiceClient
from google_crc32c import value as crc32c
-from sentry.utils import json
from sentry.utils.env import gcp_project_id
@@ -94,7 +93,7 @@ def from_crypto_key_version(cls, crypto_key_version: CryptoKeyVersion) -> GCPKMS
def get_public_key_pem(self) -> bytes:
if self.crypto_key_version is None:
# Read the user supplied configuration into the proper format.
- gcp_kms_config_json = json.load(self.__fp)
+ gcp_kms_config_json = orjson.loads(self.__fp.read())
try:
self.crypto_key_version = CryptoKeyVersion(**gcp_kms_config_json)
except TypeError:
diff --git a/src/sentry/backup/imports.py b/src/sentry/backup/imports.py
index f6e53bf67f8307..2b2138619d5065 100644
--- a/src/sentry/backup/imports.py
+++ b/src/sentry/backup/imports.py
@@ -38,7 +38,6 @@
from sentry.services.hybrid_cloud.import_export.service import ImportExportService
from sentry.silo.base import SiloMode
from sentry.silo.safety import unguarded_write
-from sentry.utils import json
from sentry.utils.env import is_split_db
__all__ = (
@@ -289,7 +288,11 @@ def yield_json_models(content) -> Iterator[tuple[NormalizedModelName, str, int]]
batch = []
last_seen_model_name = model_name
if len(batch) >= MAX_BATCH_SIZE:
- yield (last_seen_model_name, json.dumps(batch), num_current_model_instances_yielded)
+ yield (
+ last_seen_model_name,
+ orjson.dumps(batch).decode(),
+ num_current_model_instances_yielded,
+ )
num_current_model_instances_yielded += len(batch)
batch = []
diff --git a/src/sentry/backup/sanitize.py b/src/sentry/backup/sanitize.py
index 7e5f4392a8ff38..388f79c4817361 100644
--- a/src/sentry/backup/sanitize.py
+++ b/src/sentry/backup/sanitize.py
@@ -9,12 +9,11 @@
from urllib.parse import urlparse, urlunparse
from uuid import UUID, uuid4
+import orjson
import petname
from dateutil.parser import parse as parse_datetime
from django.utils.text import slugify
-from sentry.utils import json
-
UPPER_CASE_HEX = {"A", "B", "C", "D", "E", "F"}
UPPER_CASE_NON_HEX = {
"H",
@@ -89,14 +88,14 @@ class SanitizableField:
model: NormalizedModelName
field: str
- def validate_json_model(self, json: Any) -> None:
+ def validate_json_model(self, obj: Any) -> None:
"""
Validates the JSON model is shaped the way we expect a serialized Django model to be,
and that we have the right kind of model for this `SanitizableField`. Raises errors if there
is a validation failure.
"""
- model_name = json.get("model", None)
+ model_name = obj.get("model", None)
if model_name is None:
raise InvalidJSONError(
"JSON is not properly formatted, must be a serialized Django model"
@@ -106,12 +105,12 @@ def validate_json_model(self, json: Any) -> None:
return None
-def _get_field_value(json: Any, field: SanitizableField) -> Any | None:
- return json.get("fields", {}).get(field.field, None)
+def _get_field_value(obj: Any, field: SanitizableField) -> Any | None:
+ return obj.get("fields", {}).get(field.field, None)
-def _set_field_value(json: Any, field: SanitizableField, value: Any) -> Any:
- json.get("fields", {})[field.field] = value
+def _set_field_value(obj: Any, field: SanitizableField, value: Any) -> Any:
+ obj.get("fields", {})[field.field] = value
return value
@@ -291,12 +290,12 @@ def map_json(self, old_json: Any, new_json: Any) -> Any:
`set_json()` is the preferred method for doing so.
"""
- old_serialized = json.dumps(old_json)
+ old_serialized = orjson.dumps(old_json).decode()
interned = self.interned_strings.get(old_serialized)
if interned is not None:
- return json.loads(interned)
+ return orjson.loads(interned)
- new_serialized = json.dumps(new_json)
+ new_serialized = orjson.dumps(new_json).decode()
self.interned_strings[old_serialized] = new_serialized
return new_json
diff --git a/src/sentry/buffer/redis.py b/src/sentry/buffer/redis.py
index 9d03dc03592af0..9526b136af3fa5 100644
--- a/src/sentry/buffer/redis.py
+++ b/src/sentry/buffer/redis.py
@@ -9,6 +9,7 @@
from time import time
from typing import Any, TypeVar
+import orjson
import rb
from django.utils.encoding import force_bytes, force_str
from rediscluster import RedisCluster
@@ -16,7 +17,7 @@
from sentry.buffer.base import Buffer
from sentry.db import models
from sentry.tasks.process_buffer import process_incr
-from sentry.utils import json, metrics
+from sentry.utils import metrics
from sentry.utils.hashlib import md5_text
from sentry.utils.imports import import_string
from sentry.utils.redis import (
@@ -47,7 +48,9 @@ def _validate_json_roundtrip(value: dict[str, Any], model: type[models.Model]) -
_last_validation_log = time()
try:
if (
- RedisBuffer._load_values(json.loads(json.dumps(RedisBuffer._dump_values(value))))
+ RedisBuffer._load_values(
+ orjson.loads(orjson.dumps(RedisBuffer._dump_values(value)))
+ )
!= value
):
logger.error("buffer.corrupted_value", extra={"value": value, "model": model})
@@ -250,10 +253,6 @@ def _execute_redis_operation(
def push_to_sorted_set(self, key: str, value: list[int] | int) -> None:
value_dict = {value: time()}
self._execute_redis_operation(key, RedisOperation.SORTED_SET_ADD, value_dict)
- logger.info(
- "redis_buffer.push_to_sorted_set",
- extra={"key_name": key, "value": json.dumps(value_dict)},
- )
def get_sorted_set(self, key: str, min: float, max: float) -> list[tuple[int, datetime]]:
redis_set = self._execute_redis_operation(
@@ -345,7 +344,7 @@ def incr(
_validate_json_roundtrip(filters, model)
if is_instance_redis_cluster(self.cluster, self.is_redis_cluster):
- pipe.hsetnx(key, "f", json.dumps(self._dump_values(filters)))
+ pipe.hsetnx(key, "f", orjson.dumps(self._dump_values(filters)).decode())
else:
pipe.hsetnx(key, "f", pickle.dumps(filters))
@@ -359,7 +358,7 @@ def incr(
_validate_json_roundtrip(extra, model)
for column, value in extra.items():
if is_instance_redis_cluster(self.cluster, self.is_redis_cluster):
- pipe.hset(key, "e+" + column, json.dumps(self._dump_value(value)))
+ pipe.hset(key, "e+" + column, orjson.dumps(self._dump_value(value)).decode())
else:
pipe.hset(key, "e+" + column, pickle.dumps(value))
@@ -472,7 +471,7 @@ def _process_single_incr(self, key: str) -> None:
model = import_string(force_str(values.pop("m")))
if values["f"].startswith(b"{" if not self.is_redis_cluster else "{"):
- filters = self._load_values(json.loads(force_str(values.pop("f"))))
+ filters = self._load_values(orjson.loads(force_str(values.pop("f"))))
else:
# TODO(dcramer): legacy pickle support - remove in Sentry 9.1
filters = pickle.loads(force_bytes(values.pop("f")))
@@ -485,7 +484,7 @@ def _process_single_incr(self, key: str) -> None:
incr_values[k[2:]] = int(v)
elif k.startswith("e+"):
if v.startswith(b"[" if not self.is_redis_cluster else "["):
- extra_values[k[2:]] = self._load_value(json.loads(force_str(v)))
+ extra_values[k[2:]] = self._load_value(orjson.loads(force_str(v)))
else:
# TODO(dcramer): legacy pickle support - remove in Sentry 9.1
extra_values[k[2:]] = pickle.loads(force_bytes(v))
diff --git a/src/sentry/cache/redis.py b/src/sentry/cache/redis.py
index b3b0adbbf60848..15d526cf3a3cba 100644
--- a/src/sentry/cache/redis.py
+++ b/src/sentry/cache/redis.py
@@ -1,4 +1,5 @@
-from sentry.utils import json
+import orjson
+
from sentry.utils.redis import get_cluster_from_options, get_cluster_routing_client, redis_clusters
from .base import BaseCache
@@ -25,7 +26,7 @@ def _client(self, *, raw: bool):
def set(self, key, value, timeout, version=None, raw=False):
key = self.make_key(key, version=version)
- v = json.dumps(value) if not raw else value
+ v = orjson.dumps(value).decode() if not raw else value
if len(v) > self.max_size:
raise ValueTooLarge(f"Cache key too large: {key!r} {len(v)!r}")
if timeout:
@@ -45,7 +46,7 @@ def get(self, key, version=None, raw=False):
key = self.make_key(key, version=version)
result = self._client(raw=raw).get(key)
if result is not None and not raw:
- result = json.loads(result)
+ result = orjson.loads(result)
self._mark_transaction("get")
diff --git a/src/sentry/charts/chartcuterie.py b/src/sentry/charts/chartcuterie.py
index 08763b0c7b108d..9e460987ebef83 100644
--- a/src/sentry/charts/chartcuterie.py
+++ b/src/sentry/charts/chartcuterie.py
@@ -3,6 +3,7 @@
from urllib.parse import urljoin
from uuid import uuid4
+import orjson
import requests
import sentry_sdk
from django.conf import settings
@@ -10,7 +11,6 @@
from sentry import options
from sentry.exceptions import InvalidConfiguration
from sentry.models.file import get_storage
-from sentry.utils import json
from sentry.utils.http import absolute_uri
from .base import ChartRenderer, logger
@@ -76,7 +76,7 @@ def generate_chart(
assert self.service_url is not None
resp = requests.post(
url=urljoin(self.service_url, "render"),
- data=json.dumps(payload),
+ data=orjson.dumps(payload),
headers={"Content-Type": "application/json"},
)
diff --git a/src/sentry/db/models/fields/array.py b/src/sentry/db/models/fields/array.py
index 838722d2407f7a..0ce47b20389735 100644
--- a/src/sentry/db/models/fields/array.py
+++ b/src/sentry/db/models/fields/array.py
@@ -1,9 +1,9 @@
import ast
+import orjson
from django.db import models
from sentry.db.models.utils import Creator
-from sentry.utils import json
# Adapted from django-pgfields
@@ -54,8 +54,8 @@ def to_python(self, value):
value = []
if isinstance(value, str):
try:
- value = json.loads(value)
- except json.JSONDecodeError:
+ value = orjson.loads(value)
+ except orjson.JSONDecodeError:
# This is to accommodate the erroneous exports pre 21.4.0
# See getsentry/sentry#23843 for more details
try:
diff --git a/src/sentry/db/models/fields/gzippeddict.py b/src/sentry/db/models/fields/gzippeddict.py
index bf620def21e5aa..b56fc5708bed43 100644
--- a/src/sentry/db/models/fields/gzippeddict.py
+++ b/src/sentry/db/models/fields/gzippeddict.py
@@ -3,10 +3,10 @@
import logging
import pickle
+import orjson
from django.db.models import TextField
from sentry.db.models.utils import Creator
-from sentry.utils import json
from sentry.utils.strings import decompress
__all__ = ("GzippedDictField",)
@@ -32,7 +32,7 @@ def to_python(self, value):
try:
if not value:
return {}
- return json.loads(value)
+ return orjson.loads(value)
except (ValueError, TypeError):
if isinstance(value, str) and value:
try:
@@ -47,7 +47,7 @@ def to_python(self, value):
def from_db_value(self, value, expression, connection):
return self.to_python(value)
- def get_prep_value(self, value):
+ def get_prep_value(self, value) -> str | None:
if not value and self.null:
# save ourselves some storage
return None
@@ -55,7 +55,7 @@ def get_prep_value(self, value):
value = value.decode("utf-8")
if value is None and self.null:
return None
- return json.dumps(value)
+ return orjson.dumps(value).decode()
def value_to_string(self, obj):
return self.get_prep_value(self.value_from_object(obj))
diff --git a/src/sentry/db/models/fields/jsonfield.py b/src/sentry/db/models/fields/jsonfield.py
index d0dea9b26f0579..bb45908c20eab8 100644
--- a/src/sentry/db/models/fields/jsonfield.py
+++ b/src/sentry/db/models/fields/jsonfield.py
@@ -25,13 +25,13 @@
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
+import orjson
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.lookups import Contains, Exact, IContains, IExact, In, Lookup
from django.utils.translation import gettext_lazy as _
from sentry.db.models.utils import Creator
-from sentry.utils import json
class JSONField(models.TextField):
@@ -83,8 +83,8 @@ def get_default(self):
if callable(default):
default = default()
if isinstance(default, str):
- return json.loads(default)
- return json.loads(json.dumps(default))
+ return orjson.loads(default)
+ return orjson.loads(orjson.dumps(default))
return super().get_default()
def get_internal_type(self):
@@ -101,7 +101,7 @@ def to_python(self, value):
if self.blank:
return ""
try:
- value = json.loads(value)
+ value = orjson.loads(value)
except ValueError:
msg = self.error_messages["invalid"] % value
raise ValidationError(msg)
@@ -111,12 +111,13 @@ def to_python(self, value):
def get_db_prep_value(self, value, connection=None, prepared=None):
return self.get_prep_value(value)
- def get_prep_value(self, value):
+ def get_prep_value(self, value) -> str | None:
if value is None:
if not self.null and self.blank:
return ""
return None
- return json.dumps(value)
+ # TODO(@anonrig): Remove support for non-string keys.
+ return orjson.dumps(value, option=orjson.OPT_NON_STR_KEYS).decode()
def value_to_string(self, obj):
return self.value_from_object(obj)
diff --git a/src/sentry/db/models/fields/node.py b/src/sentry/db/models/fields/node.py
index 3814f0a593f575..9214e9e1143143 100644
--- a/src/sentry/db/models/fields/node.py
+++ b/src/sentry/db/models/fields/node.py
@@ -7,12 +7,12 @@
from typing import Any
from uuid import uuid4
+import orjson
from django.db.models.signals import post_delete
from django.utils.functional import cached_property
from sentry import nodestore
from sentry.db.models.utils import Creator
-from sentry.utils import json
from sentry.utils.canonical import CANONICAL_TYPES, CanonicalKeyDict
from sentry.utils.strings import decompress
@@ -191,7 +191,7 @@ def to_python(self, value):
# with a dict.
if value and isinstance(value, str):
try:
- value = json.loads(value)
+ value = orjson.loads(value)
except (ValueError, TypeError):
try:
value = pickle.loads(decompress(value))
@@ -228,7 +228,7 @@ def to_python(self, value):
ref_func=self.ref_func,
)
- def get_prep_value(self, value):
+ def get_prep_value(self, value) -> str | None:
"""
Prepares the NodeData to be written in a Model.save() call.
@@ -244,4 +244,4 @@ def get_prep_value(self, value):
value.save()
- return json.dumps({"node_id": value.id})
+ return orjson.dumps({"node_id": value.id}).decode()
diff --git a/src/sentry/db/models/fields/picklefield.py b/src/sentry/db/models/fields/picklefield.py
index 0e5eda2d6347f6..cd1870314d17ab 100644
--- a/src/sentry/db/models/fields/picklefield.py
+++ b/src/sentry/db/models/fields/picklefield.py
@@ -1,5 +1,17 @@
+from typing import Any
+
+import orjson
+
import django_picklefield
-from sentry.utils import json
+
+
+# TODO(@anonrig): Remove support for `bytes` as a JSON value
+def _orjson_defaults(obj: Any) -> Any:
+ if isinstance(obj, bytes):
+ return obj.decode()
+ elif isinstance(obj, set):
+ return list(obj)
+ raise TypeError
class PickledObjectField(django_picklefield.PickledObjectField):
@@ -17,16 +29,19 @@ class PickledObjectField(django_picklefield.PickledObjectField):
def get_db_prep_value(self, value, *args, **kwargs):
if isinstance(value, bytes):
- value = value.decode("utf-8")
+ value = value.decode()
if value is None and self.null:
return None
- return json.dumps(value)
+ # TODO(@anonrig): Remove support for non-string keys.
+ return orjson.dumps(
+ value, option=orjson.OPT_NON_STR_KEYS, default=_orjson_defaults
+ ).decode()
def to_python(self, value):
if value is None:
return None
try:
- return json.loads(value)
+ return orjson.loads(value)
except (ValueError, TypeError):
from sentry.utils import metrics
diff --git a/src/sentry/eventstream/snuba.py b/src/sentry/eventstream/snuba.py
index 68ecc39c9260b0..a64196b23b2404 100644
--- a/src/sentry/eventstream/snuba.py
+++ b/src/sentry/eventstream/snuba.py
@@ -6,6 +6,7 @@
from typing import TYPE_CHECKING, Any
from uuid import uuid4
+import orjson
import urllib3
from sentry import quotas
@@ -418,7 +419,7 @@ def _send(
if event_type == EventStreamEventType.Generic:
entity = "search_issues"
- serialized_data = json.dumps(data)
+ serialized_data = orjson.dumps(data, option=orjson.OPT_UTC_Z).decode()
topic_mapping: Mapping[str, Topic] = {
"events": Topic.EVENTS,
@@ -427,7 +428,7 @@ def _send(
}
codec = get_topic_codec(topic_mapping[entity])
- codec.decode(serialized_data.encode("utf-8"), validate=True)
+ codec.decode(serialized_data.encode(), validate=True)
try:
resp = snuba._snuba_pool.urlopen(
@@ -438,7 +439,7 @@ def _send(
)
if resp.status != 200:
raise snuba.SnubaError(
- f"HTTP {resp.status} response from Snuba! {json.loads(resp.data)}"
+ f"HTTP {resp.status} response from Snuba! {orjson.loads(resp.data)}"
)
return None
except urllib3.exceptions.HTTPError as err:
diff --git a/src/sentry/interfaces/message.py b/src/sentry/interfaces/message.py
index 2f2d730a6f67a5..9aca7aa3fce95a 100644
--- a/src/sentry/interfaces/message.py
+++ b/src/sentry/interfaces/message.py
@@ -1,17 +1,19 @@
__all__ = ("Message",)
+from typing import Any
+
+import orjson
from sentry.interfaces.base import Interface
-from sentry.utils import json
from sentry.utils.json import prune_empty_keys
-def stringify(value):
+def stringify(value: Any) -> str | None:
if isinstance(value, str):
return value
if isinstance(value, (int, float, bool)):
- return json.dumps(value)
+ return orjson.dumps(value).decode()
return None
diff --git a/src/sentry/interfaces/security.py b/src/sentry/interfaces/security.py
index d637c032bb2af3..7c0994e6f337de 100644
--- a/src/sentry/interfaces/security.py
+++ b/src/sentry/interfaces/security.py
@@ -1,8 +1,8 @@
+import orjson
from django.utils.functional import cached_property
from sentry.interfaces.base import Interface
from sentry.security import csp
-from sentry.utils import json
from sentry.web.helpers import render_to_string
__all__ = ("Csp", "Hpkp", "ExpectCT", "ExpectStaple")
@@ -173,7 +173,7 @@ def to_python(cls, data, **kwargs):
return super().to_python(data, **kwargs)
def to_string(self, is_public=False, **kwargs):
- return json.dumps({"csp-report": self.get_api_context()})
+ return orjson.dumps({"csp-report": self.get_api_context()}).decode()
def to_email_html(self, event, **kwargs):
return render_to_string(
diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py
index 3519e184600f11..e4f942d13ad43a 100644
--- a/src/sentry/monitors/consumers/monitor_consumer.py
+++ b/src/sentry/monitors/consumers/monitor_consumer.py
@@ -10,6 +10,7 @@
from functools import partial
from typing import Literal
+import orjson
import sentry_sdk
from arroyo.backends.kafka.consumer import KafkaPayload
from arroyo.processing.strategies.abstract import ProcessingStrategy, ProcessingStrategyFactory
@@ -74,7 +75,7 @@
)
from sentry.monitors.validators import ConfigValidator, MonitorCheckInValidator
from sentry.types.actor import parse_and_validate_actor
-from sentry.utils import json, metrics
+from sentry.utils import metrics
from sentry.utils.dates import to_datetime
from sentry.utils.outcomes import Outcome, track_outcome
@@ -968,7 +969,7 @@ def process_batch(executor: ThreadPoolExecutor, message: Message[ValuesBatch[Kaf
ts=item.timestamp,
partition=item.partition.index,
message=wrapper,
- payload=json.loads(wrapper["payload"]),
+ payload=orjson.loads(wrapper["payload"]),
)
checkin_mapping[item.processing_key].append(item)
@@ -1015,7 +1016,7 @@ def process_single(message: Message[KafkaPayload | FilteredPayload]):
ts=ts,
partition=partition,
message=wrapper,
- payload=json.loads(wrapper["payload"]),
+ payload=orjson.loads(wrapper["payload"]),
)
process_checkin(item)
except Exception:
diff --git a/src/sentry/monitors/processing_errors/manager.py b/src/sentry/monitors/processing_errors/manager.py
index dc94b9807455b1..4b7ca60b35037d 100644
--- a/src/sentry/monitors/processing_errors/manager.py
+++ b/src/sentry/monitors/processing_errors/manager.py
@@ -5,6 +5,7 @@
from datetime import timedelta
from itertools import chain
+import orjson
from django.conf import settings
from redis.client import StrictRedis
from rediscluster import RedisCluster
@@ -14,7 +15,7 @@
from sentry.models.project import Project
from sentry.monitors.models import Monitor
from sentry.monitors.types import CheckinItem
-from sentry.utils import json, metrics, redis
+from sentry.utils import metrics, redis
from .errors import CheckinProcessingError, ProcessingErrorsException
@@ -79,7 +80,7 @@ def _get_for_entities(entity_identifiers: list[str]) -> list[CheckinProcessingEr
for error_identifier in chain(*pipeline.execute())
]
errors = [
- CheckinProcessingError.from_dict(json.loads(raw_error))
+ CheckinProcessingError.from_dict(orjson.loads(raw_error))
for raw_error in redis.mget(error_identifiers)
if raw_error is not None
]
@@ -98,7 +99,7 @@ def store_error(error: CheckinProcessingError, monitor: Monitor | None):
entity_identifier = _get_entity_identifier_from_error(error, monitor)
error_set_key = build_set_identifier(entity_identifier)
error_key = build_error_identifier(error.id)
- serialized_error = json.dumps(error.to_dict())
+ serialized_error = orjson.dumps(error.to_dict()).decode()
redis_client = _get_cluster()
pipeline = redis_client.pipeline(transaction=False)
pipeline.zadd(error_set_key, {error.id.hex: error.checkin.ts.timestamp()})
@@ -115,7 +116,7 @@ def delete_error(project: Project, uuid: uuid.UUID):
raw_error = redis.get(error_identifier)
if raw_error is None:
return
- error = CheckinProcessingError.from_dict(json.loads(raw_error))
+ error = CheckinProcessingError.from_dict(orjson.loads(raw_error))
if error.checkin.message["project_id"] != project.id:
# TODO: Better exception class
raise InvalidProjectError()
diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py
index 96e5cefa237c81..470a66b66c8368 100644
--- a/src/sentry/notifications/utils/participants.py
+++ b/src/sentry/notifications/utils/participants.py
@@ -5,6 +5,7 @@
from collections.abc import Iterable, Mapping, MutableMapping, Sequence
from typing import TYPE_CHECKING, Any
+import orjson
from django.db.models import Q
from sentry import features
@@ -35,7 +36,7 @@
from sentry.services.hybrid_cloud.user_option import get_option_from_list, user_option_service
from sentry.types.actor import Actor, ActorType
from sentry.types.integrations import ExternalProviders, get_provider_enum_from_string
-from sentry.utils import json, metrics
+from sentry.utils import metrics
from sentry.utils.committers import AuthorCommitsSerialized, get_serialized_event_file_committers
if TYPE_CHECKING:
@@ -613,10 +614,10 @@ def _get_recipients_by_provider(
"target_type": target_type,
"target_identifier": target_identifier,
"notification_uuid": notification_uuid,
- "teams": json.dumps([team.id for team in teams]),
- "teams_by_provider": json.dumps(teams_by_provider_dict),
- "users": json.dumps([user.id for user in users]),
- "users_by_provider": json.dumps(users_by_provider_dict),
+ "teams": orjson.dumps([team.id for team in teams]).decode(),
+ "teams_by_provider": orjson.dumps(teams_by_provider_dict).decode(),
+ "users": orjson.dumps([user.id for user in users]).decode(),
+ "users_by_provider": orjson.dumps(users_by_provider_dict).decode(),
}
logger.info("sentry.notifications.recipients_by_provider", extra=extra)
except Exception as e:
diff --git a/src/sentry/pipeline/views/base.py b/src/sentry/pipeline/views/base.py
index b5f0ae4e98b0bb..090081d5eaa31e 100644
--- a/src/sentry/pipeline/views/base.py
+++ b/src/sentry/pipeline/views/base.py
@@ -2,10 +2,10 @@
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any
+import orjson
from django.http.response import HttpResponseBase
from rest_framework.request import Request
-from sentry.utils import json
from sentry.web.frontend.base import BaseView
from sentry.web.helpers import render_to_response
@@ -35,5 +35,5 @@ def render_react_view(
return render_to_response(
template="sentry/bases/react_pipeline.html",
request=request,
- context={"pipelineName": pipeline_name, "props": json.dumps(props)},
+ context={"pipelineName": pipeline_name, "props": orjson.dumps(props).decode()},
)
diff --git a/src/sentry/plugins/base/response.py b/src/sentry/plugins/base/response.py
index 6b1d4c5b793b2c..28fa277c6098d2 100644
--- a/src/sentry/plugins/base/response.py
+++ b/src/sentry/plugins/base/response.py
@@ -1,10 +1,9 @@
__all__ = ("Response", "JSONResponse")
+import orjson
from django.http import HttpResponse
from django.template.context_processors import csrf
-from sentry.utils import json
-
class Response:
def __init__(self, template, context=None):
@@ -35,5 +34,5 @@ def __init__(self, context, status=200):
def respond(self, request, context=None):
return HttpResponse(
- json.dumps(self.context), content_type="application/json", status=self.status
+ orjson.dumps(self.context).decode(), content_type="application/json", status=self.status
)
diff --git a/tests/sentry/rules/processing/test_delayed_processing.py b/tests/sentry/rules/processing/test_delayed_processing.py
index 0be2aadc1b688d..67bc187171a8eb 100644
--- a/tests/sentry/rules/processing/test_delayed_processing.py
+++ b/tests/sentry/rules/processing/test_delayed_processing.py
@@ -3,6 +3,7 @@
from unittest.mock import patch
from uuid import uuid4
+import orjson
import pytest
from sentry.buffer.redis import RedisBuffer
@@ -17,7 +18,6 @@
from sentry.testutils.cases import APITestCase, PerformanceIssueTestCase, TestCase
from sentry.testutils.factories import DEFAULT_EVENT_DATA
from sentry.testutils.helpers.datetime import iso_format
-from sentry.utils import json
from tests.snuba.rules.conditions.test_event_frequency import BaseEventFrequencyPercentTest
pytestmark = pytest.mark.sentry_metrics
@@ -61,7 +61,7 @@ def create_event_frequency_condition(
return {"interval": interval, "id": condition_id, "value": value}
def push_to_hash(self, project_id, rule_id, group_id, event_id=None, occurrence_id=None):
- value = json.dumps({"event_id": event_id, "occurrence_id": occurrence_id})
+ value = orjson.dumps({"event_id": event_id, "occurrence_id": occurrence_id}).decode()
self.redis_buffer.push_to_hash(
model=Project,
filters={"project_id": project_id},
diff --git a/tests/sentry/rules/processing/test_processor.py b/tests/sentry/rules/processing/test_processor.py
index 7fcd9464be6288..cf855e889e9995 100644
--- a/tests/sentry/rules/processing/test_processor.py
+++ b/tests/sentry/rules/processing/test_processor.py
@@ -3,6 +3,7 @@
from unittest import mock
from unittest.mock import patch
+import orjson
from django.core.cache import cache
from django.db import DEFAULT_DB_ALIAS, connections
from django.test.utils import CaptureQueriesContext
@@ -25,7 +26,6 @@
from sentry.testutils.helpers import install_slack
from sentry.testutils.helpers.features import with_feature
from sentry.testutils.skips import requires_snuba
-from sentry.utils import json
from sentry.utils.safe import safe_execute
pytestmark = [requires_snuba]
@@ -142,9 +142,9 @@ def test_delayed_rule_match_any_slow_conditionss(self):
assert project_ids[0][0] == self.project.id
rulegroup_to_events = buffer.get_hash(model=Project, field={"project_id": self.project.id})
assert rulegroup_to_events == {
- f"{self.rule.id}:{self.group_event.group.id}": json.dumps(
+ f"{self.rule.id}:{self.group_event.group.id}": orjson.dumps(
{"event_id": self.group_event.event_id, "occurrence_id": None}
- )
+ ).decode()
}
@with_feature("organizations:process-slow-alerts")
@@ -186,9 +186,9 @@ def test_delayed_rule_match_any_slow_conditions_issue_platform(self):
assert project_ids[0][0] == self.project.id
rulegroup_to_events = buffer.get_hash(model=Project, field={"project_id": self.project.id})
assert rulegroup_to_events == {
- f"{self.rule.id}:{perf_event.group.id}": json.dumps(
+ f"{self.rule.id}:{perf_event.group.id}": orjson.dumps(
{"event_id": perf_event.event_id, "occurrence_id": perf_event.occurrence_id}
- )
+ ).decode()
}
@with_feature("organizations:process-slow-alerts")
@@ -223,9 +223,9 @@ def test_delayed_rule_match_any_slow_fast_conditions(self):
assert project_ids[0][0] == self.project.id
rulegroup_to_events = buffer.get_hash(model=Project, field={"project_id": self.project.id})
assert rulegroup_to_events == {
- f"{self.rule.id}:{self.group_event.group.id}": json.dumps(
+ f"{self.rule.id}:{self.group_event.group.id}": orjson.dumps(
{"event_id": self.group_event.event_id, "occurrence_id": None}
- )
+ ).decode()
}
@with_feature("organizations:process-slow-alerts")
@@ -311,9 +311,9 @@ def test_delayed_rule_match_all(self):
assert project_ids[0][0] == self.project.id
rulegroup_to_events = buffer.get_hash(model=Project, field={"project_id": self.project.id})
assert rulegroup_to_events == {
- f"{self.rule.id}:{self.group_event.group.id}": json.dumps(
+ f"{self.rule.id}:{self.group_event.group.id}": orjson.dumps(
{"event_id": self.group_event.event_id, "occurrence_id": None}
- )
+ ).decode()
}
def test_ignored_issue(self):
@@ -915,7 +915,7 @@ def test_slack_title_link_notification_uuid(self, mock_post):
mock_post.assert_called_once()
assert (
"notification_uuid"
- in json.loads(mock_post.call_args[1]["data"]["blocks"])[0]["text"]["text"]
+ in orjson.loads(mock_post.call_args[1]["data"]["blocks"])[0]["text"]["text"]
)
@patch("sentry.shared_integrations.client.base.BaseApiClient.post")
diff --git a/tests/sentry/web/frontend/test_auth_login.py b/tests/sentry/web/frontend/test_auth_login.py
index 27c9382f1fb62b..64de40065e95e0 100644
--- a/tests/sentry/web/frontend/test_auth_login.py
+++ b/tests/sentry/web/frontend/test_auth_login.py
@@ -4,6 +4,7 @@
from urllib.parse import quote as urlquote
from urllib.parse import urlencode
+import orjson
import pytest
from django.conf import settings
from django.test import override_settings
@@ -24,7 +25,6 @@
from sentry.testutils.helpers.features import with_feature
from sentry.testutils.hybrid_cloud import HybridCloudTestMixin
from sentry.testutils.silo import assume_test_silo_mode, control_silo_test
-from sentry.utils import json
# TODO(dcramer): need tests for SSO behavior and single org behavior
@@ -196,7 +196,7 @@ def test_registration_valid(self, mock_record):
resp.context["register_form"].errors if resp.status_code == 200 else None
)
frontend_events = {"event_name": "Sign Up"}
- marketing_query = urlencode({"frontend_events": json.dumps(frontend_events)})
+ marketing_query = urlencode({"frontend_events": orjson.dumps(frontend_events).decode()})
assert marketing_query in resp.headers["Location"]
user = User.objects.get(username="[email protected]")
diff --git a/tests/sentry/web/frontend/test_auth_organization_login.py b/tests/sentry/web/frontend/test_auth_organization_login.py
index 0c031987e7470d..17dc22a52a3a65 100644
--- a/tests/sentry/web/frontend/test_auth_organization_login.py
+++ b/tests/sentry/web/frontend/test_auth_organization_login.py
@@ -3,6 +3,7 @@
from urllib.parse import quote as urlquote
from urllib.parse import urlencode
+import orjson
from django.test import override_settings
from django.urls import reverse
@@ -20,7 +21,6 @@
from sentry.testutils.cases import AuthProviderTestCase
from sentry.testutils.helpers import with_feature
from sentry.testutils.silo import assume_test_silo_mode, control_silo_test
-from sentry.utils import json
# TODO(dcramer): this is an integration test and repeats tests from
@@ -85,7 +85,7 @@ def test_flow_as_anonymous(self):
assert resp.status_code == 200
frontend_events = {"event_name": "Sign Up", "event_label": "dummy"}
- marketing_query = urlencode({"frontend_events": json.dumps(frontend_events)})
+ marketing_query = urlencode({"frontend_events": orjson.dumps(frontend_events).decode()})
with self.settings(
TERMS_URL="https://example.com/terms", PRIVACY_URL="https://example.com/privacy"
@@ -288,7 +288,7 @@ def test_flow_as_unauthenticated_existing_matched_user_no_merge(self):
assert resp.context["login_form"]
frontend_events = {"event_name": "Sign Up", "event_label": "dummy"}
- marketing_query = urlencode({"frontend_events": json.dumps(frontend_events)})
+ marketing_query = urlencode({"frontend_events": orjson.dumps(frontend_events).decode()})
resp = self.client.post(path, {"op": "newuser"}, follow=True)
assert resp.redirect_chain == [
@@ -559,7 +559,7 @@ def test_flow_as_unauthenticated_existing_inactive_user_with_merge_and_existing_
assert resp.context["login_form"]
frontend_events = {"event_name": "Sign Up", "event_label": "dummy"}
- marketing_query = urlencode({"frontend_events": json.dumps(frontend_events)})
+ marketing_query = urlencode({"frontend_events": orjson.dumps(frontend_events).decode()})
resp = self.client.post(path, {"op": "newuser"}, follow=True)
assert resp.redirect_chain == [
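
The mechanical rule applied across this commit: orjson.dumps() returns bytes and orjson.loads() accepts bytes or str, so every call site that previously relied on sentry.utils.json returning or accepting str gains a .decode() or a .read(). A self-contained sketch of the equivalences; values are illustrative:

import orjson

payload = {"event_id": "abc123", "occurrence_id": None}

# json.dumps(payload) -> str  becomes:
as_str = orjson.dumps(payload).decode()

# json.loads(s) and json.load(fp) both become orjson.loads(...),
# reading file objects up front (fp is a hypothetical file object):
assert orjson.loads(as_str) == payload
# raw = orjson.loads(fp.read())

# orjson rejects non-string dict keys unless explicitly allowed,
# hence OPT_NON_STR_KEYS on JSONField and PickledObjectField above:
orjson.dumps({1: "one"}, option=orjson.OPT_NON_STR_KEYS)

# Types orjson cannot encode natively (bytes, set) go through a
# `default` hook, mirroring _orjson_defaults in picklefield.py:
def _defaults(obj):
    if isinstance(obj, bytes):
        return obj.decode()
    if isinstance(obj, set):
        return list(obj)
    raise TypeError

orjson.dumps({"tags": {"a", "b"}}, default=_defaults)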
|
eeef9209b5ab163835c879c784fc6662a790717e
|
2023-06-21 19:51:34
|
Francesco Novy
|
fix(style): Align form action spacing with panel item (#51348)
| false
|
Align form action spacing with panel item (#51348)
|
fix
|
diff --git a/static/app/components/forms/form.tsx b/static/app/components/forms/form.tsx
index 771eab21af747c..01380a9a82eb2a 100644
--- a/static/app/components/forms/form.tsx
+++ b/static/app/components/forms/form.tsx
@@ -273,7 +273,7 @@ const StyledFooter = styled('div')<{saveOnBlur?: boolean}>`
`
${Panel} & {
margin-top: 0;
- padding-right: 36px;
+ padding-right: ${space(2)}
}
/* Better padding with form inside of a modal */
|
7c9f4b5d08eb435ea552ade32e5fc0ac6d1bac42
|
2023-03-01 03:22:14
|
Alberto Leal
|
fix(hybrid-cloud): Uncache organization when queueing it for deletion (#45213)
| false
|
Uncache organization when queueing it for deletion (#45213)
|
fix
|
diff --git a/src/sentry/api/endpoints/organization_details.py b/src/sentry/api/endpoints/organization_details.py
index 0a0d3d35d71a67..01b023250fd870 100644
--- a/src/sentry/api/endpoints/organization_details.py
+++ b/src/sentry/api/endpoints/organization_details.py
@@ -598,6 +598,7 @@ def handle_delete(self, request: Request, organization):
transaction_id=schedule.guid,
)
organization.send_delete_confirmation(entry, ONE_DAY)
+ Organization.objects.uncache_object(organization.id)
context = serialize(
organization,
request.user,
diff --git a/tests/sentry/api/endpoints/test_organization_details.py b/tests/sentry/api/endpoints/test_organization_details.py
index 6d32f0447c6d56..7edec28595868d 100644
--- a/tests/sentry/api/endpoints/test_organization_details.py
+++ b/tests/sentry/api/endpoints/test_organization_details.py
@@ -819,6 +819,10 @@ def test_can_remove_as_owner(self):
# No owners should be remaining
assert len(owner_emails) == 0
+ # Ensure cache was flushed
+ org = Organization.objects.get_from_cache(slug=org.slug)
+ assert org.status == OrganizationStatus.PENDING_DELETION
+
def test_cannot_remove_as_admin(self):
org = self.create_organization(owner=self.user)
user = self.create_user(email="[email protected]", is_superuser=False)
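
The fix is a cache-invalidation one: Organization rows are served from a per-model cache, and the deletion path can update status without passing through the signal that would evict the cached copy, so get_from_cache() keeps returning the live row. A hedged sketch of the failure mode using the manager calls visible in this diff; the slug and the update path are illustrative:

from sentry.models import Organization, OrganizationStatus

org = Organization.objects.get_from_cache(slug="acme")  # warms the cache

# A QuerySet.update() bypasses save() signals, so nothing evicts
# the cached row when the status flips.
Organization.objects.filter(id=org.id).update(
    status=OrganizationStatus.PENDING_DELETION
)

# The explicit eviction added in handle_delete above:
Organization.objects.uncache_object(org.id)

refreshed = Organization.objects.get_from_cache(slug="acme")
assert refreshed.status == OrganizationStatus.PENDING_DELETION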
|
422ee35f3b77c7ed06db3dad2491b6e1f3f3854f
|
2024-09-26 23:21:05
|
Dominik Buszowiecki
|
fix(insights): fix view all button to link from perf landing to insights (#78221)
| false
|
fix view all button to link from perf landing to insights (#78221)
|
fix
|
diff --git a/static/app/views/insights/pages/useFilters.tsx b/static/app/views/insights/pages/useFilters.tsx
index 7f508e9919dc32..25fc3ac4a5b719 100644
--- a/static/app/views/insights/pages/useFilters.tsx
+++ b/static/app/views/insights/pages/useFilters.tsx
@@ -3,10 +3,10 @@ import type {ModuleName} from 'webpack-cli';
import {useLocation} from 'sentry/utils/useLocation';
import {useNavigate} from 'sentry/utils/useNavigate';
-import type {AI_LANDING_SUB_PATH} from 'sentry/views/insights/pages/aiLandingPage';
-import type {BACKEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/backendLandingPage';
-import type {FRONTEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/frontend/settings';
-import type {MOBILE_LANDING_SUB_PATH} from 'sentry/views/insights/pages/mobileLandingPage';
+import {AI_LANDING_SUB_PATH} from 'sentry/views/insights/pages/aiLandingPage';
+import {BACKEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/backendLandingPage';
+import {FRONTEND_LANDING_SUB_PATH} from 'sentry/views/insights/pages/frontend/settings';
+import {MOBILE_LANDING_SUB_PATH} from 'sentry/views/insights/pages/mobileLandingPage';
export type DomainView =
| typeof FRONTEND_LANDING_SUB_PATH
@@ -14,6 +14,13 @@ export type DomainView =
| typeof AI_LANDING_SUB_PATH
| typeof MOBILE_LANDING_SUB_PATH;
+const domainViews = [
+ FRONTEND_LANDING_SUB_PATH,
+ BACKEND_LANDING_SUB_PATH,
+ AI_LANDING_SUB_PATH,
+ MOBILE_LANDING_SUB_PATH,
+];
+
export type DomainViewFilters = {
isInDomainView?: boolean;
view?: DomainView;
@@ -30,6 +37,9 @@ export const useDomainViewFilters = () => {
const isInDomainView = indexOfPerformance !== -1;
const view = pathSegments[indexOfPerformance + 1] as DomainViewFilters['view'];
+ if (!domainViews.includes(view || '')) {
+ return {isInDomainView: false};
+ }
if (isInDomainView) {
return {
|
1666c88491682b25976456bded4c05fb04b51346
|
2024-02-08 03:05:22
|
Michelle Zhang
|
docs(replays): add API documentation for replay-selectors endpoint (#64713)
| false
|
add API documentation for replay-selectors endpoint (#64713)
|
docs
|
diff --git a/src/sentry/apidocs/examples/replay_examples.py b/src/sentry/apidocs/examples/replay_examples.py
index 15a0ca3c6319ab..ae8238dcb67e38 100644
--- a/src/sentry/apidocs/examples/replay_examples.py
+++ b/src/sentry/apidocs/examples/replay_examples.py
@@ -59,6 +59,32 @@ class ReplayExamples:
),
]
+ GET_SELECTORS = [
+ OpenApiExample(
+ "Retrieve a collection of selectors for an organization.",
+ value={
+ "data": [
+ {
+ "count_dead_clicks": 2,
+ "count_rage_clicks": 1,
+ "dom_element": "div#myid.class1.class2",
+ "element": {
+ "alt": "",
+ "aria_label": "",
+ "class": ["class1", "class2"],
+ "id": "myid",
+ "role": "",
+ "tag": "div",
+ "testid": "",
+ "title": "",
+ },
+ "project_id": "1",
+ }
+ ]
+ },
+ )
+ ]
+
GET_REPLAY_COUNTS = [
OpenApiExample(
"Query replay count by issue or transaction id",
diff --git a/src/sentry/replays/endpoints/organization_replay_selector_index.py b/src/sentry/replays/endpoints/organization_replay_selector_index.py
index b9142591da8636..0b5dcbbb0ed1e6 100644
--- a/src/sentry/replays/endpoints/organization_replay_selector_index.py
+++ b/src/sentry/replays/endpoints/organization_replay_selector_index.py
@@ -1,8 +1,9 @@
from __future__ import annotations
from datetime import datetime
-from typing import Any
+from typing import Any, TypedDict
+from drf_spectacular.utils import extend_schema
from rest_framework.exceptions import ParseError
from rest_framework.request import Request
from rest_framework.response import Response
@@ -28,6 +29,10 @@
from sentry.api.bases.organization import NoProjects, OrganizationEndpoint
from sentry.api.event_search import ParenExpression, SearchFilter, parse_search_query
from sentry.api.paginator import GenericOffsetPaginator
+from sentry.apidocs.constants import RESPONSE_BAD_REQUEST, RESPONSE_FORBIDDEN
+from sentry.apidocs.examples.replay_examples import ReplayExamples
+from sentry.apidocs.parameters import CursorQueryParam, GlobalParams, VisibilityParams
+from sentry.apidocs.utils import inline_sentry_response_serializer
from sentry.exceptions import InvalidSearchQuery
from sentry.models.organization import Organization
from sentry.replays.lib.new_query.conditions import IntegerScalar
@@ -39,12 +44,39 @@
from sentry.replays.validators import ReplaySelectorValidator
from sentry.utils.snuba import raw_snql_query
+ElementResponseType = TypedDict(
+ "ElementResponseType",
+ {
+ "alt": str,
+ "aria_label": str,
+ "class": list[str],
+ "id": str,
+ "role": str,
+ "tag": str,
+ "testid": str,
+ "title": str,
+ },
+)
+
+
+class ReplaySelectorResponseData(TypedDict, total=False):
+ count_dead_clicks: int
+ count_rage_clicks: int
+ dom_element: str
+ element: ElementResponseType
+ project_id: str
+
+
+class ReplaySelectorResponse(TypedDict):
+ data: list[ReplaySelectorResponseData]
+
@region_silo_endpoint
+@extend_schema(tags=["Replays"])
class OrganizationReplaySelectorIndexEndpoint(OrganizationEndpoint):
owner = ApiOwner.REPLAY
publish_status = {
- "GET": ApiPublishStatus.UNKNOWN,
+ "GET": ApiPublishStatus.PUBLIC,
}
def get_replay_filter_params(self, request, organization):
@@ -59,7 +91,25 @@ def get_replay_filter_params(self, request, organization):
return filter_params
@handled_snuba_exceptions
+ @extend_schema(
+ operation_id="List an Organization's Selectors",
+ parameters=[
+ GlobalParams.ORG_SLUG,
+ GlobalParams.ENVIRONMENT,
+ ReplaySelectorValidator,
+ CursorQueryParam,
+ VisibilityParams.PER_PAGE,
+ VisibilityParams.QUERY,
+ ],
+ responses={
+ 200: inline_sentry_response_serializer("ListSelectors", ReplaySelectorResponse),
+ 400: RESPONSE_BAD_REQUEST,
+ 403: RESPONSE_FORBIDDEN,
+ },
+ examples=ReplayExamples.GET_SELECTORS,
+ )
def get(self, request: Request, organization: Organization) -> Response:
+ """Return a list of selectors for a given organization."""
if not features.has("organizations:session-replay", organization, actor=request.user):
return Response(status=404)
try:
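
The publishing recipe here generalizes to other endpoints: describe the JSON body as TypedDicts, wrap them with inline_sentry_response_serializer so drf-spectacular can derive a schema, flip publish_status to PUBLIC, and attach operation metadata plus curated examples through extend_schema. A condensed sketch on a hypothetical endpoint, using only the helpers imported in this diff:

from typing import TypedDict

from drf_spectacular.utils import extend_schema

from sentry.api.bases.organization import OrganizationEndpoint
from sentry.apidocs.utils import inline_sentry_response_serializer

class SelectorCount(TypedDict):
    dom_element: str
    count_dead_clicks: int

class SelectorListResponse(TypedDict):
    data: list[SelectorCount]

@extend_schema(tags=["Replays"])
class OrganizationSelectorCountsEndpoint(OrganizationEndpoint):  # hypothetical
    @extend_schema(
        operation_id="List an Organization's Selector Counts",
        responses={
            200: inline_sentry_response_serializer(
                "ListSelectorCounts", SelectorListResponse
            ),
        },
    )
    def get(self, request, organization):
        """Return dead/rage click counts per selector."""
        ...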
|
2ff55f5f91551ef9939f6623dc49f83bc816d684
|
2023-08-01 00:11:59
|
nikkikapadia
|
fix(starfish): make analytics optional on detailPanel (#53733)
| false
|
make analytics optional on detailPanel (#53733)
|
fix
|
diff --git a/static/app/views/starfish/components/detailPanel.tsx b/static/app/views/starfish/components/detailPanel.tsx
index 60493b7c3b2f96..c31e7d64297be2 100644
--- a/static/app/views/starfish/components/detailPanel.tsx
+++ b/static/app/views/starfish/components/detailPanel.tsx
@@ -13,13 +13,14 @@ type DetailProps = {
children: React.ReactNode;
detailKey?: string;
onClose?: () => void;
+ onOpen?: () => void;
};
type DetailState = {
collapsed: boolean;
};
-export default function Detail({children, detailKey, onClose}: DetailProps) {
+export default function Detail({children, detailKey, onClose, onOpen}: DetailProps) {
const [state, setState] = useState<DetailState>({collapsed: true});
const escapeKeyPressed = useKeyPress('Escape');
@@ -51,7 +52,7 @@ export default function Detail({children, detailKey, onClose}: DetailProps) {
}, [escapeKeyPressed]);
return (
- <SlideOverPanel collapsed={state.collapsed} ref={panelRef}>
+ <SlideOverPanel collapsed={state.collapsed} ref={panelRef} onOpen={onOpen}>
<CloseButtonWrapper>
<CloseButton
priority="link"
diff --git a/static/app/views/starfish/components/slideOverPanel.tsx b/static/app/views/starfish/components/slideOverPanel.tsx
index 725264eb731a63..bae6f348fb779e 100644
--- a/static/app/views/starfish/components/slideOverPanel.tsx
+++ b/static/app/views/starfish/components/slideOverPanel.tsx
@@ -3,30 +3,25 @@ import isPropValid from '@emotion/is-prop-valid';
import styled from '@emotion/styled';
import {motion} from 'framer-motion';
-import {trackAnalytics} from 'sentry/utils/analytics';
-import {useLocation} from 'sentry/utils/useLocation';
-import useOrganization from 'sentry/utils/useOrganization';
-
const PANEL_WIDTH = '50vw';
type SlideOverPanelProps = {
children: React.ReactNode;
collapsed: boolean;
+ onOpen?: () => void;
};
export default forwardRef(SlideOverPanel);
function SlideOverPanel(
- {collapsed, children}: SlideOverPanelProps,
+ {collapsed, children, onOpen}: SlideOverPanelProps,
ref: ForwardedRef<HTMLDivElement>
) {
- const {query} = useLocation();
- const organization = useOrganization();
useEffect(() => {
- if (!collapsed) {
- trackAnalytics('starfish.panel.open', {organization});
+ if (!collapsed && onOpen) {
+ onOpen();
}
- }, [query, collapsed, organization]);
+ }, [collapsed, onOpen]);
return (
<_SlideOverPanel
ref={ref}
diff --git a/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx b/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx
index d5d01988cd31d4..2f83ba024fcc43 100644
--- a/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx
+++ b/static/app/views/starfish/views/spanSummaryPage/sampleList/index.tsx
@@ -2,10 +2,13 @@ import {useCallback, useState} from 'react';
import debounce from 'lodash/debounce';
import omit from 'lodash/omit';
+import {trackAnalytics} from 'sentry/utils/analytics';
import {
PageErrorAlert,
PageErrorProvider,
} from 'sentry/utils/performance/contexts/pageError';
+import {useLocation} from 'sentry/utils/useLocation';
+import useOrganization from 'sentry/utils/useOrganization';
import useRouter from 'sentry/utils/useRouter';
import DetailPanel from 'sentry/views/starfish/components/detailPanel';
import DurationChart from 'sentry/views/starfish/views/spanSummaryPage/sampleList/durationChart';
@@ -36,6 +39,9 @@ export function SampleList({groupId, transactionName, transactionMethod}: Props)
[]
);
+ const organization = useOrganization();
+ const {query} = useLocation();
+
return (
<PageErrorProvider>
<DetailPanel
@@ -46,6 +52,11 @@ export function SampleList({groupId, transactionName, transactionMethod}: Props)
query: omit(router.location.query, 'transaction', 'transactionMethod'),
});
}}
+ onOpen={useCallback(() => {
+ if (query.transaction) {
+ trackAnalytics('starfish.panel.open', {organization});
+ }
+ }, [organization, query.transaction])}
>
<h3>{`${transactionMethod} ${transactionName}`}</h3>
<PageErrorAlert />
|
b0049c133e96fb56fadf5f85e515019b45204e3e
|
2019-01-03 21:49:34
|
Billy Vong
|
feat(charts): Add World Map "chart" (#11330)
| false
|
Add World Map "chart" (#11330)
|
feat
|
diff --git a/src/sentry/static/sentry/app/components/charts/series/mapSeries.jsx b/src/sentry/static/sentry/app/components/charts/series/mapSeries.jsx
new file mode 100644
index 00000000000000..c912368756efa6
--- /dev/null
+++ b/src/sentry/static/sentry/app/components/charts/series/mapSeries.jsx
@@ -0,0 +1,14 @@
+import 'echarts/lib/chart/map';
+import 'echarts/lib/component/visualMap';
+import 'echarts/map/js/world';
+
+export default function MapSeries(props = {}) {
+ return {
+ roam: true,
+ itemStyle: {
+ emphasis: {label: {show: false}},
+ },
+ ...props,
+ type: 'map',
+ };
+}
diff --git a/src/sentry/static/sentry/app/components/charts/worldMapChart.jsx b/src/sentry/static/sentry/app/components/charts/worldMapChart.jsx
new file mode 100644
index 00000000000000..9bf8ad18f83c9c
--- /dev/null
+++ b/src/sentry/static/sentry/app/components/charts/worldMapChart.jsx
@@ -0,0 +1,79 @@
+import PropTypes from 'prop-types';
+import React from 'react';
+
+import BaseChart from './baseChart';
+import MapSeries from './series/mapSeries';
+
+export default class WorldMapChart extends React.Component {
+ static propTypes = {
+ ...BaseChart.propTypes,
+ seriesOptions: PropTypes.object,
+ };
+
+ constructor(props) {
+ super(props);
+ this.state = {
+ countryCodesMap: null,
+ };
+ }
+
+ async componentDidMount() {
+ const countryCodesMap = await import(/* webpackChunkName: "countryCodesMap" */ 'app/data/countryCodesMap');
+
+ // eslint-disable-next-line
+ this.setState({
+ countryCodesMap: countryCodesMap.default,
+ });
+ }
+
+ render() {
+ if (this.state.countryCodesMap === null) {
+ return null;
+ }
+
+ const {series, seriesOptions, ...props} = this.props;
+ const processedSeries = series.map(({seriesName, data, ...options}) => {
+ return MapSeries({
+ ...seriesOptions,
+ ...options,
+ mapType: 'world',
+ name: seriesName,
+ nameMap: this.state.countryCodesMap,
+ data,
+ });
+ });
+
+ return (
+ <BaseChart
+ options={{
+ visualMap: {
+ left: 'right',
+ min: 0,
+ max: 20,
+ inRange: {
+ color: [
+ '#313695',
+ '#4575b4',
+ '#74add1',
+ '#abd9e9',
+ '#e0f3f8',
+ '#ffffbf',
+ '#fee090',
+ '#fdae61',
+ '#f46d43',
+ '#d73027',
+ '#a50026',
+ ],
+ },
+ text: ['High', 'Low'],
+ calculable: true,
+ },
+ }}
+ {...props}
+ yAxis={null}
+ xAxis={null}
+ series={processedSeries}
+ />
+ );
+ }
+}
diff --git a/src/sentry/static/sentry/app/data/countryCodesMap.jsx b/src/sentry/static/sentry/app/data/countryCodesMap.jsx
new file mode 100644
index 00000000000000..87684f038a604d
--- /dev/null
+++ b/src/sentry/static/sentry/app/data/countryCodesMap.jsx
@@ -0,0 +1,254 @@
+const countryCodesMap = {
+ Bangladesh: 'BD',
+ Belgium: 'BE',
+ 'Burkina Faso': 'BF',
+ Bulgaria: 'BG',
+ 'Bosnia and Herzegovina': 'BA',
+ Barbados: 'BB',
+ 'Wallis and Futuna': 'WF',
+ 'Saint Barthelemy': 'BL',
+ Bermuda: 'BM',
+ Brunei: 'BN',
+ Bolivia: 'BO',
+ Bahrain: 'BH',
+ Burundi: 'BI',
+ Benin: 'BJ',
+ Bhutan: 'BT',
+ Jamaica: 'JM',
+ 'Bouvet Island': 'BV',
+ Botswana: 'BW',
+ Samoa: 'WS',
+ 'Bonaire, Saint Eustatius and Saba ': 'BQ',
+ Brazil: 'BR',
+ Bahamas: 'BS',
+ Jersey: 'JE',
+ Belarus: 'BY',
+ Belize: 'BZ',
+ Russia: 'RU',
+ Rwanda: 'RW',
+ Serbia: 'RS',
+ 'East Timor': 'TL',
+ Reunion: 'RE',
+ Turkmenistan: 'TM',
+ Tajikistan: 'TJ',
+ Romania: 'RO',
+ Tokelau: 'TK',
+ 'Guinea-Bissau': 'GW',
+ Guam: 'GU',
+ Guatemala: 'GT',
+ 'South Georgia and the South Sandwich Islands': 'GS',
+ Greece: 'GR',
+ 'Equatorial Guinea': 'GQ',
+ Guadeloupe: 'GP',
+ Japan: 'JP',
+ Guyana: 'GY',
+ Guernsey: 'GG',
+ 'French Guiana': 'GF',
+ Georgia: 'GE',
+ Grenada: 'GD',
+ 'United Kingdom': 'GB',
+ Gabon: 'GA',
+ 'El Salvador': 'SV',
+ Guinea: 'GN',
+ Gambia: 'GM',
+ Greenland: 'GL',
+ Gibraltar: 'GI',
+ Ghana: 'GH',
+ Oman: 'OM',
+ Tunisia: 'TN',
+ Jordan: 'JO',
+ Croatia: 'HR',
+ Haiti: 'HT',
+ Hungary: 'HU',
+ 'Hong Kong': 'HK',
+ Honduras: 'HN',
+ 'Heard Island and McDonald Islands': 'HM',
+ Venezuela: 'VE',
+ 'Puerto Rico': 'PR',
+ 'Palestinian Territory': 'PS',
+ Palau: 'PW',
+ Portugal: 'PT',
+ 'Svalbard and Jan Mayen': 'SJ',
+ Paraguay: 'PY',
+ Iraq: 'IQ',
+ Panama: 'PA',
+ 'French Polynesia': 'PF',
+ 'Papua New Guinea': 'PG',
+ Peru: 'PE',
+ Pakistan: 'PK',
+ Philippines: 'PH',
+ Pitcairn: 'PN',
+ Poland: 'PL',
+ 'Saint Pierre and Miquelon': 'PM',
+ Zambia: 'ZM',
+ 'Western Sahara': 'EH',
+ Estonia: 'EE',
+ Egypt: 'EG',
+ 'South Africa': 'ZA',
+ Ecuador: 'EC',
+ Italy: 'IT',
+ Vietnam: 'VN',
+ 'Solomon Islands': 'SB',
+ Ethiopia: 'ET',
+ Somalia: 'SO',
+ Zimbabwe: 'ZW',
+ 'Saudi Arabia': 'SA',
+ Spain: 'ES',
+ Eritrea: 'ER',
+ Montenegro: 'ME',
+ Moldova: 'MD',
+ Madagascar: 'MG',
+ 'Saint Martin': 'MF',
+ Morocco: 'MA',
+ Monaco: 'MC',
+ Uzbekistan: 'UZ',
+ Myanmar: 'MM',
+ Mali: 'ML',
+ Macao: 'MO',
+ Mongolia: 'MN',
+ 'Marshall Islands': 'MH',
+ Macedonia: 'MK',
+ Mauritius: 'MU',
+ Malta: 'MT',
+ Malawi: 'MW',
+ Maldives: 'MV',
+ Martinique: 'MQ',
+ 'Northern Mariana Islands': 'MP',
+ Montserrat: 'MS',
+ Mauritania: 'MR',
+ 'Isle of Man': 'IM',
+ Uganda: 'UG',
+ Tanzania: 'TZ',
+ Malaysia: 'MY',
+ Mexico: 'MX',
+ Israel: 'IL',
+ France: 'FR',
+ 'British Indian Ocean Territory': 'IO',
+ 'Saint Helena': 'SH',
+ Finland: 'FI',
+ Fiji: 'FJ',
+ 'Falkland Islands': 'FK',
+ Micronesia: 'FM',
+ 'Faroe Islands': 'FO',
+ Nicaragua: 'NI',
+ Netherlands: 'NL',
+ Norway: 'NO',
+ Namibia: 'NA',
+ Vanuatu: 'VU',
+ 'New Caledonia': 'NC',
+ Niger: 'NE',
+ 'Norfolk Island': 'NF',
+ Nigeria: 'NG',
+ 'New Zealand': 'NZ',
+ Nepal: 'NP',
+ Nauru: 'NR',
+ Niue: 'NU',
+ 'Cook Islands': 'CK',
+ Kosovo: 'XK',
+ 'Ivory Coast': 'CI',
+ Switzerland: 'CH',
+ Colombia: 'CO',
+ China: 'CN',
+ Cameroon: 'CM',
+ Chile: 'CL',
+ 'Cocos Islands': 'CC',
+ Canada: 'CA',
+ 'Republic of the Congo': 'CG',
+ 'Central African Republic': 'CF',
+ 'Democratic Republic of the Congo': 'CD',
+ 'Czech Republic': 'CZ',
+ Cyprus: 'CY',
+ 'Christmas Island': 'CX',
+ 'Costa Rica': 'CR',
+ Curacao: 'CW',
+ 'Cape Verde': 'CV',
+ Cuba: 'CU',
+ Swaziland: 'SZ',
+ Syria: 'SY',
+ 'Sint Maarten': 'SX',
+ Kyrgyzstan: 'KG',
+ Kenya: 'KE',
+ 'South Sudan': 'SS',
+ Suriname: 'SR',
+ Kiribati: 'KI',
+ Cambodia: 'KH',
+ 'Saint Kitts and Nevis': 'KN',
+ Comoros: 'KM',
+ 'Sao Tome and Principe': 'ST',
+ Slovakia: 'SK',
+ 'South Korea': 'KR',
+ Slovenia: 'SI',
+ 'North Korea': 'KP',
+ Kuwait: 'KW',
+ Senegal: 'SN',
+ 'San Marino': 'SM',
+ 'Sierra Leone': 'SL',
+ Seychelles: 'SC',
+ Kazakhstan: 'KZ',
+ 'Cayman Islands': 'KY',
+ Singapore: 'SG',
+ Sweden: 'SE',
+ Sudan: 'SD',
+ 'Dominican Republic': 'DO',
+ Dominica: 'DM',
+ Djibouti: 'DJ',
+ Denmark: 'DK',
+ 'British Virgin Islands': 'VG',
+ Germany: 'DE',
+ Yemen: 'YE',
+ Algeria: 'DZ',
+ 'United States': 'US',
+ Uruguay: 'UY',
+ Mayotte: 'YT',
+ 'United States Minor Outlying Islands': 'UM',
+ Lebanon: 'LB',
+ 'Saint Lucia': 'LC',
+ Laos: 'LA',
+ Tuvalu: 'TV',
+ Taiwan: 'TW',
+ 'Trinidad and Tobago': 'TT',
+ Turkey: 'TR',
+ 'Sri Lanka': 'LK',
+ Liechtenstein: 'LI',
+ Latvia: 'LV',
+ Tonga: 'TO',
+ Lithuania: 'LT',
+ Luxembourg: 'LU',
+ Liberia: 'LR',
+ Lesotho: 'LS',
+ Thailand: 'TH',
+ 'French Southern Territories': 'TF',
+ Togo: 'TG',
+ Chad: 'TD',
+ 'Turks and Caicos Islands': 'TC',
+ Libya: 'LY',
+ Vatican: 'VA',
+ 'Saint Vincent and the Grenadines': 'VC',
+ 'United Arab Emirates': 'AE',
+ Andorra: 'AD',
+ 'Antigua and Barbuda': 'AG',
+ Afghanistan: 'AF',
+ Anguilla: 'AI',
+ 'U.S. Virgin Islands': 'VI',
+ Iceland: 'IS',
+ Iran: 'IR',
+ Armenia: 'AM',
+ Albania: 'AL',
+ Angola: 'AO',
+ Antarctica: 'AQ',
+ 'American Samoa': 'AS',
+ Argentina: 'AR',
+ Australia: 'AU',
+ Austria: 'AT',
+ Aruba: 'AW',
+ India: 'IN',
+ 'Aland Islands': 'AX',
+ Azerbaijan: 'AZ',
+ Ireland: 'IE',
+ Indonesia: 'ID',
+ Ukraine: 'UA',
+ Qatar: 'QA',
+ Mozambique: 'MZ',
+};
+
+export default countryCodesMap;
diff --git a/src/sentry/static/sentry/app/sentryTypes.jsx b/src/sentry/static/sentry/app/sentryTypes.jsx
index c825842aff270b..3532c31c5d2d46 100644
--- a/src/sentry/static/sentry/app/sentryTypes.jsx
+++ b/src/sentry/static/sentry/app/sentryTypes.jsx
@@ -378,7 +378,7 @@ export const EChartsData = PropTypes.arrayOf(
);
export const EChartsSeriesUnit = PropTypes.shape({
- type: PropTypes.oneOf(['line', 'bar', 'pie']),
+ type: PropTypes.oneOf(['line', 'bar', 'pie', 'map']),
showSymbol: PropTypes.bool,
name: PropTypes.string,
data: EChartsData,
|
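Editor's note on the record above: a hedged usage sketch for the new `WorldMapChart`, assuming series data keyed by ISO country codes (the bundled `countryCodesMap` is passed as the echarts `nameMap` to reconcile those codes with the world map's region names). The consumer component and values below are illustrative, not from the commit:

```tsx
import React from 'react';

import WorldMapChart from 'app/components/charts/worldMapChart';

// Illustrative consumer: one map series whose data points name
// countries by ISO code, colored by the visualMap configured in
// WorldMapChart (0-20 range, low-to-high palette).
export default function EventsByCountry() {
  return (
    <WorldMapChart
      series={[
        {
          seriesName: 'Events by country',
          data: [
            {name: 'US', value: 18},
            {name: 'DE', value: 7},
            {name: 'BR', value: 3},
          ],
        },
      ]}
    />
  );
}
```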
12b1cef7a03caab757bc1da39d7f29f48f5fd6f6
|
2025-01-23 18:19:56
|
Vjeran Grozdanić
|
feat(tempest): relax permissions UI needed to add/delete tempest credentials (#83909)
| false
|
relax permissions UI needed to add/delete tempest credentials (#83909)
|
feat
|
diff --git a/static/app/views/settings/project/tempest/utils/access.tsx b/static/app/views/settings/project/tempest/utils/access.tsx
index a007f68a822475..7d8a7ca25300f7 100644
--- a/static/app/views/settings/project/tempest/utils/access.tsx
+++ b/static/app/views/settings/project/tempest/utils/access.tsx
@@ -4,7 +4,7 @@ import {hasEveryAccess} from 'sentry/components/acl/access';
import type {Scope} from 'sentry/types/core';
import useOrganization from 'sentry/utils/useOrganization';
-const TEMPEST_WRITE_ACCESS: Scope[] = ['org:admin'];
+const TEMPEST_WRITE_ACCESS: Scope[] = ['org:admin', 'project:admin', 'project:write'];
export function useHasTempestWriteAccess() {
const organization = useOrganization();
|