| hash (string, length 40) | date (date string, 2017-08-30 22:37:25 to 2025-03-22 03:08:47) | author (string, 173 classes) | commit_message (string, length 15 to 151) | is_merge (bool, 1 class) | masked_commit_message (string, length 6 to 126) | type (string, 17 classes) | git_diff (string, length 182 to 1.51M, nullable) |
|---|---|---|---|---|---|---|---|
f09eaeac19659e6f270fc5f6306d623c1e674a12
|
2024-03-28 05:03:42
|
Evan Purkhiser
|
fix(crons): Constrain monitors by (project_id, slug) (#67820)
| false
|
Constrain monitors by (project_id, slug) (#67820)
|
fix
|
diff --git a/fixtures/backup/model_dependencies/detailed.json b/fixtures/backup/model_dependencies/detailed.json
index 8b5d0ad402bdde..bac1277cecd0f0 100644
--- a/fixtures/backup/model_dependencies/detailed.json
+++ b/fixtures/backup/model_dependencies/detailed.json
@@ -3224,7 +3224,7 @@
"guid"
],
[
- "organization_id",
+ "project_id",
"slug"
]
]
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index b1262cfd061066..f1010b256913df 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -9,5 +9,5 @@ feedback: 0004_index_together
hybridcloud: 0015_apitokenreplica_hashed_token_index
nodestore: 0002_nodestore_no_dictfield
replays: 0004_index_together
-sentry: 0681_unpickle_authenticator_again
+sentry: 0682_monitors_constrain_to_project_id_slug
social_auth: 0002_default_auto_field
diff --git a/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py b/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py
new file mode 100644
index 00000000000000..0eff3de6dc9336
--- /dev/null
+++ b/src/sentry/migrations/0682_monitors_constrain_to_project_id_slug.py
@@ -0,0 +1,57 @@
+# Generated by Django 5.0.3 on 2024-03-27 20:02
+
+from django.db import migrations, models
+
+from sentry.new_migrations.migrations import CheckedMigration
+
+
+class Migration(CheckedMigration):
+ # This flag is used to mark that a migration shouldn't be automatically run in production. For
+ # the most part, this should only be used for operations where it's safe to run the migration
+ # after your code has deployed. So this should not be used for most operations that alter the
+ # schema of a table.
+ # Here are some things that make sense to mark as dangerous:
+ # - Large data migrations. Typically we want these to be run manually by ops so that they can
+ # be monitored and not block the deploy for a long period of time while they run.
+ # - Adding indexes to large tables. Since this can take a long time, we'd generally prefer to
+ # have ops run this and not block the deploy. Note that while adding an index is a schema
+ # change, it's completely safe to run the operation after the code has deployed.
+ is_dangerous = True
+
+ dependencies = [
+ ("sentry", "0681_unpickle_authenticator_again"),
+ ]
+
+ operations = [
+ migrations.SeparateDatabaseAndState(
+ database_operations=[
+ migrations.RunSQL(
+ [
+ 'CREATE UNIQUE INDEX CONCURRENTLY "sentry_monitor_project_id_slug_1f4d3dc3_uniq" ON "sentry_monitor" ("project_id", "slug");',
+ 'ALTER TABLE "sentry_monitor" ADD CONSTRAINT "sentry_monitor_project_id_slug_1f4d3dc3_uniq" UNIQUE USING INDEX "sentry_monitor_project_id_slug_1f4d3dc3_uniq";',
+ 'CREATE INDEX CONCURRENTLY "sentry_moni_organiz_a62466_idx" ON "sentry_monitor" ("organization_id", "slug");',
+ 'ALTER TABLE "sentry_monitor" DROP CONSTRAINT "sentry_monitor_organization_id_slug_c4ac3a42_uniq";',
+ ],
+ reverse_sql=[
+ 'CREATE UNIQUE INDEX CONCURRENTLY "sentry_monitor_organization_id_slug_c4ac3a42_uniq" ON "sentry_monitor" ("organization_id", "slug");',
+ 'ALTER TABLE "sentry_monitor" ADD CONSTRAINT "sentry_monitor_organization_id_slug_c4ac3a42_uniq" UNIQUE USING INDEX "sentry_monitor_organization_id_slug_c4ac3a42_uniq";',
+ 'ALTER TABLE "sentry_monitor" DROP CONSTRAINT "sentry_monitor_project_id_slug_1f4d3dc3_uniq";',
+ 'DROP INDEX CONCURRENTLY "sentry_moni_organiz_a62466_idx";',
+ ],
+ hints={"tables": ["sentry_monitor"]},
+ ),
+ ],
+ state_operations=[
+ migrations.AlterUniqueTogether(
+ name="monitor",
+ unique_together={("project_id", "slug")},
+ ),
+ migrations.AddIndex(
+ model_name="monitor",
+ index=models.Index(
+ fields=["organization_id", "slug"], name="sentry_moni_organiz_a62466_idx"
+ ),
+ ),
+ ],
+ ),
+ ]
diff --git a/src/sentry/monitors/models.py b/src/sentry/monitors/models.py
index 687e70ddd0efc8..faf1977d247126 100644
--- a/src/sentry/monitors/models.py
+++ b/src/sentry/monitors/models.py
@@ -257,7 +257,10 @@ class Monitor(Model):
class Meta:
app_label = "sentry"
db_table = "sentry_monitor"
- unique_together = (("organization_id", "slug"),)
+ unique_together = (("project_id", "slug"),)
+ indexes = [
+ models.Index(fields=["organization_id", "slug"]),
+ ]
__repr__ = sane_repr("guid", "project_id", "name")
diff --git a/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py b/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py
index 9696f2305f7d7a..253cd0e3ed13ab 100644
--- a/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py
+++ b/tests/sentry/migrations/test_0660_fix_cron_monitor_invalid_orgs.py
@@ -4,6 +4,7 @@
from sentry.testutils.cases import TestMigrations
[email protected]("Migration is no longer runnable. Retain until migration is removed.")
class RenamePrioritySortToTrendsTest(TestMigrations):
migrate_from = "0659_artifactbundleindex_cleanup"
migrate_to = "0660_fix_cron_monitor_invalid_orgs"
|
d6327f8928a6c96a4eae8f25f0ca736481a34728
|
2018-05-09 03:14:47
|
Lyn Nagara
|
fix(environments): Fix environment name formatting (#8345)
| false
|
Fix environment name formatting (#8345)
|
fix
|
diff --git a/src/sentry/static/sentry/app/views/projectEnvironments.jsx b/src/sentry/static/sentry/app/views/projectEnvironments.jsx
index a812241dadf347..3a29bf33d3ce33 100644
--- a/src/sentry/static/sentry/app/views/projectEnvironments.jsx
+++ b/src/sentry/static/sentry/app/views/projectEnvironments.jsx
@@ -225,7 +225,7 @@ const ProjectEnvironments = createReactClass({
displayName: t('All Environments'),
name: ALL_ENVIRONMENTS_KEY,
}}
- hideName
+ isSystemRow
isDefault={isAllEnvironmentsDefault}
shouldShowSetDefault={!isAllEnvironmentsDefault && !!project}
onSetAsDefault={this.handleSetAsDefault}
@@ -269,7 +269,7 @@ const ProjectEnvironments = createReactClass({
),
name: project.defaultEnvironment,
}}
- hideName
+ isSystemRow
isDefault
shouldShowSetDefault={false}
onSetAsDefault={this.handleSetAsDefault}
@@ -362,7 +362,7 @@ class EnvironmentRow extends React.Component {
environment: SentryTypes.Environment,
isDefault: PropTypes.bool,
isHidden: PropTypes.bool,
- hideName: PropTypes.bool,
+ isSystemRow: PropTypes.bool,
shouldShowSetDefault: PropTypes.bool,
shouldShowAction: PropTypes.bool,
actionText: PropTypes.string,
@@ -375,7 +375,7 @@ class EnvironmentRow extends React.Component {
environment,
shouldShowSetDefault,
shouldShowAction,
- hideName,
+ isSystemRow,
isDefault,
isHidden,
actionText,
@@ -384,8 +384,7 @@ class EnvironmentRow extends React.Component {
return (
<PanelItem align="center" justify="space-between">
<Flex align="center">
- {environment.displayName}{' '}
- {!hideName && environment.name && <code>{environment.name}</code>}
+ {isSystemRow ? environment.displayName : environment.name}
{isDefault && <Tag priority="success">{t('Default')}</Tag>}
</Flex>
<div>
diff --git a/tests/js/spec/views/__snapshots__/projectEnvironments.spec.jsx.snap b/tests/js/spec/views/__snapshots__/projectEnvironments.spec.jsx.snap
index 94bed844630dcf..02e6ddd0ae417f 100644
--- a/tests/js/spec/views/__snapshots__/projectEnvironments.spec.jsx.snap
+++ b/tests/js/spec/views/__snapshots__/projectEnvironments.spec.jsx.snap
@@ -426,8 +426,8 @@ exports[`ProjectEnvironments render active renders environment list and sets sta
"name": "__all_environments__",
}
}
- hideName={true}
isDefault={false}
+ isSystemRow={true}
name="__all_environments__"
onSetAsDefault={[Function]}
shouldShowSetDefault={true}
@@ -464,7 +464,6 @@ exports[`ProjectEnvironments render active renders environment list and sets sta
is={null}
>
All Environments
-
</div>
</Base>
</Flex>
@@ -592,11 +591,7 @@ exports[`ProjectEnvironments render active renders environment list and sets sta
className="css-5ipae5"
is={null}
>
- Production
-
- <code>
- production
- </code>
+ production
</div>
</Base>
</Flex>
@@ -791,11 +786,7 @@ exports[`ProjectEnvironments render active renders environment list and sets sta
className="css-5ipae5"
is={null}
>
- Staging
-
- <code>
- staging
- </code>
+ staging
<Tag
priority="success"
>
@@ -1363,11 +1354,7 @@ exports[`ProjectEnvironments render hidden renders environment list 1`] = `
className="css-5ipae5"
is={null}
>
- Zzz
-
- <code>
- zzz
- </code>
+ zzz
</div>
</Base>
</Flex>
|
0c9a82d5ab502dbf16f9953b54a0182730967b0e
|
2024-06-04 21:47:49
|
Ryan Albrecht
|
ref: Move statsPeriodToDays() into utils/duration/ (#71962)
| false
|
Move statsPeriodToDays() into utils/duration/ (#71962)
|
ref
|
diff --git a/static/app/utils/dates.tsx b/static/app/utils/dates.tsx
index 3384434a572000..3468ec7a86c651 100644
--- a/static/app/utils/dates.tsx
+++ b/static/app/utils/dates.tsx
@@ -1,7 +1,6 @@
import moment from 'moment';
import ConfigStore from 'sentry/stores/configStore';
-import type {DateString} from 'sentry/types/core';
import type {TableDataRow} from './discover/discoverQuery';
@@ -184,23 +183,6 @@ export function getStartOfPeriodAgo(
return getStartOfDay(getPeriodAgo(period, unit));
}
-export function statsPeriodToDays(
- statsPeriod?: string | null,
- start?: DateString,
- end?: DateString
-) {
- if (statsPeriod?.endsWith('d')) {
- return parseInt(statsPeriod.slice(0, -1), 10);
- }
- if (statsPeriod?.endsWith('h')) {
- return parseInt(statsPeriod.slice(0, -1), 10) / 24;
- }
- if (start && end) {
- return (new Date(end).getTime() - new Date(start).getTime()) / (24 * 60 * 60 * 1000);
- }
- return 0;
-}
-
/**
* Does the user prefer a 24 hour clock?
*/
diff --git a/static/app/utils/discover/eventView.tsx b/static/app/utils/discover/eventView.tsx
index 503835537ce599..02e657eced1627 100644
--- a/static/app/utils/discover/eventView.tsx
+++ b/static/app/utils/discover/eventView.tsx
@@ -41,6 +41,7 @@ import {
type SavedQueryDatasets,
TOP_N,
} from 'sentry/utils/discover/types';
+import {statsPeriodToDays} from 'sentry/utils/duration/statsPeriodToDays';
import {decodeList, decodeScalar, decodeSorts} from 'sentry/utils/queryString';
import {normalizeUrl} from 'sentry/utils/withDomainRequired';
import type {TableColumn, TableColumnSort} from 'sentry/views/discover/table/types';
@@ -49,7 +50,6 @@ import {decodeColumnOrder} from 'sentry/views/discover/utils';
import type {SpanOperationBreakdownFilter} from 'sentry/views/performance/transactionSummary/filter';
import type {EventsDisplayFilterName} from 'sentry/views/performance/transactionSummary/transactionEvents/utils';
-import {statsPeriodToDays} from '../dates';
import type {WebVital} from '../fields';
import {MutableSearch} from '../tokenizeSearch';
diff --git a/static/app/utils/duration/statsPeriodToDays.tsx b/static/app/utils/duration/statsPeriodToDays.tsx
new file mode 100644
index 00000000000000..092b3a62af8080
--- /dev/null
+++ b/static/app/utils/duration/statsPeriodToDays.tsx
@@ -0,0 +1,18 @@
+import type {DateString} from 'sentry/types/core';
+
+export function statsPeriodToDays(
+ statsPeriod?: string | null,
+ start?: DateString,
+ end?: DateString
+) {
+ if (statsPeriod?.endsWith('d')) {
+ return parseInt(statsPeriod.slice(0, -1), 10);
+ }
+ if (statsPeriod?.endsWith('h')) {
+ return parseInt(statsPeriod.slice(0, -1), 10) / 24;
+ }
+ if (start && end) {
+ return (new Date(end).getTime() - new Date(start).getTime()) / (24 * 60 * 60 * 1000);
+ }
+ return 0;
+}
diff --git a/static/app/utils/metrics/index.tsx b/static/app/utils/metrics/index.tsx
index 380706ad792706..62bee2ea5091e0 100644
--- a/static/app/utils/metrics/index.tsx
+++ b/static/app/utils/metrics/index.tsx
@@ -30,8 +30,8 @@ import type {
MRI,
UseCase,
} from 'sentry/types/metrics';
-import {statsPeriodToDays} from 'sentry/utils/dates';
import {isMeasurement} from 'sentry/utils/discover/fields';
+import {statsPeriodToDays} from 'sentry/utils/duration/statsPeriodToDays';
import {getMeasurements} from 'sentry/utils/measurements/measurements';
import {DEFAULT_AGGREGATES} from 'sentry/utils/metrics/constants';
import {formatMRI, formatMRIField, MRIToField, parseMRI} from 'sentry/utils/metrics/mri';
diff --git a/static/app/views/dashboards/datasetConfig/releases.tsx b/static/app/views/dashboards/datasetConfig/releases.tsx
index 6667e9dae28f99..5625c9c0c3324d 100644
--- a/static/app/views/dashboards/datasetConfig/releases.tsx
+++ b/static/app/views/dashboards/datasetConfig/releases.tsx
@@ -16,10 +16,10 @@ import type {
import {SessionField} from 'sentry/types';
import type {Series} from 'sentry/types/echarts';
import {defined} from 'sentry/utils';
-import {statsPeriodToDays} from 'sentry/utils/dates';
import type {TableData} from 'sentry/utils/discover/discoverQuery';
import {getFieldRenderer} from 'sentry/utils/discover/fieldRenderers';
import type {QueryFieldValue} from 'sentry/utils/discover/fields';
+import {statsPeriodToDays} from 'sentry/utils/duration/statsPeriodToDays';
import type {OnDemandControlContext} from 'sentry/utils/performance/contexts/onDemandControl';
import type {FieldValueOption} from 'sentry/views/discover/table/queryField';
import type {FieldValue} from 'sentry/views/discover/table/types';
diff --git a/static/app/views/metrics/createAlertModal.tsx b/static/app/views/metrics/createAlertModal.tsx
index fbdeface217139..ac0a11db281735 100644
--- a/static/app/views/metrics/createAlertModal.tsx
+++ b/static/app/views/metrics/createAlertModal.tsx
@@ -18,8 +18,8 @@ import {Tooltip} from 'sentry/components/tooltip';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {PageFilters, Project} from 'sentry/types';
-import {statsPeriodToDays} from 'sentry/utils/dates';
import {parsePeriodToHours} from 'sentry/utils/duration/parsePeriodToHours';
+import {statsPeriodToDays} from 'sentry/utils/duration/statsPeriodToDays';
import {
getFieldFromMetricsQuery as getAlertAggregate,
getMetricsInterval,
diff --git a/static/app/views/organizationStats/usageChart/index.tsx b/static/app/views/organizationStats/usageChart/index.tsx
index 4ee7f59324b5b7..3a893f0485830f 100644
--- a/static/app/views/organizationStats/usageChart/index.tsx
+++ b/static/app/views/organizationStats/usageChart/index.tsx
@@ -22,8 +22,8 @@ import {IconWarning} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {DataCategoryInfo, IntervalPeriod, SelectValue} from 'sentry/types/core';
-import {statsPeriodToDays} from 'sentry/utils/dates';
import {parsePeriodToHours} from 'sentry/utils/duration/parsePeriodToHours';
+import {statsPeriodToDays} from 'sentry/utils/duration/statsPeriodToDays';
import {hasCustomMetrics} from 'sentry/utils/metrics/features';
import commonTheme from 'sentry/utils/theme';
import useOrganization from 'sentry/utils/useOrganization';
diff --git a/static/app/views/performance/utils/index.tsx b/static/app/views/performance/utils/index.tsx
index 916702b4a79125..85d3e43bbdad4a 100644
--- a/static/app/views/performance/utils/index.tsx
+++ b/static/app/views/performance/utils/index.tsx
@@ -14,10 +14,10 @@ import type {
import {trackAnalytics} from 'sentry/utils/analytics';
import toArray from 'sentry/utils/array/toArray';
import {browserHistory} from 'sentry/utils/browserHistory';
-import {statsPeriodToDays} from 'sentry/utils/dates';
import type {EventData} from 'sentry/utils/discover/eventView';
import EventView from 'sentry/utils/discover/eventView';
import {TRACING_FIELDS} from 'sentry/utils/discover/fields';
+import {statsPeriodToDays} from 'sentry/utils/duration/statsPeriodToDays';
import getCurrentSentryReactRootSpan from 'sentry/utils/getCurrentSentryReactRootSpan';
import {useQuery} from 'sentry/utils/queryClient';
import {decodeScalar} from 'sentry/utils/queryString';
|
a5dbdcb1c6c6e3e2d11805a8640e1f3be0833927
|
2022-05-14 03:40:16
|
Shruthi
|
chore(dashboard): Default RH field to crash_free_rate (#34285)
| false
|
Default RH field to crash_free_rate (#34285)
|
chore
|
diff --git a/static/app/components/modals/addDashboardWidgetModal.tsx b/static/app/components/modals/addDashboardWidgetModal.tsx
index 1a1b196f825638..a1ec07d57eb11d 100644
--- a/static/app/components/modals/addDashboardWidgetModal.tsx
+++ b/static/app/components/modals/addDashboardWidgetModal.tsx
@@ -136,9 +136,9 @@ const newIssueQuery: WidgetQuery = {
const newMetricsQuery: WidgetQuery = {
name: '',
- fields: [`sum(${SessionField.SESSION})`],
+ fields: [`crash_free_rate(${SessionField.SESSION})`],
columns: [],
- aggregates: [`sum(${SessionField.SESSION})`],
+ aggregates: [`crash_free_rate(${SessionField.SESSION})`],
conditions: '',
orderby: '',
};
diff --git a/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx b/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx
index be78f47fd54f25..c6ae6779feeb59 100644
--- a/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx
+++ b/static/app/views/dashboardsV2/widgetBuilder/widgetBuilder.tsx
@@ -114,12 +114,12 @@ function getDataSetQuery(widgetBuilderNewDesign: boolean): Record<DataSet, Widge
},
[DataSet.RELEASES]: {
name: '',
- fields: [`sum(${SessionField.SESSION})`],
+ fields: [`crash_free_rate(${SessionField.SESSION})`],
columns: [],
fieldAliases: [],
- aggregates: [`sum(${SessionField.SESSION})`],
+ aggregates: [`crash_free_rate(${SessionField.SESSION})`],
conditions: '',
- orderby: widgetBuilderNewDesign ? `-sum(${SessionField.SESSION})` : '',
+ orderby: widgetBuilderNewDesign ? `-crash_free_rate(${SessionField.SESSION})` : '',
},
};
}
diff --git a/tests/js/spec/views/dashboardsV2/widgetBuilder/widgetBuilder.spec.tsx b/tests/js/spec/views/dashboardsV2/widgetBuilder/widgetBuilder.spec.tsx
index 0e70a9f3ac88ce..777c711bfdd19a 100644
--- a/tests/js/spec/views/dashboardsV2/widgetBuilder/widgetBuilder.spec.tsx
+++ b/tests/js/spec/views/dashboardsV2/widgetBuilder/widgetBuilder.spec.tsx
@@ -2544,10 +2544,10 @@ describe('WidgetBuilder', function () {
userEvent.click(screen.getByLabelText(/releases/i));
- expect(screen.getByText('sum(…)')).toBeInTheDocument();
+ expect(screen.getByText('crash_free_rate(…)')).toBeInTheDocument();
expect(screen.getByText('session')).toBeInTheDocument();
- userEvent.click(screen.getByText('sum(…)'));
+ userEvent.click(screen.getByText('crash_free_rate(…)'));
expect(screen.getByText('count_unique(…)')).toBeInTheDocument();
expect(screen.getByText('release')).toBeInTheDocument();
@@ -2569,8 +2569,8 @@ describe('WidgetBuilder', function () {
userEvent.click(screen.getByLabelText(/releases/i));
- expect(screen.getByText('sum(…)')).toBeInTheDocument();
- await selectEvent.select(screen.getByText('sum(…)'), 'count_unique(…)');
+ expect(screen.getByText('crash_free_rate(…)')).toBeInTheDocument();
+ await selectEvent.select(screen.getByText('crash_free_rate(…)'), 'count_unique(…)');
userEvent.click(screen.getByText('user'));
expect(screen.queryByText('release')).not.toBeInTheDocument();
@@ -2590,7 +2590,7 @@ describe('WidgetBuilder', function () {
userEvent.click(screen.getByLabelText(/releases/i));
expect(screen.getByText('High to low')).toBeEnabled();
- expect(screen.getByText('sum(session)')).toBeInTheDocument();
+ expect(screen.getByText('crash_free_rate(session)')).toBeInTheDocument();
userEvent.click(screen.getByLabelText('Add a Column'));
await selectEvent.select(screen.getByText('(Required)'), 'session.status');
@@ -2619,7 +2619,7 @@ describe('WidgetBuilder', function () {
expect.objectContaining({
query: expect.objectContaining({
environment: [],
- field: [`sum(sentry.sessions.session)`],
+ field: [`session.crash_free_rate`],
groupBy: [],
interval: '5m',
project: [],
@@ -2654,10 +2654,10 @@ describe('WidgetBuilder', function () {
expect.objectContaining({
query: expect.objectContaining({
environment: [],
- field: ['sum(sentry.sessions.session)'],
+ field: ['session.crash_free_rate'],
groupBy: ['project_id'],
interval: '5m',
- orderBy: '-sum(sentry.sessions.session)',
+ orderBy: '-session.crash_free_rate',
per_page: 5,
project: [],
statsPeriod: '24h',
@@ -2691,7 +2691,7 @@ describe('WidgetBuilder', function () {
expect.objectContaining({
query: expect.objectContaining({
environment: [],
- field: ['sum(session)'],
+ field: ['crash_free_rate(session)'],
groupBy: ['session.status'],
interval: '5m',
project: [],
@@ -2717,10 +2717,10 @@ describe('WidgetBuilder', function () {
userEvent.click(screen.getByText('Table'));
userEvent.click(screen.getByText('Line Chart'));
- expect(screen.getByText('sum(…)')).toBeInTheDocument();
+ expect(screen.getByText('crash_free_rate(…)')).toBeInTheDocument();
expect(screen.getByText(`session`)).toBeInTheDocument();
- userEvent.click(screen.getByText('sum(…)'));
+ userEvent.click(screen.getByText('crash_free_rate(…)'));
expect(screen.getByText('count_unique(…)')).toBeInTheDocument();
userEvent.click(screen.getByText('count_unique(…)'));
@@ -2744,9 +2744,9 @@ describe('WidgetBuilder', function () {
widgetType: WidgetType.RELEASE,
queries: [
expect.objectContaining({
- aggregates: [`sum(session)`],
- fields: [`sum(session)`],
- orderby: `-sum(session)`,
+ aggregates: [`crash_free_rate(session)`],
+ fields: [`crash_free_rate(session)`],
+ orderby: `-crash_free_rate(session)`,
}),
],
}),
|
2188a6db224f7ac1a8367edb76887da789eb585d
|
2023-05-03 01:00:42
|
Shruthi
|
feat(starfish): Remove refs to fromUnixTimestamp (#48354)
| false
|
Remove refs to fromUnixTimestamp (#48354)
|
feat
|
diff --git a/static/app/views/starfish/modules/databaseModule/databaseChartView.tsx b/static/app/views/starfish/modules/databaseModule/databaseChartView.tsx
index c508b974f111d3..c7911ecfc9314d 100644
--- a/static/app/views/starfish/modules/databaseModule/databaseChartView.tsx
+++ b/static/app/views/starfish/modules/databaseModule/databaseChartView.tsx
@@ -17,7 +17,7 @@ import {
getTopOperationsChart,
getTopTablesChart,
} from 'sentry/views/starfish/modules/databaseModule/queries';
-import {getDateFilters} from 'sentry/views/starfish/utils/dates';
+import {datetimeToClickhouseFilterTimestamps} from 'sentry/views/starfish/utils/dates';
import {zeroFillSeries} from 'sentry/views/starfish/utils/zeroFillSeries';
const INTERVAL = 12;
@@ -51,10 +51,12 @@ function parseOptions(options, label) {
export default function APIModuleView({action, table, onChange}: Props) {
const pageFilter = usePageFilters();
- const {startTime, endTime} = getDateFilters(pageFilter);
+ const {start_timestamp, end_timestamp} = datetimeToClickhouseFilterTimestamps(
+ pageFilter.selection.datetime
+ );
const DATE_FILTERS = `
- greater(start_timestamp, fromUnixTimestamp(${startTime.unix()})) and
- less(start_timestamp, fromUnixTimestamp(${endTime.unix()}))
+ ${start_timestamp ? `AND greaterOrEquals(start_timestamp, '${start_timestamp}')` : ''}
+ ${end_timestamp ? `AND lessOrEquals(start_timestamp, '${end_timestamp}')` : ''}
`;
const {data: operationData} = useQuery({
@@ -117,10 +119,20 @@ export default function APIModuleView({action, table, onChange}: Props) {
}
const topDomains = Object.values(seriesByDomain).map(series =>
- zeroFillSeries(series, moment.duration(INTERVAL, 'hours'), startTime, endTime)
+ zeroFillSeries(
+ series,
+ moment.duration(INTERVAL, 'hours'),
+ moment(start_timestamp),
+ moment(end_timestamp)
+ )
);
const tpmDomains = Object.values(tpmByDomain).map(series =>
- zeroFillSeries(series, moment.duration(INTERVAL, 'hours'), startTime, endTime)
+ zeroFillSeries(
+ series,
+ moment.duration(INTERVAL, 'hours'),
+ moment(start_timestamp),
+ moment(end_timestamp)
+ )
);
const tpmByQuery: {[query: string]: Series} = {};
@@ -151,10 +163,20 @@ export default function APIModuleView({action, table, onChange}: Props) {
}
const tpmData = Object.values(tpmByQuery).map(series =>
- zeroFillSeries(series, moment.duration(INTERVAL, 'hours'), startTime, endTime)
+ zeroFillSeries(
+ series,
+ moment.duration(INTERVAL, 'hours'),
+ moment(start_timestamp),
+ moment(end_timestamp)
+ )
);
const topData = Object.values(seriesByQuery).map(series =>
- zeroFillSeries(series, moment.duration(INTERVAL, 'hours'), startTime, endTime)
+ zeroFillSeries(
+ series,
+ moment.duration(INTERVAL, 'hours'),
+ moment(start_timestamp),
+ moment(end_timestamp)
+ )
);
return (
diff --git a/static/app/views/starfish/modules/databaseModule/index.tsx b/static/app/views/starfish/modules/databaseModule/index.tsx
index 355110134b5fa8..f0333cf352e4ad 100644
--- a/static/app/views/starfish/modules/databaseModule/index.tsx
+++ b/static/app/views/starfish/modules/databaseModule/index.tsx
@@ -64,10 +64,6 @@ function DatabaseModule() {
const pageFilter = usePageFilters();
const {startTime, endTime} = getDateFilters(pageFilter);
- const DATE_FILTERS = `
- greater(start_timestamp, fromUnixTimestamp(${startTime.unix()})) and
- less(start_timestamp, fromUnixTimestamp(${endTime.unix()}))
-`;
const transactionFilter =
transaction.length > 0 ? `transaction='${transaction}'` : null;
@@ -115,7 +111,6 @@ function DatabaseModule() {
fetch(
`${HOST}/?query=${getMainTable(
startTime,
- DATE_FILTERS,
endTime,
transactionFilter,
tableFilter,
diff --git a/static/app/views/starfish/modules/databaseModule/panel.tsx b/static/app/views/starfish/modules/databaseModule/panel.tsx
index 503cbb8e219134..c1821efaad0e21 100644
--- a/static/app/views/starfish/modules/databaseModule/panel.tsx
+++ b/static/app/views/starfish/modules/databaseModule/panel.tsx
@@ -25,7 +25,10 @@ import {
getPanelTableQuery,
useQueryTransactionByTPM,
} from 'sentry/views/starfish/modules/databaseModule/queries';
-import {getDateFilters} from 'sentry/views/starfish/utils/dates';
+import {
+ datetimeToClickhouseFilterTimestamps,
+ getDateFilters,
+} from 'sentry/views/starfish/utils/dates';
import {zeroFillSeries} from 'sentry/views/starfish/utils/zeroFillSeries';
import {DataRow} from './databaseTableView';
@@ -146,9 +149,12 @@ function QueryDetailBody({
const location = useLocation();
const pageFilter = usePageFilters();
const {startTime, endTime} = getDateFilters(pageFilter);
+ const {start_timestamp, end_timestamp} = datetimeToClickhouseFilterTimestamps(
+ pageFilter.selection.datetime
+ );
const DATE_FILTERS = `
- greater(start_timestamp, fromUnixTimestamp(${startTime.unix()})) and
- less(start_timestamp, fromUnixTimestamp(${endTime.unix()}))
+ ${start_timestamp ? `AND greaterOrEquals(start_timestamp, '${start_timestamp}')` : ''}
+ ${end_timestamp ? `AND lessOrEquals(start_timestamp, '${end_timestamp}')` : ''}
`;
const {isLoading: isP75GraphLoading, data: tpmTransactionGraphData} =
@@ -163,7 +169,12 @@ function QueryDetailBody({
queryKey: ['dbQueryDetailsGraph', row.group_id, pageFilter.selection.datetime],
queryFn: () =>
fetch(
- `${HOST}/?query=${getPanelGraphQuery(DATE_FILTERS, row, INTERVAL)}&format=sql`
+ `${HOST}/?query=${getPanelGraphQuery(
+ startTime,
+ endTime,
+ row,
+ INTERVAL
+ )}&format=sql`
).then(res => res.json()),
retry: false,
initialData: [],
@@ -181,7 +192,8 @@ function QueryDetailBody({
queryFn: () =>
fetch(
`${HOST}/?query=${getPanelTableQuery(
- DATE_FILTERS,
+ startTime,
+ endTime,
row,
sort.sortHeader?.key,
sort.direction
diff --git a/static/app/views/starfish/modules/databaseModule/queries.ts b/static/app/views/starfish/modules/databaseModule/queries.ts
index 73598ebbe7f941..0d546614209d63 100644
--- a/static/app/views/starfish/modules/databaseModule/queries.ts
+++ b/static/app/views/starfish/modules/databaseModule/queries.ts
@@ -1,10 +1,13 @@
-import {Moment} from 'moment';
+import {Moment, unix} from 'moment';
import {useQuery} from 'sentry/utils/queryClient';
import usePageFilters from 'sentry/utils/usePageFilters';
import {DataRow} from 'sentry/views/starfish/modules/databaseModule/databaseTableView';
import {HOST} from 'sentry/views/starfish/utils/constants';
-import {getDateFilters} from 'sentry/views/starfish/utils/dates';
+import {
+ datetimeToClickhouseFilterTimestamps,
+ getDateFilters,
+} from 'sentry/views/starfish/utils/dates';
const DEFAULT_WHERE = `
startsWith(span_operation, 'db') and
@@ -24,7 +27,7 @@ const getActionSubquery = (date_filters: string) => {
select action
from default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${date_filters}
group by action
order by ${ORDERBY}
@@ -37,7 +40,7 @@ const getDomainSubquery = (date_filters: string, action: string) => {
select domain
from default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${date_filters} and
domain != ''
${getActionQuery(action)}
@@ -52,34 +55,32 @@ const getActionQuery = (action: string) =>
const SEVEN_DAYS = 7 * 24 * 60 * 60;
-const getNewColumn = (
- duration: number,
- startTime: {unix: () => number},
- endTime: {unix: () => number}
-) =>
- duration > SEVEN_DAYS
+const getNewColumn = (duration: number, startTime: Moment, endTime: Moment) => {
+ const {start_timestamp, end_timestamp} = datetimeToClickhouseFilterTimestamps({
+ start: unix(startTime.unix() + duration / 10).format('YYYY-MM-DD HH:mm:ss'),
+ end: unix(endTime.unix() - duration / 10).format('YYYY-MM-DD HH:mm:ss'),
+ });
+
+ return duration > SEVEN_DAYS
? `(
- greater(min(start_timestamp), fromUnixTimestamp(${
- startTime.unix() + duration / 10
- })) and
- greater(max(start_timestamp), fromUnixTimestamp(${
- endTime.unix() - duration / 10
- }))
+ greater(min(start_timestamp), '${start_timestamp}') and
+ greater(max(start_timestamp), '${end_timestamp}')
) as newish`
: '0 as newish';
-const getRetiredColumn = (
- duration: number,
- startTime: {unix: () => number},
- endTime: {unix: () => number}
-) =>
- duration > SEVEN_DAYS
+};
+
+const getRetiredColumn = (duration: number, startTime: Moment, endTime: Moment) => {
+ const {start_timestamp, end_timestamp} = datetimeToClickhouseFilterTimestamps({
+ start: unix(startTime.unix() + duration / 10).format('YYYY-MM-DD HH:mm:ss'),
+ end: unix(endTime.unix() - duration / 10).format('YYYY-MM-DD HH:mm:ss'),
+ });
+ return duration > SEVEN_DAYS
? `(
- less(max(start_timestamp), fromUnixTimestamp(${
- endTime.unix() - duration / 10
- })) and
- less(min(start_timestamp), fromUnixTimestamp(${startTime.unix() + duration / 10}))
+ less(max(start_timestamp), '${end_timestamp}') and
+ less(min(start_timestamp), '${start_timestamp}')
) as retired`
: '0 as retired';
+};
export const getOperations = (date_filters: string) => {
return `
@@ -88,7 +89,7 @@ export const getOperations = (date_filters: string) => {
uniq(description) as value
from default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${date_filters}
group by action
order by ${ORDERBY}
@@ -102,7 +103,7 @@ export const getTables = (date_filters: string, action: string) => {
quantile(0.75)(exclusive_time) as value
from default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${date_filters}
${getActionQuery(action)}
group by domain
@@ -119,7 +120,7 @@ export const getTopOperationsChart = (date_filters: string, interval: number) =>
toStartOfInterval(start_timestamp, INTERVAL ${interval} hour) as interval
from default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${date_filters} and
action in (${getActionSubquery(date_filters)})
group by action, interval
@@ -140,7 +141,7 @@ export const getTopTablesChart = (
toStartOfInterval(start_timestamp, INTERVAL ${interval} hour) as interval
from default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${date_filters} and
domain in (${getDomainSubquery(date_filters, action)})
${getActionQuery(action)}
@@ -150,25 +151,9 @@ export const getTopTablesChart = (
};
export const getPanelTableQuery = (
- date_filters: string,
- row: {
- group_id: string;
- action?: string;
- count?: number;
- data_keys?: string[];
- data_values?: string[];
- description?: string;
- domain?: string;
- epm?: number;
- firstSeen?: string;
- formatted_desc?: string;
- lastSeen?: string;
- newish?: number;
- p75?: number;
- retired?: number;
- total_time?: number;
- transactions?: number;
- },
+ startTime: Moment,
+ endTime: Moment,
+ row: DataRow,
sortKey: string | undefined,
sortDirection: string | undefined
) => {
@@ -180,8 +165,8 @@ export const getPanelTableQuery = (
quantile(0.75)(exclusive_time) as p75
FROM spans_experimental_starfish
WHERE
- ${DEFAULT_WHERE} and
- ${date_filters} and
+ ${DEFAULT_WHERE}
+ ${getDateQueryFilter(startTime, endTime)} AND
group_id = '${row.group_id}'
GROUP BY transaction
ORDER BY ${orderBy}
@@ -201,7 +186,8 @@ const getOrderByFromKey = (
};
export const getPanelGraphQuery = (
- date_filters: string,
+ startTime: Moment,
+ endTime: Moment,
row: {
group_id: string;
action?: string;
@@ -222,6 +208,7 @@ export const getPanelGraphQuery = (
},
interval: number
) => {
+ const dateFilters = getDateQueryFilter(startTime, endTime);
return `
SELECT
toStartOfInterval(start_timestamp, INTERVAL ${interval} HOUR) as interval,
@@ -229,8 +216,8 @@ export const getPanelGraphQuery = (
count() as count
FROM spans_experimental_starfish
WHERE
- ${DEFAULT_WHERE} and
- ${date_filters} and
+ ${DEFAULT_WHERE}
+ ${dateFilters} AND
group_id = '${row.group_id}'
GROUP BY interval
ORDER BY interval
@@ -264,8 +251,8 @@ export const getPanelEventCount = (
count(DISTINCT transaction_id) as uniqueEvents
FROM spans_experimental_starfish
WHERE
- ${DEFAULT_WHERE} and
- ${date_filters} and
+ ${DEFAULT_WHERE}
+ ${date_filters} AND
group_id = '${row.group_id}'
GROUP BY transaction
ORDER BY ${ORDERBY}
@@ -274,7 +261,6 @@ export const getPanelEventCount = (
export const getMainTable = (
startTime: Moment,
- date_filters: string,
endTime: Moment,
transactionFilter: string | null,
tableFilter?: string,
@@ -284,13 +270,10 @@ export const getMainTable = (
newFilter?: string,
oldFilter?: string
) => {
- const filters = [
- DEFAULT_WHERE,
- date_filters,
- transactionFilter,
- tableFilter,
- actionFilter,
- ].filter(fil => !!fil);
+ const filters = [DEFAULT_WHERE, transactionFilter, tableFilter, actionFilter].filter(
+ fil => !!fil
+ );
+ const dateFilters = getDateQueryFilter(startTime, endTime);
const duration = endTime.unix() - startTime.unix();
const newColumn = getNewColumn(duration, startTime, endTime);
const retiredColumn = getRetiredColumn(duration, startTime, endTime);
@@ -316,7 +299,8 @@ export const getMainTable = (
${retiredColumn}
from default.spans_experimental_starfish
where
- ${filters.join(' and ')}
+ ${filters.join(' AND ')}
+ ${dateFilters}
group by
action,
description,
@@ -334,10 +318,7 @@ export const getMainTable = (
export const useQueryTransactionByTPM = (row: DataRow) => {
const pageFilter = usePageFilters();
const {startTime, endTime} = getDateFilters(pageFilter);
- const dateFilters = `
- greater(start_timestamp, fromUnixTimestamp(${startTime.unix()})) and
- less(start_timestamp, fromUnixTimestamp(${endTime.unix()}))
- `;
+ const dateFilters = getDateQueryFilter(startTime, endTime);
const queryFilter = `group_id = '${row.group_id}'`;
const query = `
@@ -347,7 +328,7 @@ export const useQueryTransactionByTPM = (row: DataRow) => {
toStartOfInterval(start_timestamp, INTERVAL ${INTERVAL} hour) as interval
FROM default.spans_experimental_starfish
where
- ${DEFAULT_WHERE} and
+ ${DEFAULT_WHERE}
${dateFilters} and
${queryFilter}
and transaction IN (
@@ -370,3 +351,14 @@ export const useQueryTransactionByTPM = (row: DataRow) => {
initialData: [],
});
};
+
+const getDateQueryFilter = (startTime: Moment, endTime: Moment) => {
+ const {start_timestamp, end_timestamp} = datetimeToClickhouseFilterTimestamps({
+ start: startTime.format('YYYY-MM-DD HH:mm:ss'),
+ end: endTime.format('YYYY-MM-DD HH:mm:ss'),
+ });
+ return `
+ ${start_timestamp ? `AND greaterOrEquals(start_timestamp, '${start_timestamp}')` : ''}
+ ${end_timestamp ? `AND lessOrEquals(start_timestamp, '${end_timestamp}')` : ''}
+ `;
+};
diff --git a/static/app/views/starfish/views/webServiceView/endpointDetails/index.tsx b/static/app/views/starfish/views/webServiceView/endpointDetails/index.tsx
index 4d77de3e0f71e7..41e248500760ef 100644
--- a/static/app/views/starfish/views/webServiceView/endpointDetails/index.tsx
+++ b/static/app/views/starfish/views/webServiceView/endpointDetails/index.tsx
@@ -142,10 +142,6 @@ function EndpointDetailBody({
`http.method:${row.httpOp}`,
]);
const {startTime, endTime} = getDateFilters(pageFilter);
- const DATE_FILTERS = `
- greater(start_timestamp, fromUnixTimestamp(${startTime.unix()})) and
- less(start_timestamp, fromUnixTimestamp(${endTime.unix()}))
-`;
const transactionFilter =
row.transaction.length > 0 ? `transaction='${row.transaction}'` : null;
@@ -157,12 +153,7 @@ function EndpointDetailBody({
queryKey: ['endpoints', pageFilter.selection.datetime, row.transaction],
queryFn: () =>
fetch(
- `${HOST}/?query=${getMainTable(
- startTime,
- DATE_FILTERS,
- endTime,
- transactionFilter
- )}&format=sql`
+ `${HOST}/?query=${getMainTable(startTime, endTime, transactionFilter)}&format=sql`
).then(res => res.json()),
retry: false,
initialData: [],
|
407f04ec400095d5d951f1d4c9781e4848f7a6b3
|
2024-11-16 03:35:08
|
Evan Purkhiser
|
ref(crons): Bump to latest sentry-kafka-schemas (#80860)
| false
|
Bump to latest sentry-kafka-schemas (#80860)
|
ref
|
diff --git a/requirements-base.txt b/requirements-base.txt
index 374b05123139dd..1673bc2cee876e 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -66,7 +66,7 @@ rfc3339-validator>=0.1.2
rfc3986-validator>=0.1.1
# [end] jsonschema format validators
sentry-arroyo>=2.16.5
-sentry-kafka-schemas>=0.1.118
+sentry-kafka-schemas>=0.1.119
sentry-ophio==1.0.0
sentry-protos>=0.1.34
sentry-redis-tools>=0.1.7
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index d20fc61b365c70..6ebcf6f6d47f77 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -185,7 +185,7 @@ sentry-cli==2.16.0
sentry-devenv==1.13.0
sentry-forked-django-stubs==5.1.1.post1
sentry-forked-djangorestframework-stubs==3.15.1.post2
-sentry-kafka-schemas==0.1.118
+sentry-kafka-schemas==0.1.119
sentry-ophio==1.0.0
sentry-protos==0.1.34
sentry-redis-tools==0.1.7
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 6e0859c5c805ce..049806d54b86b3 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -125,7 +125,7 @@ rpds-py==0.20.0
rsa==4.8
s3transfer==0.10.0
sentry-arroyo==2.16.5
-sentry-kafka-schemas==0.1.118
+sentry-kafka-schemas==0.1.119
sentry-ophio==1.0.0
sentry-protos==0.1.34
sentry-redis-tools==0.1.7
|
ec62a891122c515b442e8d0784a6bcb9bbc08293
|
2024-06-26 21:43:54
|
Scott Cooper
|
feat(u2f): Await bootstrap promises (#73315)
| false
|
Await bootstrap promises (#73315)
|
feat
|
diff --git a/static/app/components/modals/sudoModal.spec.tsx b/static/app/components/modals/sudoModal.spec.tsx
index 618e82ad73f6a6..e9106155ab24c3 100644
--- a/static/app/components/modals/sudoModal.spec.tsx
+++ b/static/app/components/modals/sudoModal.spec.tsx
@@ -4,6 +4,8 @@ import {initializeOrg} from 'sentry-test/initializeOrg';
import {render, screen, userEvent, waitFor} from 'sentry-test/reactTestingLibrary';
import ConfigStore from 'sentry/stores/configStore';
+import ModalStore from 'sentry/stores/modalStore';
+import OrganizationStore from 'sentry/stores/organizationStore';
import App from 'sentry/views/app';
describe('Sudo Modal', function () {
@@ -58,6 +60,8 @@ describe('Sudo Modal', function () {
url: '/authenticators/',
body: [],
});
+ ModalStore.reset();
+ OrganizationStore.reset();
});
it('can delete an org with sudo flow', async function () {
diff --git a/static/app/components/modals/sudoModal.tsx b/static/app/components/modals/sudoModal.tsx
index 9f3868040e9658..bda4528193b675 100644
--- a/static/app/components/modals/sudoModal.tsx
+++ b/static/app/components/modals/sudoModal.tsx
@@ -1,16 +1,15 @@
import {Fragment, useContext, useEffect, useState} from 'react';
-import type {WithRouterProps} from 'react-router';
import styled from '@emotion/styled';
import trimEnd from 'lodash/trimEnd';
import {logout} from 'sentry/actionCreators/account';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
-import type {Client} from 'sentry/api';
import {Alert} from 'sentry/components/alert';
import {Button} from 'sentry/components/button';
import SecretField from 'sentry/components/forms/fields/secretField';
import Form from 'sentry/components/forms/form';
import Hook from 'sentry/components/hook';
+import LoadingIndicator from 'sentry/components/loadingIndicator';
import U2fContainer from 'sentry/components/u2f/u2fContainer';
import {ErrorCodes} from 'sentry/constants/superuserAccessErrors';
import {t} from 'sentry/locale';
@@ -20,9 +19,6 @@ import type {Authenticator} from 'sentry/types/auth';
import useApi from 'sentry/utils/useApi';
import {useLocation} from 'sentry/utils/useLocation';
import useRouter from 'sentry/utils/useRouter';
-import withApi from 'sentry/utils/withApi';
-// eslint-disable-next-line no-restricted-imports
-import withSentryRouter from 'sentry/utils/withSentryRouter';
import {OrganizationLoaderContext} from 'sentry/views/organizationContext';
import TextBlock from 'sentry/views/settings/components/text/textBlock';
@@ -36,15 +32,14 @@ type State = {
authenticators: Array<Authenticator>;
error: boolean;
errorType: string;
+ isLoading: boolean;
showAccessForms: boolean;
superuserAccessCategory: string;
superuserReason: string;
};
-type Props = WithRouterProps &
- DefaultProps &
+type Props = DefaultProps &
Pick<ModalRenderProps, 'Body' | 'Header'> & {
- api: Client;
closeModal: () => void;
/**
* User is a superuser without an active su session
@@ -66,6 +61,8 @@ function SudoModal({
Body,
closeButton,
}: Props) {
+ const router = useRouter();
+ const api = useApi();
const [state, setState] = useState<State>({
authenticators: [] as Authenticator[],
error: false,
@@ -73,6 +70,7 @@ function SudoModal({
showAccessForms: true,
superuserAccessCategory: '',
superuserReason: '',
+ isLoading: true,
});
const {
@@ -84,29 +82,39 @@ function SudoModal({
superuserReason,
} = state;
- const {loadOrganization} = useContext(OrganizationLoaderContext) || {};
- const router = useRouter();
- const api = useApi();
+ const {organizationPromise} = useContext(OrganizationLoaderContext);
const location = useLocation();
useEffect(() => {
- const getAuthenticators = () => {
- if (!loadOrganization) return;
+ const getAuthenticators = async () => {
try {
- loadOrganization().finally(async () => {
- const fetchedAuthenticators = await api.requestPromise('/authenticators/');
- setState(prevState => ({
- ...prevState,
- authenticators: fetchedAuthenticators ?? [],
- }));
- });
+ // Await all preload requests
+ await Promise.allSettled([
+ organizationPromise,
+ ...Object.values(window.__sentry_preload),
+ ]);
} catch {
// ignore errors
}
+
+ // Fetch authenticators after preload requests to avoid overwriting session cookie
+ try {
+ const fetchedAuthenticators = await api.requestPromise('/authenticators/');
+ setState(prevState => ({
+ ...prevState,
+ authenticators: fetchedAuthenticators ?? [],
+ isLoading: false,
+ }));
+ } catch {
+ setState(prevState => ({
+ ...prevState,
+ isLoading: false,
+ }));
+ }
};
getAuthenticators();
- }, [api, loadOrganization]);
+ }, [api, organizationPromise]);
const handleSubmitCOPS = () => {
setState(prevState => ({
@@ -233,6 +241,10 @@ function SudoModal({
return null;
}
+ if (state.isLoading) {
+ return <LoadingIndicator />;
+ }
+
if (
(!user.hasPasswordAuth && authenticators.length === 0) ||
(isSuperuser && !isSelfHosted && validateSUForm)
@@ -343,8 +355,7 @@ function SudoModal({
);
}
-export default withSentryRouter(withApi(SudoModal));
-export {SudoModal};
+export default SudoModal;
const StyledTextBlock = styled(TextBlock)`
margin-bottom: ${space(1)};
diff --git a/static/app/views/organizationContext.tsx b/static/app/views/organizationContext.tsx
index f74b0d22186374..cd51b7221c4c6a 100644
--- a/static/app/views/organizationContext.tsx
+++ b/static/app/views/organizationContext.tsx
@@ -1,10 +1,10 @@
import {
createContext,
type ReactNode,
- useCallback,
useContext,
useEffect,
useRef,
+ useState,
} from 'react';
import {fetchOrganizationDetails} from 'sentry/actionCreators/organization';
@@ -24,7 +24,7 @@ import {useParams} from 'sentry/utils/useParams';
import {useRoutes} from 'sentry/utils/useRoutes';
interface OrganizationLoaderContextProps {
- loadOrganization: () => Promise<void>;
+ organizationPromise: Promise<unknown> | null;
}
interface Props {
@@ -39,22 +39,23 @@ export const OrganizationContext = createContext<Organization | null>(null);
/**
* Holds a function to load the organization.
*/
-export const OrganizationLoaderContext =
- createContext<OrganizationLoaderContextProps | null>(null);
+export const OrganizationLoaderContext = createContext<OrganizationLoaderContextProps>({
+ organizationPromise: null,
+});
/**
* Ensures that an organization is loaded when the hook is used. This will only
* be done on first render and if an organization is not already loaded.
*/
export function useEnsureOrganization() {
- const {loadOrganization} = useContext(OrganizationLoaderContext) || {};
+ const {organizationPromise} = useContext(OrganizationLoaderContext);
useEffect(() => {
async function fetchData() {
- await loadOrganization?.();
+ await organizationPromise;
}
fetchData();
- }, [loadOrganization]);
+ }, [organizationPromise]);
}
/**
@@ -71,6 +72,9 @@ export function OrganizationContextProvider({children}: Props) {
const {organizations} = useLegacyStore(OrganizationsStore);
const {organization, error} = useLegacyStore(OrganizationStore);
+ const [organizationPromise, setOrganizationPromise] = useState<Promise<unknown> | null>(
+ null
+ );
const lastOrganizationSlug: string | null =
configStore.lastOrganization ?? organizations[0]?.slug ?? null;
@@ -84,29 +88,21 @@ export function OrganizationContextProvider({children}: Props) {
? lastOrganizationSlug
: params.orgId || lastOrganizationSlug;
- const loadOrganization = useCallback(
- () =>
- new Promise<void>((resolve, reject) => {
- // Nothing to do if we already have the organization loaded
- if (organization && organization.slug === orgSlug) {
- resolve();
- return;
- }
-
- if (!orgSlug) {
- OrganizationStore.setNoOrganization();
- resolve();
- return;
- }
-
- metric.mark({name: 'organization-details-fetch-start'});
-
- fetchOrganizationDetails(api, orgSlug, false, true)
- .then(() => resolve())
- .catch(reject);
- }),
- [api, orgSlug, organization]
- );
+ useEffect(() => {
+ // Nothing to do if we already have the organization loaded
+ if (organization && organization.slug === orgSlug) {
+ return;
+ }
+
+ if (!orgSlug) {
+ OrganizationStore.setNoOrganization();
+ return;
+ }
+
+ metric.mark({name: 'organization-details-fetch-start'});
+
+ setOrganizationPromise(fetchOrganizationDetails(api, orgSlug, false, true));
+ }, [api, orgSlug, organization]);
// Take a measurement for when organization details are done loading and the
// new state is applied
@@ -183,7 +179,7 @@ export function OrganizationContextProvider({children}: Props) {
}, [orgSlug]);
return (
- <OrganizationLoaderContext.Provider value={{loadOrganization}}>
+ <OrganizationLoaderContext.Provider value={{organizationPromise}}>
<OrganizationContext.Provider value={organization}>
{children}
</OrganizationContext.Provider>
|
d7c3766248ebae5e105d0c0df1aef94b23e9498d
|
2022-01-17 12:44:18
|
Priscila Oliveira
|
ref(stack-trace-link): Update error handling (#31133)
| false
|
Update error handling (#31133)
|
ref
|
diff --git a/static/app/components/events/interfaces/frame/stacktraceLink.tsx b/static/app/components/events/interfaces/frame/stacktraceLink.tsx
index b7e1a709ed261b..e4062ebd0a856a 100644
--- a/static/app/components/events/interfaces/frame/stacktraceLink.tsx
+++ b/static/app/components/events/interfaces/frame/stacktraceLink.tsx
@@ -1,9 +1,9 @@
import * as React from 'react';
import styled from '@emotion/styled';
-import * as Sentry from '@sentry/react';
import {openModal} from 'sentry/actionCreators/modal';
import {promptsCheck, promptsUpdate} from 'sentry/actionCreators/prompts';
+import {ResponseMeta} from 'sentry/api';
import Access from 'sentry/components/acl/access';
import AsyncComponent from 'sentry/components/asyncComponent';
import {Body, Header, Hovercard} from 'sentry/components/hovercard';
@@ -19,6 +19,7 @@ import {
RepositoryProjectPathConfigWithIntegration,
} from 'sentry/types';
import {Event} from 'sentry/types/event';
+import handleXhrErrorResponse from 'sentry/utils/handleXhrErrorResponse';
import {
getIntegrationIcon,
trackIntegrationAnalytics,
@@ -145,11 +146,8 @@ class StacktraceLink extends AsyncComponent<Props, State> {
];
}
- onRequestError(error, args) {
- Sentry.withScope(scope => {
- scope.setExtra('errorInfo', args);
- Sentry.captureException(new Error(error));
- });
+ onRequestError(resp: ResponseMeta) {
+ handleXhrErrorResponse('Unable to fetch stack trace link')(resp);
}
getDefaultState(): State {
|
6c4532aff4264a78ef09b72202025f33a1b02936
|
2021-06-25 03:25:34
|
Dan Fuller
|
fix(semver): Fix package filtering for semver (#26848)
| false
|
Fix package filtering for semver (#26848)
|
fix
|
diff --git a/src/sentry/search/events/filter.py b/src/sentry/search/events/filter.py
index d42c7ca83a1f43..59ad9f9ae12143 100644
--- a/src/sentry/search/events/filter.py
+++ b/src/sentry/search/events/filter.py
@@ -433,7 +433,7 @@ def parse_semver(version, operator) -> Optional[SemverFilter]:
],
)
if parsed["package"] and parsed["package"] != SEMVER_FAKE_PACKAGE:
- semver_filter.package = parsed.package
+ semver_filter.package = parsed["package"]
return semver_filter
else:
# Try to parse as a wildcard match
@@ -449,7 +449,8 @@ def parse_semver(version, operator) -> Optional[SemverFilter]:
except ValueError:
raise InvalidSearchQuery(f"Invalid format for semver query {version}")
- return SemverFilter("exact", version_parts)
+ package = package if package and package != SEMVER_FAKE_PACKAGE else None
+ return SemverFilter("exact", version_parts, package)
key_conversion_map: Mapping[
diff --git a/tests/sentry/search/events/test_filter.py b/tests/sentry/search/events/test_filter.py
index d65f939822755c..8f853326a6b920 100644
--- a/tests/sentry/search/events/test_filter.py
+++ b/tests/sentry/search/events/test_filter.py
@@ -1555,6 +1555,14 @@ def test_wildcard(self):
self.run_test("=", "1.2.3.4", "IN", [release_4.version])
self.run_test("=", "2.*", "IN", [release_5.version])
+ def test_multi_packagae(self):
+ release_1 = self.create_release(version="[email protected]")
+ release_2 = self.create_release(version="[email protected]")
+ release_3 = self.create_release(version="[email protected]")
+ self.run_test("=", "test@1.*", "IN", [release_1.version, release_2.version])
+ self.run_test(">=", "[email protected]", "IN", [release_1.version, release_2.version])
+ self.run_test(">", "[email protected]", "IN", [release_3.version])
+
class ParseSemverTest(unittest.TestCase):
def run_test(self, version: str, operator: str, expected: SemverFilter):
@@ -1574,9 +1582,11 @@ def test_normal(self):
self.run_test("1.2.3.4", ">", SemverFilter("gt", [1, 2, 3, 4, 1, ""]))
self.run_test("1.2.3-hi", ">", SemverFilter("gt", [1, 2, 3, 0, 0, "hi"]))
self.run_test("1.2.3-hi", "<", SemverFilter("lt", [1, 2, 3, 0, 0, "hi"]))
+ self.run_test("[email protected]", "<", SemverFilter("lt", [1, 2, 3, 0, 0, "hi"], "sentry"))
def test_wildcard(self):
self.run_test("1.*", "=", SemverFilter("exact", [1]))
self.run_test("1.2.*", "=", SemverFilter("exact", [1, 2]))
self.run_test("1.2.3.*", "=", SemverFilter("exact", [1, 2, 3]))
+ self.run_test("[email protected].*", "=", SemverFilter("exact", [1, 2, 3], "sentry"))
self.run_test("1.X", "=", SemverFilter("exact", [1]))
|
26229b7de55a3dc8077b220bf9b6631d61f17b00
|
2022-05-23 13:10:58
|
josh
|
fix(ci): various fixes (#34906)
| false
|
various fixes (#34906)
|
fix
|
diff --git a/Makefile b/Makefile
index 9e39df79b27876..16d3dd26ddd5bb 100644
--- a/Makefile
+++ b/Makefile
@@ -125,12 +125,16 @@ test-python:
test-python-ci:
make build-platform-assets
@echo "--> Running CI Python tests"
- pytest tests/integration tests/sentry --cov . --cov-report="xml:.artifacts/python.coverage.xml" --junit-xml=".artifacts/python.junit.xml" || exit 1
+ pytest tests/integration tests/sentry \
+ --ignore tests/sentry/eventstream/kafka \
+ --ignore tests/sentry/snuba \
+ --ignore tests/sentry/search/events \
+ --cov . --cov-report="xml:.artifacts/python.coverage.xml" --junit-xml=".artifacts/python.junit.xml" || exit 1
@echo ""
test-snuba:
@echo "--> Running snuba tests"
- pytest tests/snuba tests/sentry/eventstream/kafka tests/sentry/snuba/test_discover.py tests/sentry/search/events -vv --cov . --cov-report="xml:.artifacts/snuba.coverage.xml" --junit-xml=".artifacts/snuba.junit.xml"
+ pytest tests/snuba tests/sentry/eventstream/kafka tests/sentry/snuba tests/sentry/search/events -vv --cov . --cov-report="xml:.artifacts/snuba.coverage.xml" --junit-xml=".artifacts/snuba.junit.xml"
@echo ""
test-tools:
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index e270da253aadb8..089f77bc2e12f6 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -1941,7 +1941,7 @@ def build_cdc_postgres_init_db_volume(settings):
),
"snuba": lambda settings, options: (
{
- "image": "getsentry/snuba:nightly" if not APPLE_ARM64
+ "image": "getsentry/snuba:f063336085e7be0ccbdb52791aaf97882ba7a26f" if not APPLE_ARM64
# We cross-build arm64 images on GH's Apple Intel runners
else "ghcr.io/getsentry/snuba-arm64-dev:latest",
"pull": True,
diff --git a/tests/sentry/eventstream/kafka/test_consumer.py b/tests/sentry/eventstream/kafka/test_consumer.py
index f08a112c2ed966..7298833d21f648 100644
--- a/tests/sentry/eventstream/kafka/test_consumer.py
+++ b/tests/sentry/eventstream/kafka/test_consumer.py
@@ -38,7 +38,7 @@ def create_topic(partitions=1, replication_factor=1):
topic = f"test-{uuid.uuid1().hex}"
subprocess.check_call(
command
- + (
+ + [
"--create",
"--topic",
topic,
@@ -46,12 +46,12 @@ def create_topic(partitions=1, replication_factor=1):
f"{partitions}",
"--replication-factor",
f"{replication_factor}",
- )
+ ]
)
try:
yield topic
finally:
- subprocess.check_call(command + ("--delete", "--topic", topic))
+ subprocess.check_call(command + ["--delete", "--topic", topic])
def test_consumer_start_from_partition_start(requires_kafka):
|
ca6a8236446f560252801db453c11de2966dce41
|
2020-07-13 19:26:28
|
Priscila Oliveira
|
fix(openInContextLine): Replace link component with externalLink (#19824)
| false
|
Replace link component with externalLink (#19824)
|
fix
|
diff --git a/src/sentry/static/sentry/app/components/events/interfaces/openInContextLine.tsx b/src/sentry/static/sentry/app/components/events/interfaces/openInContextLine.tsx
index d877724cb2be5c..760f9b7a36ce46 100644
--- a/src/sentry/static/sentry/app/components/events/interfaces/openInContextLine.tsx
+++ b/src/sentry/static/sentry/app/components/events/interfaces/openInContextLine.tsx
@@ -6,7 +6,7 @@ import {addQueryParamsToExistingUrl} from 'app/utils/queryString';
import space from 'app/styles/space';
import {t} from 'app/locale';
import {recordInteraction} from 'app/utils/recordSentryAppInteraction';
-import Link from 'app/components/links/link';
+import ExternalLink from 'app/components/links/externalLink';
import {SentryAppComponent} from 'app/types';
type Props = {
@@ -39,9 +39,10 @@ const OpenInContextLine = ({lineNo, filename, components}: Props) => {
<OpenInLink
key={component.uuid}
data-test-id={`stacktrace-link-${slug}`}
- to={url}
+ href={url}
onClick={onClickRecordInteraction}
onContextMenu={onClickRecordInteraction}
+ openInNewTab
>
<SentryAppIcon slug={slug} />
<OpenInName>{t(`${component.sentryApp.name}`)}</OpenInName>
@@ -71,7 +72,7 @@ const OpenInContainer = styled('div')<{columnQuantity: number}>`
white-space: nowrap;
`;
-const OpenInLink = styled(Link)`
+const OpenInLink = styled(ExternalLink)`
display: inline-grid;
align-items: center;
grid-template-columns: max-content auto;
diff --git a/tests/js/spec/components/events/interfaces/openInContextLine.spec.jsx b/tests/js/spec/components/events/interfaces/openInContextLine.spec.jsx
index e3e54f13334398..a8ba169cb91be1 100644
--- a/tests/js/spec/components/events/interfaces/openInContextLine.spec.jsx
+++ b/tests/js/spec/components/events/interfaces/openInContextLine.spec.jsx
@@ -63,7 +63,7 @@ describe('OpenInContextLine', function() {
const stacktraceLinkFoo = wrapper.find(
'OpenInLink[data-test-id="stacktrace-link-foo"]'
);
- expect(stacktraceLinkFoo.prop('to')).toEqual(url);
+ expect(stacktraceLinkFoo.prop('href')).toEqual(url);
expect(stacktraceLinkFoo.text()).toEqual('Foo');
expect(
wrapper.find('OpenInLink[data-test-id="stacktrace-link-tesla"]').text()
|
a46097b7b410863eeb62b820c3caffa51c5ee5bb
|
2023-03-29 00:15:02
|
Evan Purkhiser
|
fix(crons): Allow legacy ingest checkins with slugs using DSN (#46468)
| false
|
Allow legacy ingest checkins with slugs using DSN (#46468)
|
fix
|
diff --git a/src/sentry/monitors/endpoints/base.py b/src/sentry/monitors/endpoints/base.py
index bef4522ceccb69..ae0b0904b2e547 100644
--- a/src/sentry/monitors/endpoints/base.py
+++ b/src/sentry/monitors/endpoints/base.py
@@ -98,8 +98,10 @@ class MonitorIngestEndpoint(Endpoint):
validate
[!!]: This type of endpoint supports lookup of monitors by slug AND by
- GUID. However slug lookup is **ONLY** supported when the organization
- slug is part of the URL parameters.
+ GUID. However slug lookup is **ONLY** supported in two scenarios:
+
+ - When the organization slug is part of the URL parameters.
+ - When using DSN auth
"""
authentication_classes = (DSNAuthentication, TokenAuthentication, ApiKeyAuthentication)
@@ -132,6 +134,12 @@ def convert_args(
if self.allow_auto_create_monitors:
kwargs["monitor_id"] = monitor_id
+ using_dsn_auth = isinstance(request.auth, ProjectKey)
+
+ # When using DSN auth we're able to infer the organization slug
+ if not organization_slug and using_dsn_auth:
+ organization_slug = request.auth.project.organization.slug
+
# The only monitor endpoints that do not have the org slug in their
# parameters are the GUID-style checkin endpoints
if organization_slug:
@@ -168,7 +176,7 @@ def convert_args(
# Monitor ingestion supports upsert of monitors. This is currently only
# supported when using DSN auth.
- if not monitor and not isinstance(request.auth, ProjectKey):
+ if not monitor and not using_dsn_auth:
raise ResourceDoesNotExist
# No monitor is allowed when using DSN auth. Use the project from the
@@ -184,7 +192,7 @@ def convert_args(
# Validate that the authenticated project matches the monitor. This is
# used for DSN style authentication
- if hasattr(request.auth, "project_id") and project.id != request.auth.project_id:
+ if using_dsn_auth and project.id != request.auth.project_id:
raise ResourceDoesNotExist
# When looking up via GUID we do not check the organization slug,
diff --git a/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py b/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py
index 6793dc4f3baa5a..bbdff50603036f 100644
--- a/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py
+++ b/tests/sentry/monitors/endpoints/test_monitor_ingest_checkin_index.py
@@ -235,6 +235,23 @@ def test_with_dsn_auth(self):
assert list(resp.data.keys()) == ["id"]
assert UUID(resp.data["id"])
+ def test_with_dsn_auth_and_slug(self):
+ monitor = self._create_monitor(slug="my-test-monitor")
+
+ for path_func in self._get_path_functions():
+ path = path_func(monitor.slug)
+
+ resp = self.client.post(
+ path,
+ {"status": "ok"},
+ **self.dsn_auth_headers,
+ )
+ assert resp.status_code == 201, resp.content
+
+ # DSN auth should only return id
+ assert list(resp.data.keys()) == ["id"]
+ assert UUID(resp.data["id"])
+
def test_with_dsn_auth_invalid_project(self):
project2 = self.create_project()
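
A rough, framework-free sketch of the lookup rule the docstring and the new test describe: GUID lookups never need an organization scope, while slug lookups do, and DSN auth now supplies that scope when the URL does not. Every name below (the dictionaries, `resolve_monitor`, `dsn_org_slug`) is a hypothetical stand-in for the Django models and request auth used by the real endpoint.

from __future__ import annotations

import uuid

# Hypothetical in-memory stand-ins for the Monitor lookups the endpoint performs.
MONITORS_BY_SLUG = {("acme", "my-test-monitor"): "monitor-1"}
MONITORS_BY_GUID = {"6e3f4b7a-1d6a-4f4e-9c2b-0a1b2c3d4e5f": "monitor-1"}


def resolve_monitor(monitor_id: str, organization_slug: str | None, dsn_org_slug: str | None) -> str:
    """Slug lookup is only allowed when the org is known, from the URL or from DSN auth."""
    try:
        guid = str(uuid.UUID(monitor_id))
    except ValueError:
        guid = None
    if guid is not None:
        return MONITORS_BY_GUID[guid]      # GUID lookup needs no organization scope
    if organization_slug is None:
        organization_slug = dsn_org_slug   # DSN auth pins the organization
    if organization_slug is None:
        raise LookupError("slug lookup requires an organization scope")
    return MONITORS_BY_SLUG[(organization_slug, monitor_id)]


# With DSN auth, a slug-only check-in now resolves even without an org slug in the URL:
print(resolve_monitor("my-test-monitor", None, dsn_org_slug="acme"))  # monitor-1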
|
ea4168fe6ed8e1794d852bbcd817ff68e35768ea
|
2023-11-09 01:22:24
|
Colleen O'Rourke
|
chore(stacktrace linking): Remove front end usage of flag (#59575)
| false
|
Remove front end usage of flag (#59575)
|
chore
|
diff --git a/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx b/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx
index 1faeae71f98e51..c9d627119b050b 100644
--- a/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/exception/content.spec.tsx
@@ -154,6 +154,16 @@ describe('Exception Content', function () {
describe('exception groups', function () {
const event = TestStubs.Event({entries: [TestStubs.EventEntryExceptionGroup()]});
const project = TestStubs.Project();
+ beforeEach(() => {
+ const promptResponse = {
+ dismissed_ts: undefined,
+ snoozed_ts: undefined,
+ };
+ MockApiClient.addMockResponse({
+ url: '/prompts-activity/',
+ body: promptResponse,
+ });
+ });
const defaultProps = {
type: StackType.ORIGINAL,
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx
index 7014eda87ad212..bff6b678ebbe3e 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/content.spec.tsx
@@ -31,6 +31,16 @@ function renderedComponent(
}
describe('StackTrace', function () {
+ beforeEach(() => {
+ const promptResponse = {
+ dismissed_ts: undefined,
+ snoozed_ts: undefined,
+ };
+ MockApiClient.addMockResponse({
+ url: '/prompts-activity/',
+ body: promptResponse,
+ });
+ });
it('renders', function () {
renderedComponent({});
diff --git a/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx b/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx
index 985297864b53b1..24c7c896c7a79f 100644
--- a/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx
+++ b/static/app/components/events/interfaces/crashContent/stackTrace/nativeContent.spec.tsx
@@ -30,6 +30,16 @@ function renderedComponent(
);
}
describe('Native StackTrace', function () {
+ beforeEach(() => {
+ const promptResponse = {
+ dismissed_ts: undefined,
+ snoozed_ts: undefined,
+ };
+ MockApiClient.addMockResponse({
+ url: '/prompts-activity/',
+ body: promptResponse,
+ });
+ });
it('does not render non in app tags', function () {
const dataFrames = [...data.frames];
dataFrames[0] = {...dataFrames[0], inApp: false};
diff --git a/static/app/components/events/interfaces/frame/context.tsx b/static/app/components/events/interfaces/frame/context.tsx
index ffa622923a5e45..c897c9188af6a3 100644
--- a/static/app/components/events/interfaces/frame/context.tsx
+++ b/static/app/components/events/interfaces/frame/context.tsx
@@ -139,12 +139,7 @@ function Context({
}
const startLineNo = hasContextSource ? frame.context[0][0] : 0;
- const hasStacktraceLink =
- frame.inApp &&
- !!frame.filename &&
- isExpanded &&
- organization?.features.includes('integrations-stacktrace-link');
-
+ const hasStacktraceLink = frame.inApp && !!frame.filename && isExpanded;
return (
<Wrapper
start={startLineNo}
diff --git a/static/app/components/events/interfaces/threads.spec.tsx b/static/app/components/events/interfaces/threads.spec.tsx
index e96d773e32e176..846a2a57584004 100644
--- a/static/app/components/events/interfaces/threads.spec.tsx
+++ b/static/app/components/events/interfaces/threads.spec.tsx
@@ -9,6 +9,16 @@ import {EventOrGroupType} from 'sentry/types';
import {EntryType, Event} from 'sentry/types/event';
describe('Threads', function () {
+ beforeEach(() => {
+ const promptResponse = {
+ dismissed_ts: undefined,
+ snoozed_ts: undefined,
+ };
+ MockApiClient.addMockResponse({
+ url: '/prompts-activity/',
+ body: promptResponse,
+ });
+ });
const {project, organization} = initializeOrg();
describe('non native platform', function () {
diff --git a/static/app/views/settings/organizationIntegrations/configureIntegration.tsx b/static/app/views/settings/organizationIntegrations/configureIntegration.tsx
index 16cb6785fd9953..3309fb74d2a22f 100644
--- a/static/app/views/settings/organizationIntegrations/configureIntegration.tsx
+++ b/static/app/views/settings/organizationIntegrations/configureIntegration.tsx
@@ -119,10 +119,7 @@ class ConfigureIntegration extends DeprecatedAsyncView<Props, State> {
hasStacktraceLinking(provider: IntegrationProvider) {
// CodeOwners will only work if the provider has StackTrace Linking
- return (
- provider.features.includes('stacktrace-link') &&
- this.props.organization.features.includes('integrations-stacktrace-link')
- );
+ return provider.features.includes('stacktrace-link');
}
hasCodeOwners(provider: IntegrationProvider) {
|
a1744da6d946c9fbde2e088e6fa4c72a96793a85
|
2023-06-02 13:55:43
|
Francesco Novy
|
ref: Rename feature to org-auth-tokens to align with naming (#50222)
| false
|
Rename feature to org-auth-tokens to align with naming (#50222)
|
ref
|
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index a4855d40264757..a0380285042793 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -1535,8 +1535,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME():
"organizations:js-sdk-dynamic-loader": False,
# If true certain Slack messages will be escaped to prevent rendering markdown
"organizations:slack-escape-messages": False,
- # If true, allow to create/use org access tokens
- "organizations:org-access-tokens": False,
+ # If true, allow to create/use org auth tokens
+ "organizations:org-auth-tokens": False,
# Adds additional filters and a new section to issue alert rules.
"projects:alert-filters": True,
# Enable functionality to specify custom inbound filters on events.
diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py
index 12772377eb9a93..151300ddd25826 100644
--- a/src/sentry/features/__init__.py
+++ b/src/sentry/features/__init__.py
@@ -106,7 +106,7 @@
default_manager.add("organizations:minute-resolution-sessions", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
default_manager.add("organizations:mobile-vitals", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:mobile-cpu-memory-in-transactions", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
-default_manager.add("organizations:org-access-tokens", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
+default_manager.add("organizations:org-auth-tokens", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:view-hierarchies-options-dev", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:anr-improvements", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:anr-analyze-frames", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
|
f8302e1664c5fa238ad0d25178f0b16dbf86f286
|
2018-05-18 03:44:20
|
Billy Vong
|
ref(settings): Redirect the rest of org settings (#8472)
| false
|
Redirect the rest of org settings (#8472)
|
ref
|
diff --git a/src/sentry/static/sentry/app/routes.jsx b/src/sentry/static/sentry/app/routes.jsx
index 03d33ae400b608..8ac7495b4d895d 100644
--- a/src/sentry/static/sentry/app/routes.jsx
+++ b/src/sentry/static/sentry/app/routes.jsx
@@ -764,7 +764,17 @@ function routes() {
path="teams/:teamId/settings/"
to="/settings/:orgId/teams/:teamId/settings/"
/>
- {orgSettingsRoutes}
+ <Redirect path="settings/" to="/settings/:orgId/" />
+ <Redirect path="api-keys/" to="/settings/:orgId/api-keys/" />
+ <Redirect path="api-keys/:apiKey/" to="/settings/:orgId/api-keys/:apiKey/" />
+ <Redirect path="members/" to="/settings/:orgId/members/" />
+ <Redirect path="members/new/" to="/settings/:orgId/members/new/" />
+ <Redirect
+ path="members/:memberId/"
+ to="/settings/:orgId/members/:memberId/"
+ />
+ <Redirect path="rate-limits/" to="/settings/:orgId/rate-limits/" />
+ <Redirect path="repos/" to="/settings/:orgId/repos/" />
<Route path="stats/" component={errorHandler(OrganizationStats)} />
</Route>
diff --git a/src/sentry/static/sentry/app/views/settings/components/forms/form.jsx b/src/sentry/static/sentry/app/views/settings/components/forms/form.jsx
index a361f15b0ba59e..de405e240fa61c 100644
--- a/src/sentry/static/sentry/app/views/settings/components/forms/form.jsx
+++ b/src/sentry/static/sentry/app/views/settings/components/forms/form.jsx
@@ -34,6 +34,7 @@ export default class Form extends React.Component {
model: PropTypes.object,
apiMethod: PropTypes.string,
apiEndpoint: PropTypes.string,
+ 'data-test-id': PropTypes.string,
};
static defaultProps = {
@@ -144,7 +145,11 @@ export default class Form extends React.Component {
let shouldShowFooter = typeof hideFooter !== 'undefined' ? !hideFooter : !saveOnBlur;
return (
- <form onSubmit={this.onSubmit} className={className}>
+ <form
+ onSubmit={this.onSubmit}
+ className={className}
+ data-test-id={this.props['data-test-id']}
+ >
<div>{children}</div>
{shouldShowFooter && (
diff --git a/src/sentry/static/sentry/app/views/settings/organizationRateLimits/organizationRateLimits.jsx b/src/sentry/static/sentry/app/views/settings/organizationRateLimits/organizationRateLimits.jsx
index 92fff2f15f5e6f..13f5d343848486 100644
--- a/src/sentry/static/sentry/app/views/settings/organizationRateLimits/organizationRateLimits.jsx
+++ b/src/sentry/static/sentry/app/views/settings/organizationRateLimits/organizationRateLimits.jsx
@@ -65,7 +65,7 @@ export default class OrganizationRateLimit extends React.Component {
</PanelAlert>
<Form
- className="ref-rate-limit-editor"
+ data-test-id="rate-limit-editor"
saveOnBlur
allowUndo
apiMethod="PUT"
diff --git a/tests/acceptance/test_dashboard.py b/tests/acceptance/test_dashboard.py
index 0f4c6d0b7dd67b..6b60f40668042c 100644
--- a/tests/acceptance/test_dashboard.py
+++ b/tests/acceptance/test_dashboard.py
@@ -56,6 +56,7 @@ def test_one_issue(self):
self.project.update(first_event=timezone.now())
self.browser.get(self.path)
self.browser.wait_until_not('.loading-indicator')
+ self.browser.wait_until('[data-test-id] figure')
self.browser.snapshot('org dash one issue')
diff --git a/tests/acceptance/test_issue_details.py b/tests/acceptance/test_issue_details.py
index 6a5ef2eb9f62d6..363bb5fb4fc826 100644
--- a/tests/acceptance/test_issue_details.py
+++ b/tests/acceptance/test_issue_details.py
@@ -65,6 +65,7 @@ def test_javascript_specific_event(self):
'/{}/{}/issues/{}/events/{}/'.format(self.org.slug, self.project.slug, event.group.id, event.id)
)
self.browser.wait_until('.event-details-container')
+ self.browser.wait_until_not('.loading-indicator')
self.browser.snapshot('issue details javascript - event details')
def test_rust_event(self):
diff --git a/tests/acceptance/test_organization_rate_limits.py b/tests/acceptance/test_organization_rate_limits.py
index 5891b0c24cf1e1..c0c63e956e795f 100644
--- a/tests/acceptance/test_organization_rate_limits.py
+++ b/tests/acceptance/test_organization_rate_limits.py
@@ -33,16 +33,16 @@ def setUp(self):
def test_with_rate_limits(self):
self.project.update(first_event=timezone.now())
self.browser.get(self.path)
- self.browser.wait_until('.organization-home')
self.browser.wait_until_not('.loading-indicator')
+ self.browser.wait_until('[data-test-id="rate-limit-editor"]')
self.browser.snapshot('organization rate limits with quota')
- assert self.browser.element_exists('.ref-rate-limit-editor')
+ assert self.browser.element_exists('[data-test-id="rate-limit-editor"]')
@patch('sentry.app.quotas.get_maximum_quota', Mock(return_value=(0, 60)))
def test_without_rate_limits(self):
self.project.update(first_event=timezone.now())
self.browser.get(self.path)
- self.browser.wait_until('.organization-home')
self.browser.wait_until_not('.loading-indicator')
+ self.browser.wait_until('[data-test-id="rate-limit-editor"]')
self.browser.snapshot('organization rate limits without quota')
- assert self.browser.element_exists('.ref-rate-limit-editor')
+ assert self.browser.element_exists('[data-test-id="rate-limit-editor"]')
diff --git a/tests/acceptance/test_organization_settings.py b/tests/acceptance/test_organization_settings.py
index 4c0ef735eedb14..717e44460c1fb1 100644
--- a/tests/acceptance/test_organization_settings.py
+++ b/tests/acceptance/test_organization_settings.py
@@ -26,19 +26,15 @@ def setUp(self):
self.login_as(self.user)
self.path = '/organizations/{}/settings/'.format(self.org.slug)
- def load_organization_helper(self, snapshot_name):
- self.browser.wait_until('.organization-home')
+ def load_organization_helper(self, snapshot_name=None):
self.browser.wait_until_not('.loading-indicator')
- self.browser.snapshot('organization settings -- ' + snapshot_name)
+ if snapshot_name is not None:
+ self.browser.snapshot('organization settings -- ' + snapshot_name)
assert self.browser.element_exists('.ref-organization-settings')
def renders_2fa_setting(self):
return self.browser.element_exists('#require2FA')
- def test_simple(self):
- self.browser.get(self.path)
- self.load_organization_helper("Simple")
-
def test_disabled_2fa_feature(self):
user_owner = self.create_user('[email protected]')
organization = self.create_organization(name="Example", owner=user_owner)
@@ -46,7 +42,7 @@ def test_disabled_2fa_feature(self):
path = '/organizations/%s/settings/' % organization.slug
self.browser.get(path)
- self.load_organization_helper("disabled 2fa feature")
+ self.load_organization_helper()
assert not self.renders_2fa_setting()
def test_renders_2fa_setting_for_owner(self):
@@ -57,7 +53,7 @@ def test_renders_2fa_setting_for_owner(self):
with self.feature('organizations:require-2fa'):
self.browser.get(path)
- self.load_organization_helper("renders 2fa setting for organization owner")
+ self.load_organization_helper()
assert self.renders_2fa_setting()
def test_renders_2fa_setting_for_manager(self):
@@ -70,7 +66,7 @@ def test_renders_2fa_setting_for_manager(self):
with self.feature('organizations:require-2fa'):
self.browser.get(path)
- self.load_organization_helper("renders 2fa setting for organization manager")
+ self.load_organization_helper()
assert self.renders_2fa_setting()
def test_setting_2fa_without_2fa_enabled(self):
|
9cb7b58aaab2f011c7c5e6d56cd94c8aa5fdfece
|
2017-08-30 22:37:25
|
Matt Robenolt
|
fix(saml): enforce only POST request on ACS view
| false
|
enforce only POST request on ACS view
|
fix
|
diff --git a/src/sentry/auth/providers/saml2.py b/src/sentry/auth/providers/saml2.py
index b5d7a6e13b579b..f2bbc51a465cc1 100644
--- a/src/sentry/auth/providers/saml2.py
+++ b/src/sentry/auth/providers/saml2.py
@@ -4,7 +4,7 @@
from django.core.urlresolvers import reverse
from django.http import (
HttpResponse, HttpResponseRedirect, HttpResponseServerError,
- Http404,
+ HttpResponseNotAllowed, Http404,
)
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
@@ -57,6 +57,9 @@ def dispatch(self, request, helper):
class SAML2ACSView(AuthView):
@method_decorator(csrf_exempt)
def dispatch(self, request, organization_slug):
+ if request.method != 'POST':
+ return HttpResponseNotAllowed(['POST'])
+
provider = get_provider(organization_slug)
if provider is None:
raise Http404
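
Because the ACS view overrides `dispatch` itself (and marks it `csrf_exempt`), every HTTP method reaches it, so the fix rejects anything but POST with a 405 before doing any SAML work. A minimal sketch of the same guard, assuming Django; the class name and handler body are illustrative, not Sentry's:

from django.http import HttpResponseNotAllowed
from django.utils.decorators import method_decorator
from django.views import View
from django.views.decorators.csrf import csrf_exempt


class AssertionConsumerView(View):  # illustrative stand-in for SAML2ACSView
    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        if request.method != "POST":
            # 405 with an Allow: POST header, mirroring the change above.
            return HttpResponseNotAllowed(["POST"])
        return super().dispatch(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        ...  # validate and consume the SAML assertion here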
|
06a2a9de8f449a481f8a9183aa3622df4e36fd9d
|
2025-03-14 23:47:07
|
Katie Byers
|
fix(rate limits): Handle redis index error gracefully (#87040)
| false
|
Handle redis index error gracefully (#87040)
|
fix
|
diff --git a/src/sentry/ratelimits/redis.py b/src/sentry/ratelimits/redis.py
index 2d43855ae93be3..62238969ae7557 100644
--- a/src/sentry/ratelimits/redis.py
+++ b/src/sentry/ratelimits/redis.py
@@ -113,10 +113,10 @@ def is_limited_with_value(
pipe.expire(redis_key, expiration)
pipeline_result = pipe.execute()
result = pipeline_result[0]
- except RedisError:
+ except (RedisError, IndexError):
# We don't want rate limited endpoints to fail when ratelimits
# can't be updated. We do want to know when that happens.
- logger.exception("Failed to retrieve current value from redis")
+ logger.exception("Failed to retrieve current rate limit value from redis")
return False, 0, reset_time
return result > limit, result, reset_time
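
The extra `IndexError` matters because `pipe.execute()` can hand back an empty or shorter-than-expected list, and indexing `[0]` on it raises `IndexError`, not a `RedisError`, so the old handler let the request fail. A small sketch of that fail-open behaviour with a faked pipeline result (no Redis involved, so only the `IndexError` branch is shown):

from __future__ import annotations

import logging

logger = logging.getLogger(__name__)


def read_counter(pipeline_result: list[int], limit: int) -> tuple[bool, int]:
    """Return (is_limited, current_value), failing open if the result is unusable."""
    try:
        result = pipeline_result[0]
    except IndexError:
        # Mirrors the fix above: don't let the rate limiter break the request, but log it.
        logger.exception("Failed to retrieve current rate limit value from redis")
        return False, 0
    return result > limit, result


print(read_counter([3], limit=5))  # (False, 3)
print(read_counter([], limit=5))   # (False, 0): an empty pipeline result is handled gracefully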
|
eb27bc56f13b5a8054fdfb870db40831c7832923
|
2023-11-01 08:25:34
|
Scott Cooper
|
fix(issues): Fix infinite react rerenders in issue list (#59169)
| false
|
Fix infinite react rerenders in issue list (#59169)
|
fix
|
diff --git a/static/app/components/replays/useReplaysCount.tsx b/static/app/components/replays/useReplaysCount.tsx
index 3d606aa8cce1a1..71a75bce8cd545 100644
--- a/static/app/components/replays/useReplaysCount.tsx
+++ b/static/app/components/replays/useReplaysCount.tsx
@@ -1,4 +1,4 @@
-import {useCallback, useMemo, useState} from 'react';
+import {useCallback, useEffect, useMemo, useState} from 'react';
import {DateString, IssueCategory, Organization} from 'sentry/types';
import {ApiQueryKey, useApiQuery} from 'sentry/utils/queryClient';
@@ -115,21 +115,22 @@ function useReplaysCount({
}
);
- return useMemo(() => {
+ useEffect(() => {
if (isFetched) {
- const merged = {
+ setLastData(last => ({
...zeroCounts,
- ...lastData,
+ ...last,
...data,
- };
- setLastData(merged);
- return merged;
+ }));
}
+ }, [isFetched, zeroCounts, data]);
+
+ return useMemo<CountState>(() => {
return {
...lastData,
...data,
};
- }, [isFetched, zeroCounts, lastData, data]);
+ }, [lastData, data]);
}
function makeReplayCountsQueryKey({
|
fb8e853b3942c7845da96a4b5815fb933c767522
|
2023-10-04 05:54:49
|
Pierre Massat
|
feat(spans): Run spans consumer by default in dev (#57424)
| false
|
Run spans consumer by default in dev (#57424)
|
feat
|
diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py
index 16b4a02bfe8009..2819bee9138288 100644
--- a/src/sentry/runner/commands/devserver.py
+++ b/src/sentry/runner/commands/devserver.py
@@ -300,6 +300,7 @@ def devserver(
kafka_consumers.add("ingest-attachments")
kafka_consumers.add("ingest-transactions")
kafka_consumers.add("ingest-monitors")
+ kafka_consumers.add("ingest-spans")
if settings.SENTRY_USE_PROFILING:
kafka_consumers.add("ingest-profiles")
|
b988878a4fcf17d055ef34ebaeefc736e4378df9
|
2024-09-25 16:06:06
|
Jan Michael Auer
|
fix: Make the indexer consumer sample rate relative to the base rate (#78112)
| false
|
Make the indexer consumer sample rate relative to the base rate (#78112)
|
fix
|
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index fd98edfc9b8a46..52089d744953e2 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -1730,7 +1730,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
SENTRY_METRICS_INDEXER = "sentry.sentry_metrics.indexer.postgres.postgres_v2.PostgresIndexer"
SENTRY_METRICS_INDEXER_OPTIONS: dict[str, Any] = {}
SENTRY_METRICS_INDEXER_CACHE_TTL = 3600 * 2
-SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE = 0.1
+SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE = 0.1 # relative to SENTRY_BACKEND_APM_SAMPLING
SENTRY_METRICS_INDEXER_SPANNER_OPTIONS: dict[str, Any] = {}
diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py
index 9af23bb62da10e..4a72736ff4886e 100644
--- a/src/sentry/sentry_metrics/consumers/indexer/processing.py
+++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py
@@ -73,12 +73,13 @@ def __get_schema_validator(self) -> Callable[[str, IngestMetric], None]:
).validate
def process_messages(self, outer_message: Message[MessageBatch]) -> IndexerOutputMessageBatch:
- # TODO-anton: remove sampled here and let traces_sampler decide
+ sample_rate = (
+ settings.SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE
+ * settings.SENTRY_BACKEND_APM_SAMPLING
+ )
with sentry_sdk.start_transaction(
name="sentry.sentry_metrics.consumers.indexer.processing.process_messages",
- custom_sampling_context={
- "sample_rate": settings.SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE
- },
+ custom_sampling_context={"sample_rate": sample_rate},
):
return self._process_messages_impl(outer_message)
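
"Relative to the base rate" here means a simple multiplication: the indexer's transaction sample rate is now scaled by the deployment-wide `SENTRY_BACKEND_APM_SAMPLING` instead of being used as an absolute rate. A toy calculation with an assumed base rate (the 0.01 is illustrative, not a real default):

# Hypothetical values; the real ones come from Django settings.
SENTRY_BACKEND_APM_SAMPLING = 0.01                     # assumed base backend sampling rate
SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE = 0.1  # now interpreted relative to the base

effective_rate = SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE * SENTRY_BACKEND_APM_SAMPLING
print(effective_rate)  # ~0.001: the indexer samples 10% of whatever the backend samples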
|
087ecd3bdca1aa5113ad873b2461448a7dda090a
|
2025-01-07 01:56:20
|
Nikki Kapadia
|
feat(widget-builder): Implement error message for title field (#82948)
| false
|
Implement error message for title field (#82948)
|
feat
|
diff --git a/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.spec.tsx b/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.spec.tsx
index 1b262c5758376b..cafc94f130a767 100644
--- a/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.spec.tsx
+++ b/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.spec.tsx
@@ -12,6 +12,7 @@ jest.mock('sentry/utils/useNavigate', () => ({
}));
const mockUseNavigate = jest.mocked(useNavigate);
+const mockSetError = jest.fn();
describe('WidgetBuilder', () => {
let router!: ReturnType<typeof RouterFixture>;
@@ -33,7 +34,7 @@ describe('WidgetBuilder', () => {
render(
<WidgetBuilderProvider>
- <WidgetBuilderNameAndDescription error={{}} />
+ <WidgetBuilderNameAndDescription error={{}} setError={mockSetError} />
</WidgetBuilderProvider>,
{
router,
@@ -62,4 +63,20 @@ describe('WidgetBuilder', () => {
})
);
});
+
+ it('displays error', async function () {
+ render(
+ <WidgetBuilderProvider>
+ <WidgetBuilderNameAndDescription
+ error={{title: 'Title is required during creation.'}}
+ setError={mockSetError}
+ />
+ </WidgetBuilderProvider>,
+ {router, organization}
+ );
+
+ expect(
+ await screen.findByText('Title is required during creation.')
+ ).toBeInTheDocument();
+ });
});
diff --git a/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.tsx b/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.tsx
index 165760f55b44c0..dae7ad023873d1 100644
--- a/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.tsx
+++ b/static/app/views/dashboards/widgetBuilder/components/nameAndDescFields.tsx
@@ -3,7 +3,7 @@ import styled from '@emotion/styled';
import {Button} from 'sentry/components/button';
import TextArea from 'sentry/components/forms/controls/textarea';
-import Input from 'sentry/components/input';
+import TextField from 'sentry/components/forms/fields/textField';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {SectionHeader} from 'sentry/views/dashboards/widgetBuilder/components/common/sectionHeader';
@@ -12,9 +12,13 @@ import {BuilderStateAction} from 'sentry/views/dashboards/widgetBuilder/hooks/us
interface WidgetBuilderNameAndDescriptionProps {
error: Record<string, any>;
+ setError: (error: Record<string, any>) => void;
}
-function WidgetBuilderNameAndDescription({}: WidgetBuilderNameAndDescriptionProps) {
+function WidgetBuilderNameAndDescription({
+ error,
+ setError,
+}: WidgetBuilderNameAndDescriptionProps) {
const {state, dispatch} = useWidgetBuilderContext();
const [isDescSelected, setIsDescSelected] = useState(state.description ? true : false);
@@ -22,15 +26,20 @@ function WidgetBuilderNameAndDescription({}: WidgetBuilderNameAndDescriptionProp
<Fragment>
<SectionHeader title={t('Widget Name & Description')} />
<StyledInput
+ name={t('Widget Name')}
size="md"
placeholder={t('Name')}
title={t('Widget Name')}
- type="text"
aria-label={t('Widget Name')}
value={state.title}
- onChange={e => {
- dispatch({type: BuilderStateAction.SET_TITLE, payload: e.target.value});
+ onChange={newTitle => {
+ // clear error once user starts typing
+ setError({...error, title: undefined});
+ dispatch({type: BuilderStateAction.SET_TITLE, payload: newTitle});
}}
+ required
+ error={error.title}
+ inline={false}
/>
{!isDescSelected && (
<AddDescriptionButton
@@ -62,8 +71,10 @@ function WidgetBuilderNameAndDescription({}: WidgetBuilderNameAndDescriptionProp
export default WidgetBuilderNameAndDescription;
-const StyledInput = styled(Input)`
+const StyledInput = styled(TextField)`
margin-bottom: ${space(1)};
+ padding: 0;
+ border: none;
`;
const AddDescriptionButton = styled(Button)`
diff --git a/static/app/views/dashboards/widgetBuilder/components/widgetBuilderSlideout.tsx b/static/app/views/dashboards/widgetBuilder/components/widgetBuilderSlideout.tsx
index 1228519e408901..710c3b8de1ce67 100644
--- a/static/app/views/dashboards/widgetBuilder/components/widgetBuilderSlideout.tsx
+++ b/static/app/views/dashboards/widgetBuilder/components/widgetBuilderSlideout.tsx
@@ -162,7 +162,7 @@ function WidgetBuilderSlideout({
</Section>
)}
<Section>
- <WidgetBuilderNameAndDescription error={error} />
+ <WidgetBuilderNameAndDescription error={error} setError={setError} />
</Section>
<SaveButton isEditing={isEditing} onSave={onSave} setError={setError} />
</SlideoutBodyWrapper>
|
bdcbf2b95223bdda782a3abd5d5f2f303fbe2f10
|
2024-12-19 23:10:08
|
Nikki Kapadia
|
fix(widget-builder): Make draggable UI design accurate (#82366)
| false
|
Make draggable UI design accurate (#82366)
|
fix
|
diff --git a/static/app/views/dashboards/widgetBuilder/components/newWidgetBuilder.tsx b/static/app/views/dashboards/widgetBuilder/components/newWidgetBuilder.tsx
index 2af12422f5d553..2e1e52a69c326a 100644
--- a/static/app/views/dashboards/widgetBuilder/components/newWidgetBuilder.tsx
+++ b/static/app/views/dashboards/widgetBuilder/components/newWidgetBuilder.tsx
@@ -232,6 +232,9 @@ export function WidgetPreviewContainer({
: state.displayType === DisplayType.TABLE
? 'auto'
: PREVIEW_HEIGHT_PX,
+ outline: isDragEnabled
+ ? `${space(1)} solid ${theme.border}`
+ : undefined,
}}
>
<WidgetPreview
@@ -290,7 +293,7 @@ const Backdrop = styled('div')`
const SampleWidgetCard = styled(motion.div)`
width: 100%;
min-width: 100%;
- border: 2px dashed ${p => p.theme.border};
+ border: 1px dashed ${p => p.theme.gray300};
border-radius: ${p => p.theme.borderRadius};
background-color: ${p => p.theme.background};
z-index: ${p => p.theme.zIndex.initial};
diff --git a/static/app/views/dashboards/widgetBuilder/components/widgetPreview.tsx b/static/app/views/dashboards/widgetBuilder/components/widgetPreview.tsx
index 2aef3084a674df..b917f1f7f34005 100644
--- a/static/app/views/dashboards/widgetBuilder/components/widgetPreview.tsx
+++ b/static/app/views/dashboards/widgetBuilder/components/widgetPreview.tsx
@@ -51,6 +51,7 @@ function WidgetPreview({
return (
<WidgetCard
disableFullscreen
+ borderless
isWidgetInvalid={isWidgetInvalid}
shouldResize={state.displayType !== DisplayType.TABLE}
organization={organization}
diff --git a/static/app/views/dashboards/widgetCard/index.tsx b/static/app/views/dashboards/widgetCard/index.tsx
index 2dcdc100d9ec90..90acd1914b1bf1 100644
--- a/static/app/views/dashboards/widgetCard/index.tsx
+++ b/static/app/views/dashboards/widgetCard/index.tsx
@@ -67,6 +67,7 @@ type Props = WithRouterProps & {
widget: Widget;
widgetLegendState: WidgetLegendSelectionState;
widgetLimitReached: boolean;
+ borderless?: boolean;
dashboardFilters?: DashboardFilters;
disableFullscreen?: boolean;
hasEditAccess?: boolean;
@@ -301,6 +302,7 @@ function WidgetCard(props: Props) {
meta={tableMeta}
error={widgetQueryError || errorMessage || undefined}
preferredPolarity="-"
+ borderless={props.borderless}
/>
);
}}
@@ -316,6 +318,7 @@ function WidgetCard(props: Props) {
actionsMessage={actionsMessage}
actions={actions}
onFullScreenViewClick={disableFullscreen ? undefined : onFullScreenViewClick}
+ borderless={props.borderless}
>
<WidgetCardChartContainer
location={location}
diff --git a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx
index 62856d97a67a8a..c613861ad58333 100644
--- a/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx
+++ b/static/app/views/dashboards/widgets/bigNumberWidget/bigNumberWidget.tsx
@@ -31,7 +31,11 @@ export function BigNumberWidget(props: BigNumberWidgetProps) {
if (props.isLoading) {
return (
- <WidgetFrame title={props.title} description={props.description}>
+ <WidgetFrame
+ title={props.title}
+ description={props.description}
+ borderless={props.borderless}
+ >
<LoadingPlaceholder>{LOADING_PLACEHOLDER}</LoadingPlaceholder>
</WidgetFrame>
);
@@ -62,6 +66,7 @@ export function BigNumberWidget(props: BigNumberWidgetProps) {
warnings={props.warnings}
error={error}
onRetry={props.onRetry}
+ borderless={props.borderless}
>
{defined(value) && (
<BigNumberResizeWrapper>
diff --git a/static/app/views/dashboards/widgets/common/widgetFrame.tsx b/static/app/views/dashboards/widgets/common/widgetFrame.tsx
index d1d17d37215514..8528412b4dbb00 100644
--- a/static/app/views/dashboards/widgets/common/widgetFrame.tsx
+++ b/static/app/views/dashboards/widgets/common/widgetFrame.tsx
@@ -20,6 +20,7 @@ export interface WidgetFrameProps extends StateProps {
actionsDisabled?: boolean;
actionsMessage?: string;
badgeProps?: BadgeProps | BadgeProps[];
+ borderless?: boolean;
children?: React.ReactNode;
description?: React.ReactElement | string;
onFullScreenViewClick?: () => void;
@@ -45,7 +46,7 @@ export function WidgetFrame(props: WidgetFrameProps) {
: props.actions) ?? [];
return (
- <Frame aria-label="Widget panel">
+ <Frame aria-label="Widget panel" borderless={props.borderless}>
<Header>
{props.warnings && props.warnings.length > 0 && (
<Tooltip title={<WarningsList warnings={props.warnings} />} isHoverable>
@@ -186,7 +187,7 @@ function TitleActionsWrapper({disabled, disabledMessage, children}: TitleActions
);
}
-const Frame = styled('div')`
+const Frame = styled('div')<{borderless?: boolean}>`
position: relative;
display: flex;
flex-direction: column;
@@ -197,8 +198,7 @@ const Frame = styled('div')`
min-width: ${MIN_WIDTH}px;
border-radius: ${p => p.theme.panelBorderRadius};
- border: ${p => p.theme.border};
- border: 1px ${p => 'solid ' + p.theme.border};
+ ${p => !p.borderless && `border: 1px solid ${p.theme.border};`}
background: ${p => p.theme.background};
|
48d3644a26d0985501121f3546de2c67853a348e
|
2024-03-15 14:28:45
|
Iker Barriocanal
|
ref(projconfig): Fix typo in instrumentation in span op (#67035)
| false
|
Fix typo in instrumentation in span op (#67035)
|
ref
|
diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py
index f9de38a775e85e..4120a3ae818d4f 100644
--- a/src/sentry/relay/config/metric_extraction.py
+++ b/src/sentry/relay/config/metric_extraction.py
@@ -107,7 +107,7 @@ def get_metric_extraction_config(project: Project) -> MetricExtractionConfig | N
with sentry_sdk.start_span(op="get_alert_metric_specs"):
alert_specs = _get_alert_metric_specs(project, enabled_features, prefilling)
- with sentry_sdk.start_span(op="get_alert_metric_specs"):
+ with sentry_sdk.start_span(op="get_widget_metric_specs"):
widget_specs = _get_widget_metric_specs(project, enabled_features, prefilling)
with sentry_sdk.start_span(op="merge_metric_specs"):
|
adb7d223f8ec66175116b1b35c7a28bbc498b061
|
2024-10-29 21:53:12
|
anthony sottile
|
ref: upgrade pyuwsgi (#79703)
| false
|
upgrade pyuwsgi (#79703)
|
ref
|
diff --git a/requirements-base.txt b/requirements-base.txt
index e9942f58727828..b796cd06e715fd 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -86,7 +86,7 @@ ua-parser>=0.10.0
unidiff>=0.7.4
urllib3[brotli]>=2.2.2
brotli>=1.1
-pyuwsgi==2.0.27a1
+pyuwsgi==2.0.27.post1
zstandard>=0.18.0
sentry-usage-accountant==0.0.10
orjson>=3.10.3
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index e2a9f98f97b244..419d6f4d7ef27c 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -157,7 +157,7 @@ python-u2flib-server==5.0.0
python-utils==3.3.3
python3-saml==1.15.0
pyupgrade==3.17.0
-pyuwsgi==2.0.27a1
+pyuwsgi==2.0.27.post1
pyvat==1.3.15
pyyaml==6.0.2
rb==1.10.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 0be79f13b716a5..39820abf4c79c7 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -104,7 +104,7 @@ python-rapidjson==1.8
python-u2flib-server==5.0.0
python-utils==3.3.3
python3-saml==1.15.0
-pyuwsgi==2.0.27a1
+pyuwsgi==2.0.27.post1
pyvat==1.3.15
pyyaml==6.0.2
rb==1.10.0
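
The pin moves from a pre-release to a post-release: under PEP 440, `2.0.27a1` sorts before the final `2.0.27`, while `2.0.27.post1` sorts after it. A quick check with the `packaging` library (assumed to be installed; it ships with most Python tooling):

from packaging.version import Version

assert Version("2.0.27a1") < Version("2.0.27") < Version("2.0.27.post1")
print(sorted(["2.0.27.post1", "2.0.27a1", "2.0.27"], key=Version))
# ['2.0.27a1', '2.0.27', '2.0.27.post1']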
|
fd4df5f0da9e7d5640d1c36abec3219fb5b41b92
|
2023-08-10 03:41:06
|
anthony sottile
|
ref: remove `if_param` as it does not play nicely with typing (#54490)
| false
|
remove `if_param` as it does not play nicely with typing (#54490)
|
ref
|
diff --git a/src/sentry/mediators/param.py b/src/sentry/mediators/param.py
index 2f89d7336244a2..df85bf8f07da98 100644
--- a/src/sentry/mediators/param.py
+++ b/src/sentry/mediators/param.py
@@ -156,15 +156,3 @@ def __get__(self, inst: C, owner: type[C]) -> T:
def __get__(self, inst: C | None, owner: type[C]) -> T | Self:
...
-
-
-def if_param(name):
- def _if_param(func):
- def wrapper(self, *args):
- if not hasattr(self, name) or getattr(self, name) is None:
- return
- return func(self, *args)
-
- return wrapper
-
- return _if_param
diff --git a/src/sentry/mediators/project_rules/updater.py b/src/sentry/mediators/project_rules/updater.py
index bde1d39b778701..09bd2623ca0205 100644
--- a/src/sentry/mediators/project_rules/updater.py
+++ b/src/sentry/mediators/project_rules/updater.py
@@ -2,7 +2,7 @@
from rest_framework.request import Request
from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param, if_param
+from sentry.mediators.param import Param
from sentry.models import Actor, Project, Rule
@@ -41,29 +41,28 @@ def _update_owner(self) -> None:
self.rule.owner = Actor.objects.get(id=self.owner) if self.owner else None
def _update_environment(self):
- # environment can be None so we don't use the if_param decorator
self.rule.environment_id = self.environment
- @if_param("project")
def _update_project(self):
- self.rule.project = self.project
+ if self.project:
+ self.rule.project = self.project
- @if_param("actions")
def _update_actions(self):
- self.rule.data["actions"] = self.actions
+ if self.actions:
+ self.rule.data["actions"] = self.actions
- @if_param("action_match")
def _update_action_match(self):
- self.rule.data["action_match"] = self.action_match
+ if self.action_match:
+ self.rule.data["action_match"] = self.action_match
- @if_param("filter_match")
def _update_filter_match(self):
- self.rule.data["filter_match"] = self.filter_match
+ if self.filter_match:
+ self.rule.data["filter_match"] = self.filter_match
- @if_param("conditions")
def _update_conditions(self):
- self.rule.data["conditions"] = self.conditions
+ if self.conditions:
+ self.rule.data["conditions"] = self.conditions
- @if_param("frequency")
def _update_frequency(self):
- self.rule.data["frequency"] = self.frequency
+ if self.frequency:
+ self.rule.data["frequency"] = self.frequency
diff --git a/src/sentry/mediators/sentry_app_installations/updater.py b/src/sentry/mediators/sentry_app_installations/updater.py
index f89675e3129079..a171d01a88864b 100644
--- a/src/sentry/mediators/sentry_app_installations/updater.py
+++ b/src/sentry/mediators/sentry_app_installations/updater.py
@@ -3,7 +3,7 @@
from sentry import analytics
from sentry.constants import SentryAppInstallationStatus
from sentry.mediators.mediator import Mediator
-from sentry.mediators.param import Param, if_param
+from sentry.mediators.param import Param
from sentry.models.integrations.sentry_app_installation import SentryAppInstallation
from sentry.services.hybrid_cloud.app import RpcSentryAppInstallation
@@ -17,7 +17,6 @@ def call(self):
self._update_status()
return self.sentry_app_installation
- @if_param("status")
def _update_status(self):
# convert from string to integer
if self.status == SentryAppInstallationStatus.INSTALLED_STR:
|
d106cdb9ef95c41c687b6d6e71484decf91b1a28
|
2023-01-26 00:19:31
|
Jonas
|
fix(spans): text rendering fix (#43689)
| false
|
text rendering fix (#43689)
|
fix
|
diff --git a/static/app/components/profiling/flamegraph/flamegraphSpans.tsx b/static/app/components/profiling/flamegraph/flamegraphSpans.tsx
index 1de716f42268bc..a49f4489f10d09 100644
--- a/static/app/components/profiling/flamegraph/flamegraphSpans.tsx
+++ b/static/app/components/profiling/flamegraph/flamegraphSpans.tsx
@@ -136,7 +136,7 @@ export function FlamegraphSpans({
const drawText = () => {
spansTextRenderer.draw(
- spansView.configView.transformRect(spansView.configSpaceTransform),
+ spansView.toOriginConfigView(spansView.configView),
spansView.fromTransformedConfigView(spansCanvas.physicalSpace),
flamegraphSearch.results.spans
);
diff --git a/static/app/utils/profiling/renderers/spansTextRenderer.tsx b/static/app/utils/profiling/renderers/spansTextRenderer.tsx
index 42150ff06336cd..345eab614dec91 100644
--- a/static/app/utils/profiling/renderers/spansTextRenderer.tsx
+++ b/static/app/utils/profiling/renderers/spansTextRenderer.tsx
@@ -64,11 +64,14 @@ class SpansTextRenderer extends TextRenderer {
// 2. We can skip drawing and
// Find the upper and lower bounds of the frames we need to draw so we don't end up
// iterating over all of the root frames and avoid creating shallow copies if we don't need to.
- const start = lowerBound(configView.left, this.spanChart.root.children);
- const end = upperBound(configView.right, this.spanChart.root.children);
-
// Populate the initial set of frames to draw
- const spans: SpanChartNode[] = this.spanChart.root.children.slice(start, end);
+
+ // Note: we cannot apply the same optimization to the roots as we can to the children, because
+ // the root spans are not sorted by start time, so we cannot use binary search to find the
+ // upper and lower bounds. The reason they are not sorted is that they contain all tree roots,
+ // including the overlapping trees. The only case where it does work is if we only have a single tree root
+ // because we then know that all spans are non-overlapping and we have only one range tree
+ const spans: SpanChartNode[] = [...this.spanChart.root.children];
while (spans.length > 0) {
const span = spans.pop()!;
|
f0a57fd99c83533e24fe1141ef3a8ac681092c35
|
2023-08-28 20:45:05
|
Jonas
|
fix(profiling): adjust chart font size based on DPR (#55258)
| false
|
adjust chart font size based on DPR (#55258)
|
fix
|
diff --git a/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx b/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx
index 27c51ed770f53c..09a71bd91455c4 100644
--- a/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx
+++ b/static/app/utils/profiling/flamegraph/flamegraphTheme.tsx
@@ -47,10 +47,10 @@ export interface FlamegraphTheme {
COLORS: {
BAR_LABEL_FONT_COLOR: string;
CHART_CURSOR_INDICATOR: string;
+ CHART_LABEL_COLOR: string;
COLOR_BUCKET: (t: number) => ColorChannels;
COLOR_MAPS: Record<FlamegraphColorCodings[number], ColorMapFn>;
CPU_CHART_COLORS: ColorChannels[];
- CPU_CHART_LABEL_COLOR: string;
CURSOR_CROSSHAIR: string;
DIFFERENTIAL_DECREASE: ColorChannels;
DIFFERENTIAL_INCREASE: ColorChannels;
@@ -107,6 +107,7 @@ export interface FlamegraphTheme {
LABEL_FONT_SIZE: number;
MAX_SPANS_HEIGHT: number;
MEMORY_CHART_HEIGHT: number;
+ METRICS_FONT_SIZE: number;
MINIMAP_HEIGHT: number;
MINIMAP_POSITION_OVERLAY_BORDER_WIDTH: number;
SPANS_BAR_HEIGHT: number;
@@ -167,6 +168,7 @@ const SIZES: FlamegraphTheme['SIZES'] = {
SPANS_BAR_HEIGHT: 20,
SPANS_DEPTH_OFFSET: 3,
SPANS_FONT_SIZE: 11,
+ METRICS_FONT_SIZE: 8,
MAX_SPANS_HEIGHT: 160,
TIMELINE_HEIGHT: 20,
TOOLTIP_FONT_SIZE: 12,
@@ -201,7 +203,7 @@ export const LightFlamegraphTheme: FlamegraphTheme = {
hexToColorChannels(CHART_PALETTE[4][3], 0.8),
],
CHART_CURSOR_INDICATOR: 'rgba(31,35,58,.75)',
- CPU_CHART_LABEL_COLOR: 'rgba(31,35,58,.75)',
+ CHART_LABEL_COLOR: 'rgba(31,35,58,.75)',
CURSOR_CROSSHAIR: '#bbbbbb',
DIFFERENTIAL_DECREASE: [0.309, 0.2058, 0.98],
DIFFERENTIAL_INCREASE: [0.98, 0.2058, 0.4381],
@@ -254,7 +256,7 @@ export const DarkFlamegraphTheme: FlamegraphTheme = {
hexToColorChannels(CHART_PALETTE[4][3], 0.8),
],
CHART_CURSOR_INDICATOR: 'rgba(255, 255, 255, 0.5)',
- CPU_CHART_LABEL_COLOR: 'rgba(255, 255, 255, 0.5)',
+ CHART_LABEL_COLOR: 'rgba(255, 255, 255, 0.5)',
CURSOR_CROSSHAIR: '#828285',
DIFFERENTIAL_DECREASE: [0.309, 0.2058, 0.98],
DIFFERENTIAL_INCREASE: [0.98, 0.2058, 0.4381],
diff --git a/static/app/utils/profiling/renderers/chartRenderer.tsx b/static/app/utils/profiling/renderers/chartRenderer.tsx
index 670a5c3cdb5515..25e22ad311db68 100644
--- a/static/app/utils/profiling/renderers/chartRenderer.tsx
+++ b/static/app/utils/profiling/renderers/chartRenderer.tsx
@@ -146,7 +146,9 @@ export class FlamegraphChartRenderer {
}
this.context.clearRect(0, 0, this.canvas.width, this.canvas.height);
- this.context.font = `bold 14px ${this.theme.FONTS.FRAME_FONT}`;
+ this.context.font = `bold ${
+ this.theme.SIZES.METRICS_FONT_SIZE * window.devicePixelRatio
+ }px ${this.theme.FONTS.FRAME_FONT}`;
this.context.beginPath();
this.context.stroke();
@@ -216,8 +218,8 @@ export class FlamegraphChartRenderer {
}
// Draw interval ticks
- this.context.strokeStyle = this.theme.COLORS.CPU_CHART_LABEL_COLOR;
- this.context.fillStyle = this.theme.COLORS.CPU_CHART_LABEL_COLOR;
+ this.context.strokeStyle = this.theme.COLORS.CHART_LABEL_COLOR;
+ this.context.fillStyle = this.theme.COLORS.CHART_LABEL_COLOR;
for (let i = 0; i < intervals.length; i++) {
const interval = vec3.fromValues(configView.left, intervals[i], 1);
vec3.transformMat3(interval, interval, configViewToPhysicalSpace);
|
ed6d1267ce8e4b2c551af9904216dc371a52ce87
|
2024-08-08 22:01:32
|
anthony sottile
|
ref: improve types of digests.backends.redis (#75821)
| false
|
improve types of digests.backends.redis (#75821)
|
ref
|
diff --git a/src/sentry/digests/backends/redis.py b/src/sentry/digests/backends/redis.py
index a566f64ff9a2dc..21dca6dd15ba27 100644
--- a/src/sentry/digests/backends/redis.py
+++ b/src/sentry/digests/backends/redis.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import logging
import time
-from collections.abc import Iterable
+from collections.abc import Generator, Iterable
from contextlib import contextmanager
from typing import Any
@@ -165,8 +167,8 @@ def schedule(self, deadline: float, timestamp: float | None = None) -> Iterable[
error,
)
- def __maintenance_partition(self, host: int, deadline: float, timestamp: float) -> Any:
- return script(
+ def __maintenance_partition(self, host: int, deadline: float, timestamp: float) -> None:
+ script(
["-"],
["MAINTENANCE", self.namespace, self.ttl, timestamp, deadline],
self.cluster.get_local_client(host),
@@ -189,7 +191,7 @@ def maintenance(self, deadline: float, timestamp: float | None = None) -> None:
@contextmanager
def digest(
self, key: str, minimum_delay: int | None = None, timestamp: float | None = None
- ) -> Any:
+ ) -> Generator[list[Record]]:
if minimum_delay is None:
minimum_delay = self.minimum_delay
|
54b7d15d95cb3f8b5fe3d6e786e0809e9beb3d6c
|
2024-03-07 17:02:15
|
Luca Forstner
|
feat(source-maps-debugger): Remove backend feature flag to enable by default (#66348)
| false
|
Remove backend feature flag to enable by default (#66348)
|
feat
|
diff --git a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
index 6154e1b3ce7cf0..f5b4b5b7938d67 100644
--- a/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
+++ b/src/sentry/api/endpoints/source_map_debug_blue_thunder_edition.py
@@ -9,7 +9,7 @@
from rest_framework.request import Request
from rest_framework.response import Response
-from sentry import eventstore, features
+from sentry import eventstore
from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import region_silo_endpoint
@@ -152,15 +152,6 @@ def get(self, request: Request, project: Project, event_id: str) -> Response:
Return a list of source map errors for a given event.
"""
- if not features.has(
- "organizations:source-maps-debugger-blue-thunder-edition",
- project.organization,
- actor=request.user,
- ):
- raise NotFound(
- detail="Endpoint not available without 'organizations:source-maps-debugger-blue-thunder-edition' feature flag"
- )
-
event = eventstore.backend.get_event_by_id(project.id, event_id)
if event is None:
raise NotFound(detail="Event not found")
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index 098505cbca0fc4..be4791a07df5cb 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -1443,7 +1443,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"organizations:performance-span-histogram-view": "Enable histogram view in span details",
"organizations:performance-transaction-name-only-search-indexed": "Enable transaction name only search on indexed",
"organizations:profiling-global-suspect-functions": "Enable global suspect functions in profiling",
- "organizations:source-maps-debugger-blue-thunder-edition": "Enable source maps debugger",
"organizations:sourcemaps-bundle-flat-file-indexing": "Enable the new flat file indexing system for sourcemaps.",
"organizations:sourcemaps-upload-release-as-artifact-bundle": "Upload release bundles as artifact bundles",
"organizations:user-feedback-ui": "Enable User Feedback v2 UI",
@@ -1875,8 +1874,6 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"organizations:settings-legal-tos-ui": False,
# Enable the UI for the overage alert settings
"organizations:slack-overage-notifications": False,
- # Enable source maps debugger
- "organizations:source-maps-debugger-blue-thunder-edition": False,
# Enable the new flat file indexing system for sourcemaps.
"organizations:sourcemaps-bundle-flat-file-indexing": False,
# Upload release bundles as artifact bundles.
diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py
index 779b40f18cec8d..8f9df48096e7eb 100644
--- a/src/sentry/features/__init__.py
+++ b/src/sentry/features/__init__.py
@@ -257,7 +257,6 @@
default_manager.add("organizations:slack-thread", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:slack-block-kit-improvements", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
default_manager.add("organizations:slack-overage-notifications", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
-default_manager.add("organizations:source-maps-debugger-blue-thunder-edition", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:sourcemaps-bundle-flat-file-indexing", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:sourcemaps-upload-release-as-artifact-bundle", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
default_manager.add("organizations:stacktrace-processing-caching", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
diff --git a/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py b/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py
index 68d04c4ec65caa..7be9d534f1c07d 100644
--- a/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py
+++ b/tests/sentry/api/endpoints/test_source_map_debug_blue_thunder_edition.py
@@ -81,1771 +81,1692 @@ def setUp(self) -> None:
self.login_as(self.user)
return super().setUp()
- def test_no_feature_flag(self):
- event = self.store_event(data=create_event([]), project_id=self.project.id)
+ def test_missing_event(self):
resp = self.get_error_response(
self.organization.slug,
self.project.slug,
- event.event_id,
+ "invalid_id",
+ frame_idx=0,
+ exception_idx=0,
status_code=status.HTTP_404_NOT_FOUND,
)
- assert (
- resp.data["detail"]
- == "Endpoint not available without 'organizations:source-maps-debugger-blue-thunder-edition' feature flag"
- )
-
- def test_missing_event(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- resp = self.get_error_response(
- self.organization.slug,
- self.project.slug,
- "invalid_id",
- frame_idx=0,
- exception_idx=0,
- status_code=status.HTTP_404_NOT_FOUND,
- )
- assert resp.data["detail"] == "Event not found"
+ assert resp.data["detail"] == "Event not found"
def test_empty_exceptions_array(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(data=create_event([]), project_id=self.project.id)
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["exceptions"] == []
+ event = self.store_event(data=create_event([]), project_id=self.project.id)
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["exceptions"] == []
def test_has_debug_ids_true(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=[
- {
- "type": "sourcemap",
- "code_file": "/some/path/to/file.js",
- "debug_id": "8d65dbd3-bb6c-5632-9049-7751111284ed",
- }
- ],
- ),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["has_debug_ids"]
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=[
+ {
+ "type": "sourcemap",
+ "code_file": "/some/path/to/file.js",
+ "debug_id": "8d65dbd3-bb6c-5632-9049-7751111284ed",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["has_debug_ids"]
def test_has_debug_ids_false(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=None,
- ),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert not resp.data["has_debug_ids"]
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=None,
+ ),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert not resp.data["has_debug_ids"]
def test_sdk_version(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(sdk={"name": "sentry.javascript.react", "version": "7.66.0"}),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["sdk_version"] == "7.66.0"
+ event = self.store_event(
+ data=create_event(sdk={"name": "sentry.javascript.react", "version": "7.66.0"}),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["sdk_version"] == "7.66.0"
def test_no_sdk_version(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(data=create_event(), project_id=self.project.id)
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["sdk_version"] is None
+ event = self.store_event(data=create_event(), project_id=self.project.id)
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["sdk_version"] is None
def test_sdk_debug_id_support_full(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(sdk={"name": "sentry.javascript.react", "version": "7.66.0"}),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["sdk_debug_id_support"] == "full"
+ event = self.store_event(
+ data=create_event(sdk={"name": "sentry.javascript.react", "version": "7.66.0"}),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["sdk_debug_id_support"] == "full"
def test_sdk_debug_id_support_needs_upgrade(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(sdk={"name": "sentry.javascript.react", "version": "7.47.0"}),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert (
- resp.data["sdk_debug_id_support"] == "needs-upgrade"
- ), MIN_JS_SDK_VERSION_FOR_DEBUG_IDS
+ event = self.store_event(
+ data=create_event(sdk={"name": "sentry.javascript.react", "version": "7.47.0"}),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert (
+ resp.data["sdk_debug_id_support"] == "needs-upgrade"
+ ), MIN_JS_SDK_VERSION_FOR_DEBUG_IDS
def test_sdk_debug_id_support_unsupported(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(sdk={"name": "sentry.javascript.cordova", "version": "7.47.0"}),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["sdk_debug_id_support"] == "not-supported"
+ event = self.store_event(
+ data=create_event(sdk={"name": "sentry.javascript.cordova", "version": "7.47.0"}),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["sdk_debug_id_support"] == "not-supported"
def test_sdk_debug_id_support_community_sdk(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- sdk={"name": "sentry.javascript.some-custom-identifier", "version": "7.47.0"}
- ),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert resp.data["sdk_debug_id_support"] == "unofficial-sdk"
+ event = self.store_event(
+ data=create_event(
+ sdk={"name": "sentry.javascript.some-custom-identifier", "version": "7.47.0"}
+ ),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert resp.data["sdk_debug_id_support"] == "unofficial-sdk"
def test_release_has_some_artifact_positive(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(release="some-release"),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(release="some-release"),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=File.objects.create(name="bundle.js", type="release.file"),
- name="~/bundle.js",
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=File.objects.create(name="bundle.js", type="release.file"),
+ name="~/bundle.js",
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert resp.data["release_has_some_artifact"]
+ assert resp.data["release_has_some_artifact"]
def test_release_has_some_artifact_negative(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(release="some-release"),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(release="some-release"),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert not resp.data["release_has_some_artifact"]
+ assert not resp.data["release_has_some_artifact"]
def test_project_has_some_artifact_bundle_positive(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=File.objects.create(name="artifact-bundle.zip", type="dummy.file"),
- artifact_count=1,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=File.objects.create(name="artifact-bundle.zip", type="dummy.file"),
+ artifact_count=1,
+ )
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- event = self.store_event(
- data=create_event(),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert resp.data["project_has_some_artifact_bundle"]
+ assert resp.data["project_has_some_artifact_bundle"]
def test_project_has_some_artifact_bundle_negative(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert not resp.data["project_has_some_artifact_bundle"]
+ assert not resp.data["project_has_some_artifact_bundle"]
def test_project_has_some_artifact_bundle_with_a_debug_id_positive(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=File.objects.create(name="artifact-bundle.zip", type="dummy.file"),
- artifact_count=1,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=File.objects.create(name="artifact-bundle.zip", type="dummy.file"),
+ artifact_count=1,
+ )
- DebugIdArtifactBundle.objects.create(
- organization_id=self.organization.id,
- debug_id="00000000-00000000-00000000-00000000",
- artifact_bundle=artifact_bundle,
- source_file_type=SourceFileType.SOURCE_MAP.value,
- )
+ DebugIdArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ debug_id="00000000-00000000-00000000-00000000",
+ artifact_bundle=artifact_bundle,
+ source_file_type=SourceFileType.SOURCE_MAP.value,
+ )
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- event = self.store_event(
- data=create_event(),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert resp.data["has_uploaded_some_artifact_with_a_debug_id"]
+ assert resp.data["has_uploaded_some_artifact_with_a_debug_id"]
def test_project_has_some_artifact_bundle_with_a_debug_id_negative(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert not resp.data["has_uploaded_some_artifact_with_a_debug_id"]
+ assert not resp.data["has_uploaded_some_artifact_with_a_debug_id"]
def test_multiple_exceptions(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "/some/path/to/file.js"}),
- create_exception_with_frame(
- {"abs_path": "/some/path/to/some/other/file.js"}
- ),
- ],
- ),
- project_id=self.project.id,
- )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
- assert len(resp.data["exceptions"]) == 2
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "/some/path/to/file.js"}),
+ create_exception_with_frame({"abs_path": "/some/path/to/some/other/file.js"}),
+ ],
+ ),
+ project_id=self.project.id,
+ )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
+ assert len(resp.data["exceptions"]) == 2
def test_frame_debug_id_no_debug_id(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=[
- {
- "type": "sourcemap",
- "code_file": "/some/path/to/file/that/doesnt/match.js",
- "debug_id": "8d65dbd3-bb6c-5632-9049-7751111284ed",
- }
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=[
+ {
+ "type": "sourcemap",
+ "code_file": "/some/path/to/file/that/doesnt/match.js",
+ "debug_id": "8d65dbd3-bb6c-5632-9049-7751111284ed",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
+ debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
- assert debug_id_process_result["debug_id"] is None
- assert not debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
- assert not debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
+ assert debug_id_process_result["debug_id"] is None
+ assert not debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
+ assert not debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
def test_frame_debug_id_no_uploaded_source_no_uploaded_source_map(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=[
- {
- "type": "sourcemap",
- "code_file": "/some/path/to/file.js",
- "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
- }
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=[
+ {
+ "type": "sourcemap",
+ "code_file": "/some/path/to/file.js",
+ "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
+ debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
- assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
- assert not debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
- assert not debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
+ assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
+ assert not debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
+ assert not debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
def test_frame_debug_id_uploaded_source_no_uploaded_source_map(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=File.objects.create(name="artifact-bundle.zip", type="test.file"),
- artifact_count=1,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=File.objects.create(name="artifact-bundle.zip", type="test.file"),
+ artifact_count=1,
+ )
- DebugIdArtifactBundle.objects.create(
- organization_id=self.organization.id,
- debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
- artifact_bundle=artifact_bundle,
- source_file_type=SourceFileType.MINIFIED_SOURCE.value,
- )
+ DebugIdArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ artifact_bundle=artifact_bundle,
+ source_file_type=SourceFileType.MINIFIED_SOURCE.value,
+ )
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=[
- {
- "type": "sourcemap",
- "code_file": "/some/path/to/file.js",
- "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
- }
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=[
+ {
+ "type": "sourcemap",
+ "code_file": "/some/path/to/file.js",
+ "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
+ debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
- assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
- assert debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
- assert not debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
+ assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
+ assert debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
+ assert not debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
def test_frame_debug_id_no_uploaded_source_uploaded_source_map(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=File.objects.create(name="artifact-bundle.zip", type="test.file"),
- artifact_count=1,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=File.objects.create(name="artifact-bundle.zip", type="test.file"),
+ artifact_count=1,
+ )
- DebugIdArtifactBundle.objects.create(
- organization_id=self.organization.id,
- debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
- artifact_bundle=artifact_bundle,
- source_file_type=SourceFileType.SOURCE_MAP.value,
- )
+ DebugIdArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ artifact_bundle=artifact_bundle,
+ source_file_type=SourceFileType.SOURCE_MAP.value,
+ )
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=[
- {
- "type": "sourcemap",
- "code_file": "/some/path/to/file.js",
- "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
- }
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=[
+ {
+ "type": "sourcemap",
+ "code_file": "/some/path/to/file.js",
+ "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
+ debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
- assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
- assert not debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
- assert debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
+ assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
+ assert not debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
+ assert debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
def test_frame_debug_id_uploaded_source_uploaded_source_map(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=File.objects.create(name="artifact-bundle.zip", type="test.file"),
- artifact_count=1,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=File.objects.create(name="artifact-bundle.zip", type="test.file"),
+ artifact_count=1,
+ )
- DebugIdArtifactBundle.objects.create(
- organization_id=self.organization.id,
- debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
- artifact_bundle=artifact_bundle,
- source_file_type=SourceFileType.SOURCE.value,
- )
+ DebugIdArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ artifact_bundle=artifact_bundle,
+ source_file_type=SourceFileType.SOURCE.value,
+ )
- DebugIdArtifactBundle.objects.create(
- organization_id=self.organization.id,
- debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
- artifact_bundle=artifact_bundle,
- source_file_type=SourceFileType.SOURCE_MAP.value,
- )
+ DebugIdArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ debug_id="a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ artifact_bundle=artifact_bundle,
+ source_file_type=SourceFileType.SOURCE_MAP.value,
+ )
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- event = self.store_event(
- data=create_event(
- exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
- debug_meta_images=[
- {
- "type": "sourcemap",
- "code_file": "/some/path/to/file.js",
- "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
- }
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[create_exception_with_frame({"abs_path": "/some/path/to/file.js"})],
+ debug_meta_images=[
+ {
+ "type": "sourcemap",
+ "code_file": "/some/path/to/file.js",
+ "debug_id": "a5764857-ae35-34dc-8f25-a9c9e73aa898",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
+ debug_id_process_result = resp.data["exceptions"][0]["frames"][0]["debug_id_process"]
- assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
- assert debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
- assert debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
+ assert debug_id_process_result["debug_id"] == "a5764857-ae35-34dc-8f25-a9c9e73aa898"
+ assert debug_id_process_result["uploaded_source_file_with_correct_debug_id"]
+ assert debug_id_process_result["uploaded_source_map_with_correct_debug_id"]
def test_frame_release_process_release_file_matching_source_file_names(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["matching_source_file_names"] == [
- "http://example.com/bundle.js",
- "~/bundle.js",
- ]
+ assert release_process_result["matching_source_file_names"] == [
+ "http://example.com/bundle.js",
+ "~/bundle.js",
+ ]
def test_frame_release_process_release_file_source_map_reference(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- file = File.objects.create(name="bundle.js", type="release.file")
- fileobj = ContentFile(
- b'console.log("hello world");\n//# sourceMappingURL=bundle.js.map\n'
- )
- file.putfile(fileobj)
+ file = File.objects.create(name="bundle.js", type="release.file")
+ fileobj = ContentFile(b'console.log("hello world");\n//# sourceMappingURL=bundle.js.map\n')
+ file.putfile(fileobj)
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=file,
- name="~/bundle.js",
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=file,
+ name="~/bundle.js",
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
- assert release_process_result["source_map_reference"] == "bundle.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
+ assert release_process_result["source_map_reference"] == "bundle.js.map"
def test_frame_release_process_release_file_data_protocol_source_map_reference(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- file = File.objects.create(
- name="bundle.js",
- type="release.file",
- headers={
- "Sourcemap": "data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcy"
- },
- )
+ file = File.objects.create(
+ name="bundle.js",
+ type="release.file",
+ headers={
+ "Sourcemap": "data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcy"
+ },
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=file,
- name="~/bundle.js",
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=file,
+ name="~/bundle.js",
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_map_lookup_result"] == "found"
- assert release_process_result["source_map_reference"] == "Inline Sourcemap"
- assert release_process_result["matching_source_map_name"] is None
+ assert release_process_result["source_map_lookup_result"] == "found"
+ assert release_process_result["source_map_reference"] == "Inline Sourcemap"
+ assert release_process_result["matching_source_map_name"] is None
def test_frame_release_process_release_file_source_file_not_found(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "unsuccessful"
- assert release_process_result["source_map_lookup_result"] == "unsuccessful"
- assert release_process_result["source_map_reference"] is None
- assert release_process_result["matching_source_map_name"] is None
+ assert release_process_result["source_file_lookup_result"] == "unsuccessful"
+ assert release_process_result["source_map_lookup_result"] == "unsuccessful"
+ assert release_process_result["source_map_reference"] is None
+ assert release_process_result["matching_source_map_name"] is None
def test_frame_release_process_release_file_source_file_wrong_dist(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- dist="some-dist",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- file = File.objects.create(
- name="bundle.js", type="release.file", headers={"Sourcemap": "bundle.js.map"}
- )
+ file = File.objects.create(
+ name="bundle.js", type="release.file", headers={"Sourcemap": "bundle.js.map"}
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=file,
- name="~/bundle.js",
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=file,
+ name="~/bundle.js",
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "wrong-dist"
- assert release_process_result["source_map_lookup_result"] == "unsuccessful"
- assert release_process_result["source_map_reference"] is None
- assert release_process_result["matching_source_map_name"] is None
+ assert release_process_result["source_file_lookup_result"] == "wrong-dist"
+ assert release_process_result["source_map_lookup_result"] == "unsuccessful"
+ assert release_process_result["source_map_reference"] is None
+ assert release_process_result["matching_source_map_name"] is None
def test_frame_release_process_release_file_source_file_successful(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- file = File.objects.create(
- name="bundle.js", type="release.file", headers={"Sourcemap": "bundle.js.map"}
- )
+ file = File.objects.create(
+ name="bundle.js", type="release.file", headers={"Sourcemap": "bundle.js.map"}
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=file,
- name="~/bundle.js",
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=file,
+ name="~/bundle.js",
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "unsuccessful"
- assert release_process_result["source_map_reference"] == "bundle.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "unsuccessful"
+ assert release_process_result["source_map_reference"] == "bundle.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
def test_frame_release_process_release_file_source_map_wrong_dist(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
- ],
- release="some-release",
- dist="some-dist",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- source_file = File.objects.create(
- name="bundle.js", type="release.file", headers={"Sourcemap": "bundle.js.map"}
- )
+ source_file = File.objects.create(
+ name="bundle.js", type="release.file", headers={"Sourcemap": "bundle.js.map"}
+ )
- source_map_file = File.objects.create(
- name="bundle.js.map",
- type="release.file",
- )
+ source_map_file = File.objects.create(
+ name="bundle.js.map",
+ type="release.file",
+ )
- dist = Distribution.objects.get(name="some-dist", release=release)
+ dist = Distribution.objects.get(name="some-dist", release=release)
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=source_file,
- name="~/bundle.js",
- ident=ReleaseFile.get_ident("~/bundle.js", dist.name),
- dist_id=dist.id,
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=source_file,
+ name="~/bundle.js",
+ ident=ReleaseFile.get_ident("~/bundle.js", dist.name),
+ dist_id=dist.id,
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=source_map_file,
- name="~/bundle.js.map",
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=source_map_file,
+ name="~/bundle.js.map",
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "wrong-dist"
- assert release_process_result["source_map_reference"] == "bundle.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "wrong-dist"
+ assert release_process_result["source_map_reference"] == "bundle.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
def test_frame_release_process_release_file_source_map_successful(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/static/bundle.js"}
- )
- ],
- release="some-release",
- dist="some-dist",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/static/bundle.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- source_file = File.objects.create(
- name="static/bundle.js",
- type="release.file",
- headers={"Sourcemap": "../bundle.js.map"},
- )
+ source_file = File.objects.create(
+ name="static/bundle.js",
+ type="release.file",
+ headers={"Sourcemap": "../bundle.js.map"},
+ )
- source_map_file = File.objects.create(
- name="bundle.js.map",
- type="release.file",
- )
+ source_map_file = File.objects.create(
+ name="bundle.js.map",
+ type="release.file",
+ )
- dist = Distribution.objects.get(name="some-dist", release=release)
+ dist = Distribution.objects.get(name="some-dist", release=release)
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=source_file,
- name="~/static/bundle.js",
- ident=ReleaseFile.get_ident("~/static/bundle.js", dist.name),
- dist_id=dist.id,
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=source_file,
+ name="~/static/bundle.js",
+ ident=ReleaseFile.get_ident("~/static/bundle.js", dist.name),
+ dist_id=dist.id,
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=source_map_file,
- name="~/bundle.js.map",
- ident=ReleaseFile.get_ident("~/bundle.js.map", dist.name),
- dist_id=dist.id,
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=source_map_file,
+ name="~/bundle.js.map",
+ ident=ReleaseFile.get_ident("~/bundle.js.map", dist.name),
+ dist_id=dist.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "found"
- assert release_process_result["source_map_reference"] == "../bundle.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "found"
+ assert release_process_result["source_map_reference"] == "../bundle.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.js.map"
def test_frame_release_process_artifact_bundle_data_protocol_source_map_reference(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr("files/_/_/bundle.min.js", b'console.log("hello world");')
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- "Sourcemap": "data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcy",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr("files/_/_/bundle.min.js", b'console.log("hello world");')
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
+ "Sourcemap": "data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoibWFpbi5qcy",
},
},
- }
- ),
- )
- compressed.seek(0)
-
- file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
- file_obj.putfile(compressed)
-
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
+ },
+ }
),
- project_id=self.project.id,
)
+ compressed.seek(0)
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
+ file_obj.putfile(compressed)
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- artifact_bundle=artifact_bundle,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ artifact_bundle=artifact_bundle,
+ )
+
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/bundle.min.js",
+ )
+
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "found"
- assert release_process_result["source_map_reference"] == "Inline Sourcemap"
- assert release_process_result["matching_source_map_name"] is None
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "found"
+ assert release_process_result["source_map_reference"] == "Inline Sourcemap"
+ assert release_process_result["matching_source_map_name"] is None
def test_frame_release_process_artifact_bundle_source_file_wrong_dist(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
-
- file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
- file_obj.putfile(compressed)
-
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
- dist="some-dist",
+ },
+ }
),
- project_id=self.project.id,
)
+ compressed.seek(0)
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
+ file_obj.putfile(compressed)
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- artifact_bundle=artifact_bundle,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ artifact_bundle=artifact_bundle,
+ )
+
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/bundle.min.js",
+ )
+
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "wrong-dist"
+ assert release_process_result["source_file_lookup_result"] == "wrong-dist"
def test_frame_release_process_artifact_bundle_source_file_successful(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
-
- file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
- file_obj.putfile(compressed)
-
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
+ },
+ }
),
- project_id=self.project.id,
)
+ compressed.seek(0)
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
+ file_obj.putfile(compressed)
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- artifact_bundle=artifact_bundle,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ artifact_bundle=artifact_bundle,
+ )
+
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/bundle.min.js",
+ )
+
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_file_lookup_result"] == "found"
def test_frame_release_process_artifact_bundle_source_map_not_found(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr("files/_/_/bundle.min.js.map", b"")
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr("files/_/_/bundle.min.js.map", b"")
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
- "files/_/_/wrong-bundle.min.js.map": {
- "url": "~/wrong-bundle.min.js.map",
- "type": "source_map",
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "files/_/_/wrong-bundle.min.js.map": {
+ "url": "~/wrong-bundle.min.js.map",
+ "type": "source_map",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
-
- file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
- file_obj.putfile(compressed)
-
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
+ },
+ }
),
- project_id=self.project.id,
)
+ compressed.seek(0)
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
+ file_obj.putfile(compressed)
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- artifact_bundle=artifact_bundle,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/wrong-bundle.min.js.map",
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ artifact_bundle=artifact_bundle,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/bundle.min.js",
+ )
+
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/wrong-bundle.min.js.map",
+ )
+
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "unsuccessful"
- assert release_process_result["source_map_reference"] == "bundle.min.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "unsuccessful"
+ assert release_process_result["source_map_reference"] == "bundle.min.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
def test_frame_release_process_artifact_bundle_source_map_wrong_dist(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr("files/_/_/bundle.min.js.map", b"")
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr("files/_/_/bundle.min.js.map", b"")
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
- "files/_/_/bundle.min.js.map": {
- "url": "~/bundle.min.js.map",
- "type": "source_map",
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "files/_/_/bundle.min.js.map": {
+ "url": "~/bundle.min.js.map",
+ "type": "source_map",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
-
- file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
- file_obj.putfile(compressed)
-
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
- dist="some-dist",
+ },
+ }
),
- project_id=self.project.id,
)
+ compressed.seek(0)
- source_file_artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
+ file_obj.putfile(compressed)
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=source_file_artifact_bundle,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- dist_name="some-dist",
- artifact_bundle=source_file_artifact_bundle,
- )
+ source_file_artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=source_file_artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=source_file_artifact_bundle,
+ )
- source_map_artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ dist_name="some-dist",
+ artifact_bundle=source_file_artifact_bundle,
+ )
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=source_map_artifact_bundle,
- )
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=source_file_artifact_bundle,
+ url="~/bundle.min.js",
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- dist_name="some-other-dist",
- artifact_bundle=source_map_artifact_bundle,
- )
+ source_map_artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=source_map_artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=source_map_artifact_bundle,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=source_map_artifact_bundle,
- url="~/bundle.min.js.map",
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ dist_name="some-other-dist",
+ artifact_bundle=source_map_artifact_bundle,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=source_map_artifact_bundle,
+ url="~/bundle.min.js",
+ )
+
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=source_map_artifact_bundle,
+ url="~/bundle.min.js.map",
+ )
+
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "wrong-dist"
- assert release_process_result["source_map_reference"] == "bundle.min.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "wrong-dist"
+ assert release_process_result["source_map_reference"] == "bundle.min.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
def test_frame_release_process_artifact_bundle_source_map_successful(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr("files/_/_/bundle.min.js.map", b"")
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr("files/_/_/bundle.min.js.map", b"")
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
- "files/_/_/bundle.min.js.map": {
- "url": "~/bundle.min.js.map",
- "type": "source_map",
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "files/_/_/bundle.min.js.map": {
+ "url": "~/bundle.min.js.map",
+ "type": "source_map",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
-
- file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
- file_obj.putfile(compressed)
-
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
+ },
+ }
),
- project_id=self.project.id,
)
+ compressed.seek(0)
- artifact_bundle = ArtifactBundle.objects.create(
- organization_id=self.organization.id,
- file=file_obj,
- artifact_count=1,
- )
+ file_obj = File.objects.create(name="artifact_bundle.zip", type="artifact.bundle")
+ file_obj.putfile(compressed)
- ProjectArtifactBundle.objects.create(
- organization_id=self.organization.id,
- project_id=self.project.id,
- artifact_bundle=artifact_bundle,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ ),
+ project_id=self.project.id,
+ )
- ReleaseArtifactBundle.objects.create(
- organization_id=self.organization.id,
- release_name="some-release",
- artifact_bundle=artifact_bundle,
- )
+ artifact_bundle = ArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ file=file_obj,
+ artifact_count=1,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/bundle.min.js",
- )
+ ProjectArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ project_id=self.project.id,
+ artifact_bundle=artifact_bundle,
+ )
- ArtifactBundleIndex.objects.create(
- organization_id=self.organization.id,
- artifact_bundle=artifact_bundle,
- url="~/bundle.min.js.map",
- )
+ ReleaseArtifactBundle.objects.create(
+ organization_id=self.organization.id,
+ release_name="some-release",
+ artifact_bundle=artifact_bundle,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/bundle.min.js",
+ )
+
+ ArtifactBundleIndex.objects.create(
+ organization_id=self.organization.id,
+ artifact_bundle=artifact_bundle,
+ url="~/bundle.min.js.map",
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "found"
- assert release_process_result["source_map_reference"] == "bundle.min.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "found"
+ assert release_process_result["source_map_reference"] == "bundle.min.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
def test_frame_release_file_success(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
- dist="some-dist",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
- dist = Distribution.objects.get(name="some-dist", release=release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
+ dist = Distribution.objects.get(name="some-dist", release=release)
- artifact_index = File.objects.create(
- name="artifact-index.json",
- type=ARTIFACT_INDEX_TYPE,
- )
+ artifact_index = File.objects.create(
+ name="artifact-index.json",
+ type=ARTIFACT_INDEX_TYPE,
+ )
- artifact_index.putfile(
- ContentFile(
- json.dumps(
- {
- "files": {
- "~/bundle.min.js": {
- "type": "minified_source",
- "archive_ident": ReleaseFile.get_ident(
- "release-artifacts.zip", dist.name
- ),
- "headers": {
- "content-type": "application/json",
- },
+ artifact_index.putfile(
+ ContentFile(
+ json.dumps(
+ {
+ "files": {
+ "~/bundle.min.js": {
+ "type": "minified_source",
+ "archive_ident": ReleaseFile.get_ident(
+ "release-artifacts.zip", dist.name
+ ),
+ "headers": {
+ "content-type": "application/json",
},
- "~/bundle.min.js.map": {
- "type": "source_map",
- "archive_ident": ReleaseFile.get_ident(
- "release-artifacts.zip", dist.name
- ),
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "~/bundle.min.js.map": {
+ "type": "source_map",
+ "archive_ident": ReleaseFile.get_ident(
+ "release-artifacts.zip", dist.name
+ ),
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ).encode()
- )
+ },
+ }
+ ).encode()
)
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=artifact_index,
- name=ARTIFACT_INDEX_FILENAME,
- ident=ReleaseFile.get_ident(ARTIFACT_INDEX_FILENAME, dist.name),
- dist_id=dist.id,
- artifact_count=2,
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=artifact_index,
+ name=ARTIFACT_INDEX_FILENAME,
+ ident=ReleaseFile.get_ident(ARTIFACT_INDEX_FILENAME, dist.name),
+ dist_id=dist.id,
+ artifact_count=2,
+ )
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr("files/_/_/bundle.min.js.map", b"")
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr("files/_/_/bundle.min.js.map", b"")
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
- "files/_/_/bundle.min.js.map": {
- "url": "~/bundle.min.js.map",
- "type": "source_map",
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "files/_/_/bundle.min.js.map": {
+ "url": "~/bundle.min.js.map",
+ "type": "source_map",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
- release_artifact_bundle = File.objects.create(
- name="release-artifacts.zip", type="release.bundle"
- )
- release_artifact_bundle.putfile(compressed)
-
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=release_artifact_bundle,
- name="release-artifacts.zip",
- ident=ReleaseFile.get_ident("release-artifacts.zip", dist.name),
- dist_id=dist.id,
- artifact_count=0,
+ },
+ }
+ ),
)
+ compressed.seek(0)
+ release_artifact_bundle = File.objects.create(
+ name="release-artifacts.zip", type="release.bundle"
+ )
+ release_artifact_bundle.putfile(compressed)
+
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=release_artifact_bundle,
+ name="release-artifacts.zip",
+ ident=ReleaseFile.get_ident("release-artifacts.zip", dist.name),
+ dist_id=dist.id,
+ artifact_count=0,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "found"
- assert release_process_result["source_map_lookup_result"] == "found"
- assert release_process_result["source_map_reference"] == "bundle.min.js.map"
- assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
+ assert release_process_result["source_file_lookup_result"] == "found"
+ assert release_process_result["source_map_lookup_result"] == "found"
+ assert release_process_result["source_map_reference"] == "bundle.min.js.map"
+ assert release_process_result["matching_source_map_name"] == "~/bundle.min.js.map"
def test_frame_release_file_wrong_dist(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frame(
- {"abs_path": "http://example.com/bundle.min.js"}
- )
- ],
- release="some-release",
- dist="some-dist",
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frame({"abs_path": "http://example.com/bundle.min.js"})
+ ],
+ release="some-release",
+ dist="some-dist",
+ ),
+ project_id=self.project.id,
+ )
- release = Release.objects.get(organization=self.organization, version=event.release)
+ release = Release.objects.get(organization=self.organization, version=event.release)
- artifact_index = File.objects.create(
- name="artifact-index.json",
- type=ARTIFACT_INDEX_TYPE,
- )
+ artifact_index = File.objects.create(
+ name="artifact-index.json",
+ type=ARTIFACT_INDEX_TYPE,
+ )
- artifact_index.putfile(
- ContentFile(
- json.dumps(
- {
- "files": {
- "~/bundle.min.js": {
- "type": "minified_source",
- "archive_ident": ReleaseFile.get_ident("release-artifacts.zip"),
- "headers": {
- "content-type": "application/json",
- },
+ artifact_index.putfile(
+ ContentFile(
+ json.dumps(
+ {
+ "files": {
+ "~/bundle.min.js": {
+ "type": "minified_source",
+ "archive_ident": ReleaseFile.get_ident("release-artifacts.zip"),
+ "headers": {
+ "content-type": "application/json",
},
- "~/bundle.min.js.map": {
- "type": "source_map",
- "archive_ident": ReleaseFile.get_ident("release-artifacts.zip"),
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "~/bundle.min.js.map": {
+ "type": "source_map",
+ "archive_ident": ReleaseFile.get_ident("release-artifacts.zip"),
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ).encode()
- )
+ },
+ }
+ ).encode()
)
+ )
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=artifact_index,
- name=ARTIFACT_INDEX_FILENAME,
- ident=ReleaseFile.get_ident(ARTIFACT_INDEX_FILENAME),
- artifact_count=2,
- )
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=artifact_index,
+ name=ARTIFACT_INDEX_FILENAME,
+ ident=ReleaseFile.get_ident(ARTIFACT_INDEX_FILENAME),
+ artifact_count=2,
+ )
- compressed = BytesIO(b"SYSB")
- with zipfile.ZipFile(compressed, "a") as zip_file:
- zip_file.writestr(
- "files/_/_/bundle.min.js",
- b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
- )
- zip_file.writestr("files/_/_/bundle.min.js.map", b"")
- zip_file.writestr(
- "manifest.json",
- json.dumps(
- {
- "files": {
- "files/_/_/bundle.min.js": {
- "url": "~/bundle.min.js",
- "type": "minified_source",
- "headers": {
- "content-type": "application/json",
- },
+ compressed = BytesIO(b"SYSB")
+ with zipfile.ZipFile(compressed, "a") as zip_file:
+ zip_file.writestr(
+ "files/_/_/bundle.min.js",
+ b'console.log("hello world");\n//# sourceMappingURL=bundle.min.js.map\n',
+ )
+ zip_file.writestr("files/_/_/bundle.min.js.map", b"")
+ zip_file.writestr(
+ "manifest.json",
+ json.dumps(
+ {
+ "files": {
+ "files/_/_/bundle.min.js": {
+ "url": "~/bundle.min.js",
+ "type": "minified_source",
+ "headers": {
+ "content-type": "application/json",
},
- "files/_/_/bundle.min.js.map": {
- "url": "~/bundle.min.js.map",
- "type": "source_map",
- "headers": {
- "content-type": "application/json",
- },
+ },
+ "files/_/_/bundle.min.js.map": {
+ "url": "~/bundle.min.js.map",
+ "type": "source_map",
+ "headers": {
+ "content-type": "application/json",
},
},
- }
- ),
- )
- compressed.seek(0)
- release_artifact_bundle = File.objects.create(
- name="release-artifacts.zip", type="release.bundle"
- )
- release_artifact_bundle.putfile(compressed)
-
- ReleaseFile.objects.create(
- organization_id=self.organization.id,
- release_id=release.id,
- file=release_artifact_bundle,
- name="release-artifacts.zip",
- ident=ReleaseFile.get_ident("release-artifacts.zip"),
- artifact_count=0,
+ },
+ }
+ ),
)
+ compressed.seek(0)
+ release_artifact_bundle = File.objects.create(
+ name="release-artifacts.zip", type="release.bundle"
+ )
+ release_artifact_bundle.putfile(compressed)
+
+ ReleaseFile.objects.create(
+ organization_id=self.organization.id,
+ release_id=release.id,
+ file=release_artifact_bundle,
+ name="release-artifacts.zip",
+ ident=ReleaseFile.get_ident("release-artifacts.zip"),
+ artifact_count=0,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
+ release_process_result = resp.data["exceptions"][0]["frames"][0]["release_process"]
- assert release_process_result["source_file_lookup_result"] == "wrong-dist"
- assert release_process_result["source_map_lookup_result"] == "unsuccessful"
+ assert release_process_result["source_file_lookup_result"] == "wrong-dist"
+ assert release_process_result["source_map_lookup_result"] == "unsuccessful"
def test_has_scraping_data_flag_true(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[],
- scraping_attempts=[
- {
- "url": "https://example.com/bundle0.js",
- "status": "success",
- }
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[],
+ scraping_attempts=[
+ {
+ "url": "https://example.com/bundle0.js",
+ "status": "success",
+ }
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert resp.data["has_scraping_data"]
+ assert resp.data["has_scraping_data"]
def test_has_scraping_data_flag_false(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(exceptions=[]),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(exceptions=[]),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert not resp.data["has_scraping_data"]
+ assert not resp.data["has_scraping_data"]
def test_scraping_result_source_file(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frames(
- [
- {"abs_path": "https://example.com/bundle0.js"},
- {"abs_path": "https://example.com/bundle1.js"},
- {"abs_path": "https://example.com/bundle2.js"},
- {"abs_path": "https://example.com/bundle3.js"},
- ]
- ),
- ],
- scraping_attempts=[
- {
- "url": "https://example.com/bundle0.js",
- "status": "success",
- },
- {
- "url": "https://example.com/bundle1.js",
- "status": "not_attempted",
- },
- {
- "url": "https://example.com/bundle2.js",
- "status": "failure",
- "reason": "not_found",
- "details": "Did not find source",
- },
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frames(
+ [
+ {"abs_path": "https://example.com/bundle0.js"},
+ {"abs_path": "https://example.com/bundle1.js"},
+ {"abs_path": "https://example.com/bundle2.js"},
+ {"abs_path": "https://example.com/bundle3.js"},
+ ]
+ ),
+ ],
+ scraping_attempts=[
+ {
+ "url": "https://example.com/bundle0.js",
+ "status": "success",
+ },
+ {
+ "url": "https://example.com/bundle1.js",
+ "status": "not_attempted",
+ },
+ {
+ "url": "https://example.com/bundle2.js",
+ "status": "failure",
+ "reason": "not_found",
+ "details": "Did not find source",
+ },
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert resp.data["exceptions"][0]["frames"][0]["scraping_process"]["source_file"] == {
- "url": "https://example.com/bundle0.js",
- "status": "success",
- }
- assert resp.data["exceptions"][0]["frames"][1]["scraping_process"]["source_file"] == {
- "url": "https://example.com/bundle1.js",
- "status": "not_attempted",
- }
- assert resp.data["exceptions"][0]["frames"][2]["scraping_process"]["source_file"] == {
- "url": "https://example.com/bundle2.js",
- "status": "failure",
- "reason": "not_found",
- "details": "Did not find source",
- }
- assert (
- resp.data["exceptions"][0]["frames"][3]["scraping_process"]["source_file"] is None
- )
+ assert resp.data["exceptions"][0]["frames"][0]["scraping_process"]["source_file"] == {
+ "url": "https://example.com/bundle0.js",
+ "status": "success",
+ }
+ assert resp.data["exceptions"][0]["frames"][1]["scraping_process"]["source_file"] == {
+ "url": "https://example.com/bundle1.js",
+ "status": "not_attempted",
+ }
+ assert resp.data["exceptions"][0]["frames"][2]["scraping_process"]["source_file"] == {
+ "url": "https://example.com/bundle2.js",
+ "status": "failure",
+ "reason": "not_found",
+ "details": "Did not find source",
+ }
+ assert resp.data["exceptions"][0]["frames"][3]["scraping_process"]["source_file"] is None
def test_scraping_result_source_map(self):
- with self.feature("organizations:source-maps-debugger-blue-thunder-edition"):
- event = self.store_event(
- data=create_event(
- exceptions=[
- create_exception_with_frames(
- frames=[
- {
- "abs_path": "./app/index.ts",
- "data": {"sourcemap": "https://example.com/bundle0.js.map"},
- },
- {
- "abs_path": "./app/index.ts",
- "data": {"sourcemap": "https://example.com/bundle1.js.map"},
- },
- {
- "abs_path": "./app/index.ts",
- "data": {"sourcemap": "https://example.com/bundle2.js.map"},
- },
- {
- "abs_path": "./app/index.ts",
- "data": {"sourcemap": "https://example.com/bundle3.js.map"},
- },
- ],
- raw_frames=[
- {
- "abs_path": "https://example.com/bundle0.js",
- },
- {
- "abs_path": "https://example.com/bundle1.js",
- },
- {
- "abs_path": "https://example.com/bundle2.js",
- },
- {
- "abs_path": "https://example.com/bundle3.js",
- },
- ],
- )
- ],
- scraping_attempts=[
- {
- "url": "https://example.com/bundle0.js.map",
- "status": "success",
- },
- {
- "url": "https://example.com/bundle1.js.map",
- "status": "not_attempted",
- },
- {
- "url": "https://example.com/bundle2.js.map",
- "status": "failure",
- "reason": "not_found",
- "details": "Did not find source",
- },
- ],
- ),
- project_id=self.project.id,
- )
+ event = self.store_event(
+ data=create_event(
+ exceptions=[
+ create_exception_with_frames(
+ frames=[
+ {
+ "abs_path": "./app/index.ts",
+ "data": {"sourcemap": "https://example.com/bundle0.js.map"},
+ },
+ {
+ "abs_path": "./app/index.ts",
+ "data": {"sourcemap": "https://example.com/bundle1.js.map"},
+ },
+ {
+ "abs_path": "./app/index.ts",
+ "data": {"sourcemap": "https://example.com/bundle2.js.map"},
+ },
+ {
+ "abs_path": "./app/index.ts",
+ "data": {"sourcemap": "https://example.com/bundle3.js.map"},
+ },
+ ],
+ raw_frames=[
+ {
+ "abs_path": "https://example.com/bundle0.js",
+ },
+ {
+ "abs_path": "https://example.com/bundle1.js",
+ },
+ {
+ "abs_path": "https://example.com/bundle2.js",
+ },
+ {
+ "abs_path": "https://example.com/bundle3.js",
+ },
+ ],
+ )
+ ],
+ scraping_attempts=[
+ {
+ "url": "https://example.com/bundle0.js.map",
+ "status": "success",
+ },
+ {
+ "url": "https://example.com/bundle1.js.map",
+ "status": "not_attempted",
+ },
+ {
+ "url": "https://example.com/bundle2.js.map",
+ "status": "failure",
+ "reason": "not_found",
+ "details": "Did not find source",
+ },
+ ],
+ ),
+ project_id=self.project.id,
+ )
- resp = self.get_success_response(
- self.organization.slug,
- self.project.slug,
- event.event_id,
- )
+ resp = self.get_success_response(
+ self.organization.slug,
+ self.project.slug,
+ event.event_id,
+ )
- assert resp.data["exceptions"][0]["frames"][0]["scraping_process"]["source_map"] == {
- "url": "https://example.com/bundle0.js.map",
- "status": "success",
- }
- assert resp.data["exceptions"][0]["frames"][1]["scraping_process"]["source_map"] == {
- "url": "https://example.com/bundle1.js.map",
- "status": "not_attempted",
- }
- assert resp.data["exceptions"][0]["frames"][2]["scraping_process"]["source_map"] == {
- "url": "https://example.com/bundle2.js.map",
- "status": "failure",
- "reason": "not_found",
- "details": "Did not find source",
- }
- assert resp.data["exceptions"][0]["frames"][3]["scraping_process"]["source_map"] is None
+ assert resp.data["exceptions"][0]["frames"][0]["scraping_process"]["source_map"] == {
+ "url": "https://example.com/bundle0.js.map",
+ "status": "success",
+ }
+ assert resp.data["exceptions"][0]["frames"][1]["scraping_process"]["source_map"] == {
+ "url": "https://example.com/bundle1.js.map",
+ "status": "not_attempted",
+ }
+ assert resp.data["exceptions"][0]["frames"][2]["scraping_process"]["source_map"] == {
+ "url": "https://example.com/bundle2.js.map",
+ "status": "failure",
+ "reason": "not_found",
+ "details": "Did not find source",
+ }
+ assert resp.data["exceptions"][0]["frames"][3]["scraping_process"]["source_map"] is None
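The assertions in this test pair each stack frame with the scraping attempt whose url matches the frame, and fall back to None when no attempt was recorded. As a rough illustration of that lookup only, here is a self-contained sketch using plain dicts; the helper name and data below are hypothetical, not the endpoint's actual implementation.

from typing import Dict, List, Optional

def match_attempt(url: str, attempts: List[Dict[str, str]]) -> Optional[Dict[str, str]]:
    # Return the scraping attempt recorded for `url`, or None if none matches.
    return next((a for a in attempts if a["url"] == url), None)

attempts = [
    {"url": "https://example.com/bundle0.js", "status": "success"},
    {"url": "https://example.com/bundle2.js", "status": "failure", "reason": "not_found"},
]
assert match_attempt("https://example.com/bundle0.js", attempts)["status"] == "success"
assert match_attempt("https://example.com/bundle3.js", attempts) is None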
|
2a985cc1f7411e68e34bbd7c28440409a7952e2a
|
2023-10-16 20:52:05
|
Lukas Stracke
|
chore(deps): Bump platformicons (#58158)
| false
|
Bump platformicons (#58158)
|
chore
|
diff --git a/package.json b/package.json
index fe1b871a8db992..1a32374aadd514 100644
--- a/package.json
+++ b/package.json
@@ -134,7 +134,7 @@
"papaparse": "^5.3.2",
"pegjs": "^0.10.0",
"pegjs-loader": "^0.5.6",
- "platformicons": "^5.6.5",
+ "platformicons": "^5.7.0",
"po-catalog-loader": "2.0.0",
"prettier": "3.0.3",
"prismjs": "^1.29.0",
diff --git a/yarn.lock b/yarn.lock
index 3d2c847adee404..a9568b25949283 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9175,10 +9175,10 @@ platform@^1.3.3:
resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7"
integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==
-platformicons@^5.6.5:
- version "5.6.5"
- resolved "https://registry.yarnpkg.com/platformicons/-/platformicons-5.6.5.tgz#5cc0a2a39d78c5e71a23cc2f2a69f495947974bc"
- integrity sha512-S96AArz7t6xMxfVW7BSyLKD+EJrAZ49JhcRVdPfVrmH5meLZxfD+Sc9hXY2kb2cTmQmYnRm/bi04kUt3WR0J3Q==
+platformicons@^5.7.0:
+ version "5.7.0"
+ resolved "https://registry.yarnpkg.com/platformicons/-/platformicons-5.7.0.tgz#0499b7f1908774dfdca601c43e4ad91c3bbfcddd"
+ integrity sha512-sI9TbWkx3LCQY/UUGBm9loibbZHjdkmxD1rLyhm+3F0vdjm31qS44td8X1DNCdhXDp7gC30sl8mzcQ1J9s0/nw==
dependencies:
"@types/node" "*"
"@types/react" "*"
|
64f550d17f6099c691095e41e590c9b0c754ae94
|
2021-09-22 04:52:48
|
Marcos Gaeta
|
ref(notifications): Clean up Team Notifications for Issue Owners (#28624)
| false
|
Clean up Team Notifications for Issue Owners (#28624)
|
ref
|
diff --git a/src/sentry/api/serializers/models/notification_setting.py b/src/sentry/api/serializers/models/notification_setting.py
index 7f6121dd9a7b89..ab33d2ce736b03 100644
--- a/src/sentry/api/serializers/models/notification_setting.py
+++ b/src/sentry/api/serializers/models/notification_setting.py
@@ -16,10 +16,10 @@ class NotificationSettingsSerializer(Serializer): # type: ignore
def get_attrs(
self,
- item_list: Union[Iterable[Team], Iterable[User]],
+ item_list: Iterable[Union["Team", "User"]],
user: User,
**kwargs: Any,
- ) -> Mapping[Union[User, Team], Mapping[str, Iterable[Any]]]:
+ ) -> Mapping[Union["Team", "User"], Mapping[str, Iterable[Any]]]:
"""
This takes a list of recipients (which are either Users or Teams,
because both can have Notification Settings). The function
@@ -40,7 +40,7 @@ def get_attrs(
target_ids=actor_mapping.keys(),
)
- results: MutableMapping[Union[User, Team], MutableMapping[str, Set[Any]]] = defaultdict(
+ results: MutableMapping[Union["Team", "User"], MutableMapping[str, Set[Any]]] = defaultdict(
lambda: defaultdict(set)
)
@@ -62,7 +62,7 @@ def get_attrs(
def serialize(
self,
- obj: Union[User, Team],
+ obj: Union["Team", "User"],
attrs: Mapping[str, Iterable[Any]],
user: User,
**kwargs: Any,
diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py
index 74a07e6cff9edb..d61fa117c362e7 100644
--- a/src/sentry/api/serializers/models/project.py
+++ b/src/sentry/api/serializers/models/project.py
@@ -240,7 +240,7 @@ def measure_span(op_tag):
for project, serialized in result.items():
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
- user=user,
+ recipient=user,
parent_id=project.id,
type=NotificationSettingTypes.ISSUE_ALERTS,
should_use_slack_automatic=should_use_slack_automatic_by_organization_id[
diff --git a/src/sentry/incidents/action_handlers.py b/src/sentry/incidents/action_handlers.py
index f5667ad31199dd..24cda0d5b081c9 100644
--- a/src/sentry/incidents/action_handlers.py
+++ b/src/sentry/incidents/action_handlers.py
@@ -63,7 +63,7 @@ def _get_targets(self) -> Set[int]:
return {target.id}
elif self.action.target_type == AlertRuleTriggerAction.TargetType.TEAM.value:
- users = NotificationSetting.objects.filter_to_subscribed_users(
+ users = NotificationSetting.objects.filter_to_accepting_recipients(
self.project,
{member.user for member in target.member_set},
)[ExternalProviders.EMAIL]
diff --git a/src/sentry/integrations/slack/message_builder/issues.py b/src/sentry/integrations/slack/message_builder/issues.py
index 0bcfcd0573a725..96550d441837df 100644
--- a/src/sentry/integrations/slack/message_builder/issues.py
+++ b/src/sentry/integrations/slack/message_builder/issues.py
@@ -29,7 +29,7 @@
from ..utils import build_notification_footer
-def format_actor_option(actor: Union[User, Team]) -> Mapping[str, str]:
+def format_actor_option(actor: Union["Team", "User"]) -> Mapping[str, str]:
if isinstance(actor, User):
return {"text": actor.get_display_name(), "value": f"user:{actor.id}"}
if isinstance(actor, Team):
@@ -313,7 +313,7 @@ def __init__(
link_to_event: bool = False,
issue_details: bool = False,
notification: Optional[BaseNotification] = None,
- recipient: Optional[Union[Team, User]] = None,
+ recipient: Optional[Union["Team", "User"]] = None,
) -> None:
super().__init__()
self.group = group
diff --git a/src/sentry/integrations/slack/message_builder/notifications.py b/src/sentry/integrations/slack/message_builder/notifications.py
index 5c5df3a2ed4723..42b2402759edbd 100644
--- a/src/sentry/integrations/slack/message_builder/notifications.py
+++ b/src/sentry/integrations/slack/message_builder/notifications.py
@@ -44,7 +44,7 @@ def __init__(
self,
notification: BaseNotification,
context: Mapping[str, Any],
- recipient: Union[Team, User],
+ recipient: Union["Team", "User"],
) -> None:
super().__init__()
self.notification = notification
@@ -90,7 +90,7 @@ def build(self) -> SlackBody:
def build_notification_attachment(
notification: BaseNotification,
context: Mapping[str, Any],
- recipient: Union[Team, User],
+ recipient: Union["Team", "User"],
) -> SlackBody:
"""@deprecated"""
return SlackNotificationsMessageBuilder(notification, context, recipient).build()
diff --git a/src/sentry/integrations/slack/notifications.py b/src/sentry/integrations/slack/notifications.py
index b3802cc8c52109..7d8a8bc1611a4c 100644
--- a/src/sentry/integrations/slack/notifications.py
+++ b/src/sentry/integrations/slack/notifications.py
@@ -1,6 +1,6 @@
import logging
from collections import defaultdict
-from typing import AbstractSet, Any, Mapping, MutableMapping, Optional, Set, Union
+from typing import Any, Iterable, Mapping, MutableMapping, Optional, Union
from sentry import analytics
from sentry.integrations.slack.client import SlackClient # NOQA
@@ -20,20 +20,20 @@
def get_context(
notification: BaseNotification,
- recipient: Union[User, Team],
+ recipient: Union["Team", "User"],
shared_context: Mapping[str, Any],
extra_context: Mapping[str, Any],
) -> Mapping[str, Any]:
"""Compose the various levels of context and add Slack-specific fields."""
return {
**shared_context,
- **notification.get_user_context(recipient, extra_context),
+ **notification.get_recipient_context(recipient, extra_context),
}
def get_channel_and_integration_by_user(
- user: User, organization: Organization
-) -> Mapping[str, Integration]:
+ user: "User", organization: "Organization"
+) -> Mapping[str, "Integration"]:
identities = Identity.objects.filter(
idp__type=EXTERNAL_PROVIDERS[ExternalProviders.SLACK],
@@ -63,8 +63,8 @@ def get_channel_and_integration_by_user(
def get_channel_and_integration_by_team(
- team: Team, organization: Organization
-) -> Mapping[str, Integration]:
+ team: "Team", organization: "Organization"
+) -> Mapping[str, "Integration"]:
try:
external_actor = (
ExternalActor.objects.filter(
@@ -82,9 +82,9 @@ def get_channel_and_integration_by_team(
def get_channel_and_token_by_recipient(
- organization: Organization, recipients: AbstractSet[Union[User, Team]]
-) -> Mapping[Union[User, Team], Mapping[str, str]]:
- output: MutableMapping[Union[User, Team], MutableMapping[str, str]] = defaultdict(dict)
+ organization: "Organization", recipients: Iterable[Union["Team", "User"]]
+) -> Mapping[Union["Team", "User"], Mapping[str, str]]:
+ output: MutableMapping[Union["Team", "User"], MutableMapping[str, str]] = defaultdict(dict)
for recipient in recipients:
channels_to_integrations = (
get_channel_and_integration_by_user(recipient, organization)
@@ -121,7 +121,7 @@ def get_key(notification: BaseNotification) -> str:
@register_notification_provider(ExternalProviders.SLACK)
def send_notification_as_slack(
notification: BaseNotification,
- recipients: Union[Set[User], Set[Team]],
+ recipients: Iterable[Union["Team", "User"]],
shared_context: Mapping[str, Any],
extra_context_by_user_id: Optional[Mapping[int, Mapping[str, Any]]],
) -> None:
diff --git a/src/sentry/integrations/slack/utils.py b/src/sentry/integrations/slack/utils.py
index cf6ef72d8fc26b..738c7ee22c3e24 100644
--- a/src/sentry/integrations/slack/utils.py
+++ b/src/sentry/integrations/slack/utils.py
@@ -424,7 +424,9 @@ def get_settings_url(notification: BaseNotification) -> str:
return str(urljoin(absolute_uri(url_str), get_referrer_qstring(notification)))
-def build_notification_footer(notification: BaseNotification, recipient: Union[Team, User]) -> str:
+def build_notification_footer(
+ notification: BaseNotification, recipient: Union["Team", "User"]
+) -> str:
if isinstance(recipient, Team):
team = Team.objects.get(id=recipient.id)
url_str = f"/settings/{notification.organization.slug}/teams/{team.slug}/notifications/"
diff --git a/src/sentry/mail/notifications.py b/src/sentry/mail/notifications.py
index 7caf162c3fd6eb..5ba4ad9b6c7a0e 100644
--- a/src/sentry/mail/notifications.py
+++ b/src/sentry/mail/notifications.py
@@ -1,10 +1,10 @@
import logging
-from typing import Any, Mapping, Optional, Set
+from typing import Any, Iterable, Mapping, Optional, Union
from django.utils.encoding import force_text
from sentry import options
-from sentry.models import Project, ProjectOption, User
+from sentry.models import Project, ProjectOption, Team, User
from sentry.notifications.notifications.activity.base import ActivityNotification
from sentry.notifications.notifications.base import BaseNotification
from sentry.notifications.notifications.rules import AlertRuleNotification
@@ -36,7 +36,7 @@ def get_headers(notification: BaseNotification) -> Mapping[str, Any]:
return headers
-def build_subject_prefix(project: Project, mail_option_key: Optional[str] = None) -> str:
+def build_subject_prefix(project: "Project", mail_option_key: Optional[str] = None) -> str:
key = mail_option_key or "mail:subject_prefix"
return force_text(
ProjectOption.objects.get_value(project, key) or options.get("mail.subject-prefix")
@@ -64,10 +64,10 @@ def get_unsubscribe_link(
)
-def log_message(notification: BaseNotification, user: User) -> None:
+def log_message(notification: BaseNotification, recipient: Union["Team", "User"]) -> None:
extra = {
"project_id": notification.project.id,
- "user_id": user.id,
+ "actor_id": recipient.actor_id,
}
group = getattr(notification, "group", None)
if group:
@@ -88,7 +88,7 @@ def log_message(notification: BaseNotification, user: User) -> None:
def get_context(
notification: BaseNotification,
- user: User,
+ recipient: Union["Team", "User"],
shared_context: Mapping[str, Any],
extra_context: Mapping[str, Any],
) -> Mapping[str, Any]:
@@ -99,12 +99,14 @@ def get_context(
"""
context = {
**shared_context,
- **notification.get_user_context(user, extra_context),
+ **notification.get_recipient_context(recipient, extra_context),
}
- if notification.get_unsubscribe_key():
+ # TODO(mgaeta): The unsubscribe system relies on `user_id` so it doesn't
+ # work with Teams. We should add the `actor_id` to the signed link.
+ if isinstance(recipient, User) and notification.get_unsubscribe_key():
key, resource_id, referrer = notification.get_unsubscribe_key()
context.update(
- {"unsubscribe_link": get_unsubscribe_link(user.id, resource_id, key, referrer)}
+ {"unsubscribe_link": get_unsubscribe_link(recipient.id, resource_id, key, referrer)}
)
return context
@@ -113,16 +115,19 @@ def get_context(
@register_notification_provider(ExternalProviders.EMAIL)
def send_notification_as_email(
notification: BaseNotification,
- users: Set[User],
+ recipients: Iterable[Union["Team", "User"]],
shared_context: Mapping[str, Any],
extra_context_by_user_id: Optional[Mapping[int, Mapping[str, Any]]],
) -> None:
headers = get_headers(notification)
- for user in users:
- extra_context = (extra_context_by_user_id or {}).get(user.id, {})
- log_message(notification, user)
- context = get_context(notification, user, shared_context, extra_context)
+ for recipient in recipients:
+ if isinstance(recipient, Team):
+ # TODO(mgaeta): MessageBuilder only works with Users so filter out Teams for now.
+ continue
+ extra_context = (extra_context_by_user_id or {}).get(recipient.id, {})
+ log_message(notification, recipient)
+ context = get_context(notification, recipient, shared_context, extra_context)
subject = get_subject_with_prefix(notification, context=context)
msg = MessageBuilder(
subject=subject,
@@ -134,5 +139,5 @@ def send_notification_as_email(
reply_reference=notification.get_reply_reference(),
type=notification.get_type(),
)
- msg.add_users([user.id], project=notification.project)
+ msg.add_users([recipient.id], project=notification.project)
msg.send_async()
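send_notification_as_email above now receives a mixed iterable of teams and users, skips teams (MessageBuilder is user-only for now), and keeps unsubscribe links user-only. A toy version of that recipient filtering, with placeholder dataclasses standing in for the real Team/User models:

from dataclasses import dataclass
from typing import Iterable, List, Union

@dataclass(frozen=True)
class User:
    id: int

@dataclass(frozen=True)
class Team:
    id: int

def email_recipients(recipients: Iterable[Union[Team, User]]) -> List[User]:
    # Mirror the isinstance(recipient, Team) skip: only users get email for now.
    return [r for r in recipients if isinstance(r, User)]

assert email_recipients([User(1), Team(7), User(2)]) == [User(1), User(2)]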
diff --git a/src/sentry/models/actor.py b/src/sentry/models/actor.py
index 85d20f6a0e9d2e..812ab4a5bdc770 100644
--- a/src/sentry/models/actor.py
+++ b/src/sentry/models/actor.py
@@ -1,5 +1,5 @@
from collections import defaultdict, namedtuple
-from typing import Any, Optional
+from typing import TYPE_CHECKING, Optional, Sequence, Type, Union
from django.db import models
from django.db.models.signals import pre_save
@@ -8,10 +8,14 @@
from sentry.db.models import Model
from sentry.utils.compat import filter
+if TYPE_CHECKING:
+ from sentry.models import Team, User
+
+
ACTOR_TYPES = {"team": 0, "user": 1}
-def actor_type_to_class(type: int) -> Any:
+def actor_type_to_class(type: int) -> Type[Union["Team", "User"]]:
# type will be 0 or 1 and we want to get Team or User
from sentry.models import Team, User
@@ -65,7 +69,7 @@ class ActorTuple(namedtuple("Actor", "id type")):
"""
def get_actor_identifier(self):
- return "%s:%d" % (self.type.__name__.lower(), self.id)
+ return f"{self.type.__name__.lower()}:{self.id}"
@classmethod
def from_actor_identifier(cls, actor_identifier):
@@ -111,7 +115,7 @@ def resolve_to_actor(self):
return self.resolve().actor
@classmethod
- def resolve_many(cls, actors):
+ def resolve_many(cls, actors: Sequence["ActorTuple"]) -> Sequence[Union["Team", "User"]]:
"""
Resolve multiple actors at the same time. Returns the result in the same order
as the input, minus any actors we couldn't resolve.
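The ActorTuple changes above are mostly typing, but they lean on the "user:<id>" / "team:<id>" identifier format. A minimal round-trip sketch of that format follows, using stand-in classes rather than the real sentry.models Team/User; the real from_actor_identifier accepts more input shapes than this.

from collections import namedtuple

class Team: ...
class User: ...

ACTOR_CLASSES = {"team": Team, "user": User}  # stand-in for actor_type_to_class

class ActorTuple(namedtuple("Actor", "id type")):
    def get_actor_identifier(self) -> str:
        # e.g. "user:42" or "team:7", matching the f-string in the diff above
        return f"{self.type.__name__.lower()}:{self.id}"

    @classmethod
    def from_actor_identifier(cls, identifier: str) -> "ActorTuple":
        kind, _, raw_id = identifier.partition(":")
        return cls(id=int(raw_id), type=ACTOR_CLASSES[kind])

assert ActorTuple(42, User).get_actor_identifier() == "user:42"
assert ActorTuple.from_actor_identifier("team:7") == ActorTuple(7, Team)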
diff --git a/src/sentry/models/groupsubscription.py b/src/sentry/models/groupsubscription.py
index 79181042c68730..d2ea95277234c2 100644
--- a/src/sentry/models/groupsubscription.py
+++ b/src/sentry/models/groupsubscription.py
@@ -14,7 +14,7 @@
sane_repr,
)
from sentry.notifications.helpers import (
- transform_to_notification_settings_by_user,
+ transform_to_notification_settings_by_recipient,
where_should_be_participating,
)
from sentry.notifications.types import GroupSubscriptionReason, NotificationSettingTypes
@@ -122,7 +122,7 @@ def get_participants(self, group: "Group") -> Mapping[ExternalProviders, Mapping
subscriptions_by_user_id = {
subscription.user_id: subscription for subscription in active_and_disabled_subscriptions
}
- notification_settings_by_user = transform_to_notification_settings_by_user(
+ notification_settings_by_recipient = transform_to_notification_settings_by_recipient(
notification_settings, all_possible_users
)
@@ -136,7 +136,7 @@ def get_participants(self, group: "Group") -> Mapping[ExternalProviders, Mapping
providers = where_should_be_participating(
user,
subscription_option,
- notification_settings_by_user,
+ notification_settings_by_recipient,
should_use_slack_automatic=should_use_slack_automatic,
)
for provider in providers:
diff --git a/src/sentry/models/projectcodeowners.py b/src/sentry/models/projectcodeowners.py
index 34acab0ab0f215..34f29971755bf6 100644
--- a/src/sentry/models/projectcodeowners.py
+++ b/src/sentry/models/projectcodeowners.py
@@ -133,7 +133,8 @@ def validate_codeowners_associations(self, codeowners, project):
@classmethod
def merge_code_owners_list(self, code_owners_list):
"""
- Merge list of code_owners into a single code_owners object concating all the rules. We assume schema version is constant.
+ Merge list of code_owners into a single code_owners object concatenating
+ all the rules. We assume schema version is constant.
"""
merged_code_owners = None
for code_owners in code_owners_list:
diff --git a/src/sentry/models/projectownership.py b/src/sentry/models/projectownership.py
index 119014b254620a..7fd0cb4bc1237e 100644
--- a/src/sentry/models/projectownership.py
+++ b/src/sentry/models/projectownership.py
@@ -1,10 +1,13 @@
+from typing import Any, Mapping, Optional, Sequence, Tuple, Union
+
from django.db import models
from django.db.models.signals import post_delete, post_save
from django.utils import timezone
from sentry.db.models import Model, sane_repr
from sentry.db.models.fields import FlexibleForeignKey, JSONField
-from sentry.ownership.grammar import load_schema, resolve_actors
+from sentry.models import ActorTuple
+from sentry.ownership.grammar import Rule, load_schema, resolve_actors
from sentry.utils import metrics
from sentry.utils.cache import cache
@@ -75,7 +78,9 @@ def get_ownership_cached(cls, project_id):
return ownership or None
@classmethod
- def get_owners(cls, project_id, data):
+ def get_owners(
+ cls, project_id: int, data: Mapping[str, Any]
+ ) -> Tuple[Union["Everyone", Sequence["ActorTuple"]], Optional[Sequence[Rule]]]:
"""
For a given project_id, and event data blob.
We combine the schemas from IssueOwners and CodeOwners.
@@ -180,7 +185,9 @@ def get_autoassign_owners(cls, project_id, data, limit=2):
)
@classmethod
- def _matching_ownership_rules(cls, ownership, project_id, data):
+ def _matching_ownership_rules(
+ cls, ownership: "ProjectOwnership", project_id: int, data: Mapping[str, Any]
+ ) -> Sequence["Rule"]:
rules = []
if ownership.schema is not None:
for rule in load_schema(ownership.schema):
diff --git a/src/sentry/notifications/helpers.py b/src/sentry/notifications/helpers.py
index fba371116728e6..2b746505b2864e 100644
--- a/src/sentry/notifications/helpers.py
+++ b/src/sentry/notifications/helpers.py
@@ -58,11 +58,11 @@ def _get_notification_setting_default(
def _get_setting_mapping_from_mapping(
- notification_settings_by_user: Mapping[
- "User",
+ notification_settings_by_recipient: Mapping[
+ Union["Team", "User"],
Mapping[NotificationScopeType, Mapping[ExternalProviders, NotificationSettingOptionValues]],
],
- user: "User",
+ recipient: Union["Team", "User"],
type: NotificationSettingTypes,
should_use_slack_automatic: bool = False,
) -> Mapping[ExternalProviders, NotificationSettingOptionValues]:
@@ -71,11 +71,13 @@ def _get_setting_mapping_from_mapping(
from sentry.notifications.notify import notification_providers
specific_scope = get_scope_type(type)
- notification_settings_mapping = notification_settings_by_user.get(user)
+ notification_settings_mapping = notification_settings_by_recipient.get(recipient)
if notification_settings_mapping:
- notification_setting_option = notification_settings_mapping.get(
- specific_scope
- ) or notification_settings_mapping.get(NotificationScopeType.USER)
+ notification_setting_option = (
+ notification_settings_mapping.get(specific_scope)
+ or notification_settings_mapping.get(NotificationScopeType.USER)
+ or notification_settings_mapping.get(NotificationScopeType.TEAM)
+ )
if notification_setting_option:
return notification_setting_option
@@ -87,12 +89,12 @@ def _get_setting_mapping_from_mapping(
}
-def where_should_user_be_notified(
- notification_settings_by_user: Mapping[
- "User",
+def where_should_recipient_be_notified(
+ notification_settings_by_recipient: Mapping[
+ Union["Team", "User"],
Mapping[NotificationScopeType, Mapping[ExternalProviders, NotificationSettingOptionValues]],
],
- user: "User",
+ recipient: Union["Team", "User"],
should_use_slack_automatic: bool = False,
) -> List[ExternalProviders]:
"""
@@ -100,8 +102,8 @@ def where_should_user_be_notified(
return the list of providers after verifying the user has opted into this notification.
"""
mapping = _get_setting_mapping_from_mapping(
- notification_settings_by_user,
- user,
+ notification_settings_by_recipient,
+ recipient,
NotificationSettingTypes.ISSUE_ALERTS,
should_use_slack_automatic=should_use_slack_automatic,
)
@@ -117,9 +119,9 @@ def should_be_participating(
value: NotificationSettingOptionValues,
) -> bool:
"""
- Give a user's subscription (on, off, or null) to a group and their
+ Give an Actor's subscription (on, off, or null) to a group and their
notification setting value(on, off, or sometimes), decide whether or not to
- send the user a notification.
+ send the Actor a notification.
"""
return (
subscription and subscription.is_active and value != NotificationSettingOptionValues.NEVER
@@ -127,10 +129,10 @@ def should_be_participating(
def where_should_be_participating(
- user: "User",
+ recipient: Union["Team", "User"],
subscription: Optional["GroupSubscription"],
- notification_settings_by_user: Mapping[
- "User",
+ notification_settings_by_recipient: Mapping[
+ Union["Team", "User"],
Mapping[NotificationScopeType, Mapping[ExternalProviders, NotificationSettingOptionValues]],
],
should_use_slack_automatic: bool = False,
@@ -143,8 +145,8 @@ def where_should_be_participating(
the group, that overrides their notification preferences.
"""
mapping = _get_setting_mapping_from_mapping(
- notification_settings_by_user,
- user,
+ notification_settings_by_recipient,
+ recipient,
NotificationSettingTypes.WORKFLOW,
should_use_slack_automatic=should_use_slack_automatic,
)
@@ -172,8 +174,10 @@ def get_values_by_provider_by_type(
parent_scope = get_scope_type(type)
parent_specific_mapping = notification_settings_by_scope.get(parent_scope, {})
- organization_independent_mapping = notification_settings_by_scope.get(
- NotificationScopeType.USER, {}
+ organization_independent_mapping = (
+ notification_settings_by_scope.get(NotificationScopeType.USER)
+ or notification_settings_by_scope.get(NotificationScopeType.TEAM)
+ or {}
)
return {
@@ -186,29 +190,29 @@ def get_values_by_provider_by_type(
}
-def transform_to_notification_settings_by_user(
+def transform_to_notification_settings_by_recipient(
notification_settings: Iterable["NotificationSetting"],
- users: Iterable["User"],
+ recipients: Iterable[Union["Team", "User"]],
) -> Mapping[
- "User",
+ Union["Team", "User"],
Mapping[NotificationScopeType, Mapping[ExternalProviders, NotificationSettingOptionValues]],
]:
"""
Given an unsorted list of notification settings, create a mapping of users
to a map of notification scopes to setting values.
"""
- actor_mapping = {user.actor_id: user for user in users}
- notification_settings_by_user: Dict[
- "User",
+ actor_mapping = {recipient.actor_id: recipient for recipient in recipients}
+ notification_settings_by_recipient: Dict[
+ Union["Team", "User"],
Dict[NotificationScopeType, Dict[ExternalProviders, NotificationSettingOptionValues]],
] = defaultdict(lambda: defaultdict(dict))
for notification_setting in notification_settings:
- user = actor_mapping.get(notification_setting.target_id)
+ recipient = actor_mapping.get(notification_setting.target_id)
scope_type = NotificationScopeType(notification_setting.scope_type)
value = NotificationSettingOptionValues(notification_setting.value)
provider = ExternalProviders(notification_setting.provider)
- notification_settings_by_user[user][scope_type][provider] = value
- return notification_settings_by_user
+ notification_settings_by_recipient[recipient][scope_type][provider] = value
+ return notification_settings_by_recipient
def transform_to_notification_settings_by_scope(
@@ -327,7 +331,7 @@ def get_groups_for_query(
for project, groups in groups_by_project.items():
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
- user=user,
+ recipient=user,
parent_id=project.id,
type=NotificationSettingTypes.WORKFLOW,
should_use_slack_automatic=should_use_slack_automatic_by_organization_id[
@@ -371,7 +375,7 @@ def get_user_subscriptions_for_groups(
for project, groups in groups_by_project.items():
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
- user=user,
+ recipient=user,
parent_id=project.id,
type=NotificationSettingTypes.WORKFLOW,
should_use_slack_automatic=should_use_slack_automatic_by_organization_id[
@@ -417,7 +421,7 @@ def get_fallback_settings(
types_to_serialize: Iterable[NotificationSettingTypes],
project_ids: Iterable[int],
organization_ids: Iterable[int],
- user: Optional["User"] = None,
+ recipient: Optional[Union["Team", "User"]] = None,
should_use_slack_automatic: bool = False,
) -> MutableMapping[str, MutableMapping[str, MutableMapping[int, MutableMapping[str, str]]]]:
"""
@@ -449,8 +453,7 @@ def get_fallback_settings(
for parent_id in parent_ids:
data[type_str][scope_str][parent_id][provider_str] = parent_independent_value_str
- # Only users (i.e. not teams) have parent-independent notification settings.
- if user:
+ if recipient:
# Each provider has it's own defaults by type.
value = _get_notification_setting_default(
provider, type_enum, should_use_slack_automatic
@@ -458,7 +461,7 @@ def get_fallback_settings(
value_str = NOTIFICATION_SETTING_OPTION_VALUES[value]
user_scope_str = NOTIFICATION_SCOPE_TYPE[NotificationScopeType.USER]
- data[type_str][user_scope_str][user.id][provider_str] = value_str
+ data[type_str][user_scope_str][recipient.id][provider_str] = value_str
return data
@@ -491,7 +494,7 @@ def get_most_specific_notification_setting_value(
NotificationScopeType,
Mapping[int, Mapping[ExternalProviders, NotificationSettingOptionValues]],
],
- user: "User",
+ recipient: Union["Team", "User"],
parent_id: int,
type: NotificationSettingTypes,
should_use_slack_automatic: bool = False,
@@ -505,7 +508,11 @@ def get_most_specific_notification_setting_value(
notification_settings_by_scope.get(get_scope_type(type), {}).get(parent_id, {})
)
or get_highest_notification_setting_value(
- notification_settings_by_scope.get(NotificationScopeType.USER, {}).get(user.id, {})
+ (
+ notification_settings_by_scope.get(NotificationScopeType.USER)
+ or notification_settings_by_scope.get(NotificationScopeType.TEAM)
+ or {}
+ ).get(recipient.id, {})
)
or _get_notification_setting_default(
ExternalProviders.EMAIL, type, should_use_slack_automatic
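Most of the helpers.py hunks revolve around one data shape: a recipient-keyed mapping of notification scope -> provider -> value, plus a "specific scope, else USER, else TEAM" fallback. A shape-only sketch of both, using plain strings where the real code uses NotificationScopeType / ExternalProviders enums and model instances, so the names here are illustrative rather than the production API:

from collections import defaultdict
from typing import Dict, Iterable, Tuple

Row = Tuple[str, str, str, str]  # (recipient, scope, provider, value), all simplified to str

def settings_by_recipient(rows: Iterable[Row]) -> Dict[str, Dict[str, Dict[str, str]]]:
    out: Dict[str, Dict[str, Dict[str, str]]] = defaultdict(lambda: defaultdict(dict))
    for recipient, scope, provider, value in rows:
        out[recipient][scope][provider] = value
    return out

mapping = settings_by_recipient([
    ("user:1", "project", "slack", "never"),
    ("user:1", "user", "email", "always"),
    ("team:7", "team", "slack", "always"),
])

def effective(recipient: str, parent_scope: str) -> Dict[str, str]:
    # Specific scope first, then the recipient-level USER or TEAM scope, as in the fallback above.
    by_scope = mapping.get(recipient, {})
    return by_scope.get(parent_scope) or by_scope.get("user") or by_scope.get("team") or {}

assert effective("user:1", "project") == {"slack": "never"}
assert effective("team:7", "project") == {"slack": "always"}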
diff --git a/src/sentry/notifications/manager.py b/src/sentry/notifications/manager.py
index 1986cd244eae2a..6dcd55175e0078 100644
--- a/src/sentry/notifications/manager.py
+++ b/src/sentry/notifications/manager.py
@@ -20,9 +20,9 @@
get_scope,
get_scope_type,
get_target_id,
- transform_to_notification_settings_by_user,
+ transform_to_notification_settings_by_recipient,
validate,
- where_should_user_be_notified,
+ where_should_recipient_be_notified,
)
from sentry.notifications.types import (
VALID_VALUES_FOR_KEY,
@@ -256,7 +256,7 @@ def get_for_recipient_by_parent(
self,
type_: NotificationSettingTypes,
parent: Union["Organization", "Project"],
- recipients: Sequence[Union["Team", "User"]],
+ recipients: Iterable[Union["Team", "User"]],
) -> QuerySet:
from sentry.models import Team, User
@@ -297,36 +297,37 @@ def get_for_recipient_by_parent(
target__in=actor_ids,
)
- def filter_to_subscribed_users(
+ def filter_to_accepting_recipients(
self,
project: "Project",
- users: List["User"],
- ) -> Mapping[ExternalProviders, Iterable["User"]]:
+ recipients: Iterable[Union["Team", "User"]],
+ ) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
"""
- Filters a list of users down to the users by provider who are subscribed to alerts.
- We check both the project level settings and global default settings.
+ Filters a list of teams or users down to the recipients by provider who
+ are subscribed to alerts. We check both the project level settings and
+ global default settings.
"""
notification_settings = self.get_for_recipient_by_parent(
- NotificationSettingTypes.ISSUE_ALERTS, parent=project, recipients=users
+ NotificationSettingTypes.ISSUE_ALERTS, parent=project, recipients=recipients
)
- notification_settings_by_user = transform_to_notification_settings_by_user(
- notification_settings, users
+ notification_settings_by_recipient = transform_to_notification_settings_by_recipient(
+ notification_settings, recipients
)
mapping = defaultdict(set)
should_use_slack_automatic = features.has(
"organizations:notification-slack-automatic", project.organization
)
- for user in users:
- providers = where_should_user_be_notified(
- notification_settings_by_user, user, should_use_slack_automatic
+ for recipient in recipients:
+ providers = where_should_recipient_be_notified(
+ notification_settings_by_recipient, recipient, should_use_slack_automatic
)
for provider in providers:
- mapping[provider].add(user)
+ mapping[provider].add(recipient)
return mapping
def get_notification_recipients(
self, project: "Project"
- ) -> Mapping[ExternalProviders, Iterable["User"]]:
+ ) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
"""
Return a set of users that should receive Issue Alert emails for a given
project. To start, we get the set of all users. Then we fetch all of
@@ -338,7 +339,7 @@ def get_notification_recipients(
user_ids = project.member_set.values_list("user", flat=True)
users = User.objects.filter(id__in=user_ids)
- return self.filter_to_subscribed_users(project, users)
+ return self.filter_to_accepting_recipients(project, users)
def update_settings_bulk(
self,
diff --git a/src/sentry/notifications/notifications/activity/base.py b/src/sentry/notifications/notifications/activity/base.py
index d90b4e70e6940a..b53e8ac61ed996 100644
--- a/src/sentry/notifications/notifications/activity/base.py
+++ b/src/sentry/notifications/notifications/activity/base.py
@@ -1,12 +1,11 @@
import re
from abc import ABC
-from typing import Any, Mapping, MutableMapping, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Tuple, Union
from urllib.parse import urlparse, urlunparse
from django.utils.html import escape
from django.utils.safestring import SafeString, mark_safe
-from sentry.models import Activity, User
from sentry.notifications.helpers import get_reason_context
from sentry.notifications.notifications.base import BaseNotification
from sentry.notifications.utils import send_activity_notification
@@ -14,11 +13,14 @@
from sentry.notifications.utils.participants import get_participants_for_group
from sentry.types.integrations import ExternalProviders
+if TYPE_CHECKING:
+ from sentry.models import Activity, Team, User
+
class ActivityNotification(BaseNotification, ABC):
fine_tuning_key = "workflow"
- def __init__(self, activity: Activity) -> None:
+ def __init__(self, activity: "Activity") -> None:
super().__init__(activity.project)
self.activity = activity
@@ -38,8 +40,8 @@ def get_base_context(self) -> MutableMapping[str, Any]:
"project_link": self.get_project_link(),
}
- def get_user_context(
- self, user: User, extra_context: Mapping[str, Any]
+ def get_recipient_context(
+ self, recipient: Union["Team", "User"], extra_context: Mapping[str, Any]
) -> MutableMapping[str, Any]:
return get_reason_context(extra_context)
@@ -54,7 +56,7 @@ def get_context(self) -> MutableMapping[str, Any]:
def get_participants_with_group_subscription_reason(
self,
- ) -> Mapping[ExternalProviders, Mapping[User, int]]:
+ ) -> Mapping[ExternalProviders, Mapping["User", int]]:
raise NotImplementedError
def send(self) -> None:
@@ -64,7 +66,7 @@ def send(self) -> None:
class GroupActivityNotification(ActivityNotification, ABC):
is_message_issue_unfurl = True
- def __init__(self, activity: Activity) -> None:
+ def __init__(self, activity: "Activity") -> None:
super().__init__(activity)
self.group = activity.group
@@ -83,7 +85,7 @@ def get_group_link(self) -> str:
def get_participants_with_group_subscription_reason(
self,
- ) -> Mapping[ExternalProviders, Mapping[User, int]]:
+ ) -> Mapping[ExternalProviders, Mapping["User", int]]:
"""This is overridden by the activity subclasses."""
return get_participants_for_group(self.group, self.activity.user)
diff --git a/src/sentry/notifications/notifications/activity/release.py b/src/sentry/notifications/notifications/activity/release.py
index 745c0bb12d9f88..73b9cf8309dc8d 100644
--- a/src/sentry/notifications/notifications/activity/release.py
+++ b/src/sentry/notifications/notifications/activity/release.py
@@ -1,6 +1,6 @@
-from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Set
+from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Set, Union
-from sentry.models import Activity, CommitFileChange, Project, User
+from sentry.models import Activity, CommitFileChange, Project, Team, User
from sentry.notifications.utils import (
get_commits_for_release,
get_deploy,
@@ -76,16 +76,19 @@ def get_context(self) -> MutableMapping[str, Any]:
"text_description": f"Version {self.version} was deployed to {self.environment}",
}
- def get_projects(self, user: User) -> Set[Project]:
- if user.is_superuser or self.organization.flags.allow_joinleave:
- return self.projects
- team_ids = self.get_users_by_teams()[user.id]
+ def get_projects(self, recipient: Union["Team", "User"]) -> Set[Project]:
+ if isinstance(recipient, User):
+ if recipient.is_superuser or self.organization.flags.allow_joinleave:
+ return self.projects
+ team_ids = self.get_users_by_teams()[recipient.id]
+ else:
+ team_ids = [recipient.id]
return get_projects(self.projects, team_ids)
- def get_user_context(
- self, user: User, extra_context: Mapping[str, Any]
+ def get_recipient_context(
+ self, recipient: Union["Team", "User"], extra_context: Mapping[str, Any]
) -> MutableMapping[str, Any]:
- projects = self.get_projects(user)
+ projects = self.get_projects(recipient)
release_links = [
absolute_uri(
f"/organizations/{self.organization.slug}/releases/{self.version}/?project={p.id}"
@@ -95,7 +98,7 @@ def get_user_context(
resolved_issue_counts = [self.group_counts_by_project.get(p.id, 0) for p in projects]
return {
- **super().get_user_context(user, extra_context),
+ **super().get_recipient_context(recipient, extra_context),
"projects": zip(projects, release_links, resolved_issue_counts),
"project_count": len(projects),
}
diff --git a/src/sentry/notifications/notifications/base.py b/src/sentry/notifications/notifications/base.py
index f6e83941938352..104661e4174525 100644
--- a/src/sentry/notifications/notifications/base.py
+++ b/src/sentry/notifications/notifications/base.py
@@ -1,9 +1,9 @@
-from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Tuple, Union
from sentry.utils.http import absolute_uri
if TYPE_CHECKING:
- from sentry.models import Project, User
+ from sentry.models import Project, Team, User
class BaseNotification:
@@ -42,8 +42,8 @@ def get_html_template(self) -> str:
def get_project_link(self) -> str:
return str(absolute_uri(f"/{self.organization.slug}/{self.project.slug}/"))
- def get_user_context(
- self, user: "User", extra_context: Mapping[str, Any]
+ def get_recipient_context(
+ self, recipient: Union["Team", "User"], extra_context: Mapping[str, Any]
) -> MutableMapping[str, Any]:
# Basically a noop.
return {**extra_context}
diff --git a/src/sentry/notifications/notifications/digest.py b/src/sentry/notifications/notifications/digest.py
index 77f405c520a00a..43082802847a2f 100644
--- a/src/sentry/notifications/notifications/digest.py
+++ b/src/sentry/notifications/notifications/digest.py
@@ -1,5 +1,5 @@
import logging
-from typing import TYPE_CHECKING, Any, Iterable, Mapping, MutableMapping, Optional, Set, Tuple
+from typing import TYPE_CHECKING, Any, Iterable, Mapping, MutableMapping, Optional, Tuple, Union
from sentry.digests import Digest
from sentry.digests.utilities import (
@@ -20,7 +20,7 @@
from sentry.types.integrations import ExternalProviders
if TYPE_CHECKING:
- from sentry.models import Project, User
+ from sentry.models import Project, Team, User
logger = logging.getLogger(__name__)
@@ -39,7 +39,7 @@ def __init__(
self.target_type = target_type
self.target_identifier = target_identifier
- def get_participants(self) -> Mapping[ExternalProviders, Set["User"]]:
+ def get_participants(self) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
return get_send_to(
project=self.project,
target_type=self.target_type,
diff --git a/src/sentry/notifications/notifications/rules.py b/src/sentry/notifications/notifications/rules.py
index a2f2efe04108d0..2f5b9cfe69e41e 100644
--- a/src/sentry/notifications/notifications/rules.py
+++ b/src/sentry/notifications/notifications/rules.py
@@ -1,9 +1,9 @@
import logging
-from typing import Any, Mapping, MutableMapping, Optional, Set
+from typing import Any, Iterable, Mapping, MutableMapping, Optional, Union
import pytz
-from sentry.models import User, UserOption
+from sentry.models import Team, User, UserOption
from sentry.notifications.notifications.base import BaseNotification
from sentry.notifications.types import ActionTargetType
from sentry.notifications.utils import (
@@ -43,7 +43,7 @@ def __init__(
self.target_identifier = target_identifier
self.rules = notification.rules
- def get_participants(self) -> Mapping[ExternalProviders, Set[User]]:
+ def get_participants(self) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
return get_send_to(
project=self.project,
target_type=self.target_type,
@@ -63,24 +63,26 @@ def get_subject(self, context: Optional[Mapping[str, Any]] = None) -> str:
def get_reference(self) -> Any:
return self.group
- def get_user_context(
- self, user: User, extra_context: Mapping[str, Any]
+ def get_recipient_context(
+ self, recipient: Union["Team", "User"], extra_context: Mapping[str, Any]
) -> MutableMapping[str, Any]:
- parent_context = super().get_user_context(user, extra_context)
+ parent_context = super().get_recipient_context(recipient, extra_context)
user_context = {"timezone": pytz.timezone("UTC"), **parent_context}
try:
# AlertRuleNotification is shared among both email and slack notifications, and in slack
# notifications, the `user` arg could be of type `Team` which is why we need this check
- if isinstance(user, User):
+ if isinstance(recipient, User):
user_context.update(
{
"timezone": pytz.timezone(
- UserOption.objects.get_value(user=user, key="timezone", default="UTC")
+ UserOption.objects.get_value(
+ user=recipient, key="timezone", default="UTC"
+ )
)
}
)
except pytz.UnknownTimeZoneError:
- ...
+ pass
return user_context
def get_context(self) -> MutableMapping[str, Any]:
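
As a hedged illustration of the renamed hook (the timezone value mirrors the adapter test further down; `notification` and `user` are assumed fixtures):

    context = notification.get_recipient_context(user, {})
    # A User recipient picks up their saved timezone option, e.g.
    # pytz.timezone("Europe/Vienna"); a Team recipient, or a missing or
    # unknown option, keeps the UTC default.
    context["timezone"]
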
diff --git a/src/sentry/notifications/notifications/user_report.py b/src/sentry/notifications/notifications/user_report.py
index 729e85b15bad6a..cd546b22756b29 100644
--- a/src/sentry/notifications/notifications/user_report.py
+++ b/src/sentry/notifications/notifications/user_report.py
@@ -1,27 +1,30 @@
import logging
-from typing import Any, Mapping, MutableMapping, Optional
+from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, Union
from django.utils.encoding import force_text
-from sentry.models import Group, GroupSubscription, Project, User
+from sentry.models import Group, GroupSubscription
from sentry.notifications.helpers import get_reason_context
from sentry.notifications.notifications.base import BaseNotification
from sentry.notifications.utils import send_activity_notification
from sentry.types.integrations import ExternalProviders
from sentry.utils.http import absolute_uri
+if TYPE_CHECKING:
+ from sentry.models import Project, Team, User
+
logger = logging.getLogger(__name__)
class UserReportNotification(BaseNotification):
- def __init__(self, project: Project, report: Mapping[str, Any]) -> None:
+ def __init__(self, project: "Project", report: Mapping[str, Any]) -> None:
super().__init__(project)
self.group = Group.objects.get(id=report["issue"]["id"])
self.report = report
def get_participants_with_group_subscription_reason(
self,
- ) -> Mapping[ExternalProviders, Mapping[User, int]]:
+ ) -> Mapping[ExternalProviders, Mapping["User", int]]:
data_by_provider = GroupSubscription.objects.get_participants(group=self.group)
return {
provider: data
@@ -67,8 +70,8 @@ def get_context(self) -> MutableMapping[str, Any]:
"report": self.report,
}
- def get_user_context(
- self, user: User, extra_context: Mapping[str, Any]
+ def get_recipient_context(
+ self, recipient: Union["Team", "User"], extra_context: Mapping[str, Any]
) -> MutableMapping[str, Any]:
return get_reason_context(extra_context)
diff --git a/src/sentry/notifications/notify.py b/src/sentry/notifications/notify.py
index ab5d2b7d5417f6..87005a0d33814a 100644
--- a/src/sentry/notifications/notify.py
+++ b/src/sentry/notifications/notify.py
@@ -1,4 +1,4 @@
-from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional, Set, Union
+from typing import Any, Callable, Iterable, Mapping, MutableMapping, Optional, Union
from sentry.models import Team, User
from sentry.notifications.notifications.base import BaseNotification
@@ -8,7 +8,7 @@
Notifiable = Callable[
[
BaseNotification,
- Union[Set[User], Set[Team]],
+ Iterable[Union["Team", "User"]],
Mapping[str, Any],
Optional[Mapping[int, Mapping[str, Any]]],
],
@@ -42,7 +42,7 @@ def wrapped(send_notification: Notifiable) -> Notifiable:
def notify(
provider: ExternalProviders,
notification: Any,
- recipients: Union[Set[User], Set[Team]],
+ recipients: Iterable[Union["Team", "User"]],
shared_context: Mapping[str, Any],
extra_context_by_user_id: Optional[Mapping[int, Mapping[str, Any]]] = None,
) -> None:
diff --git a/src/sentry/notifications/utils/participants.py b/src/sentry/notifications/utils/participants.py
index 7252513546abae..a92dcc1adb3909 100644
--- a/src/sentry/notifications/utils/participants.py
+++ b/src/sentry/notifications/utils/participants.py
@@ -3,7 +3,6 @@
from typing import (
TYPE_CHECKING,
Any,
- Dict,
Iterable,
Mapping,
MutableMapping,
@@ -15,6 +14,7 @@
from sentry import features
from sentry.models import (
+ ActorTuple,
Group,
GroupSubscription,
NotificationSetting,
@@ -28,7 +28,7 @@
from sentry.notifications.helpers import (
get_settings_by_provider,
get_values_by_provider_by_type,
- transform_to_notification_settings_by_user,
+ transform_to_notification_settings_by_recipient,
)
from sentry.notifications.notify import notification_providers
from sentry.notifications.types import (
@@ -40,7 +40,6 @@
)
from sentry.types.integrations import ExternalProviders
from sentry.utils import metrics
-from sentry.utils.cache import cache
if TYPE_CHECKING:
from sentry.eventstore.models import Event
@@ -112,7 +111,7 @@ def get_participants_for_release(
projects: Iterable[Project], organization: Organization, user_ids: Set[int]
) -> Mapping[ExternalProviders, Mapping[User, int]]:
# Collect all users with verified emails on a team in the related projects.
- users = list(User.objects.get_team_members_with_verified_email_for_projects(projects))
+ users = set(User.objects.get_team_members_with_verified_email_for_projects(projects))
# Get all the involved users' settings for deploy-emails (including
# users' organization-independent settings.)
@@ -121,7 +120,7 @@ def get_participants_for_release(
recipients=users,
parent=organization,
)
- notification_settings_by_user = transform_to_notification_settings_by_user(
+ notification_settings_by_recipient = transform_to_notification_settings_by_recipient(
notification_settings, users
)
@@ -135,7 +134,7 @@ def get_participants_for_release(
ExternalProviders, MutableMapping[User, int]
] = defaultdict(dict)
for user in users:
- notification_settings_by_scope = notification_settings_by_user.get(user, {})
+ notification_settings_by_scope = notification_settings_by_recipient.get(user, {})
values_by_provider = get_values_by_provider_by_type(
notification_settings_by_scope,
notification_providers(),
@@ -160,112 +159,34 @@ def split_participants_and_context(
return participants, extra_context
-def get_send_to(
- project: Project,
- target_type: ActionTargetType,
- target_identifier: Optional[int] = None,
- event: Optional["Event"] = None,
-) -> Mapping[ExternalProviders, Union[Set[User], Set[Team]]]:
- """
- Returns a mapping of providers to a list of user IDs for the users that
- should receive notifications for the provided project. This result may come
- from cached data.
- """
- if not (project and project.teams.exists()):
- logger.debug("Tried to send notification to invalid project: %r", project)
- return {}
- if target_type == ActionTargetType.ISSUE_OWNERS:
- if not event:
- return get_send_to_all_in_project(project)
- else:
- return get_send_to_owners(event, project)
- elif target_type == ActionTargetType.MEMBER:
- return get_send_to_member(project, target_identifier)
- elif target_type == ActionTargetType.TEAM:
- return get_send_to_team(project, target_identifier)
- return {}
-
-
-def get_send_to_owners(
- event: "Event", project: Project
-) -> Mapping[ExternalProviders, Union[Set[User], Set[Team]]]:
- owners, _ = ProjectOwnership.get_owners(project.id, event.data)
- if owners == ProjectOwnership.Everyone:
- metrics.incr(
- "features.owners.send_to",
- tags={"organization": project.organization_id, "outcome": "everyone"},
- skip_internal=True,
- )
- return get_send_to_all_in_project(project)
+def get_owners(
+ project: Project, event: Optional["Event"] = None
+) -> Iterable[Union["Team", "User"]]:
+ """Given a project and an event, decide which users and teams are the owners."""
+
+ if event:
+ owners, _ = ProjectOwnership.get_owners(project.id, event.data)
+ else:
+ owners = ProjectOwnership.Everyone
if not owners:
- metrics.incr(
- "features.owners.send_to",
- tags={"organization": project.organization_id, "outcome": "empty"},
- skip_internal=True,
- )
- return {}
+ outcome = "empty"
+ recipients = set()
+
+ elif owners == ProjectOwnership.Everyone:
+ outcome = "everyone"
+ recipients = User.objects.filter(id__in=project.member_set.values_list("user", flat=True))
+
+ else:
+ outcome = "match"
+ recipients = ActorTuple.resolve_many(owners)
metrics.incr(
"features.owners.send_to",
- tags={"organization": project.organization_id, "outcome": "match"},
+ tags={"organization": project.organization_id, "outcome": outcome},
skip_internal=True,
)
- user_ids_to_resolve = set()
- team_ids_to_resolve = set()
- for owner in owners:
- if owner.type == User:
- user_ids_to_resolve.add(owner.id)
- else:
- team_ids_to_resolve.add(owner.id)
-
- all_possible_users = set()
-
- if user_ids_to_resolve:
- all_possible_users |= set(User.objects.filter(id__in=user_ids_to_resolve))
-
- team_mapping: Dict[ExternalProviders, Set[Team]] = {ExternalProviders.SLACK: set()}
- team_ids_to_remove = set()
- if team_ids_to_resolve:
- # check for team Slack settings. if present, notify there instead
- for team_id in team_ids_to_resolve:
- team = Team.objects.get(id=team_id)
- team_slack_settings = NotificationSetting.objects.get_settings(
- provider=ExternalProviders.SLACK,
- type=NotificationSettingTypes.ISSUE_ALERTS,
- team=team,
- )
- if team_slack_settings == NotificationSettingOptionValues.ALWAYS:
- team_mapping[ExternalProviders.SLACK].add(team)
- team_ids_to_remove.add(team_id)
- # Get all users in teams that don't have Slack settings.
- team_ids_to_resolve -= team_ids_to_remove
- all_possible_users |= get_users_for_teams_to_resolve(team_ids_to_resolve)
- mapping: MutableMapping[
- ExternalProviders, Union[Set[User], Set[Team]]
- ] = NotificationSetting.objects.filter_to_subscribed_users(project, all_possible_users)
-
- if not mapping:
- return team_mapping
-
- # combine the user and team mappings
- if team_mapping:
- for provider in set.union(set(team_mapping.keys()), set(mapping.keys())):
- if mapping.get(provider) and team_mapping.get(provider):
- mapping[provider].update(list(team_mapping[provider]))
- else:
- if not mapping.get(provider) and team_mapping.get(provider):
- mapping[provider] = team_mapping[provider]
- return mapping
-
-
-def get_users_for_teams_to_resolve(teams_to_resolve: Set[int]) -> Set[User]:
- return set(
- User.objects.filter(
- is_active=True,
- sentry_orgmember_set__organizationmemberteam__team__id__in=teams_to_resolve,
- )
- )
+ return recipients
def disabled_users_from_project(project: Project) -> Mapping[ExternalProviders, Set[User]]:
@@ -277,18 +198,20 @@ def disabled_users_from_project(project: Project) -> Mapping[ExternalProviders,
parent=project,
recipients=users,
)
- notification_settings_by_user = transform_to_notification_settings_by_user(
+ notification_settings_by_recipient = transform_to_notification_settings_by_recipient(
notification_settings, users
)
# Although this can be done with dict comprehension, looping for clarity.
output = defaultdict(set)
for user in users:
- settings = notification_settings_by_user.get(user)
+ settings = notification_settings_by_recipient.get(user)
if settings:
settings_by_provider = get_settings_by_provider(settings)
for provider, settings_value_by_scope in settings_by_provider.items():
project_setting = settings_value_by_scope.get(NotificationScopeType.PROJECT)
- user_setting = settings_value_by_scope.get(NotificationScopeType.USER)
+ user_setting = settings_value_by_scope.get(
+ NotificationScopeType.USER
+ ) or settings_value_by_scope.get(NotificationScopeType.TEAM)
if project_setting == NotificationSettingOptionValues.NEVER or (
not project_setting and user_setting == NotificationSettingOptionValues.NEVER
):
@@ -296,59 +219,53 @@ def disabled_users_from_project(project: Project) -> Mapping[ExternalProviders,
return output
-def get_send_to_team(
- project: Project, target_identifier: Optional[Union[str, int]]
-) -> Mapping[ExternalProviders, Set[User]]:
+def determine_eligible_recipients(
+ project: "Project",
+ target_type: ActionTargetType,
+ target_identifier: Optional[int] = None,
+ event: Optional["Event"] = None,
+) -> Iterable[Union["Team", "User"]]:
"""
- Get a team's notification settings. If not present, get settings for each subscribed user in the team.
- :param project:
- :param target_identifier: Optional. String or int representation of a team_id.
- :returns: Mapping[ExternalProvider, Iterable[User]] A mapping of provider to
- member that a notification should be sent to as a set.
+    Either get the individual recipient from the target type/id or use the
+    owners as determined by rules for this project and event.
"""
- if target_identifier is None:
- return {}
- try:
- team = Team.objects.get(id=int(target_identifier), projectteam__project=project)
- except Team.DoesNotExist:
- return {}
+ if not (project and project.teams.exists()):
+ logger.debug(f"Tried to send notification to invalid project: {project}")
- team_notification_settings = NotificationSetting.objects.get_for_recipient_by_parent(
- NotificationSettingTypes.ISSUE_ALERTS, parent=project, recipients=[team]
- )
- if team_notification_settings:
- team_mapping = {
- ExternalProviders(notification_setting.provider): {team}
- for notification_setting in team_notification_settings
- }
- return team_mapping
-
- # fallback to notifying each subscribed user if there aren't team notification settings
- member_list = team.member_set.values_list("user_id", flat=True)
- users = User.objects.filter(id__in=member_list)
-
- mapping: Mapping[
- ExternalProviders, Set[User]
- ] = NotificationSetting.objects.filter_to_subscribed_users(project, users)
- return mapping
-
-
-def get_send_to_member(
- project: Project, target_identifier: Optional[Union[int, str]]
-) -> Mapping[ExternalProviders, Set[User]]:
- """
- No checking for disabled users is done. If a user explicitly specifies a
- member as a target to send to, it should overwrite the user's personal mail
- settings.
- :param project:
- :param target_identifier: Optional. String or int representation of a user_id.
- :returns: Mapping[ExternalProvider, Iterable[User]] A mapping of provider to
- member that a notification should be sent to as a set.
- """
+ elif target_type == ActionTargetType.MEMBER:
+ user = get_user_from_identifier(project, target_identifier)
+ if user:
+ return {user}
+
+ elif target_type == ActionTargetType.TEAM:
+ team = get_team_from_identifier(project, target_identifier)
+ if team:
+ return {team}
+
+ else:
+ return get_owners(project, event)
+
+ return set()
+
+
+def get_send_to(
+ project: "Project",
+ target_type: ActionTargetType,
+ target_identifier: Optional[int] = None,
+ event: Optional["Event"] = None,
+) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
+ recipients = determine_eligible_recipients(project, target_type, target_identifier, event)
+ return get_recipients_by_provider(project, recipients)
+
+
+def get_user_from_identifier(
+ project: "Project", target_identifier: Optional[Union[str, int]]
+) -> Optional["User"]:
if target_identifier is None:
- return {}
+ return None
+
try:
- user = (
+ return (
User.objects.filter(
id=int(target_identifier),
sentry_orgmember_set__teams__projectteam__project=project,
@@ -357,28 +274,85 @@ def get_send_to_member(
.get()
)
except User.DoesNotExist:
- return {}
- notification_settings = NotificationSetting.objects.get_for_recipient_by_parent(
- NotificationSettingTypes.ISSUE_ALERTS, parent=project, recipients=[user]
- )
- if notification_settings:
- return {
- ExternalProviders(notification_setting.provider): {user}
- for notification_setting in notification_settings
- }
- # Fall back to email if there are no settings.
- return {ExternalProviders.EMAIL: {user}}
-
-
-def get_send_to_all_in_project(project: Project) -> Mapping[ExternalProviders, Set[User]]:
- cache_key = f"mail:send_to:{project.pk}"
- send_to_mapping: Optional[Mapping[ExternalProviders, Set[User]]] = cache.get(cache_key)
- if send_to_mapping is None:
- users_by_provider = NotificationSetting.objects.get_notification_recipients(project)
- send_to_mapping = {
- provider: {user for user in users if user}
- for provider, users in users_by_provider.items()
- }
- cache.set(cache_key, send_to_mapping, 60) # 1 minute cache
-
- return send_to_mapping
+ return None
+
+
+def get_team_from_identifier(
+ project: "Project", target_identifier: Optional[Union[str, int]]
+) -> Optional["Team"]:
+ if target_identifier is None:
+ return None
+
+ try:
+ return Team.objects.get(id=int(target_identifier), projectteam__project=project)
+ except Team.DoesNotExist:
+ return None
+
+
+def partition_recipients(
+ recipients: Iterable[Union["Team", "User"]]
+) -> Tuple[Iterable["Team"], Iterable["User"]]:
+ teams, users = set(), set()
+ for recipient in recipients:
+ if isinstance(recipient, User):
+ users.add(recipient)
+ else:
+ teams.add(recipient)
+ return teams, users
+
+
+def get_users_from_team_fall_back(
+ teams: Iterable["Team"],
+ recipients_by_provider: Mapping[ExternalProviders, Iterable[Union["Team", "User"]]],
+) -> Iterable["User"]:
+ teams_to_fall_back = set(teams)
+ for recipients in recipients_by_provider.values():
+ for recipient in recipients:
+            teams_to_fall_back.discard(recipient)  # a team may appear under more than one provider
+
+ users = set()
+ for team in teams_to_fall_back:
+ # Fall back to notifying each subscribed user if there aren't team notification settings
+ member_list = team.member_set.values_list("user_id", flat=True)
+ users |= set(User.objects.filter(id__in=member_list))
+ return users
+
+
+def combine_recipients_by_provider(
+ teams_by_provider: Mapping[ExternalProviders, Iterable[Union["Team", "User"]]],
+ users_by_provider: Mapping[ExternalProviders, Iterable[Union["Team", "User"]]],
+) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
+ """TODO(mgaeta): Make this more generic and move it to utils."""
+ recipients_by_provider = defaultdict(set)
+ for provider, teams in teams_by_provider.items():
+ for team in teams:
+ recipients_by_provider[provider].add(team)
+ for provider, users in users_by_provider.items():
+ for user in users:
+ recipients_by_provider[provider].add(user)
+ return recipients_by_provider
+
+
+def get_recipients_by_provider(
+ project: Project, recipients: Iterable[Union["Team", "User"]]
+) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
+ """Get the lists of recipients that should receive an Issue Alert by ExternalProvider."""
+ teams, users = partition_recipients(recipients)
+
+ # First evaluate the teams.
+ teams_by_provider = NotificationSetting.objects.filter_to_accepting_recipients(project, teams)
+
+ # Teams cannot receive emails so omit EMAIL settings.
+ teams_by_provider = {
+ provider: teams
+ for provider, teams in teams_by_provider.items()
+ if provider != ExternalProviders.EMAIL
+ }
+
+ # If there are any teams that didn't get added, fall back and add all users.
+ users = set(users).union(get_users_from_team_fall_back(teams, teams_by_provider))
+
+ # Repeat for users.
+ users_by_provider = NotificationSetting.objects.filter_to_accepting_recipients(project, users)
+
+ return combine_recipients_by_provider(teams_by_provider, users_by_provider)
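
Taken together, the refactor turns `get_send_to` into a two-step dispatch: resolve the eligible recipients, then bucket them by provider. A rough usage sketch, with return values mirroring the new tests below (`project`, `user`, and `team` are assumed fixtures):

    from sentry.notifications.types import ActionTargetType
    from sentry.notifications.utils.participants import get_send_to

    get_send_to(project, ActionTargetType.MEMBER, target_identifier=user.id)
    # -> {ExternalProviders.EMAIL: {user}} unless the member disabled mail alerts

    get_send_to(project, ActionTargetType.TEAM, target_identifier=team.id)
    # -> {ExternalProviders.SLACK: {team}} when the team opted into Slack directly,
    #    otherwise the team's members are resolved and bucketed individually
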
diff --git a/src/sentry/ownership/grammar.py b/src/sentry/ownership/grammar.py
index 66dff81b1b1170..7c87d214dca8c2 100644
--- a/src/sentry/ownership/grammar.py
+++ b/src/sentry/ownership/grammar.py
@@ -2,13 +2,14 @@
import re
from collections import namedtuple
from functools import reduce
-from typing import List, Pattern, Tuple
+from typing import Iterable, List, Mapping, Pattern, Tuple
from django.db.models import Q
from parsimonious.exceptions import ParseError # noqa
from parsimonious.grammar import Grammar, NodeVisitor
from rest_framework.serializers import ValidationError
+from sentry.models import ActorTuple
from sentry.utils.glob import glob_match
from sentry.utils.safe import get_path
@@ -453,7 +454,7 @@ def convert_codeowners_syntax(codeowners, associations, code_mapping):
return result
-def resolve_actors(owners, project_id):
+def resolve_actors(owners: Iterable["Owner"], project_id: int) -> Mapping["Owner", "ActorTuple"]:
"""Convert a list of Owner objects into a dictionary
of {Owner: Actor} pairs. Actors not identified are returned
as None."""
diff --git a/src/sentry/plugins/bases/notify.py b/src/sentry/plugins/bases/notify.py
index 4c61f590a95228..43b742ade4f67c 100644
--- a/src/sentry/plugins/bases/notify.py
+++ b/src/sentry/plugins/bases/notify.py
@@ -139,7 +139,7 @@ def get_sendable_user_objects(self, project):
ExternalProviders.EMAIL
]
- return self.get_notification_recipients(project, "%s:alert" % self.get_conf_key())
+ return self.get_notification_recipients(project, f"{self.get_conf_key()}:alert")
def __is_rate_limited(self, group, event):
return ratelimits.is_limited(project=group.project, key=self.get_conf_key(), limit=10)
diff --git a/src/sentry/templatetags/sentry_api.py b/src/sentry/templatetags/sentry_api.py
index e2be5cb4005623..a615f76175f5e4 100644
--- a/src/sentry/templatetags/sentry_api.py
+++ b/src/sentry/templatetags/sentry_api.py
@@ -43,7 +43,7 @@ def serialize_detailed_org(context, obj):
@register.simple_tag
-def get_user_context(request, escape=False):
+def get_recipient_context(request, escape=False):
if isinstance(request, HttpRequest):
user = getattr(request, "user", None)
result = {"ip_address": request.META["REMOTE_ADDR"]}
diff --git a/tests/sentry/mail/__init__.py b/tests/sentry/mail/__init__.py
index e69de29bb2d1d6..190eb0afb674e5 100644
--- a/tests/sentry/mail/__init__.py
+++ b/tests/sentry/mail/__init__.py
@@ -0,0 +1,25 @@
+from typing import Any, Mapping
+
+from sentry.event_manager import EventManager, get_event_type
+from sentry.mail import send_notification_as_email
+
+
+def make_event_data(filename: str, url: str = "http://example.com") -> Mapping[str, Any]:
+ mgr = EventManager(
+ {
+ "tags": [("level", "error")],
+ "stacktrace": {"frames": [{"lineno": 1, "filename": filename}]},
+ "request": {"url": url},
+ }
+ )
+ mgr.normalize()
+ data = mgr.get_data()
+ event_type = get_event_type(data)
+ data["type"] = event_type.key
+ data["metadata"] = event_type.get_metadata(data)
+ return data
+
+
+def send_notification(*args: Any) -> None:
+ args_list = list(args)[1:]
+ send_notification_as_email(*args_list, {})
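
These shared helpers are what the adapter and participants tests below import; a typical call looks like this (the filename is a placeholder chosen to match an ownership rule):

    event = self.store_event(data=make_event_data("foo.py"), project_id=self.project.id)
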
diff --git a/tests/sentry/mail/activity/test_release.py b/tests/sentry/mail/activity/test_release.py
index a08cb7c876cff6..672e4fed1c3ba8 100644
--- a/tests/sentry/mail/activity/test_release.py
+++ b/tests/sentry/mail/activity/test_release.py
@@ -105,7 +105,7 @@ def test_simple(self):
(self.commit4, self.user5),
]
- user_context = email.get_user_context(self.user1, {})
+ user_context = email.get_recipient_context(self.user1, {})
# make sure this only includes projects user has access to
assert len(user_context["projects"]) == 1
assert user_context["projects"][0][0] == self.project
@@ -155,7 +155,7 @@ def test_no_committers(self):
assert context["environment"] == "production"
assert context["repos"] == []
- user_context = email.get_user_context(self.user1, {})
+ user_context = email.get_recipient_context(self.user1, {})
# make sure this only includes projects user has access to
assert len(user_context["projects"]) == 1
assert user_context["projects"][0][0] == self.project
@@ -205,7 +205,7 @@ def test_uses_default(self):
assert context["environment"] == "production"
assert context["repos"] == []
- user_context = email.get_user_context(user6, {})
+ user_context = email.get_recipient_context(user6, {})
# make sure this only includes projects user has access to
assert len(user_context["projects"]) == 1
assert user_context["projects"][0][0] == self.project
diff --git a/tests/sentry/mail/test_adapter.py b/tests/sentry/mail/test_adapter.py
index 5c703079a783b1..7af0b8054e4100 100644
--- a/tests/sentry/mail/test_adapter.py
+++ b/tests/sentry/mail/test_adapter.py
@@ -12,7 +12,7 @@
from sentry.api.serializers.models.userreport import UserReportWithGroupSerializer
from sentry.digests.notifications import build_digest, event_to_record
from sentry.event_manager import EventManager, get_event_type
-from sentry.mail import build_subject_prefix, mail_adapter, send_notification_as_email
+from sentry.mail import build_subject_prefix, mail_adapter
from sentry.models import (
Activity,
GroupRelease,
@@ -37,12 +37,6 @@
NotificationSettingTypes,
)
from sentry.notifications.utils.digest import get_digest_subject
-from sentry.notifications.utils.participants import (
- get_send_to,
- get_send_to_member,
- get_send_to_owners,
- get_send_to_team,
-)
from sentry.ownership import grammar
from sentry.ownership.grammar import Matcher, Owner, dump_schema
from sentry.plugins.base import Notification
@@ -53,110 +47,16 @@
from sentry.utils.compat import mock
from sentry.utils.email import MessageBuilder, get_email_addresses
from sentry_plugins.opsgenie.plugin import OpsGeniePlugin
+from tests.sentry.mail import make_event_data, send_notification
-def send_notification(*args):
- args_list = list(args)[1:]
- send_notification_as_email(*args_list, {})
-
-
-class BaseMailAdapterTest:
+class BaseMailAdapterTest(TestCase):
@fixture
def adapter(self):
return mail_adapter
- def make_event_data(self, filename, url="http://example.com"):
- mgr = EventManager(
- {
- "tags": [("level", "error")],
- "stacktrace": {"frames": [{"lineno": 1, "filename": filename}]},
- "request": {"url": url},
- }
- )
- mgr.normalize()
- data = mgr.get_data()
- event_type = get_event_type(data)
- data["type"] = event_type.key
- data["metadata"] = event_type.get_metadata(data)
- return data
-
-
-class MailAdapterGetSendToTest(BaseMailAdapterTest, TestCase):
- def setUp(self):
- self.user2 = self.create_user(email="[email protected]", is_active=True)
- self.create_member(user=self.user2, organization=self.organization, teams=[self.team])
- ProjectOwnership.objects.create(
- project_id=self.project.id,
- schema=dump_schema(
- [
- grammar.Rule(Matcher("path", "*.py"), [Owner("team", self.team.slug)]),
- grammar.Rule(Matcher("path", "*.jx"), [Owner("user", self.user2.email)]),
- grammar.Rule(
- Matcher("path", "*.cbl"),
- [Owner("user", self.user.email), Owner("user", self.user2.email)],
- ),
- ]
- ),
- fallthrough=True,
- )
-
- def test_get_send_to_with_team_owners(self):
- event = self.store_event(data=self.make_event_data("foo.py"), project_id=self.project.id)
- assert {self.user, self.user2} == get_send_to(
- self.project, ActionTargetType.ISSUE_OWNERS, event=event.data
- )[ExternalProviders.EMAIL]
- # Make sure that disabling mail alerts works as expected
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user2,
- project=self.project,
- )
- assert {self.user} == get_send_to(
- self.project, ActionTargetType.ISSUE_OWNERS, event=event.data
- )[ExternalProviders.EMAIL]
-
- def test_get_send_to_with_user_owners(self):
- event = self.store_event(data=self.make_event_data("foo.cbl"), project_id=self.project.id)
- assert {self.user, self.user2} == get_send_to(
- self.project, ActionTargetType.ISSUE_OWNERS, event=event.data
- )[ExternalProviders.EMAIL]
-
- # Make sure that disabling mail alerts works as expected
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user2,
- project=self.project,
- )
- assert {self.user} == get_send_to(
- self.project, ActionTargetType.ISSUE_OWNERS, event=event.data
- )[ExternalProviders.EMAIL]
-
- def test_get_send_to_with_user_owner(self):
- event = self.store_event(data=self.make_event_data("foo.jx"), project_id=self.project.id)
- assert {self.user2} == get_send_to(
- self.project, ActionTargetType.ISSUE_OWNERS, event=event.data
- )[ExternalProviders.EMAIL]
-
- def test_get_send_to_with_fallthrough(self):
- event = self.store_event(data=self.make_event_data("foo.cpp"), project_id=self.project.id)
- assert {self.user, self.user2} == get_send_to(
- self.project, ActionTargetType.ISSUE_OWNERS, event=event.data
- )[ExternalProviders.EMAIL]
-
- def test_get_send_to_without_fallthrough(self):
- ProjectOwnership.objects.get(project_id=self.project.id).update(fallthrough=False)
- event = self.store_event(data=self.make_event_data("foo.cpp"), project_id=self.project.id)
- assert set() == set(
- get_send_to(self.project, ActionTargetType.ISSUE_OWNERS, event=event.data)
- )
-
-
-class MailAdapterGetSendableUsersTest(BaseMailAdapterTest, TestCase):
+class MailAdapterGetSendableUsersTest(BaseMailAdapterTest):
def test_get_sendable_user_objects(self):
user = self.create_user(email="[email protected]", is_active=True)
user2 = self.create_user(email="[email protected]", is_active=True)
@@ -222,7 +122,7 @@ def test_get_sendable_user_objects(self):
assert user4 not in self.adapter.get_sendable_user_objects(project)
-class MailAdapterBuildSubjectPrefixTest(BaseMailAdapterTest, TestCase):
+class MailAdapterBuildSubjectPrefixTest(BaseMailAdapterTest):
def test_default_prefix(self):
assert build_subject_prefix(self.project) == "[Sentry] "
@@ -234,7 +134,7 @@ def test_project_level_prefix(self):
assert build_subject_prefix(self.project) == prefix
-class MailAdapterNotifyTest(BaseMailAdapterTest, TestCase):
+class MailAdapterNotifyTest(BaseMailAdapterTest):
def test_simple_notification(self):
event = self.store_event(
data={"message": "Hello world", "level": "error"}, project_id=self.project.id
@@ -291,7 +191,7 @@ def test_notify_users_does_email(self, mock_func):
args, kwargs = mock_func.call_args
notification = args[1]
- assert notification.get_user_context(self.user, {})["timezone"] == pytz.timezone(
+ assert notification.get_recipient_context(self.user, {})["timezone"] == pytz.timezone(
"Europe/Vienna"
)
@@ -471,7 +371,7 @@ def test_notify_with_suspect_commits(self):
def test_slack_link(self):
project = self.project
organization = project.organization
- event = self.store_event(data=self.make_event_data("foo.jx"), project_id=project.id)
+ event = self.store_event(data=make_event_data("foo.jx"), project_id=project.id)
with self.tasks():
notification = Notification(event=event)
@@ -488,7 +388,7 @@ def test_slack_link(self):
def test_slack_link_with_integration(self):
project = self.project
organization = project.organization
- event = self.store_event(data=self.make_event_data("foo.jx"), project_id=project.id)
+ event = self.store_event(data=make_event_data("foo.jx"), project_id=project.id)
integration = Integration.objects.create(provider="msteams")
integration.add_organization(organization)
@@ -508,7 +408,7 @@ def test_slack_link_with_integration(self):
def test_slack_link_with_plugin(self):
project = self.project
organization = project.organization
- event = self.store_event(data=self.make_event_data("foo.jx"), project_id=project.id)
+ event = self.store_event(data=make_event_data("foo.jx"), project_id=project.id)
OpsGeniePlugin().enable(project)
@@ -570,17 +470,13 @@ def test_notify_users_with_owners(self):
fallthrough=True,
)
- event_all_users = self.store_event(
- data=self.make_event_data("foo.cbl"), project_id=project.id
- )
+ event_all_users = self.store_event(data=make_event_data("foo.cbl"), project_id=project.id)
self.assert_notify(event_all_users, [user.email, user2.email])
- event_team = self.store_event(data=self.make_event_data("foo.py"), project_id=project.id)
+ event_team = self.store_event(data=make_event_data("foo.py"), project_id=project.id)
self.assert_notify(event_team, [user.email, user2.email])
- event_single_user = self.store_event(
- data=self.make_event_data("foo.jx"), project_id=project.id
- )
+ event_single_user = self.store_event(data=make_event_data("foo.jx"), project_id=project.id)
self.assert_notify(event_single_user, [user2.email])
# Make sure that disabling mail alerts works as expected
@@ -591,9 +487,7 @@ def test_notify_users_with_owners(self):
user=user2,
project=project,
)
- event_all_users = self.store_event(
- data=self.make_event_data("foo.cbl"), project_id=project.id
- )
+ event_all_users = self.store_event(data=make_event_data("foo.cbl"), project_id=project.id)
self.assert_notify(event_all_users, [user.email])
def test_notify_team_members(self):
@@ -603,17 +497,17 @@ def test_notify_team_members(self):
user2 = self.create_user(email="[email protected]", is_active=True)
team = self.create_team(organization=self.organization, members=[user, user2])
project = self.create_project(teams=[team])
- event = self.store_event(data=self.make_event_data("foo.py"), project_id=project.id)
+ event = self.store_event(data=make_event_data("foo.py"), project_id=project.id)
self.assert_notify(event, [user.email, user2.email], ActionTargetType.TEAM, str(team.id))
def test_notify_user(self):
user = self.create_user(email="[email protected]", is_active=True)
self.create_team(organization=self.organization, members=[user])
- event = self.store_event(data=self.make_event_data("foo.py"), project_id=self.project.id)
+ event = self.store_event(data=make_event_data("foo.py"), project_id=self.project.id)
self.assert_notify(event, [user.email], ActionTargetType.MEMBER, str(user.id))
-class MailAdapterGetDigestSubjectTest(BaseMailAdapterTest, TestCase):
+class MailAdapterGetDigestSubjectTest(BaseMailAdapterTest):
def test_get_digest_subject(self):
assert (
get_digest_subject(
@@ -625,7 +519,7 @@ def test_get_digest_subject(self):
)
-class MailAdapterNotifyDigestTest(BaseMailAdapterTest, TestCase):
+class MailAdapterNotifyDigestTest(BaseMailAdapterTest):
@mock.patch.object(mail_adapter, "notify", side_effect=mail_adapter.notify, autospec=True)
def test_notify_digest(self, notify):
project = self.project
@@ -730,7 +624,7 @@ def test_notify_digest_user_does_not_exist(self, notify):
assert len(mail.outbox) == 0
-class MailAdapterRuleNotifyTest(BaseMailAdapterTest, TestCase):
+class MailAdapterRuleNotifyTest(BaseMailAdapterTest):
def test_normal(self):
event = self.store_event(data={}, project_id=self.project.id)
rule = Rule.objects.create(project=self.project, label="my rule")
@@ -751,7 +645,7 @@ def test_digest(self, digests):
assert digests.add.call_count == 1
-class MailAdapterShouldNotifyTest(BaseMailAdapterTest, TestCase):
+class MailAdapterShouldNotifyTest(BaseMailAdapterTest):
def test_should_notify(self):
assert self.adapter.should_notify(ActionTargetType.ISSUE_OWNERS, self.group)
assert self.adapter.should_notify(ActionTargetType.MEMBER, self.group)
@@ -777,211 +671,7 @@ def test_should_always_notify_target_member(self):
assert self.adapter.should_notify(ActionTargetType.MEMBER, self.group)
-class MailAdapterGetSendToOwnersTest(BaseMailAdapterTest, TestCase):
- def setUp(self):
- self.user = self.create_user(email="[email protected]", is_active=True)
- self.user2 = self.create_user(email="[email protected]", is_active=True)
- self.user3 = self.create_user(email="[email protected]", is_active=True)
-
- self.organization = self.create_organization(owner=self.user)
- self.team = self.create_team(
- organization=self.organization, members=[self.user2, self.user3]
- )
- self.team2 = self.create_team(organization=self.organization, members=[self.user])
- self.project = self.create_project(name="Test", teams=[self.team, self.team2])
- self.group = self.create_group(
- first_seen=timezone.now(),
- last_seen=timezone.now(),
- project=self.project,
- message="hello world",
- logger="root",
- )
- ProjectOwnership.objects.create(
- project_id=self.project.id,
- schema=dump_schema(
- [
- grammar.Rule(Matcher("path", "*.py"), [Owner("team", self.team.slug)]),
- grammar.Rule(Matcher("path", "*.jx"), [Owner("user", self.user2.email)]),
- grammar.Rule(
- Matcher("path", "*.cbl"),
- [
- Owner("user", self.user.email),
- Owner("user", self.user2.email),
- Owner("user", self.user3.email),
- ],
- ),
- ]
- ),
- fallthrough=True,
- )
-
- def test_all_users(self):
- event_all_users = self.store_event(
- data=self.make_event_data("foo.cbl"), project_id=self.project.id
- )
- assert get_send_to_owners(event_all_users, self.project)[ExternalProviders.EMAIL] == {
- self.user,
- self.user2,
- self.user3,
- }
-
- def test_team(self):
- event_team = self.store_event(
- data=self.make_event_data("foo.py"), project_id=self.project.id
- )
- assert get_send_to_owners(event_team, self.project)[ExternalProviders.EMAIL] == {
- self.user2,
- self.user3,
- }
-
- def test_single_user(self):
- event_single_user = self.store_event(
- data=self.make_event_data("foo.jx"), project_id=self.project.id
- )
- assert get_send_to_owners(event_single_user, self.project)[ExternalProviders.EMAIL] == {
- self.user2
- }
-
- def test_disable_alerts_user_scope(self):
- event_all_users = self.store_event(
- data=self.make_event_data("foo.cbl"), project_id=self.project.id
- )
-
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user2,
- )
-
- assert (
- self.user2
- not in get_send_to_owners(event_all_users, self.project)[ExternalProviders.EMAIL]
- )
-
- def test_disable_alerts_project_scope(self):
- event_all_users = self.store_event(
- data=self.make_event_data("foo.cbl"), project_id=self.project.id
- )
-
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user2,
- project=self.project,
- )
-
- assert (
- self.user2
- not in get_send_to_owners(event_all_users, self.project)[ExternalProviders.EMAIL]
- )
-
- def test_disable_alerts_multiple_scopes(self):
- event_all_users = self.store_event(
- data=self.make_event_data("foo.cbl"), project_id=self.project.id
- )
-
- # Project-independent setting.
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.ALWAYS,
- user=self.user2,
- )
-
- # Per-project setting.
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user2,
- project=self.project,
- )
-
- assert (
- self.user2
- not in get_send_to_owners(event_all_users, self.project)[ExternalProviders.EMAIL]
- )
-
-
-class MailAdapterGetSendToTeamTest(BaseMailAdapterTest, TestCase):
- def test_send_to_team(self):
- assert {self.user} == get_send_to_team(self.project, str(self.team.id))[
- ExternalProviders.EMAIL
- ]
-
- def test_send_disabled(self):
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user,
- project=self.project,
- )
- assert {} == get_send_to_team(self.project, str(self.team.id))
-
- def test_invalid_team(self):
- assert {} == get_send_to_team(self.project, "900001")
-
- def test_other_project_team(self):
- user_2 = self.create_user()
- team_2 = self.create_team(self.organization, members=[user_2])
- project_2 = self.create_project(organization=self.organization, teams=[team_2])
- assert {user_2} == get_send_to_team(project_2, str(team_2.id))[ExternalProviders.EMAIL]
- assert {} == get_send_to_team(self.project, str(team_2.id))
-
- def test_other_org_team(self):
- org_2 = self.create_organization()
- user_2 = self.create_user()
- team_2 = self.create_team(org_2, members=[user_2])
- project_2 = self.create_project(organization=org_2, teams=[team_2])
- assert {user_2} == get_send_to_team(project_2, str(team_2.id))[ExternalProviders.EMAIL]
- assert {} == get_send_to_team(self.project, str(team_2.id))
-
-
-class MailAdapterGetSendToMemberTest(BaseMailAdapterTest, TestCase):
- def test_send_to_user(self):
- assert {self.user} == get_send_to_member(self.project, str(self.user.id))[
- ExternalProviders.EMAIL
- ]
-
- def test_send_disabled_still_sends(self):
- NotificationSetting.objects.update_settings(
- ExternalProviders.EMAIL,
- NotificationSettingTypes.ISSUE_ALERTS,
- NotificationSettingOptionValues.NEVER,
- user=self.user,
- project=self.project,
- )
- assert {self.user} == get_send_to_member(self.project, str(self.user.id))[
- ExternalProviders.EMAIL
- ]
-
- def test_invalid_user(self):
- assert {} == get_send_to_member(self.project, "900001")
-
- def test_other_org_user(self):
- org_2 = self.create_organization()
- user_2 = self.create_user()
- team_2 = self.create_team(org_2, members=[user_2])
- team_3 = self.create_team(org_2, members=[user_2])
- project_2 = self.create_project(organization=org_2, teams=[team_2, team_3])
- assert {user_2} == get_send_to_member(project_2, str(user_2.id))[ExternalProviders.EMAIL]
- assert {} == get_send_to_member(self.project, str(user_2.id))
-
- def test_no_project_access(self):
- org_2 = self.create_organization()
- user_2 = self.create_user()
- team_2 = self.create_team(org_2, members=[user_2])
- user_3 = self.create_user()
- self.create_team(org_2, members=[user_3])
- project_2 = self.create_project(organization=org_2, teams=[team_2])
- assert {user_2} == get_send_to_member(project_2, str(user_2.id))[ExternalProviders.EMAIL]
- assert {} == get_send_to_member(self.project, str(user_3.id))
-
-
-class MailAdapterNotifyAboutActivityTest(BaseMailAdapterTest, TestCase):
+class MailAdapterNotifyAboutActivityTest(BaseMailAdapterTest):
def test_assignment(self):
NotificationSetting.objects.update_settings(
ExternalProviders.EMAIL,
@@ -1063,7 +753,7 @@ def test_note(self):
assert msg.to == [self.user.email]
-class MailAdapterHandleSignalTest(BaseMailAdapterTest, TestCase):
+class MailAdapterHandleSignalTest(BaseMailAdapterTest):
def create_report(self):
user_foo = self.create_user("[email protected]")
self.project.teams.first().organization.member_set.create(user=user_foo)
diff --git a/tests/sentry/models/test_project.py b/tests/sentry/models/test_project.py
index 68edcbf6e605b0..4b8c99881e32f3 100644
--- a/tests/sentry/models/test_project.py
+++ b/tests/sentry/models/test_project.py
@@ -299,7 +299,7 @@ def test_copy_with_previous_settings(self):
class FilterToSubscribedUsersTest(TestCase):
def run_test(self, users: Iterable[User], expected_users: Iterable[User]):
assert (
- NotificationSetting.objects.filter_to_subscribed_users(self.project, users)[
+ NotificationSetting.objects.filter_to_accepting_recipients(self.project, users)[
ExternalProviders.EMAIL
]
== expected_users
diff --git a/tests/sentry/notifications/utils/test_get_most_specific.py b/tests/sentry/notifications/utils/test_get_most_specific.py
index 3235a6dd6bdc21..398b7aa40036ee 100644
--- a/tests/sentry/notifications/utils/test_get_most_specific.py
+++ b/tests/sentry/notifications/utils/test_get_most_specific.py
@@ -19,7 +19,7 @@ def setUp(self) -> None:
def test_get_most_specific_notification_setting_value_empty_workflow(self):
value = get_most_specific_notification_setting_value(
notification_settings_by_scope={},
- user=self.user,
+ recipient=self.user,
parent_id=1,
type=NotificationSettingTypes.WORKFLOW,
)
@@ -28,7 +28,7 @@ def test_get_most_specific_notification_setting_value_empty_workflow(self):
def test_get_most_specific_notification_setting_value_empty_alerts(self):
value = get_most_specific_notification_setting_value(
notification_settings_by_scope={},
- user=self.user,
+ recipient=self.user,
parent_id=1,
type=NotificationSettingTypes.ISSUE_ALERTS,
)
@@ -45,7 +45,7 @@ def test_get_most_specific_notification_setting_value_user(self):
}
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
- user=self.user,
+ recipient=self.user,
parent_id=1,
type=NotificationSettingTypes.ISSUE_ALERTS,
)
@@ -70,7 +70,7 @@ def test_get_most_specific_notification_setting_value(self):
}
value = get_most_specific_notification_setting_value(
notification_settings_by_scope,
- user=self.user,
+ recipient=self.user,
parent_id=project_id,
type=NotificationSettingTypes.ISSUE_ALERTS,
)
diff --git a/tests/sentry/notifications/utils/test_participants.py b/tests/sentry/notifications/utils/test_participants.py
new file mode 100644
index 00000000000000..fe29aa517ae057
--- /dev/null
+++ b/tests/sentry/notifications/utils/test_participants.py
@@ -0,0 +1,245 @@
+from typing import Iterable, Mapping, Optional, Union
+
+from sentry.eventstore.models import Event
+from sentry.models import NotificationSetting, Project, ProjectOwnership, Team, User
+from sentry.notifications.types import (
+ ActionTargetType,
+ NotificationSettingOptionValues,
+ NotificationSettingTypes,
+)
+from sentry.notifications.utils.participants import get_send_to
+from sentry.ownership import grammar
+from sentry.ownership.grammar import Matcher, Owner, dump_schema
+from sentry.testutils import TestCase
+from sentry.types.integrations import ExternalProviders
+from tests.sentry.mail import make_event_data
+
+
+class GetSendToMemberTest(TestCase):
+ def get_send_to_member(
+ self, project: Optional[Project] = None, user_id: Optional[int] = None
+ ) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
+ return get_send_to(
+ project=project or self.project,
+ target_type=ActionTargetType.MEMBER,
+ target_identifier=user_id or self.user.id,
+ )
+
+ def test_invalid_user(self):
+ assert self.get_send_to_member(self.project, 900001) == {}
+
+ def test_send_to_user(self):
+ assert self.get_send_to_member() == {ExternalProviders.EMAIL: {self.user}}
+
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.EMAIL,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.NEVER,
+ user=self.user,
+ project=self.project,
+ )
+
+ assert self.get_send_to_member() == {}
+
+ def test_other_org_user(self):
+ org_2 = self.create_organization()
+ user_2 = self.create_user()
+ team_2 = self.create_team(org_2, members=[user_2])
+ team_3 = self.create_team(org_2, members=[user_2])
+ project_2 = self.create_project(organization=org_2, teams=[team_2, team_3])
+
+ assert self.get_send_to_member(project_2, user_2.id) == {ExternalProviders.EMAIL: {user_2}}
+ assert self.get_send_to_member(self.project, user_2.id) == {}
+
+ def test_no_project_access(self):
+ org_2 = self.create_organization()
+ user_2 = self.create_user()
+ team_2 = self.create_team(org_2, members=[user_2])
+ user_3 = self.create_user()
+ self.create_team(org_2, members=[user_3])
+ project_2 = self.create_project(organization=org_2, teams=[team_2])
+
+ assert self.get_send_to_member(project_2, user_2.id) == {ExternalProviders.EMAIL: {user_2}}
+ assert self.get_send_to_member(self.project, user_3.id) == {}
+
+
+class GetSendToTeamTest(TestCase):
+ def get_send_to_team(
+ self, project: Optional[Project] = None, team_id: Optional[int] = None
+ ) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
+ return get_send_to(
+ project=project or self.project,
+ target_type=ActionTargetType.TEAM,
+ target_identifier=team_id or self.team.id,
+ )
+
+ def test_invalid_team(self):
+ assert self.get_send_to_team(self.project, 900001) == {}
+
+ def test_send_to_team(self):
+ assert self.get_send_to_team() == {ExternalProviders.EMAIL: {self.user}}
+
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.EMAIL,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.NEVER,
+ user=self.user,
+ project=self.project,
+ )
+
+ assert self.get_send_to_team() == {}
+
+ def test_send_to_team_direct(self):
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.SLACK,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.ALWAYS,
+ team=self.team,
+ )
+
+ assert self.get_send_to_team() == {ExternalProviders.SLACK: {self.team}}
+
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.SLACK,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.NEVER,
+ team=self.team,
+ )
+ assert self.get_send_to_team() == {ExternalProviders.EMAIL: {self.user}}
+
+ def test_other_project_team(self):
+ user_2 = self.create_user()
+ team_2 = self.create_team(self.organization, members=[user_2])
+ project_2 = self.create_project(organization=self.organization, teams=[team_2])
+
+ assert self.get_send_to_team(project_2, team_2.id) == {ExternalProviders.EMAIL: {user_2}}
+ assert self.get_send_to_team(self.project, team_2.id) == {}
+
+ def test_other_org_team(self):
+ org_2 = self.create_organization()
+ user_2 = self.create_user()
+ team_2 = self.create_team(org_2, members=[user_2])
+ project_2 = self.create_project(organization=org_2, teams=[team_2])
+
+ assert self.get_send_to_team(project_2, team_2.id) == {ExternalProviders.EMAIL: {user_2}}
+ assert self.get_send_to_team(self.project, team_2.id) == {}
+
+
+class GetSendToOwnersTest(TestCase):
+ def get_send_to_owners(
+ self, event: Event
+ ) -> Mapping[ExternalProviders, Iterable[Union["Team", "User"]]]:
+ return get_send_to(
+ self.project,
+ target_type=ActionTargetType.ISSUE_OWNERS,
+ target_identifier=None,
+ event=event,
+ )
+
+ def store_event(self, filename: str) -> Event:
+ return super().store_event(data=make_event_data(filename), project_id=self.project.id)
+
+ def setUp(self):
+ self.user2 = self.create_user(email="[email protected]", is_active=True)
+ self.user3 = self.create_user(email="[email protected]", is_active=True)
+
+ self.team2 = self.create_team(
+ organization=self.organization, members=[self.user, self.user2]
+ )
+ self.project.add_team(self.team2)
+
+ ProjectOwnership.objects.create(
+ project_id=self.project.id,
+ schema=dump_schema(
+ [
+ grammar.Rule(Matcher("path", "*.py"), [Owner("team", self.team2.slug)]),
+ grammar.Rule(Matcher("path", "*.jsx"), [Owner("user", self.user.email)]),
+ grammar.Rule(Matcher("path", "*.jx"), [Owner("user", self.user3.email)]),
+ grammar.Rule(
+ Matcher("path", "*.cbl"),
+ [
+ Owner("user", user.email)
+ for user in User.objects.filter(
+ id__in=self.project.member_set.values_list("user", flat=True)
+ )
+ ],
+ ),
+ grammar.Rule(Matcher("path", "*.lol"), []),
+ ]
+ ),
+ fallthrough=True,
+ )
+
+ def test_empty(self):
+ event = self.store_event("empty.lol")
+
+ assert self.get_send_to_owners(event) == {}
+
+ def test_single_user(self):
+ event = self.store_event("user.jsx")
+
+ assert self.get_send_to_owners(event) == {ExternalProviders.EMAIL: {self.user}}
+
+ # Make sure that disabling mail alerts works as expected
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.EMAIL,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.NEVER,
+ user=self.user,
+ project=self.project,
+ )
+
+ assert self.get_send_to_owners(event) == {}
+
+ def test_single_user_no_teams(self):
+ event = self.store_event("user.jx")
+
+ assert self.get_send_to_owners(event) == {}
+
+ def test_team_owners(self):
+ event = self.store_event("team.py")
+
+ assert self.get_send_to_owners(event) == {ExternalProviders.EMAIL: {self.user, self.user2}}
+
+ # Make sure that disabling mail alerts works as expected
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.EMAIL,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.NEVER,
+ user=self.user2,
+ project=self.project,
+ )
+ assert self.get_send_to_owners(event) == {ExternalProviders.EMAIL: {self.user}}
+
+ def test_disable_alerts_multiple_scopes(self):
+ event = self.store_event("everyone.cbl")
+
+ # Project-independent setting.
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.EMAIL,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.ALWAYS,
+ user=self.user2,
+ )
+
+ # Per-project setting.
+ NotificationSetting.objects.update_settings(
+ ExternalProviders.EMAIL,
+ NotificationSettingTypes.ISSUE_ALERTS,
+ NotificationSettingOptionValues.NEVER,
+ user=self.user2,
+ project=self.project,
+ )
+
+ assert self.get_send_to_owners(event) == {ExternalProviders.EMAIL: {self.user}}
+
+ def test_fallthrough(self):
+ event = self.store_event("no_rule.cpp")
+
+ assert self.get_send_to_owners(event) == {ExternalProviders.EMAIL: {self.user, self.user2}}
+
+ def test_without_fallthrough(self):
+ ProjectOwnership.objects.get(project_id=self.project.id).update(fallthrough=False)
+ event = self.store_event("no_rule.cpp")
+
+ assert self.get_send_to_owners(event) == {}
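
The GetSendToOwnersTest cases above hinge on path-based ownership rules plus the fallthrough flag: the first matching rule routes the event to its owners (possibly nobody, as with the *.lol rule), and only when no rule matches does fallthrough decide whether all project members are notified. A minimal standalone sketch of that matching order, using fnmatch-style globs and invented rule data rather than the real ProjectOwnership schema:

from fnmatch import fnmatch

# (pattern, owners) pairs, checked in order; an empty owner list means "match, notify nobody".
RULES = [
    ("*.py", ["#team-backend"]),
    ("*.jsx", ["[email protected]"]),
    ("*.lol", []),
]
PROJECT_MEMBERS = ["[email protected]", "[email protected]"]

def owners_for(path, fallthrough=True):
    for pattern, owners in RULES:
        if fnmatch(path, pattern):
            return owners
    # No rule matched: fall through to all project members, or to nobody.
    return PROJECT_MEMBERS if fallthrough else []

assert owners_for("user.jsx") == ["[email protected]"]
assert owners_for("empty.lol") == []
assert owners_for("no_rule.cpp") == PROJECT_MEMBERS
assert owners_for("no_rule.cpp", fallthrough=False) == []
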
diff --git a/tests/sentry/notifications/utils/test_should_be_notified.py b/tests/sentry/notifications/utils/test_should_be_notified.py
index 42d0ade7ee5ecc..3e732099d459f1 100644
--- a/tests/sentry/notifications/utils/test_should_be_notified.py
+++ b/tests/sentry/notifications/utils/test_should_be_notified.py
@@ -1,7 +1,7 @@
from unittest import TestCase
from sentry.models import User
-from sentry.notifications.helpers import where_should_user_be_notified
+from sentry.notifications.helpers import where_should_recipient_be_notified
from sentry.notifications.types import NotificationScopeType, NotificationSettingOptionValues
from sentry.types.integrations import ExternalProviders
@@ -18,7 +18,7 @@ def test_where_should_user_be_notified(self):
}
}
}
- assert where_should_user_be_notified(notification_settings, self.user) == [
+ assert where_should_recipient_be_notified(notification_settings, self.user) == [
ExternalProviders.EMAIL
]
@@ -31,7 +31,7 @@ def test_where_should_user_be_notified_two_providers(self):
}
}
}
- assert where_should_user_be_notified(notification_settings, self.user) == [
+ assert where_should_recipient_be_notified(notification_settings, self.user) == [
ExternalProviders.EMAIL,
ExternalProviders.SLACK,
]
diff --git a/tests/sentry/notifications/utils/test_transforms.py b/tests/sentry/notifications/utils/test_transforms.py
index f4e25149e701f8..ae16b8e270f626 100644
--- a/tests/sentry/notifications/utils/test_transforms.py
+++ b/tests/sentry/notifications/utils/test_transforms.py
@@ -2,8 +2,8 @@
from sentry.models import Group, NotificationSetting, Project, User
from sentry.notifications.helpers import (
+ transform_to_notification_settings_by_recipient,
transform_to_notification_settings_by_scope,
- transform_to_notification_settings_by_user,
)
from sentry.notifications.types import (
NotificationScopeType,
@@ -39,17 +39,22 @@ def setUp(self) -> None:
class TransformToNotificationSettingsByUserTestCase(TransformTestCase):
- def test_transform_to_notification_settings_by_user_empty(self):
- assert transform_to_notification_settings_by_user(notification_settings=[], users=[]) == {}
+ def test_transform_to_notification_settings_by_recipient_empty(self):
+ assert (
+ transform_to_notification_settings_by_recipient(notification_settings=[], recipients=[])
+ == {}
+ )
assert (
- transform_to_notification_settings_by_user(notification_settings=[], users=[self.user])
+ transform_to_notification_settings_by_recipient(
+ notification_settings=[], recipients=[self.user]
+ )
== {}
)
- def test_transform_to_notification_settings_by_user(self):
- assert transform_to_notification_settings_by_user(
- notification_settings=self.notification_settings, users=[self.user]
+ def test_transform_to_notification_settings_by_recipient(self):
+ assert transform_to_notification_settings_by_recipient(
+ notification_settings=self.notification_settings, recipients=[self.user]
) == {
self.user: {
NotificationScopeType.USER: {
|
73c87dadbf4879b6beaac139258fb1f1005356d4
|
2024-01-17 03:24:49
|
edwardgou-sentry
|
feat(webvitals): Updates to markline color and alert message (#63217)
| false
|
Updates to markline color and alert message (#63217)
|
feat
|
diff --git a/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx b/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx
index af0a64967cef8c..ec54f528e12d73 100644
--- a/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx
+++ b/static/app/views/performance/browser/webVitals/components/performanceScoreBreakdownChart.tsx
@@ -166,7 +166,18 @@ export function PerformanceScoreBreakdownChart({transaction}: Props) {
seriesName: t('Mark Line'),
data: [],
markLine: MarkLine({
- lineStyle: {color: theme.blue400, type: 'solid', width: 1},
+ lineStyle: {
+ color: theme.gray300,
+ type: 'dashed',
+ width: 1,
+ opacity: 0.8,
+ },
+ label: {
+ show: true,
+ formatter: () => t('Score Migration'),
+ position: 'insideMiddleBottom',
+ color: theme.gray300,
+ },
data: [{xAxis: SCORE_MIGRATION_TIMESTAMP}],
}),
});
diff --git a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx
index 51ba68f38b0b54..da05d6d34e53ca 100644
--- a/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx
+++ b/static/app/views/performance/browser/webVitals/pageOverview.spec.tsx
@@ -76,7 +76,7 @@ describe('PageOverview', function () {
});
render(<PageOverview />);
await screen.findByText(
- 'We changed how Performance Scores are calculated for your projects.'
+          'We made improvements to how Performance Scores are calculated for your projects. Starting on 17 December 2023, scores are updated to more accurately reflect user experiences. Read more about these improvements here.'
);
});
});
diff --git a/static/app/views/performance/browser/webVitals/pageOverview.tsx b/static/app/views/performance/browser/webVitals/pageOverview.tsx
index 9b9690e13f295f..2f3ff8405d5f88 100644
--- a/static/app/views/performance/browser/webVitals/pageOverview.tsx
+++ b/static/app/views/performance/browser/webVitals/pageOverview.tsx
@@ -2,6 +2,7 @@ import {useMemo, useState} from 'react';
import {browserHistory} from 'react-router';
import styled from '@emotion/styled';
import omit from 'lodash/omit';
+import moment from 'moment';
import ProjectAvatar from 'sentry/components/avatar/projectAvatar';
import {Breadcrumbs} from 'sentry/components/breadcrumbs';
@@ -16,7 +17,7 @@ import PageFilterBar from 'sentry/components/organizations/pageFilterBar';
import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter';
import {TabList, Tabs} from 'sentry/components/tabs';
import {IconChevron, IconClose} from 'sentry/icons';
-import {t} from 'sentry/locale';
+import {t, tct} from 'sentry/locale';
import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
import {defined} from 'sentry/utils';
@@ -28,7 +29,10 @@ import useProjects from 'sentry/utils/useProjects';
import useRouter from 'sentry/utils/useRouter';
import {normalizeUrl} from 'sentry/utils/withDomainRequired';
import {PageOverviewSidebar} from 'sentry/views/performance/browser/webVitals/components/pageOverviewSidebar';
-import {PerformanceScoreBreakdownChart} from 'sentry/views/performance/browser/webVitals/components/performanceScoreBreakdownChart';
+import {
+ PerformanceScoreBreakdownChart,
+ SCORE_MIGRATION_TIMESTAMP,
+} from 'sentry/views/performance/browser/webVitals/components/performanceScoreBreakdownChart';
import WebVitalMeters from 'sentry/views/performance/browser/webVitals/components/webVitalMeters';
import {PageOverviewWebVitalsDetailPanel} from 'sentry/views/performance/browser/webVitals/pageOverviewWebVitalsDetailPanel';
import {PageSamplePerformanceTable} from 'sentry/views/performance/browser/webVitals/pageSamplePerformanceTable';
@@ -150,6 +154,10 @@ export default function PageOverview() {
? calculatePerformanceScoreFromStoredTableDataRow(projectScores?.data?.[0])
: calculatePerformanceScoreFromTableDataRow(pageData?.data?.[0]);
+ const scoreMigrationTimestampString = moment(SCORE_MIGRATION_TIMESTAMP).format(
+ 'DD MMMM YYYY'
+ );
+
return (
<ModulePageProviders title={[t('Performance'), t('Web Vitals')].join(' — ')}>
<Tabs
@@ -232,9 +240,13 @@ export default function PageOverview() {
{shouldUseStoredScores && !isDismissed && (
<StyledAlert type="info" showIcon>
<AlertContent>
- {t(
- 'We changed how Performance Scores are calculated for your projects.'
- )}
+ {
+ // TODO: Add link to blog when ready
+ tct(
+                      `We made improvements to how Performance Scores are calculated for your projects. Starting on [scoreMigrationTimestampString], scores are updated to more accurately reflect user experiences. Read more about these improvements here.`,
+ {scoreMigrationTimestampString}
+ )
+ }
<DismissButton
priority="link"
icon={<IconClose />}
diff --git a/static/app/views/performance/browser/webVitals/webVitalsLandingPage.spec.tsx b/static/app/views/performance/browser/webVitals/webVitalsLandingPage.spec.tsx
index e5bffa4dca44e3..1449ef450a56e8 100644
--- a/static/app/views/performance/browser/webVitals/webVitalsLandingPage.spec.tsx
+++ b/static/app/views/performance/browser/webVitals/webVitalsLandingPage.spec.tsx
@@ -72,7 +72,7 @@ describe('WebVitalsLandingPage', function () {
});
render(<WebVitalsLandingPage />);
await screen.findByText(
- 'We changed how Performance Scores are calculated for your projects.'
+          'We made improvements to how Performance Scores are calculated for your projects. Starting on 17 December 2023, scores are updated to more accurately reflect user experiences. Read more about these improvements here.'
);
});
});
diff --git a/static/app/views/performance/browser/webVitals/webVitalsLandingPage.tsx b/static/app/views/performance/browser/webVitals/webVitalsLandingPage.tsx
index 7ddac5cc0b103e..74a23b67732a20 100644
--- a/static/app/views/performance/browser/webVitals/webVitalsLandingPage.tsx
+++ b/static/app/views/performance/browser/webVitals/webVitalsLandingPage.tsx
@@ -1,6 +1,7 @@
import {Fragment, useState} from 'react';
import styled from '@emotion/styled';
import omit from 'lodash/omit';
+import moment from 'moment';
import Alert from 'sentry/components/alert';
import {Breadcrumbs} from 'sentry/components/breadcrumbs';
@@ -12,7 +13,7 @@ import {EnvironmentPageFilter} from 'sentry/components/organizations/environment
import PageFilterBar from 'sentry/components/organizations/pageFilterBar';
import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter';
import {IconClose} from 'sentry/icons';
-import {t} from 'sentry/locale';
+import {t, tct} from 'sentry/locale';
import ConfigStore from 'sentry/stores/configStore';
import {space} from 'sentry/styles/space';
import useDismissAlert from 'sentry/utils/useDismissAlert';
@@ -20,6 +21,7 @@ import {useLocation} from 'sentry/utils/useLocation';
import useOrganization from 'sentry/utils/useOrganization';
import useRouter from 'sentry/utils/useRouter';
import {normalizeUrl} from 'sentry/utils/withDomainRequired';
+import {SCORE_MIGRATION_TIMESTAMP} from 'sentry/views/performance/browser/webVitals/components/performanceScoreBreakdownChart';
import WebVitalMeters from 'sentry/views/performance/browser/webVitals/components/webVitalMeters';
import {PagePerformanceTable} from 'sentry/views/performance/browser/webVitals/pagePerformanceTable';
import {PerformanceScoreChart} from 'sentry/views/performance/browser/webVitals/performanceScoreChart';
@@ -65,6 +67,10 @@ export default function WebVitalsLandingPage() {
? calculatePerformanceScoreFromStoredTableDataRow(projectScores?.data?.[0])
: calculatePerformanceScoreFromTableDataRow(projectData?.data?.[0]);
+ const scoreMigrationTimestampString = moment(SCORE_MIGRATION_TIMESTAMP).format(
+ 'DD MMMM YYYY'
+ );
+
return (
<ModulePageProviders title={[t('Performance'), t('Web Vitals')].join(' — ')}>
<Layout.Header>
@@ -107,9 +113,13 @@ export default function WebVitalsLandingPage() {
{shouldUseStoredScores && !isDismissed && (
<StyledAlert type="info" showIcon>
<AlertContent>
- {t(
- 'We changed how Performance Scores are calculated for your projects.'
- )}
+ {
+ // TODO: Add link to blog when ready
+ tct(
+                      `We made improvements to how Performance Scores are calculated for your projects. Starting on [scoreMigrationTimestampString], scores are updated to more accurately reflect user experiences. Read more about these improvements here.`,
+ {scoreMigrationTimestampString}
+ )
+ }
<DismissButton
priority="link"
icon={<IconClose />}
|
3c8483f8699bd574b482854faa61255683ddd385
|
2023-06-08 22:02:17
|
Jodi Jang
|
fix: Add all archives to previous status for escalating GroupHistory (#50516)
| false
|
Add all archives to previous status for escalating GroupHistory (#50516)
|
fix
|
diff --git a/src/sentry/models/grouphistory.py b/src/sentry/models/grouphistory.py
index b7b5da0fa022c1..15aa47f0e972c7 100644
--- a/src/sentry/models/grouphistory.py
+++ b/src/sentry/models/grouphistory.py
@@ -120,7 +120,11 @@ class GroupHistoryStatus:
GroupHistoryStatus.ASSIGNED: (GroupHistoryStatus.UNASSIGNED,),
GroupHistoryStatus.UNASSIGNED: (GroupHistoryStatus.ASSIGNED,),
GroupHistoryStatus.REGRESSED: RESOLVED_STATUSES,
- GroupHistoryStatus.ESCALATING: (GroupHistoryStatus.ARCHIVED_UNTIL_ESCALATING,),
+ GroupHistoryStatus.ESCALATING: (
+ GroupHistoryStatus.ARCHIVED_UNTIL_ESCALATING,
+ GroupHistoryStatus.ARCHIVED_UNTIL_CONDITION_MET,
+ GroupHistoryStatus.IGNORED,
+ ),
}
ACTIVITY_STATUS_TO_GROUP_HISTORY_STATUS = {
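
The mapping above answers "which history statuses may directly precede this one"; for an escalating group, any of the archive/ignore variants now count. A small illustrative sketch (integer stand-ins, not the real GroupHistoryStatus values) of how such a table can drive the lookup of the prior history row when recording a transition:

# Stand-in constants; the real values live on GroupHistoryStatus.
IGNORED = 2
ARCHIVED_UNTIL_ESCALATING = 12
ARCHIVED_UNTIL_CONDITION_MET = 13
ESCALATING = 14

PREVIOUS_STATUSES = {
    ESCALATING: (
        ARCHIVED_UNTIL_ESCALATING,
        ARCHIVED_UNTIL_CONDITION_MET,
        IGNORED,
    ),
}

def find_previous(history_rows, new_status):
    """Return the most recent row whose status is a valid predecessor of new_status."""
    allowed = PREVIOUS_STATUSES.get(new_status, ())
    for row in reversed(history_rows):  # rows ordered oldest -> newest
        if row["status"] in allowed:
            return row
    return None

rows = [{"id": 1, "status": IGNORED}, {"id": 2, "status": ARCHIVED_UNTIL_ESCALATING}]
assert find_previous(rows, ESCALATING)["id"] == 2
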
|
93ee4ed579276caa7ec3675db901906c69c76d20
|
2025-01-31 01:21:08
|
Josh Callender
|
feat(workflow_engine): Split fast and slow condition evaluation (#84275)
| false
|
Split fast and slow condition evaluation (#84275)
|
feat
|
diff --git a/src/sentry/workflow_engine/handlers/detector/stateful.py b/src/sentry/workflow_engine/handlers/detector/stateful.py
index 67781e8795ad41..324642a485d296 100644
--- a/src/sentry/workflow_engine/handlers/detector/stateful.py
+++ b/src/sentry/workflow_engine/handlers/detector/stateful.py
@@ -177,8 +177,9 @@ def evaluate_group_key_value(
# store these in `DetectorStateData.counter_updates`, but we don't have anywhere to set the required
# thresholds at the moment. Probably should be a field on the Detector? Could also be on the condition
# level, but usually we want to set this at a higher level.
+ # TODO 2: Validate that we will never have slow conditions here.
new_status = DetectorPriorityLevel.OK
- is_group_condition_met, condition_results = evaluate_condition_group(
+ is_group_condition_met, condition_results, _ = evaluate_condition_group(
self.condition_group, value
)
diff --git a/src/sentry/workflow_engine/models/workflow.py b/src/sentry/workflow_engine/models/workflow.py
index 367edc6d435ba8..14bc2e1f166d7f 100644
--- a/src/sentry/workflow_engine/models/workflow.py
+++ b/src/sentry/workflow_engine/models/workflow.py
@@ -67,17 +67,19 @@ class Meta:
)
]
- def evaluate_trigger_conditions(self, job: WorkflowJob) -> bool:
+ def evaluate_trigger_conditions(self, job: WorkflowJob) -> tuple[bool, list[DataCondition]]:
"""
Evaluate the conditions for the workflow trigger and return if the evaluation was successful.
If there aren't any workflow trigger conditions, the workflow is considered triggered.
"""
if self.when_condition_group is None:
- return True
+ return True, []
job["workflow"] = self
- evaluation, _ = evaluate_condition_group(self.when_condition_group, job)
- return evaluation
+ evaluation, _, remaining_conditions = evaluate_condition_group(
+ self.when_condition_group, job
+ )
+ return evaluation, remaining_conditions
def get_slow_conditions(workflow: Workflow) -> list[DataCondition]:
diff --git a/src/sentry/workflow_engine/processors/action.py b/src/sentry/workflow_engine/processors/action.py
index 2c7dfaab686787..abefdefbd7ddd6 100644
--- a/src/sentry/workflow_engine/processors/action.py
+++ b/src/sentry/workflow_engine/processors/action.py
@@ -7,9 +7,14 @@
from sentry.db.models.manager.base_query_set import BaseQuerySet
from sentry.models.group import Group
-from sentry.workflow_engine.models import Action, ActionGroupStatus, DataConditionGroup, Workflow
-from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group
-from sentry.workflow_engine.types import WorkflowJob
+from sentry.workflow_engine.models import (
+ Action,
+ ActionGroupStatus,
+ DataCondition,
+ DataConditionGroup,
+)
+
+EnqueuedAction = tuple[DataConditionGroup, list[DataCondition]]
def get_action_last_updated_statuses(now: datetime, actions: BaseQuerySet[Action], group: Group):
@@ -74,29 +79,3 @@ def filter_recently_fired_workflow_actions(
filtered_actions = actions.filter(id__in=actions_to_include | actions_without_statuses_ids)
return filtered_actions
-
-
-def evaluate_workflow_action_filters(
- workflows: set[Workflow], job: WorkflowJob
-) -> BaseQuerySet[Action]:
- filtered_action_groups: set[DataConditionGroup] = set()
-
- # gets the list of the workflow ids, and then get the workflow_data_condition_groups for those workflows
- workflow_ids = {workflow.id for workflow in workflows}
-
- action_conditions = DataConditionGroup.objects.filter(
- workflowdataconditiongroup__workflow_id__in=workflow_ids
- ).distinct()
-
- for action_condition in action_conditions:
- evaluation, result = evaluate_condition_group(action_condition, job)
-
- if evaluation:
- filtered_action_groups.add(action_condition)
-
- # get the actions for any of the triggered data condition groups
- actions = Action.objects.filter(
- dataconditiongroupaction__condition_group__in=filtered_action_groups
- ).distinct()
-
- return filter_recently_fired_workflow_actions(actions, job["event"].group)
diff --git a/src/sentry/workflow_engine/processors/data_condition.py b/src/sentry/workflow_engine/processors/data_condition.py
new file mode 100644
index 00000000000000..a11fe4458a6634
--- /dev/null
+++ b/src/sentry/workflow_engine/processors/data_condition.py
@@ -0,0 +1,16 @@
+from sentry.workflow_engine.models.data_condition import DataCondition, is_slow_condition
+
+
+def split_conditions_by_speed(
+ conditions: list[DataCondition],
+) -> tuple[list[DataCondition], list[DataCondition]]:
+ fast_conditions: list[DataCondition] = []
+ slow_conditions: list[DataCondition] = []
+
+ for condition in conditions:
+ if is_slow_condition(condition):
+ slow_conditions.append(condition)
+ else:
+ fast_conditions.append(condition)
+
+ return fast_conditions, slow_conditions
diff --git a/src/sentry/workflow_engine/processors/data_condition_group.py b/src/sentry/workflow_engine/processors/data_condition_group.py
index 788836eae28b33..b518f50b78f410 100644
--- a/src/sentry/workflow_engine/processors/data_condition_group.py
+++ b/src/sentry/workflow_engine/processors/data_condition_group.py
@@ -3,12 +3,15 @@
from sentry.utils.function_cache import cache_func_for_models
from sentry.workflow_engine.models import DataCondition, DataConditionGroup
-from sentry.workflow_engine.types import ProcessedDataConditionResult
+from sentry.workflow_engine.processors.data_condition import split_conditions_by_speed
+from sentry.workflow_engine.types import DataConditionResult, ProcessedDataConditionResult
logger = logging.getLogger(__name__)
T = TypeVar("T")
+DataConditionGroupResult = tuple[bool, list[DataConditionResult], list[DataCondition]]
+
@cache_func_for_models(
[(DataCondition, lambda condition: (condition.condition_group_id,))],
@@ -18,19 +21,54 @@ def get_data_conditions_for_group(data_condition_group_id: int) -> list[DataCond
return list(DataCondition.objects.filter(condition_group_id=data_condition_group_id))
+def process_condition_group_results(
+ results: list[tuple[bool, DataConditionResult]],
+ logic_type: str,
+) -> ProcessedDataConditionResult:
+ logic_result = False
+ condition_results = []
+
+ if logic_type == DataConditionGroup.Type.NONE:
+ # if we get to this point, no conditions were met
+ # because we would have short-circuited
+ logic_result = True
+
+ elif logic_type == DataConditionGroup.Type.ANY:
+ logic_result = any([result[0] for result in results])
+
+ if logic_result:
+ condition_results = [result[1] for result in results if result[0]]
+
+ elif logic_type == DataConditionGroup.Type.ALL:
+ conditions_met = [result[0] for result in results]
+ logic_result = all(conditions_met)
+
+ if logic_result:
+ condition_results = [result[1] for result in results if result[0]]
+
+ return logic_result, condition_results
+
+
def evaluate_condition_group(
data_condition_group: DataConditionGroup,
value: T,
-) -> ProcessedDataConditionResult:
+ is_fast: bool = True,
+) -> DataConditionGroupResult:
"""
Evaluate the conditions for a given group and value.
"""
- results = []
+ results: list[tuple[bool, DataConditionResult]] = []
conditions = get_data_conditions_for_group(data_condition_group.id)
+ if is_fast:
+ conditions, remaining_conditions = split_conditions_by_speed(conditions)
+ else:
+ _, conditions = split_conditions_by_speed(conditions)
+ remaining_conditions = []
+
if len(conditions) == 0:
# if we don't have any conditions, always return True
- return True, []
+ return True, [], remaining_conditions
for condition in conditions:
evaluation_result = condition.evaluate_value(value)
@@ -39,39 +77,34 @@ def evaluate_condition_group(
if is_condition_triggered:
# Check for short-circuiting evaluations
if data_condition_group.logic_type == data_condition_group.Type.ANY_SHORT_CIRCUIT:
- return is_condition_triggered, [evaluation_result]
+ return is_condition_triggered, [evaluation_result], []
if data_condition_group.logic_type == data_condition_group.Type.NONE:
- return False, []
+ return False, [], []
results.append((is_condition_triggered, evaluation_result))
- if data_condition_group.logic_type == data_condition_group.Type.NONE:
- # if we get to this point, no conditions were met
- return True, []
-
- elif data_condition_group.logic_type == data_condition_group.Type.ANY:
- is_any_condition_met = any([result[0] for result in results])
+ logic_type = data_condition_group.logic_type
+ logic_result, condition_results = process_condition_group_results(
+ results,
+ logic_type,
+ )
- if is_any_condition_met:
- condition_results = [result[1] for result in results if result[0]]
- return is_any_condition_met, condition_results
-
- elif data_condition_group.logic_type == data_condition_group.Type.ALL:
- conditions_met = [result[0] for result in results]
- is_all_conditions_met = all(conditions_met)
+ if (not logic_result and logic_type == DataConditionGroup.Type.ALL) or (
+ logic_result and logic_type == DataConditionGroup.Type.ANY
+ ):
+ # if we have a logic type of all and a False result,
+ # or if we have a logic type of any and a True result
+        # then we can short-circuit any remaining conditions since we have a completed logic result
+ remaining_conditions = []
- if is_all_conditions_met:
- condition_results = [result[1] for result in results if result[0]]
- return is_all_conditions_met, condition_results
-
- return False, []
+ return logic_result, condition_results, remaining_conditions
def process_data_condition_group(
data_condition_group_id: int,
value: Any,
-) -> ProcessedDataConditionResult:
+) -> DataConditionGroupResult:
try:
group = DataConditionGroup.objects.get_from_cache(id=data_condition_group_id)
except DataConditionGroup.DoesNotExist:
@@ -79,6 +112,6 @@ def process_data_condition_group(
"DataConditionGroup does not exist",
extra={"id": data_condition_group_id},
)
- return False, []
+ return False, [], []
return evaluate_condition_group(group, value)
diff --git a/src/sentry/workflow_engine/processors/workflow.py b/src/sentry/workflow_engine/processors/workflow.py
index 1baab325c4ae27..b803489c2e0802 100644
--- a/src/sentry/workflow_engine/processors/workflow.py
+++ b/src/sentry/workflow_engine/processors/workflow.py
@@ -4,10 +4,17 @@
import sentry_sdk
from sentry import buffer
+from sentry.db.models.manager.base_query_set import BaseQuerySet
from sentry.utils import json, metrics
-from sentry.workflow_engine.models import Detector, Workflow, WorkflowDataConditionGroup
-from sentry.workflow_engine.models.workflow import get_slow_conditions
-from sentry.workflow_engine.processors.action import evaluate_workflow_action_filters
+from sentry.workflow_engine.models import (
+ Action,
+ DataCondition,
+ DataConditionGroup,
+ Detector,
+ Workflow,
+ WorkflowDataConditionGroup,
+)
+from sentry.workflow_engine.processors.action import filter_recently_fired_workflow_actions
from sentry.workflow_engine.processors.data_condition_group import evaluate_condition_group
from sentry.workflow_engine.processors.detector import get_detector_by_event
from sentry.workflow_engine.types import WorkflowJob
@@ -17,6 +24,7 @@
WORKFLOW_ENGINE_BUFFER_LIST_KEY = "workflow_engine_delayed_processing_buffer"
+# TODO remove this method
def get_data_condition_groups_to_fire(
workflows: set[Workflow], job: WorkflowJob
) -> dict[int, list[int]]:
@@ -30,7 +38,7 @@ def get_data_condition_groups_to_fire(
for workflow_dcg in workflow_dcgs:
action_condition = workflow_dcg.condition_group
- evaluation, result = evaluate_condition_group(action_condition, job)
+ evaluation, result, _ = evaluate_condition_group(action_condition, job)
if evaluation:
workflow_action_groups[workflow_dcg.workflow_id].append(action_condition.id)
@@ -69,12 +77,13 @@ def evaluate_workflow_triggers(workflows: set[Workflow], job: WorkflowJob) -> se
workflows_to_enqueue: set[Workflow] = set()
for workflow in workflows:
- if workflow.evaluate_trigger_conditions(job):
- triggered_workflows.add(workflow)
+ evaluation, remaining_conditions = workflow.evaluate_trigger_conditions(job)
+ if remaining_conditions:
+ workflows_to_enqueue.add(workflow)
else:
- if get_slow_conditions(workflow):
- # enqueue to be evaluated later
- workflows_to_enqueue.add(workflow)
+ if evaluation:
+ # Only add workflows that have no remaining conditions to check
+ triggered_workflows.add(workflow)
if workflows_to_enqueue:
enqueue_workflows(workflows_to_enqueue, job)
@@ -82,6 +91,40 @@ def evaluate_workflow_triggers(workflows: set[Workflow], job: WorkflowJob) -> se
return triggered_workflows
+def evaluate_workflows_action_filters(
+ workflows: set[Workflow],
+ job: WorkflowJob,
+) -> BaseQuerySet[Action]:
+ filtered_action_groups: set[DataConditionGroup] = set()
+ enqueued_conditions: list[DataCondition] = []
+
+    # gets the list of workflow ids, and then gets the workflow_data_condition_groups for those workflows
+ workflow_ids = {workflow.id for workflow in workflows}
+
+ action_conditions = DataConditionGroup.objects.filter(
+ workflowdataconditiongroup__workflow_id__in=workflow_ids
+ ).distinct()
+
+ for action_condition in action_conditions:
+ evaluation, result, remaining_conditions = evaluate_condition_group(action_condition, job)
+
+ if remaining_conditions:
+ # If there are remaining conditions for the action filter to evaluate,
+ # then return the list of conditions to enqueue
+ enqueued_conditions.extend(remaining_conditions)
+ else:
+ # if we don't have any other conditions to evaluate, add the action to the list
+ if evaluation:
+ filtered_action_groups.add(action_condition)
+
+ # get the actions for any of the triggered data condition groups
+ actions = Action.objects.filter(
+ dataconditiongroupaction__condition_group__in=filtered_action_groups
+ ).distinct()
+
+ return filter_recently_fired_workflow_actions(actions, job["event"].group)
+
+
def process_workflows(job: WorkflowJob) -> set[Workflow]:
"""
This method will get the detector based on the event, and then gather the associated workflows.
@@ -101,7 +144,7 @@ def process_workflows(job: WorkflowJob) -> set[Workflow]:
# Get the workflows, evaluate the when_condition_group, finally evaluate the actions for workflows that are triggered
workflows = set(Workflow.objects.filter(detectorworkflow__detector_id=detector.id).distinct())
triggered_workflows = evaluate_workflow_triggers(workflows, job)
- actions = evaluate_workflow_action_filters(triggered_workflows, job)
+ actions = evaluate_workflows_action_filters(triggered_workflows, job)
with sentry_sdk.start_span(op="workflow_engine.process_workflows.trigger_actions"):
for action in actions:
diff --git a/tests/sentry/workflow_engine/models/test_workflow.py b/tests/sentry/workflow_engine/models/test_workflow.py
index 8a9d1e841a4b9c..b38134c99a67da 100644
--- a/tests/sentry/workflow_engine/models/test_workflow.py
+++ b/tests/sentry/workflow_engine/models/test_workflow.py
@@ -1,3 +1,4 @@
+from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.types import WorkflowJob
from tests.sentry.workflow_engine.test_base import BaseWorkflowTest
@@ -12,19 +13,32 @@ def setUp(self):
self.job = WorkflowJob({"event": self.group_event})
def test_evaluate_trigger_conditions__condition_new_event__True(self):
- evaluation = self.workflow.evaluate_trigger_conditions(self.job)
+ evaluation, _ = self.workflow.evaluate_trigger_conditions(self.job)
assert evaluation is True
def test_evaluate_trigger_conditions__condition_new_event__False(self):
# Update event to have been seen before
self.group_event.group.times_seen = 5
- evaluation = self.workflow.evaluate_trigger_conditions(self.job)
+ evaluation, _ = self.workflow.evaluate_trigger_conditions(self.job)
assert evaluation is False
def test_evaluate_trigger_conditions__no_conditions(self):
self.workflow.when_condition_group = None
self.workflow.save()
- evaluation = self.workflow.evaluate_trigger_conditions(self.job)
+ evaluation, _ = self.workflow.evaluate_trigger_conditions(self.job)
assert evaluation is True
+
+ def test_evaluate_trigger_conditions__slow_condition(self):
+ # Update group to _all_, since the fast condition is met
+ self.data_condition_group.update(logic_type="all")
+
+ slow_condition = self.create_data_condition(
+ type=Condition.EVENT_FREQUENCY_COUNT, comparison={"interval": "1d", "value": 7}
+ )
+ self.data_condition_group.conditions.add(slow_condition)
+ evaluation, remaining_conditions = self.workflow.evaluate_trigger_conditions(self.job)
+
+ assert evaluation is True
+ assert remaining_conditions == [slow_condition]
diff --git a/tests/sentry/workflow_engine/processors/test_action.py b/tests/sentry/workflow_engine/processors/test_action.py
index 5b78a9a19f50cd..e6280065bf1e0e 100644
--- a/tests/sentry/workflow_engine/processors/test_action.py
+++ b/tests/sentry/workflow_engine/processors/test_action.py
@@ -3,60 +3,13 @@
from django.utils import timezone
from sentry.testutils.helpers.datetime import freeze_time
-from sentry.workflow_engine.models.action import Action
+from sentry.workflow_engine.models import Action
from sentry.workflow_engine.models.action_group_status import ActionGroupStatus
-from sentry.workflow_engine.models.data_condition import Condition
-from sentry.workflow_engine.processors.action import (
- evaluate_workflow_action_filters,
- filter_recently_fired_workflow_actions,
-)
+from sentry.workflow_engine.processors.action import filter_recently_fired_workflow_actions
from sentry.workflow_engine.types import WorkflowJob
from tests.sentry.workflow_engine.test_base import BaseWorkflowTest
-class TestEvaluateWorkflowActionFilters(BaseWorkflowTest):
- def setUp(self):
- (
- self.workflow,
- self.detector,
- self.detector_workflow,
- self.workflow_triggers,
- ) = self.create_detector_and_workflow()
-
- self.action_group, self.action = self.create_workflow_action(workflow=self.workflow)
-
- self.group, self.event, self.group_event = self.create_group_event(
- occurrence=self.build_occurrence(evidence_data={"detector_id": self.detector.id})
- )
- self.job = WorkflowJob({"event": self.group_event})
-
- def test_basic__no_filter(self):
- triggered_actions = evaluate_workflow_action_filters({self.workflow}, self.job)
- assert set(triggered_actions) == {self.action}
-
- def test_basic__with_filter__passes(self):
- self.create_data_condition(
- condition_group=self.action_group,
- type=Condition.EVENT_SEEN_COUNT,
- comparison=1,
- condition_result=True,
- )
-
- triggered_actions = evaluate_workflow_action_filters({self.workflow}, self.job)
- assert set(triggered_actions) == {self.action}
-
- def test_basic__with_filter__filtered(self):
- # Add a filter to the action's group
- self.create_data_condition(
- condition_group=self.action_group,
- type=Condition.EVENT_CREATED_BY_DETECTOR,
- comparison=self.detector.id + 1,
- )
-
- triggered_actions = evaluate_workflow_action_filters({self.workflow}, self.job)
- assert not triggered_actions
-
-
@freeze_time("2024-01-09")
class TestFilterRecentlyFiredWorkflowActions(BaseWorkflowTest):
def setUp(self):
diff --git a/tests/sentry/workflow_engine/processors/test_data_condition.py b/tests/sentry/workflow_engine/processors/test_data_condition.py
new file mode 100644
index 00000000000000..b065eb056c69f1
--- /dev/null
+++ b/tests/sentry/workflow_engine/processors/test_data_condition.py
@@ -0,0 +1,57 @@
+from sentry.testutils.cases import TestCase
+from sentry.workflow_engine.models.data_condition import Condition, DataCondition
+from sentry.workflow_engine.processors.data_condition import split_conditions_by_speed
+
+
+class SplitConditionsBySpeedTest(TestCase):
+ def setUp(self):
+ self.slow_config = {
+ "interval": "1d",
+ "value": 7,
+ }
+
+ def test_simple(self):
+ conditions = [
+ self.create_data_condition(type=Condition.EQUAL), # fast
+ self.create_data_condition(type=Condition.EQUAL), # fast
+ self.create_data_condition(
+ type=Condition.EVENT_FREQUENCY_COUNT, comparison=self.slow_config
+ ), # slow
+ ]
+
+ fast_conditions, slow_conditions = split_conditions_by_speed(conditions)
+
+ assert fast_conditions == [conditions[0], conditions[1]]
+ assert slow_conditions == [conditions[2]]
+
+ def test_only_fast_conditions(self):
+ conditions = [
+ self.create_data_condition(type=Condition.EQUAL), # fast
+ self.create_data_condition(type=Condition.EQUAL), # fast
+ ]
+
+ fast_conditions, slow_conditions = split_conditions_by_speed(conditions)
+
+ assert fast_conditions == [conditions[0], conditions[1]]
+ assert slow_conditions == []
+
+ def test_only_slow_conditions(self):
+ conditions = [
+ self.create_data_condition(
+ type=Condition.EVENT_FREQUENCY_COUNT, comparison=self.slow_config
+ ), # slow
+ self.create_data_condition(
+ type=Condition.EVENT_FREQUENCY_COUNT, comparison=self.slow_config
+ ), # slow
+ ]
+
+ fast_conditions, slow_conditions = split_conditions_by_speed(conditions)
+
+ assert slow_conditions == [conditions[0], conditions[1]]
+ assert fast_conditions == []
+
+ def test_no_conditions(self):
+ conditions: list[DataCondition] = []
+ fast_conditions, slow_conditions = split_conditions_by_speed(conditions)
+ assert fast_conditions == []
+ assert slow_conditions == []
diff --git a/tests/sentry/workflow_engine/processors/test_data_condition_group.py b/tests/sentry/workflow_engine/processors/test_data_condition_group.py
index c4629bbec753bb..7aba0ee8fd95a6 100644
--- a/tests/sentry/workflow_engine/processors/test_data_condition_group.py
+++ b/tests/sentry/workflow_engine/processors/test_data_condition_group.py
@@ -26,7 +26,7 @@ def test_process_data_condition_group(self):
with mock.patch(
"sentry.workflow_engine.processors.data_condition_group.logger"
) as mock_logger:
- assert process_data_condition_group(1, 1) == (False, [])
+ assert process_data_condition_group(1, 1) == (False, [], [])
assert mock_logger.exception.call_args[0][0] == "DataConditionGroup does not exist"
def test_process_data_condition_group__exists__fails(self):
@@ -35,7 +35,7 @@ def test_process_data_condition_group__exists__fails(self):
condition_group=data_condition_group, type=Condition.GREATER, comparison=5
)
- assert process_data_condition_group(data_condition_group.id, 1) == (False, [])
+ assert process_data_condition_group(data_condition_group.id, 1) == (False, [], [])
def test_process_data_condition_group__exists__passes(self):
data_condition_group = self.create_data_condition_group()
@@ -48,6 +48,7 @@ def test_process_data_condition_group__exists__passes(self):
assert process_data_condition_group(data_condition_group.id, 10) == (
True,
[DetectorPriorityLevel.HIGH],
+ [],
)
@@ -80,6 +81,7 @@ def test_evaluate_condition_group__passes_all(self):
) == (
True,
[DetectorPriorityLevel.HIGH, DetectorPriorityLevel.LOW],
+ [],
)
def test_evaluate_condition_group__passes_one(self):
@@ -89,6 +91,7 @@ def test_evaluate_condition_group__passes_one(self):
) == (
True,
[DetectorPriorityLevel.LOW],
+ [],
)
def test_evaluate_condition_group__fails_all(self):
@@ -98,6 +101,7 @@ def test_evaluate_condition_group__fails_all(self):
) == (
False,
[],
+ [],
)
def test_evaluate_condition_group__passes_without_conditions(self):
@@ -107,6 +111,7 @@ def test_evaluate_condition_group__passes_without_conditions(self):
assert evaluate_condition_group(data_condition_group, 10) == (
True,
[],
+ [],
)
@@ -136,12 +141,14 @@ def test_evaluate_condition_group__passes_all(self):
assert evaluate_condition_group(self.data_condition_group, 10) == (
True,
[True],
+ [],
)
def test_evaluate_condition_group__passes_one(self):
assert evaluate_condition_group(self.data_condition_group, 4) == (
True,
[True],
+ [],
)
def test_evaluate_condition_group__fails_all(self):
@@ -151,6 +158,7 @@ def test_evaluate_condition_group__fails_all(self):
) == (
False,
[],
+ [],
)
def test_evaluate_condition_group__passes_without_conditions(self):
@@ -160,6 +168,7 @@ def test_evaluate_condition_group__passes_without_conditions(self):
assert evaluate_condition_group(data_condition_group, 10) == (
True,
[],
+ [],
)
@@ -189,18 +198,21 @@ def test_evaluate_condition_group__passes_all(self):
assert evaluate_condition_group(self.data_condition_group, 10) == (
True,
[DetectorPriorityLevel.HIGH, DetectorPriorityLevel.LOW],
+ [],
)
def test_evaluate_condition_group__passes_one(self):
assert evaluate_condition_group(self.data_condition_group, 4) == (
False,
[],
+ [],
)
def test_evaluate_condition_group__fails_all(self):
assert evaluate_condition_group(self.data_condition_group, 1) == (
False,
[],
+ [],
)
def test_evaluate_condition_group__passes_without_conditions(self):
@@ -210,6 +222,7 @@ def test_evaluate_condition_group__passes_without_conditions(self):
assert evaluate_condition_group(data_condition_group, 10) == (
True,
[],
+ [],
)
@@ -239,16 +252,85 @@ def test_evaluate_condition_group__all_conditions_pass__fails(self):
assert evaluate_condition_group(self.data_condition_group, 10) == (
False,
[],
+ [],
)
def test_evaluate_condition_group__one_condition_pass__fails(self):
assert evaluate_condition_group(self.data_condition_group, 4) == (
False,
[],
+ [],
)
def test_evaluate_condition_group__no_conditions_pass__passes(self):
assert evaluate_condition_group(self.data_condition_group, 1) == (
True,
[],
+ [],
+ )
+
+
+class TestEvaluateConditionGroupWithSlowConditions(TestCase):
+ def setUp(self):
+ self.data_condition_group = self.create_data_condition_group(
+ logic_type=DataConditionGroup.Type.ALL
+ )
+
+ self.data_condition = self.create_data_condition(
+ comparison=5,
+ type=Condition.GREATER,
+ condition_result=True,
+ condition_group=self.data_condition_group,
+ )
+
+ self.slow_condition = self.create_data_condition(
+ type=Condition.EVENT_FREQUENCY_COUNT,
+ comparison={"interval": "1d", "value": 7},
+ condition_result=True,
+ condition_group=self.data_condition_group,
+ )
+
+ def test_basic_remaining_conditions(self):
+ logic_result, condition_results, remaining_conditions = evaluate_condition_group(
+ self.data_condition_group,
+ 10,
+ True,
+ )
+
+ assert logic_result is True
+ assert condition_results == [True]
+ assert remaining_conditions == [self.slow_condition]
+
+ def test_execute_slow_conditions(self):
+ logic_result, condition_results, remaining_conditions = evaluate_condition_group(
+ self.data_condition_group,
+ {"snuba_results": [10]},
+ False,
+ )
+
+ assert logic_result is True
+ assert condition_results == [True]
+ assert remaining_conditions == []
+
+ def test_short_circuit_with_all(self):
+ logic_result, condition_results, remaining_conditions = evaluate_condition_group(
+ self.data_condition_group,
+ 1,
+ True,
)
+
+ assert logic_result is False
+ assert condition_results == []
+ assert remaining_conditions == []
+
+ def test_short_circuit_with_any(self):
+ self.data_condition_group.update(logic_type=DataConditionGroup.Type.ANY)
+ logic_result, condition_results, remaining_conditions = evaluate_condition_group(
+ self.data_condition_group,
+ 10,
+ True,
+ )
+
+ assert logic_result is True
+ assert condition_results == [True]
+ assert remaining_conditions == []
diff --git a/tests/sentry/workflow_engine/processors/test_workflow.py b/tests/sentry/workflow_engine/processors/test_workflow.py
index 480dcd0da84de1..209893f075c206 100644
--- a/tests/sentry/workflow_engine/processors/test_workflow.py
+++ b/tests/sentry/workflow_engine/processors/test_workflow.py
@@ -1,6 +1,8 @@
from datetime import timedelta
from unittest import mock
+import pytest
+
from sentry import buffer
from sentry.eventstream.base import GroupState
from sentry.grouping.grouptype import ErrorGroupType
@@ -11,6 +13,7 @@
from sentry.workflow_engine.processors.workflow import (
WORKFLOW_ENGINE_BUFFER_LIST_KEY,
evaluate_workflow_triggers,
+ evaluate_workflows_action_filters,
process_workflows,
)
from sentry.workflow_engine.types import WorkflowJob
@@ -147,8 +150,10 @@ def test_many_workflows(self):
assert triggered_workflows == {self.workflow, workflow_two}
- def test_skips_slow_conditions(self):
- # triggers workflow if the logic_type is ANY and a condition is met
+ def test_delays_slow_conditions(self):
+ assert self.workflow.when_condition_group
+ self.workflow.when_condition_group.update(logic_type=DataConditionGroup.Type.ALL)
+
self.create_data_condition(
condition_group=self.workflow.when_condition_group,
type=Condition.EVENT_FREQUENCY_COUNT,
@@ -160,9 +165,11 @@ def test_skips_slow_conditions(self):
)
triggered_workflows = evaluate_workflow_triggers({self.workflow}, self.job)
- assert triggered_workflows == {self.workflow}
+        # no workflows are triggered because the slow conditions need to be evaluated
+ assert triggered_workflows == set()
[email protected](reason="Skipping this test until enqueue is refactored")
@freeze_time(FROZEN_TIME)
class TestEnqueueWorkflow(BaseWorkflowTest):
buffer_timestamp = (FROZEN_TIME + timedelta(seconds=1)).timestamp()
@@ -240,3 +247,72 @@ def test_enqueues_workflow_any_logic_type(self):
WORKFLOW_ENGINE_BUFFER_LIST_KEY, 0, self.buffer_timestamp
)
assert project_ids[0][0] == self.project.id
+
+
+class TestEvaluateWorkflowActionFilters(BaseWorkflowTest):
+ def setUp(self):
+ (
+ self.workflow,
+ self.detector,
+ self.detector_workflow,
+ self.workflow_triggers,
+ ) = self.create_detector_and_workflow()
+
+ self.action_group, self.action = self.create_workflow_action(workflow=self.workflow)
+
+ self.group, self.event, self.group_event = self.create_group_event(
+ occurrence=self.build_occurrence(evidence_data={"detector_id": self.detector.id})
+ )
+ self.job = WorkflowJob({"event": self.group_event})
+
+ def test_basic__no_filter(self):
+ triggered_actions = evaluate_workflows_action_filters({self.workflow}, self.job)
+ assert set(triggered_actions) == {self.action}
+
+ def test_basic__with_filter__passes(self):
+ self.create_data_condition(
+ condition_group=self.action_group,
+ type=Condition.EVENT_SEEN_COUNT,
+ comparison=1,
+ condition_result=True,
+ )
+
+ triggered_actions = evaluate_workflows_action_filters({self.workflow}, self.job)
+ assert set(triggered_actions) == {self.action}
+
+ def test_basic__with_filter__filtered(self):
+ # Add a filter to the action's group
+ self.create_data_condition(
+ condition_group=self.action_group,
+ type=Condition.EVENT_CREATED_BY_DETECTOR,
+ comparison=self.detector.id + 1,
+ )
+
+ triggered_actions = evaluate_workflows_action_filters({self.workflow}, self.job)
+ assert not triggered_actions
+
+ def test_with_slow_conditions(self):
+ self.action_group.logic_type = DataConditionGroup.Type.ALL
+
+ self.create_data_condition(
+ condition_group=self.action_group,
+ type=Condition.EVENT_FREQUENCY_COUNT,
+ comparison={"interval": "1d", "value": 7},
+ )
+
+ self.create_data_condition(
+ condition_group=self.action_group,
+ type=Condition.EVENT_SEEN_COUNT,
+ comparison=1,
+ condition_result=True,
+ )
+ self.action_group.save()
+
+ triggered_actions = evaluate_workflows_action_filters({self.workflow}, self.job)
+
+ assert self.action_group.conditions.count() == 2
+
+ # The first condition passes, but the second is enqueued for later evaluation
+ assert not triggered_actions
+
+ # TODO @saponifi3d - Add a check to ensure the second condition is enqueued for later evaluation
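
The thread running through this diff is: classify each condition as fast or slow, evaluate only the fast ones inline, and return the slow remainder so it can be enqueued, except when the fast results already decide an ALL (failed) or ANY (passed) group. A self-contained sketch of that pattern with plain callables standing in for DataCondition (names and shapes are illustrative, not the workflow_engine API):

from dataclasses import dataclass
from typing import Any, Callable

@dataclass
class Cond:
    check: Callable[[Any], bool]
    slow: bool = False  # slow conditions (e.g. frequency queries) are deferred

def split_by_speed(conds):
    fast = [c for c in conds if not c.slow]
    slow = [c for c in conds if c.slow]
    return fast, slow

def evaluate_group(conds, value, logic_type="all"):
    fast, remaining = split_by_speed(conds)
    results = [c.check(value) for c in fast]
    passed = all(results) if logic_type == "all" else any(results)
    # Short-circuit: a failed ALL group or a passed ANY group is already decided,
    # so there is nothing left to enqueue.
    if (logic_type == "all" and not passed) or (logic_type == "any" and passed):
        remaining = []
    return passed, remaining

conds = [Cond(lambda v: v > 5), Cond(lambda v: v < 100, slow=True)]
print(evaluate_group(conds, 10, "all"))  # (True, [<slow cond>]) -> defer the slow check
print(evaluate_group(conds, 1, "all"))   # (False, [])           -> short-circuited
print(evaluate_group(conds, 10, "any"))  # (True, [])            -> short-circuited
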
|
d8c3b6246a0c74fdaf436baeba0e49195d4d50a9
|
2021-01-29 00:11:34
|
Stephen Cefali
|
feat(aws-lambda): minor changes for aws lambda integration (#23415)
| false
|
minor changes for aws lambda integration (#23415)
|
feat
|
diff --git a/src/sentry/integrations/aws_lambda/client.py b/src/sentry/integrations/aws_lambda/client.py
index fac14007f40746..4c7315ef684265 100644
--- a/src/sentry/integrations/aws_lambda/client.py
+++ b/src/sentry/integrations/aws_lambda/client.py
@@ -54,7 +54,6 @@ def gen_aws_client(account_number, region, aws_external_id, service_name="lambda
"Action": [
"lambda:ListFunctions",
"lambda:GetLayerVersion",
- "iam:PassRole",
"organizations:DescribeAccount",
],
"Resource": "*",
diff --git a/src/sentry/integrations/aws_lambda/integration.py b/src/sentry/integrations/aws_lambda/integration.py
index 924111f7607c04..ff5d2418ff4987 100644
--- a/src/sentry/integrations/aws_lambda/integration.py
+++ b/src/sentry/integrations/aws_lambda/integration.py
@@ -37,7 +37,7 @@
logger = logging.getLogger("sentry.integrations.aws_lambda")
DESCRIPTION = """
-The AWS Lambda integration will automatically instrument your Lambda functions without any code changes. All you need to do is run a CloudFormation stack that we provide to get started. Note, currently only Node runtimes are supported.
+The AWS Lambda integration will automatically instrument your Lambda functions without any code changes. We use a CloudFormation stack ([Learn more about CloudFormation](https://aws.amazon.com/cloudformation/)) to create a Sentry role and enable error and transaction capture from your Lambda functions.
"""
diff --git a/tests/sentry/integrations/aws_lambda/test_client.py b/tests/sentry/integrations/aws_lambda/test_client.py
index 1ef2ecfd15bad9..04fe60ccfc7e0a 100644
--- a/tests/sentry/integrations/aws_lambda/test_client.py
+++ b/tests/sentry/integrations/aws_lambda/test_client.py
@@ -58,7 +58,6 @@ def test_simple(self, mock_get_client, mock_get_session):
"Action": [
"lambda:ListFunctions",
"lambda:GetLayerVersion",
- "iam:PassRole",
"organizations:DescribeAccount",
],
"Resource": "*",
|
9d65aec43bcc02ea9f0fef1e9c9238a2f826a139
|
2022-07-19 20:54:50
|
Josh Soref
|
meta: Fix spelling typos in Replays (#36047)
| false
|
Fix spelling typos in Replays (#36047)
|
meta
|
diff --git a/static/app/components/replays/breadcrumbs/replayTimeline.tsx b/static/app/components/replays/breadcrumbs/replayTimeline.tsx
index 2a8e79c053e099..70f3f65fca60cb 100644
--- a/static/app/components/replays/breadcrumbs/replayTimeline.tsx
+++ b/static/app/components/replays/breadcrumbs/replayTimeline.tsx
@@ -10,7 +10,7 @@ import {
import ReplayTimelineEvents from 'sentry/components/replays/breadcrumbs/replayTimelineEvents';
import ReplayTimelineSpans from 'sentry/components/replays/breadcrumbs/replayTimelineSpans';
import Stacked from 'sentry/components/replays/breadcrumbs/stacked';
-import {TimelineScubber} from 'sentry/components/replays/player/scrubber';
+import {TimelineScrubber} from 'sentry/components/replays/player/scrubber';
import ScrubberMouseTracking from 'sentry/components/replays/player/scrubberMouseTracking';
import {useReplayContext} from 'sentry/components/replays/replayContext';
import {Resizeable} from 'sentry/components/replays/resizeable';
@@ -49,7 +49,7 @@ function ReplayTimeline({}: Props) {
<Stacked>
<MinorGridlines duration={duration} width={width} />
<MajorGridlines duration={duration} width={width} />
- <TimelineScubber />
+ <TimelineScrubber />
<UnderTimestamp paddingTop="52px">
<ReplayTimelineSpans
duration={duration}
diff --git a/static/app/components/replays/player/scrubber.tsx b/static/app/components/replays/player/scrubber.tsx
index 4d46ca4993a497..0aff61c315cbb3 100644
--- a/static/app/components/replays/player/scrubber.tsx
+++ b/static/app/components/replays/player/scrubber.tsx
@@ -107,7 +107,7 @@ const Wrapper = styled('div')`
}
`;
-export const TimelineScubber = styled(Scrubber)`
+export const TimelineScrubber = styled(Scrubber)`
height: 100%;
${Meter} {
diff --git a/static/app/components/replays/replayContext.tsx b/static/app/components/replays/replayContext.tsx
index d9606405026265..c06b02b2e6a3e0 100644
--- a/static/app/components/replays/replayContext.tsx
+++ b/static/app/components/replays/replayContext.tsx
@@ -49,7 +49,7 @@ type ReplayPlayerContextProps = {
dimensions: Dimensions;
/**
- * Duration of the video, in miliseconds
+ * Duration of the video, in milliseconds
*/
duration: undefined | number;
diff --git a/static/app/utils/replays/hooks/useReplayData.tsx b/static/app/utils/replays/hooks/useReplayData.tsx
index b49fddc285f76e..0f45e2501d4c07 100644
--- a/static/app/utils/replays/hooks/useReplayData.tsx
+++ b/static/app/utils/replays/hooks/useReplayData.tsx
@@ -132,7 +132,7 @@ const INITIAL_STATE: State = Object.freeze({
* must be delegated to the `ReplayReader` class.
*
* @param {orgId, eventSlug} Where to find the root replay event
- * @returns An object representing a unified result of the network reqeusts. Either a single `ReplayReader` data object or fetch errors.
+ * @returns An object representing a unified result of the network requests. Either a single `ReplayReader` data object or fetch errors.
*/
function useReplayData({eventSlug, orgId}: Options): Result {
const [projectId, eventId] = eventSlug.split(':');
diff --git a/static/app/utils/replays/replayDataUtils.tsx b/static/app/utils/replays/replayDataUtils.tsx
index b3b38560c66b19..8593121bb399ad 100644
--- a/static/app/utils/replays/replayDataUtils.tsx
+++ b/static/app/utils/replays/replayDataUtils.tsx
@@ -136,7 +136,7 @@ export function spansFactory(spans: ReplaySpan[]) {
/**
* The original `this._event.startTimestamp` and `this._event.endTimestamp`
* are the same. It's because the root replay event is re-purposing the
- * `transaction` type, but it is not a real span occuring over time.
+ * `transaction` type, but it is not a real span occurring over time.
* So we need to figure out the real start and end timestamps based on when
* first and last bits of data were collected. In milliseconds.
*/
|
d347e3f1646f8da58752248ff16c2f5d28d92d2c
|
2023-09-14 03:41:41
|
Alberto Leal
|
chore(provisioning): Remove EnablePartnerSSO (#56189)
| false
|
Remove EnablePartnerSSO (#56189)
|
chore
|
diff --git a/src/sentry/auth/helper.py b/src/sentry/auth/helper.py
index cabea679649e8d..5effd6b9be82c9 100644
--- a/src/sentry/auth/helper.py
+++ b/src/sentry/auth/helper.py
@@ -35,7 +35,7 @@
from sentry.auth.providers.fly.provider import FlyOAuth2Provider
from sentry.auth.superuser import is_active_superuser
from sentry.locks import locks
-from sentry.models import AuditLogEntry, AuthIdentity, AuthProvider, User, outbox_context
+from sentry.models import AuthIdentity, AuthProvider, User, outbox_context
from sentry.pipeline import Pipeline, PipelineSessionStore
from sentry.pipeline.provider import PipelineProvider
from sentry.services.hybrid_cloud.organization import (
@@ -980,31 +980,4 @@ def disable_2fa_required(self) -> None:
)
-def EnablePartnerSSO(provider_key, sentry_org, provider_config):
- """
- Simplified abstraction from AuthHelper for enabling an SSO AuthProvider for a Sentry organization.
- Fires appropriate Audit Log and signal emitter for SSO Enabled
- """
- with transaction.atomic(router.db_for_write(AuthProvider)):
- provider_model = AuthProvider.objects.create(
- organization_id=sentry_org.id, provider=provider_key, config=provider_config
- )
-
- # TODO: Analytics requires a user id
- # At provisioning time, no user is available so we cannot provide any user
- # sso_enabled.send_robust(
- # organization=sentry_org,
- # provider=provider_key,
- # sender="EnablePartnerSSO",
- # )
-
- AuditLogEntry.objects.create(
- organization_id=sentry_org.id,
- actor_label=f"partner_provisioning_api:{provider_key}",
- target_object=provider_model.id,
- event=audit_log.get_event_id("SSO_ENABLE"),
- data=provider_model.get_audit_log_data(),
- )
-
-
CHANNEL_PROVIDER_MAP = {ChannelName.FLY_IO.value: FlyOAuth2Provider}
|
4f5c332eeab7fcd500d7c575a051ae4362336217
|
2020-05-11 23:46:35
|
Dan Fuller
|
feat(alerts): Convert subscription creation to rely on `SnubaQuery` (#18706)
| false
|
Convert subscription creation to rely on `SnubaQuery` (#18706)
|
feat
|
diff --git a/migrations_lockfile.txt b/migrations_lockfile.txt
index a12630f7d19b27..ba6723b6702752 100644
--- a/migrations_lockfile.txt
+++ b/migrations_lockfile.txt
@@ -10,7 +10,7 @@ auth: 0008_alter_user_username_max_length
contenttypes: 0002_remove_content_type_name
jira_ac: 0001_initial
nodestore: 0001_initial
-sentry: 0074_add_metric_alert_feature
+sentry: 0075_metric_alerts_fix_releases
sessions: 0001_initial
sites: 0002_alter_domain_unique
social_auth: 0001_initial
diff --git a/src/sentry/migrations/0075_metric_alerts_fix_releases.py b/src/sentry/migrations/0075_metric_alerts_fix_releases.py
new file mode 100644
index 00000000000000..b6af6ef1720cfd
--- /dev/null
+++ b/src/sentry/migrations/0075_metric_alerts_fix_releases.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.29 on 2020-05-08 20:43
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+from sentry.utils.query import RangeQuerySetWrapperWithProgressBar
+
+
+def migrate_alert_query_model(apps, schema_editor):
+ SnubaQuery = apps.get_model("sentry", "SnubaQuery")
+ for snuba_query in RangeQuerySetWrapperWithProgressBar(
+ SnubaQuery.objects.filter(aggregate="count_unique(user)")
+ ):
+ snuba_query.aggregate = "count_unique(tags[sentry:user])"
+ snuba_query.save()
+
+
+class Migration(migrations.Migration):
+ # This flag is used to mark that a migration shouldn't be automatically run in
+ # production. We set this to True for operations that we think are risky and want
+ # someone from ops to run manually and monitor.
+ # General advice is that if in doubt, mark your migration as `is_dangerous`.
+ # Some things you should always mark as dangerous:
+ # - Large data migrations. Typically we want these to be run manually by ops so that
+ # they can be monitored. Since data migrations will now hold a transaction open
+ # this is even more important.
+ # - Adding columns to highly active tables, even ones that are NULL.
+ is_dangerous = False
+
+ # This flag is used to decide whether to run this migration in a transaction or not.
+ # By default we prefer to run in a transaction, but for migrations where you want
+ # to `CREATE INDEX CONCURRENTLY` this needs to be set to False. Typically you'll
+ # want to create an index concurrently when adding one to an existing table.
+ atomic = False
+
+ dependencies = [("sentry", "0074_add_metric_alert_feature")]
+
+ operations = [
+ migrations.RunPython(migrate_alert_query_model, reverse_code=migrations.RunPython.noop)
+ ]
diff --git a/src/sentry/snuba/subscriptions.py b/src/sentry/snuba/subscriptions.py
index c9f4caaa9adaf7..33b694b870f033 100644
--- a/src/sentry/snuba/subscriptions.py
+++ b/src/sentry/snuba/subscriptions.py
@@ -20,7 +20,7 @@
aggregation_function_translations = {
QueryAggregations.TOTAL: "count()",
- QueryAggregations.UNIQUE_USERS: "count_unique(user)",
+ QueryAggregations.UNIQUE_USERS: "count_unique(tags[sentry:user])",
}
diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py
index 2ad4c104b5f4d8..bdca348c3bd781 100644
--- a/src/sentry/snuba/tasks.py
+++ b/src/sentry/snuba/tasks.py
@@ -2,15 +2,9 @@
import json
-from sentry.api.event_search import get_filter
-from sentry.models import Environment
+from sentry.api.event_search import get_filter, resolve_field_list
from sentry.snuba.discover import resolve_discover_aliases
-from sentry.snuba.models import (
- QueryAggregations,
- QueryDatasets,
- QuerySubscription,
- query_aggregation_to_snuba,
-)
+from sentry.snuba.models import QueryDatasets, QuerySubscription
from sentry.tasks.base import instrumented_task
from sentry.utils import metrics
from sentry.utils.snuba import _snuba_pool, SnubaError
@@ -80,7 +74,9 @@ def update_subscription_in_snuba(query_subscription_id):
return
if subscription.subscription_id is not None:
- _delete_from_snuba(QueryDatasets(subscription.dataset), subscription.subscription_id)
+ _delete_from_snuba(
+ QueryDatasets(subscription.snuba_query.dataset), subscription.subscription_id
+ )
subscription_id = _create_in_snuba(subscription)
subscription.update(
@@ -116,31 +112,28 @@ def delete_subscription_from_snuba(query_subscription_id):
def _create_in_snuba(subscription):
- conditions = resolve_discover_aliases(get_filter(subscription.query))[0].conditions
- try:
- environment = subscription.environments.all()[:1].get()
- except Environment.DoesNotExist:
- environment = None
-
- if environment:
- conditions.append(["environment", "=", environment.name])
- conditions = apply_dataset_conditions(QueryDatasets(subscription.dataset), conditions)
+ snuba_query = subscription.snuba_query
+ snuba_filter = get_filter(snuba_query.query)
+ snuba_filter.update_with(
+ resolve_field_list([snuba_query.aggregate], snuba_filter, auto_fields=False)
+ )
+ snuba_filter = resolve_discover_aliases(snuba_filter)[0]
+ if snuba_query.environment:
+ snuba_filter.conditions.append(["environment", "=", snuba_query.environment.name])
+ conditions = apply_dataset_conditions(
+ QueryDatasets(snuba_query.dataset), snuba_filter.conditions
+ )
response = _snuba_pool.urlopen(
"POST",
"/%s/subscriptions" % (subscription.dataset,),
body=json.dumps(
{
"project_id": subscription.project_id,
- "dataset": subscription.dataset,
- # We only care about conditions here. Filter keys only matter for
- # filtering to project and groups. Projects are handled with an
- # explicit param, and groups can't be queried here.
+ "dataset": snuba_query.dataset,
"conditions": conditions,
- "aggregations": [
- query_aggregation_to_snuba[QueryAggregations(subscription.aggregation)]
- ],
- "time_window": subscription.time_window,
- "resolution": subscription.resolution,
+ "aggregations": snuba_filter.aggregations,
+ "time_window": snuba_query.time_window,
+ "resolution": snuba_query.resolution,
}
),
)
diff --git a/tests/sentry/snuba/test_tasks.py b/tests/sentry/snuba/test_tasks.py
index f02601c6f567ae..b356c6f0a3a993 100644
--- a/tests/sentry/snuba/test_tasks.py
+++ b/tests/sentry/snuba/test_tasks.py
@@ -9,7 +9,8 @@
from mock import Mock, patch
from six import add_metaclass
-from sentry.snuba.models import QueryAggregations, QueryDatasets, QuerySubscription
+from sentry.snuba.models import QueryAggregations, QueryDatasets, QuerySubscription, SnubaQuery
+from sentry.snuba.subscriptions import translate_aggregation
from sentry.snuba.tasks import (
create_subscription_in_snuba,
update_subscription_in_snuba,
@@ -39,16 +40,30 @@ def task(self):
def create_subscription(self, status=None, subscription_id=None):
if status is None:
status = self.expected_status
+ dataset = QueryDatasets.EVENTS.value
+ aggregate = QueryAggregations.UNIQUE_USERS
+ query = "hello"
+ time_window = 60
+ resolution = 60
+
+ snuba_query = SnubaQuery.objects.create(
+ dataset=dataset,
+ aggregate=translate_aggregation(aggregate),
+ query=query,
+ time_window=time_window,
+ resolution=resolution,
+ )
return QuerySubscription.objects.create(
+ snuba_query=snuba_query,
status=status.value,
subscription_id=subscription_id,
project=self.project,
type="something",
- dataset=QueryDatasets.EVENTS.value,
- query="hello",
- aggregation=QueryAggregations.UNIQUE_USERS.value,
- time_window=60,
- resolution=60,
+ dataset=dataset,
+ query=query,
+ aggregation=aggregate.value,
+ time_window=time_window,
+ resolution=resolution,
)
def test_no_subscription(self):
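The new creation flow pairs a SnubaQuery row with its QuerySubscription while the legacy columns on QuerySubscription are still written for compatibility; a minimal sketch of that flow, lifted from the test setup above (field values are the test's illustrative ones, and the helper name is hypothetical):

from sentry.snuba.models import QueryDatasets, QuerySubscription, SnubaQuery
from sentry.snuba.subscriptions import translate_aggregation


def create_subscription_pair(project, aggregation, status):
    # Create the shared query definition first, then the per-project subscription
    # that points at it. The duplicated dataset/query/aggregation fields mirror
    # the SnubaQuery until they are dropped from QuerySubscription.
    snuba_query = SnubaQuery.objects.create(
        dataset=QueryDatasets.EVENTS.value,
        aggregate=translate_aggregation(aggregation),
        query="hello",
        time_window=60,
        resolution=60,
    )
    return QuerySubscription.objects.create(
        snuba_query=snuba_query,
        status=status.value,
        project=project,
        type="something",
        dataset=QueryDatasets.EVENTS.value,
        query="hello",
        aggregation=aggregation.value,
        time_window=60,
        resolution=60,
    )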
|
e9e892d6a97bf0983054792978f75f2eb6e33c71
|
2023-12-13 04:45:42
|
Seiji Chew
|
chore(superuser): Use renamed constants (#61645)
| false
|
Use renamed constants (#61645)
|
chore
|
diff --git a/src/sentry/api/endpoints/auth_index.py b/src/sentry/api/endpoints/auth_index.py
index f78eee04404d9a..b373e26d000d18 100644
--- a/src/sentry/api/endpoints/auth_index.py
+++ b/src/sentry/api/endpoints/auth_index.py
@@ -30,9 +30,7 @@
PREFILLED_SU_MODAL_KEY = "prefilled_su_modal"
-DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV = getattr(
- settings, "DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV", False
-)
+DISABLE_SSO_CHECK_FOR_LOCAL_DEV = getattr(settings, "DISABLE_SSO_CHECK_FOR_LOCAL_DEV", False)
DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL = getattr(
settings, "DISABLE_SU_FORM_U2F_CHECK_FOR_LOCAL", False
@@ -133,15 +131,14 @@ def _validate_superuser(
authenticated = (
self._verify_user_via_inputs(validator, request)
- if (not DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV and verify_authenticator)
- or is_self_hosted()
+ if (not DISABLE_SSO_CHECK_FOR_LOCAL_DEV and verify_authenticator) or is_self_hosted()
else True
)
if Superuser.org_id:
if (
not has_completed_sso(request, Superuser.org_id)
- and not DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV
+ and not DISABLE_SSO_CHECK_FOR_LOCAL_DEV
):
request.session[PREFILLED_SU_MODAL_KEY] = request.data
self._reauthenticate_with_sso(request, Superuser.org_id)
@@ -224,7 +221,7 @@ def put(self, request: Request):
else:
verify_authenticator = False
- if not DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV and not is_self_hosted():
+ if not DISABLE_SSO_CHECK_FOR_LOCAL_DEV and not is_self_hosted():
if Superuser.org_id:
superuser_org = organization_service.get_organization_by_id(id=Superuser.org_id)
diff --git a/src/sentry/auth/superuser.py b/src/sentry/auth/superuser.py
index 20d70a8c94c4df..aaa9d6a6dd4300 100644
--- a/src/sentry/auth/superuser.py
+++ b/src/sentry/auth/superuser.py
@@ -65,9 +65,7 @@
ENABLE_SU_UPON_LOGIN_FOR_LOCAL_DEV = getattr(settings, "ENABLE_SU_UPON_LOGIN_FOR_LOCAL_DEV", False)
-DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV = getattr(
- settings, "DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV", False
-)
+DISABLE_SSO_CHECK_FOR_LOCAL_DEV = getattr(settings, "DISABLE_SSO_CHECK_FOR_LOCAL_DEV", False)
def is_active_superuser(request):
@@ -155,7 +153,7 @@ def is_privileged_request(self):
# if we've bound superuser to an organization they must
# have completed SSO to gain status
if self.org_id and not has_completed_sso(self.request, self.org_id):
- if not DISABLE_SSO_CHECK_SU_FORM_FOR_LOCAL_DEV:
+ if not DISABLE_SSO_CHECK_FOR_LOCAL_DEV:
return False, "incomplete-sso"
# if there's no IPs configured, we allow assume its the same as *
if not allowed_ips:
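Both call sites read the renamed flag via getattr with a False default, so production behaviour is unchanged unless the setting is defined; to opt in locally one would add it to the Django settings used for development, for example (the config file path below is an assumption):

# ~/.sentry/sentry.conf.py
# Skip the SSO-completion check when exercising the superuser flow locally.
DISABLE_SSO_CHECK_FOR_LOCAL_DEV = True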
|
c57ca6b3991779a056be2b52ab55e5f9fe4eb43f
|
2023-10-04 04:14:43
|
Scott Cooper
|
ref(ui): Remove recursive type in jsonForm (#57416)
| false
|
Remove recursive type in jsonForm (#57416)
|
ref
|
diff --git a/static/app/components/forms/formPanel.tsx b/static/app/components/forms/formPanel.tsx
index 85df34c6b50d72..e1e67e63346941 100644
--- a/static/app/components/forms/formPanel.tsx
+++ b/static/app/components/forms/formPanel.tsx
@@ -11,7 +11,7 @@ import {sanitizeQuerySelector} from 'sentry/utils/sanitizeQuerySelector';
import {FieldObject, JsonFormObject} from './types';
-type Props = {
+export interface FormPanelProps {
/**
* List of fields to render
*/
@@ -44,7 +44,7 @@ type Props = {
* Panel title
*/
title?: React.ReactNode;
-};
+}
function FormPanel({
additionalFieldProps = {},
@@ -57,7 +57,7 @@ function FormPanel({
collapsible,
initiallyCollapsed = false,
...otherProps
-}: Props) {
+}: FormPanelProps) {
const [collapsed, setCollapse] = useState(initiallyCollapsed);
const handleCollapseToggle = useCallback(() => setCollapse(current => !current), []);
diff --git a/static/app/components/forms/jsonForm.tsx b/static/app/components/forms/jsonForm.tsx
index 26e3754c6cc0e1..a12a47905bb397 100644
--- a/static/app/components/forms/jsonForm.tsx
+++ b/static/app/components/forms/jsonForm.tsx
@@ -8,10 +8,12 @@ import {sanitizeQuerySelector} from 'sentry/utils/sanitizeQuerySelector';
// eslint-disable-next-line no-restricted-imports
import withSentryRouter from 'sentry/utils/withSentryRouter';
-import FormPanel from './formPanel';
-import {Field, FieldObject, JsonFormObject} from './types';
+import FormPanel, {FormPanelProps} from './formPanel';
+import type {Field, FieldObject, JsonFormObject} from './types';
-type Props = {
+interface JsonFormProps
+ extends WithRouterProps,
+ Omit<FormPanelProps, 'highlighted' | 'fields' | 'additionalFieldProps'> {
additionalFieldProps?: {[key: string]: any};
/**
@@ -24,18 +26,29 @@ type Props = {
* Fields that are grouped by "section"
*/
forms?: JsonFormObject[];
-} & WithRouterProps &
- Omit<
- React.ComponentProps<typeof FormPanel>,
- 'highlighted' | 'fields' | 'additionalFieldProps'
- >;
+}
type State = {
// Field name that should be highlighted
highlighted?: string;
};
-class JsonForm extends Component<Props, State> {
+interface ChildFormPanelProps
+ extends Pick<
+ FormPanelProps,
+ | 'access'
+ | 'disabled'
+ | 'features'
+ | 'additionalFieldProps'
+ | 'renderFooter'
+ | 'renderHeader'
+ | 'initiallyCollapsed'
+ | 'collapsible'
+ > {
+ highlighted?: State['highlighted'];
+}
+
+class JsonForm extends Component<JsonFormProps, State> {
state: State = {
// location.hash is optional because of tests.
highlighted: this.props.location?.hash,
@@ -45,7 +58,7 @@ class JsonForm extends Component<Props, State> {
this.scrollToHash();
}
- componentDidUpdate(prevProps: Props) {
+ componentDidUpdate(prevProps: JsonFormProps) {
if (this.props.location && this.props.location.hash !== prevProps.location.hash) {
const hash = this.props.location.hash;
this.scrollToHash(hash);
@@ -74,10 +87,10 @@ class JsonForm extends Component<Props, State> {
}
}
- shouldDisplayForm(fields: FieldObject[]) {
+ shouldDisplayForm(fields: FieldObject[]): boolean {
const fieldsWithVisibleProp = fields.filter(
- field => typeof field !== 'function' && defined(field?.visible)
- ) as Array<Omit<Field, 'visible'> & Required<Pick<Field, 'visible'>>>;
+ (field): field is Field => typeof field !== 'function' && defined(field?.visible)
+ );
if (fields.length === fieldsWithVisibleProp.length) {
const {additionalFieldProps, ...props} = this.props;
@@ -102,17 +115,7 @@ class JsonForm extends Component<Props, State> {
initiallyCollapsed,
}: {
fields: FieldObject[];
- formPanelProps: Pick<
- Props,
- | 'access'
- | 'disabled'
- | 'features'
- | 'additionalFieldProps'
- | 'renderFooter'
- | 'renderHeader'
- | 'initiallyCollapsed'
- > &
- Pick<State, 'highlighted'>;
+ formPanelProps: ChildFormPanelProps;
initiallyCollapsed?: boolean;
title?: React.ReactNode;
}) {
@@ -156,7 +159,7 @@ class JsonForm extends Component<Props, State> {
...otherProps
} = this.props;
- const formPanelProps = {
+ const formPanelProps: ChildFormPanelProps = {
access,
disabled,
features,
|
5ccedfcc47c6117de95116edeb97408da3bf523b
|
2025-01-14 18:12:24
|
Priscila Oliveira
|
ref(quick-start): Remove feature flag check (#83296)
| false
|
Remove feature flag check (#83296)
|
ref
|
diff --git a/static/app/components/onboardingWizard/taskConfig.tsx b/static/app/components/onboardingWizard/taskConfig.tsx
index 574f10fc982381..d2dbf04baaad64 100644
--- a/static/app/components/onboardingWizard/taskConfig.tsx
+++ b/static/app/components/onboardingWizard/taskConfig.tsx
@@ -1,14 +1,10 @@
-import {css} from '@emotion/react';
import styled from '@emotion/styled';
import {openInviteMembersModal} from 'sentry/actionCreators/modal';
import {navigateTo} from 'sentry/actionCreators/navigation';
import type {OnboardingContextProps} from 'sentry/components/onboarding/onboardingContext';
import {filterSupportedTasks} from 'sentry/components/onboardingWizard/filterSupportedTasks';
-import {
- hasQuickStartUpdatesFeature,
- taskIsDone,
-} from 'sentry/components/onboardingWizard/utils';
+import {taskIsDone} from 'sentry/components/onboardingWizard/utils';
import {filterProjects} from 'sentry/components/performanceOnboarding/utils';
import {SidebarPanelKey} from 'sentry/components/sidebar/types';
import {Tooltip} from 'sentry/components/tooltip';
@@ -16,7 +12,6 @@ import {sourceMaps} from 'sentry/data/platformCategories';
import {t} from 'sentry/locale';
import SidebarPanelStore from 'sentry/stores/sidebarPanelStore';
import pulsingIndicatorStyles from 'sentry/styles/pulsingIndicator';
-import {space} from 'sentry/styles/space';
import type {
OnboardingSupplementComponentProps,
OnboardingTask,
@@ -26,9 +21,7 @@ import {OnboardingTaskGroup, OnboardingTaskKey} from 'sentry/types/onboarding';
import type {Organization} from 'sentry/types/organization';
import type {Project} from 'sentry/types/project';
import {isDemoModeEnabled} from 'sentry/utils/demoMode';
-import EventWaiter from 'sentry/utils/eventWaiter';
import normalizeUrl from 'sentry/utils/url/normalizeUrl';
-import useApi from 'sentry/utils/useApi';
import {getPerformanceBaseUrl} from 'sentry/views/performance/utils';
function hasPlatformWithSourceMaps(projects: Project[] | undefined) {
@@ -91,23 +84,6 @@ function getOnboardingInstructionsUrl({projects, organization}: Options) {
return url;
}
-function getMetricAlertUrl({projects, organization}: Options) {
- if (!projects || !projects.length) {
- return `/organizations/${organization.slug}/alerts/rules/`;
- }
- // pick the first project with transaction events if we have that, otherwise just pick the first project
- const firstProjectWithEvents = projects.find(
- project => !!project.firstTransactionEvent
- );
- const project = firstProjectWithEvents ?? projects[0]!;
- return {
- pathname: `/organizations/${organization.slug}/alerts/${project.slug}/wizard/`,
- query: {
- alert_option: 'trans_duration',
- },
- };
-}
-
export function getOnboardingTasks({
organization,
projects,
@@ -169,19 +145,14 @@ export function getOnboardingTasks({
},
];
}
+
return [
{
task: OnboardingTaskKey.FIRST_PROJECT,
- title: hasQuickStartUpdatesFeature(organization)
- ? t('Create your first project')
- : t('Create a project'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Select your platform and install the Sentry SDK by adding a few lines of code to your application. HINT: Set up a separate project for each part of your application (for example, your API server and frontend client).'
- )
- : t(
- "Monitor in seconds by adding a simple lines of code to your project. It's as easy as microwaving leftover pizza."
- ),
+ title: t('Create your first project'),
+ description: t(
+ 'Select your platform and install the Sentry SDK by adding a few lines of code to your application. HINT: Set up a separate project for each part of your application (for example, your API server and frontend client).'
+ ),
skippable: false,
requisites: [],
actionType: 'app',
@@ -192,49 +163,19 @@ export function getOnboardingTasks({
{
task: OnboardingTaskKey.FIRST_EVENT,
title: t('Capture your first error'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Throw an error using our example code to make sure things are working as expected.'
- )
- : t(
- "Time to test it out. Now that you've created a project, capture your first error. We've got an example you can fiddle with."
- ),
+ description: t(
+ 'Throw an error using our example code to make sure things are working as expected.'
+ ),
skippable: false,
requisites: [OnboardingTaskKey.FIRST_PROJECT],
actionType: 'app',
location: getOnboardingInstructionsUrl({projects, organization}),
display: true,
- SupplementComponent: ({
- task,
- onCompleteTask,
- }: OnboardingSupplementComponentProps) => {
- const api = useApi();
-
- if (hasQuickStartUpdatesFeature(organization)) {
- if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
- return null;
- }
- return (
- <EventWaitingIndicator
- text={t('Waiting for error')}
- hasQuickStartUpdatesFeature
- />
- );
+ SupplementComponent: ({task}: OnboardingSupplementComponentProps) => {
+ if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
+ return null;
}
-
- return !!projects?.length &&
- task.requisiteTasks.length === 0 &&
- !task.completionSeen ? (
- <EventWaiter
- api={api}
- organization={organization}
- project={projects[0]!}
- eventType="error"
- onIssueReceived={() => !taskIsDone(task) && onCompleteTask?.()}
- >
- {() => <EventWaitingIndicator text={t('Waiting for error')} />}
- </EventWaiter>
- ) : null;
+ return <EventWaitingIndicator text={t('Waiting for error')} />;
},
group: OnboardingTaskGroup.GETTING_STARTED,
},
@@ -254,18 +195,6 @@ export function getOnboardingTasks({
'You’ve invited members, and their acceptance is pending. Keep an eye out for updates!'
),
},
- {
- task: OnboardingTaskKey.FIRST_INTEGRATION,
- title: t('Install any of our 40+ integrations'),
- description: t(
- 'Get alerted in Slack. Two-way sync issues between Sentry and Jira. Notify Sentry of releases from GitHub, Vercel, or Netlify.'
- ),
- skippable: true,
- requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
- actionType: 'app',
- location: `/settings/${organization.slug}/integrations/`,
- display: !hasQuickStartUpdatesFeature(organization),
- },
{
task: OnboardingTaskKey.REAL_TIME_NOTIFICATIONS,
title: t('Get real-time notifications'),
@@ -276,7 +205,7 @@ export function getOnboardingTasks({
requisites: [],
actionType: 'app',
location: `/settings/${organization.slug}/integrations/?category=chat`,
- display: hasQuickStartUpdatesFeature(organization),
+ display: true,
},
{
task: OnboardingTaskKey.LINK_SENTRY_TO_SOURCE_CODE,
@@ -291,54 +220,28 @@ export function getOnboardingTasks({
pathname: `/settings/${organization.slug}/integrations/`,
query: {category: 'source code management'},
},
- display: hasQuickStartUpdatesFeature(organization),
+ display: true,
group: OnboardingTaskGroup.GETTING_STARTED,
},
{
task: OnboardingTaskKey.SECOND_PLATFORM,
- title: hasQuickStartUpdatesFeature(organization)
- ? t('Add Sentry to other parts of your app')
- : t('Create another project'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Create a new project and install Sentry in other parts of your app—such as the backend, frontend, API server—to quickly see where a problem’s coming from'
- )
- : t(
- 'Easy, right? Don’t stop at one. Set up another project and send it events to keep things running smoothly in both the frontend and backend.'
- ),
+ title: t('Add Sentry to other parts of your app'),
+ description: t(
+ 'Create a new project and install Sentry in other parts of your app—such as the backend, frontend, API server—to quickly see where a problem’s coming from'
+ ),
skippable: true,
requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
actionType: 'app',
location: `/organizations/${organization.slug}/projects/new/`,
display: true,
pendingTitle: t('Awaiting an error for this project.'),
- SupplementComponent: ({task}: OnboardingSupplementComponentProps) => {
- if (hasQuickStartUpdatesFeature(organization)) {
- return null;
- }
- if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
- return null;
- }
- return (
- <EventWaitingIndicator
- text={t('Waiting for error')}
- hasQuickStartUpdatesFeature
- />
- );
- },
},
{
task: OnboardingTaskKey.FIRST_TRANSACTION,
- title: hasQuickStartUpdatesFeature(organization)
- ? t('Set up Tracing')
- : t('Boost performance'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Instrument tracing in your frontend and backend to identify application performance issues and debug errors across your stack.'
- )
- : t(
- "Don't keep users waiting. Trace transactions, investigate spans and cross-reference related issues for those mission-critical endpoints."
- ),
+ title: t('Set up Tracing'),
+ description: t(
+ 'Instrument tracing in your frontend and backend to identify application performance issues and debug errors across your stack.'
+ ),
skippable: true,
requisites: [OnboardingTaskKey.FIRST_PROJECT],
actionType: 'action',
@@ -381,61 +284,21 @@ export function getOnboardingTasks({
);
},
display: true,
- SupplementComponent: ({
- task,
- onCompleteTask,
- }: OnboardingSupplementComponentProps) => {
- const api = useApi();
-
- if (hasQuickStartUpdatesFeature(organization)) {
- if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
- return null;
- }
- return <EventWaitingIndicator hasQuickStartUpdatesFeature />;
+ SupplementComponent: ({task}: OnboardingSupplementComponentProps) => {
+ if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
+ return null;
}
-
- return !!projects?.length &&
- task.requisiteTasks.length === 0 &&
- !task.completionSeen ? (
- <EventWaiter
- api={api}
- organization={organization}
- project={projects[0]!}
- eventType="transaction"
- onIssueReceived={() => !taskIsDone(task) && onCompleteTask?.()}
- >
- {() => <EventWaitingIndicator />}
- </EventWaiter>
- ) : null;
+ return <EventWaitingIndicator />;
},
},
{
- task: OnboardingTaskKey.USER_CONTEXT,
- title: t('Get more user context'),
+ task: OnboardingTaskKey.SESSION_REPLAY,
+ title: t('Set up Session Replay'),
description: t(
- 'Enable us to pinpoint which users are suffering from that bad code, so you can debug the problem more swiftly and maybe even apologize for it.'
+ 'Get video-like reproductions of user sessions to see what happened before, during, and after an error or performance issue occurred.'
),
skippable: true,
requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
- actionType: 'external',
- location:
- 'https://docs.sentry.io/platform-redirect/?next=/enriching-events/identify-user/',
- display: !hasQuickStartUpdatesFeature(organization),
- },
- {
- task: OnboardingTaskKey.SESSION_REPLAY,
- title: hasQuickStartUpdatesFeature(organization)
- ? t('Set up Session Replay')
- : t('See a video-like reproduction'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Get video-like reproductions of user sessions to see what happened before, during, and after an error or performance issue occurred.'
- )
- : t(
- 'Get to the root cause of error or latency issues faster by seeing all the technical details related to those issues in video-like reproductions of your user sessions.'
- ),
- skippable: true,
- requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
actionType: 'action',
action: router => {
router.push(
@@ -451,50 +314,20 @@ export function getOnboardingTasks({
}, 0);
},
display: organization.features?.includes('session-replay'),
- SupplementComponent: ({
- task,
- onCompleteTask,
- }: OnboardingSupplementComponentProps) => {
- const api = useApi();
-
- if (hasQuickStartUpdatesFeature(organization)) {
- if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
- return null;
- }
-
- return (
- <EventWaitingIndicator
- text={t('Waiting for user session')}
- hasQuickStartUpdatesFeature
- />
- );
+ SupplementComponent: ({task}: OnboardingSupplementComponentProps) => {
+ if (!projects?.length || task.requisiteTasks.length > 0 || taskIsDone(task)) {
+ return null;
}
- return !!projects?.length &&
- task.requisiteTasks.length === 0 &&
- !task.completionSeen ? (
- <EventWaiter
- api={api}
- organization={organization}
- project={projects[0]!}
- eventType="replay"
- onIssueReceived={() => !taskIsDone(task) && onCompleteTask?.()}
- >
- {() => <EventWaitingIndicator text={t('Waiting for user session')} />}
- </EventWaiter>
- ) : null;
+ return <EventWaitingIndicator text={t('Waiting for user session')} />;
},
},
{
task: OnboardingTaskKey.RELEASE_TRACKING,
title: t('Track releases'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Identify which release introduced an issue and track release health with crash analytics, errors, and adoption data.'
- )
- : t(
- 'Take an in-depth look at the health of each and every release with crash analytics, errors, related issues and suspect commits.'
- ),
+ description: t(
+ 'Identify which release introduced an issue and track release health with crash analytics, errors, and adoption data.'
+ ),
skippable: true,
requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
actionType: 'app',
@@ -504,16 +337,10 @@ export function getOnboardingTasks({
},
{
task: OnboardingTaskKey.SOURCEMAPS,
- title: hasQuickStartUpdatesFeature(organization)
- ? t('Unminify your code')
- : t('Upload source maps'),
- description: hasQuickStartUpdatesFeature(organization)
- ? t(
- 'Enable readable stack traces in Sentry errors by uploading your source maps.'
- )
- : t(
- 'Deminify Javascript source code to debug with context. Seeing code in its original form will help you debunk the ghosts of errors past.'
- ),
+ title: t('Unminify your code'),
+ description: t(
+ 'Enable readable stack traces in Sentry errors by uploading your source maps.'
+ ),
skippable: true,
requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_EVENT],
actionType: 'external',
@@ -533,21 +360,6 @@ export function getOnboardingTasks({
display: true,
group: OnboardingTaskGroup.GETTING_STARTED,
},
- {
- task: OnboardingTaskKey.METRIC_ALERT,
- title: t('Create a Performance Alert'),
- description: t(
- 'See slow fast with performance alerts. Set up alerts for notifications about slow page load times, API latency, or when throughput significantly deviates from normal.'
- ),
- skippable: true,
- requisites: [OnboardingTaskKey.FIRST_PROJECT, OnboardingTaskKey.FIRST_TRANSACTION],
- actionType: 'app',
- location: getMetricAlertUrl({projects, organization, onboardingContext}),
- // Use `features?.` because getsentry has a different `Organization` type/payload
- display:
- organization.features?.includes('incidents') &&
- !hasQuickStartUpdatesFeature(organization),
- },
];
}
@@ -578,56 +390,28 @@ export function getMergedTasks({organization, projects, onboardingContext}: Opti
}));
}
-const PulsingIndicator = styled('div')<{
- hasQuickStartUpdatesFeature?: boolean;
-}>`
+const PulsingIndicator = styled('div')`
${pulsingIndicatorStyles};
- ${p =>
- p.hasQuickStartUpdatesFeature
- ? css`
- margin: 0;
- `
- : css`
- margin-right: ${space(1)};
- `}
+ margin: 0;
`;
const EventWaitingIndicator = styled(
({
- hasQuickStartUpdatesFeature: quickStartUpdatesFeature,
text,
...p
}: React.HTMLAttributes<HTMLDivElement> & {
- hasQuickStartUpdatesFeature?: boolean;
text?: string;
}) => {
- if (quickStartUpdatesFeature) {
- return (
- <div {...p}>
- <Tooltip title={text || t('Waiting for event')}>
- <PulsingIndicator hasQuickStartUpdatesFeature />
- </Tooltip>
- </div>
- );
- }
return (
<div {...p}>
- <PulsingIndicator />
- {text || t('Waiting for event')}
+ <Tooltip title={text || t('Waiting for event')}>
+ <PulsingIndicator />
+ </Tooltip>
</div>
);
}
)`
display: flex;
align-items: center;
- ${p =>
- p.hasQuickStartUpdatesFeature
- ? css`
- height: 16px;
- `
- : css`
- flex-grow: 1;
- font-size: ${p.theme.fontSizeMedium};
- color: ${p.theme.pink400};
- `}
+ height: 16px;
`;
diff --git a/static/app/components/onboardingWizard/utils.tsx b/static/app/components/onboardingWizard/utils.tsx
index 1374b3fc828b03..10935b1ca44ea9 100644
--- a/static/app/components/onboardingWizard/utils.tsx
+++ b/static/app/components/onboardingWizard/utils.tsx
@@ -1,5 +1,4 @@
import type {OnboardingTask} from 'sentry/types/onboarding';
-import type {Organization} from 'sentry/types/organization';
export const taskIsDone = (task: OnboardingTask) =>
['complete', 'skipped'].includes(task.status);
@@ -12,7 +11,3 @@ export const findActiveTasks = (task: OnboardingTask) =>
export const findUpcomingTasks = (task: OnboardingTask) =>
task.requisiteTasks.length > 0 && !findCompleteTasks(task);
-
-export function hasQuickStartUpdatesFeature(organization: Organization) {
- return organization.features?.includes('quick-start-updates');
-}
diff --git a/static/app/components/performanceOnboarding/sidebar.spec.tsx b/static/app/components/performanceOnboarding/sidebar.spec.tsx
index 712c4f8bd491ee..dea14a21169932 100644
--- a/static/app/components/performanceOnboarding/sidebar.spec.tsx
+++ b/static/app/components/performanceOnboarding/sidebar.spec.tsx
@@ -56,6 +56,14 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
body: [broadcast],
});
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/onboarding-tasks/`,
+ method: 'GET',
+ body: {
+ onboardingTasks: [],
+ },
+ });
+
const statusPageData: StatuspageIncident[] = [];
jest
.spyOn(incidentsHook, 'useServiceIncidents')
@@ -68,10 +76,11 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
MockApiClient.clearMockResponses();
});
- it('displays boost performance card', async function () {
+ it('displays "Set up Tracing" card', async function () {
ProjectsStore.loadInitialData([
ProjectFixture({platform: 'javascript-react', firstTransactionEvent: false}),
]);
+
renderSidebar({
organization: {
...organization,
@@ -79,7 +88,7 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
},
});
- const quickStart = await screen.findByText('Quick Start');
+ const quickStart = await screen.findByText('Onboarding');
expect(quickStart).toBeInTheDocument();
await userEvent.click(quickStart);
@@ -88,11 +97,12 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
expect(sidebar).toBeInTheDocument();
expect(screen.getByText('Capture your first error')).toBeInTheDocument();
- expect(screen.getByText('Level Up')).toBeInTheDocument();
- expect(screen.getByText('Boost performance')).toBeInTheDocument();
+
+ await userEvent.click(screen.getByText('Beyond the Basics'));
+ expect(await screen.findByText('Set up Tracing')).toBeInTheDocument();
await userEvent.click(quickStart);
- expect(screen.queryByText('Boost performance')).not.toBeInTheDocument();
+ expect(screen.queryByText('Set up Tracing')).not.toBeInTheDocument();
});
it('checklist feature supported by platform but disabled', async function () {
@@ -108,7 +118,7 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
window.open = jest.fn().mockImplementation(() => true);
- const quickStart = await screen.findByText('Quick Start');
+ const quickStart = await screen.findByText('Onboarding');
expect(quickStart).toBeInTheDocument();
await userEvent.click(quickStart);
@@ -117,11 +127,11 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
expect(sidebar).toBeInTheDocument();
expect(screen.getByText('Capture your first error')).toBeInTheDocument();
- expect(screen.getByText('Level Up')).toBeInTheDocument();
- expect(screen.getByText('Boost performance')).toBeInTheDocument();
- const performanceCard = screen.getByTestId('setup_transactions');
- await userEvent.click(performanceCard);
+ await userEvent.click(screen.getByText('Beyond the Basics'));
+ expect(await screen.findByText('Set up Tracing')).toBeInTheDocument();
+
+ await userEvent.click(screen.getByText('Set up Tracing'));
expect(window.open).toHaveBeenCalledWith(
'https://docs.sentry.io/product/performance/getting-started/',
'_blank'
@@ -140,7 +150,7 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
});
window.open = jest.fn().mockImplementation(() => true);
- const quickStart = await screen.findByText('Quick Start');
+ const quickStart = await screen.findByText('Onboarding');
expect(quickStart).toBeInTheDocument();
await userEvent.click(quickStart);
@@ -149,11 +159,11 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
expect(sidebar).toBeInTheDocument();
expect(screen.getByText('Capture your first error')).toBeInTheDocument();
- expect(screen.getByText('Level Up')).toBeInTheDocument();
- expect(screen.getByText('Boost performance')).toBeInTheDocument();
- const performanceCard = screen.getByTestId('setup_transactions');
- await userEvent.click(performanceCard);
+ await userEvent.click(screen.getByText('Beyond the Basics'));
+ expect(await screen.findByText('Set up Tracing')).toBeInTheDocument();
+
+ await userEvent.click(screen.getByText('Set up Tracing'));
expect(window.open).not.toHaveBeenCalled();
expect(router.push).toHaveBeenCalledWith(
'/organizations/org-slug/performance/?project=2#performance-sidequest'
@@ -172,7 +182,7 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
});
window.open = jest.fn().mockImplementation(() => true);
- const quickStart = await screen.findByText('Quick Start');
+ const quickStart = await screen.findByText('Onboarding');
expect(quickStart).toBeInTheDocument();
await userEvent.click(quickStart);
@@ -181,11 +191,10 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
expect(sidebar).toBeInTheDocument();
expect(screen.getByText('Capture your first error')).toBeInTheDocument();
- expect(screen.getByText('Level Up')).toBeInTheDocument();
- expect(screen.getByText('Boost performance')).toBeInTheDocument();
- const performanceCard = screen.getByTestId('setup_transactions');
+ await userEvent.click(screen.getByText('Beyond the Basics'));
+ expect(await screen.findByText('Set up Tracing')).toBeInTheDocument();
- await userEvent.click(performanceCard);
+ await userEvent.click(screen.getByText('Set up Tracing'));
expect(window.open).not.toHaveBeenCalled();
expect(router.push).toHaveBeenCalledWith(
'/organizations/org-slug/performance/?project=2#performance-sidequest'
@@ -207,7 +216,7 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
window.open = jest.fn().mockImplementation(() => true);
- const quickStart = await screen.findByText('Quick Start');
+ const quickStart = await screen.findByText('Onboarding');
expect(quickStart).toBeInTheDocument();
await userEvent.click(quickStart);
@@ -216,8 +225,8 @@ describe('Sidebar > Performance Onboarding Checklist', function () {
expect(sidebar).toBeInTheDocument();
expect(screen.getByText('Capture your first error')).toBeInTheDocument();
- expect(screen.getByText('Level Up')).toBeInTheDocument();
- expect(screen.queryByText('Boost performance')).not.toBeInTheDocument();
+ await userEvent.click(screen.getByText('Beyond the Basics'));
+ expect(screen.queryByText('Set up Tracing')).not.toBeInTheDocument();
});
it('displays checklist', async function () {
diff --git a/static/app/components/sidebar/index.spec.tsx b/static/app/components/sidebar/index.spec.tsx
index ba4e9463a90470..3269a930f3e007 100644
--- a/static/app/components/sidebar/index.spec.tsx
+++ b/static/app/components/sidebar/index.spec.tsx
@@ -89,6 +89,13 @@ describe('Sidebar', function () {
url: `/organizations/${organization.slug}/sdk-updates/`,
body: [],
});
+ MockApiClient.addMockResponse({
+ url: `/organizations/${organization.slug}/onboarding-tasks/`,
+ method: 'GET',
+ body: {
+ onboardingTasks: [],
+ },
+ });
});
afterEach(function () {
@@ -196,7 +203,7 @@ describe('Sidebar', function () {
organization: {...organization, features: ['onboarding']},
});
- const quickStart = await screen.findByText('Quick Start');
+ const quickStart = await screen.findByText('Onboarding');
expect(quickStart).toBeInTheDocument();
await userEvent.click(quickStart);
diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx
index 95d911fc7898dc..58759655d9f87d 100644
--- a/static/app/components/sidebar/index.tsx
+++ b/static/app/components/sidebar/index.tsx
@@ -11,7 +11,6 @@ import FeedbackOnboardingSidebar from 'sentry/components/feedback/feedbackOnboar
import Hook from 'sentry/components/hook';
import {OnboardingContext} from 'sentry/components/onboarding/onboardingContext';
import {getMergedTasks} from 'sentry/components/onboardingWizard/taskConfig';
-import {hasQuickStartUpdatesFeature} from 'sentry/components/onboardingWizard/utils';
import PerformanceOnboardingSidebar from 'sentry/components/performanceOnboarding/sidebar';
import ReplaysOnboardingSidebar from 'sentry/components/replaysOnboarding/sidebar';
import {
@@ -83,7 +82,6 @@ import {ProfilingOnboardingSidebar} from '../profiling/profilingOnboardingSideba
import {Broadcasts} from './broadcasts';
import SidebarHelp from './help';
-import OnboardingStatus from './onboardingStatus';
import ServiceIncidents from './serviceIncidents';
import {SidebarAccordion} from './sidebarAccordion';
import SidebarDropdown from './sidebarDropdown';
@@ -623,22 +621,12 @@ function Sidebar() {
{...sidebarItemProps}
/>
<SidebarSection hasNewNav={hasNewNav} noMargin noPadding>
- {hasQuickStartUpdatesFeature(organization) ? (
- <NewOnboardingStatus
- currentPanel={activePanel}
- onShowPanel={() => togglePanel(SidebarPanelKey.ONBOARDING_WIZARD)}
- hidePanel={hidePanel}
- {...sidebarItemProps}
- />
- ) : (
- <OnboardingStatus
- org={organization}
- currentPanel={activePanel}
- onShowPanel={() => togglePanel(SidebarPanelKey.ONBOARDING_WIZARD)}
- hidePanel={hidePanel}
- {...sidebarItemProps}
- />
- )}
+ <NewOnboardingStatus
+ currentPanel={activePanel}
+ onShowPanel={() => togglePanel(SidebarPanelKey.ONBOARDING_WIZARD)}
+ hidePanel={hidePanel}
+ {...sidebarItemProps}
+ />
</SidebarSection>
<SidebarSection hasNewNav={hasNewNav} centeredItems={horizontal}>
|
d81e1ba6b2f24cb5f1c094e9170bd76a79bdeb0f
|
2020-09-18 03:46:14
|
Dan Fuller
|
fix(py3): Fix test_organization_events_stats tests in py3 (#20833)
| false
|
Fix test_organization_events_stats tests in py3 (#20833)
|
fix
|
diff --git a/tests/snuba/api/endpoints/test_organization_events_stats.py b/tests/snuba/api/endpoints/test_organization_events_stats.py
index 97256b9306e52b..97346fcaa4568b 100644
--- a/tests/snuba/api/endpoints/test_organization_events_stats.py
+++ b/tests/snuba/api/endpoints/test_organization_events_stats.py
@@ -74,10 +74,7 @@ def test_simple(self):
)
assert response.status_code == 200, response.content
- assert [attrs for time, attrs in response.data["data"]] == [
- [{"count": 1}],
- [{"count": 2}],
- ]
+ assert [attrs for time, attrs in response.data["data"]] == [[{"count": 1}], [{"count": 2}]]
def test_no_projects(self):
org = self.create_organization(owner=self.user)
@@ -242,7 +239,7 @@ def test_throughput_epm_hour_rollup(self):
for minute in range(count):
self.store_event(
data={
- "event_id": six.binary_type(six.text_type(uuid.uuid1()).encode("ascii")),
+ "event_id": six.text_type(uuid.uuid1()),
"message": "very bad",
"timestamp": iso_format(
self.day_ago + timedelta(hours=hour, minutes=minute)
@@ -281,7 +278,7 @@ def test_throughput_epm_day_rollup(self):
for minute in range(count):
self.store_event(
data={
- "event_id": six.binary_type(six.text_type(uuid.uuid1()).encode("ascii")),
+ "event_id": six.text_type(uuid.uuid1()),
"message": "very bad",
"timestamp": iso_format(
self.day_ago + timedelta(hours=hour, minutes=minute)
@@ -318,7 +315,7 @@ def test_throughput_eps_minute_rollup(self):
for second in range(count):
self.store_event(
data={
- "event_id": six.binary_type(six.text_type(uuid.uuid1()).encode("ascii")),
+ "event_id": six.text_type(uuid.uuid1()),
"message": "very bad",
"timestamp": iso_format(
self.day_ago + timedelta(minutes=minute, seconds=second)
@@ -357,7 +354,7 @@ def test_throughput_eps_no_rollup(self):
for second in range(count):
self.store_event(
data={
- "event_id": six.binary_type(six.text_type(uuid.uuid1()).encode("ascii")),
+ "event_id": six.text_type(uuid.uuid1()),
"message": "very bad",
"timestamp": iso_format(
self.day_ago + timedelta(minutes=minute, seconds=second)
@@ -530,10 +527,7 @@ def test_large_interval_no_drop_values(self):
},
)
assert response.status_code == 200
- assert [attrs for time, attrs in response.data["data"]] == [
- [{"count": 0}],
- [{"count": 1}],
- ]
+ assert [attrs for time, attrs in response.data["data"]] == [[{"count": 0}], [{"count": 1}]]
@mock.patch("sentry.snuba.discover.timeseries_query", return_value={})
def test_multiple_yaxis_only_one_query(self, mock_query):
@@ -750,15 +744,15 @@ def test_top_events_limits(self):
}
with self.feature("organizations:discover-basic"):
data["topEvents"] = 50
- response = self.client.get(self.url, data, format="json",)
+ response = self.client.get(self.url, data, format="json")
assert response.status_code == 400
data["topEvents"] = 0
- response = self.client.get(self.url, data, format="json",)
+ response = self.client.get(self.url, data, format="json")
assert response.status_code == 400
data["topEvents"] = "a"
- response = self.client.get(self.url, data, format="json",)
+ response = self.client.get(self.url, data, format="json")
assert response.status_code == 400
def test_top_events_with_projects(self):
@@ -851,10 +845,7 @@ def test_top_events_with_functions(self):
results = data[self.transaction.transaction]
assert results["order"] == 0
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 3}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
def test_top_events_with_functions_on_different_transactions(self):
""" Transaction2 has less events, but takes longer so order should be self.transaction then transaction2 """
@@ -885,17 +876,11 @@ def test_top_events_with_functions_on_different_transactions(self):
results = data[self.transaction.transaction]
assert results["order"] == 1
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 3}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
results = data[transaction2.transaction]
assert results["order"] == 0
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 1}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
def test_top_events_with_query(self):
transaction_data = load_data("transaction")
@@ -1078,10 +1063,7 @@ def test_top_events_with_int(self):
results = data[",".join([self.transaction.transaction, "120000"])]
assert results["order"] == 0
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 3}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
def test_top_events_with_user(self):
with self.feature("organizations:discover-basic"):
@@ -1203,10 +1185,7 @@ def test_top_events_one_field_with_none(self):
assert len(data) == 1
results = data["unknown"]
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 3}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 3}], [{"count": 0}]]
assert results["order"] == 0
def test_top_events_with_error_handled(self):
@@ -1252,23 +1231,14 @@ def test_top_events_with_error_handled(self):
assert len(data) == 3
results = data[""]
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 22}],
- [{"count": 6}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 22}], [{"count": 6}]]
assert results["order"] == 0
results = data["1"]
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 1}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
results = data["0"]
- assert [attrs for time, attrs in results["data"]] == [
- [{"count": 1}],
- [{"count": 0}],
- ]
+ assert [attrs for time, attrs in results["data"]] == [[{"count": 1}], [{"count": 0}]]
def test_top_events_with_aggregate_condition(self):
with self.feature("organizations:discover-basic"):
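The event_id change matters because six.binary_type is bytes on Python 3, so the old construction produced a bytes value where a text string is expected, which is the likely source of the Python 3 failures these edits address; a small standalone sketch of the difference, using only the stdlib and six:

import uuid

import six

# Old construction: text -> ascii bytes. Under Python 3 this yields a bytes
# object rather than a text event_id.
legacy_event_id = six.binary_type(six.text_type(uuid.uuid1()).encode("ascii"))
assert isinstance(legacy_event_id, bytes)

# Fixed construction: stays a text string on both Python 2 and Python 3.
event_id = six.text_type(uuid.uuid1())
assert isinstance(event_id, six.text_type)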
|
0112821445991cb3cfd33959a72fed51aaba882a
|
2025-03-05 16:03:59
|
Dominik Dorfmeister
|
feat(ui): chonkify InputGroup (#86282)
| false
|
chonkify InputGroup (#86282)
|
feat
|
diff --git a/static/app/components/core/input/chonk.tsx b/static/app/components/core/input/index.chonk.tsx
similarity index 100%
rename from static/app/components/core/input/chonk.tsx
rename to static/app/components/core/input/index.chonk.tsx
diff --git a/static/app/components/core/input/index.tsx b/static/app/components/core/input/index.tsx
index 6ab6d7441522f7..76914beac23165 100644
--- a/static/app/components/core/input/index.tsx
+++ b/static/app/components/core/input/index.tsx
@@ -4,7 +4,7 @@ import type {Theme} from '@emotion/react';
import {css} from '@emotion/react';
import styled from '@emotion/styled';
-import {chonkInputStyles} from 'sentry/components/core/input/chonk';
+import {chonkInputStyles} from 'sentry/components/core/input/index.chonk';
import type {FormSize} from 'sentry/utils/theme';
export interface InputStylesProps {
diff --git a/static/app/components/core/input/inputGroup.chonk.tsx b/static/app/components/core/input/inputGroup.chonk.tsx
new file mode 100644
index 00000000000000..c9c7fda5d7f71a
--- /dev/null
+++ b/static/app/components/core/input/inputGroup.chonk.tsx
@@ -0,0 +1,80 @@
+import {css, type DO_NOT_USE_ChonkTheme} from '@emotion/react';
+import styled from '@emotion/styled';
+
+import {Input} from 'sentry/components/core/input/index';
+import Textarea from 'sentry/components/forms/controls/textarea';
+import {space} from 'sentry/styles/space';
+import type {FormSize, StrictCSSObject} from 'sentry/utils/theme';
+import {chonkStyled} from 'sentry/utils/theme/theme.chonk';
+
+export interface InputStyleProps {
+ leadingWidth?: number;
+ size?: FormSize;
+ trailingWidth?: number;
+}
+
+export const InputItemsWrap = styled('div')`
+ display: grid;
+ grid-auto-flow: column;
+ align-items: center;
+ gap: ${space(1)};
+
+ position: absolute;
+ top: 50%;
+ transform: translateY(-50%);
+`;
+
+const chonkItemsPadding = {
+ md: 8,
+ sm: 6,
+ xs: 4,
+} satisfies Record<FormSize, number>;
+
+const chonkInputStyles = ({
+ leadingWidth,
+ trailingWidth,
+ size = 'md',
+ theme,
+}: InputStyleProps & {theme: DO_NOT_USE_ChonkTheme}): StrictCSSObject => css`
+ ${leadingWidth &&
+ `
+ padding-left: calc(
+ ${theme.formPadding[size].paddingLeft}px
+ + ${chonkItemsPadding[size]}px
+ + ${leadingWidth}px
+ );
+ `}
+
+ ${trailingWidth &&
+ `
+ padding-right: calc(
+ ${theme.formPadding[size].paddingRight}px
+ + ${chonkItemsPadding[size]}px
+ + ${trailingWidth}px
+ );
+ `}
+`;
+
+export const ChonkStyledInput = chonkStyled(Input)<InputStyleProps>`
+ ${chonkInputStyles}
+`;
+
+export const ChonkStyledTextArea = chonkStyled(Textarea)<InputStyleProps>`
+ ${chonkInputStyles}
+`;
+
+export const ChonkStyledLeadingItemsWrap = chonkStyled(InputItemsWrap)<{
+ size: FormSize;
+ disablePointerEvents?: boolean;
+}>`
+ left: ${p => p.theme.formPadding[p.size].paddingLeft + 1}px;
+ ${p => p.disablePointerEvents && `pointer-events: none;`}
+ `;
+
+export const ChonkStyledTrailingItemsWrap = chonkStyled(InputItemsWrap)<{
+ size: FormSize;
+ disablePointerEvents?: boolean;
+}>`
+ right: ${p => p.theme.formPadding[p.size].paddingRight + 1}px;
+ ${p => p.disablePointerEvents && `pointer-events: none;`}
+ `;
diff --git a/static/app/components/core/input/inputGroup.stories.tsx b/static/app/components/core/input/inputGroup.stories.tsx
index 2d7c42170d1439..0e7d82300f4be3 100644
--- a/static/app/components/core/input/inputGroup.stories.tsx
+++ b/static/app/components/core/input/inputGroup.stories.tsx
@@ -2,7 +2,6 @@ import {Fragment} from 'react';
import styled from '@emotion/styled';
import {Button} from 'sentry/components/button';
-import {Input} from 'sentry/components/core/input/index';
import JSXNode from 'sentry/components/stories/jsxNode';
import Matrix from 'sentry/components/stories/matrix';
import {IconAttachment, IconSearch, IconSettings} from 'sentry/icons';
@@ -21,15 +20,46 @@ export default storyBook('InputGroup', (story, APIReference) => {
return (
<Fragment>
<p>
- The <JSXNode name="Input" /> component comes in different sizes:
+ The <JSXNode name="InputGroup" /> component comes in different sizes:
</p>
- <InputGroup>
- <InputGroup.LeadingItems disablePointerEvents>
- <IconSearch />
- </InputGroup.LeadingItems>
- <InputGroup.Input placeholder="Search" />
- </InputGroup>
- <Input placeholder="hodl" />
+ <Grid>
+ <div>
+ <code>md (default)</code>
+ <InputGroup>
+ <InputGroup.LeadingItems disablePointerEvents>
+ <IconSearch />
+ </InputGroup.LeadingItems>
+ <InputGroup.Input size="md" placeholder="Search" />
+ <InputGroup.TrailingItems disablePointerEvents>
+ <IconAttachment />
+ </InputGroup.TrailingItems>
+ </InputGroup>
+ </div>
+ <div>
+ <code>sm</code>
+ <InputGroup>
+ <InputGroup.LeadingItems disablePointerEvents>
+ <IconSearch />
+ </InputGroup.LeadingItems>
+ <InputGroup.Input size="sm" placeholder="Search" />
+ <InputGroup.TrailingItems disablePointerEvents>
+ <IconAttachment />
+ </InputGroup.TrailingItems>
+ </InputGroup>
+ </div>
+ <div>
+ <code>xs</code>
+ <InputGroup>
+ <InputGroup.LeadingItems disablePointerEvents>
+ <IconSearch />
+ </InputGroup.LeadingItems>
+ <InputGroup.Input size="xs" placeholder="Search" />
+ <InputGroup.TrailingItems disablePointerEvents>
+ <IconAttachment />
+ </InputGroup.TrailingItems>
+ </InputGroup>
+ </div>
+ </Grid>
</Fragment>
);
});
diff --git a/static/app/components/core/input/inputGroup.tsx b/static/app/components/core/input/inputGroup.tsx
index 0efa1ce369e8a8..b63e088cf0ef9a 100644
--- a/static/app/components/core/input/inputGroup.tsx
+++ b/static/app/components/core/input/inputGroup.tsx
@@ -13,10 +13,18 @@ import styled from '@emotion/styled';
import type {InputProps} from 'sentry/components/core/input';
import {Input as _Input} from 'sentry/components/core/input';
+import {
+ ChonkStyledInput,
+ ChonkStyledLeadingItemsWrap,
+ ChonkStyledTextArea,
+ ChonkStyledTrailingItemsWrap,
+ InputItemsWrap,
+ type InputStyleProps,
+} from 'sentry/components/core/input/inputGroup.chonk';
import type {TextAreaProps} from 'sentry/components/forms/controls/textarea';
import _TextArea from 'sentry/components/forms/controls/textarea';
-import {space} from 'sentry/styles/space';
import type {FormSize} from 'sentry/utils/theme';
+import {withChonk} from 'sentry/utils/theme/withChonk';
interface InputContext {
/**
@@ -207,23 +215,6 @@ export const InputGroupWrap = styled('div')<{disabled?: boolean}>`
${p => p.disabled && `color: ${p.theme.disabled};`};
`;
-const InputItemsWrap = styled('div')`
- display: grid;
- grid-auto-flow: column;
- align-items: center;
- gap: ${space(1)};
-
- position: absolute;
- top: 50%;
- transform: translateY(-50%);
-`;
-
-interface InputStyleProps {
- leadingWidth?: number;
- size?: FormSize;
- trailingWidth?: number;
-}
-
const getInputStyles = ({
leadingWidth,
trailingWidth,
@@ -247,26 +238,38 @@ const getInputStyles = ({
`}
`;
-const StyledInput = styled(_Input)<InputStyleProps>`
- ${getInputStyles}
-`;
+const StyledInput = withChonk(
+ styled(_Input)<InputStyleProps>`
+ ${getInputStyles}
+ `,
+ ChonkStyledInput
+);
-const StyledTextArea = styled(_TextArea)<InputStyleProps>`
- ${getInputStyles}
-`;
+const StyledTextArea = withChonk(
+ styled(_TextArea)<InputStyleProps>`
+ ${getInputStyles}
+ `,
+ ChonkStyledTextArea
+);
-const InputLeadingItemsWrap = styled(InputItemsWrap)<{
- size: FormSize;
- disablePointerEvents?: boolean;
-}>`
- left: ${p => p.theme.formPadding[p.size].paddingLeft + 1}px;
- ${p => p.disablePointerEvents && `pointer-events: none;`}
-`;
+const InputLeadingItemsWrap = withChonk(
+ styled(InputItemsWrap)<{
+ size: FormSize;
+ disablePointerEvents?: boolean;
+ }>`
+ left: ${p => p.theme.formPadding[p.size].paddingLeft + 1}px;
+ ${p => p.disablePointerEvents && `pointer-events: none;`}
+ `,
+ ChonkStyledLeadingItemsWrap
+);
-const InputTrailingItemsWrap = styled(InputItemsWrap)<{
- size: FormSize;
- disablePointerEvents?: boolean;
-}>`
- right: ${p => p.theme.formPadding[p.size].paddingRight * 0.75 + 1}px;
- ${p => p.disablePointerEvents && `pointer-events: none;`}
-`;
+const InputTrailingItemsWrap = withChonk(
+ styled(InputItemsWrap)<{
+ size: FormSize;
+ disablePointerEvents?: boolean;
+ }>`
+ right: ${p => p.theme.formPadding[p.size].paddingRight * 0.75 + 1}px;
+ ${p => p.disablePointerEvents && `pointer-events: none;`}
+ `,
+ ChonkStyledTrailingItemsWrap
+);
|
0823498dd73a497b69714ac704cea2cf6f65d2bf
|
2023-10-13 00:58:23
|
Dominik Buszowiecki
|
feat(browser-starfish): add sample sidebar and sorting of resource summary (#57982)
| false
|
add sample sidebar and sorting of resource summary (#57982)
|
feat
|
diff --git a/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx b/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx
index d5b322d14193f9..59e4275e543535 100644
--- a/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx
+++ b/static/app/views/performance/browser/resources/resourceSummaryPage/index.tsx
@@ -8,6 +8,7 @@ import PageFilterBar from 'sentry/components/organizations/pageFilterBar';
import {ProjectPageFilter} from 'sentry/components/organizations/projectPageFilter';
import {t} from 'sentry/locale';
import {RateUnits} from 'sentry/utils/discover/fields';
+import {useLocation} from 'sentry/utils/useLocation';
import useOrganization from 'sentry/utils/useOrganization';
import {useParams} from 'sentry/utils/useParams';
import {normalizeUrl} from 'sentry/utils/withDomainRequired';
@@ -21,10 +22,14 @@ import {useSpanMetrics} from 'sentry/views/starfish/queries/useSpanMetrics';
import {SpanFunction, SpanMetricsField} from 'sentry/views/starfish/types';
import {DataTitles, getThroughputTitle} from 'sentry/views/starfish/views/spans/types';
import {Block, BlockContainer} from 'sentry/views/starfish/views/spanSummaryPage/block';
+import {SampleList} from 'sentry/views/starfish/views/spanSummaryPage/sampleList';
function ResourceSummary() {
const organization = useOrganization();
const {groupId} = useParams();
+ const {
+ query: {transaction},
+ } = useLocation();
const {data: spanMetrics} = useSpanMetrics(groupId, {}, [
'avg(span.self_time)',
'spm()',
@@ -90,6 +95,7 @@ function ResourceSummary() {
</HeaderContainer>
<ResourceSummaryCharts groupId={groupId} />
<ResourceSummaryTable />
+ <SampleList groupId={groupId} transactionName={transaction as string} />
</Layout.Main>
</Layout.Body>
</ModulePageProviders>
diff --git a/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx b/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx
index 31d9546b7b3601..a9f395f485ac0f 100644
--- a/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx
+++ b/static/app/views/performance/browser/resources/resourceSummaryPage/resourceSummaryTable.tsx
@@ -1,4 +1,5 @@
import {Fragment} from 'react';
+import {Link} from 'react-router';
import GridEditable, {
COL_WIDTH_UNDEFINED,
@@ -9,6 +10,7 @@ import {RateUnits} from 'sentry/utils/discover/fields';
import {useLocation} from 'sentry/utils/useLocation';
import {useParams} from 'sentry/utils/useParams';
import {useResourcePagesQuery} from 'sentry/views/performance/browser/resources/utils/useResourcePageQuery';
+import {useResourceSummarySort} from 'sentry/views/performance/browser/resources/utils/useResourceSummarySort';
import {DurationCell} from 'sentry/views/starfish/components/tableCells/durationCell';
import {renderHeadCell} from 'sentry/views/starfish/components/tableCells/renderHeadCell';
import {ThroughputCell} from 'sentry/views/starfish/components/tableCells/throughputCell';
@@ -24,7 +26,8 @@ type Column = GridColumnHeader<keyof Row>;
function ResourceSummaryTable() {
const location = useLocation();
const {groupId} = useParams();
- const {data, isLoading} = useResourcePagesQuery(groupId);
+ const sort = useResourceSummarySort();
+ const {data, isLoading} = useResourcePagesQuery(groupId, {sort});
const columnOrder: GridColumnOrder<keyof Row>[] = [
{key: 'transaction', width: COL_WIDTH_UNDEFINED, name: 'Found on page'},
@@ -48,6 +51,21 @@ function ResourceSummaryTable() {
if (key === 'avg(span.self_time)') {
return <DurationCell milliseconds={row[key]} />;
}
+ if (key === 'transaction') {
+ return (
+ <Link
+ to={{
+ pathname: location.pathname,
+ query: {
+ ...location.query,
+ transaction: row[key],
+ },
+ }}
+ >
+ {row[key]}
+ </Link>
+ );
+ }
return <span>{row[key]}</span>;
};
@@ -59,8 +77,8 @@ function ResourceSummaryTable() {
columnOrder={columnOrder}
columnSortBy={[
{
- key: 'avg(span.self_time)',
- order: 'desc',
+ key: sort.field,
+ order: sort.kind,
},
]}
grid={{
@@ -68,10 +86,7 @@ function ResourceSummaryTable() {
renderHeadCell({
column,
location,
- sort: {
- field: 'avg(span.self_time)',
- kind: 'desc',
- },
+ sort,
}),
renderBodyCell,
}}
diff --git a/static/app/views/performance/browser/resources/resourceTable.tsx b/static/app/views/performance/browser/resources/resourceTable.tsx
index e7d82c046bf3d7..ed1e101775c208 100644
--- a/static/app/views/performance/browser/resources/resourceTable.tsx
+++ b/static/app/views/performance/browser/resources/resourceTable.tsx
@@ -25,7 +25,7 @@ type Row = {
'resource.render_blocking_status': string;
'span.description': string;
'span.group': string;
- 'span.op': 'resource.script' | 'resource.img';
+ 'span.op': `resource.${'script' | 'img' | 'css' | 'iframe' | string}`;
'spm()': number;
};
@@ -94,7 +94,15 @@ function ResourceTable({sort}: Props) {
return <DurationCell milliseconds={row[key]} />;
}
if (key === 'span.op') {
- const opName = row[key] === 'resource.script' ? t('Javascript') : t('Image');
+ const opNameMap = {
+ 'resource.script': t('Javascript'),
+ 'resource.img': t('Image'),
+ 'resource.iframe': t('Javascript (iframe)'),
+ 'resource.css': t('Stylesheet'),
+ 'resource.video': t('Video'),
+ 'resource.audio': t('Audio'),
+ };
+ const opName = opNameMap[row[key]] || row[key];
return <span>{opName}</span>;
}
if (key === 'http.decoded_response_content_length') {
diff --git a/static/app/views/performance/browser/resources/utils/useResourcePageQuery.ts b/static/app/views/performance/browser/resources/utils/useResourcePageQuery.ts
index 92f0be96f3a556..b8a737c8313ae7 100644
--- a/static/app/views/performance/browser/resources/utils/useResourcePageQuery.ts
+++ b/static/app/views/performance/browser/resources/utils/useResourcePageQuery.ts
@@ -1,6 +1,6 @@
+import {Sort} from 'sentry/utils/discover/fields';
import {useSpanTransactionMetrics} from 'sentry/views/starfish/queries/useSpanTransactionMetrics';
-export const useResourcePagesQuery = (groupId: string) => {
- // We'll do more this when we have the transaction tag on resource spans.
- return useSpanTransactionMetrics({'span.group': groupId});
+export const useResourcePagesQuery = (groupId: string, {sort}: {sort: Sort}) => {
+ return useSpanTransactionMetrics({'span.group': groupId}, [sort]);
};
diff --git a/static/app/views/performance/browser/resources/utils/useResourceSummarySort.ts b/static/app/views/performance/browser/resources/utils/useResourceSummarySort.ts
new file mode 100644
index 00000000000000..1d986bc6ec5d59
--- /dev/null
+++ b/static/app/views/performance/browser/resources/utils/useResourceSummarySort.ts
@@ -0,0 +1,36 @@
+import {fromSorts} from 'sentry/utils/discover/eventView';
+import type {Sort} from 'sentry/utils/discover/fields';
+import {useLocation} from 'sentry/utils/useLocation';
+import {QueryParameterNames} from 'sentry/views/starfish/views/queryParameters';
+
+type Query = {
+ sort?: string;
+};
+
+const SORTABLE_FIELDS = ['avg(span.self_time)', 'spm()'] as const;
+
+export type ValidSort = Sort & {
+ field: (typeof SORTABLE_FIELDS)[number];
+};
+
+/**
+ * Parses a `Sort` object from the URL. In case of multiple specified sorts
+ * picks the first one, since span module UIs only support one sort at a time.
+ */
+export function useResourceSummarySort(
+ sortParameterName: QueryParameterNames | 'sort' = 'sort',
+ fallback: Sort = DEFAULT_SORT
+) {
+ const location = useLocation<Query>();
+
+ return fromSorts(location.query[sortParameterName]).filter(isAValidSort)[0] ?? fallback;
+}
+
+const DEFAULT_SORT: Sort = {
+ kind: 'desc',
+ field: SORTABLE_FIELDS[0],
+};
+
+function isAValidSort(sort: Sort): sort is ValidSort {
+ return (SORTABLE_FIELDS as unknown as string[]).includes(sort.field);
+}
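
The new useResourceSummarySort hook reads the `sort` query parameter, keeps only sorts on the two whitelisted fields, and falls back to descending `avg(span.self_time)`. A minimal Python sketch of that parse, filter, and fallback idea (the helper names below are illustrative, not Sentry's `fromSorts`):

```python
# Sketch of the sort-selection logic: parse "-field" / "field" tokens,
# keep only allowed fields, fall back to the default when nothing valid
# is present. Names are illustrative only.
SORTABLE_FIELDS = ("avg(span.self_time)", "spm()")
DEFAULT_SORT = {"field": SORTABLE_FIELDS[0], "kind": "desc"}

def parse_sorts(raw: str | None) -> list[dict]:
    if not raw:
        return []
    sorts = []
    for token in raw.split(","):
        token = token.strip()
        if not token:
            continue
        kind = "desc" if token.startswith("-") else "asc"
        sorts.append({"field": token.lstrip("-"), "kind": kind})
    return sorts

def resource_summary_sort(query: dict) -> dict:
    valid = [s for s in parse_sorts(query.get("sort")) if s["field"] in SORTABLE_FIELDS]
    return valid[0] if valid else DEFAULT_SORT

assert resource_summary_sort({"sort": "-spm()"}) == {"field": "spm()", "kind": "desc"}
assert resource_summary_sort({}) == DEFAULT_SORT
```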
diff --git a/static/app/views/performance/browser/resources/utils/useResourcesQuery.ts b/static/app/views/performance/browser/resources/utils/useResourcesQuery.ts
index 60f6f3c6a2e1e4..12715438e5905f 100644
--- a/static/app/views/performance/browser/resources/utils/useResourcesQuery.ts
+++ b/static/app/views/performance/browser/resources/utils/useResourcesQuery.ts
@@ -30,6 +30,7 @@ export const useResourcesQuery = ({sort}: {sort: ValidSort}) => {
'spm()',
'span.group',
'resource.render_blocking_status',
+ 'span.domain',
],
name: 'Resource module - resource table',
query: queryConditions.join(' '),
|
445721fdf24b3dbcef9cafa74f2e2fc52bf8e3d6
|
2023-06-14 00:41:06
|
edwardgou-sentry
|
feat(starfish): Adds transaction as selectable for count unique in span metrics (#50774)
| false
|
Adds transaction as selectable for count unique in span metrics (#50774)
|
feat
|
diff --git a/src/sentry/search/events/datasets/spans_metrics.py b/src/sentry/search/events/datasets/spans_metrics.py
index ed34149cae10fb..de6791874d7d00 100644
--- a/src/sentry/search/events/datasets/spans_metrics.py
+++ b/src/sentry/search/events/datasets/spans_metrics.py
@@ -63,7 +63,9 @@ def function_converter(self) -> Mapping[str, fields.MetricsFunction]:
"count_unique",
required_args=[
fields.MetricArg(
- "column", allowed_columns=["user"], allow_custom_measurements=False
+ "column",
+ allowed_columns=["user", "transaction"],
+ allow_custom_measurements=False,
)
],
calculated_args=[resolve_metric_id],
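
The change only widens the allowed_columns whitelist so count_unique(transaction) is accepted on the spans metrics dataset. A hedged sketch of that kind of whitelist validation (the function and exception names here are illustrative, not the real MetricArg implementation):

```python
# Illustrative whitelist check in the spirit of MetricArg.allowed_columns:
# the aggregate accepts a column only if it is explicitly listed.
class InvalidSearchQuery(Exception):
    pass

def validate_count_unique_column(column: str) -> str:
    allowed_columns = {"user", "transaction"}  # "transaction" is the newly allowed value
    if column not in allowed_columns:
        raise InvalidSearchQuery(f"count_unique: column {column!r} is not supported")
    return column

validate_count_unique_column("transaction")  # now passes
```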
|
2235157839dcbac84b7b3797da192ce59e0acc55
|
2021-12-03 05:34:20
|
Evan Purkhiser
|
chore(deps): Bump to node 16 LTS (#30376)
| false
|
Bump to node 16 LTS (#30376)
|
chore
|
diff --git a/package.json b/package.json
index c0360611d9a10e..e78884194745c5 100644
--- a/package.json
+++ b/package.json
@@ -253,7 +253,7 @@
]
},
"volta": {
- "node": "12.19.0",
+ "node": "16.13.1",
"yarn": "1.22.5"
}
}
diff --git a/tests/js/spec/components/forms/jsonForm.spec.tsx b/tests/js/spec/components/forms/jsonForm.spec.tsx
index 5c242451d24387..17d5d8749d18ef 100644
--- a/tests/js/spec/components/forms/jsonForm.spec.tsx
+++ b/tests/js/spec/components/forms/jsonForm.spec.tsx
@@ -21,7 +21,9 @@ describe('JsonForm', function () {
try {
mountWithTheme(<JsonForm forms={accountDetailsFields} />);
} catch (error) {
- expect(error.message).toBe("Cannot read property 'email' of undefined");
+ expect(error.message).toBe(
+ "Cannot read properties of undefined (reading 'email')"
+ );
}
});
@@ -119,7 +121,9 @@ describe('JsonForm', function () {
/>
);
} catch (error) {
- expect(error.message).toBe("Cannot read property 'email' of undefined");
+ expect(error.message).toBe(
+ "Cannot read properties of undefined (reading 'email')"
+ );
}
});
|
828bdd1b1e1475c4bef226aaf7e2b7993e623a89
|
2025-02-19 02:51:49
|
Rohan Agarwal
|
chore(autofix): Remove solution timeline truncation (#85308)
| false
|
Remove solution timeline truncation (#85308)
|
chore
|
diff --git a/static/app/components/events/autofix/autofixSolution.tsx b/static/app/components/events/autofix/autofixSolution.tsx
index 7bacc8069bd4a6..a290b86524f532 100644
--- a/static/app/components/events/autofix/autofixSolution.tsx
+++ b/static/app/components/events/autofix/autofixSolution.tsx
@@ -14,20 +14,12 @@ import {
type AutofixSolutionTimelineEvent,
AutofixStatus,
AutofixStepType,
- type AutofixTimelineEvent,
} from 'sentry/components/events/autofix/types';
import {
type AutofixResponse,
makeAutofixQueryKey,
} from 'sentry/components/events/autofix/useAutofix';
-import {
- IconCheckmark,
- IconChevron,
- IconClose,
- IconEdit,
- IconEllipsis,
- IconFix,
-} from 'sentry/icons';
+import {IconCheckmark, IconChevron, IconClose, IconEdit, IconFix} from 'sentry/icons';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import {setApiQueryData, useMutation, useQueryClient} from 'sentry/utils/queryClient';
@@ -147,19 +139,6 @@ const cardAnimationProps: AnimationProps = {
}),
};
-const VerticalEllipsis = styled(IconEllipsis)`
- height: 16px;
- color: ${p => p.theme.subText};
- transform: rotate(90deg);
- position: relative;
- left: -1px;
-`;
-
-type ExtendedTimelineEvent = AutofixTimelineEvent & {
- isTruncated?: boolean;
- originalIndex?: number;
-};
-
function SolutionDescription({
solution,
groupId,
@@ -176,68 +155,11 @@ function SolutionDescription({
const containerRef = useRef<HTMLDivElement>(null);
const selection = useTextSelection(containerRef);
- // Filter events to keep only 1 event before and after modified events
- const filteredEvents = (() => {
- const events = solution.map((event, index) => ({
- ...event,
- is_most_important_event: event.is_new_event,
- originalIndex: index,
- }));
-
- const firstModifiedIndex = events.findIndex(e => e.is_new_event);
- const lastModifiedIndex = events.findLastIndex(e => e.is_new_event);
-
- if (firstModifiedIndex === -1) {
- return events;
- }
-
- const startIndex = Math.max(0, firstModifiedIndex - 1);
- const endIndex = Math.min(events.length - 1, lastModifiedIndex + 1);
-
- const truncatedEvents = events.slice(startIndex, endIndex + 1);
-
- if (truncatedEvents.length === 0) {
- return events;
- }
-
- // Add truncation indicators if needed
- const finalEvents: ExtendedTimelineEvent[] = [];
-
- // Add truncation at start if we removed events
- if (startIndex > 0) {
- const firstEvent = truncatedEvents[0];
- if (firstEvent) {
- finalEvents.push({
- title: '',
- timeline_item_type: 'internal_code' as const,
- code_snippet_and_analysis: '',
- relevant_code_file: firstEvent.relevant_code_file,
- is_most_important_event: false,
- isTruncated: true,
- });
- }
- }
-
- // Add all filtered events
- finalEvents.push(...truncatedEvents);
-
- // Add truncation at end if we removed events
- if (endIndex < events.length - 1) {
- const lastEvent = truncatedEvents[truncatedEvents.length - 1];
- if (lastEvent) {
- finalEvents.push({
- title: '',
- timeline_item_type: 'internal_code' as const,
- code_snippet_and_analysis: '',
- relevant_code_file: lastEvent.relevant_code_file,
- is_most_important_event: false,
- isTruncated: true,
- });
- }
- }
-
- return finalEvents;
- })();
+ const events = solution.map((event, index) => ({
+ ...event,
+ is_most_important_event: event.is_new_event,
+ originalIndex: index,
+ }));
return (
<SolutionDescriptionWrapper>
@@ -258,13 +180,7 @@ function SolutionDescription({
)}
</AnimatePresence>
<div ref={containerRef}>
- <AutofixTimeline
- events={filteredEvents}
- activeColor="green400"
- getCustomIcon={(event: AutofixTimelineEvent & {isTruncated?: boolean}) =>
- event.isTruncated ? <VerticalEllipsis /> : undefined
- }
- />
+ <AutofixTimeline events={events} activeColor="green400" />
</div>
</SolutionDescriptionWrapper>
);
diff --git a/static/app/components/events/autofix/autofixTimeline.tsx b/static/app/components/events/autofix/autofixTimeline.tsx
index 3446a8a9cb260a..8c841be35d9c72 100644
--- a/static/app/components/events/autofix/autofixTimeline.tsx
+++ b/static/app/components/events/autofix/autofixTimeline.tsx
@@ -12,14 +12,10 @@ import type {Color} from 'sentry/utils/theme';
import type {AutofixTimelineEvent} from './types';
-type ExtendedTimelineEvent = AutofixTimelineEvent & {
- isTruncated?: boolean;
-};
-
type Props = {
- events: ExtendedTimelineEvent[];
+ events: AutofixTimelineEvent[];
activeColor?: Color;
- getCustomIcon?: (event: ExtendedTimelineEvent) => React.ReactNode;
+ getCustomIcon?: (event: AutofixTimelineEvent) => React.ReactNode;
};
function getEventIcon(eventType: AutofixTimelineEvent['timeline_item_type']) {
@@ -66,16 +62,14 @@ export function AutofixTimeline({events, activeColor, getCustomIcon}: Props) {
<Timeline.Container>
{events.map((event, index) => {
const isActive = event.is_most_important_event && index !== events.length - 1;
- const isTruncated = event.isTruncated;
return (
<Timeline.Item
key={index}
title={
<StyledTimelineHeader
- onClick={isTruncated ? undefined : () => toggleItem(index)}
+ onClick={() => toggleItem(index)}
isActive={isActive}
- isTruncated={isTruncated}
data-test-id={`autofix-root-cause-timeline-item-${index}`}
>
<div
@@ -83,12 +77,10 @@ export function AutofixTimeline({events, activeColor, getCustomIcon}: Props) {
__html: singleLineRenderer(event.title),
}}
/>
- {!isTruncated && (
- <StyledIconChevron
- direction={expandedItems.includes(index) ? 'down' : 'right'}
- size="xs"
- />
- )}
+ <StyledIconChevron
+ direction={expandedItems.includes(index) ? 'down' : 'right'}
+ size="xs"
+ />
</StyledTimelineHeader>
}
isActive={isActive}
@@ -96,7 +88,7 @@ export function AutofixTimeline({events, activeColor, getCustomIcon}: Props) {
colorConfig={getEventColor(isActive, activeColor)}
>
<AnimatePresence>
- {!isTruncated && expandedItems.includes(index) && (
+ {expandedItems.includes(index) && (
<AnimatedContent
initial={{height: 0, opacity: 0}}
animate={{height: 'auto', opacity: 1}}
@@ -133,14 +125,14 @@ const StyledSpan = styled('span')`
}
`;
-const StyledTimelineHeader = styled('div')<{isActive?: boolean; isTruncated?: boolean}>`
+const StyledTimelineHeader = styled('div')<{isActive?: boolean}>`
display: flex;
align-items: center;
justify-content: space-between;
width: 100%;
padding: ${space(0.25)};
border-radius: ${p => p.theme.borderRadius};
- cursor: ${p => (p.isTruncated ? 'default' : 'pointer')};
+ cursor: pointer;
font-weight: ${p => (p.isActive ? p.theme.fontWeightBold : p.theme.fontWeightNormal)};
gap: ${space(1)};
@@ -151,8 +143,7 @@ const StyledTimelineHeader = styled('div')<{isActive?: boolean; isTruncated?: bo
}
&:hover {
- background-color: ${p =>
- p.isTruncated ? 'transparent' : p.theme.backgroundSecondary};
+ background-color: ${p => p.theme.backgroundSecondary};
}
`;
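
The removed block kept only one timeline entry on either side of the span of is_new_event items and marked the trimmed ends with ellipsis placeholders. A small Python sketch of that windowing idea, for reference (the component now simply renders the full list):

```python
# Sketch of the truncation that was removed: keep one item of context
# around the span of "new" events and mark trimmed ends with a marker.
def truncate_around_new_events(events: list[dict]) -> list[dict]:
    new_indexes = [i for i, e in enumerate(events) if e.get("is_new_event")]
    if not new_indexes:
        return events
    start = max(0, new_indexes[0] - 1)
    end = min(len(events) - 1, new_indexes[-1] + 1)
    window = events[start:end + 1]
    if start > 0:
        window = [{"title": "…", "truncated": True}] + window
    if end < len(events) - 1:
        window = window + [{"title": "…", "truncated": True}]
    return window

timeline = [{"title": t, "is_new_event": t == "c"} for t in "abcde"]
# Keeps b, c, d and marks both trimmed ends.
assert [e["title"] for e in truncate_around_new_events(timeline)] == ["…", "b", "c", "d", "…"]
```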
|
b892677a902f4b04fe2fe70acc2893ef4e69621e
|
2022-02-26 10:52:28
|
Zhixing Zhang
|
fix(report): Weekly email copy / style / query update (#32095)
| false
|
Weekly email copy / style / query update (#32095)
|
fix
|
diff --git a/src/sentry/templates/sentry/emails/reports/new.html b/src/sentry/templates/sentry/emails/reports/new.html
index bbf8ab84ae4b87..476a5fcb0a6bf4 100644
--- a/src/sentry/templates/sentry/emails/reports/new.html
+++ b/src/sentry/templates/sentry/emails/reports/new.html
@@ -554,16 +554,16 @@ <h4>Errors by Issue Type</h4>
{%if report.key_errors|length > 0 %}
<div id="key-errors">
{% with report.key_errors as errors %}
- <h4>Key Errors</h4>
+ <h4>Issues with the Most Errors</h4>
{% for a in errors %}
<div style="display: flex; flex-direction: row; margin-bottom: 8px; align-items: flex-start;">
<div style="width: 10%; font-size: 17px;">{{a.count|small_count:1}}</div>
<div style="width: 65%;">
{% url 'sentry-organization-issue-detail' issue_id=a.group.id organization_slug=organization.slug as issue_detail %}
- <a style="display: block; text-overflow: ellipsis; white-space: nowrap; overflow: hidden; font-size: 17px;" href="{% absolute_uri issue_detail %}">{{a.group.message}}</a>
+ <a style="display: block; text-overflow: ellipsis; white-space: nowrap; overflow: hidden; font-size: 17px; height: 24px;" href="{% absolute_uri issue_detail %}">{{a.group.message}}</a>
<div style="font-size: 12px; color: #80708F;">{{a.group.project.name}}</div>
</div>
- <span style="background-color: {{a.status_color}}; border-radius: 8px; font-size: 12px; align-self: center; padding: 2px 10px; margin-left: auto;">{{a.status}}</span>
+ <span style="background-color: {{a.status_color}}; border-radius: 8px; font-size: 12px; align-self: center; padding: 2px 10px; margin-left: auto; height: 100%;">{{a.status}}</span>
</div>
{% endfor %}
{% endwith %}
|
8ac04c66ae3d86575330da15a767fd2997842150
|
2024-03-27 02:29:17
|
Colleen O'Rourke
|
ref(daily summary): Handle no attachment text (#67741)
| false
|
Handle no attachment text (#67741)
|
ref
|
diff --git a/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py b/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py
index e051c9c8c09fe8..9188cbae0c01d6 100644
--- a/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py
+++ b/src/sentry/integrations/slack/message_builder/notifications/daily_summary.py
@@ -39,9 +39,10 @@ def linkify_error_title(self, group):
params={"referrer": self.notification.get_referrer(ExternalProviders.SLACK)}
)
title = build_attachment_title(group)
- attachment_text = self.get_attachment_text(group).replace("\n", " ")
+ attachment_text = self.get_attachment_text(group)
if not attachment_text:
return f"<{link}|*{escape_slack_text(title)}*>"
+ attachment_text = attachment_text.replace("\n", " ")
return f"<{link}|*{escape_slack_text(title)}*>\n{attachment_text}"
def linkify_release(self, release, organization):
diff --git a/tests/sentry/tasks/test_daily_summary.py b/tests/sentry/tasks/test_daily_summary.py
index 85faf8ed1b7b10..a1e89b1cefc6c0 100644
--- a/tests/sentry/tasks/test_daily_summary.py
+++ b/tests/sentry/tasks/test_daily_summary.py
@@ -740,6 +740,56 @@ def test_slack_notification_contents_newline(self):
in blocks[4]["fields"][0]["text"]
)
+ @responses.activate
+ @with_feature("organizations:slack-block-kit")
+ def test_slack_notification_contents_newline_no_attachment_text(self):
+ data = {
+ "timestamp": iso_format(self.now),
+ "stacktrace": copy.deepcopy(DEFAULT_EVENT_DATA["stacktrace"]),
+ "fingerprint": ["group-5"],
+ "exception": {
+ "values": [
+ {
+ "type": "WorkerLostError",
+ "value": None,
+ }
+ ]
+ },
+ }
+ self.store_event(
+ data=data,
+ project_id=self.project.id,
+ assert_no_errors=False,
+ )
+ self.store_outcomes(
+ {
+ "org_id": self.organization.id,
+ "project_id": self.project.id,
+ "outcome": Outcome.ACCEPTED,
+ "category": DataCategory.ERROR,
+ "timestamp": self.now,
+ "key_id": 1,
+ },
+ num_times=1,
+ )
+
+ ctx = build_summary_data(
+ timestamp=self.now.timestamp(),
+ duration=ONE_DAY,
+ organization=self.organization,
+ daily=True,
+ )
+ top_projects_context_map = build_top_projects_map(ctx, self.user.id)
+ with self.tasks():
+ DailySummaryNotification(
+ organization=ctx.organization,
+ recipient=self.user,
+ provider=ExternalProviders.SLACK,
+ project_context=top_projects_context_map,
+ ).send()
+ blocks, fallback_text = get_blocks_and_fallback_text()
+ assert "" in blocks[4]["fields"][0]["text"]
+
@responses.activate
@with_feature("organizations:slack-block-kit")
def test_limit_to_two_projects(self):
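
The underlying bug: get_attachment_text can return an empty value for exceptions with no message, and calling .replace on None raises, so the newline normalization now happens only after the emptiness check. A minimal sketch of the guard (the link and title values are made up):

```python
# Only normalize newlines once we know the attachment text is present,
# since it may be None for exceptions without a message.
def linkify(link: str, title: str, attachment_text: str | None) -> str:
    if not attachment_text:
        return f"<{link}|*{title}*>"
    attachment_text = attachment_text.replace("\n", " ")
    return f"<{link}|*{title}*>\n{attachment_text}"

assert linkify("https://example.test/issue/1", "WorkerLostError", None) == (
    "<https://example.test/issue/1|*WorkerLostError*>"
)
```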
|
ed105f1755b222fb40418543e87aeece9d4098cc
|
2025-02-19 21:03:25
|
George Gritsouk
|
feat(dashboards): Improve `TimeSeriesVisualization` X axis label selection and rendering (#85409)
| false
|
Improve `TimeSeriesVisualization` X axis label selection and rendering (#85409)
|
feat
|
diff --git a/static/app/views/dashboards/widgets/timeSeriesWidget/formatXAxisTimestamp.spec.tsx b/static/app/views/dashboards/widgets/timeSeriesWidget/formatXAxisTimestamp.spec.tsx
new file mode 100644
index 00000000000000..df36b8db6d515b
--- /dev/null
+++ b/static/app/views/dashboards/widgets/timeSeriesWidget/formatXAxisTimestamp.spec.tsx
@@ -0,0 +1,30 @@
+import moment from 'moment-timezone';
+
+import {formatXAxisTimestamp} from './formatXAxisTimestamp';
+
+describe('formatXAxisTimestamp', () => {
+ it.each([
+ // Year starts
+ ['2025-01-01T00:00:00', 'Jan 1st 2025'],
+ ['2024-01-01T00:00:00', 'Jan 1st 2024'],
+ // // Month starts
+ ['2025-02-01T00:00:00', 'Feb 1st'],
+ ['2024-03-01T00:00:00', 'Mar 1st'],
+ // // Day starts
+ ['2025-02-05T00:00:00', 'Feb 5th'],
+ // // Hour starts
+ ['2025-02-05T12:00:00', '12:00 PM'],
+ ['2025-02-05T05:00:00', '5:00 AM'],
+ ['2025-02-01T01:00:00', '1:00 AM'],
+ // Minute starts
+ ['2025-02-05T12:11:00', '12:11 PM'],
+ ['2025-02-05T05:25:00', '5:25 AM'],
+ // Seconds
+ ['2025-02-05T12:10:05', '12:10:05 PM'],
+ ['2025-02-05T12:10:06', '12:10:06 PM'],
+ ['2025-02-05T05:25:10', '5:25:10 AM'],
+ ])('formats %s as %s', (raw, formatted) => {
+ const timestamp = moment(raw).unix() * 1000;
+ expect(formatXAxisTimestamp(timestamp)).toEqual(formatted);
+ });
+});
diff --git a/static/app/views/dashboards/widgets/timeSeriesWidget/formatXAxisTimestamp.tsx b/static/app/views/dashboards/widgets/timeSeriesWidget/formatXAxisTimestamp.tsx
new file mode 100644
index 00000000000000..d6159592a89ce4
--- /dev/null
+++ b/static/app/views/dashboards/widgets/timeSeriesWidget/formatXAxisTimestamp.tsx
@@ -0,0 +1,61 @@
+import moment from 'moment-timezone';
+
+/**
+ * A "cascading" formatter, based on the recommendations in [ECharts documentation](https://echarts.apache.org/en/option.html#xAxis.axisLabel.formatter). Given a timestamp of an X axis of type `"time"`, return a formatted string, to show under the axis tick.
+ *
+ * The fidelity of the formatted value depends on the fidelity of the tick mark timestamp. ECharts will intelligently choose the location of tick marks based on the total time range, and any significant intervals inside. It always chooses tick marks that fall on a "round" time values (starts of days, starts of hours, 15 minute intervals, etc.). This formatter is called on the time stamps of the selected ticks. Here are some examples of output labels sets you can expect:
+ *
+ * ["Feb 1st", "Feb 2nd", "Feb 3rd"] when ECharts aligns ticks with days of the month
+ * ["11:00pm", "Feb 2nd", "1:00am"] when ECharts aligns ticks with hours across a day boundary
+ * ["Mar 1st", "Apr 1st", "May 1st"] when ECharts aligns ticks with starts of month
+ * ["Dec 1st", "Jan 1st 2025", "Feb 1st"] when ECharts aligns markers with starts of month across a year boundary
+ * ["12:00pm", "1:00am", "2:00am", "3:00am"] when ECharts aligns ticks with hours starts
+ *
+ * @param value
+ * @param options
+ * @returns Formatted X axis label string
+ */
+export function formatXAxisTimestamp(
+ value: number,
+ options: {utc?: boolean} = {utc: false}
+): string {
+ const parsed = getParser(!options.utc)(value);
+
+ // Granularity-aware parsing, adjusts the format based on the
+ // granularity of the object This works well with ECharts since the
+ // parser is not aware of the other ticks
+ let format = 'MMM Do';
+
+ if (
+ parsed.dayOfYear() === 1 &&
+ parsed.hour() === 0 &&
+ parsed.minute() === 0 &&
+ parsed.second() === 0
+ ) {
+ // Start of a year
+ format = 'MMM Do YYYY';
+ } else if (
+ parsed.day() === 0 &&
+ parsed.hour() === 0 &&
+ parsed.minute() === 0 &&
+ parsed.second() === 0
+ ) {
+ // Start of a month
+ format = 'MMM Do';
+ } else if (parsed.hour() === 0 && parsed.minute() === 0 && parsed.second() === 0) {
+ // Start of a day
+ format = 'MMM Do';
+ } else if (parsed.second() === 0) {
+ // Hours, minutes
+ format = 'LT';
+ } else {
+ // Hours, minutes, seconds
+ format = 'LTS';
+ }
+
+ return parsed.format(format);
+}
+
+function getParser(local = false): typeof moment | typeof moment.utc {
+ return local ? moment : moment.utc;
+}
diff --git a/static/app/views/dashboards/widgets/timeSeriesWidget/timeSeriesWidgetVisualization.tsx b/static/app/views/dashboards/widgets/timeSeriesWidget/timeSeriesWidgetVisualization.tsx
index e8a0db6f028524..e1c3818df016c9 100644
--- a/static/app/views/dashboards/widgets/timeSeriesWidget/timeSeriesWidgetVisualization.tsx
+++ b/static/app/views/dashboards/widgets/timeSeriesWidget/timeSeriesWidgetVisualization.tsx
@@ -39,6 +39,7 @@ import {CompleteLineChartWidgetSeries} from './seriesConstructors/completeLineCh
import {IncompleteAreaChartWidgetSeries} from './seriesConstructors/incompleteAreaChartWidgetSeries';
import {IncompleteLineChartWidgetSeries} from './seriesConstructors/incompleteLineChartWidgetSeries';
import {formatTooltipValue} from './formatTooltipValue';
+import {formatXAxisTimestamp} from './formatXAxisTimestamp';
import {formatYAxisValue} from './formatYAxisValue';
import {markDelayedData} from './markDelayedData';
import {ReleaseSeries} from './releaseSeries';
@@ -283,7 +284,7 @@ export function TimeSeriesWidgetVisualization(props: TimeSeriesWidgetVisualizati
// https://github.com/apache/echarts/issues/15562
left: 2,
top: showLegend ? 25 : 10,
- right: 4,
+ right: 8,
bottom: 0,
containLabel: true,
}}
@@ -321,8 +322,16 @@ export function TimeSeriesWidgetVisualization(props: TimeSeriesWidgetVisualizati
axisLabel: {
padding: [0, 10, 0, 10],
width: 60,
+ formatter: (value: number) => {
+ const string = formatXAxisTimestamp(value, {utc: utc ?? undefined});
+
+ // Adding whitespace around the label is equivalent to padding.
+ // ECharts doesn't respect padding when calculating overlaps, but it
+ // does respect whitespace. This prevents overlapping X axis labels
+ return ` ${string} `;
+ },
},
- splitNumber: 0,
+ splitNumber: 5,
}}
yAxis={{
animation: false,
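
formatXAxisTimestamp inspects how "round" the tick timestamp is and cascades from year starts, through day starts, down to second-level formats. A simplified Python/datetime sketch of the same cascade (no ordinal suffixes, UTC option, or moment locale formats):

```python
from datetime import datetime

# Rough sketch of the cascading choice: the less "round" the tick
# timestamp, the more detail the label shows.
def x_axis_label(ts: float) -> str:
    d = datetime.fromtimestamp(ts)
    if (d.month, d.day, d.hour, d.minute, d.second) == (1, 1, 0, 0, 0):
        return f"{d:%b} {d.day} {d.year}"              # start of a year, e.g. "Jan 1 2025"
    if (d.hour, d.minute, d.second) == (0, 0, 0):
        return f"{d:%b} {d.day}"                       # start of a day or month, e.g. "Feb 5"
    if d.second == 0:
        return d.strftime("%I:%M %p").lstrip("0")      # on the minute, e.g. "12:11 PM"
    return d.strftime("%I:%M:%S %p").lstrip("0")       # otherwise full precision

print(x_axis_label(datetime(2025, 2, 5, 12, 11).timestamp()))  # "12:11 PM"
```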
|
8ef2c1aa1804798f0c8d2f52f1e5251749ca650b
|
2018-01-18 04:30:48
|
Jess MacQueen
|
fix(invites): Use correct project team relation in org member invite
| false
|
Use correct project team relation in org member invite
|
fix
|
diff --git a/src/sentry/templates/sentry/accept-organization-invite.html b/src/sentry/templates/sentry/accept-organization-invite.html
index 7f5f9b32b45b84..626974914eac67 100644
--- a/src/sentry/templates/sentry/accept-organization-invite.html
+++ b/src/sentry/templates/sentry/accept-organization-invite.html
@@ -30,9 +30,9 @@ <h2>{% trans "Organization Invite" %}</h2>
{% if project_count %}
<p>{% blocktrans %}You have been invited to join this organization, which manages <strong>{{ project_count }}</strong> project(s), including:{% endblocktrans %}</p>
<ul>
- {% for project in project_list|slice:"5" %}
+ {% for project, team in project_team_list|slice:"5" %}
<li>
- {{ project.team.name }} / {{ project.name }}
+ {{ team.name }} / {{ project.name }}
</li>
{% endfor %}
</ul>
diff --git a/src/sentry/web/frontend/accept_organization_invite.py b/src/sentry/web/frontend/accept_organization_invite.py
index e6dbbc05ed4e9c..1310edb8c3fd7b 100644
--- a/src/sentry/web/frontend/accept_organization_invite.py
+++ b/src/sentry/web/frontend/accept_organization_invite.py
@@ -6,7 +6,7 @@
from django.utils.crypto import constant_time_compare
from django.utils.translation import ugettext_lazy as _
-from sentry.models import (AuditLogEntryEvent, OrganizationMember, Project)
+from sentry.models import (AuditLogEntryEvent, OrganizationMember, Project, ProjectTeam)
from sentry.signals import member_joined
from sentry.utils import auth
from sentry.web.frontend.base import BaseView
@@ -62,12 +62,20 @@ def handle(self, request, member_id, token):
qs = Project.objects.filter(
organization=organization,
)
- project_list = list(qs.select_related('team')[:25])
+ project_list = list(qs[:25])
+ project_teams = list(ProjectTeam.objects.filter(
+ project__in=project_list,
+ ).select_related('team'))
+ projects_by_id = {p.id: p for p in project_list}
+
+ project_team_context = [
+ (projects_by_id[pt.project_id], pt.team) for pt in project_teams
+ ]
project_count = qs.count()
context = {
'organization': om.organization,
- 'project_list': project_list,
+ 'project_team_list': project_team_context,
'project_count': project_count,
'needs_authentication': not request.user.is_authenticated(),
'logout_url': '{}?next={}'.format(
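
With the ProjectTeam through model a project no longer has a single .team, so the view builds explicit (project, team) pairs for the template. The join in plain Python, with hypothetical stand-ins for the Django models:

```python
from collections import namedtuple

# Hypothetical stand-ins for the Django models, just to show the join.
Project = namedtuple("Project", "id name")
Team = namedtuple("Team", "name")
ProjectTeam = namedtuple("ProjectTeam", "project_id team")

def project_team_pairs(projects, project_teams):
    projects_by_id = {p.id: p for p in projects}
    # One entry per (project, team) relation, mirroring the view's context.
    return [(projects_by_id[pt.project_id], pt.team) for pt in project_teams]

projects = [Project(1, "backend"), Project(2, "frontend")]
relations = [ProjectTeam(1, Team("api")), ProjectTeam(2, Team("web"))]
for project, team in project_team_pairs(projects, relations):
    print(f"{team.name} / {project.name}")
```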
|
3d0839ae8a1b49a6e9a207d31f11812fb8d0ade7
|
2022-09-24 03:02:02
|
Alex Jillard
|
fix(perf): Link user ID tag to transaction summary search (#39266)
| false
|
Link user ID tag to transaction summary search (#39266)
|
fix
|
diff --git a/static/app/utils/discover/fields.tsx b/static/app/utils/discover/fields.tsx
index e5a790338cb860..4a3a9a237debae 100644
--- a/static/app/utils/discover/fields.tsx
+++ b/static/app/utils/discover/fields.tsx
@@ -571,7 +571,7 @@ export const SEMVER_TAGS = {
* Some tag keys should never be formatted as `tag[...]`
* when used as a filter because they are predefined.
*/
-const EXCLUDED_TAG_KEYS = new Set(['release']);
+const EXCLUDED_TAG_KEYS = new Set(['release', 'user']);
export function formatTagKey(key: string): string {
// Some tags may be normalized from context, but not all of them are.
diff --git a/static/app/views/performance/transactionSummary.spec.tsx b/static/app/views/performance/transactionSummary.spec.tsx
index 61e44808991f79..c11cecb80ada56 100644
--- a/static/app/views/performance/transactionSummary.spec.tsx
+++ b/static/app/views/performance/transactionSummary.spec.tsx
@@ -315,6 +315,10 @@ describe('Performance > TransactionSummary', function () {
key: 'foo',
topValues: [{count: 1, value: 'bar', name: 'bar'}],
},
+ {
+ key: 'user',
+ topValues: [{count: 1, value: 'id:100', name: '100'}],
+ },
],
});
MockApiClient.addMockResponse({
@@ -755,8 +759,11 @@ describe('Performance > TransactionSummary', function () {
userEvent.click(
screen.getByLabelText('Add the bar segment tag to the search query')
);
+ userEvent.click(
+ screen.getByLabelText('Add the id:100 segment tag to the search query')
+ );
- expect(router.push).toHaveBeenCalledTimes(2);
+ expect(router.push).toHaveBeenCalledTimes(3);
expect(router.push).toHaveBeenNthCalledWith(1, {
query: {
@@ -775,6 +782,15 @@ describe('Performance > TransactionSummary', function () {
transactionCursor: '1:0:0',
},
});
+
+ expect(router.push).toHaveBeenNthCalledWith(3, {
+ query: {
+ project: '2',
+ query: 'user:"id:100"',
+ transaction: '/performance',
+ transactionCursor: '1:0:0',
+ },
+ });
});
});
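
Adding user to EXCLUDED_TAG_KEYS makes the generated filter a bare user:"id:100" rather than a tags[...] lookup. The body of formatTagKey is not part of this diff, so the sketch below only shows the presumed pass-through for excluded keys:

```python
# Presumed shape of the exclusion check (the real formatTagKey body is
# not shown in this diff): excluded keys are predefined fields and are
# used verbatim; other tag keys may be wrapped, e.g. tags[foo].
EXCLUDED_TAG_KEYS = {"release", "user"}

def format_tag_key(key: str) -> str:
    if key in EXCLUDED_TAG_KEYS:
        return key
    return f"tags[{key}]"

assert format_tag_key("user") == "user"   # produces filters like user:"id:100"
```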
|
1c125615e116a671b9654ed54b353d444d7d8c19
|
2023-09-29 23:06:49
|
Sentry Bot
|
ref: bump sentry-arroyo to 2.14.9 (#57176)
| false
|
bump sentry-arroyo to 2.14.9 (#57176)
|
ref
|
diff --git a/requirements-base.txt b/requirements-base.txt
index 07a2d548fee142..920e98aac24b86 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -58,7 +58,7 @@ requests>=2.25.1
rfc3339-validator>=0.1.2
rfc3986-validator>=0.1.1
# [end] jsonschema format validators
-sentry-arroyo>=2.14.7
+sentry-arroyo>=2.14.9
sentry-kafka-schemas>=0.1.29
sentry-redis-tools>=0.1.7
sentry-relay>=0.8.30
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index eb01ffa052dc44..10f25bd00a2d87 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -168,7 +168,7 @@ rfc3986-validator==0.1.1
rsa==4.8
s3transfer==0.6.1
selenium==4.3.0
-sentry-arroyo==2.14.7
+sentry-arroyo==2.14.9
sentry-cli==2.16.0
sentry-kafka-schemas==0.1.29
sentry-redis-tools==0.1.7
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 12e1302d15efac..c546d17d2a07b3 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -110,7 +110,7 @@ rfc3339-validator==0.1.2
rfc3986-validator==0.1.1
rsa==4.8
s3transfer==0.6.1
-sentry-arroyo==2.14.7
+sentry-arroyo==2.14.9
sentry-kafka-schemas==0.1.29
sentry-redis-tools==0.1.7
sentry-relay==0.8.30
|
7bfc4889f542fd921b188be7fba52e4afaa6e13b
|
2023-07-05 15:52:18
|
Philipp Hofmann
|
feat(sdk-crash): Require sentry-cocoa 8.2.0 (#52247)
| false
|
Require sentry-cocoa 8.2.0 (#52247)
|
feat
|
diff --git a/fixtures/sdk_crash_detection/crash_event.py b/fixtures/sdk_crash_detection/crash_event.py
index 7fbe02a18f17ff..065b4292d29dd5 100644
--- a/fixtures/sdk_crash_detection/crash_event.py
+++ b/fixtures/sdk_crash_detection/crash_event.py
@@ -232,7 +232,7 @@ def get_crash_event_with_frames(
"environment": "test-app",
"sdk": {
"name": "sentry.cocoa",
- "version": "8.1.0",
+ "version": "8.2.0",
"integrations": [
"Crash",
"PerformanceTracking",
diff --git a/src/sentry/utils/sdk_crashes/cocoa_sdk_crash_detector.py b/src/sentry/utils/sdk_crashes/cocoa_sdk_crash_detector.py
index d2f90a2f380904..652d7be204e05c 100644
--- a/src/sentry/utils/sdk_crashes/cocoa_sdk_crash_detector.py
+++ b/src/sentry/utils/sdk_crashes/cocoa_sdk_crash_detector.py
@@ -1,10 +1,50 @@
from typing import Any, Mapping, Sequence
+from packaging.version import InvalidVersion, Version
+
+from sentry.db.models import NodeData
from sentry.utils.glob import glob_match
+from sentry.utils.safe import get_path
from sentry.utils.sdk_crashes.sdk_crash_detector import SDKCrashDetector
class CocoaSDKCrashDetector(SDKCrashDetector):
+ @property
+ def min_sdk_version(self) -> str:
+ """
+ Since changing the debug image type to macho (https://github.com/getsentry/sentry-cocoa/pull/2701)
+ released in sentry-cocoa 8.2.0 (https://github.com/getsentry/sentry-cocoa/blob/main/CHANGELOG.md#820),
+ the frames contain the full paths required for detecting system frames in is_system_library_frame.
+ Therefore, we require at least sentry-cocoa 8.2.0.
+ """
+ return "8.2.0"
+
+ def should_detect_sdk_crash(self, event_data: NodeData) -> bool:
+ sdk_name = get_path(event_data, "sdk", "name")
+ if sdk_name and sdk_name != "sentry.cocoa":
+ return False
+
+ sdk_version = get_path(event_data, "sdk", "version")
+ if not sdk_version:
+ return False
+
+ try:
+ minimum_cocoa_sdk_version = Version(self.min_sdk_version)
+ cocoa_sdk_version = Version(sdk_version)
+
+ if cocoa_sdk_version < minimum_cocoa_sdk_version:
+ return False
+ except InvalidVersion:
+ return False
+
+ is_unhandled = (
+ get_path(event_data, "exception", "values", -1, "mechanism", "handled") is False
+ )
+ if not is_unhandled:
+ return False
+
+ return True
+
def is_sdk_crash(self, frames: Sequence[Mapping[str, Any]]) -> bool:
if not frames:
return False
diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detection.py b/src/sentry/utils/sdk_crashes/sdk_crash_detection.py
index f79edcccd56317..c5c87acbb2cd61 100644
--- a/src/sentry/utils/sdk_crashes/sdk_crash_detection.py
+++ b/src/sentry/utils/sdk_crashes/sdk_crash_detection.py
@@ -32,10 +32,11 @@ def __init__(
def detect_sdk_crash(
self, event: Event, event_project_id: int, sample_rate: float
) -> Optional[Event]:
+
should_detect_sdk_crash = (
event.group
and event.group.issue_category == GroupCategory.ERROR
- and event.group.platform == "cocoa"
+ and self.cocoa_sdk_crash_detector.should_detect_sdk_crash(event.data)
)
if not should_detect_sdk_crash:
return None
@@ -44,14 +45,6 @@ def detect_sdk_crash(
if context is not None:
return None
- # Getting the frames and checking if the event is unhandled might different per platform.
- # We will change this once we implement this for more platforms.
- is_unhandled = (
- get_path(event.data, "exception", "values", -1, "mechanism", "handled") is False
- )
- if is_unhandled is False:
- return None
-
frames = get_path(event.data, "exception", "values", -1, "stacktrace", "frames")
if not frames:
return None
diff --git a/src/sentry/utils/sdk_crashes/sdk_crash_detector.py b/src/sentry/utils/sdk_crashes/sdk_crash_detector.py
index e1ee76c8ebc7bd..e8afd92e9ce080 100644
--- a/src/sentry/utils/sdk_crashes/sdk_crash_detector.py
+++ b/src/sentry/utils/sdk_crashes/sdk_crash_detector.py
@@ -1,12 +1,18 @@
from abc import ABC, abstractmethod
from typing import Any, Mapping, Sequence, Set
+from sentry.db.models import NodeData
+
class SDKCrashDetector(ABC):
@property
def fields_containing_paths(self) -> Set[str]:
return {"package", "module", "abs_path"}
+ @abstractmethod
+ def should_detect_sdk_crash(self, event_data: NodeData) -> bool:
+ raise NotImplementedError
+
@abstractmethod
def is_sdk_crash(self, frames: Sequence[Mapping[str, Any]]) -> bool:
"""
diff --git a/tests/sentry/utils/sdk_crashes/test_event_stripper.py b/tests/sentry/utils/sdk_crashes/test_event_stripper.py
index dff343d47937df..75cbfb9e1a746f 100644
--- a/tests/sentry/utils/sdk_crashes/test_event_stripper.py
+++ b/tests/sentry/utils/sdk_crashes/test_event_stripper.py
@@ -80,7 +80,7 @@ def test_strip_event_data_strips_sdk(self):
assert stripped_event_data.get("sdk") == {
"name": "sentry.cocoa",
- "version": "8.1.0",
+ "version": "8.2.0",
}
def test_strip_event_data_strips_value_if_not_simple_type(self):
diff --git a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py
index 631d3b0723c817..98c8e28174d1ba 100644
--- a/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py
+++ b/tests/sentry/utils/sdk_crashes/test_sdk_crash_detection.py
@@ -74,8 +74,29 @@ def test_handled_is_not_detected(self, mock_sdk_crash_reporter):
def test_wrong_function_not_detected(self, mock_sdk_crash_reporter):
self.execute_test(get_crash_event(function="Senry"), False, mock_sdk_crash_reporter)
- def test_wrong_platform_not_detected(self, mock_sdk_crash_reporter):
- self.execute_test(get_crash_event(platform="coco"), False, mock_sdk_crash_reporter)
+ def test_wrong_sdk_not_detected(self, mock_sdk_crash_reporter):
+ event = get_crash_event()
+ set_path(event, "sdk", "name", value="sentry.coco")
+
+ self.execute_test(event, False, mock_sdk_crash_reporter)
+
+ def test_beta_sdk_version_detected(self, mock_sdk_crash_reporter):
+ event = get_crash_event()
+ set_path(event, "sdk", "version", value="8.2.1-beta.1")
+
+ self.execute_test(event, True, mock_sdk_crash_reporter)
+
+ def test_too_low_min_sdk_version_not_detected(self, mock_sdk_crash_reporter):
+ event = get_crash_event()
+ set_path(event, "sdk", "version", value="8.1.1")
+
+ self.execute_test(event, False, mock_sdk_crash_reporter)
+
+ def test_invalid_sdk_version_not_detected(self, mock_sdk_crash_reporter):
+ event = get_crash_event()
+ set_path(event, "sdk", "version", value="foo")
+
+ self.execute_test(event, False, mock_sdk_crash_reporter)
def test_no_exception_not_detected(self, mock_sdk_crash_reporter):
self.execute_test(get_crash_event(exception=[]), False, mock_sdk_crash_reporter)
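
The minimum-version gate relies on packaging.version, which also normalizes pre-release spellings, so "8.2.1-beta.1" parses as 8.2.1b1 and still clears the 8.2.0 floor (matching test_beta_sdk_version_detected). A standalone example of the comparison:

```python
from packaging.version import InvalidVersion, Version

def meets_min_sdk_version(sdk_version: str, minimum: str = "8.2.0") -> bool:
    # Invalid or pre-8.2.0 versions are rejected, mirroring the detector.
    try:
        return Version(sdk_version) >= Version(minimum)
    except InvalidVersion:
        return False

assert meets_min_sdk_version("8.2.0") is True
assert meets_min_sdk_version("8.2.1-beta.1") is True   # normalized to 8.2.1b1
assert meets_min_sdk_version("8.1.1") is False
assert meets_min_sdk_version("foo") is False
```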
|
a148aa4d82cb28934c04d1f1bb77f359269ff279
|
2024-05-06 23:01:50
|
Seiji Chew
|
fix(staff): Remove incorrect u2f error message (#70362)
| false
|
Remove incorrect u2f error message (#70362)
|
fix
|
diff --git a/static/app/components/superuserStaffAccessForm.tsx b/static/app/components/superuserStaffAccessForm.tsx
index b2272041de2366..0e2d53d12598e6 100644
--- a/static/app/components/superuserStaffAccessForm.tsx
+++ b/static/app/components/superuserStaffAccessForm.tsx
@@ -59,9 +59,11 @@ class SuperuserStaffAccessForm extends Component<Props, State> {
return;
}
- await this.getAuthenticators();
+ const authenticators = await this.getAuthenticators();
+ this.setState({authenticators: authenticators});
+
// Set the error state if there are no authenticators and U2F is on
- if (!this.state.authenticators.length && !disableU2FForSUForm) {
+ if (!authenticators.length && !disableU2FForSUForm) {
this.handleError(ErrorCodes.NO_AUTHENTICATOR);
}
this.setState({isLoading: false});
@@ -183,10 +185,11 @@ class SuperuserStaffAccessForm extends Component<Props, State> {
try {
const authenticators = await api.requestPromise('/authenticators/');
- this.setState({authenticators: authenticators ?? []});
+ return authenticators ?? [];
} catch {
// ignore errors
}
+ return [];
}
render() {
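
The fix makes getAuthenticators return the fetched list so the caller acts on that value instead of reading this.state immediately after an asynchronous setState. The same return-the-value pattern, sketched in Python with a toy class:

```python
class AccessForm:
    """Toy stand-in for the component, showing the return-the-value pattern."""

    def __init__(self, fetch):
        self._fetch = fetch
        self.authenticators = []

    def get_authenticators(self):
        # Return the fetched list in addition to storing it, so the caller
        # checks the fresh value rather than a possibly stale attribute.
        try:
            self.authenticators = self._fetch() or []
        except Exception:
            self.authenticators = []
        return self.authenticators

form = AccessForm(lambda: [])
if not form.get_authenticators():
    print("no authenticator registered; surface ErrorCodes.NO_AUTHENTICATOR")
```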
|
a8dd86b3e137c700c134f68027e335d5a68b199e
|
2022-09-09 01:30:40
|
Evan Purkhiser
|
feat(perf-issues): Add disabled reason to group actions (#38596)
| false
|
Add disabled reason to group actions (#38596)
|
feat
|
diff --git a/static/app/views/organizationGroupDetails/actions/index.tsx b/static/app/views/organizationGroupDetails/actions/index.tsx
index 2c8e51808f17ce..e9d7ca2c7975b5 100644
--- a/static/app/views/organizationGroupDetails/actions/index.tsx
+++ b/static/app/views/organizationGroupDetails/actions/index.tsx
@@ -322,12 +322,7 @@ class Actions extends Component<Props, State> {
);
};
- openDeleteModal = () => {
- const {group} = this.props;
- if (!getIssueCapability(group.issueCategory, 'delete').enabled) {
- return;
- }
-
+ openDeleteModal = () =>
openModal(({Body, Footer, closeModal}: ModalRenderProps) => (
<Fragment>
<Body>
@@ -345,13 +340,9 @@ class Actions extends Component<Props, State> {
</Footer>
</Fragment>
));
- };
openDiscardModal = () => {
- const {group, organization} = this.props;
- if (!getIssueCapability(group.issueCategory, 'deleteAndDiscard').enabled) {
- return;
- }
+ const {organization} = this.props;
openModal(this.renderDiscardModal);
analytics('feature.discard_group.modal_opened', {
@@ -384,6 +375,9 @@ class Actions extends Component<Props, State> {
const isResolved = status === 'resolved';
const isIgnored = status === 'ignored';
+ const deleteCap = getIssueCapability(group.issueCategory, 'delete');
+ const deleteDiscardCap = getIssueCapability(group.issueCategory, 'deleteAndDiscard');
+
return (
<Wrapper>
<GuideAnchor target="resolve" position="bottom" offset={20}>
@@ -475,17 +469,20 @@ class Actions extends Component<Props, State> {
priority: 'danger',
label: t('Delete'),
hidden: !hasAccess,
- disabled: !getIssueCapability(group.issueCategory, 'delete').enabled,
- onAction: () => this.openDeleteModal(),
+ disabled: !deleteCap.enabled,
+ tooltip: deleteCap.disabledReason,
+ onAction: deleteCap.enabled ? () => this.openDeleteModal() : undefined,
},
{
key: 'delete-and-discard',
priority: 'danger',
label: t('Delete and discard future events'),
hidden: !hasAccess,
- disabled: !getIssueCapability(group.issueCategory, 'deleteAndDiscard')
- .enabled,
- onAction: () => this.openDiscardModal(),
+ disabled: !deleteDiscardCap.enabled,
+ tooltip: deleteDiscardCap.disabledReason,
+ onAction: deleteDiscardCap.enabled
+ ? () => this.openDiscardModal()
+ : undefined,
},
]}
/>
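
The refactor resolves each capability once and derives disabled, tooltip, and onAction from the same object. A small Python sketch of building menu-item props from such a capability record (the capability table here is hypothetical; only the field names come from the diff):

```python
CAPABILITIES = {
    # Hypothetical capability table keyed by (issue_category, action).
    ("performance", "delete"): {"enabled": False, "disabledReason": "Not yet supported"},
    ("error", "delete"): {"enabled": True, "disabledReason": None},
}

def delete_menu_item(issue_category: str, has_access: bool, open_modal) -> dict:
    cap = CAPABILITIES.get((issue_category, "delete"), {"enabled": True, "disabledReason": None})
    return {
        "key": "delete",
        "hidden": not has_access,
        "disabled": not cap["enabled"],
        "tooltip": cap["disabledReason"],
        # Only wire up the action when the capability is enabled.
        "onAction": open_modal if cap["enabled"] else None,
    }

item = delete_menu_item("performance", True, lambda: None)
assert item["disabled"] and item["tooltip"] == "Not yet supported"
```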
|
896f6e6933bc5eb6cf2eec891e8930acd50aade1
|
2022-05-23 12:50:49
|
Evan Purkhiser
|
ref(js): Wrap `cloneElement` return in a Fragment (#34894)
| false
|
Wrap `cloneElement` return in a Fragment (#34894)
|
ref
|
diff --git a/static/app/views/alerts/builder/projectProvider.tsx b/static/app/views/alerts/builder/projectProvider.tsx
index 407371a82a257e..a7ce52e2923d6e 100644
--- a/static/app/views/alerts/builder/projectProvider.tsx
+++ b/static/app/views/alerts/builder/projectProvider.tsx
@@ -1,4 +1,4 @@
-import {cloneElement, isValidElement, useEffect} from 'react';
+import {cloneElement, Fragment, isValidElement, useEffect} from 'react';
import {RouteComponentProps} from 'react-router';
import {fetchOrgMembers} from 'sentry/actionCreators/members';
@@ -61,15 +61,19 @@ function AlertBuilderProjectProvider(props: Props) {
);
}
- return children && isValidElement(children)
- ? cloneElement(children, {
- ...other,
- ...children.props,
- project,
- projectId: useFirstProject ? project.slug : projectId,
- organization,
- })
- : children;
+ return (
+ <Fragment>
+ {children && isValidElement(children)
+ ? cloneElement(children, {
+ ...other,
+ ...children.props,
+ project,
+ projectId: useFirstProject ? project.slug : projectId,
+ organization,
+ })
+ : children}
+ </Fragment>
+ );
}
export default AlertBuilderProjectProvider;
|
17edfd6f83343b485a57de1212dd8147d181c3a0
|
2022-10-17 19:31:01
|
edwardgou-sentry
|
feat(mobile-exp): Backend feature flag for issue detail tags improvement (#40059)
| false
|
Backend feature flag for issue detail tags improvement (#40059)
|
feat
|
diff --git a/src/sentry/conf/server.py b/src/sentry/conf/server.py
index 71edf8181ba4e5..05fd11480488f8 100644
--- a/src/sentry/conf/server.py
+++ b/src/sentry/conf/server.py
@@ -1201,6 +1201,8 @@ def SOCIAL_AUTH_DEFAULT_USERNAME():
"organizations:mobile-screenshots": False,
# Enable the mobile screenshot gallery in the attachments tab
"organizations:mobile-screenshot-gallery": False,
+ # Enable tag improvements in the issue details page
+ "organizations:issue-details-tag-improvements": False,
# Enable the release details performance section
"organizations:release-comparison-performance": False,
# Enable team insights page
diff --git a/src/sentry/features/__init__.py b/src/sentry/features/__init__.py
index b7fe79b869137d..eca0ebd3c156ba 100644
--- a/src/sentry/features/__init__.py
+++ b/src/sentry/features/__init__.py
@@ -93,6 +93,7 @@
default_manager.add("organizations:issue-actions-v2", OrganizationFeature, True)
default_manager.add("organizations:issue-alert-preview", OrganizationFeature, True)
default_manager.add("organizations:issue-details-owners", OrganizationFeature, True)
+default_manager.add("organizations:issue-details-tag-improvements", OrganizationFeature, True)
default_manager.add("organizations:issue-list-removal-action", OrganizationFeature, True)
default_manager.add("organizations:issue-list-trend-sort", OrganizationFeature, True)
default_manager.add("organizations:issue-search-allow-postgres-only-search", OrganizationFeature, True)
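
Registering the flag with a False default in server.py and as an OrganizationFeature makes it checkable server-side and exposed to the frontend. A sketch of a typical check using Sentry's features.has helper (the helper usage is assumed from general Sentry conventions, not shown in this diff):

```python
from sentry import features

def use_improved_tags(organization, actor=None) -> bool:
    # Returns False by default because server.py registers the flag as False;
    # it has to be enabled per organization.
    return features.has(
        "organizations:issue-details-tag-improvements", organization, actor=actor
    )
```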
|
889f008af0a71b420967b3bb5096f7270e824c37
|
2024-02-22 22:32:44
|
Tony Xiao
|
feat(metrics): Support custom samples (#65582)
| false
|
Support custom samples (#65582)
|
feat
|
diff --git a/src/sentry/search/events/builder/__init__.py b/src/sentry/search/events/builder/__init__.py
index 76c1f9383cd846..a3fa9058b1a388 100644
--- a/src/sentry/search/events/builder/__init__.py
+++ b/src/sentry/search/events/builder/__init__.py
@@ -15,6 +15,7 @@
TimeseriesMetricQueryBuilder,
TopMetricsQueryBuilder,
)
+from .metrics_summaries import MetricsSummariesQueryBuilder # NOQA
from .profile_functions import ( # NOQA
ProfileFunctionsQueryBuilder,
ProfileFunctionsTimeseriesQueryBuilder,
@@ -49,6 +50,7 @@
"ErrorsQueryBuilder",
"HistogramMetricQueryBuilder",
"MetricsQueryBuilder",
+ "MetricsSummariesQueryBuilder",
"TimeseriesMetricQueryBuilder",
"SpansMetricsQueryBuilder",
"ProfilesQueryBuilder",
diff --git a/src/sentry/search/events/builder/discover.py b/src/sentry/search/events/builder/discover.py
index c1f1ec9fe2a5f3..c2601d8bdb733c 100644
--- a/src/sentry/search/events/builder/discover.py
+++ b/src/sentry/search/events/builder/discover.py
@@ -52,6 +52,7 @@
from sentry.search.events.datasets.discover import DiscoverDatasetConfig
from sentry.search.events.datasets.metrics import MetricsDatasetConfig
from sentry.search.events.datasets.metrics_layer import MetricsLayerDatasetConfig
+from sentry.search.events.datasets.metrics_summaries import MetricsSummariesDatasetConfig
from sentry.search.events.datasets.profile_functions import ProfileFunctionsDatasetConfig
from sentry.search.events.datasets.profiles import ProfilesDatasetConfig
from sentry.search.events.datasets.sessions import SessionsDatasetConfig
@@ -365,6 +366,8 @@ def load_config(
self.config = ProfileFunctionsDatasetConfig(self)
elif self.dataset == Dataset.SpansIndexed:
self.config = SpansIndexedDatasetConfig(self)
+ elif self.dataset == Dataset.MetricsSummaries:
+ self.config = MetricsSummariesDatasetConfig(self)
else:
raise NotImplementedError(f"Data Set configuration not found for {self.dataset}.")
diff --git a/src/sentry/search/events/builder/metrics_summaries.py b/src/sentry/search/events/builder/metrics_summaries.py
new file mode 100644
index 00000000000000..661cfc957de1e7
--- /dev/null
+++ b/src/sentry/search/events/builder/metrics_summaries.py
@@ -0,0 +1,31 @@
+from snuba_sdk import Entity, Flags, Query, Request
+
+from sentry.search.events.builder import QueryBuilder
+from sentry.snuba.dataset import Dataset
+
+
+class MetricsSummariesQueryBuilder(QueryBuilder):
+ requires_organization_condition = False
+
+ def get_snql_query(self) -> Request:
+ self.validate_having_clause()
+
+ return Request(
+ # the metrics summaries entity exists within the spans indexed dataset
+ dataset=Dataset.SpansIndexed.value,
+ app_id="default",
+ query=Query(
+ match=Entity(self.dataset.value, sample=self.sample_rate),
+ select=self.columns,
+ array_join=self.array_join,
+ where=self.where,
+ having=self.having,
+ groupby=self.groupby,
+ orderby=self.orderby,
+ limit=self.limit,
+ offset=self.offset,
+ limitby=self.limitby,
+ ),
+ flags=Flags(turbo=self.turbo),
+ tenant_ids=self.tenant_ids,
+ )
diff --git a/src/sentry/search/events/datasets/metrics_summaries.py b/src/sentry/search/events/datasets/metrics_summaries.py
new file mode 100644
index 00000000000000..0703bd879ca103
--- /dev/null
+++ b/src/sentry/search/events/datasets/metrics_summaries.py
@@ -0,0 +1,99 @@
+from __future__ import annotations
+
+from collections.abc import Callable, Mapping
+
+from snuba_sdk import Column, Direction, Function, OrderBy
+
+from sentry.api.event_search import SearchFilter
+from sentry.search.events import builder, constants
+from sentry.search.events.datasets import field_aliases, filter_aliases
+from sentry.search.events.datasets.base import DatasetConfig
+from sentry.search.events.fields import IntervalDefault, SnQLFunction
+from sentry.search.events.types import SelectType, WhereType
+
+
+class MetricsSummariesDatasetConfig(DatasetConfig):
+ def __init__(self, builder: builder.QueryBuilder):
+ self.builder = builder
+
+ @property
+ def search_filter_converter(
+ self,
+ ) -> Mapping[str, Callable[[SearchFilter], WhereType | None]]:
+ return {
+ constants.PROJECT_ALIAS: self._project_slug_filter_converter,
+ constants.PROJECT_NAME_ALIAS: self._project_slug_filter_converter,
+ }
+
+ @property
+ def field_alias_converter(self) -> Mapping[str, Callable[[str], SelectType]]:
+ return {
+ constants.PROJECT_ALIAS: self._resolve_project_slug_alias,
+ constants.PROJECT_NAME_ALIAS: self._resolve_project_slug_alias,
+ }
+
+ @property
+ def function_converter(self) -> Mapping[str, SnQLFunction]:
+ return {
+ function.name: function
+ for function in [
+ SnQLFunction(
+ "example",
+ snql_aggregate=lambda args, alias: Function(
+ "arrayElement",
+ [
+ Function(
+ "groupArraySample(1, 1)", # TODO: paginate via the seed
+ [
+ Function(
+ "tuple",
+ [
+ Column("group"),
+ Column("end_timestamp"),
+ Column("span_id"),
+ ],
+ ),
+ ],
+ ),
+ 1,
+ ],
+ alias,
+ ),
+ private=True,
+ ),
+ SnQLFunction(
+ "rounded_timestamp",
+ required_args=[IntervalDefault("interval", 1, None)],
+ snql_column=lambda args, alias: Function(
+ "toUInt32",
+ [
+ Function(
+ "multiply",
+ [
+ Function(
+ "intDiv",
+ [
+ Function("toUInt32", [Column("end_timestamp")]),
+ args["interval"],
+ ],
+ ),
+ args["interval"],
+ ],
+ ),
+ ],
+ alias,
+ ),
+ private=True,
+ ),
+ ]
+ }
+
+ @property
+ def orderby_converter(self) -> Mapping[str, Callable[[Direction], OrderBy]]:
+ return {}
+
+ def _project_slug_filter_converter(self, search_filter: SearchFilter) -> WhereType | None:
+ return filter_aliases.project_slug_converter(self.builder, search_filter)
+
+ def _resolve_project_slug_alias(self, alias: str) -> SelectType:
+ return field_aliases.resolve_project_slug_alias(self.builder, alias)
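
The private rounded_timestamp(interval) function buckets each summary's end timestamp to the start of its interval with an integer divide-and-multiply; example() then samples one span per bucket. The bucketing in plain Python:

```python
def rounded_timestamp(ts: int, interval: int) -> int:
    # Mirrors toUInt32(intDiv(end_timestamp, interval) * interval) from the
    # SnQL definition: every timestamp in the same interval maps to its start.
    return (ts // interval) * interval

assert rounded_timestamp(1_708_000_123, 3600) == 1_707_998_400
assert rounded_timestamp(1_707_998_400, 3600) == 1_707_998_400  # already aligned
```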
diff --git a/src/sentry/sentry_metrics/querying/samples_list.py b/src/sentry/sentry_metrics/querying/samples_list.py
index c9194cc847ac6e..24774a067eb89e 100644
--- a/src/sentry/sentry_metrics/querying/samples_list.py
+++ b/src/sentry/sentry_metrics/querying/samples_list.py
@@ -5,10 +5,20 @@
from snuba_sdk import And, Column, Condition, Function, Op, Or
from sentry import options
-from sentry.search.events.builder import QueryBuilder, SpansIndexedQueryBuilder
+from sentry.search.events.builder import (
+ MetricsSummariesQueryBuilder,
+ QueryBuilder,
+ SpansIndexedQueryBuilder,
+)
from sentry.search.events.types import QueryBuilderConfig, SnubaParams
from sentry.snuba.dataset import Dataset
-from sentry.snuba.metrics.naming_layer.mri import SpanMRI, TransactionMRI, is_measurement, parse_mri
+from sentry.snuba.metrics.naming_layer.mri import (
+ SpanMRI,
+ TransactionMRI,
+ is_custom_metric,
+ is_measurement,
+ parse_mri,
+)
from sentry.snuba.referrer import Referrer
@@ -33,7 +43,7 @@ def __init__(
@classmethod
@abstractmethod
- def supports(cls, metric_mri: str) -> bool:
+ def supports(cls, mri: str) -> bool:
raise NotImplementedError
@abstractmethod
@@ -243,10 +253,52 @@ def get_span_keys(self, offset: int, limit: int) -> list[tuple[str, str, str]]:
]
+class CustomSamplesListExecutor(SamplesListExecutor):
+ @classmethod
+ def supports(cls, mri: str) -> bool:
+ parsed_mri = parse_mri(mri)
+ if parsed_mri is not None and is_custom_metric(parsed_mri):
+ return True
+ return False
+
+ def execute(self, offset, limit):
+ span_keys = self.get_span_keys(offset, limit)
+ return self.get_spans_by_key(span_keys)
+
+ def get_span_keys(self, offset: int, limit: int) -> list[tuple[str, str, str]]:
+ rounded_timestamp = f"rounded_timestamp({self.rollup})"
+
+ builder = MetricsSummariesQueryBuilder(
+ Dataset.MetricsSummaries,
+ self.params,
+ snuba_params=self.snuba_params,
+ query=self.query,
+ selected_columns=[rounded_timestamp, "example()"],
+ limit=limit,
+ offset=offset,
+ # This table has a poor SAMPLE BY so DO NOT use it for now
+ # sample_rate=options.get("metrics.sample-list.sample-rate"),
+ config=QueryBuilderConfig(functions_acl=["rounded_timestamp", "example"]),
+ )
+
+ query_results = builder.run_query(self.referrer.value)
+ result = builder.process_results(query_results)
+
+ return [
+ (
+ row["example"][0], # group
+ row["example"][1], # timestamp
+ row["example"][2], # span_id
+ )
+ for row in result["data"]
+ ]
+
+
SAMPLE_LIST_EXECUTORS = [
SpansSamplesListExecutor,
TransactionDurationSamplesListExecutor,
MeasurementsSamplesListExecutor,
+ CustomSamplesListExecutor,
]
diff --git a/src/sentry/snuba/dataset.py b/src/sentry/snuba/dataset.py
index 14183ada6a9301..ae0eccb5d5efb1 100644
--- a/src/sentry/snuba/dataset.py
+++ b/src/sentry/snuba/dataset.py
@@ -52,6 +52,12 @@ class Dataset(Enum):
indexed spans are similar to indexed transactions in the fields available to search
"""
+ MetricsSummaries = "metrics_summaries"
+ """
+ Summaries of all metrics within a span. Used to correlate indexed
+ spans to a metric.
+ """
+
@unique
class EntityKey(Enum):
diff --git a/src/sentry/snuba/metrics_summaries.py b/src/sentry/snuba/metrics_summaries.py
new file mode 100644
index 00000000000000..2aaa73f69e2bf8
--- /dev/null
+++ b/src/sentry/snuba/metrics_summaries.py
@@ -0,0 +1,57 @@
+from sentry.search.events.builder import MetricsSummariesQueryBuilder
+from sentry.search.events.types import QueryBuilderConfig
+from sentry.snuba.dataset import Dataset
+from sentry.snuba.metrics.extraction import MetricSpecType
+
+
+def query(
+ selected_columns,
+ query,
+ params,
+ snuba_params=None,
+ equations=None,
+ orderby=None,
+ offset=None,
+ limit=50,
+ referrer=None,
+ auto_fields=False,
+ auto_aggregations=False,
+ include_equation_fields=False,
+ allow_metric_aggregates=False,
+ use_aggregate_conditions=False,
+ conditions=None,
+ functions_acl=None,
+ transform_alias_to_input_format=False,
+ sample=None,
+ has_metrics=False,
+ use_metrics_layer=False,
+ skip_tag_resolution=False,
+ extra_columns=None,
+ on_demand_metrics_enabled=False,
+ on_demand_metrics_type: MetricSpecType | None = None,
+):
+ builder = MetricsSummariesQueryBuilder(
+ Dataset.MetricsSummaries,
+ params,
+ snuba_params=snuba_params,
+ query=query,
+ selected_columns=selected_columns,
+ equations=equations,
+ orderby=orderby,
+ limit=limit,
+ offset=offset,
+ sample_rate=sample,
+ config=QueryBuilderConfig(
+ has_metrics=has_metrics,
+ transform_alias_to_input_format=transform_alias_to_input_format,
+ skip_tag_resolution=skip_tag_resolution,
+ equation_config={"auto_add": include_equation_fields},
+ auto_fields=auto_fields,
+ auto_aggregations=auto_aggregations,
+ use_aggregate_conditions=use_aggregate_conditions,
+ functions_acl=functions_acl,
+ ),
+ )
+
+ result = builder.process_results(builder.run_query(referrer))
+ return result
diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py
index 2ab31b4fdcb56e..728e4e4f748172 100644
--- a/src/sentry/utils/snuba.py
+++ b/src/sentry/utils/snuba.py
@@ -147,6 +147,17 @@ def log_snuba_info(content):
"app_start_type": "sentry_tags[app_start_type]",
}
+METRICS_SUMMARIES_COLUMN_MAP = {
+ "project": "project_id",
+ "id": "span_id",
+ "trace": "trace_id",
+ "metric": "metric_mri",
+ "timestamp": "end_timestamp",
+ "segment.id": "segment_id",
+ "span.duration": "duration",
+ "span.group": "group",
+}
+
SPAN_COLUMN_MAP.update(
{col.value.alias: col.value.spans_name for col in Columns if col.value.spans_name is not None}
)
@@ -197,6 +208,7 @@ def log_snuba_info(content):
Dataset.Discover: DISCOVER_COLUMN_MAP,
Dataset.Sessions: SESSIONS_SNUBA_MAP,
Dataset.Metrics: METRICS_COLUMN_MAP,
+ Dataset.MetricsSummaries: METRICS_SUMMARIES_COLUMN_MAP,
Dataset.PerformanceMetrics: METRICS_COLUMN_MAP,
Dataset.SpansIndexed: SPAN_COLUMN_MAP,
Dataset.IssuePlatform: ISSUE_PLATFORM_MAP,
@@ -213,6 +225,7 @@ def log_snuba_info(content):
Dataset.Sessions: SESSIONS_FIELD_LIST,
Dataset.IssuePlatform: list(ISSUE_PLATFORM_MAP.values()),
Dataset.SpansIndexed: list(SPAN_COLUMN_MAP.values()),
+ Dataset.MetricsSummaries: list(METRICS_SUMMARIES_COLUMN_MAP.values()),
}
SNUBA_OR = "or"
diff --git a/tests/sentry/api/endpoints/test_organization_metrics.py b/tests/sentry/api/endpoints/test_organization_metrics.py
index e2f68a32259643..e8b4c75fdc549c 100644
--- a/tests/sentry/api/endpoints/test_organization_metrics.py
+++ b/tests/sentry/api/endpoints/test_organization_metrics.py
@@ -273,3 +273,39 @@ def test_measurement_samples(self):
expected = {int(span_id, 16) for span_id in good_span_ids}
actual = {int(row["id"], 16) for row in response.data["data"]}
assert actual == expected
+
+ def test_custom_samples(self):
+ mri = "d:custom/value@millisecond"
+ span_ids = [uuid4().hex[:16] for _ in range(10)]
+ for i, span_id in enumerate(span_ids):
+ self.store_indexed_span(
+ self.project.id,
+ uuid4().hex,
+ uuid4().hex,
+ span_id=span_id,
+ timestamp=before_now(days=i, minutes=10),
+ group=uuid4().hex[:16], # we need a non 0 group
+ store_metrics_summary={
+ mri: [
+ {
+ "min": 10.0,
+ "max": 100.0,
+ "sum": 110.0,
+ "count": 2,
+ "tags": {},
+ }
+ ]
+ },
+ )
+
+ query = {
+ "mri": mri,
+ "field": ["id"],
+ "project": [self.project.id],
+ "statsPeriod": "14d",
+ }
+ response = self.do_request(query)
+ assert response.status_code == 200, response.data
+ expected = {int(span_id, 16) for span_id in span_ids}
+ actual = {int(row["id"], 16) for row in response.data["data"]}
+ assert actual == expected
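The new query() helper builds a MetricsSummariesQueryBuilder against Dataset.MetricsSummaries, and the column map added to snuba.py registers which user-facing field aliases that dataset understands. A minimal, self-contained sketch of that alias-to-column translation step (the mapping is copied from the diff; resolve_column is an illustrative stand-in, not part of Sentry's code):

# Alias -> Snuba column translation for the metrics_summaries entity.
# The mapping mirrors METRICS_SUMMARIES_COLUMN_MAP from the diff; the helper
# below is a hypothetical stand-in for the query builder's resolution step.
METRICS_SUMMARIES_COLUMN_MAP = {
    "project": "project_id",
    "id": "span_id",
    "trace": "trace_id",
    "metric": "metric_mri",
    "timestamp": "end_timestamp",
    "segment.id": "segment_id",
    "span.duration": "duration",
    "span.group": "group",
}

def resolve_column(alias: str) -> str:
    # Unknown aliases fall through unchanged here; the real builder applies its
    # own validation, so treat that fallback as an assumption of this sketch.
    return METRICS_SUMMARIES_COLUMN_MAP.get(alias, alias)

assert resolve_column("metric") == "metric_mri"
assert resolve_column("span.duration") == "duration"

Under this mapping, fields like "id" and "timestamp" resolve to span_id and end_timestamp when the summaries entity is queried.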
|
ca68146bd14bd5cab5e4771c1257e0d3f9eff46d
|
2021-12-22 00:46:34
|
NisanthanNanthakumar
|
feat(integrations): Show integrations pending deletion (#30744)
| false
|
Show integrations pending deletion (#30744)
|
feat
|
diff --git a/static/app/views/organizationIntegrations/installedIntegration.tsx b/static/app/views/organizationIntegrations/installedIntegration.tsx
index 2db143978edcec..1e70ace4f7693e 100644
--- a/static/app/views/organizationIntegrations/installedIntegration.tsx
+++ b/static/app/views/organizationIntegrations/installedIntegration.tsx
@@ -110,79 +110,81 @@ export default class InstalledIntegration extends React.Component<Props> {
return (
<Access access={['org:integrations']}>
- {({hasAccess}) => (
- <IntegrationFlex key={integration.id} className={className}>
- <IntegrationItemBox>
- <IntegrationItem integration={integration} />
- </IntegrationItemBox>
- <div>
- <Tooltip
- disabled={hasAccess}
- position="left"
- title={t(
- 'You must be an organization owner, manager or admin to configure'
- )}
- >
- {requiresUpgrade && (
- <AddIntegrationButton
- analyticsParams={{
- view: 'integrations_directory_integration_detail',
- already_installed: true,
- }}
- buttonText={t('Update Now')}
- data-test-id="integration-upgrade-button"
- disabled={!(hasAccess && this.integrationStatus === 'active')}
- icon={<IconWarning />}
- onAddIntegration={() => {}}
- organization={organization}
- provider={provider}
- priority="primary"
- size="small"
- />
- )}
- <StyledButton
- borderless
- icon={<IconSettings />}
- disabled={!(hasAccess && this.integrationStatus === 'active')}
- to={`/settings/${organization.slug}/integrations/${provider.key}/${integration.id}/`}
- data-test-id="integration-configure-button"
- >
- {t('Configure')}
- </StyledButton>
- </Tooltip>
- </div>
- <div>
- <Tooltip
- disabled={hasAccess}
- title={t(
- 'You must be an organization owner, manager or admin to uninstall'
- )}
- >
- <Confirm
- priority="danger"
- onConfirming={this.handleUninstallClick}
- disabled={!hasAccess}
- {...removeConfirmProps}
+ {({hasAccess}) => {
+ const disableAction = !(hasAccess && this.integrationStatus === 'active');
+ return (
+ <IntegrationFlex key={integration.id} className={className}>
+ <IntegrationItemBox>
+ <IntegrationItem integration={integration} />
+ </IntegrationItemBox>
+ <div>
+ <Tooltip
+ disabled={hasAccess}
+ position="left"
+ title={t(
+ 'You must be an organization owner, manager or admin to configure'
+ )}
>
+ {requiresUpgrade && (
+ <AddIntegrationButton
+ analyticsParams={{
+ view: 'integrations_directory_integration_detail',
+ already_installed: true,
+ }}
+ buttonText={t('Update Now')}
+ data-test-id="integration-upgrade-button"
+ disabled={disableAction}
+ icon={<IconWarning />}
+ onAddIntegration={() => {}}
+ organization={organization}
+ provider={provider}
+ priority="primary"
+ size="small"
+ />
+ )}
<StyledButton
- disabled={!hasAccess}
borderless
- icon={<IconDelete />}
- data-test-id="integration-remove-button"
+ icon={<IconSettings />}
+ disabled={disableAction}
+ to={`/settings/${organization.slug}/integrations/${provider.key}/${integration.id}/`}
+ data-test-id="integration-configure-button"
>
- {t('Uninstall')}
+ {t('Configure')}
</StyledButton>
- </Confirm>
- </Tooltip>
- </div>
-
- <StyledIntegrationStatus
- status={this.integrationStatus}
- // Let the hook handle the alert for disabled org integrations
- hideTooltip={integration.organizationIntegrationStatus === 'disabled'}
- />
- </IntegrationFlex>
- )}
+ </Tooltip>
+ </div>
+ <div>
+ <Tooltip
+ disabled={hasAccess}
+ title={t(
+ 'You must be an organization owner, manager or admin to uninstall'
+ )}
+ >
+ <Confirm
+ priority="danger"
+ onConfirming={this.handleUninstallClick}
+ disabled={!hasAccess}
+ {...removeConfirmProps}
+ >
+ <StyledButton
+ disabled={!hasAccess}
+ borderless
+ icon={<IconDelete />}
+ data-test-id="integration-remove-button"
+ >
+ {t('Uninstall')}
+ </StyledButton>
+ </Confirm>
+ </Tooltip>
+ </div>
+ <StyledIntegrationStatus
+ status={this.integrationStatus}
+ // Let the hook handle the alert for disabled org integrations
+ hideTooltip={integration.organizationIntegrationStatus === 'disabled'}
+ />
+ </IntegrationFlex>
+ );
+ }}
</Access>
);
}
@@ -213,8 +215,12 @@ const IntegrationStatus = (
const inner = (
<div {...p}>
<CircleIndicator size={6} color={color} />
- <IntegrationStatusText>{`${
- status === 'active' ? t('enabled') : t('disabled')
+ <IntegrationStatusText data-test-id="integration-status">{`${
+ status === 'active'
+ ? t('enabled')
+ : status === 'disabled'
+ ? t('disabled')
+ : t('pending deletion')
}`}</IntegrationStatusText>
</div>
);
@@ -224,8 +230,10 @@ const IntegrationStatus = (
<Tooltip
title={
status === 'active'
- ? t('This Integration can be disabled by clicking the Uninstall button')
- : t('This Integration has been disconnected from the external provider')
+ ? t('This integration can be disabled by clicking the Uninstall button')
+ : status === 'disabled'
+ ? t('This integration has been disconnected from the external provider')
+ : t('This integration is pending deletion.')
}
>
{inner}
diff --git a/static/app/views/organizationIntegrations/integrationDetailedView.tsx b/static/app/views/organizationIntegrations/integrationDetailedView.tsx
index b948e2fe3a0545..6795b9d84f1da8 100644
--- a/static/app/views/organizationIntegrations/integrationDetailedView.tsx
+++ b/static/app/views/organizationIntegrations/integrationDetailedView.tsx
@@ -10,7 +10,7 @@ import HookOrDefault from 'sentry/components/hookOrDefault';
import {IconFlag, IconOpen, IconWarning} from 'sentry/icons';
import {t} from 'sentry/locale';
import space from 'sentry/styles/space';
-import {Integration, IntegrationProvider} from 'sentry/types';
+import {Integration, IntegrationProvider, ObjectStatus} from 'sentry/types';
import {getAlertText} from 'sentry/utils/integrationUtil';
import withOrganization from 'sentry/utils/withOrganization';
@@ -140,7 +140,12 @@ class IntegrationDetailedView extends AbstractIntegrationDetailedView<
const origIntegrations = [...this.state.configurations];
- const integrations = this.state.configurations.filter(i => i.id !== integration.id);
+ const integrations = this.state.configurations.map(i =>
+ i.id === integration.id
+ ? {...i, organizationIntegrationStatus: 'pending_deletion' as ObjectStatus}
+ : i
+ );
+
this.setState({configurations: integrations});
const options: RequestOptions = {
diff --git a/tests/acceptance/test_organization_integration_detail_view.py b/tests/acceptance/test_organization_integration_detail_view.py
index 1c98b8a7ef38cb..25268cd6a1acb8 100644
--- a/tests/acceptance/test_organization_integration_detail_view.py
+++ b/tests/acceptance/test_organization_integration_detail_view.py
@@ -70,5 +70,8 @@ def test_uninstallation(self):
detail_view_page = OrganizationIntegrationDetailViewPage(browser=self.browser)
assert self.browser.element_exists('[aria-label="Configure"]')
detail_view_page.uninstall()
- assert not self.browser.element_exists('[aria-label="Configure"]')
+
+ assert (
+ self.browser.element('[data-test-id="integration-status"]').text == "Pending Deletion"
+ )
self.browser.snapshot(name="integrations - integration detail no configurations")
|
bec23d5301a4c02579968d228e7ae5b3601d83cc
|
2024-06-12 22:55:01
|
Tony Xiao
|
feat(trace-explorer): Link to docs (#72606)
| false
|
Link to docs (#72606)
|
feat
|
diff --git a/static/app/views/traces/index.tsx b/static/app/views/traces/index.tsx
index e7292b51cacf71..ae2a3f678b304e 100644
--- a/static/app/views/traces/index.tsx
+++ b/static/app/views/traces/index.tsx
@@ -3,30 +3,35 @@ import styled from '@emotion/styled';
import Feature from 'sentry/components/acl/feature';
import {Alert} from 'sentry/components/alert';
-import {Breadcrumbs} from 'sentry/components/breadcrumbs';
+import FeatureBadge from 'sentry/components/badge/featureBadge';
import FeedbackWidgetButton from 'sentry/components/feedback/widget/feedbackWidgetButton';
import * as Layout from 'sentry/components/layouts/thirds';
import PageFiltersContainer from 'sentry/components/organizations/pageFilters/container';
+import {PageHeadingQuestionTooltip} from 'sentry/components/pageHeadingQuestionTooltip';
import SentryDocumentTitle from 'sentry/components/sentryDocumentTitle';
import {t} from 'sentry/locale';
import useOrganization from 'sentry/utils/useOrganization';
import {Content} from './content';
+export const TRACE_EXPLORER_DOCS_URL = 'https://docs.sentry.io/product/explore/traces/';
+
function TraceExplorerLandingPage() {
return (
<Fragment>
<Layout.Header>
<Layout.HeaderContent>
- <Breadcrumbs
- crumbs={[
- {
- label: 'Traces',
- },
- ]}
- />
<HeaderContentBar>
- <Layout.Title>{t('Traces')}</Layout.Title>
+ <Layout.Title>
+ {t('Traces')}
+ <PageHeadingQuestionTooltip
+ docsUrl={TRACE_EXPLORER_DOCS_URL}
+ title={t(
+ 'Traces lets you search for individual spans that make up a trace, linked by a trace id.'
+ )}
+ />
+ <FeatureBadge type="beta" />
+ </Layout.Title>
<FeedbackWidgetButton />
</HeaderContentBar>
</Layout.HeaderContent>
|
8ac74352694af33add65799bb481a99a287ef10d
|
2024-03-27 03:06:51
|
Evan Purkhiser
|
ref(js): Remove a couple more default exports of Tag (#67750)
| false
|
Remove a couple more default exports of Tag (#67750)
|
ref
|
diff --git a/static/app/components/events/interfaces/breadcrumbs/breadcrumb/level.tsx b/static/app/components/events/interfaces/breadcrumbs/breadcrumb/level.tsx
index 9898f0033dc686..278e705be34313 100644
--- a/static/app/components/events/interfaces/breadcrumbs/breadcrumb/level.tsx
+++ b/static/app/components/events/interfaces/breadcrumbs/breadcrumb/level.tsx
@@ -1,7 +1,7 @@
import styled from '@emotion/styled';
import Highlight from 'sentry/components/highlight';
-import Tag, {Background} from 'sentry/components/tag';
+import {Background, Tag} from 'sentry/components/tag';
import {t} from 'sentry/locale';
import {BreadcrumbLevelType} from 'sentry/types/breadcrumbs';
diff --git a/static/app/components/quickTrace/styles.tsx b/static/app/components/quickTrace/styles.tsx
index fcc1ca530fede1..48aac3182f45dd 100644
--- a/static/app/components/quickTrace/styles.tsx
+++ b/static/app/components/quickTrace/styles.tsx
@@ -5,7 +5,7 @@ import type {LocationDescriptor} from 'history';
import MenuHeader from 'sentry/components/actions/menuHeader';
import ExternalLink from 'sentry/components/links/externalLink';
import MenuItem from 'sentry/components/menuItem';
-import Tag, {Background} from 'sentry/components/tag';
+import {Background, Tag} from 'sentry/components/tag';
import Truncate from 'sentry/components/truncate';
import {space} from 'sentry/styles/space';
import {getDuration} from 'sentry/utils/formatters';
|
7ef77c198e129eda4f6fc29dc263dd266b8825c2
|
2019-01-25 00:50:45
|
Lyn Nagara
|
ref(ui): Refactor short ID (#11693)
| false
|
Refactor short ID (#11693)
|
ref
|
diff --git a/src/sentry/static/sentry/app/components/shortId.jsx b/src/sentry/static/sentry/app/components/shortId.jsx
index a3d77a65cce8a4..8000626223e968 100644
--- a/src/sentry/static/sentry/app/components/shortId.jsx
+++ b/src/sentry/static/sentry/app/components/shortId.jsx
@@ -1,25 +1,18 @@
import PropTypes from 'prop-types';
import React from 'react';
-import createReactClass from 'create-react-class';
import styled from 'react-emotion';
-import ProjectState from 'app/mixins/projectState';
import AutoSelectText from 'app/components/autoSelectText';
-const ShortId = createReactClass({
- displayName: 'ShortId',
-
- propTypes: {
+export default class ShortId extends React.Component {
+ static propTypes = {
shortId: PropTypes.string,
- },
-
- mixins: [ProjectState],
-
+ };
preventPropagation(e) {
// this is a hack for the stream so the click handler doesn't
// affect this element
e.stopPropagation();
- },
+ }
render() {
let shortId = this.props.shortId;
@@ -31,11 +24,9 @@ const ShortId = createReactClass({
<AutoSelectText>{shortId}</AutoSelectText>
</StyledShortId>
);
- },
-});
+ }
+}
const StyledShortId = styled.div`
font-family: ${p => p.theme.text.familyMono};
`;
-
-export default ShortId;
|
77d4b866d60ec49f6320a28093300cc04ffde240
|
2022-08-19 00:23:58
|
Ryan Albrecht
|
fix(replays): Make sure to pass `?query` into the list-replays ajax request (#38019)
| false
|
Make sure to pass `?query` into the list-replays ajax request (#38019)
|
fix
|
diff --git a/static/app/views/replays/replays.tsx b/static/app/views/replays/replays.tsx
index b09f9e0f16d665..bb5a89ef3425d9 100644
--- a/static/app/views/replays/replays.tsx
+++ b/static/app/views/replays/replays.tsx
@@ -17,6 +17,7 @@ import useReplayList, {
DEFAULT_SORT,
REPLAY_LIST_FIELDS,
} from 'sentry/utils/replays/hooks/useReplayList';
+import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import useMedia from 'sentry/utils/useMedia';
import useOrganization from 'sentry/utils/useOrganization';
import ReplaysFilters from 'sentry/views/replays/filters';
@@ -31,6 +32,9 @@ function Replays({location}: Props) {
const minWidthIsSmall = useMedia(`(min-width: ${theme.breakpoints.small})`);
const eventView = useMemo(() => {
+ const query = decodeScalar(location.query.query, '');
+ const conditions = new MutableSearch(query);
+
return EventView.fromNewQueryWithLocation(
{
id: '',
@@ -38,6 +42,7 @@ function Replays({location}: Props) {
version: 2,
fields: REPLAY_LIST_FIELDS,
projects: [],
+ query: conditions.formatString(),
orderby: decodeScalar(location.query.sort, DEFAULT_SORT),
},
location
|
66319e54e7caec4b8108a1c0c71dc1aa45993890
|
2024-03-05 20:13:08
|
Riccardo Busetti
|
fix(ddm): Fix units normalization when * and / are used (#66304)
| false
|
Fix units normalization when * and / are used (#66304)
|
fix
|
diff --git a/src/sentry/sentry_metrics/querying/visitors/query_expression.py b/src/sentry/sentry_metrics/querying/visitors/query_expression.py
index d9c58161bf0513..3602e62b0317ed 100644
--- a/src/sentry/sentry_metrics/querying/visitors/query_expression.py
+++ b/src/sentry/sentry_metrics/querying/visitors/query_expression.py
@@ -1,6 +1,14 @@
from collections.abc import Sequence
-from snuba_sdk import AliasedExpression, Column, Condition, Formula, Op, Timeseries
+from snuba_sdk import (
+ AliasedExpression,
+ ArithmeticOperator,
+ Column,
+ Condition,
+ Formula,
+ Op,
+ Timeseries,
+)
from snuba_sdk.conditions import ConditionGroup
from sentry.models.environment import Environment
@@ -274,6 +282,10 @@ class UnitsNormalizationVisitor(QueryExpressionVisitor[QueryExpression]):
case units are incompatible.
"""
+ UNITLESS_FORMULA_FUNCTIONS = {
+ ArithmeticOperator.DIVIDE.value,
+ ArithmeticOperator.MULTIPLY.value,
+ }
UNITLESS_AGGREGATES = {"count", "count_unique"}
def __init__(self):
@@ -285,7 +297,26 @@ def __init__(self):
def _visit_formula(self, formula: Formula) -> QueryExpression:
self._is_formula = True
- return super()._visit_formula(formula)
+
+ has_all_timeseries_params = True
+ parameters = []
+ for parameter in formula.parameters:
+ if not isinstance(parameter, Timeseries):
+ has_all_timeseries_params = False
+
+ parameters.append(self.visit(parameter))
+
+        # If we have all timeseries as parameters of a formula and the function belongs to `*` or `/` we will
+ # not perform any units normalization.
+ # TODO: we might want to implement units normalization following a more mathematical approach like `ms^2` or
+ # `byte/s` but this is going to come at a later point.
+ if formula.function_name in self.UNITLESS_FORMULA_FUNCTIONS and has_all_timeseries_params:
+ raise NonNormalizableUnitsError(
+ "A unitless formula function is being used and has at least one "
+ "timeseries in one of its operands"
+ )
+
+ return formula.set_parameters(parameters)
def _visit_timeseries(self, timeseries: Timeseries) -> QueryExpression:
extracted_unit = self._extract_unit(timeseries=timeseries)
diff --git a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py
index db1b073c06751f..447b78fdd73c25 100644
--- a/tests/sentry/sentry_metrics/querying/data_v2/test_api.py
+++ b/tests/sentry/sentry_metrics/querying/data_v2/test_api.py
@@ -1266,7 +1266,7 @@ def test_query_with_basic_formula_and_coercible_units(self):
MetricsQueriesPlan()
.declare_query("query_1", query_1)
.declare_query("query_2", query_2)
- .apply_formula("$query_1 * $query_2")
+ .apply_formula("$query_1 + $query_2")
)
results = self.run_query(
@@ -1282,8 +1282,8 @@ def test_query_with_basic_formula_and_coercible_units(self):
data = results["data"]
assert len(data) == 1
assert data[0][0]["by"] == {}
- assert data[0][0]["series"] == [None, 300000.0, None]
- assert data[0][0]["totals"] == 300000.0
+ assert data[0][0]["series"] == [None, 20015.0, None]
+ assert data[0][0]["totals"] == 20015.0
meta = results["meta"]
assert len(meta) == 1
assert meta[0][1]["unit_family"] == UnitFamily.DURATION.value
@@ -1312,7 +1312,7 @@ def test_query_with_basic_formula_and_non_coercible_units(self):
MetricsQueriesPlan()
.declare_query("query_1", query_1)
.declare_query("query_2", query_2)
- .apply_formula("$query_1 * $query_2")
+ .apply_formula("$query_1 + $query_2")
)
results = self.run_query(
@@ -1328,8 +1328,8 @@ def test_query_with_basic_formula_and_non_coercible_units(self):
data = results["data"]
assert len(data) == 1
assert data[0][0]["by"] == {}
- assert data[0][0]["series"] == [None, 300.0, None]
- assert data[0][0]["totals"] == 300.0
+ assert data[0][0]["series"] == [None, 35.0, None]
+ assert data[0][0]["totals"] == 35.0
meta = results["meta"]
assert len(meta) == 1
assert meta[0][1]["unit_family"] is None
@@ -1358,7 +1358,7 @@ def test_query_with_basic_formula_and_unitless_aggregates(self):
MetricsQueriesPlan()
.declare_query("query_1", query_1)
.declare_query("query_2", query_2)
- .apply_formula("$query_1 * $query_2")
+ .apply_formula("$query_1 + $query_2")
)
results = self.run_query(
@@ -1374,8 +1374,8 @@ def test_query_with_basic_formula_and_unitless_aggregates(self):
data = results["data"]
assert len(data) == 1
assert data[0][0]["by"] == {}
- assert data[0][0]["series"] == [None, 20.0, None]
- assert data[0][0]["totals"] == 20.0
+ assert data[0][0]["series"] == [None, 21.0, None]
+ assert data[0][0]["totals"] == 21.0
meta = results["meta"]
assert len(meta) == 1
assert meta[0][1]["unit_family"] is None
@@ -1404,7 +1404,7 @@ def test_query_with_basic_formula_and_unknown_units(self):
MetricsQueriesPlan()
.declare_query("query_1", query_1)
.declare_query("query_2", query_2)
- .apply_formula("$query_1 * $query_2")
+ .apply_formula("$query_1 + $query_2")
)
results = self.run_query(
@@ -1420,10 +1420,63 @@ def test_query_with_basic_formula_and_unknown_units(self):
data = results["data"]
assert len(data) == 1
assert data[0][0]["by"] == {}
- assert data[0][0]["series"] == [None, 300.0, None]
- assert data[0][0]["totals"] == 300.0
+ assert data[0][0]["series"] == [None, 35.0, None]
+ assert data[0][0]["totals"] == 35.0
meta = results["meta"]
assert len(meta) == 1
assert meta[0][1]["unit_family"] == UnitFamily.UNKNOWN.value
assert meta[0][1]["unit"] is None
assert meta[0][1]["scaling_factor"] is None
+
+ @with_feature("organizations:ddm-metrics-api-unit-normalization")
+ def test_query_with_basic_formula_and_unitless_formula_functions(self):
+ mri_1 = "d:custom/page_load@nanosecond"
+ mri_2 = "d:custom/load_time@microsecond"
+ for mri, value in ((mri_1, 20), (mri_2, 15)):
+ self.store_metric(
+ self.project.organization.id,
+ self.project.id,
+ "distribution",
+ mri,
+ {},
+ self.ts(self.now()),
+ value,
+ UseCaseID.CUSTOM,
+ )
+
+ for formula, expected_result, expected_unit_family in (
+ ("$query_1 * $query_2", 300.0, None),
+ ("$query_1 * $query_2 + 25", 325.0, None),
+ ("$query_1 * $query_2 / 1", 300.0, None),
+ ("$query_1 * 2", 40.0, UnitFamily.DURATION.value),
+ ("$query_1 / 2", 10.0, UnitFamily.DURATION.value)
+ # disabled since the layer fails validation of the use cases used when nested formulas have only scalars
+ # ("$query_1 * (100 / 1)", 21.0)
+ ):
+ query_1 = self.mql("avg", mri_1)
+ query_2 = self.mql("sum", mri_2)
+ plan = (
+ MetricsQueriesPlan()
+ .declare_query("query_1", query_1)
+ .declare_query("query_2", query_2)
+ .apply_formula(formula)
+ )
+
+ results = self.run_query(
+ metrics_queries_plan=plan,
+ start=self.now() - timedelta(minutes=30),
+ end=self.now() + timedelta(hours=1, minutes=30),
+ interval=3600,
+ organization=self.project.organization,
+ projects=[self.project],
+ environments=[],
+ referrer="metrics.data.api",
+ )
+ data = results["data"]
+ assert len(data) == 1
+ assert data[0][0]["by"] == {}
+ assert data[0][0]["series"] == [None, expected_result, None]
+ assert data[0][0]["totals"] == expected_result
+ meta = results["meta"]
+ assert len(meta) == 1
+ assert meta[0][1]["unit_family"] == expected_unit_family
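The visitor change encodes a simple rule: when a formula multiplies or divides timeseries, unit normalization is skipped, since the result no longer carries a plain duration or size unit (the TODO notes that compound units like ms^2 or byte/s may come later). A rough, standalone model of that decision, using plain strings in place of the real Timeseries objects and ArithmeticOperator values:

# Standalone sketch of the normalization rule; not Sentry code. The real
# implementation raises NonNormalizableUnitsError from the visitor rather than
# returning a boolean, and checks isinstance(parameter, Timeseries).
UNITLESS_FORMULA_FUNCTIONS = {"multiply", "divide"}

def should_normalize_units(function_name: str, operands: list[str]) -> bool:
    # Operands are modeled as "timeseries" or "scalar" strings for brevity.
    all_timeseries = all(op == "timeseries" for op in operands)
    return not (function_name in UNITLESS_FORMULA_FUNCTIONS and all_timeseries)

# Mirrors the expectations in the new test: "$query_1 + $query_2" keeps its
# unit family, "$query_1 * $query_2" does not, and "$query_1 * 2" still does.
assert should_normalize_units("plus", ["timeseries", "timeseries"]) is True
assert should_normalize_units("multiply", ["timeseries", "timeseries"]) is False
assert should_normalize_units("multiply", ["timeseries", "scalar"]) is True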
|
0aada6483f549e382262069b164bae9fa3c2b03c
|
2024-03-20 04:25:40
|
Michelle Zhang
|
fix(feedback): only track sidebar views if currentProject exists (#67292)
| false
|
only track sidebar views if currentProject exists (#67292)
|
fix
|
diff --git a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx
index f67754d865427c..9dfa18981e34fd 100644
--- a/static/app/components/feedback/feedbackOnboarding/sidebar.tsx
+++ b/static/app/components/feedback/feedbackOnboarding/sidebar.tsx
@@ -82,14 +82,14 @@ function FeedbackOnboardingSidebar(props: CommonSidebarProps) {
}, [allProjects]);
useEffect(() => {
- if (isActive) {
+ if (isActive && currentProject && hasProjectAccess) {
// this tracks clicks from any source: feedback index, issue details feedback tab, banner callout, etc
trackAnalytics('feedback.list-view-setup-sidebar', {
organization,
platform: currentProject?.platform ?? 'unknown',
});
}
- }, [organization, currentProject, isActive]);
+ }, [organization, currentProject, isActive, hasProjectAccess]);
if (!isActive || !hasProjectAccess || !currentProject) {
return null;
|
2b38d51a1dc62a60b8545fcc497a640acf6037fe
|
2023-10-18 16:56:56
|
ArthurKnaus
|
ref(onboarding): Convert platform aspnetcore to new structure (#58229)
| false
|
Convert platform aspnetcore to new structure (#58229)
|
ref
|
diff --git a/static/app/gettingStartedDocs/dotnet/aspnetcore.spec.tsx b/static/app/gettingStartedDocs/dotnet/aspnetcore.spec.tsx
index 4b17fd65b2906c..7b308d7660f3f8 100644
--- a/static/app/gettingStartedDocs/dotnet/aspnetcore.spec.tsx
+++ b/static/app/gettingStartedDocs/dotnet/aspnetcore.spec.tsx
@@ -1,18 +1,33 @@
-import {render, screen} from 'sentry-test/reactTestingLibrary';
+import {renderWithOnboardingLayout} from 'sentry-test/onboarding/renderWithOnboardingLayout';
+import {screen} from 'sentry-test/reactTestingLibrary';
+import {textWithMarkupMatcher} from 'sentry-test/utils';
-import {StepTitle} from 'sentry/components/onboarding/gettingStartedDoc/step';
+import docs from './aspnetcore';
-import {GettingStartedWithAspnetcore, steps} from './aspnetcore';
+describe('aspnetcore onboarding docs', function () {
+  it('renders docs correctly', async function () {
+ renderWithOnboardingLayout(docs, {
+ releaseRegistry: {
+ 'sentry.dotnet.aspnetcore': {
+ version: '1.99.9',
+ },
+ },
+ });
-describe('GettingStartedWithAspnetcore', function () {
- it('renders doc correctly', function () {
- render(<GettingStartedWithAspnetcore dsn="test-dsn" projectSlug="test-project" />);
+ // Renders main headings
+ expect(screen.getByRole('heading', {name: 'Install'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Configure SDK'})).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Verify'})).toBeInTheDocument();
+ expect(
+ screen.getByRole('heading', {name: 'Performance Monitoring'})
+ ).toBeInTheDocument();
+ expect(screen.getByRole('heading', {name: 'Samples'})).toBeInTheDocument();
- // Steps
- for (const step of steps()) {
- expect(
- screen.getByRole('heading', {name: step.title ?? StepTitle[step.type]})
- ).toBeInTheDocument();
- }
+ // Renders SDK version from registry
+ expect(
+ await screen.findByText(
+ textWithMarkupMatcher(/Install-Package Sentry.AspNetCore -Version 1\.99\.9/)
+ )
+ ).toBeInTheDocument();
});
});
diff --git a/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx b/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
index 5208096ee2cfe8..2b30a78be520f6 100644
--- a/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
+++ b/static/app/gettingStartedDocs/dotnet/aspnetcore.tsx
@@ -3,69 +3,32 @@ import {Fragment} from 'react';
import ExternalLink from 'sentry/components/links/externalLink';
import List from 'sentry/components/list';
import ListItem from 'sentry/components/list/listItem';
-import {Layout, LayoutProps} from 'sentry/components/onboarding/gettingStartedDoc/layout';
-import {ModuleProps} from 'sentry/components/onboarding/gettingStartedDoc/sdkDocumentation';
import {StepType} from 'sentry/components/onboarding/gettingStartedDoc/step';
+import type {
+ Docs,
+ DocsParams,
+ OnboardingConfig,
+} from 'sentry/components/onboarding/gettingStartedDoc/types';
import {t, tct} from 'sentry/locale';
+import {getPackageVersion} from 'sentry/utils/gettingStartedDocs/getPackageVersion';
-// Configuration Start
-export const steps = ({
- dsn,
- sourcePackageRegistries,
-}: Partial<
- Pick<ModuleProps, 'dsn' | 'sourcePackageRegistries'>
-> = {}): LayoutProps['steps'] => [
- {
- type: StepType.INSTALL,
- description: (
- <p>
- {tct('Install the [strong:NuGet] package:', {
- strong: <strong />,
- })}
- </p>
- ),
- configurations: [
- {
- language: 'shell',
- partialLoading: sourcePackageRegistries?.isLoading,
- description: t('Package Manager:'),
- code: `Install-Package Sentry.AspNetCore -Version ${
- sourcePackageRegistries?.isLoading
- ? t('\u2026loading')
- : sourcePackageRegistries?.data?.['sentry.dotnet.aspnetcore']?.version ??
- '3.34.0'
- }`,
- },
- {
- language: 'shell',
- partialLoading: sourcePackageRegistries?.isLoading,
- description: t('Or .NET Core CLI:'),
- code: `dotnet add package Sentry.AspNetCore -v ${
- sourcePackageRegistries?.isLoading
- ? t('\u2026loading')
- : sourcePackageRegistries?.data?.['sentry.dotnet.aspnetcore']?.version ??
- '3.34.0'
- }`,
- },
- ],
- },
- {
- type: StepType.CONFIGURE,
- description: (
- <p>
- {tct(
- 'Add Sentry to [programCode:Program.cs] through the [webHostCode:WebHostBuilder]:',
- {
- webHostCode: <code />,
- programCode: <code />,
- }
- )}
- </p>
- ),
- configurations: [
- {
- language: 'csharp',
- code: `
+type Params = DocsParams;
+
+const getInstallSnippetPackageManager = (params: Params) => `
+Install-Package Sentry.AspNetCore -Version ${getPackageVersion(
+ params,
+ 'sentry.dotnet.aspnetcore',
+ '3.34.0'
+)}`;
+
+const getInstallSnippetCoreCli = (params: Params) => `
+dotnet add package Sentry.AspNetCore -v ${getPackageVersion(
+ params,
+ 'sentry.dotnet.aspnetcore',
+ '3.34.0'
+)}`;
+
+const getConfigureSnippet = (params: Params) => `
public static IHostBuilder CreateHostBuilder(string[] args) =>
Host.CreateDefaultBuilder(args)
.ConfigureWebHostDefaults(webBuilder =>
@@ -73,57 +36,16 @@ public static IHostBuilder CreateHostBuilder(string[] args) =>
// Add the following line:
webBuilder.UseSentry(o =>
{
- o.Dsn = "${dsn}";
+ o.Dsn = "${params.dsn}";
// When configuring for the first time, to see what the SDK is doing:
o.Debug = true;
// Set TracesSampleRate to 1.0 to capture 100% of transactions for performance monitoring.
// We recommend adjusting this value in production.
o.TracesSampleRate = 1.0;
});
- });
- `,
- },
- ],
- },
- {
- type: StepType.VERIFY,
- description: t('To verify your set up, you can capture a message with the SDK:'),
- configurations: [
- {
- language: 'csharp',
- code: 'SentrySdk.CaptureMessage("Hello Sentry");',
- },
- ],
- additionalInfo: (
- <p>
- {tct(
- "If you don't want to depend on the static class, the SDK registers a client in the DI container. In this case, you can [link:take [code:IHub] as a dependency].",
- {
- code: <code />,
- link: (
- <ExternalLink href="https://docs.sentry.io/platforms/dotnet/guides/aspnetcore/unit-testing/" />
- ),
- }
- )}
- </p>
- ),
- },
- {
- title: t('Performance Monitoring'),
- description: (
- <p>
- {tct(
- 'You can measure the performance of your endpoints by adding a middleware to [code:Startup.cs]:',
- {
- code: <code />,
- }
- )}
- </p>
- ),
- configurations: [
- {
- language: 'csharp',
- code: `
+ });`;
+
+const getPerformanceMiddlewareSnippet = () => `
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
@@ -149,15 +71,9 @@ public class Startup
pattern: "{controller=Home}/{action=Index}/{id?}");
});
}
-}
- `,
- },
- {
- description: t(
- "You'll be able to monitor the performance of your actions automatically. To add additional spans to it, you can use the API:"
- ),
- language: 'csharp',
- code: `
+}`;
+
+const getPerformanceSpansSnippet = () => `
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using Sentry;
@@ -184,56 +100,137 @@ public class HomeController : Controller
throw;
}
}
-}
- `,
- },
- ],
- },
- {
- title: t('Samples'),
- description: (
- <Fragment>
- {t(
- 'See the following examples that demonstrate how to integrate Sentry with various frameworks.'
- )}
- <List symbol="bullet">
- <ListItem>
- {tct(
- '[link:Multiple samples in the [code:dotnet] SDK repository] [strong:(C#)]',
- {
- link: (
- <ExternalLink href="https://github.com/getsentry/sentry-dotnet/tree/main/samples" />
- ),
- code: <code />,
+}`;
+
+const onboarding: OnboardingConfig = {
+ install: params => [
+ {
+ type: StepType.INSTALL,
+ description: tct('Install the [strong:NuGet] package:', {
+ strong: <strong />,
+ }),
+ configurations: [
+ {
+ partialLoading: params.sourcePackageRegistries.isLoading,
+ code: [
+ {
+ language: 'shell',
+ label: 'Package Manager',
+ value: 'packageManager',
+ code: getInstallSnippetPackageManager(params),
+ },
+ {
+ language: 'shell',
+ label: '.NET Core CLI',
+ value: 'coreCli',
+ code: getInstallSnippetCoreCli(params),
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ configure: params => [
+ {
+ type: StepType.CONFIGURE,
+ description: tct(
+ 'Add Sentry to [programCode:Program.cs] through the [webHostCode:WebHostBuilder]:',
+ {
+ webHostCode: <code />,
+ programCode: <code />,
+ }
+ ),
+ configurations: [
+ {
+ language: 'csharp',
+ code: getConfigureSnippet(params),
+ },
+ ],
+ },
+ ],
+ verify: () => [
+ {
+ type: StepType.VERIFY,
+ description: t('To verify your set up, you can capture a message with the SDK:'),
+ configurations: [
+ {
+ language: 'csharp',
+ code: 'SentrySdk.CaptureMessage("Hello Sentry");',
+ },
+ ],
+ additionalInfo: tct(
+ "If you don't want to depend on the static class, the SDK registers a client in the DI container. In this case, you can [link:take [code:IHub] as a dependency].",
+ {
+ code: <code />,
+ link: (
+ <ExternalLink href="https://docs.sentry.io/platforms/dotnet/guides/aspnetcore/unit-testing/" />
+ ),
+ }
+ ),
+ },
+ {
+ title: t('Performance Monitoring'),
+ description: tct(
+ 'You can measure the performance of your endpoints by adding a middleware to [code:Startup.cs]:',
+ {
+ code: <code />,
+ }
+ ),
+ configurations: [
+ {
+ language: 'csharp',
+ code: getPerformanceMiddlewareSnippet(),
+ },
+ {
+ description: t(
+ "You'll be able to monitor the performance of your actions automatically. To add additional spans to it, you can use the API:"
+ ),
+ language: 'csharp',
+ code: getPerformanceSpansSnippet(),
+ },
+ ],
+ },
+ {
+ title: t('Samples'),
+ description: (
+ <Fragment>
+ {t(
+ 'See the following examples that demonstrate how to integrate Sentry with various frameworks.'
+ )}
+ <List symbol="bullet">
+ <ListItem>
+ {tct(
+ '[link:Multiple samples in the [code:dotnet] SDK repository] [strong:(C#)]',
+ {
+ link: (
+ <ExternalLink href="https://github.com/getsentry/sentry-dotnet/tree/main/samples" />
+ ),
+ code: <code />,
+ strong: <strong />,
+ }
+ )}
+ </ListItem>
+ <ListItem>
+ {tct('[link:Basic F# sample] [strong:(F#)]', {
+ link: <ExternalLink href="https://github.com/sentry-demos/fsharp" />,
+ strong: <strong />,
+ })}
+ </ListItem>
+ <ListItem>
+ {tct('[link:Giraffe F# sample] [strong:(F#)]', {
+ link: <ExternalLink href="https://github.com/sentry-demos/giraffe" />,
strong: <strong />,
- }
- )}
- </ListItem>
- <ListItem>
- {tct('[link:Basic F# sample] [strong:(F#)]', {
- link: <ExternalLink href="https://github.com/sentry-demos/fsharp" />,
- strong: <strong />,
- })}
- </ListItem>
- <ListItem>
- {tct('[link:Giraffe F# sample] [strong:(F#)]', {
- link: <ExternalLink href="https://github.com/sentry-demos/giraffe" />,
- strong: <strong />,
- })}
- </ListItem>
- </List>
- </Fragment>
- ),
- },
-];
-// Configuration End
-
-export function GettingStartedWithAspnetcore({
- dsn,
- sourcePackageRegistries,
- ...props
-}: ModuleProps) {
- return <Layout steps={steps({dsn, sourcePackageRegistries})} {...props} />;
-}
-
-export default GettingStartedWithAspnetcore;
+ })}
+ </ListItem>
+ </List>
+ </Fragment>
+ ),
+ },
+ ],
+};
+
+const docs: Docs = {
+ onboarding,
+};
+
+export default docs;
|
67cdb97dc47deb071539c40aeebe9fb587c797b2
|
2024-02-07 19:50:13
|
Tor
|
feat(ddm): Implement global abuse limits for metrics (#64574)
| false
|
Implement global abuse limits for metrics (#64574)
|
feat
|
diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index 3eccbc67e4b695..206c3a46a6d62a 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -998,6 +998,14 @@
flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
)
+
+register(
+ "global-abuse-quota.metric-bucket-limit",
+ type=Int,
+ default=0,
+ flags=FLAG_PRIORITIZE_DISK | FLAG_AUTOMATOR_MODIFIABLE,
+)
+
# END ABUSE QUOTAS
# Send event messages for specific project IDs to random partitions in Kafka
diff --git a/src/sentry/quotas/base.py b/src/sentry/quotas/base.py
index 66d9d1bf45c085..6c47bb2233be1f 100644
--- a/src/sentry/quotas/base.py
+++ b/src/sentry/quotas/base.py
@@ -22,6 +22,7 @@ class QuotaScope(IntEnum):
ORGANIZATION = 1
PROJECT = 2
KEY = 3
+ GLOBAL = 4
def api_name(self):
return self.name.lower()
@@ -36,7 +37,7 @@ class AbuseQuota:
# Quota categories.
categories: list[DataCategory]
# Quota Scope.
- scope: Literal[QuotaScope.ORGANIZATION, QuotaScope.PROJECT]
+ scope: Literal[QuotaScope.ORGANIZATION, QuotaScope.PROJECT, QuotaScope.GLOBAL]
# Old org option name still used for compatibility reasons,
# takes precedence over `option` and `compat_option_sentry`.
compat_option_org: str | None = None
@@ -404,6 +405,12 @@ def get_abuse_quotas(self, org):
categories=[DataCategory.METRIC_BUCKET],
scope=QuotaScope.ORGANIZATION,
),
+ AbuseQuota(
+ id="gam",
+ option="global-abuse-quota.metric-bucket-limit",
+ categories=[DataCategory.METRIC_BUCKET],
+ scope=QuotaScope.GLOBAL,
+ ),
]
# XXX: These reason codes are hardcoded in getsentry:
@@ -412,6 +419,7 @@ def get_abuse_quotas(self, org):
reason_codes = {
QuotaScope.ORGANIZATION: "org_abuse_limit",
QuotaScope.PROJECT: "project_abuse_limit",
+ QuotaScope.GLOBAL: "global_abuse_limit",
}
for quota in abuse_quotas:
diff --git a/tests/sentry/quotas/test_base.py b/tests/sentry/quotas/test_base.py
index 2b82de259d2968..5712ed50aaacde 100644
--- a/tests/sentry/quotas/test_base.py
+++ b/tests/sentry/quotas/test_base.py
@@ -159,6 +159,14 @@ def test_check_accept_monitor_checkin(self):
"reasonCode": "go_away",
},
),
+ (
+ QuotaConfig(limit=0, scope=QuotaScope.GLOBAL, reason_code="come back!"),
+ {
+ "limit": 0,
+ "scope": "global",
+ "reasonCode": "come back!",
+ },
+ ),
],
)
def test_quotas_to_json(obj, json):
diff --git a/tests/sentry/quotas/test_redis.py b/tests/sentry/quotas/test_redis.py
index a69431dbbb7062..fb3dc3e87988ae 100644
--- a/tests/sentry/quotas/test_redis.py
+++ b/tests/sentry/quotas/test_redis.py
@@ -71,6 +71,12 @@ class RedisQuotaTest(TestCase):
def quota(self):
return RedisQuota()
+ def test_redis_quota_serialize(self):
+ assert QuotaScope.ORGANIZATION.api_name() == "organization"
+ assert QuotaScope.PROJECT.api_name() == "project"
+ assert QuotaScope.KEY.api_name() == "key"
+ assert QuotaScope.GLOBAL.api_name() == "global"
+
def test_abuse_quotas(self):
# These legacy options need to be set, otherwise we'll run into
# AssertionError: reject-all quotas cannot be tracked
@@ -113,6 +119,7 @@ def test_abuse_quotas(self):
self.organization.update_option("project-abuse-quota.attachment-limit", 601)
self.organization.update_option("project-abuse-quota.session-limit", 602)
self.organization.update_option("organization-abuse-quota.metric-bucket-limit", 603)
+ self.organization.update_option("global-abuse-quota.metric-bucket-limit", 604)
with self.feature("organizations:transaction-metrics-extraction"):
quotas = self.quota.get_quotas(self.project)
@@ -148,6 +155,14 @@ def test_abuse_quotas(self):
assert quotas[4].window == 10
assert quotas[4].reason_code == "org_abuse_limit"
+ assert quotas[5].id == "gam"
+ assert quotas[5].scope == QuotaScope.GLOBAL
+ assert quotas[5].scope_id is None
+ assert quotas[5].categories == {DataCategory.METRIC_BUCKET}
+ assert quotas[5].limit == 6040
+ assert quotas[5].window == 10
+ assert quotas[5].reason_code == "global_abuse_limit"
+
# Let's set the global option for error limits.
# Since we already have an org override for it, it shouldn't change anything.
with self.options({"project-abuse-quota.error-limit": 3}):
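The new GLOBAL scope mostly reuses existing plumbing: a fourth enum member, a reason code, and an AbuseQuota entry read from the new global-abuse-quota.metric-bucket-limit option. A small, self-contained restatement of the scope-to-reason-code wiring the diff adds (the classes here are trimmed stand-ins, not the real quotas module):

from enum import IntEnum

# Trimmed stand-in for sentry.quotas.base.QuotaScope, including the new member.
class QuotaScope(IntEnum):
    ORGANIZATION = 1
    PROJECT = 2
    KEY = 3
    GLOBAL = 4

    def api_name(self) -> str:
        return self.name.lower()

# Scope -> reason code mapping as extended by the diff.
REASON_CODES = {
    QuotaScope.ORGANIZATION: "org_abuse_limit",
    QuotaScope.PROJECT: "project_abuse_limit",
    QuotaScope.GLOBAL: "global_abuse_limit",
}

assert QuotaScope.GLOBAL.api_name() == "global"
assert REASON_CODES[QuotaScope.GLOBAL] == "global_abuse_limit"

The updated test also expects a limit of 6040 for a configured value of 604 with a 10-second window, which suggests the per-window limit is the option value scaled by the window, though that scaling happens outside the lines shown here.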
|
87e1b4475bf43f7704e2478fc7430f6efe599b3d
|
2018-04-20 21:51:22
|
Billy Vong
|
fix(ui): Fix command palette (#8119)
| false
|
Fix command palette (#8119)
|
fix
|
diff --git a/src/sentry/static/sentry/app/views/app.jsx b/src/sentry/static/sentry/app/views/app.jsx
index 0b60737fc76e04..2b29d7e7905358 100644
--- a/src/sentry/static/sentry/app/views/app.jsx
+++ b/src/sentry/static/sentry/app/views/app.jsx
@@ -143,7 +143,7 @@ const App = createReactClass({
if (Object.keys(newState).length > 0) this.setState(newState);
},
- @keydown('cmd+shift+p')
+ @keydown('meta+shift+p', 'meta+k')
openCommandPalette(e) {
openCommandPalette();
e.preventDefault();
@@ -185,13 +185,13 @@ const App = createReactClass({
return (
<ThemeProvider theme={theme}>
- <React.Fragment>
+ <div className="main-container" tabIndex="-1">
<GlobalModal />
<Alerts className="messages-container" />
<Indicators className="indicators-container" />
<ErrorBoundary>{this.renderBody()}</ErrorBoundary>
{ConfigStore.get('features').has('assistant') && <AssistantHelper />}
- </React.Fragment>
+ </div>
</ThemeProvider>
);
},
diff --git a/src/sentry/static/sentry/less/layout.less b/src/sentry/static/sentry/less/layout.less
index 4c6135d2d6427e..3d7de1298caa58 100644
--- a/src/sentry/static/sentry/less/layout.less
+++ b/src/sentry/static/sentry/less/layout.less
@@ -142,6 +142,11 @@ body.auth {
flex: 1;
}
+// Container around content in app.jsx
+.main-container {
+ min-height: 100vh;
+}
+
// So loader can take up full height so that footer is fixed to bottom of screen
.loading-full-layout {
flex: 1;
|
ba89e911884eeb6e9643dee57850a6c10ec05024
|
2019-10-29 03:59:02
|
Billy Vong
|
chore(jest): Remove noisy stacktraces from unmocked endpoints (#15301)
| false
|
Remove noisy stacktraces from unmocked endpoints (#15301)
|
chore
|
diff --git a/src/sentry/static/sentry/app/__mocks__/api.tsx b/src/sentry/static/sentry/app/__mocks__/api.tsx
index 4cd3210164335c..c6d85b72122893 100644
--- a/src/sentry/static/sentry/app/__mocks__/api.tsx
+++ b/src/sentry/static/sentry/app/__mocks__/api.tsx
@@ -126,7 +126,7 @@ class Client {
// Because we are mocking an API client, we generally catch errors to show
// user-friendly error messages, this means in tests this error gets gobbled
// up and developer frustration ensues.
- console.warn(err); // eslint-disable-line no-console
+ console.warn(err.message); // eslint-disable-line no-console
throw err;
} else {
// has mocked response
|
99823c71e2385a7f1e4575a3383998c20dd3ea9f
|
2024-11-30 01:46:14
|
George Gritsouk
|
feat(dashboards): Add y-axis label to `LineChartWidget` (#81443)
| false
|
Add y-axis label to `LineChartWidget` (#81443)
|
feat
|
diff --git a/static/app/views/dashboards/widgets/lineChartWidget/lineChartWidgetVisualization.tsx b/static/app/views/dashboards/widgets/lineChartWidget/lineChartWidgetVisualization.tsx
index 5ba6952fca89af..1618e76674cd06 100644
--- a/static/app/views/dashboards/widgets/lineChartWidget/lineChartWidgetVisualization.tsx
+++ b/static/app/views/dashboards/widgets/lineChartWidget/lineChartWidgetVisualization.tsx
@@ -175,6 +175,10 @@ export function LineChartWidgetVisualization(props: LineChartWidgetVisualization
left: 0,
}}
tooltip={{
+ trigger: 'axis',
+ axisPointer: {
+ type: 'cross',
+ },
formatter,
valueFormatter: value => {
return formatChartValue(value, type, unit);
@@ -186,6 +190,17 @@ export function LineChartWidgetVisualization(props: LineChartWidgetVisualization
return formatChartValue(value, type, unit);
},
},
+ axisPointer: {
+ type: 'line',
+ snap: false,
+ lineStyle: {
+ type: 'solid',
+ width: 0.5,
+ },
+ label: {
+ show: false,
+ },
+ },
}}
{...chartZoomProps}
isGroupedByDate
|
a4981cb233da2151be102a5ed9d7b1a083ae95c2
|
2023-12-13 22:28:51
|
Billy Vong
|
feat(sdks): Upgrade to 7.87.0 (#61674)
| false
|
Upgrade to 7.87.0 (#61674)
|
feat
|
diff --git a/package.json b/package.json
index db758842da32a5..72e6eadfb496b8 100644
--- a/package.json
+++ b/package.json
@@ -54,14 +54,14 @@
"@sentry-internal/rrweb": "2.3.0",
"@sentry-internal/rrweb-player": "2.3.0",
"@sentry-internal/rrweb-snapshot": "2.3.0",
- "@sentry/core": "^7.85.0",
- "@sentry/integrations": "^7.85.0",
- "@sentry/node": "^7.85.0",
- "@sentry/react": "^7.85.0",
+ "@sentry/core": "^7.87.0",
+ "@sentry/integrations": "^7.87.0",
+ "@sentry/node": "^7.87.0",
+ "@sentry/react": "^7.87.0",
"@sentry/release-parser": "^1.3.1",
- "@sentry/tracing": "^7.85.0",
- "@sentry/types": "^7.85.0",
- "@sentry/utils": "^7.85.0",
+ "@sentry/tracing": "^7.87.0",
+ "@sentry/types": "^7.87.0",
+ "@sentry/utils": "^7.87.0",
"@spotlightjs/spotlight": "^1.1.1",
"@tanstack/react-query": "^4.29.7",
"@types/color": "^3.0.3",
diff --git a/yarn.lock b/yarn.lock
index 6b2ccd607bbf57..18644cc1aeaa8e 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2422,14 +2422,14 @@
dependencies:
"@react-types/shared" "^3.18.1"
-"@sentry-internal/[email protected]":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-7.85.0.tgz#94ef44d59a01f145895525a9bd737dc68f4c7d64"
- integrity sha512-MlbIN+N8CWFJBjbqMmARe4+UPo9QRhRar0YoOfmNA2Xqk/EwXcjHWkealosHznXH7tqVbjB25QJpHtDystft/Q==
+"@sentry-internal/[email protected]":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry-internal/feedback/-/feedback-7.87.0.tgz#21d724a02cfd81b4a0cdb0e1e1f1a7631f1aef48"
+ integrity sha512-31nvAvcchoBQJoZSxAaMh6FOLIoZEbTOGutiMTPzPLhL4bQF4fNl20Bc/mRD1IL/L7hyKsIl2ahxKjo1ZFbPfg==
dependencies:
- "@sentry/core" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry/core" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
"@sentry-internal/global-search@^0.5.7":
version "0.5.7"
@@ -2501,26 +2501,26 @@
"@sentry/utils" "7.74.1"
tslib "^2.4.1 || ^1.9.3"
-"@sentry-internal/[email protected]":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.85.0.tgz#1b4781a61e1e43badeff826cf40abe33dd760f1d"
- integrity sha512-p3YMUwkPCy2su9cm/3+7QYR4RiMI0+07DU1BZtht9NLTzY2O87/yvUbn1v2yHR3vJQTy/+7N0ud9/mPBFznRQQ==
+"@sentry-internal/[email protected]":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry-internal/tracing/-/tracing-7.87.0.tgz#e0a9d5d9fd1d68c432eae14bf1f4d4dccffaa97e"
+ integrity sha512-HYa0+rfFmYQ/DadXoiuarTSxrcnYDCd/fm0pFuOHjICtfja8IcLegVYP2/r3CgwB+IjquCtJ5kDcqS/NTgUcpA==
dependencies:
- "@sentry/core" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry/core" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
-"@sentry/[email protected]":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.85.0.tgz#70cea7b53e22b4262f770d70e879ff1a621825de"
- integrity sha512-x4sH7vTQnZQgy1U7NuN8XwhleAw7YMQitccHeC5m+kpIKGUO7w4Mdvu8rD3dnjmVmZvASpnwocAxy57/vCU6Ww==
+"@sentry/[email protected]":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-7.87.0.tgz#98c2a63a8e30fbbcb3b10a39a628abd0a3e4a603"
+ integrity sha512-+WVqIns2ZFF/tsjNf90Km3eIKUd6lGQSsBQ+cVa05v0ebMr5OtnOkKlOHN4CJZ0gT0lyDnNHoys9Af6edIbE1Q==
dependencies:
- "@sentry-internal/feedback" "7.85.0"
- "@sentry-internal/tracing" "7.85.0"
- "@sentry/core" "7.85.0"
- "@sentry/replay" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry-internal/feedback" "7.87.0"
+ "@sentry-internal/tracing" "7.87.0"
+ "@sentry/core" "7.87.0"
+ "@sentry/replay" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
"@sentry/[email protected]":
version "7.66.0"
@@ -2540,13 +2540,13 @@
"@sentry/utils" "7.74.1"
tslib "^2.4.1 || ^1.9.3"
-"@sentry/[email protected]", "@sentry/core@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.85.0.tgz#dd90d772a5f75ff674f931f59b22a3fc286d0983"
- integrity sha512-DFDAc4tWmHN5IWhr7XbHCiyF1Xgb95jz8Uj/JTX9atlgodId1UIbER77qpEmH3eQGid/QBdqrlR98zCixgSbwg==
+"@sentry/[email protected]", "@sentry/core@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.87.0.tgz#98cdd1ce04f781353224dd1ac79b4ef7f6786053"
+ integrity sha512-jkoXqK/nuYh8DYS+n7uaSuSIdw4HJemyRkXsWjAEPtEgD7taGMafZGbP5pl+XE38SE59jTBxmKnkUEZOFMgZGA==
dependencies:
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
"@sentry/hub@^7.64.0":
version "7.66.0"
@@ -2558,14 +2558,14 @@
"@sentry/utils" "7.66.0"
tslib "^2.4.1 || ^1.9.3"
-"@sentry/integrations@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/integrations/-/integrations-7.85.0.tgz#967b9e1718cb99d5ba324f3a854e44e46e24a1a5"
- integrity sha512-c/uEhrFbAefK00cnm/SjqZ31rWVsruiQWAvV4dxU/rSQ2dBWDuJz1woXX7Wd03yCSMq14tXtiDy9aTC4xCZ71w==
+"@sentry/integrations@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/integrations/-/integrations-7.87.0.tgz#6112d33e93f35ca2a7000026923b45f018da5f9b"
+ integrity sha512-xbyOQeyfG1sF2PBMIOz3c3i0Y3+8q4UlxoeOhpFe6Vpjek+I/g7onZT6YevT6cWG083cg+rS0VCgPQSUV2lxIw==
dependencies:
- "@sentry/core" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry/core" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
localforage "^1.8.1"
"@sentry/jest-environment@^4.0.0":
@@ -2587,15 +2587,15 @@
lru_map "^0.3.3"
tslib "^2.4.1 || ^1.9.3"
-"@sentry/node@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.85.0.tgz#cf4e6022b5cd1f3fb007186c5e04427b108ebe1d"
- integrity sha512-uiBtRW9G017NHoCXBlK3ttkTwHXLFyI8ndHpaObtyajKTv3ptGIThVEn7DuK7Pwor//RjwjSEEOa7WDK+FdMVQ==
+"@sentry/node@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.87.0.tgz#e5450f197f5efe7ad3d61a5ce670e9c8c7f9c088"
+ integrity sha512-mGcZMCL3/IMTLIRcWLF+H9z2Bb2d34gKmg2rhXqI8BqhhUA551jMRlZv/y4za2Osjy550KwVoNsA1qtEe5mYyQ==
dependencies:
- "@sentry-internal/tracing" "7.85.0"
- "@sentry/core" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry-internal/tracing" "7.87.0"
+ "@sentry/core" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
https-proxy-agent "^5.0.0"
"@sentry/profiling-node@^1.2.1":
@@ -2612,14 +2612,14 @@
node-abi "^3.28.0"
node-gyp "^9.3.0"
-"@sentry/react@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/react/-/react-7.85.0.tgz#eb94bee7a72208081d5256d7a8001f91b7d07b7e"
- integrity sha512-digw63l1A9n+74rW8uiG575Xh3qWTkmvwgTfNRFvDokDRMqRTP0iQEqZRBrBEzMZ5JUa6s+5NLc1/dbMh1QQgA==
+"@sentry/react@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/react/-/react-7.87.0.tgz#c0d27d39561df5d9b181b0593fb28bdb249c1f0a"
+ integrity sha512-37O4Tr7Ij1T6B3eAKrlsLCkxjW+euHDRaDjjuD0gxYEyf4Tr+kx6x7s3x9wXZpkkehJCTCOflDSvYM7qH4uRFA==
dependencies:
- "@sentry/browser" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry/browser" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
hoist-non-react-statics "^3.3.2"
"@sentry/release-parser@^1.3.1":
@@ -2627,22 +2627,22 @@
resolved "https://registry.yarnpkg.com/@sentry/release-parser/-/release-parser-1.3.1.tgz#0ab8be23fd494d80dd0e4ec8ae5f3d13f805b13d"
integrity sha512-/dGpCq+j3sJhqQ14RNEEL45Ot/rgq3jAlZDD/8ufeqq+W8p4gUhSrbGWCRL82NEIWY9SYwxYXGXjRcVPSHiA1Q==
-"@sentry/[email protected]":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/replay/-/replay-7.85.0.tgz#81ad025bc85b343da71e1fb7bd7c5702690e48c8"
- integrity sha512-zVtTKfO+lu5qTwHpETI/oGo8hU3rdKHr3CdI1vRLw+d60PcAa/pWVlXsQeLRTw8PFwE358gHcpFZezj/11afew==
+"@sentry/[email protected]":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/replay/-/replay-7.87.0.tgz#67c4adb839889e19a32fe7948a91d7f48e55e463"
+ integrity sha512-QinY5uYARRkZsdInNy8cFY4p/H697DutRdpWEIi/v7hsOiRVRxIVZ3n8dJxVIpPbVQ9upI19s/J3bb4mSERLSg==
dependencies:
- "@sentry-internal/tracing" "7.85.0"
- "@sentry/core" "7.85.0"
- "@sentry/types" "7.85.0"
- "@sentry/utils" "7.85.0"
+ "@sentry-internal/tracing" "7.87.0"
+ "@sentry/core" "7.87.0"
+ "@sentry/types" "7.87.0"
+ "@sentry/utils" "7.87.0"
-"@sentry/tracing@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-7.85.0.tgz#831fd6096c4d9e03ff55ac5919f7c7da30f73189"
- integrity sha512-L3bpqiM+zu5f3o6zh6hx3xEzVENyhrkuMlpUOyDo0mUytqp763HqF1xz+R+trzze7R5VWrxJaRPARsCKlXu4Ig==
+"@sentry/tracing@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/tracing/-/tracing-7.87.0.tgz#409f79497f06674c951f6d9890ed8a042899bbf2"
+ integrity sha512-oEj4wo5K9WSyYgMInM/DA5DToS0F/dz2PNe5dkhIdEkMGTarSvLm7DIo3L44NfYUtj4DrKyMM61vXHNIeQitVg==
dependencies:
- "@sentry-internal/tracing" "7.85.0"
+ "@sentry-internal/tracing" "7.87.0"
"@sentry/[email protected]":
version "7.66.0"
@@ -2654,10 +2654,10 @@
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.74.1.tgz#b6f9b1bd266254f1f8b55fbcc92fa649ba2100ed"
integrity sha512-2jIuPc+YKvXqZETwr2E8VYnsH1zsSUR/wkIvg1uTVeVNyoowJv+YsOtCdeGyL2AwiotUBSPKu7O1Lz0kq5rMOQ==
-"@sentry/[email protected]", "@sentry/types@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.85.0.tgz#648488b90f958ca6a86922cc5d26004853410ba6"
- integrity sha512-R5jR4XkK5tBU2jDiPdSVqzkmjYRr666bcGaFGUHB/xDQCjPsjk+pEmCCL+vpuWoaZmQJUE1hVU7rgnVX81w8zg==
+"@sentry/[email protected]", "@sentry/types@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.87.0.tgz#9f9a5e4ce97676af17dda01766399f31c8916ca4"
+ integrity sha512-w8jKFHq/Llupmr2FezmFgQsnm3y/CnqLjb7s6PstI78E409wrhH7p7oqX/OEuzccH1qNCNwes/3QKvPTRQDB4Q==
"@sentry/[email protected]":
version "7.66.0"
@@ -2675,12 +2675,12 @@
"@sentry/types" "7.74.1"
tslib "^2.4.1 || ^1.9.3"
-"@sentry/[email protected]", "@sentry/utils@^7.85.0":
- version "7.85.0"
- resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.85.0.tgz#b84467fd07bc2ef09fdf382ddcdcdc3f5b0d78b0"
- integrity sha512-JZ7seNOLvhjAQ8GeB3GYknPQJkuhF88xAYOaESZP3xPOWBMFUN+IO4RqjMqMLFDniOwsVQS7GB/MfP+hxufieg==
+"@sentry/[email protected]", "@sentry/utils@^7.87.0":
+ version "7.87.0"
+ resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.87.0.tgz#95ca4bf2208077346f9d433ba536ee1610ec3cae"
+ integrity sha512-7xgtPTnTNP/4IznFMFXxltuaXfLvzznrYCDMv9ny8EeUjJqlLX3CVA8Qq3YALsLCQCKcrGRARbAcd/EGG//w2w==
dependencies:
- "@sentry/types" "7.85.0"
+ "@sentry/types" "7.87.0"
"@sinclair/typebox@^0.27.8":
version "0.27.8"
|
28d0d256ab0cd8a6ff6d0b7340c3c15073465436
|
2024-04-18 00:53:25
|
Evan Purkhiser
|
ref(ui): Adjust spacing on idBadge based on avatarSize (#69053)
| false
|
Adjust spacing on idBadge based on avatarSize (#69053)
|
ref
|
diff --git a/static/app/components/idBadge/badgeDisplayName.tsx b/static/app/components/idBadge/badgeDisplayName.tsx
index 0e1d7b1f0f924a..0e358450d0d7f2 100644
--- a/static/app/components/idBadge/badgeDisplayName.tsx
+++ b/static/app/components/idBadge/badgeDisplayName.tsx
@@ -1,8 +1,6 @@
import {css} from '@emotion/react';
import styled from '@emotion/styled';
-import {space} from 'sentry/styles/space';
-
const BadgeDisplayName = styled('span')<{hideOverflow?: string | boolean}>`
${p =>
p.hideOverflow &&
@@ -12,7 +10,6 @@ const BadgeDisplayName = styled('span')<{hideOverflow?: string | boolean}>`
? p.hideOverflow
: p.theme.settings.maxCrumbWidth};
`};
- padding: ${space(0.25)} 0;
`;
export default BadgeDisplayName;
diff --git a/static/app/components/idBadge/baseBadge.tsx b/static/app/components/idBadge/baseBadge.tsx
index f1f2bb1f53db4a..507cd499c4d9cd 100644
--- a/static/app/components/idBadge/baseBadge.tsx
+++ b/static/app/components/idBadge/baseBadge.tsx
@@ -2,7 +2,7 @@ import {memo} from 'react';
import styled from '@emotion/styled';
import Avatar from 'sentry/components/avatar';
-import {space} from 'sentry/styles/space';
+import {space, type ValidSize} from 'sentry/styles/space';
import type {Actor, AvatarProject, AvatarUser, Organization, Team} from 'sentry/types';
export interface BaseBadgeProps {
@@ -38,35 +38,40 @@ export const BaseBadge = memo(
project,
actor,
className,
- }: AllBaseBadgeProps) => (
- <Wrapper className={className}>
- {!hideAvatar && (
- <Avatar
- {...avatarProps}
- size={avatarSize}
- team={team}
- user={user}
- organization={organization}
- project={project}
- actor={actor}
- />
- )}
+ }: AllBaseBadgeProps) => {
+    // Space items appropriately depending on avatar size
+ const wrapperGap: ValidSize = avatarSize <= 14 ? 0.5 : avatarSize <= 20 ? 0.75 : 1;
- {(!hideName || !!description) && (
- <DisplayNameAndDescription>
- {!hideName && (
- <DisplayName data-test-id="badge-display-name">{displayName}</DisplayName>
- )}
- {!!description && <Description>{description}</Description>}
- </DisplayNameAndDescription>
- )}
- </Wrapper>
- )
+ return (
+ <Wrapper className={className} gap={wrapperGap}>
+ {!hideAvatar && (
+ <Avatar
+ {...avatarProps}
+ size={avatarSize}
+ team={team}
+ user={user}
+ organization={organization}
+ project={project}
+ actor={actor}
+ />
+ )}
+
+ {(!hideName || !!description) && (
+ <DisplayNameAndDescription>
+ {!hideName && (
+ <DisplayName data-test-id="badge-display-name">{displayName}</DisplayName>
+ )}
+ {!!description && <Description>{description}</Description>}
+ </DisplayNameAndDescription>
+ )}
+ </Wrapper>
+ );
+ }
);
-const Wrapper = styled('div')`
+const Wrapper = styled('div')<{gap: ValidSize}>`
display: flex;
- gap: ${space(1)};
+ gap: ${p => space(p.gap)};
align-items: center;
flex-shrink: 0;
`;
diff --git a/static/app/components/idBadge/index.stories.tsx b/static/app/components/idBadge/index.stories.tsx
index 925c2303d05429..a597b1f00e9928 100644
--- a/static/app/components/idBadge/index.stories.tsx
+++ b/static/app/components/idBadge/index.stories.tsx
@@ -6,11 +6,29 @@ import useProjects from 'sentry/utils/useProjects';
import {useTeams} from 'sentry/utils/useTeams';
import {useUser} from 'sentry/utils/useUser';
+import Matrix, {type PropMatrix} from '../stories/matrix';
import SideBySide from '../stories/sideBySide';
+import type {OrganizationBadgeProps} from './organizationBadge';
import IdBadge from '.';
export default storyBook(IdBadge, story => {
+ story('Props', () => {
+ const org = useOrganization();
+
+ const propMatrix: PropMatrix<OrganizationBadgeProps> = {
+ avatarSize: [12, 16, 24],
+ };
+
+ return (
+ <Matrix<OrganizationBadgeProps>
+ render={props => <IdBadge {...props} organization={org} />}
+ propMatrix={propMatrix}
+ selectedProps={['avatarSize']}
+ />
+ );
+ });
+
story('Organization', () => {
const org = useOrganization();
return <IdBadge organization={org} />;
|
f973b16451203134ea5b5a2ac91209c078b1727e
|
2023-11-02 08:30:07
|
Ryan Albrecht
|
chore(bug reports): Mark new User Feedback as beta! (#59252)
| false
|
Mark new User Feedback as beta! (#59252)
|
chore
|
diff --git a/static/app/components/sidebar/index.tsx b/static/app/components/sidebar/index.tsx
index abbe1a5c68298b..85305b9e83f8d9 100644
--- a/static/app/components/sidebar/index.tsx
+++ b/static/app/components/sidebar/index.tsx
@@ -6,6 +6,7 @@ import {Location} from 'history';
import {hideSidebar, showSidebar} from 'sentry/actionCreators/preferences';
import Feature from 'sentry/components/acl/feature';
import GuideAnchor from 'sentry/components/assistant/guideAnchor';
+import FeatureBadge from 'sentry/components/featureBadge';
import {OnboardingContext} from 'sentry/components/onboarding/onboardingContext';
import {getMergedTasks} from 'sentry/components/onboardingWizard/taskConfig';
import PerformanceOnboardingSidebar from 'sentry/components/performanceOnboarding/sidebar';
@@ -362,11 +363,18 @@ function Sidebar({location, organization}: Props) {
<SidebarItem
{...sidebarItemProps}
icon={<IconMegaphone />}
- label={t('User Feedback')}
+ label={
+ <Fragment>
+ {t('User Feedback')}{' '}
+ <FeatureBadge
+ title={t('This feature is available for early adopters and may change')}
+ type="alpha"
+ variant="short"
+ />
+ </Fragment>
+ }
to={`/organizations/${organization.slug}/feedback/`}
id="feedback"
- isAlpha
- variant="short"
/>
</Feature>
);
|
c950b782b5c03e1afa62a40938487fc34500f06d
|
2018-09-06 01:04:59
|
Chris Clark
|
fix(issue sync): resolve padding issues in issue sync list (#9628)
| false
|
resolve padding issues in issue sync list (#9628)
|
fix
|
diff --git a/src/sentry/static/sentry/app/components/group/externalIssuesList.jsx b/src/sentry/static/sentry/app/components/group/externalIssuesList.jsx
index 959f07c3e8f773..c8541075cb0823 100644
--- a/src/sentry/static/sentry/app/components/group/externalIssuesList.jsx
+++ b/src/sentry/static/sentry/app/components/group/externalIssuesList.jsx
@@ -28,27 +28,15 @@ class ExternalIssueList extends AsyncComponent {
integration => integration.status === 'active'
);
- if (!activeIntegrations.length)
- return (
- <AlertLink
- icon="icon-generic-box"
- priority="default"
- size="small"
- to={`/settings/${this.props.orgId}/integrations`}
- >
- {t('Set up Issue Tracking')}
- </AlertLink>
- );
-
- const externalIssues = activeIntegrations.map(integration => (
- <ExternalIssueActions
- key={integration.id}
- integration={integration}
- group={group}
- />
- ));
-
- return <Box mb={3}>{externalIssues}</Box>;
+ return activeIntegrations.length
+ ? activeIntegrations.map(integration => (
+ <ExternalIssueActions
+ key={integration.id}
+ integration={integration}
+ group={group}
+ />
+ ))
+ : null;
}
renderPluginIssues() {
@@ -64,7 +52,7 @@ class ExternalIssueList extends AsyncComponent {
renderPluginActions() {
const {group} = this.props;
- return group.pluginActions
+ return group.pluginActions && group.pluginActions.length
? group.pluginActions.map((plugin, i) => {
return (
<IssueSyncListElement externalIssueLink={plugin[1]} key={i}>
@@ -76,15 +64,36 @@ class ExternalIssueList extends AsyncComponent {
}
renderBody() {
+ const integrationIssues = this.renderIntegrationIssues(this.state.integrations);
+ const pluginIssues = this.renderPluginIssues();
+ const pluginActions = this.renderPluginActions();
+
+ if (!integrationIssues && !pluginIssues && !pluginActions)
+ return (
+ <React.Fragment>
+ <h6>
+ <span>Linked Issues</span>
+ </h6>
+ <AlertLink
+ icon="icon-generic-box"
+ priority="default"
+ size="small"
+ to={`/settings/${this.props.orgId}/integrations`}
+ >
+ {t('Set up Issue Tracking')}
+ </AlertLink>
+ </React.Fragment>
+ );
+
return (
- <div>
+ <React.Fragment>
<h6>
<span>Linked Issues</span>
</h6>
- {this.renderIntegrationIssues(this.state.integrations)}
- {this.renderPluginIssues()}
- {this.renderPluginActions()}
- </div>
+ {integrationIssues && <Box mb={2}>{integrationIssues}</Box>}
+ {pluginIssues && <Box mb={2}>{pluginIssues}</Box>}
+ {pluginActions && <Box mb={2}>{pluginActions}</Box>}
+ </React.Fragment>
);
}
}
|
ad88919e91641a1ad267ae1237900c1d2d7cd2c6
|
2024-08-01 02:34:27
|
Kevin Liu
|
feat(insights): reload projects when missing data (#75358)
| false
|
reload projects when missing data (#75358)
|
feat
|
diff --git a/static/app/components/createAlertButton.spec.tsx b/static/app/components/createAlertButton.spec.tsx
index 24a69be02e55a2..02ba61ee94f0c6 100644
--- a/static/app/components/createAlertButton.spec.tsx
+++ b/static/app/components/createAlertButton.spec.tsx
@@ -25,6 +25,7 @@ describe('CreateAlertFromViewButton', () => {
jest.mocked(useProjects).mockReturnValue({
projects: [],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -72,6 +73,7 @@ describe('CreateAlertFromViewButton', () => {
jest.mocked(useProjects).mockReturnValue({
projects,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -107,6 +109,7 @@ describe('CreateAlertFromViewButton', () => {
jest.mocked(useProjects).mockReturnValue({
projects,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -155,6 +158,7 @@ describe('CreateAlertFromViewButton', () => {
jest.mocked(useProjects).mockReturnValue({
projects,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -257,6 +261,7 @@ describe('CreateAlertFromViewButton', () => {
jest.mocked(useProjects).mockReturnValue({
projects,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/components/events/eventReplay/index.spec.tsx b/static/app/components/events/eventReplay/index.spec.tsx
index af75b7c7116b35..e78c310e58dae0 100644
--- a/static/app/components/events/eventReplay/index.spec.tsx
+++ b/static/app/components/events/eventReplay/index.spec.tsx
@@ -135,6 +135,7 @@ describe('EventReplay', function () {
hasMore: false,
initiallyLoaded: false,
onSearch: () => Promise.resolve(),
+ reloadProjects: jest.fn(),
placeholders: [],
projects: [project],
});
diff --git a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx
index d41ee11eec7e9a..2a254deb77ccd9 100644
--- a/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx
+++ b/static/app/components/events/interfaces/breadcrumbs/breadcrumbs.spec.tsx
@@ -36,6 +36,7 @@ describe('Breadcrumbs', () => {
hasMore: false,
initiallyLoaded: false,
onSearch: () => Promise.resolve(),
+ reloadProjects: jest.fn(),
placeholders: [],
projects: [project],
});
diff --git a/static/app/components/replays/header/errorCounts.spec.tsx b/static/app/components/replays/header/errorCounts.spec.tsx
index d79677c131764f..afe8f7060dcb51 100644
--- a/static/app/components/replays/header/errorCounts.spec.tsx
+++ b/static/app/components/replays/header/errorCounts.spec.tsx
@@ -40,6 +40,7 @@ describe('ErrorCounts', () => {
hasMore: false,
initiallyLoaded: true,
onSearch: () => Promise.resolve(),
+ reloadProjects: jest.fn(),
placeholders: [],
});
});
diff --git a/static/app/utils/replays/hooks/useReplayData.spec.tsx b/static/app/utils/replays/hooks/useReplayData.spec.tsx
index baa9fa88b36391..2c1d204db73265 100644
--- a/static/app/utils/replays/hooks/useReplayData.spec.tsx
+++ b/static/app/utils/replays/hooks/useReplayData.spec.tsx
@@ -29,6 +29,7 @@ jest.mocked(useProjects).mockReturnValue({
hasMore: false,
initiallyLoaded: true,
onSearch: () => Promise.resolve(),
+ reloadProjects: jest.fn(),
placeholders: [],
});
diff --git a/static/app/utils/useProjects.tsx b/static/app/utils/useProjects.tsx
index cdcfa7be2dabff..9bdb30d2706a81 100644
--- a/static/app/utils/useProjects.tsx
+++ b/static/app/utils/useProjects.tsx
@@ -58,6 +58,10 @@ type Result = {
* The loaded projects list
*/
projects: Project[];
+ /**
+ * Allows consumers to force refetch project data.
+ */
+ reloadProjects: () => Promise<void>;
} & Pick<State, 'fetching' | 'hasMore' | 'fetchError' | 'initiallyLoaded'>;
type Options = {
@@ -199,7 +203,8 @@ function useProjects({limit, slugs, orgId: propOrgId}: Options = {}) {
limit,
});
- const fetchedProjects = uniqBy([...store.projects, ...results], ({slug}) => slug);
+ // Note the order of uniqBy: we prioritize project data recently fetched over previously cached data
+ const fetchedProjects = uniqBy([...results, ...store.projects], ({slug}) => slug);
ProjectsStore.loadInitialData(fetchedProjects);
setState(prev => ({
@@ -308,6 +313,7 @@ function useProjects({limit, slugs, orgId: propOrgId}: Options = {}) {
fetchError,
hasMore,
onSearch: handleSearch,
+ reloadProjects: loadProjectsBySlug,
};
return result;
diff --git a/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx b/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx
index 8c53b748456c06..c72736e791d4db 100644
--- a/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx
+++ b/static/app/views/insights/browser/resources/views/resourcesLandingPage.spec.tsx
@@ -168,6 +168,7 @@ const setupMocks = () => {
initiallyLoaded: true,
projects: [ProjectFixture({hasInsightsAssets: true})],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
});
};
diff --git a/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx b/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx
index 52c2b26f31b651..151fb100538624 100644
--- a/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx
+++ b/static/app/views/insights/browser/webVitals/components/tables/pagePerformanceTable.spec.tsx
@@ -60,6 +60,7 @@ describe('PagePerformanceTable', function () {
}),
],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx b/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx
index 55caaab03a1d22..97cd80613804c1 100644
--- a/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx
+++ b/static/app/views/insights/browser/webVitals/views/webVitalsLandingPage.spec.tsx
@@ -26,6 +26,7 @@ describe('WebVitalsLandingPage', function () {
jest.mocked(useProjects).mockReturnValue({
projects: [ProjectFixture({hasInsightsVitals: true})],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx b/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx
index 91646ebce390ec..b8db986b5f2086 100644
--- a/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx
+++ b/static/app/views/insights/cache/views/cacheLandingPage.spec.tsx
@@ -71,6 +71,7 @@ describe('CacheLandingPage', function () {
}),
],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -302,6 +303,7 @@ describe('CacheLandingPage', function () {
}),
],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/common/components/modulesOnboarding.spec.tsx b/static/app/views/insights/common/components/modulesOnboarding.spec.tsx
index 0fdac56dd94f7f..b1d8f1ea48ca10 100644
--- a/static/app/views/insights/common/components/modulesOnboarding.spec.tsx
+++ b/static/app/views/insights/common/components/modulesOnboarding.spec.tsx
@@ -26,6 +26,7 @@ describe('ModulesOnboarding', () => {
jest.mocked(useProjects).mockReturnValue({
projects: [project],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -66,6 +67,7 @@ describe('ModulesOnboarding', () => {
jest.mocked(useProjects).mockReturnValue({
projects: [project],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
@@ -105,6 +107,7 @@ describe('ModulesOnboarding', () => {
jest.mocked(useProjects).mockReturnValue({
projects: [project],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/common/components/modulesOnboarding.tsx b/static/app/views/insights/common/components/modulesOnboarding.tsx
index 7edbd0243ed9f5..39d4415e83a51c 100644
--- a/static/app/views/insights/common/components/modulesOnboarding.tsx
+++ b/static/app/views/insights/common/components/modulesOnboarding.tsx
@@ -1,4 +1,4 @@
-import {Fragment, useState} from 'react';
+import {Fragment, useEffect, useState} from 'react';
import styled from '@emotion/styled';
import startCase from 'lodash/startCase';
import {PlatformIcon} from 'platformicons';
@@ -21,6 +21,7 @@ import {t, tct} from 'sentry/locale';
import {space} from 'sentry/styles/space';
import type {PlatformKey} from 'sentry/types/project';
import useOrganization from 'sentry/utils/useOrganization';
+import useProjects from 'sentry/utils/useProjects';
import * as ModuleLayout from 'sentry/views/insights/common/components/moduleLayout';
import type {TitleableModuleNames} from 'sentry/views/insights/common/components/modulePageProviders';
import {useHasFirstSpan} from 'sentry/views/insights/common/queries/useHasFirstSpan';
@@ -42,8 +43,19 @@ export function ModulesOnboarding({
}) {
const organization = useOrganization();
const onboardingProject = useOnboardingProject();
+ const {reloadProjects} = useProjects();
const hasData = useHasFirstSpan(moduleName);
+ // Refetch the project metadata if the selected project does not have insights data, because
+ // we may have received insight data (and subsequently updated `Project.hasInsightxx`)
+ // after the initial project fetch.
+ useEffect(() => {
+ if (!hasData) {
+ reloadProjects();
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [hasData]);
+
if (onboardingProject) {
return (
<ModuleLayout.Full>
diff --git a/static/app/views/insights/database/views/databaseLandingPage.spec.tsx b/static/app/views/insights/database/views/databaseLandingPage.spec.tsx
index efe0cd118b15ab..31595a29e08fff 100644
--- a/static/app/views/insights/database/views/databaseLandingPage.spec.tsx
+++ b/static/app/views/insights/database/views/databaseLandingPage.spec.tsx
@@ -22,6 +22,7 @@ describe('DatabaseLandingPage', function () {
jest.mocked(useProjects).mockReturnValue({
projects: [ProjectFixture({hasInsightsDb: true})],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/http/views/httpLandingPage.spec.tsx b/static/app/views/insights/http/views/httpLandingPage.spec.tsx
index 5d254c03a27490..d232a6ca709385 100644
--- a/static/app/views/insights/http/views/httpLandingPage.spec.tsx
+++ b/static/app/views/insights/http/views/httpLandingPage.spec.tsx
@@ -60,6 +60,7 @@ describe('HTTPLandingPage', function () {
}),
],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx b/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx
index 596ab581bb7fee..c5965687805af2 100644
--- a/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx
+++ b/static/app/views/insights/mobile/common/queries/useCrossPlatformProject.spec.tsx
@@ -39,6 +39,7 @@ function mockProjects(projects: Project[]) {
hasMore: false,
initiallyLoaded: false,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
projects,
});
diff --git a/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx b/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx
index 4a909fd70e1df3..75e560676ff57e 100644
--- a/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx
+++ b/static/app/views/insights/mobile/screenload/views/screenLoadSpansPage.spec.tsx
@@ -26,6 +26,7 @@ function mockResponses(organization, project) {
hasMore: false,
initiallyLoaded: false,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
projects: [project],
});
diff --git a/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx b/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx
index 92d626e0ae1024..ad3310810ecf74 100644
--- a/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx
+++ b/static/app/views/insights/mobile/screenload/views/screenloadLandingPage.spec.tsx
@@ -31,6 +31,7 @@ describe('PageloadModule', function () {
hasMore: false,
initiallyLoaded: false,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
projects: [project],
});
diff --git a/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx b/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx
index 6922be5ba916e2..f6c0af83557baf 100644
--- a/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx
+++ b/static/app/views/insights/queues/views/destinationSummaryPage.spec.tsx
@@ -46,6 +46,7 @@ describe('destinationSummaryPage', () => {
jest.mocked(useProjects).mockReturnValue({
projects: [],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx b/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx
index 5db67954157c7d..66d70bec26d1ab 100644
--- a/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx
+++ b/static/app/views/insights/queues/views/queuesLandingPage.spec.tsx
@@ -50,6 +50,7 @@ describe('queuesLandingPage', () => {
jest.mocked(useProjects).mockReturnValue({
projects: [project],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
diff --git a/static/app/views/monitors/components/monitorForm.spec.tsx b/static/app/views/monitors/components/monitorForm.spec.tsx
index 739ac562746bb5..11c6e2dd1eaaf7 100644
--- a/static/app/views/monitors/components/monitorForm.spec.tsx
+++ b/static/app/views/monitors/components/monitorForm.spec.tsx
@@ -32,6 +32,7 @@ describe('MonitorForm', function () {
hasMore: false,
initiallyLoaded: false,
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
projects: [project],
});
diff --git a/static/app/views/performance/content.tsx b/static/app/views/performance/content.tsx
index d008d6acdb106b..226860f36da786 100644
--- a/static/app/views/performance/content.tsx
+++ b/static/app/views/performance/content.tsx
@@ -49,7 +49,7 @@ type State = {
function PerformanceContent({selection, location, demoMode, router}: Props) {
const api = useApi();
const organization = useOrganization();
- const {projects} = useProjects();
+ const {projects, reloadProjects} = useProjects();
const mounted = useRef(false);
const previousDateTime = usePrevious(selection.datetime);
const [state, setState] = useState<State>({error: undefined});
@@ -107,6 +107,16 @@ function PerformanceContent({selection, location, demoMode, router}: Props) {
tab: getLandingDisplayFromParam(location)?.field,
});
+ // Refetch the project metadata if the selected project does not have performance data, because
+ // we may have received performance data (and subsequently updated `Project.firstTransactionEvent`)
+ // after the initial project fetch.
+ useEffect(() => {
+ if (onboardingProject) {
+ reloadProjects();
+ }
+ // eslint-disable-next-line react-hooks/exhaustive-deps
+ }, [onboardingProject]);
+
useEffect(() => {
if (!mounted.current) {
loadOrganizationTags(api, organization.slug, selection);
diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx
index 7fe5a2948bae52..a9ab40dae9bd95 100644
--- a/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx
+++ b/static/app/views/performance/transactionSummary/transactionSpans/spanSummary/content.spec.tsx
@@ -31,6 +31,7 @@ describe('SpanSummaryPage', function () {
jest.mocked(useProjects).mockReturnValue({
projects: [],
onSearch: jest.fn(),
+ reloadProjects: jest.fn(),
placeholders: [],
fetching: false,
hasMore: null,
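A minimal sketch (not part of the commit) of how a consumer could use the `reloadProjects` helper added to `useProjects` above; the `hasInsightData` prop and the `InsightsGate` component name are illustrative stand-ins for the modules-onboarding and performance call sites in the diff:

import {useEffect} from 'react';

import useProjects from 'sentry/utils/useProjects';

function InsightsGate({hasInsightData}: {hasInsightData: boolean}) {
  const {projects, fetching, reloadProjects} = useProjects();

  useEffect(() => {
    // If the module reports no data yet, the cached project flags may be stale,
    // so force a refetch of project metadata.
    if (!hasInsightData) {
      reloadProjects();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [hasInsightData]);

  return fetching ? null : <span>{projects.length} projects loaded</span>;
}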
|
8d6dcccc71044d9ee9332528402fee1ccc490c2e
|
2021-03-23 04:56:18
|
Kelly Carino
|
fix(ui): Chat icon with badge (#24584)
| false
|
Chat icon with badge (#24584)
|
fix
|
diff --git a/src/sentry/static/sentry/app/views/organizationGroupDetails/header.tsx b/src/sentry/static/sentry/app/views/organizationGroupDetails/header.tsx
index 2edd3969dd58c3..0eb44940ebb78a 100644
--- a/src/sentry/static/sentry/app/views/organizationGroupDetails/header.tsx
+++ b/src/sentry/static/sentry/app/views/organizationGroupDetails/header.tsx
@@ -7,6 +7,7 @@ import {fetchOrgMembers} from 'app/actionCreators/members';
import {Client} from 'app/api';
import AssigneeSelector from 'app/components/assigneeSelector';
import GuideAnchor from 'app/components/assistant/guideAnchor';
+import Badge from 'app/components/badge';
import Count from 'app/components/count';
import EventOrGroupTitle from 'app/components/eventOrGroupTitle';
import ErrorLevel from 'app/components/events/errorLevel';
@@ -21,6 +22,7 @@ import ListLink from 'app/components/links/listLink';
import NavTabs from 'app/components/navTabs';
import SeenByList from 'app/components/seenByList';
import ShortId from 'app/components/shortId';
+import Tag from 'app/components/tag';
import Tooltip from 'app/components/tooltip';
import {IconChat} from 'app/icons';
import {t} from 'app/locale';
@@ -257,22 +259,18 @@ class GroupHeader extends React.Component<Props, State> {
isActive={() => currentTab === TAB.ACTIVITY}
disabled={isGroupBeingReprocessing}
>
- {t('Activity')} <TabCount>{group.numComments}</TabCount>
- <IconChat
- size="xs"
- color={
- group.subscriptionDetails?.reason === 'mentioned'
- ? 'green300'
- : 'purple300'
- }
- />
+ {t('Activity')}
+ <StyledTag>
+ <TabCount>{group.numComments}</TabCount>
+ <IconChat size="xs" color="white" />
+ </StyledTag>
</StyledListLink>
<StyledListLink
to={`${baseUrl}feedback/${location.search}`}
isActive={() => currentTab === TAB.USER_FEEDBACK}
disabled={isGroupBeingReprocessing}
>
- {t('User Feedback')} <TabCount>{group.userReportCount}</TabCount>
+ {t('User Feedback')} <Badge text={group.userReportCount} />
</StyledListLink>
{hasEventAttachments && (
<ListLink
@@ -347,11 +345,21 @@ const StyledTagAndMessageWrapper = styled(TagAndMessageWrapper)`
const StyledListLink = styled(ListLink)`
svg {
margin-left: ${space(0.5)};
+ margin-bottom: ${space(0.25)};
+ vertical-align: middle;
}
`;
+const StyledTag = styled(Tag)`
+ div {
+ background-color: ${p => p.theme.badge.default.background};
+ }
+ margin-left: ${space(0.75)};
+`;
+
const TabCount = styled('span')`
- color: ${p => p.theme.purple300};
+ color: ${p => p.theme.white};
+ font-weight: 600;
`;
const StyledProjectBadge = styled(ProjectBadge)`
|
1ff6f855e876de90b730656e7e1924d4ce19151c
|
2025-01-04 00:19:09
|
mia hsu
|
feat(workflow-engine): add `NewHighPriorityIssueConditionHandler` (#82846)
| false
|
add `NewHighPriorityIssueConditionHandler` (#82846)
|
feat
|
diff --git a/src/sentry/workflow_engine/handlers/condition/__init__.py b/src/sentry/workflow_engine/handlers/condition/__init__.py
index 4c74ebaf885683..cc0ca9879d61bb 100644
--- a/src/sentry/workflow_engine/handlers/condition/__init__.py
+++ b/src/sentry/workflow_engine/handlers/condition/__init__.py
@@ -7,6 +7,7 @@
"ExistingHighPriorityIssueConditionHandler",
"EventAttributeConditionHandler",
"FirstSeenEventConditionHandler",
+ "NewHighPriorityIssueConditionHandler",
]
from .group_event_handlers import (
@@ -18,6 +19,7 @@
from .group_state_handlers import (
ExistingHighPriorityIssueConditionHandler,
FirstSeenEventConditionHandler,
+ NewHighPriorityIssueConditionHandler,
ReappearedEventConditionHandler,
RegressionEventConditionHandler,
)
diff --git a/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py b/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py
index 9203ba6662f35c..481de774e49243 100644
--- a/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py
+++ b/src/sentry/workflow_engine/handlers/condition/group_state_handlers.py
@@ -6,6 +6,18 @@
from sentry.workflow_engine.types import DataConditionHandler, WorkflowJob
+def is_new_event(job: WorkflowJob) -> bool:
+ state = job.get("group_state")
+ if state is None:
+ return False
+
+ workflow = job.get("workflow")
+ if workflow is None or workflow.environment_id is None:
+ return state["is_new"]
+
+ return state["is_new_group_environment"]
+
+
@condition_handler_registry.register(Condition.REGRESSION_EVENT)
class RegressionEventConditionHandler(DataConditionHandler[WorkflowJob]):
@staticmethod
@@ -46,12 +58,16 @@ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
class FirstSeenEventConditionHandler(DataConditionHandler[WorkflowJob]):
@staticmethod
def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
- state = job.get("group_state")
- if state is None:
- return False
+ return is_new_event(job)
+
- workflow = job.get("workflow")
- if workflow is None or workflow.environment_id is None:
- return state["is_new"]
+@condition_handler_registry.register(Condition.NEW_HIGH_PRIORITY_ISSUE)
+class NewHighPriorityIssueConditionHandler(DataConditionHandler[WorkflowJob]):
+ @staticmethod
+ def evaluate_value(job: WorkflowJob, comparison: Any) -> bool:
+ is_new = is_new_event(job)
+ event = job["event"]
+ if not event.project.flags.has_high_priority_alerts:
+ return is_new
- return state["is_new_group_environment"]
+ return is_new and event.group.priority == PriorityLevel.HIGH
diff --git a/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py b/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py
index 215a5adfa8f450..bd6c7bfd1e1484 100644
--- a/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py
+++ b/src/sentry/workflow_engine/migration_helpers/issue_alert_conditions.py
@@ -5,6 +5,7 @@
from sentry.rules.conditions.every_event import EveryEventCondition
from sentry.rules.conditions.existing_high_priority_issue import ExistingHighPriorityIssueCondition
from sentry.rules.conditions.first_seen_event import FirstSeenEventCondition
+from sentry.rules.conditions.new_high_priority_issue import NewHighPriorityIssueCondition
from sentry.rules.conditions.reappeared_event import ReappearedEventCondition
from sentry.rules.conditions.regression_event import RegressionEventCondition
from sentry.utils.registry import Registry
@@ -92,3 +93,15 @@ def create_first_seen_event_data_condition(
condition_result=True,
condition_group=dcg,
)
+
+
+@data_condition_translator_registry.register(NewHighPriorityIssueCondition.id)
+def create_new_high_priority_issue_condition(
+ data: dict[str, Any], dcg: DataConditionGroup
+) -> DataCondition:
+ return DataCondition.objects.create(
+ type=Condition.NEW_HIGH_PRIORITY_ISSUE,
+ comparison=True,
+ condition_result=True,
+ condition_group=dcg,
+ )
diff --git a/src/sentry/workflow_engine/models/data_condition.py b/src/sentry/workflow_engine/models/data_condition.py
index e07933696e7905..22aecf98180a67 100644
--- a/src/sentry/workflow_engine/models/data_condition.py
+++ b/src/sentry/workflow_engine/models/data_condition.py
@@ -26,6 +26,7 @@ class Condition(models.TextChoices):
EVERY_EVENT = "every_event"
EXISTING_HIGH_PRIORITY_ISSUE = "existing_high_priority_issue"
FIRST_SEEN_EVENT = "first_seen_event"
+ NEW_HIGH_PRIORITY_ISSUE = "new_high_priority_issue"
REGRESSION_EVENT = "regression_event"
REAPPEARED_EVENT = "reappeared_event"
diff --git a/tests/sentry/workflow_engine/handlers/condition/test_group_state_handlers.py b/tests/sentry/workflow_engine/handlers/condition/test_group_state_handlers.py
index 11b1625916c8c6..422bac062f62f6 100644
--- a/tests/sentry/workflow_engine/handlers/condition/test_group_state_handlers.py
+++ b/tests/sentry/workflow_engine/handlers/condition/test_group_state_handlers.py
@@ -1,6 +1,7 @@
from sentry.eventstream.base import GroupState
from sentry.rules.conditions.existing_high_priority_issue import ExistingHighPriorityIssueCondition
from sentry.rules.conditions.first_seen_event import FirstSeenEventCondition
+from sentry.rules.conditions.new_high_priority_issue import NewHighPriorityIssueCondition
from sentry.rules.conditions.reappeared_event import ReappearedEventCondition
from sentry.rules.conditions.regression_event import RegressionEventCondition
from sentry.types.group import PriorityLevel
@@ -207,3 +208,81 @@ def test_dual_write(self):
assert dc.comparison is True
assert dc.condition_result is True
assert dc.condition_group == dcg
+
+
+class TestNewHighPriorityIssueCondition(ConditionTestCase):
+ condition = Condition.NEW_HIGH_PRIORITY_ISSUE
+ rule_cls = NewHighPriorityIssueCondition
+ payload = {"id": NewHighPriorityIssueCondition.id}
+
+ def setUp(self):
+ super().setUp()
+ self.job = WorkflowJob(
+ {
+ "event": self.group_event,
+ "group_state": GroupState(
+ {
+ "id": 1,
+ "is_regression": True,
+ "is_new": True,
+ "is_new_group_environment": True,
+ }
+ ),
+ "workflow": Workflow(environment_id=1),
+ }
+ )
+ self.dc = self.create_data_condition(
+ type=self.condition,
+ comparison=True,
+ condition_result=True,
+ )
+
+ def test_with_high_priority_alerts(self):
+ self.project.flags.has_high_priority_alerts = True
+ self.project.save()
+
+ # This will only pass for new issues
+ self.group_event.group.update(priority=PriorityLevel.HIGH)
+ self.job["group_state"]["is_new_group_environment"] = True
+ self.assert_passes(self.dc, self.job)
+
+ # These will never pass
+ self.job["group_state"]["is_new_group_environment"] = False
+ self.assert_does_not_pass(self.dc, self.job)
+
+ self.group_event.group.update(priority=PriorityLevel.MEDIUM)
+ self.assert_does_not_pass(self.dc, self.job)
+
+ self.group_event.group.update(priority=PriorityLevel.LOW)
+ self.assert_does_not_pass(self.dc, self.job)
+
+ def test_without_high_priority_alerts(self):
+ self.project.flags.has_high_priority_alerts = False
+ self.project.save()
+
+ self.group_event.group.update(priority=PriorityLevel.HIGH)
+ self.job["group_state"]["is_new_group_environment"] = True
+ self.assert_passes(self.dc, self.job)
+ self.job["group_state"]["is_new_group_environment"] = False
+ self.assert_does_not_pass(self.dc, self.job)
+
+ self.group_event.group.update(priority=PriorityLevel.MEDIUM)
+ self.job["group_state"]["is_new_group_environment"] = True
+ self.assert_passes(self.dc, self.job)
+ self.job["group_state"]["is_new_group_environment"] = False
+ self.assert_does_not_pass(self.dc, self.job)
+
+ self.group_event.group.update(priority=PriorityLevel.LOW)
+ self.job["group_state"]["is_new_group_environment"] = True
+ self.assert_passes(self.dc, self.job)
+ self.job["group_state"]["is_new_group_environment"] = False
+ self.assert_does_not_pass(self.dc, self.job)
+
+ def test_dual_write(self):
+ dcg = self.create_data_condition_group()
+ dc = self.translate_to_data_condition(self.payload, dcg)
+
+ assert dc.type == self.condition
+ assert dc.comparison is True
+ assert dc.condition_result is True
+ assert dc.condition_group == dcg
|
c566e70d5df7da48d8fa484111526a9d22b3a4d7
|
2022-10-27 21:35:40
|
Abdkhan14
|
fix(discover-quick-context): Refactored code to branch out into diffe… (#40652)
| false
|
Refactored code to branch out into diffe… (#40652)
|
fix
|
diff --git a/static/app/views/eventsV2/table/cellAction.tsx b/static/app/views/eventsV2/table/cellAction.tsx
index 994a3ca68da10b..fcfd877dd0465a 100644
--- a/static/app/views/eventsV2/table/cellAction.tsx
+++ b/static/app/views/eventsV2/table/cellAction.tsx
@@ -503,7 +503,11 @@ class CellAction extends Component<Props, State> {
data-placement={placement}
style={arrowProps.style}
/>
- <QuickContext dataRow={this.props.dataRow} column={this.props.column} />
+ <QuickContext
+ dataRow={this.props.dataRow}
+ column={this.props.column}
+ organization={this.props.organization}
+ />
</Menu>
)}
</Popper>,
diff --git a/static/app/views/eventsV2/table/quickContext.tsx b/static/app/views/eventsV2/table/quickContext.tsx
index 0325b734497aec..4ed4b79390188f 100644
--- a/static/app/views/eventsV2/table/quickContext.tsx
+++ b/static/app/views/eventsV2/table/quickContext.tsx
@@ -1,7 +1,7 @@
import {Fragment, useEffect, useState} from 'react';
import styled from '@emotion/styled';
-import {Client} from 'sentry/api';
+import {RequestOptions} from 'sentry/api';
import {QuickContextCommitRow} from 'sentry/components/discover/quickContextCommitRow';
import EventCause from 'sentry/components/events/eventCause';
import {CauseHeader, DataSection} from 'sentry/components/events/styles';
@@ -14,7 +14,7 @@ import {IconCheckmark, IconMute, IconNot} from 'sentry/icons';
import {t} from 'sentry/locale';
import GroupStore from 'sentry/stores/groupStore';
import space from 'sentry/styles/space';
-import {Group} from 'sentry/types';
+import {Group, Organization} from 'sentry/types';
import {TableDataRow} from 'sentry/utils/discover/discoverQuery';
import useApi from 'sentry/utils/useApi';
@@ -25,6 +25,7 @@ const UNKNOWN_ISSUE = 'unknown';
// Will extend this enum as we add contexts for more columns
export enum ColumnType {
ISSUE = 'issue',
+ RELEASE = 'release',
}
function isIssueContext(
@@ -38,6 +39,7 @@ function isIssueContext(
);
}
+// NOTE: Will add release column as an eligible column.
export function hasContext(
dataRow: TableDataRow,
column: TableColumn<keyof TableDataRow>
@@ -45,33 +47,40 @@ export function hasContext(
return isIssueContext(dataRow, column);
}
-// NOTE: Will extend when we add more type of contexts.
-function getUrl(dataRow: TableDataRow, column: TableColumn<keyof TableDataRow>): string {
- return isIssueContext(dataRow, column) ? `/issues/${dataRow['issue.id']}/` : '';
-}
+type RequestParams = {
+ path: string;
+ options?: RequestOptions;
+};
-function fetchData(
- api: Client,
+// NOTE: Will extend when we add more type of contexts. Context is only relevant to issue and release columns for now.
+function getRequestParams(
dataRow: TableDataRow,
- column: TableColumn<keyof TableDataRow>
-): Promise<Group> {
- const promise: Promise<Group> = api.requestPromise(getUrl(dataRow, column), {
- method: 'GET',
- query: {
- collapse: 'release',
- expand: 'inbox',
- },
- });
-
- return promise;
+ column: TableColumn<keyof TableDataRow>,
+ organization?: Organization
+): RequestParams {
+ return isIssueContext(dataRow, column)
+ ? {
+ path: `/issues/${dataRow['issue.id']}/`,
+ options: {
+ method: 'GET',
+ query: {
+ collapse: 'release',
+ expand: 'inbox',
+ },
+ },
+ }
+ : {
+ path: `/organizations/${organization?.slug}/releases/${dataRow.release}/`,
+ };
}
type Props = {
column: TableColumn<keyof TableDataRow>;
dataRow: TableDataRow;
+ organization?: Organization;
};
-export default function QuickContext(props: Props) {
+export default function QuickContext({column, dataRow, organization}: Props) {
// Will add setter for error.
const api = useApi();
const [error, setError] = useState<boolean>(false);
@@ -84,7 +93,9 @@ export default function QuickContext(props: Props) {
// Track mounted state so we dont call setState on unmounted components
let unmounted = false;
- fetchData(api, props.dataRow, props.column)
+ const params = getRequestParams(dataRow, column, organization);
+ api
+ .requestPromise(params.path, params.options)
.then(response => {
if (unmounted) {
return;
@@ -104,7 +115,7 @@ export default function QuickContext(props: Props) {
// If component has unmounted, dont set state
unmounted = true;
};
- }, [api, props.dataRow, props.column]);
+ }, [api, dataRow, column, organization]);
return (
<Wrapper>
@@ -118,8 +129,8 @@ export default function QuickContext(props: Props) {
</NoContextWrapper>
) : error ? (
<NoContextWrapper>{t('Failed to load context for column.')}</NoContextWrapper>
- ) : isIssueContext(props.dataRow, props.column) && data ? (
- <IssueContext data={data} eventID={props.dataRow.id} />
+ ) : isIssueContext(dataRow, column) && data ? (
+ <IssueContext data={data} eventID={dataRow.id} />
) : (
<NoContextWrapper>{t('There is no context available.')}</NoContextWrapper>
)}
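A small sketch (not part of the commit) of how the request-parameter branching above resolves for an issue column versus a release column; `quickContextRequest`, its arguments, and the literal values are illustrative, since `getRequestParams` itself is module-private:

// Illustrative mirror of the branching in getRequestParams.
type SketchRequestParams = {
  path: string;
  options?: {method?: 'GET'; query?: Record<string, string>};
};

function quickContextRequest(
  isIssueColumn: boolean,
  row: {'issue.id'?: number; release?: string},
  orgSlug?: string
): SketchRequestParams {
  return isIssueColumn
    ? {
        // Issue rows hit the issue details endpoint with collapse/expand options.
        path: `/issues/${row['issue.id']}/`,
        options: {method: 'GET', query: {collapse: 'release', expand: 'inbox'}},
      }
    : {
        // Release rows only need the release details path, scoped to the org.
        path: `/organizations/${orgSlug}/releases/${row.release}/`,
      };
}

quickContextRequest(true, {'issue.id': 42}).path; // '/issues/42/'
quickContextRequest(false, {release: 'backend@24.1.0'}, 'my-org').path;
// '/organizations/my-org/releases/backend@24.1.0/'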
|
9a17d6c7a1f9cb0f355caa1823edffce3559b55f
|
2023-10-27 00:27:50
|
Ryan Skonnord
|
test(hc): Convert SyncOptionsTest to all_silo_test (#58828)
| false
|
Convert SyncOptionsTest to all_silo_test (#58828)
|
test
|
diff --git a/tests/sentry/tasks/test_options.py b/tests/sentry/tasks/test_options.py
index 1446fdeb87f3ab..1af4ce255e4d4f 100644
--- a/tests/sentry/tasks/test_options.py
+++ b/tests/sentry/tasks/test_options.py
@@ -1,22 +1,30 @@
from datetime import timedelta
from unittest.mock import patch
-from sentry.models.options.option import Option
-from sentry.options import default_manager, default_store
+from sentry.options import UnknownOption, default_manager, default_store
from sentry.tasks.options import sync_options
from sentry.testutils.cases import TestCase
-from sentry.testutils.silo import control_silo_test
+from sentry.testutils.silo import all_silo_test
-@control_silo_test
+@all_silo_test(stable=True)
class SyncOptionsTest(TestCase):
+ _TEST_KEY = "foo"
+
+ def tearDown(self):
+ super().tearDown()
+ try:
+ default_manager.unregister(self._TEST_KEY)
+ except UnknownOption:
+ pass
+
def test_task_persistent_name(self):
assert sync_options.name == "sentry.tasks.options.sync_options"
@patch.object(default_store, "set_cache")
def test_simple(self, mock_set_cache):
- default_manager.register("foo")
- option = Option.objects.create(key="foo", value="bar")
+ default_manager.register(self._TEST_KEY)
+ option = default_store.model.objects.create(key=self._TEST_KEY, value="bar")
sync_options(cutoff=60)
assert mock_set_cache.called
|
7d7adf116105bcd99351d9935928155a88279642
|
2022-06-17 00:18:19
|
Dameli Ushbayeva
|
fix(perf): Remove all references to user modified query (#35752)
| false
|
Remove all references to user modified query (#35752)
|
fix
|
diff --git a/static/app/actionCreators/events.tsx b/static/app/actionCreators/events.tsx
index 5ab7435345e1ac..d71e2a47d72dd1 100644
--- a/static/app/actionCreators/events.tsx
+++ b/static/app/actionCreators/events.tsx
@@ -36,7 +36,6 @@ type Options = {
start?: DateString;
team?: Readonly<string | string[]>;
topEvents?: number;
- userModified?: string;
withoutZerofill?: boolean;
yAxis?: string | string[];
};
@@ -86,7 +85,6 @@ export const doEventsRequest = (
generatePathname,
queryExtras,
excludeOther,
- userModified,
}: Options
): Promise<EventsStats | MultiSeriesEventsStats> => {
const pathname =
@@ -110,7 +108,6 @@ export const doEventsRequest = (
withoutZerofill: withoutZerofill ? '1' : undefined,
referrer: referrer ? referrer : 'api.organization-event-stats',
excludeOther: excludeOther ? '1' : undefined,
- user_modified: pathname.includes('events-stats') ? userModified : undefined,
}).filter(([, value]) => typeof value !== 'undefined')
);
diff --git a/static/app/components/charts/eventsChart.tsx b/static/app/components/charts/eventsChart.tsx
index 0016b2337a5e7c..b7e7d0d623e024 100644
--- a/static/app/components/charts/eventsChart.tsx
+++ b/static/app/components/charts/eventsChart.tsx
@@ -37,7 +37,7 @@ import {
getEquation,
isEquation,
} from 'sentry/utils/discover/fields';
-import {decodeList, decodeScalar} from 'sentry/utils/queryString';
+import {decodeList} from 'sentry/utils/queryString';
import {Theme} from 'sentry/utils/theme';
import EventsGeoRequest from './eventsGeoRequest';
@@ -667,7 +667,6 @@ class EventsChart extends React.Component<EventsChartProps> {
partial
// Cannot do interpolation when stacking series
withoutZerofill={withoutZerofill && !this.isStacked()}
- userModified={decodeScalar(router.location.query.userModified)}
>
{eventData => {
return chartImplementation({
diff --git a/static/app/components/charts/eventsRequest.tsx b/static/app/components/charts/eventsRequest.tsx
index e184109f0e092c..084a0a2178c357 100644
--- a/static/app/components/charts/eventsRequest.tsx
+++ b/static/app/components/charts/eventsRequest.tsx
@@ -193,10 +193,6 @@ type EventsRequestPartialProps = {
* in the `results` child render function.
*/
topEvents?: number;
- /**
- * Tracks whether the query was modified by a user in the search bar
- */
- userModified?: string;
/**
* Whether or not to zerofill results
*/
diff --git a/static/app/utils/discover/genericDiscoverQuery.tsx b/static/app/utils/discover/genericDiscoverQuery.tsx
index 7989f357dab4ac..894b57ed07dd3e 100644
--- a/static/app/utils/discover/genericDiscoverQuery.tsx
+++ b/static/app/utils/discover/genericDiscoverQuery.tsx
@@ -12,8 +12,6 @@ import EventView, {
import {PerformanceEventViewContext} from 'sentry/utils/performance/contexts/performanceEventViewContext';
import {OrganizationContext} from 'sentry/views/organizationContext';
-import {decodeScalar} from '../queryString';
-
export class QueryError {
message: string;
private originalError: any; // For debugging in case parseError picks a value that doesn't make sense.
@@ -172,7 +170,7 @@ class _GenericDiscoverQuery<T, P> extends Component<Props<T, P>, State<T>> {
}
getPayload(props: Props<T, P>) {
- const {cursor, limit, noPagination, referrer, location} = props;
+ const {cursor, limit, noPagination, referrer} = props;
const payload = this.props.getRequestPayload
? this.props.getRequestPayload(props)
: props.eventView.getEventsAPIPayload(props.location);
@@ -190,13 +188,6 @@ class _GenericDiscoverQuery<T, P> extends Component<Props<T, P>, State<T>> {
payload.referrer = referrer;
}
- if (['events', 'eventsv2'].includes(props.route)) {
- const queryUserModified = decodeScalar(location.query?.userModified);
- if (queryUserModified !== undefined) {
- payload.user_modified = queryUserModified;
- }
- }
-
Object.assign(payload, props.queryExtras ?? {});
return payload;
diff --git a/static/app/views/eventsV2/index.tsx b/static/app/views/eventsV2/index.tsx
index 40b373d386cc7e..f3a6ff7eef297c 100644
--- a/static/app/views/eventsV2/index.tsx
+++ b/static/app/views/eventsV2/index.tsx
@@ -1,6 +1,3 @@
-import {useEffect, useRef} from 'react';
-import {browserHistory} from 'react-router';
-
import Feature from 'sentry/components/acl/feature';
import Alert from 'sentry/components/alert';
import {t} from 'sentry/locale';
@@ -10,30 +7,10 @@ import withOrganization from 'sentry/utils/withOrganization';
type Props = {
children: React.ReactChildren;
- location: any;
organization: Organization;
};
-function DiscoverContainer({organization, children, location}: Props) {
- const prevLocationPathname = useRef('');
-
- useEffect(
- // when new discover page loads, query is pristine
- function () {
- if (location.pathname !== prevLocationPathname.current) {
- prevLocationPathname.current = location.pathname;
- browserHistory.push({
- pathname: location.pathname,
- query: {
- ...location.query,
- userModified: undefined,
- },
- });
- }
- },
- [location]
- );
-
+function DiscoverContainer({organization, children}: Props) {
function renderNoAccess() {
return (
<PageContent>
diff --git a/static/app/views/eventsV2/results.tsx b/static/app/views/eventsV2/results.tsx
index c8915b82d576f4..30f5a4310aa5c6 100644
--- a/static/app/views/eventsV2/results.tsx
+++ b/static/app/views/eventsV2/results.tsx
@@ -314,10 +314,7 @@ class Results extends Component<Props, State> {
router.push({
pathname: location.pathname,
- query: {
- ...searchQueryParams,
- userModified: true,
- },
+ query: searchQueryParams,
});
};
diff --git a/static/app/views/eventsV2/table/index.tsx b/static/app/views/eventsV2/table/index.tsx
index 9f897c58f01328..9f40c9fef4b102 100644
--- a/static/app/views/eventsV2/table/index.tsx
+++ b/static/app/views/eventsV2/table/index.tsx
@@ -16,7 +16,6 @@ import EventView, {
import Measurements from 'sentry/utils/measurements/measurements';
import parseLinkHeader from 'sentry/utils/parseLinkHeader';
import {SPAN_OP_BREAKDOWN_FIELDS} from 'sentry/utils/performance/spanOperationBreakdowns/constants';
-import {decodeScalar} from 'sentry/utils/queryString';
import withApi from 'sentry/utils/withApi';
import TableView from './tableView';
@@ -100,16 +99,10 @@ class Table extends PureComponent<TableProps, TableState> {
: `/organizations/${organization.slug}/eventsv2/`;
const tableFetchID = Symbol('tableFetchID');
- // adding user_modified property. this property will be removed once search bar experiment is complete
const apiPayload = eventView.getEventsAPIPayload(location) as LocationQuery &
- EventQuery & {user_modified?: string};
+ EventQuery;
apiPayload.referrer = 'api.discover.query-table';
- const queryUserModified = decodeScalar(location.query.userModified);
- if (queryUserModified !== undefined) {
- apiPayload.user_modified = queryUserModified;
- }
-
setError('', 200);
this.setState({isLoading: true, tableFetchID});
diff --git a/static/app/views/performance/content.tsx b/static/app/views/performance/content.tsx
index a487d54b85e44a..acf0c872ad968e 100644
--- a/static/app/views/performance/content.tsx
+++ b/static/app/views/performance/content.tsx
@@ -131,7 +131,6 @@ function PerformanceContent({selection, location, demoMode}: Props) {
cursor: undefined,
query: String(searchQuery).trim() || undefined,
isDefaultQuery: false,
- userModified: true,
},
});
}
diff --git a/static/app/views/performance/index.tsx b/static/app/views/performance/index.tsx
index ed8a5b2a1a7e4b..79305fd3df9d2e 100644
--- a/static/app/views/performance/index.tsx
+++ b/static/app/views/performance/index.tsx
@@ -1,7 +1,3 @@
-import {useEffect, useRef} from 'react';
-import {browserHistory} from 'react-router';
-import {Location} from 'history';
-
import Feature from 'sentry/components/acl/feature';
import Alert from 'sentry/components/alert';
import {t} from 'sentry/locale';
@@ -12,31 +8,10 @@ import withOrganization from 'sentry/utils/withOrganization';
type Props = {
children: React.ReactChildren;
- location: Location;
organization: Organization;
};
-function PerformanceContainer({organization, children, location}: Props) {
- const prevLocationPathname = useRef('');
-
- useEffect(
- function () {
- // when new perf page loads, query is pristine
- if (location.pathname !== prevLocationPathname.current) {
- prevLocationPathname.current = location.pathname;
- browserHistory.push({
- pathname: location.pathname,
- query: {
- ...location.query,
- userModified: undefined,
- },
- hash: location.hash,
- });
- }
- },
- [location]
- );
-
+function PerformanceContainer({organization, children}: Props) {
function renderNoAccess() {
return (
<PageContent>
diff --git a/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx b/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx
index 43788d2de6130c..998461d0ed5f79 100644
--- a/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx
+++ b/static/app/views/performance/landing/widgets/widgets/lineChartListWidget.tsx
@@ -13,7 +13,6 @@ import DiscoverQuery from 'sentry/utils/discover/discoverQuery';
import {getAggregateAlias} from 'sentry/utils/discover/fields';
import {useMEPSettingContext} from 'sentry/utils/performance/contexts/metricsEnhancedSetting';
import {usePageError} from 'sentry/utils/performance/contexts/pageError';
-import {decodeScalar} from 'sentry/utils/queryString';
import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import withApi from 'sentry/utils/withApi';
import _DurationChart from 'sentry/views/performance/charts/chart';
@@ -186,7 +185,6 @@ export function LineChartListWidget(props: PerformanceWidgetProps) {
hideError
onError={pageError.setPageError}
queryExtras={getMEPParamsIfApplicable(mepSetting, props.chartSetting)}
- userModified={decodeScalar(props.location.query.userModified)}
/>
);
},
@@ -194,12 +192,7 @@ export function LineChartListWidget(props: PerformanceWidgetProps) {
};
},
// eslint-disable-next-line react-hooks/exhaustive-deps
- [
- props.chartSetting,
- selectedListIndex,
- mepSetting.memoizationKey,
- props.location.query.userModified,
- ]
+ [props.chartSetting, selectedListIndex, mepSetting.memoizationKey]
);
const Queries = {
diff --git a/static/app/views/performance/landing/widgets/widgets/singleFieldAreaWidget.tsx b/static/app/views/performance/landing/widgets/widgets/singleFieldAreaWidget.tsx
index 2b962f9fef0ea1..f664f3b9b209c7 100644
--- a/static/app/views/performance/landing/widgets/widgets/singleFieldAreaWidget.tsx
+++ b/static/app/views/performance/landing/widgets/widgets/singleFieldAreaWidget.tsx
@@ -9,7 +9,6 @@ import {t} from 'sentry/locale';
import {QueryBatchNode} from 'sentry/utils/performance/contexts/genericQueryBatcher';
import {useMEPSettingContext} from 'sentry/utils/performance/contexts/metricsEnhancedSetting';
import {usePageError} from 'sentry/utils/performance/contexts/pageError';
-import {decodeScalar} from 'sentry/utils/queryString';
import withApi from 'sentry/utils/withApi';
import _DurationChart from 'sentry/views/performance/charts/chart';
@@ -23,7 +22,7 @@ type DataType = {
};
export function SingleFieldAreaWidget(props: PerformanceWidgetProps) {
- const {ContainerActions, location} = props;
+ const {ContainerActions} = props;
const globalSelection = props.eventView.getPageFilters();
const pageError = usePageError();
const mepSetting = useMEPSettingContext();
@@ -60,14 +59,13 @@ export function SingleFieldAreaWidget(props: PerformanceWidgetProps) {
hideError
onError={pageError.setPageError}
queryExtras={getMEPQueryParams(mepSetting)}
- userModified={decodeScalar(location.query.userModified)}
/>
)}
</QueryBatchNode>
),
transform: transformEventsRequestToArea,
}),
- [props.chartSetting, mepSetting.memoizationKey, location.query.userModified]
+ [props.chartSetting, mepSetting.memoizationKey]
);
const Queries = {
diff --git a/static/app/views/performance/landing/widgets/widgets/vitalWidget.tsx b/static/app/views/performance/landing/widgets/widgets/vitalWidget.tsx
index 025a06110ca224..1be36fed451ea6 100644
--- a/static/app/views/performance/landing/widgets/widgets/vitalWidget.tsx
+++ b/static/app/views/performance/landing/widgets/widgets/vitalWidget.tsx
@@ -14,7 +14,7 @@ import {getAggregateAlias, WebVital} from 'sentry/utils/discover/fields';
import {useMEPSettingContext} from 'sentry/utils/performance/contexts/metricsEnhancedSetting';
import {usePageError} from 'sentry/utils/performance/contexts/pageError';
import {VitalData} from 'sentry/utils/performance/vitals/vitalsCardsDiscoverQuery';
-import {decodeList, decodeScalar} from 'sentry/utils/queryString';
+import {decodeList} from 'sentry/utils/queryString';
import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import withApi from 'sentry/utils/withApi';
import {vitalDetailRouteWithQuery} from 'sentry/views/performance/vitalDetail/utils';
@@ -173,19 +173,13 @@ export function VitalWidget(props: PerformanceWidgetProps) {
hideError
onError={pageError.setPageError}
queryExtras={getMEPQueryParams(mepSetting)}
- userModified={decodeScalar(props.location.query.userModified)}
/>
);
},
transform: transformEventsRequestToVitals,
}),
// eslint-disable-next-line react-hooks/exhaustive-deps
- [
- props.chartSetting,
- selectedListIndex,
- mepSetting.memoizationKey,
- props.location.query.userModified,
- ]
+ [props.chartSetting, selectedListIndex, mepSetting.memoizationKey]
),
};
diff --git a/static/app/views/performance/transactionSummary/transactionEvents/content.tsx b/static/app/views/performance/transactionSummary/transactionEvents/content.tsx
index 81173130a01218..91ec06451c7851 100644
--- a/static/app/views/performance/transactionSummary/transactionEvents/content.tsx
+++ b/static/app/views/performance/transactionSummary/transactionEvents/content.tsx
@@ -114,10 +114,7 @@ function Search(props: Props) {
browserHistory.push({
pathname: location.pathname,
- query: {
- ...searchQueryParams,
- userModified: true,
- },
+ query: searchQueryParams,
});
};
diff --git a/static/app/views/performance/transactionSummary/transactionOverview/content.tsx b/static/app/views/performance/transactionSummary/transactionOverview/content.tsx
index 83d95480f54e78..9f75dbb74986e0 100644
--- a/static/app/views/performance/transactionSummary/transactionOverview/content.tsx
+++ b/static/app/views/performance/transactionSummary/transactionOverview/content.tsx
@@ -104,10 +104,7 @@ function SummaryContent({
browserHistory.push({
pathname: location.pathname,
- query: {
- ...searchQueryParams,
- userModified: true,
- },
+ query: searchQueryParams,
});
}
diff --git a/static/app/views/performance/transactionSummary/transactionOverview/durationChart/index.tsx b/static/app/views/performance/transactionSummary/transactionOverview/durationChart/index.tsx
index 4d631bc4d10e87..b0d1fdd26ff697 100644
--- a/static/app/views/performance/transactionSummary/transactionOverview/durationChart/index.tsx
+++ b/static/app/views/performance/transactionSummary/transactionOverview/durationChart/index.tsx
@@ -12,7 +12,6 @@ import {t, tct} from 'sentry/locale';
import {OrganizationSummary} from 'sentry/types';
import {getUtcToLocalDateObject} from 'sentry/utils/dates';
import {useMEPSettingContext} from 'sentry/utils/performance/contexts/metricsEnhancedSetting';
-import {decodeScalar} from 'sentry/utils/queryString';
import useApi from 'sentry/utils/useApi';
import {getMEPQueryParams} from 'sentry/views/performance/landing/widgets/utils';
@@ -149,7 +148,6 @@ function DurationChart({
withoutZerofill={withoutZerofill}
referrer="api.performance.transaction-summary.duration-chart"
queryExtras={getMEPQueryParams(mepContext)}
- userModified={decodeScalar(location.query.userModified)}
>
{({results, errored, loading, reloading, timeframe: timeFrame}) => (
<Content
diff --git a/static/app/views/performance/transactionSummary/transactionOverview/trendChart/index.tsx b/static/app/views/performance/transactionSummary/transactionOverview/trendChart/index.tsx
index 08561264846388..121128fde6522f 100644
--- a/static/app/views/performance/transactionSummary/transactionOverview/trendChart/index.tsx
+++ b/static/app/views/performance/transactionSummary/transactionOverview/trendChart/index.tsx
@@ -11,7 +11,6 @@ import QuestionTooltip from 'sentry/components/questionTooltip';
import {t} from 'sentry/locale';
import {OrganizationSummary} from 'sentry/types';
import {getUtcToLocalDateObject} from 'sentry/utils/dates';
-import {decodeScalar} from 'sentry/utils/queryString';
import useApi from 'sentry/utils/useApi';
import {TrendFunctionField} from '../../../trends/types';
@@ -130,7 +129,6 @@ function TrendChart({
partial
withoutZerofill={withoutZerofill}
referrer="api.performance.transaction-summary.trends-chart"
- userModified={decodeScalar(location.query.userModified)}
>
{({errored, loading, reloading, timeseriesData, timeframe: timeFrame}) => (
<Content
diff --git a/static/app/views/performance/transactionSummary/transactionOverview/vitalsChart/index.tsx b/static/app/views/performance/transactionSummary/transactionOverview/vitalsChart/index.tsx
index 91fa19e3384dd7..3ade0b9f6c8cbc 100644
--- a/static/app/views/performance/transactionSummary/transactionOverview/vitalsChart/index.tsx
+++ b/static/app/views/performance/transactionSummary/transactionOverview/vitalsChart/index.tsx
@@ -16,7 +16,6 @@ import {
getMeasurementSlug,
WebVital,
} from 'sentry/utils/discover/fields';
-import {decodeScalar} from 'sentry/utils/queryString';
import useApi from 'sentry/utils/useApi';
import {ViewProps} from '../../../types';
@@ -141,7 +140,6 @@ function VitalsChart({
partial
withoutZerofill={withoutZerofill}
referrer="api.performance.transaction-summary.vitals-chart"
- userModified={decodeScalar(location.query.userModified)}
>
{({results, errored, loading, reloading, timeframe: timeFrame}) => (
<Content
diff --git a/static/app/views/performance/transactionSummary/transactionSpans/content.tsx b/static/app/views/performance/transactionSummary/transactionSpans/content.tsx
index 226fa2cfd499e3..089cfead4fe093 100644
--- a/static/app/views/performance/transactionSummary/transactionSpans/content.tsx
+++ b/static/app/views/performance/transactionSummary/transactionSpans/content.tsx
@@ -78,10 +78,7 @@ function SpansContent(props: Props) {
browserHistory.push({
...location,
- query: {
- ...searchQueryParams,
- userModified: key === 'query',
- },
+ query: searchQueryParams,
});
};
}
diff --git a/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/spanDetailsControls.tsx b/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/spanDetailsControls.tsx
index f9c5daceb5700b..36cb00f58e97ac 100644
--- a/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/spanDetailsControls.tsx
+++ b/static/app/views/performance/transactionSummary/transactionSpans/spanDetails/spanDetailsControls.tsx
@@ -36,7 +36,6 @@ export default function SpanDetailsControls({
...location.query,
cursor: undefined,
query: String(searchQuery).trim() || undefined,
- userModified: true,
},
});
};
diff --git a/static/app/views/performance/vitalDetail/vitalChart.tsx b/static/app/views/performance/vitalDetail/vitalChart.tsx
index 5e682249197c0d..619a5e518560bc 100644
--- a/static/app/views/performance/vitalDetail/vitalChart.tsx
+++ b/static/app/views/performance/vitalDetail/vitalChart.tsx
@@ -18,7 +18,6 @@ import {Series} from 'sentry/types/echarts';
import {axisLabelFormatter, tooltipFormatter} from 'sentry/utils/discover/charts';
import {WebVital} from 'sentry/utils/discover/fields';
import getDynamicText from 'sentry/utils/getDynamicText';
-import {decodeScalar} from 'sentry/utils/queryString';
import useApi from 'sentry/utils/useApi';
import {replaceSeriesName, transformEventStatsSmoothed} from '../trends/utils';
@@ -111,7 +110,6 @@ function VitalChart({
includePrevious={false}
yAxis={[yAxis]}
partial
- userModified={decodeScalar(location.query.userModified)}
>
{({timeseriesData: results, errored, loading, reloading}) => {
if (errored) {
diff --git a/static/app/views/performance/vitalDetail/vitalDetailContent.tsx b/static/app/views/performance/vitalDetail/vitalDetailContent.tsx
index 10e8ad633303d9..4c873c94bdb900 100644
--- a/static/app/views/performance/vitalDetail/vitalDetailContent.tsx
+++ b/static/app/views/performance/vitalDetail/vitalDetailContent.tsx
@@ -93,10 +93,7 @@ class VitalDetailContent extends Component<Props, State> {
browserHistory.push({
pathname: location.pathname,
- query: {
- ...searchQueryParams,
- userModified: true,
- },
+ query: searchQueryParams,
});
};
diff --git a/tests/js/spec/views/eventsV2/results.spec.jsx b/tests/js/spec/views/eventsV2/results.spec.jsx
index 777d7e868d9668..6dd84843f3f4db 100644
--- a/tests/js/spec/views/eventsV2/results.spec.jsx
+++ b/tests/js/spec/views/eventsV2/results.spec.jsx
@@ -308,8 +308,6 @@ describe('Results', function () {
...generateFields(),
query: 'geo:canada',
statsPeriod: '14d',
- // userModified added on new search for the search bar experiment
- userModified: true,
},
});
wrapper.unmount();
@@ -986,8 +984,6 @@ describe('Results', function () {
...generateFields(),
query: 'geo:canada',
statsPeriod: '14d',
- // userModified added on new search for the search bar experiment
- userModified: true,
},
});
wrapper.unmount();
diff --git a/tests/js/spec/views/performance/transactionSummary.spec.tsx b/tests/js/spec/views/performance/transactionSummary.spec.tsx
index 15af8bd118de24..282ef23ba6c571 100644
--- a/tests/js/spec/views/performance/transactionSummary.spec.tsx
+++ b/tests/js/spec/views/performance/transactionSummary.spec.tsx
@@ -560,7 +560,6 @@ describe('Performance > TransactionSummary', function () {
statsPeriod: '14d',
query: 'user.email:uhoh*',
transactionCursor: '1:0:0',
- userModified: true,
},
});
});
@@ -965,7 +964,6 @@ describe('Performance > TransactionSummary', function () {
statsPeriod: '14d',
query: 'user.email:uhoh*',
transactionCursor: '1:0:0',
- userModified: true,
},
});
});
diff --git a/tests/js/spec/views/performance/vitalDetail/index.spec.tsx b/tests/js/spec/views/performance/vitalDetail/index.spec.tsx
index b47eaabc01cc62..8b54a0a765f324 100644
--- a/tests/js/spec/views/performance/vitalDetail/index.spec.tsx
+++ b/tests/js/spec/views/performance/vitalDetail/index.spec.tsx
@@ -303,7 +303,6 @@ describe('Performance > VitalDetail', function () {
project: 1,
statsPeriod: '14d',
query: 'user.email:uhoh*',
- userModified: true,
},
});
});
|
6800d9a76272d32d39a51c2520c66fdc9e6db999
|
2018-01-18 06:48:19
|
Lyn Nagara
|
feat(environments): Release stats uses project default environment
| false
|
Release stats uses project default environment
|
feat
|
diff --git a/src/sentry/static/sentry/app/components/group/releaseStats.jsx b/src/sentry/static/sentry/app/components/group/releaseStats.jsx
index 9f078ac7654c37..dbd08da036b67d 100644
--- a/src/sentry/static/sentry/app/components/group/releaseStats.jsx
+++ b/src/sentry/static/sentry/app/components/group/releaseStats.jsx
@@ -75,7 +75,8 @@ const GroupReleaseStats = createReactClass({
fetchData() {
let group = this.props.group;
- let envName = this.state.environment.urlRoutingName;
+ let env = this.state.environment || {};
+ let envName = env.urlRoutingName;
let stats = this.props.group.stats['24h'];
// due to the current stats logic in Sentry we need to extend the bounds
diff --git a/src/sentry/static/sentry/app/components/group/sidebar.jsx b/src/sentry/static/sentry/app/components/group/sidebar.jsx
index ec54f8ce85efb1..34279b34b5fd74 100644
--- a/src/sentry/static/sentry/app/components/group/sidebar.jsx
+++ b/src/sentry/static/sentry/app/components/group/sidebar.jsx
@@ -194,18 +194,13 @@ const GroupSidebar = createReactClass({
let project = this.getProject();
let projectId = project.slug;
let orgId = this.getOrganization().slug;
- let defaultEnvironment = project.defaultEnvironment;
let group = this.getGroup();
return (
<div className="group-stats">
<SuggestedOwners event={this.props.event} />
- <GroupReleaseStats
- group={group}
- location={this.context.location}
- defaultEnvironment={defaultEnvironment}
- />
+ <GroupReleaseStats group={group} location={this.context.location} />
{this.renderPluginIssue()}
diff --git a/src/sentry/static/sentry/app/stores/environmentStore.jsx b/src/sentry/static/sentry/app/stores/environmentStore.jsx
index dd580741104d82..324c4d86a73ed9 100644
--- a/src/sentry/static/sentry/app/stores/environmentStore.jsx
+++ b/src/sentry/static/sentry/app/stores/environmentStore.jsx
@@ -1,5 +1,6 @@
import Reflux from 'reflux';
import {toTitleCase} from '../utils';
+import ProjectActions from '../actions/projectActions';
const PRODUCTION_ENV_NAMES = new Set([
'production',
@@ -15,6 +16,8 @@ const DEFAULT_ROUTING_NAME = 'none';
const EnvironmentStore = Reflux.createStore({
init() {
this.items = [];
+ this.defaultEnvironment = null;
+ this.listenTo(ProjectActions.setActive, this.onSetActiveProject);
},
loadInitialData(items) {
@@ -45,13 +48,27 @@ const EnvironmentStore = Reflux.createStore({
return this.items;
},
+ onSetActiveProject(project) {
+ if (project) {
+ this.defaultEnvironment = project.defaultEnvironment || null;
+ }
+ },
+
// Default environment is either the first based on the set of common names
// or the first in the environment list if none match
getDefault() {
let allEnvs = this.items;
+
+ let defaultEnv = allEnvs.find(e => e.name === this.defaultEnvironment);
+
let prodEnvs = allEnvs.filter(e => PRODUCTION_ENV_NAMES.has(e.name));
- return (prodEnvs.length && prodEnvs[0]) || (allEnvs.length && allEnvs[0]) || null;
+ return (
+ defaultEnv ||
+ (prodEnvs.length && prodEnvs[0]) ||
+ (allEnvs.length && allEnvs[0]) ||
+ null
+ );
},
});
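The precedence the store now applies is: the project's configured default environment first, then a well-known production-like name, then the first environment in the list, then none. A minimal Python sketch of that ordering (the plain-string environments and the set of production names are illustrative assumptions, not Sentry's actual values):

    PRODUCTION_ENV_NAMES = {"production", "prod", "release"}  # assumed subset

    def get_default(environments, default_environment=None):
        # Mirrors EnvironmentStore.getDefault's fallback chain, for illustration only.
        for env in environments:
            if env == default_environment:
                return env
        for env in environments:
            if env in PRODUCTION_ENV_NAMES:
                return env
        return environments[0] if environments else None

    envs = ["production", "staging"]
    assert get_default(envs, default_environment="staging") == "staging"  # project default wins
    assert get_default(envs) == "production"                              # otherwise a prod-like name
    assert get_default([]) is None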
|
3c2f9e7c2507c295e642302d2c5a28fae45ca3d1
|
2024-09-12 00:03:42
|
edwardgou-sentry
|
feat(insights): Adds insights alerts for http module (#77267)
| false
|
Adds insights alerts for http module (#77267)
|
feat
|
diff --git a/static/app/components/metrics/metricSearchBar.tsx b/static/app/components/metrics/metricSearchBar.tsx
index 20f3714517d7cc..a838bb889e8dfb 100644
--- a/static/app/components/metrics/metricSearchBar.tsx
+++ b/static/app/components/metrics/metricSearchBar.tsx
@@ -46,6 +46,9 @@ const INSIGHTS_ADDITIONAL_TAG_FILTERS: MetricTag[] = [
{
key: SpanMetricsField.SPAN_MODULE,
},
+ {
+ key: SpanMetricsField.FILE_EXTENSION,
+ },
];
export function MetricSearchBar({
diff --git a/static/app/utils/metrics/mri.tsx b/static/app/utils/metrics/mri.tsx
index 8b899d1b06bc8a..df7334a337d0db 100644
--- a/static/app/utils/metrics/mri.tsx
+++ b/static/app/utils/metrics/mri.tsx
@@ -7,6 +7,8 @@ import type {
UseCase,
} from 'sentry/types/metrics';
import {parseFunction} from 'sentry/utils/discover/fields';
+import {SPAN_DURATION_MRI} from 'sentry/utils/metrics/constants';
+import {INSIGHTS_METRICS_OPERATIONS} from 'sentry/views/alerts/rules/metric/utils/isInsightsMetricAlert';
export const DEFAULT_MRI: MRI = 'c:custom/sentry_metric@none';
export const DEFAULT_SPAN_MRI: MRI = 'c:custom/span_attribute_0@none';
@@ -116,11 +118,20 @@ export function isMRIField(field: string): boolean {
// convenience function to get the MRI from a field, returns default MRI if it fails
export function getMRI(field: string): MRI {
- // spm() doesn't take an argument and it always operates on the spans exclusive time mri
- if (['spm()', 'cache_miss_rate()'].includes(field)) {
- return 'd:spans/exclusive_time@millisecond';
- }
const parsed = parseField(field);
+ // Insights functions don't always take an MRI as an argument.
+ // In these cases, we need to default to a specific MRI.
+ if (parsed?.aggregation) {
+ const operation = INSIGHTS_METRICS_OPERATIONS.find(({value}) => {
+ return value === parsed?.aggregation;
+ });
+ if (operation) {
+ if (operation.mri) {
+ return operation.mri as MRI;
+ }
+ return SPAN_DURATION_MRI;
+ }
+ }
return parsed?.mri ?? DEFAULT_MRI;
}
diff --git a/static/app/views/alerts/rules/metric/insightsMetricField.spec.tsx b/static/app/views/alerts/rules/metric/insightsMetricField.spec.tsx
index 126dee6353a2c5..b9110acef67a34 100644
--- a/static/app/views/alerts/rules/metric/insightsMetricField.spec.tsx
+++ b/static/app/views/alerts/rules/metric/insightsMetricField.spec.tsx
@@ -89,4 +89,21 @@ describe('InsightsMetricField', () => {
userEvent.click(await screen.findByText('spm'));
await waitFor(() => expect(onChange).toHaveBeenCalledWith('spm()', {}));
});
+
+ it('should call onChange using the http_response_rate function defaulting with argument 3 when switching to http_response_rate', async () => {
+ const {project} = initializeOrg();
+ const onChange = jest.fn();
+ render(
+ <InsightsMetricField
+ aggregate={'avg(d:spans/exclusive_time@millisecond)'}
+ onChange={onChange}
+ project={project}
+ />
+ );
+ userEvent.click(screen.getByText('avg'));
+ userEvent.click(await screen.findByText('http_response_rate'));
+ await waitFor(() =>
+ expect(onChange).toHaveBeenCalledWith('http_response_rate(3)', {})
+ );
+ });
});
diff --git a/static/app/views/alerts/rules/metric/insightsMetricField.tsx b/static/app/views/alerts/rules/metric/insightsMetricField.tsx
index 06181bd86260b2..f8eb1be128b255 100644
--- a/static/app/views/alerts/rules/metric/insightsMetricField.tsx
+++ b/static/app/views/alerts/rules/metric/insightsMetricField.tsx
@@ -5,16 +5,17 @@ import Tag from 'sentry/components/badge/tag';
import SelectControl from 'sentry/components/forms/controls/selectControl';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
-import type {MetricMeta, ParsedMRI} from 'sentry/types/metrics';
+import type {MetricAggregation, MetricMeta, ParsedMRI} from 'sentry/types/metrics';
import type {Project} from 'sentry/types/project';
+import {parseFunction} from 'sentry/utils/discover/fields';
import {getDefaultAggregation} from 'sentry/utils/metrics';
import {getReadableMetricType} from 'sentry/utils/metrics/formatters';
import {
DEFAULT_INSIGHTS_METRICS_ALERT_FIELD,
DEFAULT_INSIGHTS_MRI,
formatMRI,
+ isMRI,
MRIToField,
- parseField,
parseMRI,
} from 'sentry/utils/metrics/mri';
import {useVirtualizedMetricsMeta} from 'sentry/utils/metrics/useMetricsMeta';
@@ -22,6 +23,8 @@ import {middleEllipsis} from 'sentry/utils/string/middleEllipsis';
import {
INSIGHTS_METRICS,
INSIGHTS_METRICS_OPERATIONS,
+ INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS,
+ INSIGHTS_METRICS_OPERATIONS_WITHOUT_ARGS,
} from 'sentry/views/alerts/rules/metric/utils/isInsightsMetricAlert';
interface Props {
@@ -49,11 +52,19 @@ const OPERATIONS = [
label: 'max',
value: 'max',
},
- ...INSIGHTS_METRICS_OPERATIONS,
+ ...INSIGHTS_METRICS_OPERATIONS.map(({label, value}) => ({label, value})),
];
function aggregateRequiresArgs(aggregation?: string) {
- return !['spm', 'cache_miss_rate'].includes(aggregation ?? '');
+ return !INSIGHTS_METRICS_OPERATIONS_WITHOUT_ARGS.some(
+ ({value}) => value === aggregation
+ );
+}
+
+function aggregateHasCustomArgs(aggregation?: string) {
+ return INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS.some(
+ ({value}) => value === aggregation
+ );
}
function InsightsMetricField({aggregate, project, onChange}: Props) {
@@ -74,17 +85,31 @@ function InsightsMetricField({aggregate, project, onChange}: Props) {
.filter(metric => INSIGHTS_METRICS.includes(metric.mri));
}, [meta]);
- const selectedValues = parseField(aggregate);
+ // We parse out the aggregation and field from the aggregate string.
+ // This only works for aggregates with <= 1 argument.
+ const {
+ name: aggregation,
+ arguments: [field],
+ } = parseFunction(aggregate) ?? {arguments: [undefined]};
const selectedMriMeta = useMemo(() => {
- return meta.find(metric => metric.mri === selectedValues?.mri);
- }, [meta, selectedValues?.mri]);
+ return meta.find(metric => metric.mri === field);
+ }, [meta, field]);
useEffect(() => {
- if (!aggregateRequiresArgs(selectedValues?.aggregation)) {
+ if (!aggregateRequiresArgs(aggregation)) {
+ return;
+ }
+ if (aggregation && aggregateHasCustomArgs(aggregation)) {
+ const options = INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS.find(
+ ({value}) => value === aggregation
+ )?.options;
+ if (options && !options.some(({value}) => value === field)) {
+ onChange(`${aggregation}(${options?.[0].value})`, {});
+ }
return;
}
- if (selectedValues?.mri && !selectedMriMeta && !isLoading) {
+ if (field && !selectedMriMeta && !isLoading) {
const newSelection = metaArr[0];
if (newSelection) {
onChange(MRIToField(newSelection.mri, 'avg'), {});
@@ -92,15 +117,7 @@ function InsightsMetricField({aggregate, project, onChange}: Props) {
onChange(DEFAULT_INSIGHTS_METRICS_ALERT_FIELD, {});
}
}
- }, [
- metaArr,
- onChange,
- isLoading,
- aggregate,
- selectedValues?.mri,
- selectedMriMeta,
- selectedValues?.aggregation,
- ]);
+ }, [metaArr, onChange, isLoading, aggregate, selectedMriMeta, aggregation, field]);
const handleMriChange = useCallback(
option => {
@@ -110,13 +127,23 @@ function InsightsMetricField({aggregate, project, onChange}: Props) {
}
const newType = parseMRI(option.value)?.type;
// If the type is the same, we can keep the current aggregate
- if (newType === selectedMeta.type && selectedValues?.aggregation) {
- onChange(MRIToField(option.value, selectedValues?.aggregation), {});
+ if (newType === selectedMeta.type && aggregation) {
+ onChange(MRIToField(option.value, aggregation as MetricAggregation), {});
} else {
onChange(MRIToField(option.value, getDefaultAggregation(option.value)), {});
}
},
- [meta, onChange, selectedValues?.aggregation]
+ [meta, onChange, aggregation]
+ );
+
+ const handleOptionChange = useCallback(
+ option => {
+ if (!option || !aggregation) {
+ return;
+ }
+ onChange(`${aggregation}(${option.value})`, {});
+ },
+ [onChange, aggregation]
);
// As SelectControl does not support an options size limit out of the box
@@ -159,9 +186,9 @@ function InsightsMetricField({aggregate, project, onChange}: Props) {
);
// When using the async variant of SelectControl, we need to pass in an option object instead of just the value
- const selectedMriOption = selectedValues?.mri && {
- label: formatMRI(selectedValues.mri),
- value: selectedValues.mri,
+ const selectedOption = field && {
+ label: isMRI(field) ? formatMRI(field) : field,
+ value: field,
};
return (
@@ -171,33 +198,51 @@ function InsightsMetricField({aggregate, project, onChange}: Props) {
isDisabled={isLoading}
placeholder={t('Select an operation')}
options={OPERATIONS}
- value={selectedValues?.aggregation}
+ value={aggregation}
onChange={option => {
if (!aggregateRequiresArgs(option.value)) {
onChange(`${option.value}()`, {});
- } else if (selectedValues?.mri) {
- onChange(MRIToField(selectedValues.mri, option.value), {});
+ } else if (aggregateHasCustomArgs(option.value)) {
+ const options = INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS.find(
+ ({value}) => value === option.value
+ )?.options;
+ onChange(`${option.value}(${options?.[0].value})`, {});
+ } else if (field && isMRI(field)) {
+ onChange(MRIToField(field, option.value), {});
} else {
onChange(MRIToField(DEFAULT_INSIGHTS_MRI, option.value), {});
}
}}
/>
- {aggregateRequiresArgs(selectedValues?.aggregation) && (
- <StyledSelectControl
- searchable
- isDisabled={isLoading}
- placeholder={t('Select a metric')}
- noOptionsMessage={() =>
- metaArr.length === 0 ? t('No metrics in this project') : t('No options')
- }
- async
- defaultOptions={getMriOptions('')}
- loadOptions={searchText => Promise.resolve(getMriOptions(searchText))}
- filterOption={() => true}
- value={selectedMriOption}
- onChange={handleMriChange}
- />
- )}
+ {aggregateRequiresArgs(aggregation) &&
+ (aggregateHasCustomArgs(aggregation) ? (
+ <StyledSelectControl
+ searchable
+ placeholder={t('Select an option')}
+ options={
+ INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS.find(
+ ({value}) => value === aggregation
+ )?.options
+ }
+ value={selectedOption}
+ onChange={handleOptionChange}
+ />
+ ) : (
+ <StyledSelectControl
+ searchable
+ isDisabled={isLoading}
+ placeholder={t('Select a metric')}
+ noOptionsMessage={() =>
+ metaArr.length === 0 ? t('No metrics in this project') : t('No options')
+ }
+ async
+ defaultOptions={getMriOptions('')}
+ loadOptions={searchText => Promise.resolve(getMriOptions(searchText))}
+ filterOption={() => true}
+ value={selectedOption}
+ onChange={handleMriChange}
+ />
+ ))}
</Wrapper>
);
}
diff --git a/static/app/views/alerts/rules/metric/ruleForm.tsx b/static/app/views/alerts/rules/metric/ruleForm.tsx
index 1ac7571839e605..0f785b8ed7bd35 100644
--- a/static/app/views/alerts/rules/metric/ruleForm.tsx
+++ b/static/app/views/alerts/rules/metric/ruleForm.tsx
@@ -57,6 +57,7 @@ import TriggersChart from 'sentry/views/alerts/rules/metric/triggers/chart';
import {getEventTypeFilter} from 'sentry/views/alerts/rules/metric/utils/getEventTypeFilter';
import hasThresholdValue from 'sentry/views/alerts/rules/metric/utils/hasThresholdValue';
import {isCustomMetricAlert} from 'sentry/views/alerts/rules/metric/utils/isCustomMetricAlert';
+import {isInsightsMetricAlert} from 'sentry/views/alerts/rules/metric/utils/isInsightsMetricAlert';
import {isOnDemandMetricAlert} from 'sentry/views/alerts/rules/metric/utils/onDemandMetricAlert';
import {AlertRuleType} from 'sentry/views/alerts/types';
import {ruleNeedsErrorMigration} from 'sentry/views/alerts/utils/migrationUi';
@@ -1197,7 +1198,8 @@ class RuleFormContainer extends DeprecatedAsyncComponent<Props, State> {
return (
<Main fullWidth>
<PermissionAlert access={['alerts:write']} project={project} />
- {isCustomMetricAlert(rule.aggregate) && <MetricsBetaEndAlert />}
+ {isCustomMetricAlert(rule.aggregate) &&
+ !isInsightsMetricAlert(rule.aggregate) && <MetricsBetaEndAlert />}
{eventView && <IncompatibleAlertQuery eventView={eventView} />}
<Form
diff --git a/static/app/views/alerts/rules/metric/utils/isInsightsMetricAlert.tsx b/static/app/views/alerts/rules/metric/utils/isInsightsMetricAlert.tsx
index 203fd32a290883..da887cfd2196e4 100644
--- a/static/app/views/alerts/rules/metric/utils/isInsightsMetricAlert.tsx
+++ b/static/app/views/alerts/rules/metric/utils/isInsightsMetricAlert.tsx
@@ -1,14 +1,47 @@
import {parseField} from 'sentry/utils/metrics/mri';
-export const INSIGHTS_METRICS_OPERATIONS = [
+export const INSIGHTS_METRICS_OPERATIONS_WITHOUT_ARGS = [
{
label: 'spm',
value: 'spm',
+ mri: 'd:spans/duration@millisecond',
},
{
label: 'cache_miss_rate',
value: 'cache_miss_rate',
+ mri: 'd:spans/duration@millisecond',
+ },
+];
+
+export const INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS = [
+ {
+ label: 'http_response_rate',
+ value: 'http_response_rate',
+ options: [
+ {label: '3', value: '3'},
+ {label: '4', value: '4'},
+ {label: '5', value: '5'},
+ ],
+ mri: 'd:spans/duration@millisecond',
},
+ {
+ label: 'performance_score',
+ value: 'performance_score',
+ options: [
+ {label: 'measurements.score.lcp', value: 'measurements.score.lcp'},
+ {label: 'measurements.score.fcp', value: 'measurements.score.fcp'},
+ {label: 'measurements.score.inp', value: 'measurements.score.inp'},
+ {label: 'measurements.score.cls', value: 'measurements.score.cls'},
+ {label: 'measurements.score.ttfb', value: 'measurements.score.ttfb'},
+ {label: 'measurements.score.total', value: 'measurements.score.total'},
+ ],
+ mri: 'd:transactions/measurements.score.total@ratio',
+ },
+];
+
+export const INSIGHTS_METRICS_OPERATIONS = [
+ ...INSIGHTS_METRICS_OPERATIONS_WITH_CUSTOM_ARGS,
+ ...INSIGHTS_METRICS_OPERATIONS_WITHOUT_ARGS,
];
export const INSIGHTS_METRICS = [
diff --git a/static/app/views/insights/http/alerts.ts b/static/app/views/insights/http/alerts.ts
new file mode 100644
index 00000000000000..8c2d3c2d48ddf3
--- /dev/null
+++ b/static/app/views/insights/http/alerts.ts
@@ -0,0 +1,29 @@
+import type {AlertConfig} from 'sentry/views/insights/common/components/chartPanel';
+
+const QUERY = 'span.module:http span.op:http.client';
+
+export const ALERTS: Record<string, AlertConfig> = {
+ spm: {
+ aggregate: 'spm()',
+ query: QUERY,
+ },
+ duration: {
+ aggregate: 'avg(d:spans/duration@millisecond)',
+ query: QUERY,
+ },
+ threeHundreds: {
+ aggregate: 'http_response_rate(3)',
+ query: QUERY,
+ name: 'Create 3XX Response Rate Alert',
+ },
+ fourHundreds: {
+ aggregate: 'http_response_rate(4)',
+ query: QUERY,
+ name: 'Create 4XX Response Rate Alert',
+ },
+ fiveHundreds: {
+ aggregate: 'http_response_rate(5)',
+ query: QUERY,
+ name: 'Create 5XX Response Rate Alert',
+ },
+};
diff --git a/static/app/views/insights/http/components/charts/durationChart.tsx b/static/app/views/insights/http/components/charts/durationChart.tsx
index e182609b5c1ff7..51c90369667a5c 100644
--- a/static/app/views/insights/http/components/charts/durationChart.tsx
+++ b/static/app/views/insights/http/components/charts/durationChart.tsx
@@ -1,16 +1,20 @@
import type {ComponentProps} from 'react';
import type {EChartHighlightHandler, Series} from 'sentry/types/echarts';
+import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import {AVG_COLOR} from 'sentry/views/insights/colors';
import Chart, {ChartType} from 'sentry/views/insights/common/components/chart';
import ChartPanel from 'sentry/views/insights/common/components/chartPanel';
import {getDurationChartTitle} from 'sentry/views/insights/common/views/spans/types';
+import {ALERTS} from 'sentry/views/insights/http/alerts';
import {CHART_HEIGHT} from 'sentry/views/insights/http/settings';
+import type {SpanMetricsQueryFilters} from 'sentry/views/insights/types';
interface Props {
isLoading: boolean;
series: Series[];
error?: Error | null;
+ filters?: SpanMetricsQueryFilters;
onHighlight?: (highlights: Highlight[], event: Event) => void; // TODO: Correctly type this
scatterPlot?: ComponentProps<typeof Chart>['scatterPlot'];
}
@@ -26,6 +30,7 @@ export function DurationChart({
isLoading,
error,
onHighlight,
+ filters,
}: Props) {
// TODO: This is duplicated from `DurationChart` in `SampleList`. Resolve the duplication
const handleChartHighlight: EChartHighlightHandler = function (event) {
@@ -50,8 +55,14 @@ export function DurationChart({
onHighlight?.(highlightedDataPoints, event);
};
+ const filterString = filters && MutableSearch.fromQueryObject(filters).formatString();
+ const alertConfig = {
+ ...ALERTS.duration,
+ query: filterString ?? ALERTS.duration.query,
+ };
+
return (
- <ChartPanel title={getDurationChartTitle('http')}>
+ <ChartPanel title={getDurationChartTitle('http')} alertConfigs={[alertConfig]}>
<Chart
height={CHART_HEIGHT}
grid={{
diff --git a/static/app/views/insights/http/components/charts/responseRateChart.tsx b/static/app/views/insights/http/components/charts/responseRateChart.tsx
index a5fd79dc09d754..e59dd17aa51d39 100644
--- a/static/app/views/insights/http/components/charts/responseRateChart.tsx
+++ b/static/app/views/insights/http/components/charts/responseRateChart.tsx
@@ -1,5 +1,6 @@
import type {Series} from 'sentry/types/echarts';
import {formatPercentage} from 'sentry/utils/number/formatPercentage';
+import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import {
HTTP_RESPONSE_3XX_COLOR,
HTTP_RESPONSE_4XX_COLOR,
@@ -8,17 +9,26 @@ import {
import Chart, {ChartType} from 'sentry/views/insights/common/components/chart';
import ChartPanel from 'sentry/views/insights/common/components/chartPanel';
import {DataTitles} from 'sentry/views/insights/common/views/spans/types';
+import {ALERTS} from 'sentry/views/insights/http/alerts';
import {CHART_HEIGHT} from 'sentry/views/insights/http/settings';
+import type {SpanMetricsQueryFilters} from 'sentry/views/insights/types';
interface Props {
isLoading: boolean;
series: [Series, Series, Series];
error?: Error | null;
+ filters?: SpanMetricsQueryFilters;
}
-export function ResponseRateChart({series, isLoading, error}: Props) {
+export function ResponseRateChart({series, isLoading, error, filters}: Props) {
+ const filterString = filters && MutableSearch.fromQueryObject(filters).formatString();
+ const alertConfig = [
+ {...ALERTS.threeHundreds, query: filterString ?? ALERTS.threeHundreds.query},
+ {...ALERTS.fourHundreds, query: filterString ?? ALERTS.fourHundreds.query},
+ {...ALERTS.fiveHundreds, query: filterString ?? ALERTS.fiveHundreds.query},
+ ];
return (
- <ChartPanel title={DataTitles.unsuccessfulHTTPCodes}>
+ <ChartPanel title={DataTitles.unsuccessfulHTTPCodes} alertConfigs={alertConfig}>
<Chart
showLegend
height={CHART_HEIGHT}
diff --git a/static/app/views/insights/http/components/charts/throughputChart.tsx b/static/app/views/insights/http/components/charts/throughputChart.tsx
index ef223e8c308d1b..d8837350451947 100644
--- a/static/app/views/insights/http/components/charts/throughputChart.tsx
+++ b/static/app/views/insights/http/components/charts/throughputChart.tsx
@@ -1,21 +1,27 @@
import type {Series} from 'sentry/types/echarts';
import {RateUnit} from 'sentry/utils/discover/fields';
import {formatRate} from 'sentry/utils/formatters';
+import {MutableSearch} from 'sentry/utils/tokenizeSearch';
import {THROUGHPUT_COLOR} from 'sentry/views/insights/colors';
import Chart, {ChartType} from 'sentry/views/insights/common/components/chart';
import ChartPanel from 'sentry/views/insights/common/components/chartPanel';
import {getThroughputChartTitle} from 'sentry/views/insights/common/views/spans/types';
+import {ALERTS} from 'sentry/views/insights/http/alerts';
import {CHART_HEIGHT} from 'sentry/views/insights/http/settings';
+import type {SpanMetricsQueryFilters} from 'sentry/views/insights/types';
interface Props {
isLoading: boolean;
series: Series;
error?: Error | null;
+ filters?: SpanMetricsQueryFilters;
}
-export function ThroughputChart({series, isLoading, error}: Props) {
+export function ThroughputChart({series, isLoading, error, filters}: Props) {
+ const filterString = filters && MutableSearch.fromQueryObject(filters).formatString();
+ const alertConfig = {...ALERTS.spm, query: filterString ?? ALERTS.spm.query};
return (
- <ChartPanel title={getThroughputChartTitle('http')}>
+ <ChartPanel title={getThroughputChartTitle('http')} alertConfigs={[alertConfig]}>
<Chart
height={CHART_HEIGHT}
grid={{
diff --git a/static/app/views/insights/http/components/httpSamplesPanel.tsx b/static/app/views/insights/http/components/httpSamplesPanel.tsx
index d2b8f5021b69dd..78610dba87aadf 100644
--- a/static/app/views/insights/http/components/httpSamplesPanel.tsx
+++ b/static/app/views/insights/http/components/httpSamplesPanel.tsx
@@ -462,6 +462,7 @@ export function HTTPSamplesPanel() {
}}
isLoading={isDurationDataFetching}
error={durationError}
+ filters={filters}
/>
</ModuleLayout.Full>
</Fragment>
diff --git a/static/app/views/insights/http/views/httpDomainSummaryPage.tsx b/static/app/views/insights/http/views/httpDomainSummaryPage.tsx
index 1f48d3698d873d..7ce38b192b368b 100644
--- a/static/app/views/insights/http/views/httpDomainSummaryPage.tsx
+++ b/static/app/views/insights/http/views/httpDomainSummaryPage.tsx
@@ -284,6 +284,7 @@ export function HTTPDomainSummaryPage() {
series={throughputData['spm()']}
isLoading={isThroughputDataLoading}
error={throughputError}
+ filters={filters}
/>
</ModuleLayout.Third>
@@ -292,6 +293,7 @@ export function HTTPDomainSummaryPage() {
series={[durationData[`avg(${SpanMetricsField.SPAN_SELF_TIME})`]]}
isLoading={isDurationDataLoading}
error={durationError}
+ filters={filters}
/>
</ModuleLayout.Third>
@@ -313,6 +315,7 @@ export function HTTPDomainSummaryPage() {
]}
isLoading={isResponseCodeDataLoading}
error={responseCodeError}
+ filters={filters}
/>
</ModuleLayout.Third>
diff --git a/static/app/views/insights/http/views/httpLandingPage.tsx b/static/app/views/insights/http/views/httpLandingPage.tsx
index d16f9eabcbc84c..dbd10009f035f3 100644
--- a/static/app/views/insights/http/views/httpLandingPage.tsx
+++ b/static/app/views/insights/http/views/httpLandingPage.tsx
@@ -194,6 +194,7 @@ export function HTTPLandingPage() {
series={throughputData['spm()']}
isLoading={isThroughputDataLoading}
error={throughputError}
+ filters={chartFilters}
/>
</ModuleLayout.Third>
@@ -202,6 +203,7 @@ export function HTTPLandingPage() {
series={[durationData[`avg(span.self_time)`]]}
isLoading={isDurationDataLoading}
error={durationError}
+ filters={chartFilters}
/>
</ModuleLayout.Third>
@@ -223,6 +225,7 @@ export function HTTPLandingPage() {
]}
isLoading={isResponseCodeDataLoading}
error={responseCodeError}
+ filters={chartFilters}
/>
</ModuleLayout.Third>
diff --git a/static/app/views/insights/queues/components/messageSpanSamplesPanel.tsx b/static/app/views/insights/queues/components/messageSpanSamplesPanel.tsx
index 7bdce12c2b11c0..32e06c08c2d1ef 100644
--- a/static/app/views/insights/queues/components/messageSpanSamplesPanel.tsx
+++ b/static/app/views/insights/queues/components/messageSpanSamplesPanel.tsx
@@ -358,6 +358,10 @@ export function MessageSpanSamplesPanel() {
}}
isLoading={isDurationDataFetching}
error={durationError}
+ filters={timeseriesFilters.getFilterKeys().reduce((acc, key) => {
+ acc[key] = timeseriesFilters.getFilterValues(key)[0];
+ return acc;
+ }, {})}
/>
</ModuleLayout.Full>
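The getMRI change above means insights aggregates that take no MRI argument (spm(), cache_miss_rate()) or a custom argument (http_response_rate(3), performance_score(...)) resolve to an operation-specific MRI rather than parsing one out of the field. A rough Python sketch of that resolution; the regex parsing and the operation table are illustrative stand-ins for the TypeScript implementation, not its API:

    import re

    SPAN_DURATION_MRI = "d:spans/duration@millisecond"
    DEFAULT_MRI = "c:custom/sentry_metric@none"
    # Operation -> default MRI, following INSIGHTS_METRICS_OPERATIONS* in the diff above.
    INSIGHTS_OPERATIONS = {
        "spm": SPAN_DURATION_MRI,
        "cache_miss_rate": SPAN_DURATION_MRI,
        "http_response_rate": SPAN_DURATION_MRI,
        "performance_score": "d:transactions/measurements.score.total@ratio",
    }

    def get_mri(field: str) -> str:
        parsed = re.match(r"^(\w+)\((.*)\)$", field)
        if parsed is None:
            return DEFAULT_MRI
        aggregation, argument = parsed.groups()
        if aggregation in INSIGHTS_OPERATIONS:
            return INSIGHTS_OPERATIONS[aggregation]
        return argument or DEFAULT_MRI

    assert get_mri("spm()") == SPAN_DURATION_MRI
    assert get_mri("http_response_rate(3)") == SPAN_DURATION_MRI
    assert get_mri("avg(d:spans/exclusive_time@millisecond)") == "d:spans/exclusive_time@millisecond"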
|
3273e166e81d226bfa2d0a81dc3cb21afea55d0d
|
2024-12-14 05:22:37
|
Josh Callender
|
feat(workflow_engine): Add `process_data_packets` method (#82002)
| false
|
Add `process_data_packets` method (#82002)
|
feat
|
diff --git a/src/sentry/workflow_engine/models/data_source.py b/src/sentry/workflow_engine/models/data_source.py
index ebfbc63fd18f14..320a01709a290e 100644
--- a/src/sentry/workflow_engine/models/data_source.py
+++ b/src/sentry/workflow_engine/models/data_source.py
@@ -29,7 +29,11 @@ class DataSource(DefaultFieldsModel):
__relocation_scope__ = RelocationScope.Organization
organization = FlexibleForeignKey("sentry.Organization")
+
+ # Should this be a string so we can support UUID / ints?
query_id = BoundedBigIntegerField()
+
+ # TODO - Add a type here
type = models.TextField()
detectors = models.ManyToManyField("workflow_engine.Detector", through=DataSourceDetector)
diff --git a/src/sentry/workflow_engine/processors/__init__.py b/src/sentry/workflow_engine/processors/__init__.py
index 700cd48361de44..0dca1394898aeb 100644
--- a/src/sentry/workflow_engine/processors/__init__.py
+++ b/src/sentry/workflow_engine/processors/__init__.py
@@ -1,6 +1,8 @@
__all__ = [
"process_data_sources",
"process_detectors",
+ "process_workflows",
+ "process_data_packet",
]
from .data_source import process_data_sources
diff --git a/src/sentry/workflow_engine/processors/data_packet.py b/src/sentry/workflow_engine/processors/data_packet.py
new file mode 100644
index 00000000000000..35997e02f627e3
--- /dev/null
+++ b/src/sentry/workflow_engine/processors/data_packet.py
@@ -0,0 +1,24 @@
+from sentry.workflow_engine.handlers.detector import DetectorEvaluationResult
+from sentry.workflow_engine.models import DataPacket, Detector
+from sentry.workflow_engine.processors.data_source import process_data_sources
+from sentry.workflow_engine.processors.detector import process_detectors
+from sentry.workflow_engine.types import DetectorGroupKey
+
+
+def process_data_packets(
+ data_packets: list[DataPacket], query_type: str
+) -> list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]]:
+ """
+ This method ties the two main pre-processing methods together to process
+ the incoming data and create issue occurrences.
+ """
+ processed_sources = process_data_sources(data_packets, query_type)
+
+ results: list[tuple[Detector, dict[DetectorGroupKey, DetectorEvaluationResult]]] = []
+ for data_packet, detectors in processed_sources:
+ detector_results = process_detectors(data_packet, detectors)
+
+ for detector, detector_state in detector_results:
+ results.append((detector, detector_state))
+
+ return results
diff --git a/src/sentry/workflow_engine/processors/data_source.py b/src/sentry/workflow_engine/processors/data_source.py
index 52709c302a331f..5df9711f4b7775 100644
--- a/src/sentry/workflow_engine/processors/data_source.py
+++ b/src/sentry/workflow_engine/processors/data_source.py
@@ -14,6 +14,7 @@ def process_data_sources(
) -> list[tuple[DataPacket, list[Detector]]]:
metrics.incr("sentry.workflow_engine.process_data_sources", tags={"query_type": query_type})
+ # TODO - change data_source.query_id to be a string to support UUIDs
data_packet_ids = {int(packet.query_id) for packet in data_packets}
# Fetch all data sources and associated detectors for the given data packets
diff --git a/tests/sentry/workflow_engine/processors/test_data_packet.py b/tests/sentry/workflow_engine/processors/test_data_packet.py
new file mode 100644
index 00000000000000..cf98d7ba533a16
--- /dev/null
+++ b/tests/sentry/workflow_engine/processors/test_data_packet.py
@@ -0,0 +1,21 @@
+from sentry.workflow_engine.processors.data_packet import process_data_packets
+from sentry.workflow_engine.types import DetectorPriorityLevel
+from tests.sentry.workflow_engine.test_base import BaseWorkflowTest
+
+
+class TestProcessDataPacket(BaseWorkflowTest):
+ def setUp(self):
+ self.snuba_query = self.create_snuba_query()
+
+ (self.workflow, self.detector, self.detector_workflow, self.workflow_triggers) = (
+ self.create_detector_and_workflow()
+ )
+
+ self.data_source, self.data_packet = self.create_test_query_data_source(self.detector)
+
+ def test_single_data_packet(self):
+ results = process_data_packets([self.data_packet], "snuba_query_subscription")
+ assert len(results) == 1
+
+ detector, detector_evaluation_result = results[0]
+ assert detector_evaluation_result[None].priority == DetectorPriorityLevel.HIGH
diff --git a/tests/sentry/workflow_engine/processors/test_workflow.py b/tests/sentry/workflow_engine/processors/test_workflow.py
index 0b8c080a683b6c..91ddb9e0636b45 100644
--- a/tests/sentry/workflow_engine/processors/test_workflow.py
+++ b/tests/sentry/workflow_engine/processors/test_workflow.py
@@ -1,6 +1,6 @@
from unittest import mock
-from sentry.incidents.grouptype import MetricAlertFire
+from sentry.issues.grouptype import ErrorGroupType
from sentry.workflow_engine.models import DataConditionGroup
from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.processors.workflow import evaluate_workflow_triggers, process_workflows
@@ -14,12 +14,13 @@ def setUp(self):
self.detector,
self.detector_workflow,
self.workflow_triggers,
- ) = self.create_detector_and_workflow(detector_type=MetricAlertFire.slug)
+ ) = self.create_detector_and_workflow()
self.error_workflow, self.error_detector, self.detector_workflow_error, _ = (
self.create_detector_and_workflow(
name_prefix="error",
workflow_triggers=self.create_data_condition_group(),
+ detector_type=ErrorGroupType.slug,
)
)
diff --git a/tests/sentry/workflow_engine/test_base.py b/tests/sentry/workflow_engine/test_base.py
index edf4cb531bc4b2..f91fd33c657ad8 100644
--- a/tests/sentry/workflow_engine/test_base.py
+++ b/tests/sentry/workflow_engine/test_base.py
@@ -1,8 +1,9 @@
-from datetime import datetime
+from datetime import UTC, datetime
from uuid import uuid4
from sentry.eventstore.models import Event, GroupEvent
-from sentry.issues.grouptype import ErrorGroupType
+from sentry.incidents.grouptype import MetricAlertFire
+from sentry.incidents.utils.types import QuerySubscriptionUpdate
from sentry.models.group import Group
from sentry.snuba.models import SnubaQuery
from sentry.testutils.cases import TestCase
@@ -10,11 +11,14 @@
from sentry.workflow_engine.models import (
Action,
DataConditionGroup,
+ DataPacket,
+ DataSource,
Detector,
DetectorWorkflow,
Workflow,
)
from sentry.workflow_engine.models.data_condition import Condition
+from sentry.workflow_engine.types import DetectorPriorityLevel
from tests.sentry.issues.test_utils import OccurrenceTestMixin
@@ -66,9 +70,13 @@ def create_detector_and_workflow(
self,
name_prefix="test",
workflow_triggers: DataConditionGroup | None = None,
- detector_type: str = ErrorGroupType.slug,
+ detector_type: str = MetricAlertFire.slug,
**kwargs,
) -> tuple[Workflow, Detector, DetectorWorkflow, DataConditionGroup]:
+ """
+ Create a Workflow, Detector, DetectorWorkflow, and DataConditionGroup for testing.
+ These models are configured to work together to test the workflow engine.
+ """
workflow_triggers = workflow_triggers or self.create_data_condition_group()
if not workflow_triggers.conditions.exists():
@@ -100,6 +108,46 @@ def create_detector_and_workflow(
return workflow, detector, detector_workflow, workflow_triggers
+ def create_test_query_data_source(self, detector) -> tuple[DataSource, DataPacket]:
+ """
+ Create a DataSource and DataPacket for testing; this will create a fake QuerySubscriptionUpdate and link it to a data_source.
+
+ A detector is required to create this test data, so we can ensure that the detector
+ has a condition to evaluate for the data_packet that evaluates to true.
+ """
+ subscription_update: QuerySubscriptionUpdate = {
+ "subscription_id": "123",
+ "values": {"foo": 1},
+ "timestamp": datetime.now(UTC),
+ "entity": "test-entity",
+ }
+
+ data_source = self.create_data_source(
+ query_id=subscription_update["subscription_id"],
+ organization=self.organization,
+ )
+
+ data_source.detectors.add(detector)
+
+ if detector.workflow_condition_group is None:
+ detector.workflow_condition_group = self.create_data_condition_group(logic_type="any")
+ detector.save()
+
+ self.create_data_condition(
+ condition_group=detector.workflow_condition_group,
+ type=Condition.EQUAL,
+ condition_result=DetectorPriorityLevel.HIGH,
+ comparison=1,
+ )
+
+ # Create a data_packet from the update for testing
+ data_packet = DataPacket[QuerySubscriptionUpdate](
+ query_id=subscription_update["subscription_id"],
+ packet=subscription_update,
+ )
+
+ return data_source, data_packet
+
def create_workflow_action(
self,
workflow: Workflow,
diff --git a/tests/sentry/workflow_engine/test_integration.py b/tests/sentry/workflow_engine/test_integration.py
index 5bad851069246a..314bbfcce9fdcf 100644
--- a/tests/sentry/workflow_engine/test_integration.py
+++ b/tests/sentry/workflow_engine/test_integration.py
@@ -3,13 +3,11 @@
from sentry.eventstream.types import EventStreamEventType
from sentry.incidents.grouptype import MetricAlertFire
-from sentry.incidents.utils.types import QuerySubscriptionUpdate
from sentry.issues.grouptype import ErrorGroupType
from sentry.issues.ingest import save_issue_occurrence
from sentry.models.group import Group
from sentry.tasks.post_process import post_process_group
from sentry.testutils.helpers.features import with_feature
-from sentry.workflow_engine.models import DataPacket, DataSource
from sentry.workflow_engine.models.data_condition import Condition
from sentry.workflow_engine.processors import process_data_sources, process_detectors
from sentry.workflow_engine.types import DetectorPriorityLevel
@@ -77,28 +75,6 @@ def call_post_process_group(
return cache_key
- def create_test_data_source(self) -> DataSource:
- self.subscription_update: QuerySubscriptionUpdate = {
- "subscription_id": "123",
- "values": {"foo": 1},
- "timestamp": datetime.utcnow(),
- "entity": "test-entity",
- }
-
- self.data_source = self.create_data_source(
- query_id=self.subscription_update["subscription_id"],
- organization=self.organization,
- )
- self.data_source.detectors.add(self.detector)
-
- # Create a data_packet from the update for testing
- self.data_packet = DataPacket[QuerySubscriptionUpdate](
- query_id=self.subscription_update["subscription_id"],
- packet=self.subscription_update,
- )
-
- return self.data_source
-
class TestWorkflowEngineIntegrationToIssuePlatform(BaseWorkflowIntegrationTest):
@with_feature("organizations:workflow-engine-metric-alert-processing")
@@ -106,7 +82,7 @@ def test_workflow_engine__data_source__to_metric_issue_workflow(self):
"""
This test ensures that a data_source can create the correct event in Issue Platform
"""
- self.create_test_data_source()
+ self.data_source, self.data_packet = self.create_test_query_data_source(self.detector)
with mock.patch(
"sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka"
@@ -121,7 +97,7 @@ def test_workflow_engine__data_source__to_metric_issue_workflow(self):
@with_feature("organizations:workflow-engine-metric-alert-processing")
def test_workflow_engine__data_source__different_type(self):
- self.create_test_data_source()
+ self.data_source, self.data_packet = self.create_test_query_data_source(self.detector)
with mock.patch(
"sentry.workflow_engine.processors.detector.produce_occurrence_to_kafka"
@@ -134,7 +110,7 @@ def test_workflow_engine__data_source__different_type(self):
@with_feature("organizations:workflow-engine-metric-alert-processing")
def test_workflow_engine__data_source__no_detectors(self):
- self.create_test_data_source()
+ self.data_source, self.data_packet = self.create_test_query_data_source(self.detector)
self.detector.delete()
with mock.patch(
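A short, hedged usage sketch of the new entry point, based on the test above. The handle_update wrapper and the printed output are illustrative; only process_data_packets, the "snuba_query_subscription" query type, DetectorPriorityLevel, and the (detector, {group_key: result}) shape come from the diff:

    from sentry.workflow_engine.processors.data_packet import process_data_packets
    from sentry.workflow_engine.types import DetectorPriorityLevel

    def handle_update(data_packet):
        # Returns a list of (detector, {group_key: DetectorEvaluationResult}) pairs;
        # the test above indexes the per-detector results with the group key None.
        results = process_data_packets([data_packet], "snuba_query_subscription")
        for detector, results_by_group in results:
            for group_key, evaluation in results_by_group.items():
                if evaluation.priority == DetectorPriorityLevel.HIGH:
                    print(f"detector {detector.id} fired for group {group_key}")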
|
e2f3a9bd172f2c54d38c5d017bda4ab139defe66
|
2023-06-02 03:52:37
|
Richard Roggenkemper
|
fix(source-maps): Fix partial match bug and update test (#50199)
| false
|
Fix partial match bug and update test (#50199)
|
fix
|
diff --git a/src/sentry/api/endpoints/source_map_debug.py b/src/sentry/api/endpoints/source_map_debug.py
index 7232e08b4e60ed..0d86234c5676af 100644
--- a/src/sentry/api/endpoints/source_map_debug.py
+++ b/src/sentry/api/endpoints/source_map_debug.py
@@ -197,12 +197,14 @@ def _find_matches(self, release_artifacts, abs_path, unified_path, filename, rel
def _find_partial_matches(self, unified_path, artifacts):
filename = unified_path.split("/")[-1]
- filename_matches = [artifact for artifact in artifacts if artifact.name.endswith(filename)]
+ filename_matches = [
+ artifact for artifact in artifacts if artifact.name.split("/")[-1] == filename
+ ]
artifact_names = [artifact.name.split("/") for artifact in filename_matches]
while any(artifact_names):
for i in range(len(artifact_names)):
if unified_path.endswith("/".join(artifact_names[i])):
- return [artifacts[i]]
+ return [filename_matches[i]]
artifact_names[i] = artifact_names[i][1:]
return []
diff --git a/tests/sentry/api/endpoints/test_source_map_debug.py b/tests/sentry/api/endpoints/test_source_map_debug.py
index c3db99729f2a8f..86af3c1e35ba62 100644
--- a/tests/sentry/api/endpoints/test_source_map_debug.py
+++ b/tests/sentry/api/endpoints/test_source_map_debug.py
@@ -361,6 +361,13 @@ def test_partial_url_match(self):
release = Release.objects.get(organization=self.organization, version=event.release)
release.update(user_agent="test_user_agent")
+ ReleaseFile.objects.create(
+ organization_id=self.project.organization_id,
+ release_id=release.id,
+ file=File.objects.create(name="incorrect_application.js", type="release.file"),
+ name="~/dist/static/js/incorrect_application.js",
+ )
+
ReleaseFile.objects.create(
organization_id=self.project.organization_id,
release_id=release.id,
@@ -368,6 +375,13 @@ def test_partial_url_match(self):
name="~/dist/static/js/application.js",
)
+ ReleaseFile.objects.create(
+ organization_id=self.project.organization_id,
+ release_id=release.id,
+ file=File.objects.create(name="also_incorrect_application.js", type="release.file"),
+ name="~/dist/static/js/also_incorrect_application.js",
+ )
+
resp = self.get_success_response(
self.organization.slug,
self.project.slug,
@@ -385,7 +399,11 @@ def test_partial_url_match(self):
"filename": "/static/js/application.js",
"unifiedPath": "~/static/js/application.js",
"urlPrefix": "~/dist",
- "artifactNames": ["~/dist/static/js/application.js"],
+ "artifactNames": [
+ "~/dist/static/js/also_incorrect_application.js",
+ "~/dist/static/js/application.js",
+ "~/dist/static/js/incorrect_application.js",
+ ],
}
def test_no_url_match(self):
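The fix above matters because candidates are filtered into filename_matches before the suffix walk, so the matching index must be applied to that filtered list; indexing the unfiltered artifacts list could return an unrelated file. A standalone Python sketch of the corrected walk, using plain path strings instead of ReleaseFile artifacts:

    def find_partial_matches(unified_path, artifact_names):
        # Keep only artifacts whose final path segment matches the requested filename,
        # then strip leading segments until one candidate is a suffix of the unified path.
        filename = unified_path.split("/")[-1]
        candidates = [name for name in artifact_names if name.split("/")[-1] == filename]
        parts = [name.split("/") for name in candidates]
        while any(parts):
            for i in range(len(parts)):
                if parts[i] and unified_path.endswith("/".join(parts[i])):
                    return [candidates[i]]  # index the filtered list, not the original
                parts[i] = parts[i][1:]
        return []

    artifacts = [
        "~/dist/static/js/incorrect_application.js",
        "~/dist/static/js/application.js",
    ]
    assert find_partial_matches("~/static/js/application.js", artifacts) == [
        "~/dist/static/js/application.js"
    ]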
|
108f209f66c22f518057f0b7ef41312a6fcff847
|
2017-10-25 02:03:19
|
Jess MacQueen
|
test(repos): Add tests for repo deletion fail emails
| false
|
Add tests for repo deletion fail emails
|
test
|
diff --git a/src/sentry/templates/sentry/debug/mail/preview.html b/src/sentry/templates/sentry/debug/mail/preview.html
index 76ede7de13c31c..99599024837a16 100644
--- a/src/sentry/templates/sentry/debug/mail/preview.html
+++ b/src/sentry/templates/sentry/debug/mail/preview.html
@@ -38,6 +38,9 @@
<optgroup label="Reports">
<option value="mail/report/">Weekly Report</option>
</optgroup>
+ <optgroup label="Repository">
+ <option value="mail/unable-to-delete-repo/">Unable to Delete Repo</option>
+ </optgroup>
<optgroup label="Security">
<option value="mail/mfa-added/">MFA Added</option>
<option value="mail/mfa-removed/">MFA Removed</option>
diff --git a/src/sentry/web/debug_urls.py b/src/sentry/web/debug_urls.py
index 50e9cc1cfa4cb0..e868695fc654f4 100644
--- a/src/sentry/web/debug_urls.py
+++ b/src/sentry/web/debug_urls.py
@@ -23,6 +23,7 @@
from sentry.web.frontend.debug.debug_resolved_in_release_email import (
DebugResolvedInReleaseEmailView, DebugResolvedInReleaseUpcomingEmailView
)
+from sentry.web.frontend.debug.debug_unable_to_delete_repository import DebugUnableToDeleteRepository
from sentry.web.frontend.debug.debug_unable_to_fetch_commits_email import DebugUnableToFetchCommitsEmailView
from sentry.web.frontend.debug.debug_unassigned_email import (DebugUnassignedEmailView)
from sentry.web.frontend.debug.debug_new_processing_issues_email import (
@@ -58,6 +59,7 @@
url(r'^debug/mail/invalid-identity/$', DebugInvalidIdentityEmailView.as_view()),
url(r'^debug/mail/confirm-email/$', sentry.web.frontend.debug.mail.confirm_email),
url(r'^debug/mail/recover-account/$', sentry.web.frontend.debug.mail.recover_account),
+ url(r'^debug/mail/unable-to-delete-repo/$', DebugUnableToDeleteRepository.as_view()),
url(r'^debug/mail/unable-to-fetch-commits/$', DebugUnableToFetchCommitsEmailView.as_view()),
url(r'^debug/mail/unassigned/$', DebugUnassignedEmailView.as_view()),
url(r'^debug/mail/org-delete-confirm/$', sentry.web.frontend.debug.mail.org_delete_confirm),
diff --git a/src/sentry/web/frontend/debug/debug_unable_to_delete_repository.py b/src/sentry/web/frontend/debug/debug_unable_to_delete_repository.py
new file mode 100644
index 00000000000000..370e566c5a3dec
--- /dev/null
+++ b/src/sentry/web/frontend/debug/debug_unable_to_delete_repository.py
@@ -0,0 +1,21 @@
+from __future__ import absolute_import, print_function
+
+from django.views.generic import View
+
+from sentry.models import Repository
+
+from .mail import MailPreview
+
+
+class DebugUnableToDeleteRepository(View):
+ def get(self, request):
+ repo = Repository(name='getsentry/sentry')
+
+ email = repo.generate_delete_fail_email(
+ 'An internal server error occurred'
+ )
+ return MailPreview(
+ html_template=email.html_template,
+ text_template=email.template,
+ context=email.context,
+ ).render(request)
diff --git a/tests/acceptance/test_emails.py b/tests/acceptance/test_emails.py
index e5c44d64697b32..da3830b1b623e2 100644
--- a/tests/acceptance/test_emails.py
+++ b/tests/acceptance/test_emails.py
@@ -16,6 +16,7 @@
('/debug/mail/resolved-in-release/upcoming/', 'resolved in release upcoming'),
('/debug/mail/unassigned/', 'unassigned'),
('/debug/mail/unable-to-fetch-commits/', 'unable to fetch commits'),
+ ('/debug/mail/unable-to-delete-repo/', 'unable to delete repo'),
('/debug/mail/alert/', 'alert'),
('/debug/mail/digest/', 'digest'),
('/debug/mail/invalid-identity/', 'invalid identity'),
diff --git a/tests/sentry/deletions/test_repository.py b/tests/sentry/deletions/test_repository.py
index 251389cdfe3587..0b89b132f6fc88 100644
--- a/tests/sentry/deletions/test_repository.py
+++ b/tests/sentry/deletions/test_repository.py
@@ -1,5 +1,10 @@
from __future__ import absolute_import
+from mock import patch
+
+from django.core import mail
+
+from sentry.exceptions import PluginError
from sentry.models import Commit, Repository, ScheduledDeletion
from sentry.tasks.deletion import run_deletion
from sentry.testutils import TestCase
@@ -38,3 +43,49 @@ def test_simple(self):
assert not Repository.objects.filter(id=repo.id).exists()
assert not Commit.objects.filter(id=commit.id).exists()
assert Commit.objects.filter(id=commit2.id).exists()
+
+ @patch('sentry.plugins.providers.dummy.repository.DummyRepositoryProvider.delete_repository')
+ def test_delete_fail_email(self, mock_delete_repo):
+ mock_delete_repo.side_effect = PluginError('foo')
+
+ org = self.create_organization()
+ repo = Repository.objects.create(
+ organization_id=org.id,
+ provider='dummy',
+ name='example/example',
+ )
+
+ deletion = ScheduledDeletion.schedule(repo, actor=self.user, days=0)
+ deletion.update(in_progress=True)
+
+ with self.assertRaises(PluginError):
+ with self.tasks():
+ run_deletion(deletion.id)
+
+ msg = mail.outbox[-1]
+ assert msg.subject == 'Unable to Delete Repository'
+ assert msg.to == [self.user.email]
+ assert 'foo' in msg.body
+
+ @patch('sentry.plugins.providers.dummy.repository.DummyRepositoryProvider.delete_repository')
+ def test_delete_fail_email_random(self, mock_delete_repo):
+ mock_delete_repo.side_effect = Exception('secrets')
+
+ org = self.create_organization()
+ repo = Repository.objects.create(
+ organization_id=org.id,
+ provider='dummy',
+ name='example/example',
+ )
+
+ deletion = ScheduledDeletion.schedule(repo, actor=self.user, days=0)
+ deletion.update(in_progress=True)
+
+ with self.assertRaises(Exception):
+ with self.tasks():
+ run_deletion(deletion.id)
+
+ msg = mail.outbox[-1]
+ assert msg.subject == 'Unable to Delete Repository'
+ assert msg.to == [self.user.email]
+ assert 'secrets' not in msg.body
|
c801aebe1b14ca3a1a44227ce899b25fa76d5954
|
2024-05-14 18:23:07
|
anthony sottile
|
ref: remove analytics event for stacktrace_link (#70840)
| false
|
remove analytics event for stacktrace_link (#70840)
|
ref
|
diff --git a/src/sentry/api/analytics.py b/src/sentry/api/analytics.py
index e515a7a4b6abe8..339c9bc5bc458d 100644
--- a/src/sentry/api/analytics.py
+++ b/src/sentry/api/analytics.py
@@ -21,19 +21,6 @@ class OrganizationSavedSearchDeletedEvent(analytics.Event):
)
-class FunctionTimerEvent(analytics.Event):
- type = "function_timer.timed"
-
- attributes = (
- analytics.Attribute("function_name"),
- analytics.Attribute("duration"),
- analytics.Attribute("organization_id"),
- analytics.Attribute("project_id"),
- analytics.Attribute("group_id", required=False),
- analytics.Attribute("frame_abs_path", required=False),
- )
-
-
class GroupSimilarIssuesEmbeddingsCountEvent(analytics.Event):
type = "group_similar_issues_embeddings.count"
@@ -48,5 +35,4 @@ class GroupSimilarIssuesEmbeddingsCountEvent(analytics.Event):
analytics.register(OrganizationSavedSearchCreatedEvent)
analytics.register(OrganizationSavedSearchDeletedEvent)
-analytics.register(FunctionTimerEvent)
analytics.register(GroupSimilarIssuesEmbeddingsCountEvent)
diff --git a/src/sentry/api/utils.py b/src/sentry/api/utils.py
index 6516be6e4e3bfd..74ad2340a1d8d0 100644
--- a/src/sentry/api/utils.py
+++ b/src/sentry/api/utils.py
@@ -4,7 +4,6 @@
import logging
import re
import sys
-import time
import traceback
from collections.abc import Generator, Mapping, MutableMapping
from contextlib import contextmanager
@@ -467,25 +466,6 @@ def handle_query_errors() -> Generator[None, None, None]:
raise APIException(detail=message)
-class Timer:
- def __enter__(self):
- self._start = time.time()
- self._duration = None
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self._end = time.time()
- self._duration = self._end - self._start
-
- @property
- def duration(self):
- # If _duration is set, return it; otherwise, calculate ongoing duration
- if self._duration is not None:
- return self._duration
- else:
- return time.time() - self._start
-
-
def id_or_slug_path_params_enabled(
convert_args_class: str | None = None, organization_id_or_slug: str | None = None
) -> bool:
diff --git a/src/sentry/integrations/utils/stacktrace_link.py b/src/sentry/integrations/utils/stacktrace_link.py
index 6ceab0c06f43b0..3f4bceea30c484 100644
--- a/src/sentry/integrations/utils/stacktrace_link.py
+++ b/src/sentry/integrations/utils/stacktrace_link.py
@@ -3,8 +3,6 @@
import logging
from typing import TYPE_CHECKING, NotRequired, TypedDict
-from sentry import analytics
-from sentry.api.utils import Timer
from sentry.integrations.mixins import RepositoryMixin
from sentry.integrations.utils.code_mapping import convert_stacktrace_frame_path_to_source_path
from sentry.models.integrations.repository_project_path_config import RepositoryProjectPathConfig
@@ -27,11 +25,7 @@ class RepositoryLinkOutcome(TypedDict):
def get_link(
- config: RepositoryProjectPathConfig,
- src_path: str,
- version: str | None = None,
- group_id: str | None = None,
- frame_abs_path: str | None = None,
+ config: RepositoryProjectPathConfig, src_path: str, version: str | None = None
) -> RepositoryLinkOutcome:
result: RepositoryLinkOutcome = {}
@@ -47,20 +41,9 @@ def get_link(
link = None
try:
if isinstance(install, RepositoryMixin):
- with Timer() as t:
- link = install.get_stacktrace_link(
- config.repository, src_path, str(config.default_branch or ""), version
- )
- analytics.record(
- "function_timer.timed",
- function_name="get_stacktrace_link",
- duration=t.duration,
- organization_id=config.project.organization_id,
- project_id=config.project_id,
- group_id=group_id,
- frame_abs_path=frame_abs_path,
- )
-
+ link = install.get_stacktrace_link(
+ config.repository, src_path, str(config.default_branch or ""), version
+ )
except ApiError as e:
if e.code != 403:
raise
@@ -115,7 +98,7 @@ def get_stacktrace_config(
result["error"] = "stack_root_mismatch"
continue
- outcome = get_link(config, src_path, ctx["commit_id"], ctx["group_id"], ctx["abs_path"])
+ outcome = get_link(config, src_path, ctx["commit_id"])
result["iteration_count"] += 1
result["current_config"] = {
diff --git a/tests/sentry/issues/endpoints/test_project_stacktrace_link.py b/tests/sentry/issues/endpoints/test_project_stacktrace_link.py
index 8e3a6e8e077d10..b5bb13ae1cd0b8 100644
--- a/tests/sentry/issues/endpoints/test_project_stacktrace_link.py
+++ b/tests/sentry/issues/endpoints/test_project_stacktrace_link.py
@@ -1,6 +1,6 @@
from collections.abc import Mapping
from typing import Any
-from unittest.mock import MagicMock, PropertyMock, patch
+from unittest.mock import MagicMock, patch
from sentry.integrations.example.integration import ExampleIntegration
from sentry.models.integrations.integration import Integration
@@ -212,48 +212,6 @@ def test_file_no_stack_root_match(self, mock_integration: MagicMock) -> None:
assert response.data["error"] == "stack_root_mismatch"
assert response.data["integrations"] == [serialized_integration(self.integration)]
- @patch("sentry.analytics.record")
- @patch("sentry.integrations.utils.stacktrace_link.Timer")
- @patch.object(ExampleIntegration, "get_stacktrace_link")
- def test_timer_duration_for_analytics(
- self, mock_integration: MagicMock, mock_timer: MagicMock, mock_record: MagicMock
- ) -> None:
- mock_integration.return_value = "https://github.com/"
- mock_duration = PropertyMock(return_value=5)
- type(mock_timer.return_value.__enter__.return_value).duration = mock_duration
-
- self.get_success_response(
- self.organization.slug,
- self.project.slug,
- qs_params={
- "file": self.filepath,
- "groupId": 1,
- "absPath": self.filepath,
- "platform": "python",
- },
- )
-
- mock_record.assert_any_call(
- "function_timer.timed",
- function_name="get_stacktrace_link",
- duration=5,
- organization_id=self.organization.id,
- project_id=self.project.id,
- group_id="1",
- frame_abs_path=self.filepath,
- )
- mock_record.assert_any_call(
- "integration.stacktrace.linked",
- provider="example",
- config_id=str(self.code_mapping1.id),
- project_id=self.project.id,
- organization_id=self.organization.id,
- filepath=self.filepath,
- status="success",
- link_fetch_iterations=1,
- platform="python",
- )
-
class ProjectStacktraceLinkTestMobile(BaseProjectStacktraceLink):
def setUp(self) -> None:
|
c423363165f537e0b1f9290b89589ed2e387fa9e
|
2022-07-27 22:44:25
|
Scott Cooper
|
test(ui): Convert a few project detail tests to RTL (#37123)
| false
|
Convert a few project detail tests to RTL (#37123)
|
test
|
diff --git a/tests/js/spec/views/organizationIntegrations/pluginDetailedView.spec.js b/tests/js/spec/views/organizationIntegrations/pluginDetailedView.spec.jsx
similarity index 100%
rename from tests/js/spec/views/organizationIntegrations/pluginDetailedView.spec.js
rename to tests/js/spec/views/organizationIntegrations/pluginDetailedView.spec.jsx
diff --git a/tests/js/spec/views/projectDetail/projectApdex.spec.jsx b/tests/js/spec/views/projectDetail/projectApdex.spec.jsx
index 9a8fa7430d9811..d3a2e460e631a0 100644
--- a/tests/js/spec/views/projectDetail/projectApdex.spec.jsx
+++ b/tests/js/spec/views/projectDetail/projectApdex.spec.jsx
@@ -1,15 +1,10 @@
-import {mountWithTheme} from 'sentry-test/enzyme';
-import {initializeOrg} from 'sentry-test/initializeOrg';
+import {render} from 'sentry-test/reactTestingLibrary';
import ProjectApdexScoreCard from 'sentry/views/projectDetail/projectScoreCards/projectApdexScoreCard';
describe('ProjectDetail > ProjectApdex', function () {
let endpointMock;
- const {organization} = initializeOrg({
- organization: {
- apdexThreshold: 500,
- },
- });
+ const organization = TestStubs.Organization({apdexThreshold: 500});
const selection = {
projects: [1],
@@ -34,10 +29,9 @@ describe('ProjectDetail > ProjectApdex', function () {
});
it('calls api with apdex', function () {
- organization.features = ['discover-basic', 'performance-view'];
- mountWithTheme(
+ render(
<ProjectApdexScoreCard
- organization={organization}
+ organization={{...organization, features: ['discover-basic', 'performance-view']}}
selection={selection}
isProjectStabilized
hasTransactions
diff --git a/tests/js/spec/views/projectDetail/projectIssues.spec.jsx b/tests/js/spec/views/projectDetail/projectIssues.spec.jsx
index 5579e3915717fb..5729f3004e7b8e 100644
--- a/tests/js/spec/views/projectDetail/projectIssues.spec.jsx
+++ b/tests/js/spec/views/projectDetail/projectIssues.spec.jsx
@@ -1,10 +1,10 @@
-import {mountWithTheme} from 'sentry-test/enzyme';
import {initializeOrg} from 'sentry-test/initializeOrg';
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import ProjectIssues from 'sentry/views/projectDetail/projectIssues';
describe('ProjectDetail > ProjectIssues', function () {
- let endpointMock, filteredEndpointMock, wrapper;
+ let endpointMock, filteredEndpointMock;
const {organization, router, routerContext} = initializeOrg({
organization: {
features: ['discover-basic'],
@@ -30,7 +30,7 @@ describe('ProjectDetail > ProjectIssues', function () {
afterEach(function () {
MockApiClient.clearMockResponses();
- wrapper.unmount();
+ jest.clearAllMocks();
});
it('renders a list', async function () {
@@ -38,27 +38,24 @@ describe('ProjectDetail > ProjectIssues', function () {
url: `/organizations/org-slug/issues/?limit=5&query=error.unhandled%3Atrue%20is%3Aunresolved&sort=freq&statsPeriod=14d`,
body: [TestStubs.Group(), TestStubs.Group({id: '2'})],
});
- wrapper = mountWithTheme(
- <ProjectIssues organization={organization} location={router.location} />,
- routerContext
- );
-
- await tick();
- wrapper.update();
+ render(<ProjectIssues organization={organization} location={router.location} />, {
+ context: routerContext,
+ });
- expect(wrapper.find('StreamGroup').length).toBe(2);
+ expect(await screen.findAllByTestId('group')).toHaveLength(2);
});
it('renders a link to Issues', function () {
- wrapper = mountWithTheme(
- <ProjectIssues organization={organization} location={router.location} />,
- routerContext
- );
+ render(<ProjectIssues organization={organization} location={router.location} />, {
+ context: routerContext,
+ });
- expect(
- wrapper.find('ControlsWrapper Link[aria-label="Open in Issues"]').at(0).prop('to')
- ).toEqual({
- pathname: `/organizations/${organization.slug}/issues/`,
+ const link = screen.getByLabelText('Open in Issues');
+ expect(link).toBeInTheDocument();
+ userEvent.click(link);
+
+ expect(router.push).toHaveBeenCalledWith({
+ pathname: '/organizations/org-slug/issues/',
query: {
limit: 5,
query: 'error.unhandled:true is:unresolved',
@@ -69,14 +66,15 @@ describe('ProjectDetail > ProjectIssues', function () {
});
it('renders a link to Discover', function () {
- wrapper = mountWithTheme(
- <ProjectIssues organization={organization} location={router.location} />,
- routerContext
- );
+ render(<ProjectIssues organization={organization} location={router.location} />, {
+ context: routerContext,
+ });
- expect(
- wrapper.find('ControlsWrapper Link[aria-label="Open in Discover"]').at(0).prop('to')
- ).toEqual({
+ const link = screen.getByLabelText('Open in Discover');
+ expect(link).toBeInTheDocument();
+ userEvent.click(link);
+
+ expect(router.push).toHaveBeenCalledWith({
pathname: `/organizations/${organization.slug}/discover/results/`,
query: {
display: 'top5',
@@ -90,22 +88,24 @@ describe('ProjectDetail > ProjectIssues', function () {
});
it('changes according to global header', function () {
- wrapper = mountWithTheme(
+ render(
<ProjectIssues
organization={organization}
location={{
query: {statsPeriod: '7d', environment: 'staging', somethingBad: 'nope'},
}}
/>,
- routerContext
+ {context: routerContext}
);
expect(endpointMock).toHaveBeenCalledTimes(0);
expect(filteredEndpointMock).toHaveBeenCalledTimes(1);
- expect(
- wrapper.find('ControlsWrapper Link[aria-label="Open in Issues"]').at(0).prop('to')
- ).toEqual({
+ const link = screen.getByLabelText('Open in Issues');
+ expect(link).toBeInTheDocument();
+ userEvent.click(link);
+
+ expect(router.push).toHaveBeenCalledWith({
pathname: `/organizations/${organization.slug}/issues/`,
query: {
limit: 5,
diff --git a/tests/js/spec/views/projectDetail/projectQuickLinks.spec.jsx b/tests/js/spec/views/projectDetail/projectQuickLinks.spec.jsx
index a27417de1bfb86..3c65e9a41d85be 100644
--- a/tests/js/spec/views/projectDetail/projectQuickLinks.spec.jsx
+++ b/tests/js/spec/views/projectDetail/projectQuickLinks.spec.jsx
@@ -1,43 +1,50 @@
-import {mountWithTheme} from 'sentry-test/enzyme';
import {initializeOrg} from 'sentry-test/initializeOrg';
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
import ProjectQuickLinks from 'sentry/views/projectDetail/projectQuickLinks';
describe('ProjectDetail > ProjectQuickLinks', function () {
- const {organization, router} = initializeOrg({
+ const {organization, router, routerContext} = initializeOrg({
organization: {features: ['performance-view']},
});
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
+
it('renders a list', function () {
- const wrapper = mountWithTheme(
+ render(
<ProjectQuickLinks
organization={organization}
location={router.location}
project={TestStubs.Project()}
- />
+ />,
+ {context: routerContext}
);
- expect(wrapper.find('SectionHeading').text()).toBe('Quick Links');
- expect(wrapper.find('QuickLink a').length).toBe(3);
+ expect(screen.getByRole('heading', {name: 'Quick Links'})).toBeInTheDocument();
+ expect(screen.getAllByRole('link')).toHaveLength(3);
- const userFeedback = wrapper.find('QuickLink').at(0);
- const keyTransactions = wrapper.find('QuickLink').at(1);
- const mostChangedTransactions = wrapper.find('QuickLink').at(2);
+ const userFeedback = screen.getByRole('link', {name: 'User Feedback'});
+ const keyTransactions = screen.getByRole('link', {name: 'View Transactions'});
+ const mostChangedTransactions = screen.getByRole('link', {
+ name: 'Most Improved/Regressed Transactions',
+ });
- expect(userFeedback.text()).toBe('User Feedback');
- expect(userFeedback.prop('to')).toEqual({
+ userEvent.click(userFeedback);
+ expect(router.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/user-feedback/',
query: {project: '2'},
});
- expect(keyTransactions.text()).toBe('View Transactions');
- expect(keyTransactions.prop('to')).toEqual({
+ userEvent.click(keyTransactions);
+ expect(router.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/performance/',
query: {project: '2'},
});
- expect(mostChangedTransactions.text()).toBe('Most Improved/Regressed Transactions');
- expect(mostChangedTransactions.prop('to')).toEqual({
+ userEvent.click(mostChangedTransactions);
+ expect(router.push).toHaveBeenCalledWith({
pathname: '/organizations/org-slug/performance/trends/',
query: {
cursor: undefined,
@@ -47,21 +54,24 @@ describe('ProjectDetail > ProjectQuickLinks', function () {
});
});
- it('disables link if feature is missing', function () {
- const wrapper = mountWithTheme(
+ it('disables link if feature is missing', async function () {
+ render(
<ProjectQuickLinks
organization={{...organization, features: []}}
location={router.location}
project={TestStubs.Project()}
- />
+ />,
+ {context: routerContext}
);
- const keyTransactions = wrapper.find('QuickLink').at(1);
- const tooltip = wrapper.find('Tooltip').at(1);
+ const keyTransactions = screen.getByText('View Transactions');
+
+ userEvent.click(keyTransactions);
+ expect(router.push).toHaveBeenCalledTimes(0);
- expect(keyTransactions.prop('disabled')).toBeTruthy();
- expect(keyTransactions.find('a').exists()).toBeFalsy();
- expect(tooltip.prop('title')).toBe("You don't have access to this feature");
- expect(tooltip.prop('disabled')).toBeFalsy();
+ userEvent.hover(keyTransactions);
+ expect(
+ await screen.findByText("You don't have access to this feature")
+ ).toBeInTheDocument();
});
});
|
f869cb3a4419f2df2ada7aa7eefd85f8e8a7412c
|
2018-04-05 06:06:45
|
Eric Feng
|
feat(assistant): Members Guide (#7842)
| false
|
Members Guide (#7842)
|
feat
|
diff --git a/src/sentry/assistant/guides.py b/src/sentry/assistant/guides.py
index ee3cea5dfd888a..02451625360b88 100644
--- a/src/sentry/assistant/guides.py
+++ b/src/sentry/assistant/guides.py
@@ -123,4 +123,32 @@
},
]
},
+ 'members': {
+ 'id': 4,
+ 'cue': _('Tips for inviting your team'),
+ 'required_targets': ['member_add'],
+ 'steps': [
+ {
+ 'title': _('Fix issues faster, together'),
+ 'message': _('Sentry isn\'t logs. It\'s about shipping faster by immediately '
+ 'alerting, triaging, and assigning issues to the right engineer.'),
+ 'target': 'member_add',
+ },
+ {
+ 'title': _('What is status?'),
+ 'message': _('You can enforce <a href="/settings/${orgSlug}/#require2FA">2-factor auth</a> or '
+ '<a href="/settings/${orgSlug}/auth/">SSO</a> across your organization. Status lets you see '
+ 'which members haven\'t configured them yet.'),
+ 'target': 'member_status',
+ },
+ {
+ 'title': _('A tip for roles'),
+ 'message': _('Consider having two owners, in case one person\'s out, and you '
+ 'need to adjust billing or a new hire.<br><br>'
+ 'Add finance as a billing member. They\'ll get access to '
+ 'invoices, so they won\'t email you for receipts.'),
+ 'target': 'member_role',
+ },
+ ]
+ }
}
diff --git a/src/sentry/static/sentry/app/components/assistant/guideDrawer.jsx b/src/sentry/static/sentry/app/components/assistant/guideDrawer.jsx
index bd6e7d0291310a..842c1024405ccf 100644
--- a/src/sentry/static/sentry/app/components/assistant/guideDrawer.jsx
+++ b/src/sentry/static/sentry/app/components/assistant/guideDrawer.jsx
@@ -15,6 +15,7 @@ export default class GuideDrawer extends React.Component {
step: PropTypes.number.isRequired,
onFinish: PropTypes.func.isRequired,
onDismiss: PropTypes.func.isRequired,
+ orgSlug: PropTypes.string,
};
handleFinish = useful => {
@@ -22,7 +23,18 @@ export default class GuideDrawer extends React.Component {
this.props.onFinish();
};
+ interpolate(template, variables) {
+ let regex = /\${([^{]+)}/g;
+ return template.replace(regex, (match, g1) => {
+ return variables[g1.trim()];
+ });
+ }
+
render() {
+ let messageVariables = {
+ orgSlug: this.props.orgSlug,
+ };
+
return (
<StyledAssistantContainer>
<StyledAssistantInputRow>
@@ -39,7 +51,10 @@ export default class GuideDrawer extends React.Component {
<StyledContent>
<div
dangerouslySetInnerHTML={{
- __html: this.props.guide.steps[this.props.step - 1].message,
+ __html: this.interpolate(
+ this.props.guide.steps[this.props.step - 1].message,
+ messageVariables
+ ),
}}
/>
<div style={{marginTop: '1em'}}>
diff --git a/src/sentry/static/sentry/app/components/assistant/helper.jsx b/src/sentry/static/sentry/app/components/assistant/helper.jsx
index c962126198fdbc..608c346c31d356 100644
--- a/src/sentry/static/sentry/app/components/assistant/helper.jsx
+++ b/src/sentry/static/sentry/app/components/assistant/helper.jsx
@@ -30,6 +30,7 @@ const AssistantHelper = createReactClass({
// is null, if currentStep is 0 the Need-Help button is cued, and if it's > 0
// the support widget is open.
currentStep: 0,
+ currentOrg: null,
};
},
@@ -78,6 +79,9 @@ const AssistantHelper = createReactClass({
step={currentStep}
onFinish={closeGuideOrSupport}
onDismiss={this.handleGuideDismiss}
+ orgSlug={
+ GuideStore.state.currentOrg ? GuideStore.state.currentOrg.slug : null
+ }
/>
)}
diff --git a/src/sentry/static/sentry/app/stores/guideStore.jsx b/src/sentry/static/sentry/app/stores/guideStore.jsx
index 99ede9a6f75d47..230a40b75b072d 100644
--- a/src/sentry/static/sentry/app/stores/guideStore.jsx
+++ b/src/sentry/static/sentry/app/stores/guideStore.jsx
@@ -2,6 +2,7 @@ import Reflux from 'reflux';
import $ from 'jquery';
import GuideActions from '../actions/guideActions';
import HookStore from './hookStore';
+import OrganizationsActions from '../actions/organizationsActions';
const GuideStore = Reflux.createStore({
init() {
@@ -18,12 +19,24 @@ const GuideStore = Reflux.createStore({
// The current step of the current guide (1-indexed). 0 if there's no guide
// or the guide is just cued but not opened.
currentStep: 0,
+
+ currentOrg: null,
};
this.listenTo(GuideActions.fetchSucceeded, this.onFetchSucceeded);
this.listenTo(GuideActions.closeGuideOrSupport, this.onCloseGuideOrSupport);
this.listenTo(GuideActions.nextStep, this.onNextStep);
this.listenTo(GuideActions.registerAnchor, this.onRegisterAnchor);
this.listenTo(GuideActions.unregisterAnchor, this.onUnregisterAnchor);
+ this.listenTo(OrganizationsActions.setActive, this.onSetActiveOrganization);
+ this.listenTo(OrganizationsActions.changeSlug, this.onChangeSlug);
+ },
+
+ onSetActiveOrganization(data) {
+ this.state.currentOrg = data;
+ },
+
+ onChangeSlug(prev, next) {
+ this.state.currentOrg = next;
},
onFetchSucceeded(data) {
diff --git a/src/sentry/static/sentry/app/views/settings/organization/members/organizationMembersView.jsx b/src/sentry/static/sentry/app/views/settings/organization/members/organizationMembersView.jsx
index cf880600d3e68d..6381bfd65c2815 100644
--- a/src/sentry/static/sentry/app/views/settings/organization/members/organizationMembersView.jsx
+++ b/src/sentry/static/sentry/app/views/settings/organization/members/organizationMembersView.jsx
@@ -15,6 +15,7 @@ import {Panel, PanelBody, PanelHeader} from '../../../../components/panels';
import SentryTypes from '../../../../proptypes';
import SettingsPageHeader from '../../components/settingsPageHeader';
import recreateRoute from '../../../../utils/recreateRoute';
+import GuideAnchor from '../../../../components/assistant/guideAnchor';
class OrganizationMembersView extends OrganizationSettingsView {
static propTypes = {
@@ -236,6 +237,13 @@ class OrganizationMembersView extends OrganizationSettingsView {
</Button>
);
+ if (canAddMembers)
+ action = (
+ <GuideAnchor target="member_add" type="invisible">
+ {action}
+ </GuideAnchor>
+ );
+
return (
<div>
<SettingsPageHeader title="Members" action={action} />
@@ -253,10 +261,14 @@ class OrganizationMembersView extends OrganizationSettingsView {
{t('Member')}
</Box>
<Box px={2} w={180}>
- {t('Status')}
+ <GuideAnchor target="member_status" type="text">
+ {t('Status')}
+ </GuideAnchor>
</Box>
<Box px={2} w={140}>
- {t('Role')}
+ <GuideAnchor target="member_role" type="text">
+ {t('Role')}
+ </GuideAnchor>
</Box>
<Box px={2} w={140}>
{t('Actions')}
diff --git a/tests/js/spec/components/assistant/__snapshots__/guideDrawer.spec.jsx.snap b/tests/js/spec/components/assistant/__snapshots__/guideDrawer.spec.jsx.snap
index 9d9364453fbae6..b0bfd0bccad29e 100644
--- a/tests/js/spec/components/assistant/__snapshots__/guideDrawer.spec.jsx.snap
+++ b/tests/js/spec/components/assistant/__snapshots__/guideDrawer.spec.jsx.snap
@@ -25,7 +25,7 @@ exports[`GuideDrawer gets dismissed 1`] = `
<div
dangerouslySetInnerHTML={
Object {
- "__html": "Message 1",
+ "__html": "Message 1 test",
}
}
/>
diff --git a/tests/js/spec/components/assistant/__snapshots__/helper.spec.jsx.snap b/tests/js/spec/components/assistant/__snapshots__/helper.spec.jsx.snap
index e5ce68ae28f1cd..388a748f990025 100644
--- a/tests/js/spec/components/assistant/__snapshots__/helper.spec.jsx.snap
+++ b/tests/js/spec/components/assistant/__snapshots__/helper.spec.jsx.snap
@@ -46,6 +46,7 @@ exports[`Helper renders guide drawer 1`] = `
}
onDismiss={[Function]}
onFinish={[Function]}
+ orgSlug={null}
step={1}
/>
</StyledHelper>
diff --git a/tests/js/spec/components/assistant/guideDrawer.spec.jsx b/tests/js/spec/components/assistant/guideDrawer.spec.jsx
index 5164dd3637fd68..3e6091b632f3fc 100644
--- a/tests/js/spec/components/assistant/guideDrawer.spec.jsx
+++ b/tests/js/spec/components/assistant/guideDrawer.spec.jsx
@@ -10,7 +10,7 @@ describe('GuideDrawer', function() {
page: 'issue',
required_targets: ['target 1'],
steps: [
- {message: 'Message 1', target: 'target 1', title: '1. Title 1'},
+ {message: 'Message 1 ${orgSlug}', target: 'target 1', title: '1. Title 1'},
{message: 'Message 2', target: 'target 2', title: '2. Title 2'},
],
};
@@ -18,8 +18,15 @@ describe('GuideDrawer', function() {
it('gets dismissed', function() {
let mock = jest.fn();
let mock2 = jest.fn();
+ let slug = 'test';
let wrapper = shallow(
- <GuideDrawer guide={data} step={1} onFinish={mock} onDismiss={mock2} />
+ <GuideDrawer
+ guide={data}
+ step={1}
+ onFinish={mock}
+ onDismiss={mock2}
+ orgSlug={slug}
+ />
);
expect(wrapper).toMatchSnapshot();
wrapper
@@ -32,8 +39,15 @@ describe('GuideDrawer', function() {
it('renders next step', function() {
let mock = jest.fn();
let mock2 = jest.fn();
+ let slug = 'test';
let wrapper = shallow(
- <GuideDrawer guide={data} step={2} onFinish={mock} onDismiss={mock2} />
+ <GuideDrawer
+ guide={data}
+ step={2}
+ onFinish={mock}
+ onDismiss={mock2}
+ orgSlug={slug}
+ />
);
expect(wrapper).toMatchSnapshot();
diff --git a/tests/js/spec/components/assistant/helper.spec.jsx b/tests/js/spec/components/assistant/helper.spec.jsx
index 5c6e65a509a2d1..e0817dc8596bbb 100644
--- a/tests/js/spec/components/assistant/helper.spec.jsx
+++ b/tests/js/spec/components/assistant/helper.spec.jsx
@@ -41,6 +41,7 @@ describe('Helper', function() {
],
},
currentStep: 1,
+ currentOrg: 'test',
});
wrapper
.find('.assistant-cue')
diff --git a/tests/js/spec/views/__snapshots__/organizationMembersView.spec.jsx.snap b/tests/js/spec/views/__snapshots__/organizationMembersView.spec.jsx.snap
index e0a172212692be..ecaf1688777698 100644
--- a/tests/js/spec/views/__snapshots__/organizationMembersView.spec.jsx.snap
+++ b/tests/js/spec/views/__snapshots__/organizationMembersView.spec.jsx.snap
@@ -47,16 +47,21 @@ exports[`OrganizationMembersView No Require Link does not have 2fa warning if us
<div>
<SettingsPageHeading
action={
- <Button
- disabled={false}
- icon="icon-circle-add"
- priority="primary"
- size="small"
- title={undefined}
- to="new"
+ <GuideAnchor
+ target="member_add"
+ type="invisible"
>
- Invite Member
- </Button>
+ <Button
+ disabled={false}
+ icon="icon-circle-add"
+ priority="primary"
+ size="small"
+ title={undefined}
+ to="new"
+ >
+ Invite Member
+ </Button>
+ </GuideAnchor>
}
title="Members"
>
@@ -83,92 +88,121 @@ exports[`OrganizationMembersView No Require Link does not have 2fa warning if us
</div>
</Title>
<div>
- <Button
- disabled={false}
- icon="icon-circle-add"
- priority="primary"
- size="small"
- to="new"
+ <GuideAnchor
+ target="member_add"
+ type="invisible"
>
- <Link
- className="button button-primary button-sm"
- disabled={false}
- onClick={[Function]}
- onlyActiveOnIndex={false}
- role="button"
- style={Object {}}
- to="new"
+ <GuideAnchorContainer
+ innerRef={[Function]}
+ type="invisible"
>
- <a
- className="button button-primary button-sm"
- disabled={false}
- onClick={[Function]}
- role="button"
- style={Object {}}
+ <div
+ className="css-kaem6e-GuideAnchorContainer css-1aiyb4m0"
+ type="invisible"
>
- <Flex
- align="center"
- className="button-label"
+ <Button
+ disabled={false}
+ icon="icon-circle-add"
+ priority="primary"
+ size="small"
+ to="new"
>
- <Base
- align="center"
- className="button-label css-5ipae5"
+ <Link
+ className="button button-primary button-sm"
+ disabled={false}
+ onClick={[Function]}
+ onlyActiveOnIndex={false}
+ role="button"
+ style={Object {}}
+ to="new"
>
- <div
- className="button-label css-5ipae5"
- is={null}
+ <a
+ className="button button-primary button-sm"
+ disabled={false}
+ onClick={[Function]}
+ role="button"
+ style={Object {}}
>
- <Icon
- size="small"
+ <Flex
+ align="center"
+ className="button-label"
>
<Base
- className="css-11bwulm-Icon css-1vxxnb60"
- size="small"
+ align="center"
+ className="button-label css-5ipae5"
>
<div
- className="css-11bwulm-Icon css-1vxxnb60"
+ className="button-label css-5ipae5"
is={null}
- size="small"
>
- <StyledInlineSvg
- size="12px"
- src="icon-circle-add"
+ <Icon
+ size="small"
>
- <InlineSvg
- className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
- size="12px"
- src="icon-circle-add"
+ <Base
+ className="css-11bwulm-Icon css-1vxxnb60"
+ size="small"
>
- <StyledSvg
- className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
- height="12px"
- viewBox={Object {}}
- width="12px"
+ <div
+ className="css-11bwulm-Icon css-1vxxnb60"
+ is={null}
+ size="small"
>
- <svg
- className="css-1vxxnb61 css-1rlza0i-StyledSvg css-adkcw30"
- height="12px"
- viewBox={Object {}}
- width="12px"
+ <StyledInlineSvg
+ size="12px"
+ src="icon-circle-add"
>
- <use
- href="#test"
- xlinkHref="#test"
- />
- </svg>
- </StyledSvg>
- </InlineSvg>
- </StyledInlineSvg>
+ <InlineSvg
+ className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
+ size="12px"
+ src="icon-circle-add"
+ >
+ <StyledSvg
+ className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
+ height="12px"
+ viewBox={Object {}}
+ width="12px"
+ >
+ <svg
+ className="css-1vxxnb61 css-1rlza0i-StyledSvg css-adkcw30"
+ height="12px"
+ viewBox={Object {}}
+ width="12px"
+ >
+ <use
+ href="#test"
+ xlinkHref="#test"
+ />
+ </svg>
+ </StyledSvg>
+ </InlineSvg>
+ </StyledInlineSvg>
+ </div>
+ </Base>
+ </Icon>
+ Invite Member
</div>
</Base>
- </Icon>
- Invite Member
- </div>
- </Base>
- </Flex>
- </a>
- </Link>
- </Button>
+ </Flex>
+ </a>
+ </Link>
+ </Button>
+ <StyledGuideAnchor
+ active={false}
+ className="guide-anchor-ping member_add"
+ >
+ <div
+ className="guide-anchor-ping member_add css-1yndvnf-StyledGuideAnchor css-1aiyb4m1"
+ >
+ <StyledGuideAnchorRipples>
+ <div
+ className="css-1t9mqkr-StyledGuideAnchorRipples css-1aiyb4m2"
+ />
+ </StyledGuideAnchorRipples>
+ </div>
+ </StyledGuideAnchor>
+ </div>
+ </GuideAnchorContainer>
+ </GuideAnchor>
</div>
</div>
</Base>
@@ -241,7 +275,36 @@ exports[`OrganizationMembersView No Require Link does not have 2fa warning if us
className="css-1pkva7q"
is={null}
>
- Status
+ <GuideAnchor
+ target="member_status"
+ type="text"
+ >
+ <GuideAnchorContainer
+ innerRef={[Function]}
+ type="text"
+ >
+ <div
+ className="css-9u5for-GuideAnchorContainer css-1aiyb4m0"
+ type="text"
+ >
+ Status
+ <StyledGuideAnchor
+ active={false}
+ className="guide-anchor-ping member_status"
+ >
+ <div
+ className="guide-anchor-ping member_status css-1yndvnf-StyledGuideAnchor css-1aiyb4m1"
+ >
+ <StyledGuideAnchorRipples>
+ <div
+ className="css-1t9mqkr-StyledGuideAnchorRipples css-1aiyb4m2"
+ />
+ </StyledGuideAnchorRipples>
+ </div>
+ </StyledGuideAnchor>
+ </div>
+ </GuideAnchorContainer>
+ </GuideAnchor>
</div>
</Base>
</Box>
@@ -258,7 +321,36 @@ exports[`OrganizationMembersView No Require Link does not have 2fa warning if us
className="css-1fsdpfi"
is={null}
>
- Role
+ <GuideAnchor
+ target="member_role"
+ type="text"
+ >
+ <GuideAnchorContainer
+ innerRef={[Function]}
+ type="text"
+ >
+ <div
+ className="css-9u5for-GuideAnchorContainer css-1aiyb4m0"
+ type="text"
+ >
+ Role
+ <StyledGuideAnchor
+ active={false}
+ className="guide-anchor-ping member_role"
+ >
+ <div
+ className="guide-anchor-ping member_role css-1yndvnf-StyledGuideAnchor css-1aiyb4m1"
+ >
+ <StyledGuideAnchorRipples>
+ <div
+ className="css-1t9mqkr-StyledGuideAnchorRipples css-1aiyb4m2"
+ />
+ </StyledGuideAnchorRipples>
+ </div>
+ </StyledGuideAnchor>
+ </div>
+ </GuideAnchorContainer>
+ </GuideAnchor>
</div>
</Base>
</Box>
@@ -1229,16 +1321,21 @@ exports[`OrganizationMembersView Require Link does not have 2fa warning if user
<div>
<SettingsPageHeading
action={
- <Button
- disabled={false}
- icon="icon-circle-add"
- priority="primary"
- size="small"
- title={undefined}
- to="new"
+ <GuideAnchor
+ target="member_add"
+ type="invisible"
>
- Invite Member
- </Button>
+ <Button
+ disabled={false}
+ icon="icon-circle-add"
+ priority="primary"
+ size="small"
+ title={undefined}
+ to="new"
+ >
+ Invite Member
+ </Button>
+ </GuideAnchor>
}
title="Members"
>
@@ -1265,92 +1362,121 @@ exports[`OrganizationMembersView Require Link does not have 2fa warning if user
</div>
</Title>
<div>
- <Button
- disabled={false}
- icon="icon-circle-add"
- priority="primary"
- size="small"
- to="new"
+ <GuideAnchor
+ target="member_add"
+ type="invisible"
>
- <Link
- className="button button-primary button-sm"
- disabled={false}
- onClick={[Function]}
- onlyActiveOnIndex={false}
- role="button"
- style={Object {}}
- to="new"
+ <GuideAnchorContainer
+ innerRef={[Function]}
+ type="invisible"
>
- <a
- className="button button-primary button-sm"
- disabled={false}
- onClick={[Function]}
- role="button"
- style={Object {}}
+ <div
+ className="css-kaem6e-GuideAnchorContainer css-1aiyb4m0"
+ type="invisible"
>
- <Flex
- align="center"
- className="button-label"
+ <Button
+ disabled={false}
+ icon="icon-circle-add"
+ priority="primary"
+ size="small"
+ to="new"
>
- <Base
- align="center"
- className="button-label css-5ipae5"
+ <Link
+ className="button button-primary button-sm"
+ disabled={false}
+ onClick={[Function]}
+ onlyActiveOnIndex={false}
+ role="button"
+ style={Object {}}
+ to="new"
>
- <div
- className="button-label css-5ipae5"
- is={null}
+ <a
+ className="button button-primary button-sm"
+ disabled={false}
+ onClick={[Function]}
+ role="button"
+ style={Object {}}
>
- <Icon
- size="small"
+ <Flex
+ align="center"
+ className="button-label"
>
<Base
- className="css-11bwulm-Icon css-1vxxnb60"
- size="small"
+ align="center"
+ className="button-label css-5ipae5"
>
<div
- className="css-11bwulm-Icon css-1vxxnb60"
+ className="button-label css-5ipae5"
is={null}
- size="small"
>
- <StyledInlineSvg
- size="12px"
- src="icon-circle-add"
+ <Icon
+ size="small"
>
- <InlineSvg
- className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
- size="12px"
- src="icon-circle-add"
+ <Base
+ className="css-11bwulm-Icon css-1vxxnb60"
+ size="small"
>
- <StyledSvg
- className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
- height="12px"
- viewBox={Object {}}
- width="12px"
+ <div
+ className="css-11bwulm-Icon css-1vxxnb60"
+ is={null}
+ size="small"
>
- <svg
- className="css-1vxxnb61 css-1rlza0i-StyledSvg css-adkcw30"
- height="12px"
- viewBox={Object {}}
- width="12px"
+ <StyledInlineSvg
+ size="12px"
+ src="icon-circle-add"
>
- <use
- href="#test"
- xlinkHref="#test"
- />
- </svg>
- </StyledSvg>
- </InlineSvg>
- </StyledInlineSvg>
+ <InlineSvg
+ className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
+ size="12px"
+ src="icon-circle-add"
+ >
+ <StyledSvg
+ className="css-1ov3rcq-StyledInlineSvg css-1vxxnb61"
+ height="12px"
+ viewBox={Object {}}
+ width="12px"
+ >
+ <svg
+ className="css-1vxxnb61 css-1rlza0i-StyledSvg css-adkcw30"
+ height="12px"
+ viewBox={Object {}}
+ width="12px"
+ >
+ <use
+ href="#test"
+ xlinkHref="#test"
+ />
+ </svg>
+ </StyledSvg>
+ </InlineSvg>
+ </StyledInlineSvg>
+ </div>
+ </Base>
+ </Icon>
+ Invite Member
</div>
</Base>
- </Icon>
- Invite Member
- </div>
- </Base>
- </Flex>
- </a>
- </Link>
- </Button>
+ </Flex>
+ </a>
+ </Link>
+ </Button>
+ <StyledGuideAnchor
+ active={false}
+ className="guide-anchor-ping member_add"
+ >
+ <div
+ className="guide-anchor-ping member_add css-1yndvnf-StyledGuideAnchor css-1aiyb4m1"
+ >
+ <StyledGuideAnchorRipples>
+ <div
+ className="css-1t9mqkr-StyledGuideAnchorRipples css-1aiyb4m2"
+ />
+ </StyledGuideAnchorRipples>
+ </div>
+ </StyledGuideAnchor>
+ </div>
+ </GuideAnchorContainer>
+ </GuideAnchor>
</div>
</div>
</Base>
@@ -1423,7 +1549,36 @@ exports[`OrganizationMembersView Require Link does not have 2fa warning if user
className="css-1pkva7q"
is={null}
>
- Status
+ <GuideAnchor
+ target="member_status"
+ type="text"
+ >
+ <GuideAnchorContainer
+ innerRef={[Function]}
+ type="text"
+ >
+ <div
+ className="css-9u5for-GuideAnchorContainer css-1aiyb4m0"
+ type="text"
+ >
+ Status
+ <StyledGuideAnchor
+ active={false}
+ className="guide-anchor-ping member_status"
+ >
+ <div
+ className="guide-anchor-ping member_status css-1yndvnf-StyledGuideAnchor css-1aiyb4m1"
+ >
+ <StyledGuideAnchorRipples>
+ <div
+ className="css-1t9mqkr-StyledGuideAnchorRipples css-1aiyb4m2"
+ />
+ </StyledGuideAnchorRipples>
+ </div>
+ </StyledGuideAnchor>
+ </div>
+ </GuideAnchorContainer>
+ </GuideAnchor>
</div>
</Base>
</Box>
@@ -1440,7 +1595,36 @@ exports[`OrganizationMembersView Require Link does not have 2fa warning if user
className="css-1fsdpfi"
is={null}
>
- Role
+ <GuideAnchor
+ target="member_role"
+ type="text"
+ >
+ <GuideAnchorContainer
+ innerRef={[Function]}
+ type="text"
+ >
+ <div
+ className="css-9u5for-GuideAnchorContainer css-1aiyb4m0"
+ type="text"
+ >
+ Role
+ <StyledGuideAnchor
+ active={false}
+ className="guide-anchor-ping member_role"
+ >
+ <div
+ className="guide-anchor-ping member_role css-1yndvnf-StyledGuideAnchor css-1aiyb4m1"
+ >
+ <StyledGuideAnchorRipples>
+ <div
+ className="css-1t9mqkr-StyledGuideAnchorRipples css-1aiyb4m2"
+ />
+ </StyledGuideAnchorRipples>
+ </div>
+ </StyledGuideAnchor>
+ </div>
+ </GuideAnchorContainer>
+ </GuideAnchor>
</div>
</Base>
</Box>
|
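The interpolate helper added to guideDrawer.jsx above does simple ${name} substitution with a regex before the guide message is rendered. For illustration only, the same idea in Python (the function name and the empty-string fallback for missing keys are my choices; the JS version would render undefined instead):

import re

_PLACEHOLDER = re.compile(r"\$\{([^{]+)\}")

def interpolate(template: str, variables: dict[str, str]) -> str:
    # Mirrors the regex in the JS helper: ${orgSlug} -> variables["orgSlug"].
    return _PLACEHOLDER.sub(
        lambda match: str(variables.get(match.group(1).strip(), "")), template
    )

# interpolate("Go to /settings/${orgSlug}/auth/", {"orgSlug": "acme"})
# -> "Go to /settings/acme/auth/"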
a306f6ff1a7e0502e3adeb05af86b6510c6b7317
|
2024-05-23 02:00:52
|
anthony sottile
|
ref: fix typing for testutils task_runner (#71346)
| false
|
fix typing for testutils task_runner (#71346)
|
ref
|
diff --git a/pyproject.toml b/pyproject.toml
index eb9a38d88f9d7c..5a563f3cf2cc89 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -453,7 +453,6 @@ module = [
"sentry.testutils.helpers.features",
"sentry.testutils.helpers.notifications",
"sentry.testutils.helpers.slack",
- "sentry.testutils.helpers.task_runner",
"sentry.tsdb.dummy",
"sentry.tsdb.inmemory",
"sentry.types.integrations",
@@ -628,6 +627,7 @@ module = [
"sentry.tasks.on_demand_metrics",
"sentry.tasks.reprocessing2",
"sentry.tasks.store",
+ "sentry.testutils.helpers.task_runner",
"sentry.types.actor",
"sentry.types.region",
"sentry.utils.arroyo",
diff --git a/src/sentry/testutils/helpers/task_runner.py b/src/sentry/testutils/helpers/task_runner.py
index 8f221ad9bafaaa..7f6545bac0ba1a 100644
--- a/src/sentry/testutils/helpers/task_runner.py
+++ b/src/sentry/testutils/helpers/task_runner.py
@@ -1,14 +1,19 @@
-__all__ = ["TaskRunner"]
+from __future__ import annotations
-from contextlib import contextmanager
-from unittest.mock import patch
+import contextlib
+from collections.abc import Generator
+from typing import Any, ContextManager, Self
+from unittest import mock
from celery import current_app
+from celery.app.task import Task
from django.conf import settings
+__all__ = ("BurstTaskRunner", "TaskRunner")
-@contextmanager
-def TaskRunner():
+
[email protected]
+def TaskRunner() -> Generator[None, None, None]:
prev = settings.CELERY_ALWAYS_EAGER
settings.CELERY_ALWAYS_EAGER = True
current_app.conf.CELERY_ALWAYS_EAGER = True
@@ -19,7 +24,7 @@ def TaskRunner():
settings.CELERY_ALWAYS_EAGER = prev
-class BustTaskRunnerRetryError(Exception):
+class BurstTaskRunnerRetryError(Exception):
"""
An exception that mocks can throw, which will bubble to tasks run by the `BurstTaskRunner` and
cause them to be re-queued, rather than failed immediately. Useful for simulating the
@@ -27,38 +32,74 @@ class BustTaskRunnerRetryError(Exception):
"""
-@contextmanager
-def BurstTaskRunner():
- """
- A fixture for queueing up Celery tasks and working them off in bursts.
-
- The main interesting property is that one can run tasks at a later point in
- the future, testing "concurrency" without actually spawning any kind of
- worker.
- """
-
- job_queue = []
-
- def apply_async(self, args=(), kwargs=None, countdown=None, queue=None):
- job_queue.append((self, args, {} if kwargs is None else kwargs))
+class _BurstState:
+ def __init__(self) -> None:
+ self._active = False
+ self._orig_apply_async = Task.apply_async
+ self.queue: list[tuple[Task, tuple[Any, ...], dict[str, Any]]] = []
+
+ def _apply_async(
+ self,
+ task: Task,
+ args: tuple[Any, ...] = (),
+ kwargs: dict[str, Any] | None = None,
+ countdown: float | None = None,
+ queue: str | None = None,
+ ) -> None:
+ if not self._active:
+ raise AssertionError("task enqueued to burst runner while burst was not active!")
+ self.queue.append((task, args, {} if kwargs is None else kwargs))
+
+ @contextlib.contextmanager
+ def _patched(self) -> Generator[Self, None, None]:
+ if self._active:
+ raise AssertionError("nested BurstTaskRunner!")
+
+ with mock.patch.object(Task, "apply_async", self._apply_async):
+ self._active = True
+ try:
+ yield self
+ finally:
+ self._active = False
+
+ @contextlib.contextmanager
+ def temporarily_enable_normal_task_processing(self) -> Generator[None, None, None]:
+ if not self._active:
+ raise AssertionError("cannot disable burst when not active")
+
+ with mock.patch.object(Task, "apply_async", self._orig_apply_async):
+ self._active = False
+ try:
+ yield
+ finally:
+ self._active = True
+
+ def __call__(self, max_jobs: int | None = None) -> None:
+ if not self._active:
+ raise AssertionError("burst called outside of mocked context")
- def work(max_jobs=None):
jobs = 0
- while job_queue and (max_jobs is None or max_jobs > jobs):
- self, args, kwargs = job_queue.pop(0)
+ while self.queue and (max_jobs is None or max_jobs > jobs):
+ task, args, kwargs = self.queue.pop(0)
- with patch("celery.app.task.Task.apply_async", apply_async):
- try:
- self(*args, **kwargs)
- except BustTaskRunnerRetryError:
- job_queue.append((self, args, kwargs))
+ try:
+ task(*args, **kwargs)
+ except BurstTaskRunnerRetryError:
+ self.queue.append((task, args, kwargs))
jobs += 1
- if job_queue:
- raise RuntimeError("Could not empty queue, last task items: %s" % repr(job_queue))
+ if self.queue:
+ raise RuntimeError(f"Could not empty queue, last task items: {self.queue!r}")
+
+
+def BurstTaskRunner() -> ContextManager[_BurstState]:
+ """
+ A fixture for queueing up Celery tasks and working them off in bursts.
- work.queue = job_queue
+ The main interesting property is that one can run tasks at a later point in
+ the future, testing "concurrency" without actually spawning any kind of
+ worker.
+ """
- with patch("celery.app.task.Task.apply_async", apply_async):
- yield work
+ return _BurstState()._patched()
diff --git a/src/sentry/testutils/pytest/fixtures.py b/src/sentry/testutils/pytest/fixtures.py
index 1106a4e18a0d54..8b6b295edc53b3 100644
--- a/src/sentry/testutils/pytest/fixtures.py
+++ b/src/sentry/testutils/pytest/fixtures.py
@@ -66,21 +66,6 @@ def task_runner():
return TaskRunner
[email protected]
-def burst_task_runner():
- """Context manager that queues up Celery tasks until called.
-
- The yielded value which can be assigned by the ``as`` clause is callable and will
- execute all queued up tasks. It takes a ``max_jobs`` argument to limit the number of
- jobs to process.
-
- The queue itself can be inspected via the ``queue`` attribute of the yielded value.
- """
- from sentry.testutils.helpers.task_runner import BurstTaskRunner
-
- return BurstTaskRunner
-
-
@pytest.fixture(scope="function")
def default_user(factories):
"""A default (super)user with email ``admin@localhost`` and password ``admin``.
diff --git a/tests/sentry/tasks/deletion/test_hybrid_cloud.py b/tests/sentry/tasks/deletion/test_hybrid_cloud.py
index 16da4bafb043a9..e638e370c37207 100644
--- a/tests/sentry/tasks/deletion/test_hybrid_cloud.py
+++ b/tests/sentry/tasks/deletion/test_hybrid_cloud.py
@@ -163,7 +163,7 @@ def test_region_processing(task_runner):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs()
- burst()
+ burst()
assert not results1.exists()
assert results2.exists()
@@ -193,7 +193,7 @@ def test_control_processing(task_runner):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs_control()
- burst()
+ burst()
# Do not process
assert results.exists()
@@ -240,7 +240,7 @@ def test_cascade_deletion_behavior(task_runner):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs()
- burst()
+ burst()
# Deletion cascaded
assert not ExternalIssue.objects.filter(id=external_issue.id).exists()
@@ -263,7 +263,7 @@ def test_do_nothing_deletion_behavior(task_runner):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs()
- burst()
+ burst()
# Deletion did nothing
model = DoNothingIntegrationModel.objects.get(id=model.id)
@@ -286,7 +286,7 @@ def test_set_null_deletion_behavior(task_runner):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs()
- burst()
+ burst()
# Deletion set field to null
saved_query = DiscoverSavedQuery.objects.get(id=saved_query.id)
@@ -353,7 +353,7 @@ def run_hybrid_cloud_fk_jobs(self):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs()
- burst()
+ burst()
def test_raises_when_option_disabled(self):
data = setup_cross_db_deletion_data()
@@ -369,7 +369,7 @@ def test_raises_when_option_disabled(self):
with BurstTaskRunner() as burst:
schedule_hybrid_cloud_foreign_key_jobs()
- burst()
+ burst()
assert exc.match("Cannot process tombstones due to model living in separate database.")
assert Monitor.objects.filter(id=monitor.id).exists()
diff --git a/tests/sentry/tasks/test_relay.py b/tests/sentry/tasks/test_relay.py
index dfc2c0eef37cc6..53e42584230120 100644
--- a/tests/sentry/tasks/test_relay.py
+++ b/tests/sentry/tasks/test_relay.py
@@ -18,6 +18,7 @@
schedule_build_project_config,
schedule_invalidate_project_config,
)
+from sentry.testutils.helpers.task_runner import BurstTaskRunner
from sentry.testutils.hybrid_cloud import simulated_transaction_watermarks
from sentry.testutils.pytest.fixtures import django_db_all
@@ -44,7 +45,7 @@ def disable_auto_on_commit():
@pytest.fixture
-def emulate_transactions(burst_task_runner, django_capture_on_commit_callbacks):
+def emulate_transactions(django_capture_on_commit_callbacks):
# This contraption helps in testing the usage of `transaction.on_commit` in
# schedule_build_project_config. Normally tests involving transactions would
# require us to use the transactional testcase (or
@@ -52,7 +53,7 @@ def emulate_transactions(burst_task_runner, django_capture_on_commit_callbacks):
# in test speed and we're trying to keep our testcases fast.
@contextlib.contextmanager
def inner(assert_num_callbacks=1):
- with burst_task_runner() as burst:
+ with BurstTaskRunner() as burst:
with django_capture_on_commit_callbacks(execute=True) as callbacks:
yield
@@ -69,15 +70,15 @@ def inner(assert_num_callbacks=1):
# exited, not while they are being registered
assert len(callbacks) == assert_num_callbacks
- # Callbacks have been executed, job(s) should've been scheduled now, so
- # let's execute them.
- #
- # Note: We can't directly assert that the data race has not occured, as
- # there are no real DB transactions available in this testcase. The
- # entire test runs in one transaction because that's how pytest-django
- # sets up things unless one uses
- # pytest.mark.django_db(transaction=True).
- burst(max_jobs=20)
+ # Callbacks have been executed, job(s) should've been scheduled now, so
+ # let's execute them.
+ #
+ # Note: We can't directly assert that the data race has not occured, as
+ # there are no real DB transactions available in this testcase. The
+ # entire test runs in one transaction because that's how pytest-django
+ # sets up things unless one uses
+ # pytest.mark.django_db(transaction=True).
+ burst(max_jobs=20)
return inner
@@ -510,7 +511,6 @@ def test_project_config_invalidations_delayed(
@django_db_all(transaction=True)
def test_invalidate_hierarchy(
monkeypatch,
- burst_task_runner,
default_project,
default_projectkey,
redis_cache,
@@ -530,7 +530,7 @@ def proxy(*args, **kwargs):
monkeypatch.setattr(invalidate_project_config, "apply_async", proxy)
- with burst_task_runner() as run:
+ with BurstTaskRunner() as run:
schedule_invalidate_project_config(
organization_id=default_project.organization.id, trigger="test"
)
diff --git a/tests/sentry/tasks/test_relocation.py b/tests/sentry/tasks/test_relocation.py
index d8273de67a9b99..470bd2a4186fb0 100644
--- a/tests/sentry/tasks/test_relocation.py
+++ b/tests/sentry/tasks/test_relocation.py
@@ -78,7 +78,7 @@
from sentry.testutils.cases import TestCase, TransactionTestCase
from sentry.testutils.factories import get_fixture_path
from sentry.testutils.helpers.backups import FakeKeyManagementServiceClient, generate_rsa_key_pair
-from sentry.testutils.helpers.task_runner import BurstTaskRunner, BustTaskRunnerRetryError
+from sentry.testutils.helpers.task_runner import BurstTaskRunner, BurstTaskRunnerRetryError
from sentry.testutils.silo import assume_test_silo_mode
from sentry.utils import json
from sentry.utils.relocation import RELOCATION_BLOB_SIZE, RELOCATION_FILE_TYPE, OrderedTask
@@ -2174,15 +2174,15 @@ def mock_max_retries(
fake_kms_client: FakeKeyManagementServiceClient,
):
fake_cloudbuild_client.create_build.side_effect = (
- [BustTaskRunnerRetryError("Retry")] * MAX_FAST_TASK_RETRIES
+ [BurstTaskRunnerRetryError("Retry")] * MAX_FAST_TASK_RETRIES
) + [fake_cloudbuild_client.create_build.return_value]
fake_cloudbuild_client.get_build.side_effect = (
- [BustTaskRunnerRetryError("Retry")] * MAX_VALIDATION_POLLS
+ [BurstTaskRunnerRetryError("Retry")] * MAX_VALIDATION_POLLS
) + [fake_cloudbuild_client.get_build.return_value]
fake_kms_client.asymmetric_decrypt.side_effect = (
- [BustTaskRunnerRetryError("Retry")] * MAX_FAST_TASK_RETRIES
+ [BurstTaskRunnerRetryError("Retry")] * MAX_FAST_TASK_RETRIES
) + [
fake_kms_client.asymmetric_decrypt.return_value,
# The second call to `asymmetric_decrypt` occurs from inside the `importing` task, which
@@ -2191,7 +2191,7 @@ def mock_max_retries(
]
fake_kms_client.get_public_key.side_effect = (
- [BustTaskRunnerRetryError("Retry")] * MAX_FAST_TASK_RETRIES
+ [BurstTaskRunnerRetryError("Retry")] * MAX_FAST_TASK_RETRIES
) + [fake_kms_client.get_public_key.return_value]
# Used by two tasks, so repeat the pattern (fail, fail, fail, succeed) twice.
fake_kms_client.get_public_key.side_effect = (
@@ -2263,8 +2263,10 @@ def test_valid_no_retries(
with BurstTaskRunner() as burst:
uploading_complete(self.relocation.uuid)
- with patch.object(LostPasswordHash, "send_relocate_account_email") as mock_relocation_email:
- burst()
+ with patch.object(
+ LostPasswordHash, "send_relocate_account_email"
+ ) as mock_relocation_email:
+ burst()
assert mock_relocation_email.call_count == 2
@@ -2310,8 +2312,10 @@ def test_valid_max_retries(
with BurstTaskRunner() as burst:
uploading_complete(self.relocation.uuid)
- with patch.object(LostPasswordHash, "send_relocate_account_email") as mock_relocation_email:
- burst()
+ with patch.object(
+ LostPasswordHash, "send_relocate_account_email"
+ ) as mock_relocation_email:
+ burst()
assert mock_relocation_email.call_count == 2
@@ -2356,8 +2360,10 @@ def test_invalid_no_retries(
with BurstTaskRunner() as burst:
uploading_complete(self.relocation.uuid)
- with patch.object(LostPasswordHash, "send_relocate_account_email") as mock_relocation_email:
- burst()
+ with patch.object(
+ LostPasswordHash, "send_relocate_account_email"
+ ) as mock_relocation_email:
+ burst()
assert mock_relocation_email.call_count == 0
@@ -2404,8 +2410,10 @@ def test_invalid_max_retries(
with BurstTaskRunner() as burst:
uploading_complete(self.relocation.uuid)
- with patch.object(LostPasswordHash, "send_relocate_account_email") as mock_relocation_email:
- burst()
+ with patch.object(
+ LostPasswordHash, "send_relocate_account_email"
+ ) as mock_relocation_email:
+ burst()
assert mock_relocation_email.call_count == 0
diff --git a/tests/sentry/tasks/test_reprocessing2.py b/tests/sentry/tasks/test_reprocessing2.py
index 29524f818fb86f..71aaf866bbda46 100644
--- a/tests/sentry/tasks/test_reprocessing2.py
+++ b/tests/sentry/tasks/test_reprocessing2.py
@@ -27,6 +27,7 @@
from sentry.tasks.store import preprocess_event
from sentry.testutils.cases import TestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
+from sentry.testutils.helpers.task_runner import BurstTaskRunner
from sentry.testutils.pytest.fixtures import django_db_all
from sentry.testutils.skips import requires_snuba
from sentry.types.activity import ActivityType
@@ -114,7 +115,6 @@ def test_basic(
reset_snuba,
process_and_save,
register_event_preprocessor,
- burst_task_runner,
monkeypatch,
django_cache,
):
@@ -172,10 +172,10 @@ def get_event_by_processing_counter(n):
old_event = event
- with burst_task_runner() as burst:
+ with BurstTaskRunner() as burst:
reprocess_group(default_project.id, event.group_id)
- burst(max_jobs=100)
+ burst(max_jobs=100)
(event,) = get_event_by_processing_counter("x1")
@@ -221,7 +221,6 @@ def test_concurrent_events_go_into_new_group(
reset_snuba,
register_event_preprocessor,
process_and_save,
- burst_task_runner,
default_user,
django_cache,
):
@@ -249,17 +248,20 @@ def event_preprocessor(data):
group_id=original_issue_id, project=default_project, user_id=default_user.id
)
- with burst_task_runner() as burst_reprocess:
+ with BurstTaskRunner() as burst_reprocess:
reprocess_group(default_project.id, event.group_id)
- assert not is_group_finished(event.group_id)
+ assert not is_group_finished(event.group_id)
- event_id2 = process_and_save({"message": "hello world"})
- event2 = eventstore.backend.get_event_by_id(default_project.id, event_id2)
- assert event2.event_id != event.event_id
- assert event2.group_id != event.group_id
+ # this triggers an async task as well: allow it to complete
+ with burst_reprocess.temporarily_enable_normal_task_processing():
+ event_id2 = process_and_save({"message": "hello world"})
+
+ event2 = eventstore.backend.get_event_by_id(default_project.id, event_id2)
+ assert event2.event_id != event.event_id
+ assert event2.group_id != event.group_id
- burst_reprocess(max_jobs=100)
+ burst_reprocess(max_jobs=100)
event3 = eventstore.backend.get_event_by_id(default_project.id, event_id)
assert event3.event_id == event.event_id
@@ -287,7 +289,6 @@ def test_max_events(
reset_snuba,
register_event_preprocessor,
process_and_save,
- burst_task_runner,
monkeypatch,
remaining_events,
max_events,
@@ -313,7 +314,7 @@ def event_preprocessor(data):
(group_id,) = {e.group_id for e in old_events.values()}
- with burst_task_runner() as burst:
+ with BurstTaskRunner() as burst:
reprocess_group(
default_project.id,
group_id,
@@ -321,7 +322,7 @@ def event_preprocessor(data):
remaining_events=remaining_events,
)
- burst(max_jobs=100)
+ burst(max_jobs=100)
for i, event_id in enumerate(event_ids):
event = eventstore.backend.get_event_by_id(default_project.id, event_id)
@@ -361,7 +362,6 @@ def test_attachments_and_userfeedback(
reset_snuba,
register_event_preprocessor,
process_and_save,
- burst_task_runner,
monkeypatch,
):
@register_event_preprocessor
@@ -398,10 +398,10 @@ def event_preprocessor(data):
_create_user_report(evt)
- with burst_task_runner() as burst:
+ with BurstTaskRunner() as burst:
reprocess_group(default_project.id, event.group_id, max_events=1)
- burst(max_jobs=100)
+ burst(max_jobs=100)
new_event = eventstore.backend.get_event_by_id(default_project.id, event_id)
assert new_event.group_id != event.group_id
@@ -430,7 +430,6 @@ def test_nodestore_missing(
default_project,
reset_snuba,
process_and_save,
- burst_task_runner,
monkeypatch,
remaining_events,
django_cache,
@@ -440,12 +439,12 @@ def test_nodestore_missing(
event = eventstore.backend.get_event_by_id(default_project.id, event_id)
old_group = event.group
- with burst_task_runner() as burst:
+ with BurstTaskRunner() as burst:
reprocess_group(
default_project.id, event.group_id, max_events=1, remaining_events=remaining_events
)
- burst(max_jobs=100)
+ burst(max_jobs=100)
assert is_group_finished(event.group_id)
@@ -474,7 +473,6 @@ def test_apply_new_fingerprinting_rules(
reset_snuba,
register_event_preprocessor,
process_and_save,
- burst_task_runner,
):
"""
Assert that after changing fingerprinting rules, the new fingerprinting config
@@ -511,9 +509,9 @@ def event_preprocessor(data):
return_value=new_rules,
):
# Reprocess
- with burst_task_runner() as burst_reprocess:
+ with BurstTaskRunner() as burst_reprocess:
reprocess_group(default_project.id, event1.group_id)
- burst_reprocess(max_jobs=100)
+ burst_reprocess(max_jobs=100)
assert is_group_finished(event1.group_id)
@@ -539,7 +537,6 @@ def test_apply_new_stack_trace_rules(
reset_snuba,
register_event_preprocessor,
process_and_save,
- burst_task_runner,
):
"""
Assert that after changing stack trace rules, the new grouping config
@@ -607,10 +604,10 @@ def event_preprocessor(data):
},
):
# Reprocess
- with burst_task_runner() as burst_reprocess:
+ with BurstTaskRunner() as burst_reprocess:
reprocess_group(default_project.id, event1.group_id)
reprocess_group(default_project.id, event2.group_id)
- burst_reprocess(max_jobs=100)
+ burst_reprocess(max_jobs=100)
assert is_group_finished(event1.group_id)
assert is_group_finished(event2.group_id)
diff --git a/tests/symbolicator/test_minidump_full.py b/tests/symbolicator/test_minidump_full.py
index c0d75601f8e5e0..25d19a14d20f95 100644
--- a/tests/symbolicator/test_minidump_full.py
+++ b/tests/symbolicator/test_minidump_full.py
@@ -178,7 +178,7 @@ def test_reprocessing(self):
with BurstTaskRunner() as burst:
reprocess_group.delay(project_id=self.project.id, group_id=event.group_id)
- burst(max_jobs=100)
+ burst(max_jobs=100)
new_event = eventstore.backend.get_event_by_id(self.project.id, event.event_id)
assert new_event is not None
|
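The refactor above replaces the closure-based BurstTaskRunner with a _BurstState object, so the queue lives on the yielded value and work must now be triggered inside the with block. A hedged usage sketch following the pattern the diff itself uses in tests (my_task is a placeholder Celery task, not a real Sentry task):

from sentry.testutils.helpers.task_runner import BurstTaskRunner

def test_burst_runner_pattern():
    # my_task is assumed to be a Celery task defined elsewhere; it stands in for
    # whatever the test schedules (e.g. reprocess_group in the diff above).
    with BurstTaskRunner() as burst:
        my_task.delay(123)        # captured on burst.queue instead of executing
        burst(max_jobs=100)       # drain the queue while the patch is still active
        assert burst.queue == []  # __call__ raises if it cannot empty the queue

Calling burst() after the with block exits would now raise, since _BurstState refuses to run outside the patched context; that is why the diff re-indents every burst(...) call into the with block.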
f4b8e46be80e665b3ea930d59ae2eaf91fbf7972
|
2023-09-05 20:28:33
|
anthony sottile
|
ref: fix tests/sentry/sentry_metrics/test_kafka.py missing django mark (#55687)
| false
|
fix tests/sentry/sentry_metrics/test_kafka.py missing django mark (#55687)
|
ref
|
diff --git a/tests/sentry/sentry_metrics/test_kafka.py b/tests/sentry/sentry_metrics/test_kafka.py
index dce387671beaf0..29d789cd3a716f 100644
--- a/tests/sentry/sentry_metrics/test_kafka.py
+++ b/tests/sentry/sentry_metrics/test_kafka.py
@@ -1,6 +1,7 @@
from datetime import datetime
from unittest import TestCase
+import pytest
from arroyo.backends.kafka import KafkaPayload
from arroyo.backends.local.backend import LocalBroker, LocalProducer
from arroyo.backends.local.storages.memory import MemoryMessageStorage
@@ -13,6 +14,7 @@
class KafkaMetricsInterfaceTest(GenericMetricsTestMixIn, TestCase):
+ @pytest.mark.django_db
def test_produce_metrics(self) -> None:
generic_metrics_backend = KafkaMetricsBackend()
# For testing, we are calling close() here because we
|
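The fix above is only a missing pytest-django mark, but it is the standard way to opt a plain pytest test into database access. A minimal illustration (the test body is hypothetical):

import pytest

from sentry.models.organization import Organization  # any ORM model would do here

@pytest.mark.django_db
def test_can_touch_the_database():
    # Without the mark, pytest-django blocks queries and the test errors out.
    assert not Organization.objects.filter(slug="does-not-exist").exists()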
263a737e00e63baddd93da9f8f69a3ba25b3cc7b
|
2023-07-31 21:42:46
|
Cathy Teng
|
feat(github-growth): fetch missing members in API (#53532)
| false
|
fetch missing members in API (#53532)
|
feat
|
diff --git a/src/sentry/api/endpoints/organization_missing_org_members.py b/src/sentry/api/endpoints/organization_missing_org_members.py
new file mode 100644
index 00000000000000..d4f8b5ab6dfb91
--- /dev/null
+++ b/src/sentry/api/endpoints/organization_missing_org_members.py
@@ -0,0 +1,75 @@
+from datetime import timedelta
+
+from django.db.models import Count, QuerySet
+from django.utils import timezone
+from rest_framework import status
+from rest_framework.request import Request
+from rest_framework.response import Response
+
+from sentry.api.base import region_silo_endpoint
+from sentry.api.bases.organization import OrganizationEndpoint, OrganizationPermission
+from sentry.api.serializers import Serializer, serialize
+from sentry.models import Repository
+from sentry.models.commitauthor import CommitAuthor
+from sentry.models.organization import Organization
+
+
+class MissingOrgMemberSerializer(Serializer):
+ def serialize(self, obj, attrs, user, **kwargs):
+ return {"email": obj.email, "externalId": obj.external_id, "commitCount": obj.commit_count}
+
+
+class MissingMembersPermission(OrganizationPermission):
+ scope_map = {"GET": ["org:write"]}
+
+
+@region_silo_endpoint
+class OrganizationMissingMembersEndpoint(OrganizationEndpoint):
+ permission_classes = (MissingMembersPermission,)
+
+ def _get_missing_members(self, organization: Organization) -> QuerySet[CommitAuthor]:
+ member_emails = set(
+ organization.member_set.exclude(email=None).values_list("email", flat=True)
+ )
+ member_emails.update(
+ set(
+ organization.member_set.exclude(user_email=None).values_list(
+ "user_email", flat=True
+ )
+ )
+ )
+ nonmember_authors = CommitAuthor.objects.filter(organization_id=organization.id).exclude(
+ email__in=member_emails
+ )
+
+ org_repos = Repository.objects.filter(
+ provider="integrations:github", organization_id=organization.id
+ ).values_list("id", flat=True)
+
+ # This is currently for Github only
+ return (
+ nonmember_authors.filter(
+ commit__repository_id__in=set(org_repos),
+ commit__date_added__gte=timezone.now() - timedelta(days=30),
+ )
+ .annotate(commit_count=Count("commit"))
+ .order_by("-commit_count")
+ )
+
+ # TODO(cathy): check domain
+
+ def get(self, request: Request, organization) -> Response:
+ # TODO(cathy): search
+ queryset = self._get_missing_members(organization)
+
+ return Response(
+ [
+ {
+ "integration": "github",
+ "users": serialize(
+ list(queryset), request.user, serializer=MissingOrgMemberSerializer()
+ ),
+ }
+ ],
+ status=status.HTTP_200_OK,
+ )
diff --git a/src/sentry/api/urls.py b/src/sentry/api/urls.py
index 24818fcfa27da0..92dbb7e6c33132 100644
--- a/src/sentry/api/urls.py
+++ b/src/sentry/api/urls.py
@@ -9,6 +9,7 @@
OrganizationEventsFacetsStatsPerformanceEndpoint,
)
from sentry.api.endpoints.organization_events_starfish import OrganizationEventsStarfishEndpoint
+from sentry.api.endpoints.organization_missing_org_members import OrganizationMissingMembersEndpoint
from sentry.api.endpoints.organization_projects_experiment import (
OrganizationProjectsExperimentEndpoint,
)
@@ -1239,6 +1240,11 @@
OrganizationMetricsCompatibilitySums.as_view(),
name="sentry-api-0-organization-metrics-compatibility-sums",
),
+ re_path(
+ r"^(?P<organization_slug>[^\/]+)/missing-members/$",
+ OrganizationMissingMembersEndpoint.as_view(),
+ name="sentry-api-0-organization-missing-members",
+ ),
re_path(
r"^(?P<organization_slug>[^\/]+)/events-histogram/$",
OrganizationEventsHistogramEndpoint.as_view(),
diff --git a/tests/sentry/api/test_organization_missing_members.py b/tests/sentry/api/test_organization_missing_members.py
new file mode 100644
index 00000000000000..8d4c6ed219b063
--- /dev/null
+++ b/tests/sentry/api/test_organization_missing_members.py
@@ -0,0 +1,94 @@
+from datetime import timedelta
+
+from django.utils import timezone
+
+from sentry.testutils import APITestCase
+from sentry.testutils.silo import region_silo_test
+
+
+@region_silo_test(stable=True)
+class OrganizationMissingMembersTestCase(APITestCase):
+ endpoint = "sentry-api-0-organization-missing-members"
+ method = "get"
+
+ def setUp(self):
+ super().setUp()
+
+ self.create_member(
+ email="[email protected]",
+ organization=self.organization,
+ )
+ member = self.create_member(user=self.create_user(), organization=self.organization)
+ member.user_email = "[email protected]"
+ member.save()
+
+ self.member_commit_author = self.create_commit_author(
+ project=self.project, email="[email protected]"
+ )
+ self.nonmember_commit_author1 = self.create_commit_author(
+ project=self.project, email="[email protected]"
+ )
+ self.nonmember_commit_author1.external_id = "c"
+ self.nonmember_commit_author1.save()
+
+ self.nonmember_commit_author2 = self.create_commit_author(
+ project=self.project, email="[email protected]"
+ )
+ self.nonmember_commit_author2.external_id = "d"
+ self.nonmember_commit_author2.save()
+
+ self.repo = self.create_repo(project=self.project, provider="integrations:github")
+ self.create_commit(repo=self.repo, author=self.member_commit_author)
+ self.create_commit(repo=self.repo, author=self.nonmember_commit_author1)
+ self.create_commit(repo=self.repo, author=self.nonmember_commit_author1)
+ self.create_commit(repo=self.repo, author=self.nonmember_commit_author2)
+
+ self.login_as(self.user)
+
+ def test_simple(self):
+ response = self.get_success_response(self.organization.slug)
+ assert response.data[0]["integration"] == "github"
+ assert response.data[0]["users"] == [
+ {"email": "[email protected]", "externalId": "c", "commitCount": 2},
+ {"email": "[email protected]", "externalId": "d", "commitCount": 1},
+ ]
+
+ def test_need_org_write(self):
+ user = self.create_user()
+ self.create_member(organization=self.organization, user=user, role="member")
+ self.login_as(user)
+
+ self.get_error_response(self.organization.slug, status=403)
+
+ def test_filters_github_only(self):
+ repo = self.create_repo(project=self.project, provider="integrations:bitbucket")
+ self.create_commit(repo=repo, author=self.nonmember_commit_author1)
+
+ response = self.get_success_response(self.organization.slug)
+ assert response.data[0]["integration"] == "github"
+ assert response.data[0]["users"] == [
+ {"email": "[email protected]", "externalId": "c", "commitCount": 2},
+ {"email": "[email protected]", "externalId": "d", "commitCount": 1},
+ ]
+
+ def test_filters_old_commits(self):
+ self.create_commit(
+ repo=self.repo,
+ author=self.nonmember_commit_author1,
+ date_added=timezone.now() - timedelta(days=31),
+ )
+
+ response = self.get_success_response(self.organization.slug)
+ assert response.data[0]["integration"] == "github"
+ assert response.data[0]["users"] == [
+ {"email": "[email protected]", "externalId": "c", "commitCount": 2},
+ {"email": "[email protected]", "externalId": "d", "commitCount": 1},
+ ]
+
+ def test_no_authors(self):
+ org = self.create_organization()
+ self.create_member(user=self.user, organization=org, role="manager")
+
+ response = self.get_success_response(org.slug)
+ assert response.data[0]["integration"] == "github"
+ assert response.data[0]["users"] == []
|
6ee764cad3ac68c1668bc648f3698393358d9ae5
|
2024-01-16 23:04:08
|
Nar Saynorath
|
fix(app-start): Use optional access for getting data from row (#63238)
| false
|
Use optional access for getting data from row (#63238)
|
fix
|
diff --git a/static/app/views/starfish/views/appStartup/breakdown.tsx b/static/app/views/starfish/views/appStartup/breakdown.tsx
index e8a60fb86acad2..6b71e2b3383a2c 100644
--- a/static/app/views/starfish/views/appStartup/breakdown.tsx
+++ b/static/app/views/starfish/views/appStartup/breakdown.tsx
@@ -49,11 +49,12 @@ function Breakdown({
row: Row;
['data-test-id']?: string;
}) {
- const total = breakdownGroups.reduce((acc, {key}) => acc + (row[key] ?? 0), 0);
+ const total = breakdownGroups.reduce((acc, {key}) => acc + (row?.[key] ?? 0), 0);
if (total === 0) {
return null;
}
+
return (
<Tooltip
title={
|
56063f9bd59bfb5d6b476f77d50b002f3b3b2207
|
2024-03-22 00:52:38
|
William Mak
|
feat(trace): Add measurements to trace endpoint using spans (#67434)
| false
|
Add measurements to trace endpoint using spans (#67434)
|
feat
|
diff --git a/src/sentry/api/endpoints/organization_events_trace.py b/src/sentry/api/endpoints/organization_events_trace.py
index 794064ff1345d5..b8f9918537dd85 100644
--- a/src/sentry/api/endpoints/organization_events_trace.py
+++ b/src/sentry/api/endpoints/organization_events_trace.py
@@ -334,6 +334,9 @@ def full_dict(
result["timestamp"] = self.event["precise.finish_ts"]
result["start_timestamp"] = self.event["precise.start_ts"]
result["profile_id"] = self.event["profile.id"]
+ # TODO: once we're defaulting measurements we don't need this check
+ if "measurements" in self.event:
+ result["measurements"] = self.event["measurements"]
if self.nodestore_event:
result["timestamp"] = self.nodestore_event.data.get("timestamp")
result["start_timestamp"] = self.nodestore_event.data.get("start_timestamp")
@@ -427,6 +430,7 @@ def query_trace_data(
params: Mapping[str, str],
limit: int,
event_id: str | None,
+ get_measurements: bool,
) -> tuple[Sequence[SnubaTransaction], Sequence[SnubaError]]:
transaction_columns = [
"id",
@@ -452,6 +456,13 @@ def query_trace_data(
# Target is the event_id the frontend plans to render, we try to sort it to the top so it loads even if its not
# within the query limit, needs to be the first orderby cause it takes precedence over finding the root
transaction_orderby.insert(0, "-target")
+ if get_measurements:
+ transaction_columns.extend(
+ [
+ "measurements.key",
+ "measurements.value",
+ ]
+ )
transaction_query = QueryBuilder(
Dataset.Transactions,
params,
@@ -517,6 +528,14 @@ def query_trace_data(
result["issue.ids"] = occurrence_issue_ids.get(result["id"], {})
result["occurrence_id"] = occurrence_ids.get(result["id"])
result["trace.parent_transaction"] = None
+ if get_measurements:
+ result["measurements"] = {
+ key: {
+ "value": value,
+ "type": transaction_query.get_field_type(f"measurements.{key}"),
+ }
+ for key, value in zip(result["measurements.key"], result["measurements.value"])
+ }
return cast(Sequence[SnubaTransaction], transformed_results[0]), cast(
Sequence[SnubaError], transformed_results[1]
@@ -792,7 +811,10 @@ def get(self, request: HttpRequest, organization: Organization, trace_id: str) -
# Detailed is deprecated now that we want to use spans instead
detailed: bool = request.GET.get("detailed", "0") == "1"
+ # Temporary url params until we finish migrating the frontend
use_spans: bool = request.GET.get("useSpans", "0") == "1"
+ # Temporary until we can test getMeasurements in prod a bit to make sure the performance impact is reasonable
+ get_measurements: bool = request.GET.get("getMeasurements", "0") == "1"
update_params_with_timestamp(request, params)
sentry_sdk.set_tag("trace_view.using_spans", str(use_spans))
@@ -812,12 +834,14 @@ def get(self, request: HttpRequest, organization: Organization, trace_id: str) -
)
with handle_query_errors():
if use_spans:
- transactions, errors = query_trace_data(trace_id, params, limit, event_id)
+ transactions, errors = query_trace_data(
+ trace_id, params, limit, event_id, get_measurements
+ )
transactions = augment_transactions_with_spans(
transactions, errors, trace_id, params
)
else:
- transactions, errors = query_trace_data(trace_id, params, limit, None)
+ transactions, errors = query_trace_data(trace_id, params, limit, None, False)
if len(transactions) == 0 and not tracing_without_performance_enabled:
return Response(status=404)
self.record_analytics(transactions, trace_id, self.request.user.id, organization.id)
diff --git a/tests/snuba/api/endpoints/test_organization_events_trace.py b/tests/snuba/api/endpoints/test_organization_events_trace.py
index 49ee6acf43bc1a..2521f078d79946 100644
--- a/tests/snuba/api/endpoints/test_organization_events_trace.py
+++ b/tests/snuba/api/endpoints/test_organization_events_trace.py
@@ -167,6 +167,7 @@ def load_trace(self):
measurements={
"lcp": 1000,
"fcp": 750,
+ "fid": 3.5,
},
parent_span_id=None,
file_io_performance_issue=True,
@@ -1637,6 +1638,21 @@ def test_event_id(self):
trace_transaction = response.data["transactions"][0]
self.assert_event(trace_transaction, self.gen1_events[0], "root")
+ def test_measurements(self):
+ self.load_trace()
+ with self.feature(self.FEATURES):
+ response = self.client_get(
+ data={"project": -1, "getMeasurements": 1},
+ )
+ assert response.status_code == 200, response.content
+ trace_transaction = response.data["transactions"][0]
+ self.assert_trace_data(trace_transaction)
+ root = trace_transaction
+ assert root["measurements"]["lcp"]["value"] == 1000
+ assert root["measurements"]["lcp"]["type"] == "duration"
+ assert root["measurements"]["fid"]["value"] == 3.5
+ assert root["measurements"]["fid"]["type"] == "duration"
+
@region_silo_test
class OrganizationEventsTraceMetaEndpointTest(OrganizationEventsTraceEndpointBase):
|
5692d2f25470db42c3780761106d7426e7a309d3
|
2024-11-08 00:46:50
|
Tony Xiao
|
fix(trace-explorer): Reorder project avatars correctly (#80384)
| false
|
Reorder project avatars correctly (#80384)
|
fix
|
diff --git a/static/app/views/explore/tables/tracesTable/fieldRenderers.tsx b/static/app/views/explore/tables/tracesTable/fieldRenderers.tsx
index 2d20a4a17ed271..8a1f9710533936 100644
--- a/static/app/views/explore/tables/tracesTable/fieldRenderers.tsx
+++ b/static/app/views/explore/tables/tracesTable/fieldRenderers.tsx
@@ -71,8 +71,10 @@ export function ProjectsRenderer({
}: ProjectsRendererProps) {
const organization = useOrganization();
const {projects} = useProjects({slugs: projectSlugs, orgId: organization.slug});
- const projectAvatars =
- projects.length > 0 ? projects : projectSlugs.map(slug => ({slug}));
+ // ensure that projectAvatars is in the same order as the projectSlugs prop
+ const projectAvatars = projectSlugs.map(slug => {
+ return projects.find(project => project.slug === slug) ?? {slug};
+ });
const numProjects = projectAvatars.length;
const numVisibleProjects =
maxVisibleProjects - numProjects >= 0 ? numProjects : maxVisibleProjects - 1;
diff --git a/static/app/views/traces/fieldRenderers.tsx b/static/app/views/traces/fieldRenderers.tsx
index e2bb5586775c9a..fdf4cc983d5bbc 100644
--- a/static/app/views/traces/fieldRenderers.tsx
+++ b/static/app/views/traces/fieldRenderers.tsx
@@ -71,8 +71,10 @@ export function ProjectsRenderer({
return (
<Projects orgId={organization.slug} slugs={projectSlugs}>
{({projects}) => {
- const projectAvatars =
- projects.length > 0 ? projects : projectSlugs.map(slug => ({slug}));
+ // ensure that projectAvatars is in the same order as the projectSlugs prop
+ const projectAvatars = projectSlugs.map(slug => {
+ return projects.find(project => project.slug === slug) ?? {slug};
+ });
const numProjects = projectAvatars.length;
const numVisibleProjects =
maxVisibleProjects - numProjects >= 0 ? numProjects : maxVisibleProjects - 1;
|
31072a4d0129d5573ba7f50619536ceb2785941d
|
2022-08-03 12:40:28
|
Priscila Oliveira
|
ref(extra-data): Replace getMeta (proxy) with _meta object - (#37349)
| false
|
Replace getMeta (proxy) with _meta object - (#37349)
|
ref
|
diff --git a/static/app/components/events/eventEntries.tsx b/static/app/components/events/eventEntries.tsx
index 349ac040271640..b15aeed532edb6 100644
--- a/static/app/components/events/eventEntries.tsx
+++ b/static/app/components/events/eventEntries.tsx
@@ -15,7 +15,7 @@ import EventAttachments from 'sentry/components/events/eventAttachments';
import EventCause from 'sentry/components/events/eventCause';
import EventCauseEmpty from 'sentry/components/events/eventCauseEmpty';
import EventDataSection from 'sentry/components/events/eventDataSection';
-import EventExtraData from 'sentry/components/events/eventExtraData/eventExtraData';
+import EventExtraData from 'sentry/components/events/eventExtraData';
import EventSdk from 'sentry/components/events/eventSdk';
import {EventTags} from 'sentry/components/events/eventTags';
import EventGroupingInfo from 'sentry/components/events/groupingInfo';
diff --git a/static/app/components/events/eventExtraData/eventDataContent.tsx b/static/app/components/events/eventExtraData/eventDataContent.tsx
deleted file mode 100644
index b55bdf3bf7c131..00000000000000
--- a/static/app/components/events/eventExtraData/eventDataContent.tsx
+++ /dev/null
@@ -1,19 +0,0 @@
-import ContextBlock from 'sentry/components/events/contexts/contextBlock';
-import {defined} from 'sentry/utils';
-
-import getEventExtraDataKnownData from './getEventExtraDataKnownData';
-
-type Props = {
- raw: boolean;
- data?: Record<string, any>;
-};
-
-const EventDataContent = ({data, raw}: Props) => {
- if (!defined(data)) {
- return null;
- }
-
- return <ContextBlock data={getEventExtraDataKnownData(data)} raw={raw} />;
-};
-
-export default EventDataContent;
diff --git a/static/app/components/events/eventExtraData/getEventExtraDataKnownData.tsx b/static/app/components/events/eventExtraData/getEventExtraDataKnownData.tsx
index 58745f21872264..66f69a31b60966 100644
--- a/static/app/components/events/eventExtraData/getEventExtraDataKnownData.tsx
+++ b/static/app/components/events/eventExtraData/getEventExtraDataKnownData.tsx
@@ -1,26 +1,64 @@
-import {getMeta} from 'sentry/components/events/meta/metaProxy';
+import isObject from 'lodash/isObject';
+
import {KeyValueListData} from 'sentry/types';
+import AnnotatedText from '../meta/annotatedText';
+
import getEventExtraDataKnownDataDetails from './getEventExtraDataKnownDataDetails';
import {EventExtraData, EventExtraDataType} from './types';
-function getEventExtraDataKnownData(data: EventExtraData): KeyValueListData {
+export function getEventExtraDataKnownData(
+ data: EventExtraData,
+ meta: Record<any, any> | undefined
+): KeyValueListData {
const knownData: KeyValueListData = [];
- const dataKeys = Object.keys(data);
- for (const key of dataKeys) {
- const knownDataDetails = getEventExtraDataKnownDataDetails(
+ for (const key of Object.keys(data)) {
+ const {subject, value} = getEventExtraDataKnownDataDetails(
data,
key as EventExtraDataType
);
+ if (Array.isArray(value)) {
+ knownData.push({
+ key,
+ subject,
+ value: value.map((v, index) =>
+ meta?.[key]?.[index]?.[''] ? (
+ <AnnotatedText key={index} value={v} meta={meta?.[key]?.[index]?.['']} />
+ ) : (
+ v
+ )
+ ),
+ });
+ continue;
+ }
+
+ if (isObject(value)) {
+ knownData.push({
+ key,
+ subject,
+ value: Object.keys(value).map((v, index) =>
+ meta?.[key]?.[index]?.[''] ? (
+ <AnnotatedText
+ key={index}
+ value={value[v]}
+ meta={meta?.[key]?.[index]?.['']}
+ />
+ ) : (
+ value[v]
+ )
+ ),
+ });
+ }
+
knownData.push({
key,
- ...knownDataDetails,
- meta: getMeta(data, key),
+ subject,
+ value,
+ meta: meta?.[key]?.[''],
});
}
+
return knownData;
}
-
-export default getEventExtraDataKnownData;
diff --git a/static/app/components/events/eventExtraData/eventExtraData.tsx b/static/app/components/events/eventExtraData/index.tsx
similarity index 62%
rename from static/app/components/events/eventExtraData/eventExtraData.tsx
rename to static/app/components/events/eventExtraData/index.tsx
index fbc55a8c1853f3..619c6d16d094d7 100644
--- a/static/app/components/events/eventExtraData/eventExtraData.tsx
+++ b/static/app/components/events/eventExtraData/index.tsx
@@ -1,10 +1,12 @@
import {memo, useState} from 'react';
+import ContextBlock from 'sentry/components/events/contexts/contextBlock';
import EventDataSection from 'sentry/components/events/eventDataSection';
import {t} from 'sentry/locale';
import {Event} from 'sentry/types/event';
+import {defined} from 'sentry/utils';
-import EventDataContent from './eventDataContent';
+import {getEventExtraDataKnownData} from './getEventExtraDataKnownData';
type Props = {
event: Event;
@@ -20,7 +22,12 @@ const EventExtraData = memo(
toggleRaw={() => setRaw(!raw)}
raw={raw}
>
- <EventDataContent raw={raw} data={event.context} />
+ {!defined(event.context) ? null : (
+ <ContextBlock
+ data={getEventExtraDataKnownData(event.context, event._meta?.context)}
+ raw={raw}
+ />
+ )}
</EventDataSection>
);
},
diff --git a/tests/js/spec/components/events/eventExtraData.spec.tsx b/tests/js/spec/components/events/eventExtraData.spec.tsx
new file mode 100644
index 00000000000000..a0f4a8568f3036
--- /dev/null
+++ b/tests/js/spec/components/events/eventExtraData.spec.tsx
@@ -0,0 +1,172 @@
+import {render, screen, userEvent} from 'sentry-test/reactTestingLibrary';
+
+import EventExtraData from 'sentry/components/events/eventExtraData';
+
+describe('EventExtraData', function () {
+ it('display redacted data', async function () {
+ const event = {
+ ...TestStubs.Event(),
+ context: {
+ 'sys.argv': ['', '', '', '', '', '', '', '', '', ''],
+ },
+ _meta: {
+ context: {
+ 'sys.argv': {
+ '0': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 49,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '1': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 17,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '2': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 12,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '3': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 8,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '4': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 30,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '5': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 8,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '6': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 18,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '7': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 8,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '8': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 26,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '9': {
+ '': {
+ rem: [['project:3', 's', 0, 0]],
+ len: 8,
+ chunks: [
+ {
+ type: 'redaction',
+ text: '',
+ rule_id: 'project:3',
+ remark: 's',
+ },
+ ],
+ },
+ },
+ '': {
+ len: 14,
+ },
+ },
+ },
+ },
+ };
+ render(<EventExtraData event={event} />);
+
+ expect(screen.getAllByText(/redacted/)).toHaveLength(10);
+
+ userEvent.hover(screen.getAllByText(/redacted/)[0]);
+
+ expect(
+ await screen.findByText('Replaced because of PII rule "project:3"')
+ ).toBeInTheDocument(); // tooltip description
+ });
+});
|
b8ce716c4fef3f455067b69a02baf7af381e63a3
|
2024-09-11 01:48:08
|
Michelle Zhang
|
style(replay): rm some inline styles (#77265)
| false
|
rm some inline styles (#77265)
|
style
|
diff --git a/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx b/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx
index e20b3d0f6d9830..62c33141f5517a 100644
--- a/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx
+++ b/static/app/components/replays/breadcrumbs/replayComparisonModal.tsx
@@ -1,4 +1,3 @@
-import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
import type {ModalRenderProps} from 'sentry/actionCreators/modal';
@@ -31,7 +30,6 @@ export default function ReplayComparisonModal({
// Callbacks set by GlobalModal on-render.
// We need these to interact with feedback opened while a modal is active.
const {focusTrap} = useGlobalModal();
- const theme = useTheme();
const isSameTimestamp = leftOffsetMs === rightOffsetMs;
@@ -63,8 +61,8 @@ export default function ReplayComparisonModal({
{tct(
'This modal helps with debugging hydration errors by diffing the dom before and after the app hydrated. [boldBefore:Before] refers to the html rendered on the server. [boldAfter:After] refers to the html rendered on the client. This feature is actively being developed; please share any questions or feedback to the discussion linked above.',
{
- boldBefore: <strong css={{color: `${theme.red300}`}} />,
- boldAfter: <strong css={{color: `${theme.green300}`}} />,
+ boldBefore: <Before />,
+ boldAfter: <After />,
}
)}
</StyledParagraph>
@@ -108,3 +106,11 @@ const StyledParagraph = styled('p')`
padding-top: ${space(0.5)};
margin-bottom: ${space(1)};
`;
+
+const Before = styled('strong')`
+ color: ${p => p.theme.red300};
+`;
+
+const After = styled('strong')`
+ color: ${p => p.theme.green300};
+`;
diff --git a/static/app/components/replays/diff/replaySideBySideImageDiff.tsx b/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
index 07a09b8f05a6c7..b083c7221b7558 100644
--- a/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
+++ b/static/app/components/replays/diff/replaySideBySideImageDiff.tsx
@@ -1,4 +1,3 @@
-import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
import {Flex} from 'sentry/components/container/flex';
@@ -16,17 +15,16 @@ interface Props {
export function ReplaySideBySideImageDiff({leftOffsetMs, replay, rightOffsetMs}: Props) {
const fetching = false;
- const theme = useTheme();
return (
<Flex gap={space(2)} column>
<DiffHeader>
- <Flex flex="1" align="center" css={{color: `${theme.red300}`}}>
+ <Before flex="1" align="center">
{t('Before')}
- </Flex>
- <Flex flex="1" align="center" css={{color: `${theme.green300}`}}>
+ </Before>
+ <After flex="1" align="center">
{t('After')}
- </Flex>
+ </After>
</DiffHeader>
<ReplayGrid>
@@ -67,9 +65,19 @@ const DiffHeader = styled('div')`
div:last-child {
padding-left: ${space(2)};
}
+
+ margin: 10px 0;
`;
const ReplayGrid = styled('div')`
display: grid;
grid-template-columns: 1fr 1fr;
`;
+
+export const Before = styled(Flex)`
+ color: ${p => p.theme.red300};
+`;
+
+export const After = styled(Flex)`
+ color: ${p => p.theme.green300};
+`;
diff --git a/static/app/components/replays/diff/replaySliderDiff.tsx b/static/app/components/replays/diff/replaySliderDiff.tsx
index a913d4bfa00aaf..1dd406e1a023cc 100644
--- a/static/app/components/replays/diff/replaySliderDiff.tsx
+++ b/static/app/components/replays/diff/replaySliderDiff.tsx
@@ -1,5 +1,4 @@
import {Fragment, useCallback, useRef} from 'react';
-import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
import NegativeSpaceContainer from 'sentry/components/container/negativeSpaceContainer';
@@ -33,22 +32,20 @@ export function ReplaySliderDiff({
}: Props) {
const positionedRef = useRef<HTMLDivElement>(null);
const viewDimensions = useDimensions({elementRef: positionedRef});
- const theme = useTheme();
-
const width = toPixels(viewDimensions.width);
return (
<Fragment>
<Header>
<Tooltip title={t('How the initial server-rendered page looked.')}>
- <div style={{color: `${theme.red300}`, fontWeight: 'bold'}}>{t('Before')}</div>
+ <Before>{t('Before')}</Before>
</Tooltip>
<Tooltip
title={t(
'How React re-rendered the page on your browser, after detecting a hydration error.'
)}
>
- <div style={{color: `${theme.green300}`, fontWeight: 'bold'}}>{t('After')}</div>
+ <After>{t('After')}</After>
</Tooltip>
</Header>
<WithPadding>
@@ -116,22 +113,22 @@ function DiffSides({leftOffsetMs, replay, rightOffsetMs, viewDimensions, width})
<Cover style={{width}}>
<Placement style={{width}}>
<ReplayPlayerStateContextProvider>
- <NegativeSpaceContainer style={{height: '100%'}}>
+ <StyledNegativeSpaceContainer>
<ReplayPlayerMeasurer measure="both">
{style => <ReplayPlayer style={style} offsetMs={leftOffsetMs} />}
</ReplayPlayerMeasurer>
- </NegativeSpaceContainer>
+ </StyledNegativeSpaceContainer>
</ReplayPlayerStateContextProvider>
</Placement>
</Cover>
<Cover ref={rightSideElem} style={{width: 0}}>
<Placement style={{width}}>
<ReplayPlayerStateContextProvider>
- <NegativeSpaceContainer style={{height: '100%'}}>
+ <StyledNegativeSpaceContainer>
<ReplayPlayerMeasurer measure="both">
{style => <ReplayPlayer style={style} offsetMs={rightOffsetMs} />}
</ReplayPlayerMeasurer>
- </NegativeSpaceContainer>
+ </StyledNegativeSpaceContainer>
</ReplayPlayerStateContextProvider>
</Placement>
</Cover>
@@ -216,4 +213,19 @@ const Header = styled('div')`
display: flex;
justify-content: space-between;
align-items: center;
+ margin: 14px 0;
+`;
+
+const Before = styled('div')`
+ color: ${p => p.theme.red300};
+ font-weight: bold;
+`;
+
+const After = styled('div')`
+ color: ${p => p.theme.green300};
+ font-weight: bold;
+`;
+
+const StyledNegativeSpaceContainer = styled(NegativeSpaceContainer)`
+ height: 100%;
`;
diff --git a/static/app/components/replays/diff/replayTextDiff.tsx b/static/app/components/replays/diff/replayTextDiff.tsx
index 2391fc47857728..dabe9a66c385e4 100644
--- a/static/app/components/replays/diff/replayTextDiff.tsx
+++ b/static/app/components/replays/diff/replayTextDiff.tsx
@@ -1,10 +1,9 @@
import {Fragment, useMemo} from 'react';
-import {useTheme} from '@emotion/react';
import styled from '@emotion/styled';
import beautify from 'js-beautify';
-import {Flex} from 'sentry/components/container/flex';
import {CopyToClipboardButton} from 'sentry/components/copyToClipboardButton';
+import {After, Before} from 'sentry/components/replays/diff/replaySideBySideImageDiff';
import SplitDiff from 'sentry/components/splitDiff';
import {t} from 'sentry/locale';
import {space} from 'sentry/styles/space';
@@ -18,7 +17,6 @@ interface Props {
}
export function ReplayTextDiff({replay, leftOffsetMs, rightOffsetMs}: Props) {
- const theme = useTheme();
const {data} = useExtractPageHtml({
replay,
offsetMsToStopAt: [leftOffsetMs, rightOffsetMs],
@@ -32,7 +30,7 @@ export function ReplayTextDiff({replay, leftOffsetMs, rightOffsetMs}: Props) {
return (
<Fragment>
<DiffHeader>
- <Flex flex="1" align="center" css={{color: `${theme.red300}`}}>
+ <Before flex="1" align="center">
{t('Before')}
<CopyToClipboardButton
text={leftBody ?? ''}
@@ -41,8 +39,8 @@ export function ReplayTextDiff({replay, leftOffsetMs, rightOffsetMs}: Props) {
borderless
aria-label={t('Copy Before')}
/>
- </Flex>
- <Flex flex="1" align="center" css={{color: `${theme.green300}`}}>
+ </Before>
+ <After flex="1" align="center">
{t('After')}
<CopyToClipboardButton
text={rightBody ?? ''}
@@ -51,7 +49,7 @@ export function ReplayTextDiff({replay, leftOffsetMs, rightOffsetMs}: Props) {
borderless
aria-label={t('Copy After')}
/>
- </Flex>
+ </After>
</DiffHeader>
<SplitDiffScrollWrapper>
<SplitDiff base={leftBody ?? ''} target={rightBody ?? ''} type="words" />
@@ -80,4 +78,6 @@ const DiffHeader = styled('div')`
div:last-child {
padding-left: ${space(2)};
}
+
+ margin: 10px 0;
`;
diff --git a/static/app/components/replays/timeAndScrubberGrid.tsx b/static/app/components/replays/timeAndScrubberGrid.tsx
index 83338f7b2fd42d..a60d64ea746d81 100644
--- a/static/app/components/replays/timeAndScrubberGrid.tsx
+++ b/static/app/components/replays/timeAndScrubberGrid.tsx
@@ -66,20 +66,20 @@ export default function TimeAndScrubberGrid({
return (
<TimelineScaleContextProvider>
<Grid id="replay-timeline-player" isCompact={isCompact}>
- <Numeric style={{gridArea: 'currentTime', paddingInline: space(1.5)}}>
+ <Numeric style={{gridArea: 'currentTime'}}>
<Duration duration={[currentTime, 'ms']} precision="sec" />
</Numeric>
<div style={{gridArea: 'timeline'}}>
<ReplayTimeline />
</div>
- <div style={{gridArea: 'timelineSize', fontVariantNumeric: 'tabular-nums'}}>
+ <TimelineSize style={{gridArea: 'timelineSize'}}>
{showZoom ? <TimelineSizeBar /> : null}
- </div>
+ </TimelineSize>
<StyledScrubber style={{gridArea: 'scrubber'}} ref={elem} {...mouseTrackingProps}>
<PlayerScrubber showZoomIndicators={showZoom} />
</StyledScrubber>
- <Numeric style={{gridArea: 'duration', paddingInline: space(1.5)}}>
+ <Numeric style={{gridArea: 'duration'}}>
{durationMs === undefined ? (
'--:--'
) : (
@@ -121,4 +121,9 @@ const Numeric = styled('span')`
font-size: ${p => p.theme.fontSizeSmall};
font-variant-numeric: tabular-nums;
font-weight: ${p => p.theme.fontWeightBold};
+ padding-inline: ${space(1.5)};
+`;
+
+const TimelineSize = styled('div')`
+ font-variant-numeric: tabular-nums;
`;
diff --git a/static/app/views/replays/replayTable/index.tsx b/static/app/views/replays/replayTable/index.tsx
index e2e834f49e3168..fcb013a8075bb2 100644
--- a/static/app/views/replays/replayTable/index.tsx
+++ b/static/app/views/replays/replayTable/index.tsx
@@ -103,7 +103,7 @@ function ReplayTable({
data-test-id="replay-table"
emptyMessage={emptyMessage}
gridRows={isFetching ? undefined : gridRows}
- loader={<LoadingIndicator style={{margin: '54px auto'}} />}
+ loader={<StyledLoadingIndicator />}
disableHeaderBorderBottom
>
{replays?.map(
@@ -273,3 +273,7 @@ const Row = styled('div')<{
`;
export default ReplayTable;
+
+const StyledLoadingIndicator = styled(LoadingIndicator)`
+ margin: 54px auto;
+`;
|
4b68f943a07846a0c77cd0a8218747b9f0b2cfcb
|
2018-10-11 06:38:13
|
Brett Hoerner
|
ref(eventstream): Call superclass from KafkaEventStream.insert (#10078)
| false
|
Call superclass from KafkaEventStream.insert (#10078)
|
ref
|
diff --git a/src/sentry/eventstream/kafka.py b/src/sentry/eventstream/kafka.py
index 0e10baddbd6ae7..8d4fbd9d2368ba 100644
--- a/src/sentry/eventstream/kafka.py
+++ b/src/sentry/eventstream/kafka.py
@@ -113,6 +113,14 @@ def _send(self, project_id, _type, extra_data=(), asynchronous=True):
def insert(self, group, event, is_new, is_sample, is_regression,
is_new_group_environment, primary_hash, skip_consume=False):
+ # ensure the superclass's insert() is called, regardless of what happens
+ # attempting to send to Kafka
+ super(KafkaEventStream, self).insert(
+ group, event, is_new, is_sample,
+ is_regression, is_new_group_environment,
+ primary_hash, skip_consume
+ )
+
project = event.project
retention_days = quotas.get_event_retention(
organization=Organization(project.organization_id)
|
03162a9bc6e2d637345f088b9b04a043e14dabca
|
2022-03-17 22:32:38
|
Ahmed Etefy
|
feat(metrics): Return crash_free_rate as a ratio (#32734)
| false
|
Return crash_free_rate as a ratio (#32734)
|
feat
|
diff --git a/src/sentry/snuba/metrics/fields/base.py b/src/sentry/snuba/metrics/fields/base.py
index d4d3649f363439..b60d048e01ee70 100644
--- a/src/sentry/snuba/metrics/fields/base.py
+++ b/src/sentry/snuba/metrics/fields/base.py
@@ -362,7 +362,7 @@ def generate_available_operations(self):
metrics=["session.crashed", "session.init"],
unit="percentage",
snql=lambda *args, metric_ids, alias=None: percentage(
- *args, metric_ids, alias="session.crash_free_rate"
+ *args, alias="session.crash_free_rate"
),
),
SingularEntityDerivedMetric(
diff --git a/src/sentry/snuba/metrics/fields/snql.py b/src/sentry/snuba/metrics/fields/snql.py
index ddccd26f95ba93..70afc9ec442ad4 100644
--- a/src/sentry/snuba/metrics/fields/snql.py
+++ b/src/sentry/snuba/metrics/fields/snql.py
@@ -3,7 +3,7 @@
from sentry.sentry_metrics.utils import resolve_weak
-def __counter_sum_aggregation_on_session_status_factory(session_status, metric_ids, alias=None):
+def _counter_sum_aggregation_on_session_status_factory(session_status, metric_ids, alias=None):
return Function(
"sumIf",
[
@@ -27,19 +27,19 @@ def __counter_sum_aggregation_on_session_status_factory(session_status, metric_i
def init_sessions(metric_ids, alias=None):
- return __counter_sum_aggregation_on_session_status_factory(
+ return _counter_sum_aggregation_on_session_status_factory(
session_status="init", metric_ids=metric_ids, alias=alias
)
def crashed_sessions(metric_ids, alias=None):
- return __counter_sum_aggregation_on_session_status_factory(
+ return _counter_sum_aggregation_on_session_status_factory(
session_status="crashed", metric_ids=metric_ids, alias=alias
)
def errored_preaggr_sessions(metric_ids, alias=None):
- return __counter_sum_aggregation_on_session_status_factory(
+ return _counter_sum_aggregation_on_session_status_factory(
session_status="errored_preaggr", metric_ids=metric_ids, alias=alias
)
@@ -61,12 +61,5 @@ def sessions_errored_set(metric_ids, alias=None):
)
-def percentage(arg1_snql, arg2_snql, metric_ids, alias=None):
- return Function(
- "multiply",
- [
- 100,
- Function("minus", [1, Function("divide", [arg1_snql, arg2_snql])]),
- ],
- alias,
- )
+def percentage(arg1_snql, arg2_snql, alias=None):
+ return Function("minus", [1, Function("divide", [arg1_snql, arg2_snql])], alias)
diff --git a/tests/sentry/api/endpoints/test_organization_metric_data.py b/tests/sentry/api/endpoints/test_organization_metric_data.py
index f764c540b53da4..231715a99da1d4 100644
--- a/tests/sentry/api/endpoints/test_organization_metric_data.py
+++ b/tests/sentry/api/endpoints/test_organization_metric_data.py
@@ -907,10 +907,10 @@ def test_crash_free_percentage(self):
interval="1m",
)
group = response.data["groups"][0]
- assert group["totals"]["session.crash_free_rate"] == 50
+ assert group["totals"]["session.crash_free_rate"] == 0.5
assert group["totals"]["session.init"] == 8
assert group["totals"]["session.crashed"] == 4
- assert group["series"]["session.crash_free_rate"] == [None, None, 50, 50, 50, 50]
+ assert group["series"]["session.crash_free_rate"] == [None, None, 0.5, 0.5, 0.5, 0.5]
@freeze_time((timezone.now() - timedelta(days=2)).replace(hour=3, minute=26, second=31))
def test_crash_free_percentage_with_orderby(self):
@@ -943,13 +943,13 @@ def test_crash_free_percentage_with_orderby(self):
)
group = response.data["groups"][0]
assert group["by"]["release"] == "[email protected]"
- assert group["totals"]["session.crash_free_rate"] == 100
- assert group["series"]["session.crash_free_rate"] == [None, None, 100, 100, 100, 100]
+ assert group["totals"]["session.crash_free_rate"] == 1
+ assert group["series"]["session.crash_free_rate"] == [None, None, 1, 1, 1, 1]
group = response.data["groups"][1]
assert group["by"]["release"] == "[email protected]"
- assert group["totals"]["session.crash_free_rate"] == 50
- assert group["series"]["session.crash_free_rate"] == [None, None, 50, 50, 50, 50]
+ assert group["totals"]["session.crash_free_rate"] == 0.5
+ assert group["series"]["session.crash_free_rate"] == [None, None, 0.5, 0.5, 0.5, 0.5]
def test_crash_free_rate_when_no_session_metrics_data_exist(self):
response = self.get_success_response(
diff --git a/tests/sentry/snuba/metrics/test_fields.py b/tests/sentry/snuba/metrics/test_fields.py
index c5cf4bbd40c841..0d0f945db31baa 100644
--- a/tests/sentry/snuba/metrics/test_fields.py
+++ b/tests/sentry/snuba/metrics/test_fields.py
@@ -98,7 +98,6 @@ def test_generate_select_snql_of_derived_metric(self):
percentage(
crashed_sessions(metric_ids=session_ids, alias="session.crashed"),
init_sessions(metric_ids=session_ids, alias="session.init"),
- metric_ids=session_ids,
alias="session.crash_free_rate",
)
]
diff --git a/tests/sentry/snuba/metrics/test_snql.py b/tests/sentry/snuba/metrics/test_snql.py
index aef34474530361..330aa50354560c 100644
--- a/tests/sentry/snuba/metrics/test_snql.py
+++ b/tests/sentry/snuba/metrics/test_snql.py
@@ -64,15 +64,6 @@ def test_percentage_in_snql(self):
init_session_snql = init_sessions(self.metric_ids, "init_sessions")
crashed_session_snql = crashed_sessions(self.metric_ids, "crashed_sessions")
- assert percentage(
- crashed_session_snql, init_session_snql, self.metric_ids, alias=alias
- ) == Function(
- "multiply",
- [
- 100,
- Function(
- "minus", [1, Function("divide", [crashed_session_snql, init_session_snql])]
- ),
- ],
- alias,
+ assert percentage(crashed_session_snql, init_session_snql, alias=alias) == Function(
+ "minus", [1, Function("divide", [crashed_session_snql, init_session_snql])], alias
)
|
bd57decb7ad718f1a11a967cd6d0aed35429330d
|
2023-06-06 02:27:25
|
Evan Purkhiser
|
ref(crons): Rename `monitors` view -> `overview` (#50331)
| false
|
Rename `monitors` view -> `overview` (#50331)
|
ref
|
diff --git a/static/app/routes.tsx b/static/app/routes.tsx
index 7923b7694be795..3990bcfb6ab059 100644
--- a/static/app/routes.tsx
+++ b/static/app/routes.tsx
@@ -1293,7 +1293,7 @@ function buildRoutes() {
const cronsChildRoutes = ({forCustomerDomain}: {forCustomerDomain: boolean}) => {
return (
<Fragment>
- <IndexRoute component={make(() => import('sentry/views/monitors/monitors'))} />
+ <IndexRoute component={make(() => import('sentry/views/monitors/overview'))} />
<Route
path={
forCustomerDomain ? '/crons/create/' : '/organizations/:orgId/crons/create/'
diff --git a/static/app/views/monitors/monitors.tsx b/static/app/views/monitors/overview.tsx
similarity index 100%
rename from static/app/views/monitors/monitors.tsx
rename to static/app/views/monitors/overview.tsx
|